[med-svn] [python-avro] 01/01: Imported Upstream version 1.8.0-rc0

Afif Elghraoui afif-guest at moszumanska.debian.org
Sun Oct 25 00:50:04 UTC 2015


This is an automated email from the git hooks/post-receive script.

afif-guest pushed a commit to annotated tag upstream/1.8.0-rc0
in repository python-avro.

commit 6d86f88d85b33d9da9302d62b0b44cdd99874995
Author: Afif Elghraoui <afif at ghraoui.name>
Date:   Sat Oct 24 15:37:53 2015 -0700

    Imported Upstream version 1.8.0-rc0
---
 .gitignore                                         |     4 +
 BUILD.txt                                          |    18 +-
 CHANGES.txt                                        |   181 +
 README.txt                                         |     2 +-
 build.sh                                           |    40 +-
 doc/src/content/xdocs/gettingstartedpython.xml     |    20 +-
 doc/src/content/xdocs/spec.xml                     |    63 +-
 lang/c++/.gitignore                                |     2 +
 lang/c++/AUTHORS                                   |     4 +
 lang/c++/CMakeLists.txt                            |   171 +
 lang/c++/COPYING                                   |   202 +
 lang/c++/ChangeLog                                 |     1 +
 lang/c++/Doxyfile                                  |  1515 +++
 lang/c++/MainPage.dox                              |   338 +
 lang/c++/NEWS                                      |     5 +
 lang/c++/README                                    |    56 +
 lang/c++/api/AvroParse.hh                          |    86 +
 lang/c++/api/AvroSerialize.hh                      |    67 +
 lang/c++/api/AvroTraits.hh                         |    71 +
 lang/c++/api/Boost.hh                              |   113 +
 lang/c++/api/Compiler.hh                           |    63 +
 lang/c++/api/Config.hh                             |    41 +
 lang/c++/api/DataFile.hh                           |   331 +
 lang/c++/api/Decoder.hh                            |   210 +
 lang/c++/api/Encoder.hh                            |   165 +
 lang/c++/api/Exception.hh                          |    46 +
 lang/c++/api/Generic.hh                            |   141 +
 lang/c++/api/GenericDatum.hh                       |   505 +
 lang/c++/api/Layout.hh                             |    84 +
 lang/c++/api/Node.hh                               |   189 +
 lang/c++/api/NodeConcepts.hh                       |   224 +
 lang/c++/api/NodeImpl.hh                           |   543 +
 lang/c++/api/Parser.hh                             |   149 +
 lang/c++/api/Reader.hh                             |   208 +
 lang/c++/api/Resolver.hh                           |    56 +
 lang/c++/api/ResolverSchema.hh                     |    57 +
 lang/c++/api/ResolvingReader.hh                    |    54 +
 lang/c++/api/Schema.hh                             |   140 +
 lang/c++/api/SchemaResolution.hh                   |    55 +
 lang/c++/api/Serializer.hh                         |   134 +
 lang/c++/api/Specific.hh                           |   312 +
 lang/c++/api/Stream.hh                             |   417 +
 lang/c++/api/Types.hh                              |   114 +
 lang/c++/api/ValidSchema.hh                        |    62 +
 lang/c++/api/Validator.hh                          |   161 +
 lang/c++/api/Writer.hh                             |   185 +
 lang/c++/api/Zigzag.hh                             |    42 +
 lang/c++/api/buffer/Buffer.hh                      |   525 +
 lang/c++/api/buffer/BufferPrint.hh                 |   118 +
 lang/c++/api/buffer/BufferReader.hh                |   288 +
 lang/c++/api/buffer/BufferStream.hh                |   101 +
 lang/c++/api/buffer/BufferStreambuf.hh             |   255 +
 lang/c++/api/buffer/detail/BufferDetail.hh         |   555 +
 lang/c++/api/buffer/detail/BufferDetailIterator.hh |   230 +
 lang/c++/build.sh                                  |   108 +
 lang/c++/config/.gitignore                         |     1 +
 lang/c++/examples/cpx.hh                           |    48 +
 lang/c++/examples/cpx.json                         |     8 +
 lang/c++/examples/custom.cc                        |    59 +
 lang/c++/examples/datafile.cc                      |    62 +
 lang/c++/examples/generated.cc                     |    44 +
 lang/c++/examples/generic.cc                       |    68 +
 lang/c++/examples/imaginary.hh                     |    46 +
 lang/c++/examples/imaginary.json                   |     7 +
 lang/c++/examples/resolving.cc                     |    63 +
 lang/c++/examples/schemaload.cc                    |    32 +
 lang/c++/examples/validating.cc                    |    68 +
 lang/c++/impl/BinaryDecoder.cc                     |   236 +
 lang/c++/impl/BinaryEncoder.cc                     |   164 +
 lang/c++/impl/Compiler.cc                          |   531 +
 lang/c++/impl/DataFile.cc                          |   400 +
 lang/c++/impl/FileStream.cc                        |   355 +
 lang/c++/impl/Generic.cc                           |   264 +
 lang/c++/impl/GenericDatum.cc                      |   101 +
 lang/c++/impl/Node.cc                              |    83 +
 lang/c++/impl/NodeImpl.cc                          |   282 +
 lang/c++/impl/Resolver.cc                          |   870 ++
 lang/c++/impl/ResolverSchema.cc                    |    39 +
 lang/c++/impl/Schema.cc                            |   118 +
 lang/c++/impl/Stream.cc                            |   198 +
 lang/c++/impl/Types.cc                             |    82 +
 lang/c++/impl/ValidSchema.cc                       |   130 +
 lang/c++/impl/Validator.cc                         |   301 +
 lang/c++/impl/Zigzag.cc                            |    84 +
 lang/c++/impl/avrogencpp.cc                        |   828 ++
 lang/c++/impl/json/JsonDom.cc                      |   193 +
 lang/c++/impl/json/JsonDom.hh                      |   154 +
 lang/c++/impl/json/JsonIO.cc                       |   374 +
 lang/c++/impl/json/JsonIO.hh                       |   323 +
 lang/c++/impl/parsing/JsonCodec.cc                 |   696 +
 lang/c++/impl/parsing/ResolvingDecoder.cc          |   744 ++
 lang/c++/impl/parsing/Symbol.cc                    |   111 +
 lang/c++/impl/parsing/Symbol.hh                    |   798 ++
 lang/c++/impl/parsing/ValidatingCodec.cc           |   586 +
 lang/c++/impl/parsing/ValidatingCodec.hh           |    52 +
 lang/c++/jsonschemas/array                         |     1 +
 lang/c++/jsonschemas/bigrecord                     |   107 +
 lang/c++/jsonschemas/bigrecord2                    |   100 +
 lang/c++/jsonschemas/bigrecord_r                   |   166 +
 lang/c++/jsonschemas/circulardep                   |    33 +
 lang/c++/jsonschemas/empty_record                  |     5 +
 lang/c++/jsonschemas/enum                          |    10 +
 lang/c++/jsonschemas/fixed                         |     1 +
 lang/c++/jsonschemas/int                           |     1 +
 lang/c++/jsonschemas/large_schema.avsc             |     1 +
 lang/c++/jsonschemas/map                           |     5 +
 lang/c++/jsonschemas/nested                        |    17 +
 lang/c++/jsonschemas/nested.error                  |    17 +
 lang/c++/jsonschemas/recinrec                      |    18 +
 lang/c++/jsonschemas/record                        |     9 +
 lang/c++/jsonschemas/record2                       |    10 +
 lang/c++/jsonschemas/recursive                     |     9 +
 lang/c++/jsonschemas/reuse                         |    14 +
 lang/c++/jsonschemas/tweet                         |   152 +
 lang/c++/jsonschemas/union                         |     1 +
 lang/c++/jsonschemas/union_array_union             |    14 +
 lang/c++/jsonschemas/union_conflict                |     9 +
 lang/c++/jsonschemas/union_map_union               |     8 +
 lang/c++/jsonschemas/unionwithmap                  |     1 +
 lang/c++/jsonschemas/verboseint                    |     1 +
 lang/c++/m4/README                                 |     3 +
 lang/c++/m4/m4_ax_boost_asio.m4                    |   108 +
 lang/c++/m4/m4_ax_boost_base.m4                    |   219 +
 lang/c++/m4/m4_ax_boost_filesystem.m4              |   115 +
 lang/c++/m4/m4_ax_boost_regex.m4                   |   105 +
 lang/c++/m4/m4_ax_boost_system.m4                  |   117 +
 lang/c++/m4/m4_ax_boost_thread.m4                  |   146 +
 lang/c++/parser/AvroLex.ll                         |   203 +
 lang/c++/parser/AvroYacc.yy                        |   200 +
 lang/c++/scripts/gen-cppcode.py                    |   685 +
 lang/c++/test/AvrogencppTests.cc                   |   271 +
 lang/c++/test/CodecTests.cc                        |  1544 +++
 lang/c++/test/DataFileTests.cc                     |   501 +
 lang/c++/test/JsonTests.cc                         |   213 +
 lang/c++/test/LargeSchemaTests.cc                  |    46 +
 lang/c++/test/SchemaTests.cc                       |   178 +
 lang/c++/test/SpecificTests.cc                     |   200 +
 lang/c++/test/StreamTests.cc                       |   239 +
 lang/c++/test/buffertest.cc                        |  1153 ++
 lang/c++/test/precompile.cc                        |    50 +
 lang/c++/test/testgentest.cc                       |   556 +
 lang/c++/test/testparser.cc                        |    40 +
 lang/c++/test/unittest.cc                          |   794 ++
 lang/c/.gitignore                                  |     3 +
 lang/c/AUTHORS                                     |     4 +
 lang/c/CMakeLists.txt                              |   178 +
 lang/c/COPYING                                     |   202 +
 lang/c/CPackConfig.txt                             |    39 +
 lang/c/ChangeLog                                   |     4 +
 lang/c/FindSnappy.cmake                            |    54 +
 lang/c/INSTALL                                     |    63 +
 lang/c/NEWS                                        |     5 +
 lang/c/README                                      |    10 +
 lang/c/README.maintaining_win32.txt                |   166 +
 lang/c/build.sh                                    |    75 +
 lang/c/cmake_avrolib.bat                           |    48 +
 lang/c/cmake_avrolib.sh                            |    27 +
 lang/c/cmake_pretty.cmake                          |    33 +
 lang/c/docs/CMakeLists.txt                         |    53 +
 lang/c/docs/index.txt                              |   754 ++
 lang/c/examples/.gitignore                         |     2 +
 lang/c/examples/CMakeLists.txt                     |    32 +
 lang/c/examples/quickstop.c                        |   222 +
 lang/c/jansson/.gitignore                          |     1 +
 lang/c/jansson/CHANGES                             |   330 +
 lang/c/jansson/LICENSE                             |    19 +
 lang/c/jansson/Makefile.am                         |    10 +
 lang/c/jansson/Makefile.in                         |   640 +
 lang/c/jansson/README.rst                          |    59 +
 lang/c/jansson/aclocal.m4                          |  8895 +++++++++++++
 lang/c/jansson/config.guess                        |  1500 +++
 lang/c/jansson/config.h.in                         |    75 +
 lang/c/jansson/config.sub                          |  1616 +++
 lang/c/jansson/configure                           | 13010 +++++++++++++++++++
 lang/c/jansson/configure.ac                        |    49 +
 lang/c/jansson/depcomp                             |   584 +
 lang/c/jansson/doc/Makefile.am                     |    20 +
 lang/c/jansson/doc/Makefile.in                     |   352 +
 lang/c/jansson/doc/README                          |     5 +
 lang/c/jansson/doc/apiref.rst                      |  1221 ++
 lang/c/jansson/doc/changes.rst                     |     5 +
 lang/c/jansson/doc/conf.py                         |   217 +
 lang/c/jansson/doc/conformance.rst                 |   112 +
 lang/c/jansson/doc/ext/refcounting.py              |    59 +
 lang/c/jansson/doc/gettingstarted.rst              |   123 +
 lang/c/jansson/doc/github_commits.c                |   171 +
 lang/c/jansson/doc/index.rst                       |    47 +
 lang/c/jansson/doc/tutorial.rst                    |   275 +
 lang/c/jansson/doc/upgrading.rst                   |    76 +
 lang/c/jansson/install-sh                          |   507 +
 lang/c/jansson/jansson.pc.in                       |    10 +
 lang/c/jansson/ltmain.sh                           |  8745 +++++++++++++
 lang/c/jansson/missing                             |   367 +
 lang/c/jansson/src/Makefile.am                     |    27 +
 lang/c/jansson/src/Makefile.in                     |   477 +
 lang/c/jansson/src/dump.c                          |   465 +
 lang/c/jansson/src/error.c                         |    62 +
 lang/c/jansson/src/hashtable.c                     |   372 +
 lang/c/jansson/src/hashtable.h                     |   218 +
 lang/c/jansson/src/jansson.h                       |   257 +
 lang/c/jansson/src/jansson_config.h                |    34 +
 lang/c/jansson/src/jansson_config.h.in             |    34 +
 lang/c/jansson/src/jansson_private.h               |   106 +
 lang/c/jansson/src/load.c                          |   964 ++
 lang/c/jansson/src/memory.c                        |    51 +
 lang/c/jansson/src/pack_unpack.c                   |   610 +
 lang/c/jansson/src/strbuffer.c                     |   104 +
 lang/c/jansson/src/strbuffer.h                     |    42 +
 lang/c/jansson/src/utf.c                           |   190 +
 lang/c/jansson/src/utf.h                           |    50 +
 lang/c/jansson/src/value.c                         |   983 ++
 lang/c/jansson/test/.gitignore                     |    13 +
 lang/c/jansson/test/Makefile.am                    |    10 +
 lang/c/jansson/test/Makefile.in                    |   569 +
 lang/c/jansson/test/bin/Makefile.am                |     6 +
 lang/c/jansson/test/bin/Makefile.in                |   439 +
 lang/c/jansson/test/bin/json_process.c             |   124 +
 lang/c/jansson/test/run-suites                     |    49 +
 lang/c/jansson/test/scripts/run-tests.sh           |    89 +
 lang/c/jansson/test/scripts/valgrind.sh            |    35 +
 lang/c/jansson/test/suites/Makefile.am             |     2 +
 lang/c/jansson/test/suites/Makefile.in             |   487 +
 lang/c/jansson/test/suites/api/Makefile.am         |    32 +
 lang/c/jansson/test/suites/api/Makefile.in         |   553 +
 lang/c/jansson/test/suites/api/run                 |    36 +
 lang/c/jansson/test/suites/api/test_array.c        |   400 +
 lang/c/jansson/test/suites/api/test_copy.c         |   319 +
 lang/c/jansson/test/suites/api/test_dump.c         |   142 +
 lang/c/jansson/test/suites/api/test_equal.c        |   190 +
 lang/c/jansson/test/suites/api/test_load.c         |    60 +
 lang/c/jansson/test/suites/api/test_loadb.c        |    38 +
 lang/c/jansson/test/suites/api/test_memory_funcs.c |    84 +
 lang/c/jansson/test/suites/api/test_number.c       |    44 +
 lang/c/jansson/test/suites/api/test_object.c       |   451 +
 lang/c/jansson/test/suites/api/test_pack.c         |   232 +
 lang/c/jansson/test/suites/api/test_simple.c       |   185 +
 lang/c/jansson/test/suites/api/test_unpack.c       |   341 +
 lang/c/jansson/test/suites/api/util.h              |    55 +
 .../invalid-unicode/encoded-surrogate-half/error   |     2 +
 .../invalid-unicode/encoded-surrogate-half/input   |     1 +
 .../invalid-utf-8-after-backslash/error            |     2 +
 .../invalid-utf-8-after-backslash/input            |     1 +
 .../invalid-unicode/invalid-utf-8-in-array/error   |     2 +
 .../invalid-unicode/invalid-utf-8-in-array/input   |     1 +
 .../invalid-utf-8-in-bigger-int/error              |     2 +
 .../invalid-utf-8-in-bigger-int/input              |     1 +
 .../invalid-unicode/invalid-utf-8-in-escape/error  |     2 +
 .../invalid-unicode/invalid-utf-8-in-escape/input  |     1 +
 .../invalid-utf-8-in-exponent/error                |     2 +
 .../invalid-utf-8-in-exponent/input                |     1 +
 .../invalid-utf-8-in-identifier/error              |     2 +
 .../invalid-utf-8-in-identifier/input              |     1 +
 .../invalid-unicode/invalid-utf-8-in-int/error     |     2 +
 .../invalid-unicode/invalid-utf-8-in-int/input     |     1 +
 .../invalid-utf-8-in-real-after-e/error            |     2 +
 .../invalid-utf-8-in-real-after-e/input            |     1 +
 .../invalid-unicode/invalid-utf-8-in-string/error  |     2 +
 .../invalid-unicode/invalid-utf-8-in-string/input  |     1 +
 .../invalid-unicode/lone-invalid-utf-8/error       |     2 +
 .../invalid-unicode/lone-invalid-utf-8/input       |     1 +
 .../lone-utf-8-continuation-byte/error             |     2 +
 .../lone-utf-8-continuation-byte/input             |     1 +
 .../invalid-unicode/not-in-unicode-range/error     |     2 +
 .../invalid-unicode/not-in-unicode-range/input     |     1 +
 .../invalid-unicode/overlong-3-byte-encoding/error |     2 +
 .../invalid-unicode/overlong-3-byte-encoding/input |     1 +
 .../invalid-unicode/overlong-4-byte-encoding/error |     2 +
 .../invalid-unicode/overlong-4-byte-encoding/input |     1 +
 .../invalid-unicode/overlong-ascii-encoding/error  |     2 +
 .../invalid-unicode/overlong-ascii-encoding/input  |     1 +
 .../suites/invalid-unicode/restricted-utf-8/error  |     2 +
 .../suites/invalid-unicode/restricted-utf-8/input  |     1 +
 lang/c/jansson/test/suites/invalid-unicode/run     |    27 +
 .../suites/invalid-unicode/truncated-utf-8/error   |     2 +
 .../suites/invalid-unicode/truncated-utf-8/input   |     1 +
 .../c/jansson/test/suites/invalid/apostrophe/error |     2 +
 .../c/jansson/test/suites/invalid/apostrophe/input |     1 +
 .../suites/invalid/ascii-unicode-identifier/error  |     2 +
 .../suites/invalid/ascii-unicode-identifier/input  |     1 +
 .../jansson/test/suites/invalid/brace-comma/error  |     2 +
 .../jansson/test/suites/invalid/brace-comma/input  |     1 +
 .../test/suites/invalid/bracket-comma/error        |     2 +
 .../test/suites/invalid/bracket-comma/input        |     1 +
 .../suites/invalid/bracket-one-comma/error.normal  |     2 +
 .../suites/invalid/bracket-one-comma/error.strip   |     2 +
 .../test/suites/invalid/bracket-one-comma/input    |     1 +
 lang/c/jansson/test/suites/invalid/empty/error     |     2 +
 lang/c/jansson/test/suites/invalid/empty/input     |     0
 .../invalid/escaped-null-byte-in-string/error      |     2 +
 .../invalid/escaped-null-byte-in-string/input      |     1 +
 .../test/suites/invalid/extra-comma-in-array/error |     2 +
 .../test/suites/invalid/extra-comma-in-array/input |     1 +
 .../invalid/extra-comma-in-multiline-array/error   |     2 +
 .../invalid/extra-comma-in-multiline-array/input   |     6 +
 .../suites/invalid/garbage-after-newline/error     |     2 +
 .../suites/invalid/garbage-after-newline/input     |     2 +
 .../test/suites/invalid/garbage-at-the-end/error   |     2 +
 .../test/suites/invalid/garbage-at-the-end/input   |     1 +
 .../invalid/integer-starting-with-zero/error       |     2 +
 .../invalid/integer-starting-with-zero/input       |     1 +
 .../test/suites/invalid/invalid-escape/error       |     2 +
 .../test/suites/invalid/invalid-escape/input       |     1 +
 .../test/suites/invalid/invalid-identifier/error   |     2 +
 .../test/suites/invalid/invalid-identifier/input   |     1 +
 .../suites/invalid/invalid-negative-integer/error  |     2 +
 .../suites/invalid/invalid-negative-integer/input  |     1 +
 .../suites/invalid/invalid-negative-real/error     |     2 +
 .../suites/invalid/invalid-negative-real/input     |     1 +
 .../suites/invalid/invalid-second-surrogate/error  |     2 +
 .../suites/invalid/invalid-second-surrogate/input  |     1 +
 .../suites/invalid/lone-open-brace/error.normal    |     2 +
 .../suites/invalid/lone-open-brace/error.strip     |     2 +
 .../test/suites/invalid/lone-open-brace/input      |     1 +
 .../suites/invalid/lone-open-bracket/error.normal  |     2 +
 .../suites/invalid/lone-open-bracket/error.strip   |     2 +
 .../test/suites/invalid/lone-open-bracket/input    |     1 +
 .../suites/invalid/lone-second-surrogate/error     |     2 +
 .../suites/invalid/lone-second-surrogate/input     |     1 +
 .../suites/invalid/minus-sign-without-number/error |     2 +
 .../suites/invalid/minus-sign-without-number/input |     1 +
 .../negative-integer-starting-with-zero/error      |     2 +
 .../negative-integer-starting-with-zero/input      |     1 +
 .../test/suites/invalid/null-byte-in-string/error  |     2 +
 .../test/suites/invalid/null-byte-in-string/input  |   Bin 0 -> 28 bytes
 .../suites/invalid/null-byte-in-string/nostrip     |     2 +
 .../suites/invalid/null-byte-outside-string/error  |     2 +
 .../suites/invalid/null-byte-outside-string/input  |   Bin 0 -> 3 bytes
 .../invalid/null-byte-outside-string/nostrip       |     2 +
 lang/c/jansson/test/suites/invalid/null/error      |     2 +
 lang/c/jansson/test/suites/invalid/null/input      |     1 +
 .../test/suites/invalid/object-apostrophes/error   |     2 +
 .../test/suites/invalid/object-apostrophes/input   |     1 +
 .../suites/invalid/object-garbage-at-end/error     |     2 +
 .../suites/invalid/object-garbage-at-end/input     |     1 +
 .../object-in-unterminated-array/error.normal      |     2 +
 .../object-in-unterminated-array/error.strip       |     2 +
 .../invalid/object-in-unterminated-array/input     |     1 +
 .../suites/invalid/object-no-colon/error.normal    |     2 +
 .../suites/invalid/object-no-colon/error.strip     |     2 +
 .../test/suites/invalid/object-no-colon/input      |     1 +
 .../suites/invalid/object-no-value/error.normal    |     2 +
 .../suites/invalid/object-no-value/error.strip     |     2 +
 .../test/suites/invalid/object-no-value/input      |     1 +
 .../invalid/object-unterminated-value/error.normal |     2 +
 .../invalid/object-unterminated-value/error.strip  |     2 +
 .../suites/invalid/object-unterminated-value/input |     1 +
 .../test/suites/invalid/real-garbage-after-e/error |     2 +
 .../test/suites/invalid/real-garbage-after-e/input |     1 +
 .../suites/invalid/real-negative-overflow/error    |     2 +
 .../suites/invalid/real-negative-overflow/input    |     1 +
 .../suites/invalid/real-positive-overflow/error    |     2 +
 .../suites/invalid/real-positive-overflow/input    |     1 +
 .../test/suites/invalid/real-truncated-at-e/error  |     2 +
 .../test/suites/invalid/real-truncated-at-e/input  |     1 +
 .../suites/invalid/real-truncated-at-point/error   |     2 +
 .../suites/invalid/real-truncated-at-point/input   |     1 +
 lang/c/jansson/test/suites/invalid/run             |    57 +
 .../suites/invalid/tab-character-in-string/error   |     2 +
 .../suites/invalid/tab-character-in-string/input   |     1 +
 .../suites/invalid/too-big-negative-integer/error  |     2 +
 .../suites/invalid/too-big-negative-integer/input  |     1 +
 .../suites/invalid/too-big-positive-integer/error  |     2 +
 .../suites/invalid/too-big-positive-integer/input  |     1 +
 .../invalid/truncated-unicode-surrogate/error      |     2 +
 .../invalid/truncated-unicode-surrogate/input      |     1 +
 .../test/suites/invalid/unicode-identifier/error   |     2 +
 .../test/suites/invalid/unicode-identifier/input   |     1 +
 .../unterminated-array-and-object/error.normal     |     2 +
 .../unterminated-array-and-object/error.strip      |     2 +
 .../invalid/unterminated-array-and-object/input    |     1 +
 .../suites/invalid/unterminated-array/error.normal |     2 +
 .../suites/invalid/unterminated-array/error.strip  |     2 +
 .../test/suites/invalid/unterminated-array/input   |     1 +
 .../invalid/unterminated-empty-key/error.normal    |     2 +
 .../invalid/unterminated-empty-key/error.strip     |     2 +
 .../suites/invalid/unterminated-empty-key/input    |     1 +
 .../suites/invalid/unterminated-key/error.normal   |     2 +
 .../suites/invalid/unterminated-key/error.strip    |     2 +
 .../test/suites/invalid/unterminated-key/input     |     1 +
 .../invalid/unterminated-object-and-array/error    |     2 +
 .../invalid/unterminated-object-and-array/input    |     1 +
 .../invalid/unterminated-string/error.normal       |     2 +
 .../suites/invalid/unterminated-string/error.strip |     2 +
 .../test/suites/invalid/unterminated-string/input  |     1 +
 .../jansson/test/suites/valid/complex-array/input  |     5 +
 .../jansson/test/suites/valid/complex-array/output |     1 +
 lang/c/jansson/test/suites/valid/empty-array/input |     1 +
 .../c/jansson/test/suites/valid/empty-array/output |     1 +
 .../test/suites/valid/empty-object-in-array/input  |     1 +
 .../test/suites/valid/empty-object-in-array/output |     1 +
 .../c/jansson/test/suites/valid/empty-object/input |     1 +
 .../jansson/test/suites/valid/empty-object/output  |     1 +
 .../c/jansson/test/suites/valid/empty-string/input |     1 +
 .../jansson/test/suites/valid/empty-string/output  |     1 +
 .../suites/valid/escaped-utf-control-char/input    |     1 +
 .../suites/valid/escaped-utf-control-char/output   |     1 +
 lang/c/jansson/test/suites/valid/false/input       |     1 +
 lang/c/jansson/test/suites/valid/false/output      |     1 +
 .../c/jansson/test/suites/valid/negative-int/input |     1 +
 .../jansson/test/suites/valid/negative-int/output  |     1 +
 .../c/jansson/test/suites/valid/negative-one/input |     1 +
 .../jansson/test/suites/valid/negative-one/output  |     1 +
 .../jansson/test/suites/valid/negative-zero/input  |     1 +
 .../jansson/test/suites/valid/negative-zero/output |     1 +
 lang/c/jansson/test/suites/valid/null/input        |     1 +
 lang/c/jansson/test/suites/valid/null/output       |     1 +
 .../jansson/test/suites/valid/one-byte-utf-8/input |     1 +
 .../test/suites/valid/one-byte-utf-8/output        |     1 +
 .../valid/real-capital-e-negative-exponent/input   |     1 +
 .../valid/real-capital-e-negative-exponent/output  |     1 +
 .../valid/real-capital-e-positive-exponent/input   |     1 +
 .../valid/real-capital-e-positive-exponent/output  |     1 +
 .../jansson/test/suites/valid/real-capital-e/input |     1 +
 .../test/suites/valid/real-capital-e/output        |     1 +
 .../jansson/test/suites/valid/real-exponent/input  |     1 +
 .../jansson/test/suites/valid/real-exponent/output |     1 +
 .../test/suites/valid/real-fraction-exponent/input |     1 +
 .../suites/valid/real-fraction-exponent/output     |     1 +
 .../test/suites/valid/real-negative-exponent/input |     1 +
 .../suites/valid/real-negative-exponent/output     |     1 +
 .../test/suites/valid/real-positive-exponent/input |     1 +
 .../suites/valid/real-positive-exponent/output     |     1 +
 .../jansson/test/suites/valid/real-underflow/input |     1 +
 .../test/suites/valid/real-underflow/output        |     1 +
 lang/c/jansson/test/suites/valid/run               |    55 +
 .../c/jansson/test/suites/valid/short-string/input |     1 +
 .../jansson/test/suites/valid/short-string/output  |     1 +
 .../test/suites/valid/simple-ascii-string/input    |     1 +
 .../test/suites/valid/simple-ascii-string/output   |     1 +
 .../c/jansson/test/suites/valid/simple-int-0/input |     1 +
 .../jansson/test/suites/valid/simple-int-0/output  |     1 +
 .../c/jansson/test/suites/valid/simple-int-1/input |     1 +
 .../jansson/test/suites/valid/simple-int-1/output  |     1 +
 .../jansson/test/suites/valid/simple-int-123/input |     1 +
 .../test/suites/valid/simple-int-123/output        |     1 +
 .../jansson/test/suites/valid/simple-object/input  |     1 +
 .../jansson/test/suites/valid/simple-object/output |     1 +
 lang/c/jansson/test/suites/valid/simple-real/input |     1 +
 .../c/jansson/test/suites/valid/simple-real/output |     1 +
 .../jansson/test/suites/valid/string-escapes/input |     1 +
 .../test/suites/valid/string-escapes/output        |     1 +
 .../test/suites/valid/three-byte-utf-8/input       |     1 +
 .../test/suites/valid/three-byte-utf-8/output      |     1 +
 lang/c/jansson/test/suites/valid/true/input        |     1 +
 lang/c/jansson/test/suites/valid/true/output       |     1 +
 .../jansson/test/suites/valid/two-byte-utf-8/input |     1 +
 .../test/suites/valid/two-byte-utf-8/output        |     1 +
 .../c/jansson/test/suites/valid/utf-8-string/input |     1 +
 .../jansson/test/suites/valid/utf-8-string/output  |     1 +
 .../valid/utf-surrogate-four-byte-encoding/input   |     1 +
 .../valid/utf-surrogate-four-byte-encoding/output  |     1 +
 lang/c/src/.gitignore                              |     2 +
 lang/c/src/CMakeLists.txt                          |   164 +
 lang/c/src/allocation.c                            |   103 +
 lang/c/src/array.c                                 |   118 +
 lang/c/src/avro-c.pc.in                            |     7 +
 lang/c/src/avro.h                                  |    40 +
 lang/c/src/avro/allocation.h                       |    88 +
 lang/c/src/avro/basics.h                           |    95 +
 lang/c/src/avro/consumer.h                         |   317 +
 lang/c/src/avro/data.h                             |   526 +
 lang/c/src/avro/errors.h                           |    41 +
 lang/c/src/avro/generic.h                          |    88 +
 lang/c/src/avro/io.h                               |   156 +
 lang/c/src/avro/legacy.h                           |   264 +
 lang/c/src/avro/msinttypes.h                       |   315 +
 lang/c/src/avro/msstdint.h                         |   247 +
 lang/c/src/avro/platform.h                         |    45 +
 lang/c/src/avro/refcount.h                         |   305 +
 lang/c/src/avro/resolver.h                         |   130 +
 lang/c/src/avro/schema.h                           |   117 +
 lang/c/src/avro/value.h                            |   498 +
 lang/c/src/avro_generic_internal.h                 |    71 +
 lang/c/src/avro_private.h                          |    99 +
 lang/c/src/avroappend.c                            |   169 +
 lang/c/src/avrocat.c                               |   127 +
 lang/c/src/avromod.c                               |   168 +
 lang/c/src/avropipe.c                              |   432 +
 lang/c/src/codec.c                                 |   614 +
 lang/c/src/codec.h                                 |    53 +
 lang/c/src/consume-binary.c                        |   328 +
 lang/c/src/consumer.c                              |    23 +
 lang/c/src/datafile.c                              |   745 ++
 lang/c/src/datum.c                                 |  1255 ++
 lang/c/src/datum.h                                 |   123 +
 lang/c/src/datum_equal.c                           |   186 +
 lang/c/src/datum_read.c                            |    99 +
 lang/c/src/datum_size.c                            |   292 +
 lang/c/src/datum_skip.c                            |   202 +
 lang/c/src/datum_validate.c                        |   193 +
 lang/c/src/datum_value.c                           |   784 ++
 lang/c/src/datum_write.c                           |    91 +
 lang/c/src/dump.c                                  |    56 +
 lang/c/src/dump.h                                  |    34 +
 lang/c/src/encoding.h                              |   106 +
 lang/c/src/encoding_binary.c                       |   446 +
 lang/c/src/errors.c                                |   138 +
 lang/c/src/generic.c                               |  3697 ++++++
 lang/c/src/io.c                                    |   447 +
 lang/c/src/map.c                                   |   130 +
 lang/c/src/memoize.c                               |   165 +
 lang/c/src/resolved-reader.c                       |  3377 +++++
 lang/c/src/resolved-writer.c                       |  2911 +++++
 lang/c/src/resolver.c                              |  1338 ++
 lang/c/src/schema.c                                |  1702 +++
 lang/c/src/schema.h                                |    85 +
 lang/c/src/schema_equal.c                          |   194 +
 lang/c/src/schema_specific.c                       |   232 +
 lang/c/src/st.c                                    |   543 +
 lang/c/src/st.h                                    |    87 +
 lang/c/src/string.c                                |   304 +
 lang/c/src/value-hash.c                            |   294 +
 lang/c/src/value-json.c                            |   417 +
 lang/c/src/value-read.c                            |   392 +
 lang/c/src/value-sizeof.c                          |   230 +
 lang/c/src/value-write.c                           |   209 +
 lang/c/src/value.c                                 |   690 +
 lang/c/src/wrapped-buffer.c                        |   145 +
 lang/c/tests/.gitignore                            |     9 +
 lang/c/tests/CMakeLists.txt                        |    64 +
 lang/c/tests/avro-1237-bad-union-discriminant.avro |   Bin 0 -> 106 bytes
 lang/c/tests/avro-1237-good.avro                   |   Bin 0 -> 105 bytes
 lang/c/tests/avro-1238-good.avro                   |   Bin 0 -> 105 bytes
 lang/c/tests/avro-1238-truncated.avro              |   Bin 0 -> 98 bytes
 lang/c/tests/avro-1279-codec.avro                  |   Bin 0 -> 673 bytes
 lang/c/tests/avro-1279-no-codec.avro               |   Bin 0 -> 657 bytes
 lang/c/tests/generate_interop_data.c               |   122 +
 lang/c/tests/msdirent.h                            |   372 +
 lang/c/tests/performance.c                         |   848 ++
 .../tests/schema_tests/fail/enum_nonarray_symbols  |     3 +
 lang/c/tests/schema_tests/fail/enum_nonstring_name |     3 +
 lang/c/tests/schema_tests/fail/enum_without_name   |     3 +
 lang/c/tests/schema_tests/fail/fixed_without_name  |     2 +
 lang/c/tests/schema_tests/fail/fixed_without_size  |     2 +
 lang/c/tests/schema_tests/fail/illegal_type        |     1 +
 lang/c/tests/schema_tests/fail/invalid_avro_id     |     3 +
 .../fail/record_with_field_missing_name            |     5 +
 .../fail/record_with_field_missing_type            |     5 +
 .../fail/record_with_invalid_reference             |     7 +
 .../schema_tests/fail/record_with_nonarray_fields  |     3 +
 .../schema_tests/fail/record_with_nonstring_name   |     3 +
 lang/c/tests/schema_tests/pass/array               |     1 +
 lang/c/tests/schema_tests/pass/boolean_full        |     1 +
 lang/c/tests/schema_tests/pass/bytes_full          |     1 +
 lang/c/tests/schema_tests/pass/double_full         |     1 +
 lang/c/tests/schema_tests/pass/enum                |     4 +
 lang/c/tests/schema_tests/pass/extra_attributes    |     1 +
 lang/c/tests/schema_tests/pass/fixed               |     1 +
 lang/c/tests/schema_tests/pass/float_full          |     1 +
 lang/c/tests/schema_tests/pass/int_full            |     1 +
 lang/c/tests/schema_tests/pass/interop.avsc        |    28 +
 lang/c/tests/schema_tests/pass/long_full           |     1 +
 lang/c/tests/schema_tests/pass/map                 |     1 +
 lang/c/tests/schema_tests/pass/namespace_recursive |    28 +
 lang/c/tests/schema_tests/pass/namespace_simple    |     5 +
 lang/c/tests/schema_tests/pass/null_full           |     1 +
 lang/c/tests/schema_tests/pass/record              |     5 +
 .../schema_tests/pass/record_fields_with_defaults  |     6 +
 lang/c/tests/schema_tests/pass/recursive_record    |     7 +
 .../schema_tests/pass/string_extra_attributes      |     1 +
 lang/c/tests/schema_tests/pass/string_full         |     1 +
 lang/c/tests/schema_tests/pass/union               |     1 +
 lang/c/tests/test_avro_1034.c                      |   394 +
 lang/c/tests/test_avro_1084.c                      |    73 +
 lang/c/tests/test_avro_1087.c                      |    86 +
 lang/c/tests/test_avro_1165.c                      |    82 +
 lang/c/tests/test_avro_1237.c                      |   112 +
 lang/c/tests/test_avro_1238.c                      |   125 +
 lang/c/tests/test_avro_1279.c                      |    47 +
 lang/c/tests/test_avro_1379.c                      |   123 +
 lang/c/tests/test_avro_1405.c                      |   160 +
 lang/c/tests/test_avro_968.c                       |    68 +
 lang/c/tests/test_avro_984.c                       |   464 +
 lang/c/tests/test_avro_data.c                      |   684 +
 lang/c/tests/test_avro_schema.c                    |   316 +
 lang/c/tests/test_avro_schema_names.c              |   137 +
 lang/c/tests/test_avro_values.c                    |  1455 +++
 lang/c/tests/test_cpp.cpp                          |    10 +
 lang/c/tests/test_data_structures.c                |   263 +
 lang/c/tests/test_interop_data.c                   |     7 +
 lang/c/tests/test_refcount.c                       |    44 +
 lang/c/tests/test_valgrind                         |    33 +
 lang/c/version.sh                                  |    63 +
 lang/csharp/.gitignore                             |    49 +
 lang/csharp/Avro.dox                               |  1630 +++
 lang/csharp/Avro.nunit                             |    17 +
 lang/csharp/Avro.sln                               |    95 +
 lang/csharp/Avro.snk                               |   Bin 0 -> 596 bytes
 lang/csharp/README                                 |    36 +
 lang/csharp/build.sh                               |    63 +
 lang/csharp/lib/main/Castle.Core.dll               |   Bin 0 -> 430592 bytes
 lang/csharp/lib/main/Newtonsoft.Json.dll           |   Bin 0 -> 319488 bytes
 lang/csharp/lib/main/log4net.dll                   |   Bin 0 -> 270336 bytes
 lang/csharp/lib/test/nunit.framework.dll           |   Bin 0 -> 135168 bytes
 lang/csharp/src/apache/codegen/Avro.codegen.csproj |   162 +
 lang/csharp/src/apache/codegen/AvroGen.cs          |    83 +
 .../src/apache/codegen/Properties/AssemblyInfo.cs  |    33 +
 lang/csharp/src/apache/codegen/app.config          |    19 +
 lang/csharp/src/apache/ipc/Avro.ipc.csproj         |   108 +
 lang/csharp/src/apache/ipc/CallFuture.cs           |   124 +
 lang/csharp/src/apache/ipc/CountdownLatch.cs       |    81 +
 .../src/apache/ipc/Generic/GenericRequestor.cs     |    48 +
 .../src/apache/ipc/Generic/GenericResponder.cs     |    52 +
 lang/csharp/src/apache/ipc/HttpListenerServer.cs   |   105 +
 lang/csharp/src/apache/ipc/HttpTransceiver.cs      |   174 +
 lang/csharp/src/apache/ipc/LocalTransceiver.cs     |    55 +
 lang/csharp/src/apache/ipc/OutputStream.cs         |    62 +
 .../src/apache/ipc/Properties/AssemblyInfo.cs      |    33 +
 lang/csharp/src/apache/ipc/Requestor.cs            |   428 +
 lang/csharp/src/apache/ipc/Responder.cs            |   228 +
 lang/csharp/src/apache/ipc/RpcContext.cs           |   143 +
 lang/csharp/src/apache/ipc/RpcRequest.cs           |   109 +
 lang/csharp/src/apache/ipc/SocketServer.cs         |   235 +
 lang/csharp/src/apache/ipc/SocketTransceiver.cs    |   237 +
 .../src/apache/ipc/Specific/SpecificRequestor.cs   |   109 +
 .../src/apache/ipc/Specific/SpecificResponder.cs   |    83 +
 lang/csharp/src/apache/ipc/Transceiver.cs          |    98 +
 .../ipc/org/apache/avro/ipc/HandshakeMatch.cs      |    22 +
 .../ipc/org/apache/avro/ipc/HandshakeRequest.cs    |    97 +
 .../ipc/org/apache/avro/ipc/HandshakeResponse.cs   |    97 +
 .../src/apache/ipc/org/apache/avro/ipc/MD5.cs      |    39 +
 lang/csharp/src/apache/main/Avro.main.csproj       |   180 +
 .../apache/main/CodeGen/AvroRuntimeException.cs    |    38 +
 lang/csharp/src/apache/main/CodeGen/CodeGen.cs     |   855 ++
 .../src/apache/main/CodeGen/CodeGenException.cs    |    38 +
 lang/csharp/src/apache/main/CodeGen/CodeGenUtil.cs |   105 +
 lang/csharp/src/apache/main/File/Codec.cs          |   115 +
 lang/csharp/src/apache/main/File/DataBlock.cs      |    43 +
 .../src/apache/main/File/DataFileConstants.cs      |    46 +
 lang/csharp/src/apache/main/File/DataFileReader.cs |   425 +
 lang/csharp/src/apache/main/File/DataFileWriter.cs |   315 +
 lang/csharp/src/apache/main/File/DeflateCodec.cs   |    83 +
 lang/csharp/src/apache/main/File/Header.cs         |    40 +
 lang/csharp/src/apache/main/File/IFileReader.cs    |   119 +
 lang/csharp/src/apache/main/File/IFileWriter.cs    |    85 +
 lang/csharp/src/apache/main/File/NullCodec.cs      |    56 +
 lang/csharp/src/apache/main/Generic/DatumReader.cs |    36 +
 lang/csharp/src/apache/main/Generic/DatumWriter.cs |    28 +
 .../src/apache/main/Generic/GenericDatumReader.cs  |   211 +
 .../src/apache/main/Generic/GenericDatumWriter.cs  |   147 +
 lang/csharp/src/apache/main/Generic/GenericEnum.cs |    63 +
 .../csharp/src/apache/main/Generic/GenericFixed.cs |   109 +
 .../src/apache/main/Generic/GenericReader.cs       |   633 +
 .../src/apache/main/Generic/GenericRecord.cs       |   137 +
 .../src/apache/main/Generic/GenericWriter.cs       |   446 +
 .../apache/main/Generic/PreresolvingDatumReader.cs |   596 +
 .../apache/main/Generic/PreresolvingDatumWriter.cs |   375 +
 lang/csharp/src/apache/main/IO/BinaryDecoder.cs    |   299 +
 lang/csharp/src/apache/main/IO/BinaryEncoder.cs    |   195 +
 .../src/apache/main/IO/ByteBufferInputStream.cs    |    76 +
 .../src/apache/main/IO/ByteBufferOutputStream.cs   |   118 +
 lang/csharp/src/apache/main/IO/Decoder.cs          |   188 +
 lang/csharp/src/apache/main/IO/Encoder.cs          |    50 +
 lang/csharp/src/apache/main/IO/ICallback.cs        |    37 +
 lang/csharp/src/apache/main/IO/InputStream.cs      |    66 +
 lang/csharp/src/apache/main/IO/OutputStream.cs     |    62 +
 lang/csharp/src/apache/main/IO/Resolver.cs         |   166 +
 .../src/apache/main/Properties/AssemblyInfo.cs     |    33 +
 .../apache/main/Properties/Settings.Designer.cs    |    44 +
 lang/csharp/src/apache/main/Protocol/Message.cs    |   207 +
 lang/csharp/src/apache/main/Protocol/Protocol.cs   |   294 +
 .../apache/main/Protocol/ProtocolParseException.cs |    36 +
 lang/csharp/src/apache/main/Schema/ArraySchema.cs  |   111 +
 .../csharp/src/apache/main/Schema/AvroException.cs |    36 +
 .../src/apache/main/Schema/AvroTypeException.cs    |    31 +
 lang/csharp/src/apache/main/Schema/EnumSchema.cs   |   206 +
 lang/csharp/src/apache/main/Schema/Field.cs        |   245 +
 lang/csharp/src/apache/main/Schema/FixedSchema.cs  |   120 +
 lang/csharp/src/apache/main/Schema/JsonHelper.cs   |   122 +
 lang/csharp/src/apache/main/Schema/MapSchema.cs    |   117 +
 lang/csharp/src/apache/main/Schema/NamedSchema.cs  |   207 +
 .../src/apache/main/Schema/PrimitiveSchema.cs      |   130 +
 lang/csharp/src/apache/main/Schema/Property.cs     |   124 +
 lang/csharp/src/apache/main/Schema/RecordSchema.cs |   340 +
 lang/csharp/src/apache/main/Schema/Schema.cs       |   306 +
 lang/csharp/src/apache/main/Schema/SchemaName.cs   |   222 +
 .../src/apache/main/Schema/SchemaNormalization.cs  |   256 +
 .../src/apache/main/Schema/SchemaParseException.cs |    31 +
 lang/csharp/src/apache/main/Schema/UnionSchema.cs  |   164 +
 .../csharp/src/apache/main/Schema/UnnamedSchema.cs |    39 +
 .../src/apache/main/Specific/ObjectCreator.cs      |   321 +
 .../apache/main/Specific/SpecificDatumReader.cs    |   232 +
 .../apache/main/Specific/SpecificDatumWriter.cs    |   172 +
 .../src/apache/main/Specific/SpecificException.cs  |    29 +
 .../src/apache/main/Specific/SpecificFixed.cs      |    68 +
 .../src/apache/main/Specific/SpecificProtocol.cs   |    31 +
 .../src/apache/main/Specific/SpecificReader.cs     |   281 +
 .../src/apache/main/Specific/SpecificRecord.cs     |    34 +
 .../src/apache/main/Specific/SpecificWriter.cs     |   216 +
 lang/csharp/src/apache/msbuild/Avro.msbuild.csproj |    74 +
 lang/csharp/src/apache/msbuild/AvroBuilldTask.cs   |    67 +
 .../src/apache/msbuild/Properties/AssemblyInfo.cs  |    33 +
 lang/csharp/src/apache/perf/Avro.perf.csproj       |   143 +
 lang/csharp/src/apache/perf/PerfTest.cs            |   239 +
 lang/csharp/src/apache/perf/com/foo/A.cs           |    56 +
 lang/csharp/src/apache/perf/com/foo/Complex.cs     |   377 +
 lang/csharp/src/apache/perf/com/foo/MyEnum.cs      |    22 +
 lang/csharp/src/apache/perf/com/foo/MyFixed.cs     |    39 +
 lang/csharp/src/apache/perf/com/foo/Narrow.cs      |    85 +
 lang/csharp/src/apache/perf/com/foo/Simple.cs      |   153 +
 lang/csharp/src/apache/perf/com/foo/Wide.cs        |   545 +
 lang/csharp/src/apache/perf/com/foo/newRec.cs      |    56 +
 lang/csharp/src/apache/perf/schema.avsc            |   122 +
 lang/csharp/src/apache/test/Avro.test.csproj       |   168 +
 lang/csharp/src/apache/test/CodGen/CodeGenTest.cs  |   129 +
 lang/csharp/src/apache/test/File/FileTests.cs      |   781 ++
 .../csharp/src/apache/test/Generic/GenericTests.cs |   493 +
 lang/csharp/src/apache/test/IO/BinaryCodecTests.cs |   334 +
 .../Ipc/GeneratedFiles/org/apache/avro/test/All.cs |    95 +
 .../org/apache/avro/test/AllCallback.cs            |    24 +
 .../GeneratedFiles/org/apache/avro/test/AllEnum.cs |    21 +
 .../org/apache/avro/test/AllTestRecord.cs          |   209 +
 .../org/apache/avro/test/AllTestRecordPartial.cs   |    50 +
 .../org/apache/avro/test/FixedTest.cs              |    39 +
 .../GeneratedFiles/org/apache/avro/test/Kind.cs    |    22 +
 .../Ipc/GeneratedFiles/org/apache/avro/test/MD5.cs |    40 +
 .../GeneratedFiles/org/apache/avro/test/Mail.cs    |    87 +
 .../org/apache/avro/test/MailCallback.cs           |    20 +
 .../GeneratedFiles/org/apache/avro/test/Message.cs |    85 +
 .../GeneratedFiles/org/apache/avro/test/Simple.cs  |   103 +
 .../org/apache/avro/test/SimpleCallback.cs         |    30 +
 .../org/apache/avro/test/TestError.cs              |    56 +
 .../org/apache/avro/test/TestRecord.cs             |    83 +
 .../org/apache/avro/test/TestRecordExtensions.cs   |    29 +
 .../org/apache/avro/test/TestRecordWithUnion.cs    |    69 +
 .../src/apache/test/Ipc/HttpClientServerTest.cs    |    90 +
 .../src/apache/test/Ipc/LocalTransceiverTest.cs    |    72 +
 lang/csharp/src/apache/test/Ipc/MailResponder.cs   |   102 +
 .../src/apache/test/Ipc/SerializationTest.cs       |   153 +
 .../Ipc/SocketServerConcurrentExecutionTest.cs     |   146 +
 .../csharp/src/apache/test/Ipc/SocketServerTest.cs |   187 +
 .../test/Ipc/SocketServerWithCallbacksTest.cs      |   806 ++
 .../Ipc/SocketTransceiverWhenServerStopsTest.cs    |   188 +
 .../src/apache/test/Properties/AssemblyInfo.cs     |    33 +
 .../src/apache/test/Protocol/ProtocolTest.cs       |   445 +
 lang/csharp/src/apache/test/Schema/AliasTest.cs    |   239 +
 .../apache/test/Schema/SchemaNormalizationTests.cs |    95 +
 lang/csharp/src/apache/test/Schema/SchemaTests.cs  |   276 +
 .../src/apache/test/Specific/SpecificTests.cs      |   378 +
 lang/csharp/src/apache/test/Utils/CaseFinder.cs    |   134 +
 .../src/apache/test/Utils/CaseFinderTests.cs       |   163 +
 lang/java/.eclipse_launchers/AllTests.launch       |    36 +
 lang/java/.gitignore                               |    19 +
 .../java/archetypes/avro-service-archetype/pom.xml |    52 +
 .../avro-service-archetype/src/main/pom/pom.xml    |   140 +
 .../META-INF/maven/archetype-metadata.xml          |    48 +
 .../src/main/avro/order-service.avpr               |    46 +
 .../src/main/java/service/SimpleOrderService.java  |    45 +
 .../java/transport/SimpleOrderServiceClient.java   |    79 +
 .../java/transport/SimpleOrderServiceEndpoint.java |    67 +
 .../src/main/resources/logback.xml                 |    30 +
 .../SimpleOrderServiceIntegrationTest.java         |    87 +
 .../projects/basic/archetype.properties            |     6 +
 .../src/test/integration/projects/basic/goal.txt   |     1 +
 lang/java/archetypes/pom.xml                       |   121 +
 lang/java/avro/pom.xml                             |   175 +
 .../java/org/apache/avro/AvroRemoteException.java  |    46 +
 .../java/org/apache/avro/AvroRuntimeException.java |    29 +
 .../java/org/apache/avro/AvroTypeException.java    |    29 +
 .../src/main/java/org/apache/avro/Conversion.java  |   189 +
 .../src/main/java/org/apache/avro/Conversions.java |   123 +
 .../main/java/org/apache/avro/JsonProperties.java  |   233 +
 .../src/main/java/org/apache/avro/LogicalType.java |    95 +
 .../main/java/org/apache/avro/LogicalTypes.java    |   364 +
 .../src/main/java/org/apache/avro/Protocol.java    |   541 +
 .../avro/src/main/java/org/apache/avro/Schema.java |  1617 +++
 .../main/java/org/apache/avro/SchemaBuilder.java   |  2598 ++++
 .../org/apache/avro/SchemaBuilderException.java    |    26 +
 .../java/org/apache/avro/SchemaCompatibility.java  |   526 +
 .../java/org/apache/avro/SchemaNormalization.java  |   174 +
 .../java/org/apache/avro/SchemaParseException.java |    26 +
 .../org/apache/avro/SchemaValidationException.java |    39 +
 .../org/apache/avro/SchemaValidationStrategy.java  |    38 +
 .../main/java/org/apache/avro/SchemaValidator.java |    46 +
 .../org/apache/avro/SchemaValidatorBuilder.java    |    77 +
 .../org/apache/avro/UnresolvedUnionException.java  |    39 +
 .../src/main/java/org/apache/avro/ValidateAll.java |    55 +
 .../java/org/apache/avro/ValidateCanBeRead.java    |    42 +
 .../main/java/org/apache/avro/ValidateCanRead.java |    43 +
 .../main/java/org/apache/avro/ValidateLatest.java  |    55 +
 .../java/org/apache/avro/ValidateMutualRead.java   |    74 +
 .../java/org/apache/avro/data/ErrorBuilder.java    |    47 +
 .../src/main/java/org/apache/avro/data/Json.java   |   279 +
 .../java/org/apache/avro/data/RecordBuilder.java   |    30 +
 .../org/apache/avro/data/RecordBuilderBase.java    |   166 +
 .../java/org/apache/avro/data/TimeConversions.java |   146 +
 .../main/java/org/apache/avro/data/package.html    |    22 +
 .../main/java/org/apache/avro/file/BZip2Codec.java |   105 +
 .../src/main/java/org/apache/avro/file/Codec.java  |    48 +
 .../java/org/apache/avro/file/CodecFactory.java    |   121 +
 .../org/apache/avro/file/DataFileConstants.java    |    43 +
 .../java/org/apache/avro/file/DataFileReader.java  |   251 +
 .../org/apache/avro/file/DataFileReader12.java     |   227 +
 .../java/org/apache/avro/file/DataFileStream.java  |   378 +
 .../java/org/apache/avro/file/DataFileWriter.java  |   480 +
 .../java/org/apache/avro/file/DeflateCodec.java    |   145 +
 .../main/java/org/apache/avro/file/FileReader.java |    49 +
 .../apache/avro/file/LengthLimitedInputStream.java |    85 +
 .../main/java/org/apache/avro/file/NullCodec.java  |    64 +
 .../apache/avro/file/SeekableByteArrayInput.java   |    42 +
 .../org/apache/avro/file/SeekableFileInput.java    |    37 +
 .../java/org/apache/avro/file/SeekableInput.java   |    38 +
 .../java/org/apache/avro/file/SnappyCodec.java     |    83 +
 .../main/java/org/apache/avro/file/Syncable.java   |    35 +
 .../apache/avro/file/SyncableFileOutputStream.java |   107 +
 .../main/java/org/apache/avro/file/XZCodec.java    |   122 +
 .../main/java/org/apache/avro/file/package.html    |    23 +
 .../java/org/apache/avro/generic/GenericArray.java |    33 +
 .../org/apache/avro/generic/GenericContainer.java  |    27 +
 .../java/org/apache/avro/generic/GenericData.java  |  1149 ++
 .../apache/avro/generic/GenericDatumReader.java    |   569 +
 .../apache/avro/generic/GenericDatumWriter.java    |   275 +
 .../org/apache/avro/generic/GenericEnumSymbol.java |    25 +
 .../java/org/apache/avro/generic/GenericFixed.java |    24 +
 .../org/apache/avro/generic/GenericRecord.java     |    27 +
 .../apache/avro/generic/GenericRecordBuilder.java  |   260 +
 .../org/apache/avro/generic/IndexedRecord.java     |    30 +
 .../main/java/org/apache/avro/generic/package.html |    43 +
 .../main/java/org/apache/avro/io/BinaryData.java   |   428 +
 .../java/org/apache/avro/io/BinaryDecoder.java     |   992 ++
 .../java/org/apache/avro/io/BinaryEncoder.java     |   127 +
 .../org/apache/avro/io/BlockingBinaryEncoder.java  |   562 +
 .../org/apache/avro/io/BufferedBinaryEncoder.java  |   227 +
 .../main/java/org/apache/avro/io/DatumReader.java  |    38 +
 .../main/java/org/apache/avro/io/DatumWriter.java  |    36 +
 .../src/main/java/org/apache/avro/io/Decoder.java  |   284 +
 .../java/org/apache/avro/io/DecoderFactory.java    |   316 +
 .../org/apache/avro/io/DirectBinaryDecoder.java    |   204 +
 .../org/apache/avro/io/DirectBinaryEncoder.java    |   133 +
 .../src/main/java/org/apache/avro/io/Encoder.java  |   299 +
 .../java/org/apache/avro/io/EncoderFactory.java    |   365 +
 .../main/java/org/apache/avro/io/JsonDecoder.java  |   702 +
 .../main/java/org/apache/avro/io/JsonEncoder.java  |   321 +
 .../java/org/apache/avro/io/ParsingDecoder.java    |    77 +
 .../java/org/apache/avro/io/ParsingEncoder.java    |    70 +
 .../java/org/apache/avro/io/ResolvingDecoder.java  |   328 +
 .../java/org/apache/avro/io/ValidatingDecoder.java |   254 +
 .../java/org/apache/avro/io/ValidatingEncoder.java |   218 +
 .../src/main/java/org/apache/avro/io/package.html  |    31 +
 .../avro/io/parsing/JsonGrammarGenerator.java      |   101 +
 .../java/org/apache/avro/io/parsing/Parser.java    |   192 +
 .../avro/io/parsing/ResolvingGrammarGenerator.java |   544 +
 .../org/apache/avro/io/parsing/SkipParser.java     |    96 +
 .../java/org/apache/avro/io/parsing/Symbol.java    |   627 +
 .../io/parsing/ValidatingGrammarGenerator.java     |   137 +
 .../apache/avro/io/parsing/doc-files/parsing.html  |   615 +
 .../java/org/apache/avro/io/parsing/package.html   |    40 +
 .../src/main/java/org/apache/avro/ipc/package.html |    23 +
 .../src/main/java/org/apache/avro/package.html     |    44 +
 .../org/apache/avro/reflect/ArrayAccessor.java     |   228 +
 .../java/org/apache/avro/reflect/AvroAlias.java    |    37 +
 .../java/org/apache/avro/reflect/AvroDefault.java  |    32 +
 .../java/org/apache/avro/reflect/AvroEncode.java   |    35 +
 .../java/org/apache/avro/reflect/AvroIgnore.java   |    33 +
 .../java/org/apache/avro/reflect/AvroMeta.java     |    34 +
 .../java/org/apache/avro/reflect/AvroName.java     |    34 +
 .../java/org/apache/avro/reflect/AvroSchema.java   |    44 +
 .../org/apache/avro/reflect/CustomEncoding.java    |    53 +
 .../apache/avro/reflect/DateAsLongEncoding.java    |    53 +
 .../java/org/apache/avro/reflect/FieldAccess.java  |    26 +
 .../apache/avro/reflect/FieldAccessReflect.java    |   121 +
 .../org/apache/avro/reflect/FieldAccessUnsafe.java |   365 +
 .../org/apache/avro/reflect/FieldAccessor.java     |    55 +
 .../java/org/apache/avro/reflect/Nullable.java     |    33 +
 .../java/org/apache/avro/reflect/ReflectData.java  |   903 ++
 .../apache/avro/reflect/ReflectDatumReader.java    |   312 +
 .../apache/avro/reflect/ReflectDatumWriter.java    |   184 +
 .../org/apache/avro/reflect/ReflectionUtil.java    |   126 +
 .../java/org/apache/avro/reflect/Stringable.java   |    34 +
 .../main/java/org/apache/avro/reflect/Union.java   |    39 +
 .../main/java/org/apache/avro/reflect/package.html |   101 +
 .../org/apache/avro/specific/AvroGenerated.java    |    32 +
 .../apache/avro/specific/ExternalizableInput.java  |    50 +
 .../apache/avro/specific/ExternalizableOutput.java |    44 +
 .../java/org/apache/avro/specific/FixedSize.java   |    37 +
 .../org/apache/avro/specific/SpecificData.java     |   380 +
 .../apache/avro/specific/SpecificDatumReader.java  |   102 +
 .../apache/avro/specific/SpecificDatumWriter.java  |    73 +
 .../avro/specific/SpecificErrorBuilderBase.java    |   124 +
 .../avro/specific/SpecificExceptionBase.java       |    69 +
 .../org/apache/avro/specific/SpecificFixed.java    |    69 +
 .../org/apache/avro/specific/SpecificRecord.java   |    26 +
 .../apache/avro/specific/SpecificRecordBase.java   |    83 +
 .../avro/specific/SpecificRecordBuilderBase.java   |    53 +
 .../java/org/apache/avro/specific/package.html     |    45 +
 .../apache/avro/util/ByteBufferInputStream.java    |    89 +
 .../apache/avro/util/ByteBufferOutputStream.java   |   107 +
 .../main/java/org/apache/avro/util/ClassUtils.java |   109 +
 .../src/main/java/org/apache/avro/util/Utf8.java   |   175 +
 .../org/apache/avro/util/WeakIdentityHashMap.java  |   169 +
 .../apache/avro/util/internal/JacksonUtils.java    |   153 +
 .../main/java/org/apache/avro/util/package.html    |    23 +
 lang/java/avro/src/main/java/overview.html         |    88 +
 lang/java/avro/src/test/java/NoPackage.java        |    21 +
 .../test/java/org/apache/avro/AvroTestUtil.java    |    54 +
 .../src/test/java/org/apache/avro/BarRecord.java   |    51 +
 .../java/org/apache/avro/FooBarSpecificRecord.java |   375 +
 .../src/test/java/org/apache/avro/FooRecord.java   |    47 +
 .../java/org/apache/avro/GenerateBlockingData.java |    87 +
 .../src/test/java/org/apache/avro/RandomData.java  |   144 +
 .../org/apache/avro/TestCircularReferences.java    |   405 +
 .../test/java/org/apache/avro/TestDataFile.java    |   391 +
 .../java/org/apache/avro/TestDataFileConcat.java   |   184 +
 .../org/apache/avro/TestDataFileCorruption.java    |    97 +
 .../org/apache/avro/TestDataFileCustomSync.java    |    91 +
 .../java/org/apache/avro/TestDataFileDeflate.java  |    64 +
 .../java/org/apache/avro/TestDataFileMeta.java     |    79 +
 .../java/org/apache/avro/TestDataFileReflect.java  |   185 +
 .../test/java/org/apache/avro/TestLogicalType.java |   297 +
 .../test/java/org/apache/avro/TestProtocol.java    |    57 +
 .../src/test/java/org/apache/avro/TestSchema.java  |   103 +
 .../java/org/apache/avro/TestSchemaBuilder.java    |   728 ++
 .../org/apache/avro/TestSchemaCompatibility.java   |   608 +
 .../org/apache/avro/TestSchemaNormalization.java   |   122 +
 .../java/org/apache/avro/TestSchemaValidation.java |   190 +
 .../src/test/java/org/apache/avro/TypeEnum.java    |    13 +
 .../apache/avro/data/RecordBuilderBaseTest.java    |    93 +
 .../org/apache/avro/data/TestTimeConversions.java  |   250 +
 .../java/org/apache/avro/file/TestBZip2Codec.java  |    65 +
 .../java/org/apache/avro/file/TestCustomCodec.java |    58 +
 .../avro/file/TestLengthLimitedInputStream.java    |    58 +
 .../avro/file/TestSeekableByteArrayInput.java      |    86 +
 .../org/apache/avro/file/codec/CustomCodec.java    |    94 +
 .../org/apache/avro/generic/TestGenericData.java   |   481 +
 .../avro/generic/TestGenericDatumWriter.java       |   260 +
 .../avro/generic/TestGenericLogicalTypes.java      |   235 +
 .../avro/generic/TestGenericRecordBuilder.java     |   112 +
 .../org/apache/avro/io/LegacyBinaryEncoder.java    |   210 +
 .../java/org/apache/avro/io/TestBinaryDecoder.java |   463 +
 .../apache/avro/io/TestBinaryEncoderFidelity.java  |   198 +
 .../java/org/apache/avro/io/TestBlockingIO.java    |   477 +
 .../java/org/apache/avro/io/TestBlockingIO2.java   |   162 +
 .../test/java/org/apache/avro/io/TestEncoders.java |   196 +
 .../java/org/apache/avro/io/TestJsonDecoder.java   |    63 +
 .../java/org/apache/avro/io/TestResolvingIO.java   |   216 +
 .../apache/avro/io/TestResolvingIOResolving.java   |   206 +
 .../java/org/apache/avro/io/TestValidatingIO.java  |   860 ++
 .../io/parsing/TestResolvingGrammarGenerator.java  |   152 +
 .../io/parsing/TestResolvingGrammarGenerator2.java |   143 +
 .../org/apache/avro/reflect/TestByteBuffer.java    |   138 +
 .../apache/avro/reflect/TestNonStringMapKeys.java  |   509 +
 .../java/org/apache/avro/reflect/TestReflect.java  |  1051 ++
 .../apache/avro/reflect/TestReflectAllowNulls.java |   154 +
 .../avro/reflect/TestReflectDatumReader.java       |   178 +
 .../avro/reflect/TestReflectLogicalTypes.java      |   829 ++
 .../apache/avro/reflect/TestReflectionUtil.java    |    79 +
 .../org/apache/avro/specific/TestSpecificData.java |   172 +
 .../test/java/org/apache/avro/util/CaseFinder.java |   211 +
 .../java/org/apache/avro/util/TestCaseFinder.java  |   141 +
 .../test/java/org/apache/avro/util/TestUtf8.java   |    51 +
 .../avro/util/internal/TestJacksonUtils.java       |   101 +
 .../avro/src/test/resources/SchemaBuilder.avsc     |   284 +
 lang/java/checkstyle.xml                           |    59 +
 lang/java/compiler/pom.xml                         |   131 +
 .../avro/compiler/specific/ProtocolTask.java       |    95 +
 .../apache/avro/compiler/specific/SchemaTask.java  |    36 +
 .../avro/compiler/specific/SpecificCompiler.java   |   861 ++
 .../javacc/org/apache/avro/compiler/idl/idl.jj     |  1615 +++
 .../specific/templates/java/classic/enum.vm        |    34 +
 .../specific/templates/java/classic/fixed.vm       |    65 +
 .../specific/templates/java/classic/protocol.vm    |    96 +
 .../specific/templates/java/classic/record.vm      |   379 +
 lang/java/compiler/src/test/idl/input/bar.avpr     |     2 +
 .../java/compiler/src/test/idl/input/baseball.avdl |    23 +
 lang/java/compiler/src/test/idl/input/foo.avsc     |     3 +
 lang/java/compiler/src/test/idl/input/import.avdl  |    37 +
 lang/java/compiler/src/test/idl/input/interop.avdl |    50 +
 .../compiler/src/test/idl/input/mr_events.avdl     |    72 +
 .../compiler/src/test/idl/input/namespaces.avdl    |    42 +
 .../compiler/src/test/idl/input/nestedimport.avdl  |    31 +
 lang/java/compiler/src/test/idl/input/player.avsc  |     8 +
 .../java/compiler/src/test/idl/input/position.avsc |     3 +
 .../compiler/src/test/idl/input/reservedwords.avdl |    22 +
 lang/java/compiler/src/test/idl/input/simple.avdl  |    65 +
 lang/java/compiler/src/test/idl/input/unicode.avdl |    29 +
 .../compiler/src/test/idl/output/baseball.avpr     |    31 +
 lang/java/compiler/src/test/idl/output/import.avpr |    94 +
 .../java/compiler/src/test/idl/output/interop.avpr |    94 +
 .../compiler/src/test/idl/output/mr_events.avpr    |   119 +
 .../compiler/src/test/idl/output/namespaces.avpr   |    51 +
 .../compiler/src/test/idl/output/nestedimport.avpr |    53 +
 .../src/test/idl/output/reservedwords.avpr         |    16 +
 lang/java/compiler/src/test/idl/output/simple.avpr |   112 +
 .../java/compiler/src/test/idl/output/unicode.avpr |    18 +
 .../test/idl/putOnClassPath/OnTheClasspath.avdl    |    24 +
 .../test/idl/putOnClassPath/OnTheClasspath.avpr    |    11 +
 .../test/idl/putOnClassPath/OnTheClasspath.avsc    |     6 +
 .../src/test/idl/putOnClassPath/nestedtypes.avdl   |    23 +
 .../apache/avro/compiler/TestSpecificCompiler.java |   202 +
 .../java/org/apache/avro/compiler/idl/TestIdl.java |   183 +
 .../compiler/src/test/resources/simple_record.avsc |     7 +
 lang/java/ipc/pom.xml                              |   153 +
 .../main/java/org/apache/avro/ipc/CallFuture.java  |   162 +
 .../main/java/org/apache/avro/ipc/Callback.java    |    39 +
 .../java/org/apache/avro/ipc/DatagramServer.java   |    80 +
 .../org/apache/avro/ipc/DatagramTransceiver.java   |    85 +
 .../main/java/org/apache/avro/ipc/HttpServer.java  |   110 +
 .../java/org/apache/avro/ipc/HttpTransceiver.java  |   134 +
 .../ipc/src/main/java/org/apache/avro/ipc/Ipc.java |    53 +
 .../java/org/apache/avro/ipc/LocalTransceiver.java |    52 +
 .../main/java/org/apache/avro/ipc/NettyServer.java |   217 +
 .../java/org/apache/avro/ipc/NettyTransceiver.java |   651 +
 .../org/apache/avro/ipc/NettyTransportCodec.java   |   202 +
 .../main/java/org/apache/avro/ipc/RPCContext.java  |   215 +
 .../main/java/org/apache/avro/ipc/RPCPlugin.java   |    76 +
 .../main/java/org/apache/avro/ipc/Requestor.java   |   563 +
 .../main/java/org/apache/avro/ipc/Responder.java   |   260 +
 .../java/org/apache/avro/ipc/ResponderServlet.java |    56 +
 .../java/org/apache/avro/ipc/SaslSocketServer.java |   102 +
 .../org/apache/avro/ipc/SaslSocketTransceiver.java |   403 +
 .../src/main/java/org/apache/avro/ipc/Server.java  |    35 +
 .../java/org/apache/avro/ipc/SocketServer.java     |   145 +
 .../org/apache/avro/ipc/SocketTransceiver.java     |   120 +
 .../main/java/org/apache/avro/ipc/Transceiver.java |   109 +
 .../apache/avro/ipc/generic/GenericRequestor.java  |   105 +
 .../apache/avro/ipc/generic/GenericResponder.java  |    80 +
 .../apache/avro/ipc/reflect/ReflectRequestor.java  |    97 +
 .../apache/avro/ipc/reflect/ReflectResponder.java  |    73 +
 .../avro/ipc/specific/SpecificRequestor.java       |   209 +
 .../avro/ipc/specific/SpecificResponder.java       |   106 +
 .../org/apache/avro/ipc/stats/FloatHistogram.java  |    54 +
 .../java/org/apache/avro/ipc/stats/Histogram.java  |   267 +
 .../apache/avro/ipc/stats/IntegerHistogram.java    |    55 +
 .../org/apache/avro/ipc/stats/StaticServlet.java   |    46 +
 .../org/apache/avro/ipc/stats/StatsPlugin.java     |   226 +
 .../org/apache/avro/ipc/stats/StatsServer.java     |    54 +
 .../org/apache/avro/ipc/stats/StatsServlet.java    |   270 +
 .../java/org/apache/avro/ipc/stats/Stopwatch.java  |    74 +
 .../java/org/apache/avro/ipc/stats/package.html    |    23 +
 .../java/org/apache/avro/ipc/stats/static/avro.css |    21 +
 .../java/org/apache/avro/ipc/stats/static/avro.js  |   110 +
 .../java/org/apache/avro/ipc/stats/static/g.bar.js |   386 +
 .../avro/ipc/stats/static/jquery-1.4.2.min.js      |   154 +
 .../apache/avro/ipc/stats/static/jquery.tipsy.js   |   125 +
 .../apache/avro/ipc/stats/static/protovis-r3.2.js  |   304 +
 .../org/apache/avro/ipc/stats/static/tipsy.css     |    30 +
 .../java/org/apache/avro/ipc/stats/static/tipsy.js |    92 +
 .../apache/avro/ipc/stats/templates/statsview.vm   |    98 +
 .../java/org/apache/avro/DataFileInteropTest.java  |   103 +
 .../java/org/apache/avro/RPCMetaTestPlugin.java    |   186 +
 .../src/test/java/org/apache/avro/RandomData.java  |   142 +
 .../test/java/org/apache/avro/SimpleException.java |    25 +
 .../test/java/org/apache/avro/TestAnnotation.java  |    28 +
 .../test/java/org/apache/avro/TestBulkData.java    |   117 +
 .../src/test/java/org/apache/avro/TestCompare.java |   264 +
 .../java/org/apache/avro/TestDataFileSpecific.java |    77 +
 .../java/org/apache/avro/TestNamespaceReflect.java |    41 +
 .../org/apache/avro/TestNamespaceSpecific.java     |    87 +
 .../java/org/apache/avro/TestProtocolDatagram.java |    48 +
 .../java/org/apache/avro/TestProtocolGeneric.java  |   263 +
 .../org/apache/avro/TestProtocolGenericMeta.java   |    44 +
 .../java/org/apache/avro/TestProtocolHttp.java     |    91 +
 .../java/org/apache/avro/TestProtocolHttps.java    |    70 +
 .../java/org/apache/avro/TestProtocolNetty.java    |    47 +
 .../java/org/apache/avro/TestProtocolParsing.java  |    87 +
 .../java/org/apache/avro/TestProtocolReflect.java  |   161 +
 .../org/apache/avro/TestProtocolReflectMeta.java   |    47 +
 .../java/org/apache/avro/TestProtocolSpecific.java |   351 +
 .../org/apache/avro/TestProtocolSpecificMeta.java  |    48 +
 .../src/test/java/org/apache/avro/TestSchema.java  |  1024 ++
 .../compiler/specific/TestSpecificCompiler.java    |   742 ++
 .../java/org/apache/avro/generic/TestDeepCopy.java |   107 +
 .../ipc/src/test/java/org/apache/avro/io/Perf.java |  1752 +++
 .../ipc/NettyTransceiverWhenFailsToConnect.java    |    82 +
 .../org/apache/avro/ipc/TestLocalTransceiver.java  |    67 +
 .../java/org/apache/avro/ipc/TestNettyServer.java  |   180 +
 .../ipc/TestNettyServerConcurrentExecution.java    |   199 +
 .../avro/ipc/TestNettyServerWithCallbacks.java     |   691 +
 .../avro/ipc/TestNettyServerWithCompression.java   |   109 +
 .../apache/avro/ipc/TestNettyServerWithSSL.java    |   150 +
 .../ipc/TestNettyTransceiverWhenServerStops.java   |   139 +
 .../org/apache/avro/ipc/TestRpcPluginOrdering.java |   100 +
 .../org/apache/avro/ipc/TestSaslAnonymous.java     |    76 +
 .../org/apache/avro/ipc/TestSaslDigestMd5.java     |   172 +
 .../avro/ipc/specific/TestSpecificRequestor.java   |    72 +
 .../java/org/apache/avro/ipc/stats/FakeTicks.java  |    35 +
 .../apache/avro/ipc/stats/StatsPluginOverhead.java |   105 +
 .../org/apache/avro/ipc/stats/TestHistogram.java   |   142 +
 .../avro/ipc/stats/TestStatsPluginAndServlet.java  |   218 +
 .../org/apache/avro/ipc/stats/TestStopwatch.java   |    80 +
 .../avro/specific/TestSpecificBuilderTree.java     |   257 +
 .../org/apache/avro/specific/TestSpecificData.java |   154 +
 .../avro/specific/TestSpecificDatumReader.java     |   105 +
 .../avro/specific/TestSpecificDatumWriter.java     |    55 +
 .../avro/specific/TestSpecificErrorBuilder.java    |    74 +
 .../avro/specific/TestSpecificRecordBuilder.java   |   248 +
 lang/java/ipc/src/test/keystore                    |   Bin 0 -> 1366 bytes
 .../resources/org/apache/avro/ipc/servercert.p12   |   Bin 0 -> 1686 bytes
 lang/java/ipc/src/test/truststore                  |   Bin 0 -> 660 bytes
 lang/java/mapred/pom.xml                           |   205 +
 .../avro/hadoop/file/HadoopCodecFactory.java       |    78 +
 .../avro/hadoop/file/SortedKeyValueFile.java       |   633 +
 .../apache/avro/hadoop/io/AvroDatumConverter.java  |    38 +
 .../avro/hadoop/io/AvroDatumConverterFactory.java  |   368 +
 .../apache/avro/hadoop/io/AvroDeserializer.java    |   138 +
 .../apache/avro/hadoop/io/AvroKeyComparator.java   |    65 +
 .../apache/avro/hadoop/io/AvroKeyDeserializer.java |    66 +
 .../org/apache/avro/hadoop/io/AvroKeyValue.java    |   158 +
 .../apache/avro/hadoop/io/AvroSequenceFile.java    |   735 ++
 .../apache/avro/hadoop/io/AvroSerialization.java   |   266 +
 .../org/apache/avro/hadoop/io/AvroSerializer.java  |   129 +
 .../avro/hadoop/io/AvroValueDeserializer.java      |    66 +
 .../hadoop/util/AvroCharSequenceComparator.java    |    88 +
 .../apache/avro/mapred/AvroAsTextInputFormat.java  |    70 +
 .../apache/avro/mapred/AvroAsTextRecordReader.java |    99 +
 .../java/org/apache/avro/mapred/AvroCollector.java |    28 +
 .../org/apache/avro/mapred/AvroInputFormat.java    |    75 +
 .../main/java/org/apache/avro/mapred/AvroJob.java  |   254 +
 .../main/java/org/apache/avro/mapred/AvroKey.java  |    28 +
 .../org/apache/avro/mapred/AvroKeyComparator.java  |    50 +
 .../java/org/apache/avro/mapred/AvroMapper.java    |    55 +
 .../org/apache/avro/mapred/AvroMultipleInputs.java |   218 +
 .../apache/avro/mapred/AvroMultipleOutputs.java    |   626 +
 .../org/apache/avro/mapred/AvroOutputFormat.java   |   175 +
 .../org/apache/avro/mapred/AvroRecordReader.java   |    85 +
 .../java/org/apache/avro/mapred/AvroReducer.java   |    65 +
 .../org/apache/avro/mapred/AvroSerialization.java  |   140 +
 .../apache/avro/mapred/AvroTextOutputFormat.java   |   134 +
 .../apache/avro/mapred/AvroUtf8InputFormat.java    |   117 +
 .../java/org/apache/avro/mapred/AvroValue.java     |    28 +
 .../java/org/apache/avro/mapred/AvroWrapper.java   |    62 +
 .../apache/avro/mapred/DelegatingInputFormat.java  |   133 +
 .../org/apache/avro/mapred/DelegatingMapper.java   |    66 +
 .../main/java/org/apache/avro/mapred/FsInput.java  |    71 +
 .../org/apache/avro/mapred/HadoopCombiner.java     |    62 +
 .../java/org/apache/avro/mapred/HadoopMapper.java  |    64 +
 .../java/org/apache/avro/mapred/HadoopReducer.java |    60 +
 .../org/apache/avro/mapred/HadoopReducerBase.java  |    67 +
 .../java/org/apache/avro/mapred/MapCollector.java  |    51 +
 .../src/main/java/org/apache/avro/mapred/Pair.java |   533 +
 .../avro/mapred/SequenceFileInputFormat.java       |    43 +
 .../org/apache/avro/mapred/SequenceFileReader.java |   247 +
 .../avro/mapred/SequenceFileRecordReader.java      |    36 +
 .../org/apache/avro/mapred/TaggedInputSplit.java   |   162 +
 .../main/java/org/apache/avro/mapred/package.html  |   123 +
 .../org/apache/avro/mapred/tether/TetherData.java  |    42 +
 .../avro/mapred/tether/TetherInputFormat.java      |    70 +
 .../org/apache/avro/mapred/tether/TetherJob.java   |   171 +
 .../avro/mapred/tether/TetherKeyComparator.java    |    61 +
 .../avro/mapred/tether/TetherKeySerialization.java |    93 +
 .../apache/avro/mapred/tether/TetherMapRunner.java |    95 +
 .../avro/mapred/tether/TetherOutputFormat.java     |    80 +
 .../avro/mapred/tether/TetherOutputService.java    |   113 +
 .../avro/mapred/tether/TetherPartitioner.java      |    59 +
 .../avro/mapred/tether/TetherRecordReader.java     |    84 +
 .../apache/avro/mapred/tether/TetherReducer.java   |    81 +
 .../apache/avro/mapred/tether/TetheredProcess.java |   231 +
 .../org/apache/avro/mapred/tether/package.html     |    32 +
 .../java/org/apache/avro/mapreduce/AvroJob.java    |   208 +
 .../apache/avro/mapreduce/AvroKeyInputFormat.java  |    54 +
 .../apache/avro/mapreduce/AvroKeyOutputFormat.java |   109 +
 .../apache/avro/mapreduce/AvroKeyRecordReader.java |    69 +
 .../apache/avro/mapreduce/AvroKeyRecordWriter.java |    91 +
 .../avro/mapreduce/AvroKeyValueInputFormat.java    |    62 +
 .../avro/mapreduce/AvroKeyValueOutputFormat.java   |    66 +
 .../avro/mapreduce/AvroKeyValueRecordReader.java   |    85 +
 .../avro/mapreduce/AvroKeyValueRecordWriter.java   |   141 +
 .../apache/avro/mapreduce/AvroMultipleOutputs.java |   600 +
 .../avro/mapreduce/AvroOutputFormatBase.java       |   106 +
 .../avro/mapreduce/AvroRecordReaderBase.java       |   185 +
 .../mapreduce/AvroSequenceFileInputFormat.java     |   145 +
 .../mapreduce/AvroSequenceFileOutputFormat.java    |   128 +
 .../java/org/apache/avro/mapreduce/Syncable.java   |    32 +
 lang/java/mapred/src/test/avro/TextStats.avsc      |     9 +
 .../avro/hadoop/file/TestHadoopCodecFactory.java   |    60 +
 .../avro/hadoop/file/TestSortedKeyValueFile.java   |   350 +
 .../hadoop/io/TestAvroDatumConverterFactory.java   |   133 +
 .../avro/hadoop/io/TestAvroKeyDeserializer.java    |    71 +
 .../avro/hadoop/io/TestAvroSequenceFile.java       |   210 +
 .../avro/hadoop/io/TestAvroSerialization.java      |   178 +
 .../apache/avro/hadoop/io/TestAvroSerializer.java  |    67 +
 .../avro/hadoop/io/TestAvroValueDeserializer.java  |    71 +
 .../util/TestAvroCharSequenceComparator.java       |    90 +
 .../apache/avro/mapred/TestAvroInputFormat.java    |    83 +
 .../apache/avro/mapred/TestAvroMultipleInputs.java |   296 +
 .../avro/mapred/TestAvroMultipleOutputs.java       |   345 +
 .../apache/avro/mapred/TestAvroOutputFormat.java   |   142 +
 .../avro/mapred/TestAvroTextOutputFormat.java      |    90 +
 .../org/apache/avro/mapred/TestAvroTextSort.java   |    56 +
 .../org/apache/avro/mapred/TestAvroWrapper.java    |    32 +
 .../org/apache/avro/mapred/TestGenericJob.java     |   153 +
 .../test/java/org/apache/avro/mapred/TestPair.java |    40 +
 .../org/apache/avro/mapred/TestReflectJob.java     |   159 +
 .../apache/avro/mapred/TestSequenceFileReader.java |   250 +
 .../java/org/apache/avro/mapred/TestWeather.java   |   191 +
 .../java/org/apache/avro/mapred/TestWordCount.java |   147 +
 .../java/org/apache/avro/mapred/WordCountUtil.java |   176 +
 .../avro/mapred/tether/TestWordCountTether.java    |   129 +
 .../org/apache/avro/mapred/tether/TetherTask.java  |   238 +
 .../avro/mapred/tether/TetherTaskRunner.java       |   135 +
 .../org/apache/avro/mapred/tether/WordCount.avsc   |     6 +
 .../apache/avro/mapred/tether/WordCountTask.java   |    62 +
 .../java/org/apache/avro/mapreduce/AvroFiles.java  |    56 +
 .../avro/mapreduce/TestAvroKeyInputFormat.java     |    65 +
 .../avro/mapreduce/TestAvroKeyOutputFormat.java    |   163 +
 .../avro/mapreduce/TestAvroKeyRecordReader.java    |   134 +
 .../avro/mapreduce/TestAvroKeyRecordWriter.java    |   124 +
 .../mapreduce/TestAvroKeyValueRecordReader.java    |   149 +
 .../mapreduce/TestAvroKeyValueRecordWriter.java    |   235 +
 .../avro/mapreduce/TestAvroMultipleOutputs.java    |   418 +
 .../mapreduce/TestAvroMultipleOutputsSyncable.java |   418 +
 .../org/apache/avro/mapreduce/TestFsInput.java     |   137 +
 .../apache/avro/mapreduce/TestKeyValueInput.java   |   254 +
 .../avro/mapreduce/TestKeyValueWordCount.java      |   134 +
 .../org/apache/avro/mapreduce/TestWordCount.java   |   521 +
 .../mapred/src/test/resources/log4j.properties     |     8 +
 .../mapreduce-test-input.avro/._SUCCESS.crc        |   Bin 0 -> 8 bytes
 .../.part-r-00000.avro.crc                         |   Bin 0 -> 12 bytes
 .../mapreduce/mapreduce-test-input.avro/_SUCCESS   |     0
 .../mapreduce-test-input.avro/part-r-00000.avro    |   Bin 0 -> 251 bytes
 .../apache/avro/mapreduce/mapreduce-test-input.txt |     6 +
 lang/java/maven-plugin/pom.xml                     |   105 +
 .../org/apache/avro/mojo/AbstractAvroMojo.java     |   239 +
 .../java/org/apache/avro/mojo/IDLProtocolMojo.java |   114 +
 .../java/org/apache/avro/mojo/ProtocolMojo.java    |    76 +
 .../main/java/org/apache/avro/mojo/SchemaMojo.java |    94 +
 .../src/test/avro/AvdlClasspathImport.avdl         |    26 +
 lang/java/maven-plugin/src/test/avro/User.avdl     |    31 +
 lang/java/maven-plugin/src/test/avro/User.avpr     |    34 +
 lang/java/maven-plugin/src/test/avro/User.avsc     |    38 +
 .../avro/directImport/PrivacyDirectImport.avsc     |     7 +
 .../src/test/avro/imports/PrivacyImport.avsc       |     7 +
 .../org/apache/avro/mojo/AbstractAvroMojoTest.java |    60 +
 .../org/apache/avro/mojo/TestIDLProtocolMojo.java  |    44 +
 .../org/apache/avro/mojo/TestProtocolMojo.java     |    44 +
 .../java/org/apache/avro/mojo/TestSchemaMojo.java  |    44 +
 .../src/test/resources/unit/idl/pom.xml            |    65 +
 .../src/test/resources/unit/protocol/pom.xml       |    65 +
 .../src/test/resources/unit/schema/pom.xml         |    64 +
 lang/java/pom.xml                                  |   503 +
 lang/java/protobuf/pom.xml                         |    99 +
 .../org/apache/avro/protobuf/ProtobufData.java     |   348 +
 .../apache/avro/protobuf/ProtobufDatumReader.java  |    84 +
 .../apache/avro/protobuf/ProtobufDatumWriter.java  |    69 +
 .../java/org/apache/avro/protobuf/package.html     |    42 +
 .../test/java/org/apache/avro/protobuf/Test.java   |  3316 +++++
 .../org/apache/avro/protobuf/TestProtobuf.java     |    92 +
 lang/java/protobuf/src/test/protobuf/test.proto    |    62 +
 lang/java/thrift/README                            |     3 +
 lang/java/thrift/pom.xml                           |   100 +
 .../java/org/apache/avro/thrift/ThriftData.java    |   270 +
 .../org/apache/avro/thrift/ThriftDatumReader.java  |    92 +
 .../org/apache/avro/thrift/ThriftDatumWriter.java  |    58 +
 .../main/java/org/apache/avro/thrift/package.html  |    46 +
 .../java/org/apache/avro/thrift/TestThrift.java    |    96 +
 .../test/java/org/apache/avro/thrift/test/E.java   |    48 +
 .../java/org/apache/avro/thrift/test/Error.java    |   385 +
 .../test/java/org/apache/avro/thrift/test/Foo.java |  1996 +++
 .../java/org/apache/avro/thrift/test/FooOrBar.java |   356 +
 .../java/org/apache/avro/thrift/test/Nested.java   |   383 +
 .../java/org/apache/avro/thrift/test/Test.java     |  2011 +++
 lang/java/thrift/src/test/thrift/test.thrift       |    68 +
 lang/java/tools/pom.xml                            |   185 +
 .../apache/avro/tool/BinaryFragmentToJsonTool.java |   103 +
 .../main/java/org/apache/avro/tool/CatTool.java    |   216 +
 .../main/java/org/apache/avro/tool/ConcatTool.java |   152 +
 .../org/apache/avro/tool/CreateRandomFileTool.java |    97 +
 .../org/apache/avro/tool/DataFileGetMetaTool.java  |    94 +
 .../apache/avro/tool/DataFileGetSchemaTool.java    |    53 +
 .../org/apache/avro/tool/DataFileReadTool.java     |    93 +
 .../org/apache/avro/tool/DataFileRepairTool.java   |   308 +
 .../org/apache/avro/tool/DataFileWriteTool.java    |   112 +
 .../java/org/apache/avro/tool/FromTextTool.java    |   119 +
 .../org/apache/avro/tool/IdlToSchemataTool.java    |    90 +
 .../main/java/org/apache/avro/tool/IdlTool.java    |    79 +
 .../org/apache/avro/tool/InduceSchemaTool.java     |    76 +
 .../apache/avro/tool/JsonToBinaryFragmentTool.java |   104 +
 .../src/main/java/org/apache/avro/tool/Main.java   |   109 +
 .../java/org/apache/avro/tool/RecodecTool.java     |   104 +
 .../java/org/apache/avro/tool/RpcProtocolTool.java |   111 +
 .../java/org/apache/avro/tool/RpcReceiveTool.java  |   174 +
 .../java/org/apache/avro/tool/RpcSendTool.java     |   118 +
 .../org/apache/avro/tool/SpecificCompilerTool.java |   170 +
 .../main/java/org/apache/avro/tool/TetherTool.java |   178 +
 .../main/java/org/apache/avro/tool/ToTextTool.java |    88 +
 .../java/org/apache/avro/tool/ToTrevniTool.java    |    83 +
 .../src/main/java/org/apache/avro/tool/Tool.java   |    51 +
 .../apache/avro/tool/TrevniCreateRandomTool.java   |    67 +
 .../org/apache/avro/tool/TrevniMetadataTool.java   |   115 +
 .../org/apache/avro/tool/TrevniToJsonTool.java     |   181 +
 .../main/java/org/apache/avro/tool/TrevniUtil.java |    83 +
 .../src/main/java/org/apache/avro/tool/Util.java   |   262 +
 .../main/java/org/apache/avro/tool/package.html    |    23 +
 .../java/tools/src/test/compiler/input/player.avsc |     8 +
 .../tools/src/test/compiler/input/position.avsc    |     3 +
 .../avro/examples/baseball/Player.java             |   404 +
 .../avro/examples/baseball/Position.java           |    13 +
 .../tools/src/test/compiler/output/Player.java     |   404 +
 .../tools/src/test/compiler/output/Position.java   |    13 +
 lang/java/tools/src/test/idl/protocol.avdl         |    58 +
 .../java/org/apache/avro/tool/TestCatTool.java     |   347 +
 .../java/org/apache/avro/tool/TestConcatTool.java  |   225 +
 .../apache/avro/tool/TestCreateRandomFileTool.java |   109 +
 .../apache/avro/tool/TestDataFileRepairTool.java   |   207 +
 .../org/apache/avro/tool/TestDataFileTools.java    |   253 +
 .../apache/avro/tool/TestIdlToSchemataTool.java    |    41 +
 .../tool/TestJsonToFromBinaryFragmentTools.java    |   129 +
 .../test/java/org/apache/avro/tool/TestMain.java   |    35 +
 .../java/org/apache/avro/tool/TestRecodecTool.java |   109 +
 .../org/apache/avro/tool/TestRpcProtocolTool.java  |    96 +
 .../avro/tool/TestRpcReceiveAndSendTools.java      |    58 +
 .../apache/avro/tool/TestSpecificCompilerTool.java |   163 +
 .../java/org/apache/avro/tool/TestTetherTool.java  |   126 +
 .../org/apache/avro/tool/TestTextFileTools.java    |   145 +
 .../org/apache/avro/tool/TestToTrevniTool.java     |    75 +
 lang/java/trevni/avro/pom.xml                      |   144 +
 .../org/apache/trevni/avro/AvroColumnReader.java   |   276 +
 .../org/apache/trevni/avro/AvroColumnWriter.java   |   173 +
 .../org/apache/trevni/avro/AvroColumnator.java     |   168 +
 .../apache/trevni/avro/AvroTrevniInputFormat.java  |   105 +
 .../apache/trevni/avro/AvroTrevniOutputFormat.java |   119 +
 .../java/org/apache/trevni/avro/HadoopInput.java   |    51 +
 .../avro/mapreduce/AvroTrevniKeyInputFormat.java   |    57 +
 .../avro/mapreduce/AvroTrevniKeyOutputFormat.java  |    56 +
 .../avro/mapreduce/AvroTrevniKeyRecordReader.java  |    59 +
 .../avro/mapreduce/AvroTrevniKeyRecordWriter.java  |    62 +
 .../mapreduce/AvroTrevniKeyValueInputFormat.java   |    64 +
 .../mapreduce/AvroTrevniKeyValueOutputFormat.java  |    64 +
 .../mapreduce/AvroTrevniKeyValueRecordReader.java  |    68 +
 .../mapreduce/AvroTrevniKeyValueRecordWriter.java  |   101 +
 .../avro/mapreduce/AvroTrevniRecordReaderBase.java |   104 +
 .../avro/mapreduce/AvroTrevniRecordWriterBase.java |   136 +
 .../main/java/org/apache/trevni/avro/package.html  |    38 +
 .../trevni/avro/src/test/cases/dremel/input.avsc   |    78 +
 .../trevni/avro/src/test/cases/dremel/input.json   |    73 +
 .../avro/src/test/cases/dremel/sub1/sub.avsc       |    41 +
 .../avro/src/test/cases/dremel/sub1/sub.json       |    37 +
 .../trevni/avro/src/test/cases/enum/input.avsc     |    17 +
 .../trevni/avro/src/test/cases/enum/input.json     |     4 +
 .../java/org/apache/trevni/avro/RandomData.java    |   129 +
 .../java/org/apache/trevni/avro/TestCases.java     |   102 +
 .../org/apache/trevni/avro/TestEvolvedSchema.java  |   128 +
 .../apache/trevni/avro/TestMetadataFiltering.java  |    50 +
 .../java/org/apache/trevni/avro/TestShredder.java  |   276 +
 .../java/org/apache/trevni/avro/TestWordCount.java |   147 +
 .../java/org/apache/trevni/avro/WordCountUtil.java |   135 +
 .../avro/mapreduce/TestKeyValueWordCount.java      |   159 +
 .../trevni/avro/mapreduce/TestKeyWordCount.java    |   178 +
 lang/java/trevni/checkstyle.xml                    |    59 +
 lang/java/trevni/core/pom.xml                      |    60 +
 .../org/apache/trevni/ArrayColumnOutputBuffer.java |    81 +
 .../main/java/org/apache/trevni/BZip2Codec.java    |    77 +
 .../java/org/apache/trevni/BlockDescriptor.java    |    49 +
 .../src/main/java/org/apache/trevni/Checksum.java  |    41 +
 .../src/main/java/org/apache/trevni/Codec.java     |    46 +
 .../java/org/apache/trevni/ColumnDescriptor.java   |    96 +
 .../java/org/apache/trevni/ColumnFileMetaData.java |    31 +
 .../java/org/apache/trevni/ColumnFileReader.java   |   152 +
 .../java/org/apache/trevni/ColumnFileWriter.java   |   173 +
 .../java/org/apache/trevni/ColumnMetaData.java     |   122 +
 .../java/org/apache/trevni/ColumnOutputBuffer.java |   120 +
 .../main/java/org/apache/trevni/ColumnValues.java  |   150 +
 .../main/java/org/apache/trevni/Crc32Checksum.java |    39 +
 .../main/java/org/apache/trevni/DeflateCodec.java  |    76 +
 .../src/main/java/org/apache/trevni/Input.java     |    31 +
 .../main/java/org/apache/trevni/InputBuffer.java   |   379 +
 .../main/java/org/apache/trevni/InputBytes.java    |    46 +
 .../src/main/java/org/apache/trevni/InputFile.java |    49 +
 .../src/main/java/org/apache/trevni/MetaData.java  |   145 +
 .../main/java/org/apache/trevni/NullChecksum.java  |    31 +
 .../src/main/java/org/apache/trevni/NullCodec.java |    34 +
 .../main/java/org/apache/trevni/OutputBuffer.java  |   317 +
 .../main/java/org/apache/trevni/SnappyCodec.java   |    45 +
 .../org/apache/trevni/TrevniRuntimeException.java  |    29 +
 .../src/main/java/org/apache/trevni/ValueType.java |    34 +
 .../src/main/java/org/apache/trevni/package.html   |    23 +
 lang/java/trevni/core/src/main/java/overview.html  |    88 +
 .../java/org/apache/trevni/TestBZip2Codec.java     |    69 +
 .../java/org/apache/trevni/TestColumnFile.java     |   266 +
 .../test/java/org/apache/trevni/TestIOBuffers.java |   267 +
 .../java/org/apache/trevni/TestInputBytes.java     |    50 +
 .../src/test/java/org/apache/trevni/TestUtil.java  |   110 +
 lang/java/trevni/doc/apt/spec.apt                  |   471 +
 lang/java/trevni/doc/pom.xml                       |    54 +
 lang/java/trevni/doc/resources/css/site.css        |    31 +
 lang/java/trevni/doc/site.xml                      |    29 +
 lang/java/trevni/pom.xml                           |   101 +
 lang/js/Gruntfile.js                               |    57 +
 lang/js/README                                     |    20 +
 lang/js/build.sh                                   |    40 +
 lang/js/lib/validator.js                           |   448 +
 lang/js/package.json                               |    37 +
 lang/js/test/validator.js                          |   525 +
 lang/perl/.gitignore                               |    10 +
 lang/perl/.shipit                                  |     2 +
 lang/perl/Changes                                  |     7 +
 lang/perl/MANIFEST                                 |    32 +
 lang/perl/MANIFEST.SKIP                            |    16 +
 lang/perl/Makefile.PL                              |    43 +
 lang/perl/NOTICE.txt                               |     1 +
 lang/perl/README                                   |    24 +
 lang/perl/bin/avro-to-json                         |    37 +
 lang/perl/lib/Avro.pm                              |    61 +
 lang/perl/lib/Avro/BinaryDecoder.pm                |   391 +
 lang/perl/lib/Avro/BinaryEncoder.pm                |   295 +
 lang/perl/lib/Avro/DataFile.pm                     |    47 +
 lang/perl/lib/Avro/DataFileReader.pm               |   294 +
 lang/perl/lib/Avro/DataFileWriter.pm               |   210 +
 lang/perl/lib/Avro/Protocol.pm                     |   114 +
 lang/perl/lib/Avro/Protocol/Message.pm             |    64 +
 lang/perl/lib/Avro/Schema.pm                       |   838 ++
 lang/perl/t/00_compile.t                           |    23 +
 lang/perl/t/01_names.t                             |   167 +
 lang/perl/t/01_schema.t                            |   472 +
 lang/perl/t/02_bin_encode.t                        |   156 +
 lang/perl/t/03_bin_decode.t                        |   251 +
 lang/perl/t/04_datafile.t                          |   122 +
 lang/perl/t/05_protocol.t                          |    76 +
 lang/perl/xt/pod.t                                 |    21 +
 lang/php/README.txt                                |    33 +
 lang/php/build.sh                                  |    73 +
 lang/php/examples/write_read.php                   |    94 +
 lang/php/lib/avro.php                              |   195 +
 lang/php/lib/avro/data_file.php                    |   535 +
 lang/php/lib/avro/datum.php                        |   984 ++
 lang/php/lib/avro/debug.php                        |   194 +
 lang/php/lib/avro/gmp.php                          |   222 +
 lang/php/lib/avro/io.php                           |   494 +
 lang/php/lib/avro/protocol.php                     |    86 +
 lang/php/lib/avro/schema.php                       |  1457 +++
 lang/php/lib/avro/util.php                         |    67 +
 lang/php/test/AllTests.php                         |    47 +
 lang/php/test/DataFileTest.php                     |   270 +
 lang/php/test/DatumIOTest.php                      |   144 +
 lang/php/test/FloatIntEncodingTest.php             |   289 +
 lang/php/test/IODatumReaderTest.php                |    36 +
 lang/php/test/InterOpTest.php                      |    75 +
 lang/php/test/LongEncodingTest.php                 |   315 +
 lang/php/test/NameTest.php                         |   106 +
 lang/php/test/ProtocolFileTest.php                 |   353 +
 lang/php/test/SchemaTest.php                       |   463 +
 lang/php/test/StringIOTest.php                     |    72 +
 lang/php/test/generate_interop_data.php            |    46 +
 lang/php/test/test_helper.php                      |    42 +
 lang/py/build.xml                                  |    52 +-
 lang/py/ivy.xml                                    |    24 +
 lang/py/ivysettings.xml                            |    30 +
 lang/py/src/avro/schema.py                         |     6 +-
 lang/py/src/avro/tether/__init__.py                |     7 +
 lang/py/src/avro/tether/tether_task.py             |   498 +
 lang/py/src/avro/tether/tether_task_runner.py      |   227 +
 lang/py/src/avro/tether/util.py                    |    34 +
 lang/py/test/mock_tether_parent.py                 |    95 +
 lang/py/test/set_avro_test_path.py                 |    40 +
 lang/py/test/test_datafile.py                      |     3 +
 lang/py/test/test_datafile_interop.py              |     3 +
 lang/py/test/test_io.py                            |     3 +
 lang/py/test/test_ipc.py                           |     2 +
 lang/py/test/test_schema.py                        |     6 +
 lang/py/test/test_tether_task.py                   |   116 +
 lang/py/test/test_tether_task_runner.py            |   191 +
 lang/py/test/test_tether_word_count.py             |   213 +
 lang/py/test/word_count_task.py                    |    96 +
 lang/py3/avro/schema.py                            |     8 +-
 lang/py3/avro/tests/run_tests.py                   |     1 +
 .../avro/tests/test_enum.py}                       |    40 +-
 lang/py3/avro/tests/test_schema.py                 |     5 +
 lang/py3/setup.py                                  |     5 +-
 lang/ruby/.gitignore                               |     1 +
 lang/ruby/CHANGELOG                                |     1 +
 lang/ruby/Gemfile                                  |    20 +
 lang/ruby/Manifest                                 |    22 +
 lang/ruby/Rakefile                                 |    63 +
 lang/ruby/build.sh                                 |    50 +
 .../interop/test_interop.rb}                       |    44 +-
 .../test_datafile_interop.py => ruby/lib/avro.rb}  |    48 +-
 lang/ruby/lib/avro/data_file.rb                    |   366 +
 lang/ruby/lib/avro/io.rb                           |   615 +
 lang/ruby/lib/avro/ipc.rb                          |   550 +
 lang/ruby/lib/avro/protocol.rb                     |   161 +
 lang/ruby/lib/avro/schema.rb                       |   413 +
 lang/ruby/test/random_data.rb                      |    90 +
 lang/ruby/test/sample_ipc_client.rb                |    85 +
 lang/ruby/test/sample_ipc_http_client.rb           |    84 +
 lang/ruby/test/sample_ipc_http_server.rb           |    79 +
 lang/ruby/test/sample_ipc_server.rb                |    92 +
 lang/ruby/test/test_datafile.rb                    |   214 +
 .../test/test_help.rb}                             |    30 +-
 lang/ruby/test/test_io.rb                          |   406 +
 lang/ruby/test/test_protocol.rb                    |   199 +
 lang/ruby/test/test_schema.rb                      |   146 +
 .../test/test_socket_transport.rb}                 |    42 +-
 lang/ruby/test/tool.rb                             |   144 +
 pom.xml                                            |     8 +-
 share/VERSION.txt                                  |     2 +-
 share/docker/Dockerfile                            |    58 +
 share/rat-excludes.txt                             |     1 +
 .../org/apache/avro/ipc/trace/avroTrace.avdl       |    68 -
 .../org/apache/avro/ipc/trace/avroTrace.avpr       |    82 -
 share/test/schemas/http.avdl                       |    66 +
 share/test/schemas/reserved.avsc                   |     2 +
 share/test/schemas/specialtypes.avdl               |    98 +
 1492 files changed, 260957 insertions(+), 284 deletions(-)

diff --git a/.gitignore b/.gitignore
index 8c6b133..372789a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,7 @@
+*.iml
+*.ipr
+*.iws
+.idea/
 .project
 .settings
 .classpath
diff --git a/BUILD.txt b/BUILD.txt
index a59c80c..7c3eea7 100644
--- a/BUILD.txt
+++ b/BUILD.txt
@@ -21,9 +21,25 @@ The following packages must be installed before Avro can be built:
  - Apache Forrest 0.8 (for documentation)
  - md5sum, sha1sum, used by top-level dist target
 
+To simplify this, you can run a Docker container with all of the above
+dependencies installed: install docker.io and type
+
+ ./build.sh docker
+
+When this completes you will be in a shell running inside the
+container. Building the image the first time may take a while (20
+minutes or more) since dependencies must be downloaded and
+installed, but subsequent invocations are much faster because the
+cached image is reused.
+
+The working directory in the container is mounted from your host, so
+you can access the files in your Avro development tree from within the
+Docker container.
+
 BUILDING
 
-Once the requirements are installed, build.sh can be used as follows:
+Once the requirements are installed (or from the Docker container),
+build.sh can be used as follows:
 
  './build.sh test' runs tests for all languages
  './build.sh dist' creates all release distribution files in dist/
diff --git a/CHANGES.txt b/CHANGES.txt
index 188ec44..afedefb 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,5 +1,186 @@
 Avro Change Log
 
+Avro 1.8.0 (10 August 2014)
+
+  INCOMPATIBLE CHANGES
+
+    AVRO-1334. Java: Update versions of many dependencies. (scottcarey, cutting)
+
+    AVRO-997. Java: For enum values, no longer sometimes permit any
+    Object whose toString() names an enum symbol, but rather always
+    require use of distinct enum types. (Sean Busbey via cutting)
+
+    AVRO-1602. Java: Remove Dapper-style RPC trace facility.  This
+    seems unused and has been a source of build problems.  (cutting)
+
+    AVRO-1586. Build against Hadoop 2. With this change the avro-mapred and
+    trevni-avro JARs without a hadoop1 or hadoop2 Maven classifier are Hadoop 2
+    artifacts. To use with Hadoop 1, set the classifier to hadoop1.
+    (tomwhite)
+
+    AVRO-1502. Java: Generated classes now implement Serializable.
+    Generated classes need to be regenerated to use this release. (cutting)
+
+  NEW FEATURES
+
+    AVRO-1555.  C#: Add support for RPC over HTTP. (Dmitry Kovalev via cutting)
+
+    AVRO-739. Add date, time, timestamp, and duration binary types to
+    specification. (Dmitry Kovalev and Ryan Blue via tomwhite)
+
+    AVRO-1590. Java: In resolving records in unions, permit structural
+    and shortname matches when fullname matching fails.
+    (Ryan Blue via cutting)
+
+    AVRO-570. Python: Add connector for tethered mapreduce.
+    (Jeremy Lewi and Steven Willis via cutting)
+
+    AVRO-834. Java: Data File corruption recovery tool.
+    (scottcarey and tomwhite)
+
+    AVRO-1614. Java: In generated builder classes, add accessors to
+    field sub-builders, permitting easier creation of nested, optional
+    structures. (Niels Basjes via cutting)
+
+    AVRO-1537. Make it easier to set up a multi-language build environment.
+    Support for running a Docker container with all build dependencies.
+    (tomwhite)
+
+    AVRO-680. Java: Support non-string map keys. (Sachin Goyal via Ryan Blue)
+
+    AVRO-1497. Java: Add support for logical types. (blue)
+
+    AVRO-1685. Java: Allow specifying sync in DataFileWriter.create
+    (Sehrope Sarkuni via tomwhite)
+
+    AVRO-1683. Add microsecond time and timestamp logical types to the
+    specification. (blue)
+
+    AVRO-1672. Java: Add date/time logical types and conversions. (blue)
+
+  OPTIMIZATIONS
+
+  IMPROVEMENTS
+
+    AVRO-843. C#: Change Visual Studio project files to specify .NET 3.5.
+    (Dmitry Kovalev via cutting)
+
+    AVRO-1583. Java: Add stdin support to the tojson tool.
+    (Clément Mahtieu via cutting)
+
+    AVRO-1551. Java: Add an output encoding option to the compiler
+    command line tool. (Keegan Witt via cutting)
+
+    AVRO-1585. Java: Deprecate Jackson classes in public API. (tomwhite)
+
+    AVRO-1619. Java: Improve javadoc comments in generated code.
+    (Niels Basjes via cutting)
+
+    AVRO-1616. Add IntelliJ files to .gitignore. (Niels Basjes via cutting)
+
+    AVRO-1539. Java: Add FileSystem based FsInput constructor.
+    (Allan Shoup via cutting)
+
+    AVRO-1628. Java: Add Schema#createUnion(Schema ...) convenience method.
+    (Clément Mahtieu via cutting)
+
+    AVRO-1655. Java: Add Schema.createRecord with field list.
+    (Lars Francke via blue)
+
+    AVRO-1681. Improve generated JavaDocs.
+    (Charles Gariépy-Ikeson via tomwhite)
+
+    AVRO-1645. Ruby: Improved handling of missing named types.
+    (Daniel Schierbeck via tomwhite)
+
+    AVRO-1693. Ruby: Allow writing arbitrary metadata to data files.
+    (Daniel Schierbeck via tomwhite)
+
+    AVRO-1692. Allow more than one logical type for a Java class. (blue via
+    tomwhite)
+
+    AVRO-1697. Ruby: Add support for the Snappy codec to the Ruby library.
+    (Daniel Schierbeck via tomwhite)
+
+  BUG FIXES
+
+    AVRO-1553. Java: MapReduce never uses MapOutputValueSchema. (tomwhite)
+
+    AVRO-1544. Java: Fix GenericData#validate for unions with null.
+    (Matthew Hayes via cutting)
+
+    AVRO-1589. Java: Fix ReflectData.AllowNulls to not create unions
+    for primitive types. (Ryan Blue via cutting)
+
+    AVRO-1591. Java: Fix specific RPC so that proxies implement hashCode(),
+    equals() and toString(). (Mark Spadoni via cutting)
+
+    AVRO-1489. Java: Avro fails to build with OpenJDK 8. (Ricardo Arguello via
+    tomwhite)
+
+    AVRO-1302. Python: Update documentation to open files as binary to
+    prevent EOL substitution. (Lars Francke via cutting)
+
+    AVRO-1598. Java: Fix flakiness in TestFileSpanStorage.
+    (Ryan Blue via cutting)
+
+    AVRO-1592. Java: Fix handling of Java reserved words as enum
+    constants in generated code. (Lukas Steiblys via cutting)
+
+    AVRO-1597. Java: Random data tool writes corrupt files to standard out.
+    (cutting)
+
+    AVRO-1596. Java: Cannot read past corrupted block in Avro data file.
+    (tomwhite)
+
+    AVRO-1564. Java: Fix handling of optional byte field in Thrift.
+    (Michael Pershyn via cutting)
+
+    AVRO-1407. Java: Fix infinite loop on slow connect in NettyTransceiver.
+    (Gareth Davis via cutting)
+
+    AVRO-1604. Java: Fix ReflectData.AllowNull to work with @Nullable
+    annotations. (Ryan Blue via cutting)
+
+    AVRO-1545. Python. Fix to retain schema properties on primitive types.
+    (Dustin Spicuzza via cutting)
+
+    AVRO-1623. Java: Fix GenericData#validate to correctly resolve unions.
+    (Jeffrey Mullins via cutting)
+
+    AVRO-1621. PHP: FloatIntEncodingTest fails for NAN. (tomwhite)
+
+    AVRO-1573. Javascript. Upgrade to Grunt 0.4 for testing. (tomwhite)
+
+    AVRO-1624. Java. Surefire forkMode is deprecated. (Niels Basjes via
+    tomwhite)
+
+    AVRO-1630. Java: Creating Builder from instance loses data. (Niels Basjes
+    via tomwhite)
+
+    AVRO-1653. Fix typo in spec (lenghted => length). (Sehrope Sarkuni via blue)
+
+    AVRO-1656. Fix 'How to Contribute' link. (Benjamin Clauss via blue)
+
+    AVRO-1652. Java: Do not warn or validate defaults if validation is off.
+    (Michael D'Angelo via blue)
+
+    AVRO-1655. Java: Fix NPE in RecordSchema#toString when fields are null.
+    (Lars Francke via blue)
+
+    AVRO-1689. Update Dockerfile to use official Java repository. (tomwhite)
+
+    AVRO-1576. TestSchemaCompatibility is platform dependent.
+    (Stevo Slavic via tomwhite)
+
+    AVRO-1688. Ruby test_union(TestIO) is failing. (tomwhite)
+
+    AVRO-1673. Python 3 EnumSchema changes the order of symbols.
+    (Marcin Białoń via tomwhite)
+
+    AVRO-1491. Avro.ipc.dll not included in release zip/build file.
+    (Dmitry Kovalev via tomwhite)
+
 Avro 1.7.7 (23 July 2014)
 
   NEW FEATURES
diff --git a/README.txt b/README.txt
index a8f66f7..566f192 100644
--- a/README.txt
+++ b/README.txt
@@ -6,4 +6,4 @@ Learn more about Avro, please visit our website at:
 
 To contribute to Avro, please read:
 
-  https://cwiki.apache.org/AVRO/how-to-contribute.html
+  https://cwiki.apache.org/confluence/display/AVRO/How+To+Contribute
diff --git a/build.sh b/build.sh
index 06961c0..cce0cfb 100755
--- a/build.sh
+++ b/build.sh
@@ -22,7 +22,7 @@ cd `dirname "$0"`				  # connect to root
 VERSION=`cat share/VERSION.txt`
 
 function usage {
-  echo "Usage: $0 {test|dist|sign|clean}"
+  echo "Usage: $0 {test|dist|sign|clean|docker}"
   exit 1
 }
 
@@ -96,8 +96,10 @@ case "$target" in
 
 	# build lang-specific artifacts
         
-	(cd lang/java; mvn package -DskipTests -Dhadoop.version=2; rm -rf mapred/target/classes/;
-	  mvn -P dist package -DskipTests -Davro.version=$VERSION javadoc:aggregate) 
+	(cd lang/java; mvn package -DskipTests -Dhadoop.version=1;
+	  rm -rf mapred/target/{classes,test-classes}/;
+	  rm -rf trevni/avro/target/{classes,test-classes}/;
+	  mvn -P dist package -DskipTests -Davro.version=$VERSION javadoc:aggregate)
         (cd lang/java/trevni/doc; mvn site)
         (mvn -N -P copy-artifacts antrun:run) 
 
@@ -169,9 +171,39 @@ case "$target" in
 
 	(cd lang/php; ./build.sh clean)
 
-	(cd lang/perl; [ -f Makefile ] && make clean)
+	(cd lang/perl; [ ! -f Makefile ] || make clean)
 	;;
 
+    docker)
+        docker build -t avro-build share/docker
+        if [ "$(uname -s)" == "Linux" ]; then
+          USER_NAME=${SUDO_USER:=$USER}
+          USER_ID=$(id -u $USER_NAME)
+          GROUP_ID=$(id -g $USER_NAME)
+        else # boot2docker uid and gid
+          USER_NAME=$USER
+          USER_ID=1000
+          GROUP_ID=50
+        fi
+        docker build -t avro-build-${USER_NAME} - <<UserSpecificDocker
+FROM avro-build
+RUN groupadd -g ${GROUP_ID} ${USER_NAME} || true
+RUN useradd -g ${GROUP_ID} -u ${USER_ID} -k /root -m ${USER_NAME}
+ENV HOME /home/${USER_NAME}
+UserSpecificDocker
+        # By mapping the .m2 directory you can do an mvn install from
+        # within the container and use the result on your normal
+        # system.  And this also is a significant speedup in subsequent
+        # builds because the dependencies are downloaded only once.
+        docker run --rm=true -t -i \
+          -v ${PWD}:/home/${USER_NAME}/avro \
+          -w /home/${USER_NAME}/avro \
+          -v ${HOME}/.m2:/home/${USER_NAME}/.m2 \
+          -v ${HOME}/.gnupg:/home/${USER_NAME}/.gnupg \
+          -u ${USER_NAME} \
+          avro-build-${USER_NAME}
+        ;;
+
     *)
         usage
         ;;
diff --git a/doc/src/content/xdocs/gettingstartedpython.xml b/doc/src/content/xdocs/gettingstartedpython.xml
index d8d9df8..156646a 100644
--- a/doc/src/content/xdocs/gettingstartedpython.xml
+++ b/doc/src/content/xdocs/gettingstartedpython.xml
@@ -136,14 +136,14 @@ import avro.schema
 from avro.datafile import DataFileReader, DataFileWriter
 from avro.io import DatumReader, DatumWriter
 
-schema = avro.schema.parse(open("user.avsc").read())
+schema = avro.schema.parse(open("user.avsc", "rb").read())
 
-writer = DataFileWriter(open("users.avro", "w"), DatumWriter(), schema)
+writer = DataFileWriter(open("users.avro", "wb"), DatumWriter(), schema)
 writer.append({"name": "Alyssa", "favorite_number": 256})
 writer.append({"name": "Ben", "favorite_number": 7, "favorite_color": "red"})
 writer.close()
 
-reader = DataFileReader(open("users.avro", "r"), DatumReader())
+reader = DataFileReader(open("users.avro", "rb"), DatumReader())
 for user in reader:
     print user
 reader.close()
@@ -154,10 +154,18 @@ reader.close()
 {u'favorite_color': u'red', u'favorite_number': 7, u'name': u'Ben'}
       </source>
       <p>
+        Do make sure that you open your files in binary mode (i.e. using the modes
+        <code>wb</code> and <code>rb</code> respectively). Otherwise you might
+        generate corrupt files due to
+        <a href="http://docs.python.org/library/functions.html#open">
+        automatic replacement</a> of newline characters with
+        platform-specific representations.
+      </p>
+      <p>
         Let's take a closer look at what's going on here.
       </p>
       <source>
-schema = avro.schema.parse(open("user.avsc").read())
+schema = avro.schema.parse(open("user.avsc", "rb").read())
       </source>
       <p>
         <code>avro.schema.parse</code> takes a string containing a JSON schema
@@ -167,7 +175,7 @@ schema = avro.schema.parse(open("user.avsc").read())
         user.avsc schema file here.
       </p>
       <source>
-writer = DataFileWriter(open("users.avro", "w"), DatumWriter(), schema)
+writer = DataFileWriter(open("users.avro", "wb"), DatumWriter(), schema)
       </source>
       <p>
         We create a <code>DataFileWriter</code>, which we'll use to write
@@ -201,7 +209,7 @@ writer.append({"name": "Ben", "favorite_number": 7, "favorite_color": "red"})
           ignored.
         </p>
         <source>
-reader = DataFileReader(open("users.avro", "r"), DatumReader())
+reader = DataFileReader(open("users.avro", "rb"), DatumReader())
         </source>
         <p>
           We open the file again, this time for reading back from disk.  We use
diff --git a/doc/src/content/xdocs/spec.xml b/doc/src/content/xdocs/spec.xml
index 8c108c8..83c0420 100644
--- a/doc/src/content/xdocs/spec.xml
+++ b/doc/src/content/xdocs/spec.xml
@@ -871,7 +871,7 @@
               <li>that many bytes of <em>buffer data</em>.</li>
             </ul>
           </li>
-          <li>A message is always terminated by a zero-lenghted buffer.</li>
+          <li>A message is always terminated by a zero-length buffer.</li>
         </ul>
 
         <p>Framing is transparent to request and response message
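As a concrete illustration, here is a minimal Python sketch of the framing
just described, assuming the four-byte, network-order (big-endian) buffer
lengths above; frame_message and read_message are hypothetical helper
names, not part of any Avro library:

    import io
    import struct

    def frame_message(payload, buffer_size=8192):
        # Split a message into length-prefixed buffers, terminated by
        # the zero-length buffer that marks the end of the message.
        out = bytearray()
        for i in range(0, len(payload), buffer_size):
            chunk = payload[i:i + buffer_size]
            out += struct.pack('>I', len(chunk)) + chunk
        out += struct.pack('>I', 0)  # zero-length buffer: end of message
        return bytes(out)

    def read_message(stream):
        # Read one framed message from a binary file-like object.
        payload = bytearray()
        while True:
            (length,) = struct.unpack('>I', stream.read(4))
            if length == 0:
                return bytes(payload)
            payload += stream.read(length)

    assert read_message(io.BytesIO(frame_message(b'hello'))) == b'hello'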
@@ -1406,6 +1406,67 @@ void initFPTable() {
           precisions match.</p>
 
       </section>
+
+      <section>
+        <title>Date</title>
+        <p>
+          The <code>date</code> logical type represents a date within the calendar, with no reference to a particular time zone or time of day.
+        </p>
+        <p>
+          A <code>date</code> logical type annotates an Avro <code>int</code>, where the int stores the number of days from the unix epoch, 1 January 1970 (ISO calendar).
+        </p>
+      </section>
+
+      <section>
+        <title>Time (millisecond precision)</title>
+        <p>
+          The <code>time-millis</code> logical type represents a time of day, with no reference to a particular calendar, time zone or date, with a precision of one millisecond.
+        </p>
+        <p>
+          A <code>time-millis</code> logical type annotates an Avro <code>int</code>, where the int stores the number of milliseconds after midnight, 00:00:00.000.
+        </p>
+      </section>
+
+      <section>
+        <title>Time (microsecond precision)</title>
+        <p>
+          The <code>time-micros</code> logical type represents a time of day, with no reference to a particular calendar, time zone or date, with a precision of one microsecond.
+        </p>
+        <p>
+          A <code>time-micros</code> logical type annotates an Avro <code>long</code>, where the long stores the number of microseconds after midnight, 00:00:00.000000.
+        </p>
+      </section>
+
+      <section>
+        <title>Timestamp (millisecond precision)</title>
+        <p>
+          The <code>timestamp-millis</code> logical type represents an instant on the global timeline, independent of a particular time zone or calendar, with a precision of one millisecond.
+        </p>
+        <p>
+          A <code>timestamp-millis</code> logical type annotates an Avro <code>long</code>, where the long stores the number of milliseconds from the unix epoch, 1 January 1970 00:00:00.000 UTC.
+        </p>
+      </section>
+
+      <section>
+        <title>Timestamp (microsecond precision)</title>
+        <p>
+          The <code>timestamp-micros</code> logical type represents an instant on the global timeline, independent of a particular time zone or calendar, with a precision of one microsecond.
+        </p>
+        <p>
+          A <code>timestamp-micros</code> logical type annotates an Avro <code>long</code>, where the long stores the number of microseconds from the unix epoch, 1 January 1970 00:00:00.000000 UTC.
+        </p>
+      </section>
+
+      <section>
+        <title>Duration</title>
+        <p>
+          The <code>duration</code> logical type represents an amount of time defined by a number of months, days and milliseconds. This is not equivalent to a number of milliseconds, because, depending on the moment in time from which the duration is measured, the number of days in the month and number of milliseconds in a day may differ. Other standard periods such as years, quarters, hours and minutes can be expressed through these basic periods.
+        </p>
+        <p>
+          A <code>duration</code> logical type annotates an Avro <code>fixed</code> type of size 12, which stores three little-endian unsigned integers that represent durations at different granularities of time. The first stores a number in months, the second stores a number in days, and the third stores a number in milliseconds.
+        </p>
+      </section>
+
     </section>
 
   <p><em>Apache Avro, Avro, Apache, and the Avro and Apache logos are
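The integer representations defined in the sections above are simple enough
to sketch directly. The following is a hedged Python sketch, assuming only
the standard library; the encode_* helpers are illustrative names, not part
of the Avro API:

    import datetime
    import struct

    EPOCH_DATE = datetime.date(1970, 1, 1)
    EPOCH_DT = datetime.datetime(1970, 1, 1)

    def encode_date(d):
        # date -> int: days since the unix epoch
        return (d - EPOCH_DATE).days

    def encode_time_millis(t):
        # time -> int: milliseconds after midnight
        return (((t.hour * 60 + t.minute) * 60 + t.second) * 1000
                + t.microsecond // 1000)

    def encode_timestamp_micros(dt):
        # naive UTC datetime -> long: microseconds since the unix epoch
        delta = dt - EPOCH_DT
        return ((delta.days * 86400 + delta.seconds) * 1000000
                + delta.microseconds)

    def encode_duration(months, days, millis):
        # three little-endian unsigned ints packed into a 12-byte fixed
        return struct.pack('<III', months, days, millis)

    print(encode_date(datetime.date(2015, 10, 24)))    # 16732
    print(encode_time_millis(datetime.time(12, 30)))   # 45000000
    print(len(encode_duration(1, 15, 500)))            # 12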
diff --git a/lang/c++/.gitignore b/lang/c++/.gitignore
new file mode 100644
index 0000000..091cd7f
--- /dev/null
+++ b/lang/c++/.gitignore
@@ -0,0 +1,2 @@
+build/
+build.mac/
diff --git a/lang/c++/AUTHORS b/lang/c++/AUTHORS
new file mode 100644
index 0000000..7b45e94
--- /dev/null
+++ b/lang/c++/AUTHORS
@@ -0,0 +1,4 @@
+
+See http://hadoop.apache.org/avro/ for a list of authors
+
+
diff --git a/lang/c++/CMakeLists.txt b/lang/c++/CMakeLists.txt
new file mode 100644
index 0000000..fc54a71
--- /dev/null
+++ b/lang/c++/CMakeLists.txt
@@ -0,0 +1,171 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+cmake_minimum_required (VERSION 2.6)
+
+set (CMAKE_LEGACY_CYGWIN_WIN32 0)
+
+if (NOT DEFINED CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_NO_WARNINGS)
+    set (CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_NO_WARNINGS ON)
+endif()
+
+if (EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/VERSION.txt)
+    file(READ "${CMAKE_CURRENT_SOURCE_DIR}/VERSION.txt" AVRO_VERSION)
+else (EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/VERSION.txt)
+    file(READ "${CMAKE_CURRENT_SOURCE_DIR}/../../share/VERSION.txt"
+        AVRO_VERSION)
+endif (EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/VERSION.txt)
+
+set (AVRO_VERSION_MAJOR ${AVRO_VERSION})
+set (AVRO_VERSION_MINOR "0")
+
+project (Avro-cpp)
+
+if (WIN32 AND NOT CYGWIN AND NOT MSYS)
+add_definitions (/EHa)
+add_definitions (
+    -DBOOST_REGEX_DYN_LINK
+    -DBOOST_FILESYSTEM_DYN_LINK
+    -DBOOST_SYSTEM_DYN_LINK
+    -DBOOST_IOSTREAMS_DYN_LINK
+    -DBOOST_PROGRAM_OPTIONS_DYN_LINK
+    -DBOOST_ALL_NO_LIB)
+endif()
+
+if (CMAKE_COMPILER_IS_GNUCXX)
+    set(CMAKE_CXX_FLAGS "-Wall")
+endif ()
+
+
+find_package (Boost 1.38 REQUIRED
+    COMPONENTS filesystem system program_options iostreams)
+
+add_definitions (${Boost_LIB_DIAGNOSTIC_DEFINITIONS})
+
+include_directories (api ${CMAKE_CURRENT_BINARY_DIR} ${Boost_INCLUDE_DIRS})
+
+set (AVRO_SOURCE_FILES
+        impl/Compiler.cc impl/Node.cc
+        impl/NodeImpl.cc impl/ResolverSchema.cc impl/Schema.cc
+        impl/Types.cc impl/ValidSchema.cc impl/Zigzag.cc
+        impl/BinaryEncoder.cc impl/BinaryDecoder.cc
+        impl/Stream.cc impl/FileStream.cc
+        impl/Generic.cc impl/GenericDatum.cc
+        impl/DataFile.cc
+        impl/parsing/Symbol.cc
+        impl/parsing/ValidatingCodec.cc
+        impl/parsing/JsonCodec.cc
+        impl/parsing/ResolvingDecoder.cc
+        impl/json/JsonIO.cc
+        impl/json/JsonDom.cc
+        impl/Resolver.cc impl/Validator.cc
+        )
+
+add_library (avrocpp SHARED ${AVRO_SOURCE_FILES})
+
+set_property (TARGET avrocpp
+    APPEND PROPERTY COMPILE_DEFINITIONS AVRO_DYN_LINK)
+
+add_library (avrocpp_s STATIC ${AVRO_SOURCE_FILES})
+
+set_property (TARGET avrocpp avrocpp_s
+    APPEND PROPERTY COMPILE_DEFINITIONS AVRO_SOURCE)
+
+set_target_properties (avrocpp PROPERTIES
+    VERSION ${AVRO_VERSION_MAJOR}.${AVRO_VERSION_MINOR})
+
+set_target_properties (avrocpp_s PROPERTIES
+    VERSION ${AVRO_VERSION_MAJOR}.${AVRO_VERSION_MINOR})
+
+target_link_libraries (avrocpp ${Boost_LIBRARIES})
+
+add_executable (precompile test/precompile.cc)
+
+target_link_libraries (precompile avrocpp_s ${Boost_LIBRARIES})
+
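+# Helper macro: run the avrogencpp code generator (declared later in this
+# file) on a JSON schema from jsonschemas/ to produce a C++ header whose
+# generated types live in namespace ${ns}.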
+macro (gen file ns)
+    add_custom_command (OUTPUT ${file}.hh
+        COMMAND avrogencpp
+            -p -
+            -i ${CMAKE_CURRENT_SOURCE_DIR}/jsonschemas/${file}
+            -o ${file}.hh -n ${ns} -U
+        DEPENDS avrogencpp ${CMAKE_CURRENT_SOURCE_DIR}/jsonschemas/${file})
+    add_custom_target (${file}_hh DEPENDS ${file}.hh)
+endmacro (gen)
+
+gen (empty_record empty)
+gen (bigrecord testgen)
+gen (bigrecord_r testgen_r)
+gen (bigrecord2 testgen2)
+gen (tweet testgen3)
+gen (union_array_union uau)
+gen (union_map_union umu)
+gen (union_conflict uc)
+gen (recursive rec)
+gen (reuse ru)
+gen (circulardep cd)
+
+add_executable (avrogencpp impl/avrogencpp.cc)
+target_link_libraries (avrogencpp avrocpp_s ${Boost_LIBRARIES})
+
+enable_testing()
+
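+# Helper macro: build test/<name>.cc against the shared library and register
+# it with CTest; tests run from the source directory so they can find their
+# data files.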
+macro (unittest name)
+    add_executable (${name} test/${name}.cc)
+    target_link_libraries (${name} avrocpp ${Boost_LIBRARIES})
+    add_test (NAME ${name} WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
+        COMMAND ${CMAKE_CURRENT_BINARY_DIR}/${name})
+endmacro (unittest)
+
+unittest (buffertest)
+unittest (unittest)
+unittest (SchemaTests)
+unittest (LargeSchemaTests)
+unittest (CodecTests)
+unittest (StreamTests)
+unittest (SpecificTests)
+unittest (DataFileTests)
+unittest (JsonTests)
+unittest (AvrogencppTests)
+
+add_dependencies (AvrogencppTests bigrecord_hh bigrecord_r_hh bigrecord2_hh
+    tweet_hh
+    union_array_union_hh union_map_union_hh union_conflict_hh
+    recursive_hh reuse_hh circulardep_hh empty_record_hh)
+
+include (InstallRequiredSystemLibraries)
+
+set (CPACK_PACKAGE_FILE_NAME "avrocpp-${AVRO_VERSION_MAJOR}")
+
+include (CPack)
+
+install (TARGETS avrocpp avrocpp_s
+    LIBRARY DESTINATION lib
+    ARCHIVE DESTINATION lib
+    RUNTIME DESTINATION lib)
+
+install (TARGETS avrogencpp RUNTIME DESTINATION bin)
+
+install (DIRECTORY api/ DESTINATION include/avro
+    FILES_MATCHING PATTERN *.hh)
+
+if (NOT CMAKE_BUILD_TYPE)
+    set (CMAKE_BUILD_TYPE Release CACHE STRING
+      "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel."
+      FORCE)
+endif (NOT CMAKE_BUILD_TYPE)
diff --git a/lang/c++/COPYING b/lang/c++/COPYING
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/lang/c++/COPYING
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/lang/c++/ChangeLog b/lang/c++/ChangeLog
new file mode 100644
index 0000000..317b99f
--- /dev/null
+++ b/lang/c++/ChangeLog
@@ -0,0 +1 @@
+Refer to CHANGES.txt in the root of the Avro repository for the change log
diff --git a/lang/c++/Doxyfile b/lang/c++/Doxyfile
new file mode 100644
index 0000000..2d284a9
--- /dev/null
+++ b/lang/c++/Doxyfile
@@ -0,0 +1,1515 @@
+# Doxyfile 1.6.1
+
+# This file describes the settings to be used by the documentation system
+# doxygen (www.doxygen.org) for a project
+#
+# All text after a hash (#) is considered a comment and will be ignored
+# The format is:
+#       TAG = value [value, ...]
+# For lists items can also be appended using:
+#       TAG += value [value, ...]
+# Values that contain spaces should be placed between quotes (" ")
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+
+# This tag specifies the encoding used for all characters in the config file
+# that follow. The default is UTF-8 which is also the encoding used for all
+# text before the first occurrence of this tag. Doxygen uses libiconv (or the
+# iconv built into libc) for the transcoding. See
+# http://www.gnu.org/software/libiconv for the list of possible encodings.
+
+DOXYFILE_ENCODING      = UTF-8
+
+# The PROJECT_NAME tag is a single word (or a sequence of words surrounded
+# by quotes) that should identify the project.
+
+PROJECT_NAME           = "Avro C++"
+
+# The PROJECT_NUMBER tag can be used to enter a project or revision number.
+# This could be handy for archiving the generated documentation or
+# if some version control system is used.
+
+PROJECT_NUMBER         =
+
+# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
+# base path where the generated documentation will be put.
+# If a relative path is entered, it will be relative to the location
+# where doxygen was started. If left blank the current directory will be used.
+
+OUTPUT_DIRECTORY       = doc
+
+# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create
+# 4096 sub-directories (in 2 levels) under the output directory of each output
+# format and will distribute the generated files over these directories.
+# Enabling this option can be useful when feeding doxygen a huge amount of
+# source files, where putting all generated files in the same directory would
+# otherwise cause performance problems for the file system.
+
+CREATE_SUBDIRS         = NO
+
+# The OUTPUT_LANGUAGE tag is used to specify the language in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all constant output in the proper language.
+# The default language is English, other supported languages are:
+# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional,
+# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German,
+# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English
+# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian,
+# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak,
+# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese.
+
+OUTPUT_LANGUAGE        = English
+
+# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will
+# include brief member descriptions after the members that are listed in
+# the file and class documentation (similar to JavaDoc).
+# Set to NO to disable this.
+
+BRIEF_MEMBER_DESC      = YES
+
+# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend
+# the brief description of a member or function before the detailed description.
+# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
+# brief descriptions will be completely suppressed.
+
+REPEAT_BRIEF           = YES
+
+# This tag implements a quasi-intelligent brief description abbreviator
+# that is used to form the text in various listings. Each string
+# in this list, if found as the leading text of the brief description, will be
+# stripped from the text, and the result, after processing the whole list, is
+# used as the annotated text. Otherwise, the brief description is used as-is.
+# If left blank, the following values are used ("$name" is automatically
+# replaced with the name of the entity): "The $name class" "The $name widget"
+# "The $name file" "is" "provides" "specifies" "contains"
+# "represents" "a" "an" "the"
+
+ABBREVIATE_BRIEF       =
+
+# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
+# Doxygen will generate a detailed section even if there is only a brief
+# description.
+
+ALWAYS_DETAILED_SEC    = NO
+
+# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
+# inherited members of a class in the documentation of that class as if those
+# members were ordinary class members. Constructors, destructors and assignment
+# operators of the base classes will not be shown.
+
+INLINE_INHERITED_MEMB  = NO
+
+# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full
+# path before file names in the file list and in the header files. If set
+# to NO the shortest path that makes the file name unique will be used.
+
+FULL_PATH_NAMES        = YES
+
+# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag
+# can be used to strip a user-defined part of the path. Stripping is
+# only done if one of the specified strings matches the left-hand part of
+# the path. The tag can be used to show relative paths in the file list.
+# If left blank the directory from which doxygen is run is used as the
+# path to strip.
+
+STRIP_FROM_PATH        =
+
+# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of
+# the path mentioned in the documentation of a class, which tells
+# the reader which header file to include in order to use a class.
+# If left blank only the name of the header file containing the class
+# definition is used. Otherwise one should specify the include paths that
+# are normally passed to the compiler using the -I flag.
+
+STRIP_FROM_INC_PATH    =
+
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter
+# (but less readable) file names. This can be useful if your file system
+# doesn't support long names, as on DOS, Mac, or CD-ROM.
+
+SHORT_NAMES            = NO
+
+# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen
+# will interpret the first line (until the first dot) of a JavaDoc-style
+# comment as the brief description. If set to NO, the JavaDoc
+# comments will behave just like regular Qt-style comments
+# (thus requiring an explicit @brief command for a brief description.)
+
+JAVADOC_AUTOBRIEF      = YES
+
+# If the QT_AUTOBRIEF tag is set to YES then Doxygen will
+# interpret the first line (until the first dot) of a Qt-style
+# comment as the brief description. If set to NO, the comments
+# will behave just like regular Qt-style comments (thus requiring
+# an explicit \brief command for a brief description.)
+
+QT_AUTOBRIEF           = NO
+
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen
+# treat a multi-line C++ special comment block (i.e. a block of //! or ///
+# comments) as a brief description. This used to be the default behaviour.
+# The new default is to treat a multi-line C++ comment block as a detailed
+# description. Set this tag to YES if you prefer the old behaviour instead.
+
+MULTILINE_CPP_IS_BRIEF = NO
+
+# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented
+# member inherits the documentation from any documented member that it
+# re-implements.
+
+INHERIT_DOCS           = YES
+
+# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce
+# a new page for each member. If set to NO, the documentation of a member will
+# be part of the file/class/namespace that contains it.
+
+SEPARATE_MEMBER_PAGES  = NO
+
+# The TAB_SIZE tag can be used to set the number of spaces in a tab.
+# Doxygen uses this value to replace tabs by spaces in code fragments.
+
+TAB_SIZE               = 4
+
+# This tag can be used to specify a number of aliases that act
+# as commands in the documentation. An alias has the form "name=value".
+# For example adding "sideeffect=\par Side Effects:\n" will allow you to
+# put the command \sideeffect (or @sideeffect) in the documentation, which
+# will result in a user-defined paragraph with heading "Side Effects:".
+# You can put \n's in the value part of an alias to insert newlines.
+
+ALIASES                =
+
+# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C
+# sources only. Doxygen will then generate output that is more tailored for C.
+# For instance, some of the names that are used will be different. The list
+# of all members will be omitted, etc.
+
+OPTIMIZE_OUTPUT_FOR_C  = NO
+
+# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java
+# sources only. Doxygen will then generate output that is more tailored for
+# Java. For instance, namespaces will be presented as packages, qualified
+# scopes will look different, etc.
+
+OPTIMIZE_OUTPUT_JAVA   = NO
+
+# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
+# sources only. Doxygen will then generate output that is more tailored for
+# Fortran.
+
+OPTIMIZE_FOR_FORTRAN   = NO
+
+# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
+# sources. Doxygen will then generate output that is tailored for
+# VHDL.
+
+OPTIMIZE_OUTPUT_VHDL   = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it parses.
+# With this tag you can assign which parser to use for a given extension.
+# Doxygen has a built-in mapping, but you can override or extend it using this tag.
+# The format is ext=language, where ext is a file extension, and language is one of
+# the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP,
+# Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat
+# .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran),
+# use: inc=Fortran f=C. Note that for custom extensions you also need to set
+# FILE_PATTERNS, otherwise the files are not read by doxygen.
+
+EXTENSION_MAPPING      =
+
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
+# to include (a tag file for) the STL sources as input, then you should
+# set this tag to YES in order to let doxygen match function declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string) vs.
+# func(std::string) {}). This also makes the inheritance and collaboration
+# diagrams that involve STL classes more complete and accurate.
+
+BUILTIN_STL_SUPPORT    = NO
+
+# If you use Microsoft's C++/CLI language, you should set this option to YES to
+# enable parsing support.
+
+CPP_CLI_SUPPORT        = NO
+
+# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only.
+# Doxygen will parse them like normal C++ but will assume all classes use public
+# instead of private inheritance when no explicit protection keyword is present.
+
+SIP_SUPPORT            = NO
+
+# For Microsoft's IDL there are propget and propput attributes to indicate getter
+# and setter methods for a property. Setting this option to YES (the default)
+# will make doxygen to replace the get and set methods by a property in the
+# documentation. This will only work if the methods are indeed getting or
+# setting a simple type. If this is not the case, or you want to show the
+# methods anyway, you should set this option to NO.
+
+IDL_PROPERTY_SUPPORT   = YES
+
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES, then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
+# all members of a group must be documented explicitly.
+
+DISTRIBUTE_GROUP_DOC   = NO
+
+# Set the SUBGROUPING tag to YES (the default) to allow class member groups of
+# the same type (for instance a group of public functions) to be put as a
+# subgroup of that type (e.g. under the Public Functions section). Set it to
+# NO to prevent subgrouping. Alternatively, this can be done per class using
+# the \nosubgrouping command.
+
+SUBGROUPING            = YES
+
+# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum
+# is documented as struct, union, or enum with the name of the typedef. So
+# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
+# with name TypeT. When disabled the typedef will appear as a member of a file,
+# namespace, or class. And the struct will be named TypeS. This can typically
+# be useful for C code in case the coding convention dictates that all compound
+# types are typedef'ed and only the typedef is referenced, never the tag name.
+
+TYPEDEF_HIDES_STRUCT   = NO
+
+# The SYMBOL_CACHE_SIZE determines the size of the internal cache used to
+# determine which symbols to keep in memory and which to flush to disk.
+# When the cache is full, less often used symbols will be written to disk.
+# For small to medium size projects (<1000 input files) the default value is
+# probably good enough. For larger projects a too small cache size can cause
+# doxygen to be busy swapping symbols to and from disk most of the time
+# causing a significant performance penalty.
+# If the system has enough physical memory increasing the cache will improve the
+# performance by keeping more symbols in memory. Note that the value works on
+# a logarithmic scale, so increasing the size by one will roughly double the
+# memory usage. The cache size is given by this formula:
+# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0,
+# corresponding to a cache size of 2^16 = 65536 symbols
+
+SYMBOL_CACHE_SIZE      = 0
+
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+
+# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
+# documentation are documented, even if no documentation was available.
+# Private class members and static file members will be hidden unless
+# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES
+
+EXTRACT_ALL            = NO
+
+# If the EXTRACT_PRIVATE tag is set to YES all private members of a class
+# will be included in the documentation.
+
+EXTRACT_PRIVATE        = NO
+
+# If the EXTRACT_STATIC tag is set to YES all static members of a file
+# will be included in the documentation.
+
+EXTRACT_STATIC         = NO
+
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs)
+# defined locally in source files will be included in the documentation.
+# If set to NO only classes defined in header files are included.
+
+EXTRACT_LOCAL_CLASSES  = YES
+
+# This flag is only useful for Objective-C code. When set to YES, local
+# methods, which are defined in the implementation section but not in
+# the interface, are included in the documentation.
+# If set to NO (the default) only methods in the interface are included.
+
+EXTRACT_LOCAL_METHODS  = NO
+
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base
+# name of the file that contains the anonymous namespace. By default
+# anonymous namespaces are hidden.
+
+EXTRACT_ANON_NSPACES   = NO
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all
+# undocumented members of documented classes, files or namespaces.
+# If set to NO (the default) these members will be included in the
+# various overviews, but no documentation section is generated.
+# This option has no effect if EXTRACT_ALL is enabled.
+
+HIDE_UNDOC_MEMBERS     = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy.
+# If set to NO (the default) these classes will be included in the various
+# overviews. This option has no effect if EXTRACT_ALL is enabled.
+
+HIDE_UNDOC_CLASSES     = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all
+# friend (class|struct|union) declarations.
+# If set to NO (the default) these declarations will be included in the
+# documentation.
+
+HIDE_FRIEND_COMPOUNDS  = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any
+# documentation blocks found inside the body of a function.
+# If set to NO (the default) these blocks will be appended to the
+# function's detailed documentation block.
+
+HIDE_IN_BODY_DOCS      = NO
+
+# The INTERNAL_DOCS tag determines if documentation
+# that is typed after a \internal command is included. If the tag is set
+# to NO (the default) then the documentation will be excluded.
+# Set it to YES to include the internal documentation.
+
+INTERNAL_DOCS          = NO
+
+# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate
+# file names in lower-case letters. If set to YES upper-case letters are also
+# allowed. This is useful if you have classes or files whose names only differ
+# in case and if your file system supports case sensitive file names. Windows
+# and Mac users are advised to set this option to NO.
+
+CASE_SENSE_NAMES       = YES
+
+# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen
+# will show members with their full class and namespace scopes in the
+# documentation. If set to YES the scope will be hidden.
+
+HIDE_SCOPE_NAMES       = NO
+
+# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen
+# will put a list of the files that are included by a file in the documentation
+# of that file.
+
+SHOW_INCLUDE_FILES     = YES
+
+# If the INLINE_INFO tag is set to YES (the default) then a tag [inline]
+# is inserted in the documentation for inline members.
+
+INLINE_INFO            = YES
+
+# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen
+# will sort the (detailed) documentation of file and class members
+# alphabetically by member name. If set to NO the members will appear in
+# declaration order.
+
+SORT_MEMBER_DOCS       = YES
+
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the
+# brief documentation of file, namespace and class members alphabetically
+# by member name. If set to NO (the default) the members will appear in
+# declaration order.
+
+SORT_BRIEF_DOCS        = NO
+
+# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
+# (brief and detailed) documentation of class members so that constructors
+# and destructors are listed first. If set to NO (the default) the
+# constructors will appear in the respective orders defined by
+# SORT_MEMBER_DOCS and SORT_BRIEF_DOCS. This tag will be ignored for brief
+# docs if SORT_BRIEF_DOCS is set to NO and ignored for detailed docs if
+# SORT_MEMBER_DOCS is set to NO.
+
+SORT_MEMBERS_CTORS_1ST = NO
+
+# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the
+# hierarchy of group names into alphabetical order. If set to NO (the default)
+# the group names will appear in their defined order.
+
+SORT_GROUP_NAMES       = NO
+
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be
+# sorted by fully-qualified names, including namespaces. If set to
+# NO (the default), the class list will be sorted only by class name,
+# not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the
+# alphabetical list.
+
+SORT_BY_SCOPE_NAME     = NO
+
+# The GENERATE_TODOLIST tag can be used to enable (YES) or
+# disable (NO) the todo list. This list is created by putting \todo
+# commands in the documentation.
+
+GENERATE_TODOLIST      = YES
+
+# The GENERATE_TESTLIST tag can be used to enable (YES) or
+# disable (NO) the test list. This list is created by putting \test
+# commands in the documentation.
+
+GENERATE_TESTLIST      = YES
+
+# The GENERATE_BUGLIST tag can be used to enable (YES) or
+# disable (NO) the bug list. This list is created by putting \bug
+# commands in the documentation.
+
+GENERATE_BUGLIST       = YES
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or
+# disable (NO) the deprecated list. This list is created by putting
+# \deprecated commands in the documentation.
+
+GENERATE_DEPRECATEDLIST= YES
+
+# The ENABLED_SECTIONS tag can be used to enable conditional
+# documentation sections, marked by \if sectionname ... \endif.
+
+ENABLED_SECTIONS       =
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines
+# the initial value of a variable or define consists of for it to appear in
+# the documentation. If the initializer consists of more lines than specified
+# here it will be hidden. Use a value of 0 to hide initializers completely.
+# The appearance of the initializer of individual variables and defines in the
+# documentation can be controlled using \showinitializer or \hideinitializer
+# command in the documentation regardless of this setting.
+
+MAX_INITIALIZER_LINES  = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
+# at the bottom of the documentation of classes and structs. If set to YES the
+# list will mention the files that were used to generate the documentation.
+
+SHOW_USED_FILES        = YES
+
+# If the sources in your project are distributed over multiple directories
+# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy
+# in the documentation. The default is NO.
+
+SHOW_DIRECTORIES       = YES
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page.
+# This will remove the Files entry from the Quick Index and from the
+# Folder Tree View (if specified). The default is YES.
+
+SHOW_FILES             = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the
+# Namespaces page.
+# This will remove the Namespaces entry from the Quick Index
+# and from the Folder Tree View (if specified). The default is YES.
+
+SHOW_NAMESPACES        = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command <command> <input-file>, where <command> is the value of
+# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
+# provided by doxygen. Whatever the program writes to standard output
+# is used as the file version. See the manual for examples.
+
+FILE_VERSION_FILTER    =
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by
+# doxygen. The layout file controls the global structure of the generated output files
+# in an output format independent way. To create the layout file that represents
+# doxygen's defaults, run doxygen with the -l option. You can optionally specify a
+# file name after the option, if omitted DoxygenLayout.xml will be used as the name
+# of the layout file.
+
+LAYOUT_FILE            =
+
+#---------------------------------------------------------------------------
+# configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated
+# by doxygen. Possible values are YES and NO. If left blank NO is used.
+
+QUIET                  = NO
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated by doxygen. Possible values are YES and NO. If left blank
+# NO is used.
+
+WARNINGS               = YES
+
+# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
+# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
+# automatically be disabled.
+
+WARN_IF_UNDOCUMENTED   = YES
+
+# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some
+# parameters in a documented function, or documenting parameters that
+# don't exist or using markup commands wrongly.
+
+WARN_IF_DOC_ERROR      = YES
+
+# The WARN_NO_PARAMDOC option can be enabled to get warnings for
+# functions that are documented, but have no documentation for their parameters
+# or return value. If set to NO (the default) doxygen will only warn about
+# wrong or incomplete parameter documentation, but not about the absence of
+# documentation.
+
+WARN_NO_PARAMDOC       = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that
+# doxygen can produce. The string should contain the $file, $line, and $text
+# tags, which will be replaced by the file and line number from which the
+# warning originated and the warning text. Optionally the format may contain
+# $version, which will be replaced by the version of the file (if it could
+# be obtained via FILE_VERSION_FILTER)
+
+WARN_FORMAT            = "$file:$line: $text"
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning
+# and error messages should be written. If left blank the output is written
+# to stderr.
+
+WARN_LOGFILE           =
+
+#---------------------------------------------------------------------------
+# configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag can be used to specify the files and/or directories that contain
+# documented source files. You may enter file names like "myfile.cpp" or
+# directories like "/usr/src/myproject". Separate the files or directories
+# with spaces.
+
+INPUT                  = MainPage.dox \
+                         api
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is
+# also the default input encoding. Doxygen uses libiconv (or the iconv built
+# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for
+# the list of possible encodings.
+
+INPUT_ENCODING         = UTF-8
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp
+# and *.h) to filter out the source-files in the directories. If left
+# blank the following patterns are tested:
+# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx
+# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90
+
+FILE_PATTERNS          =
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories
+# should be searched for input files as well. Possible values are YES and NO.
+# If left blank NO is used.
+
+RECURSIVE              = NO
+
+# The EXCLUDE tag can be used to specify files and/or directories that should
+# be excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+
+EXCLUDE                =
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix filesystem feature) are excluded
+# from the input.
+
+EXCLUDE_SYMLINKS       = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories. Note that the wildcards are matched
+# against the file with absolute path, so to exclude all test directories
+# for example use the pattern */test/*
+
+EXCLUDE_PATTERNS       =
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+
+EXCLUDE_SYMBOLS        =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or
+# directories that contain example code fragments that are included (see
+# the \include command).
+
+EXAMPLE_PATH           = examples
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp
+# and *.h) to filter out the source-files in the directories. If left
+# blank all files are included.
+
+EXAMPLE_PATTERNS       =
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude
+# commands irrespective of the value of the RECURSIVE tag.
+# Possible values are YES and NO. If left blank NO is used.
+
+EXAMPLE_RECURSIVE      = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or
+# directories that contain images that are included in the documentation (see
+# the \image command).
+
+IMAGE_PATH             =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command <filter> <input-file>, where <filter>
+# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
+# input file. Doxygen will then use the output that the filter program writes
+# to standard output.
+# If FILTER_PATTERNS is specified, this tag will be
+# ignored.
+
+INPUT_FILTER           =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis.
+# Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match.
+# The filters are a list of the form:
+# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further
+# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER
+# is applied to all files.
+
+FILTER_PATTERNS        =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will be used to filter the input files when producing source
+# files to browse (i.e. when SOURCE_BROWSER is set to YES).
+
+FILTER_SOURCE_FILES    = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will
+# be generated. Documented entities will be cross-referenced with these sources.
+# Note: To get rid of all source code in the generated output, make sure also
+# VERBATIM_HEADERS is set to NO.
+
+SOURCE_BROWSER         = NO
+
+# Setting the INLINE_SOURCES tag to YES will include the body
+# of functions and classes directly in the documentation.
+
+INLINE_SOURCES         = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct
+# doxygen to hide any special comment blocks from generated source code
+# fragments. Normal C and C++ comments will always remain visible.
+
+STRIP_CODE_COMMENTS    = YES
+
+# If the REFERENCED_BY_RELATION tag is set to YES
+# then for each documented function all documented
+# functions referencing it will be listed.
+
+REFERENCED_BY_RELATION = YES
+
+# If the REFERENCES_RELATION tag is set to YES
+# then for each documented function all documented entities
+# called/used by that function will be listed.
+
+REFERENCES_RELATION    = YES
+
+# If the REFERENCES_LINK_SOURCE tag is set to YES (the default)
+# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from
+# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will
+# link to the source code.
+# Otherwise they will link to the documentation.
+
+REFERENCES_LINK_SOURCE = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code
+# will point to the HTML generated by the htags(1) tool instead of doxygen's
+# built-in source browser. The htags tool is part of GNU's global source
+# tagging system (see http://www.gnu.org/software/global/global.html). You
+# will need version 4.8.6 or higher.
+
+USE_HTAGS              = NO
+
+# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen
+# will generate a verbatim copy of the header file for each class for
+# which an include is specified. Set to NO to disable this.
+
+VERBATIM_HEADERS       = YES
+
+#---------------------------------------------------------------------------
+# configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index
+# of all compounds will be generated. Enable this if the project
+# contains a lot of classes, structs, unions or interfaces.
+
+ALPHABETICAL_INDEX     = NO
+
+# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then
+# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns
+# in which this list will be split (can be a number in the range [1..20])
+
+COLS_IN_ALPHA_INDEX    = 5
+
+# In case all classes in a project start with a common prefix, all
+# classes will be put under the same header in the alphabetical index.
+# The IGNORE_PREFIX tag can be used to specify one or more prefixes that
+# should be ignored while generating the index headers.
+
+IGNORE_PREFIX          =
+
+#---------------------------------------------------------------------------
+# configuration options related to the HTML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_HTML tag is set to YES (the default) Doxygen will
+# generate HTML output.
+
+GENERATE_HTML          = YES
+
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `html' will be used as the default path.
+
+HTML_OUTPUT            = html
+
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for
+# each generated HTML page (for example: .htm,.php,.asp). If it is left blank
+# doxygen will generate files with .html extension.
+
+HTML_FILE_EXTENSION    = .html
+
+# The HTML_HEADER tag can be used to specify a personal HTML header for
+# each generated HTML page. If it is left blank doxygen will generate a
+# standard header.
+
+HTML_HEADER            =
+
+# The HTML_FOOTER tag can be used to specify a personal HTML footer for
+# each generated HTML page. If it is left blank doxygen will generate a
+# standard footer.
+
+HTML_FOOTER            =
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading
+# style sheet that is used by each HTML page. It can be used to
+# fine-tune the look of the HTML output. If the tag is left blank doxygen
+# will generate a default style sheet. Note that doxygen will try to copy
+# the style sheet file to the HTML output directory, so don't put your own
+# stylesheet in the HTML output directory as well, or it will be erased!
+
+HTML_STYLESHEET        =
+
+# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes,
+# files or namespaces will be aligned in HTML using tables. If set to
+# NO a bullet list will be used.
+
+HTML_ALIGN_MEMBERS     = YES
+
+# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
+# documentation will contain sections that can be hidden and shown after the
+# page has loaded. For this to work a browser that supports
+# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox,
+# Netscape 6.0+, Internet Explorer 5.0+, Konqueror, or Safari).
+
+HTML_DYNAMIC_SECTIONS  = NO
+
+# If the GENERATE_DOCSET tag is set to YES, additional index files
+# will be generated that can be used as input for Apple's Xcode 3
+# integrated development environment, introduced with OSX 10.5 (Leopard).
+# To create a documentation set, doxygen will generate a Makefile in the
+# HTML output directory. Running make will produce the docset in that
+# directory and running "make install" will install the docset in
+# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find
+# it at startup.
+# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information.
+
+GENERATE_DOCSET        = NO
+
+# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the
+# feed. A documentation feed provides an umbrella under which multiple
+# documentation sets from a single provider (such as a company or product suite)
+# can be grouped.
+
+DOCSET_FEEDNAME        = "Doxygen generated docs"
+
+# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that
+# should uniquely identify the documentation set bundle. This should be a
+# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen
+# will append .docset to the name.
+
+DOCSET_BUNDLE_ID       = org.doxygen.Project
+
+# If the GENERATE_HTMLHELP tag is set to YES, additional index files
+# will be generated that can be used as input for tools like the
+# Microsoft HTML help workshop to generate a compiled HTML help file (.chm)
+# of the generated HTML documentation.
+
+GENERATE_HTMLHELP      = NO
+
+# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can
+# be used to specify the file name of the resulting .chm file. You
+# can add a path in front of the file if the result should not be
+# written to the html output directory.
+
+CHM_FILE               =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can
+# be used to specify the location (absolute path including file name) of
+# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run
+# the HTML help compiler on the generated index.hhp.
+
+HHC_LOCATION           =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag
+# controls whether a separate .chi index file is generated (YES) or
+# included in the master .chm file (NO).
+
+GENERATE_CHI           = NO
+
+# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING
+# is used to encode HtmlHelp index (hhk), content (hhc) and project file
+# content.
+
+CHM_INDEX_ENCODING     =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag
+# controls whether a binary table of contents is generated (YES) or a
+# normal table of contents (NO) in the .chm file.
+
+BINARY_TOC             = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members
+# to the contents of the HTML help documentation and to the tree view.
+
+TOC_EXPAND             = NO
+
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER
+# are set, an additional index file will be generated that can be used as input for
+# Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated
+# HTML documentation.
+
+GENERATE_QHP           = NO
+
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can
+# be used to specify the file name of the resulting .qch file.
+# The path specified is relative to the HTML output folder.
+
+QCH_FILE               =
+
+# The QHP_NAMESPACE tag specifies the namespace to use when generating
+# Qt Help Project output. For more information please see
+# http://doc.trolltech.com/qthelpproject.html#namespace
+
+QHP_NAMESPACE          =
+
+# The QHP_VIRTUAL_FOLDER tag specifies the virtual folder to use when generating
+# Qt Help Project output. For more information please see
+# http://doc.trolltech.com/qthelpproject.html#virtual-folders
+
+QHP_VIRTUAL_FOLDER     = doc
+
+# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add.
+# For more information please see
+# http://doc.trolltech.com/qthelpproject.html#custom-filters
+
+QHP_CUST_FILTER_NAME   =
+
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of attributes of the custom
+# filter to add. For more information please see
+# <a href="http://doc.trolltech.com/qthelpproject.html#custom-filters">Qt Help Project / Custom Filters</a>.
+
+QHP_CUST_FILTER_ATTRS  =
+
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's
+# filter section matches.
+# See <a href="http://doc.trolltech.com/qthelpproject.html#filter-attributes">Qt Help Project / Filter Attributes</a>.
+
+QHP_SECT_FILTER_ATTRS  =
+
+# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can
+# be used to specify the location of Qt's qhelpgenerator.
+# If non-empty doxygen will try to run qhelpgenerator on the generated
+# .qhp file.
+
+QHG_LOCATION           =
+
+# The DISABLE_INDEX tag can be used to turn on/off the condensed index at
+# top of each HTML page. The value NO (the default) enables the index and
+# the value YES disables it.
+
+DISABLE_INDEX          = NO
+
+# This tag can be used to set the number of enum values (range [1..20])
+# that doxygen will group on one line in the generated HTML documentation.
+
+ENUM_VALUES_PER_LINE   = 4
+
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information.
+# If the tag value is set to YES, a side panel will be generated
+# containing a tree-like index structure (just like the one that
+# is generated for HTML Help). For this to work a browser that supports
+# JavaScript, DHTML, CSS and frames is required (i.e. any modern browser).
+# Windows users are probably better off using the HTML help feature.
+
+GENERATE_TREEVIEW      = NO
+
+# By enabling USE_INLINE_TREES, doxygen will generate the Groups, Directories,
+# and Class Hierarchy pages using a tree view instead of an ordered list.
+
+USE_INLINE_TREES       = NO
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be
+# used to set the initial width (in pixels) of the frame in which the tree
+# is shown.
+
+TREEVIEW_WIDTH         = 250
+
+# Use this tag to change the font size of Latex formulas included
+# as images in the HTML documentation. The default is 10. Note that
+# when you change the font size after a successful doxygen run you need
+# to manually remove any form_*.png images from the HTML output directory
+# to force them to be regenerated.
+
+FORMULA_FONTSIZE       = 10
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box
+# for the HTML output. The underlying search engine uses javascript
+# and DHTML and should work on any modern browser. Note that when using HTML help (GENERATE_HTMLHELP) or Qt help (GENERATE_QHP)
+# there is already a search function so this one should typically
+# be disabled.
+
+SEARCHENGINE           = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will
+# generate Latex output.
+
+GENERATE_LATEX         = NO
+
+# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `latex' will be used as the default path.
+
+LATEX_OUTPUT           = latex
+
+# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
+# invoked. If left blank `latex' will be used as the default command name.
+
+LATEX_CMD_NAME         = latex
+
+# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to
+# generate index for LaTeX. If left blank `makeindex' will be used as the
+# default command name.
+
+MAKEINDEX_CMD_NAME     = makeindex
+
+# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact
+# LaTeX documents. This may be useful for small projects and may help to
+# save some trees in general.
+
+COMPACT_LATEX          = NO
+
+# The PAPER_TYPE tag can be used to set the paper type that is used
+# by the printer. Possible values are: a4, a4wide, letter, legal and
+# executive. If left blank a4wide will be used.
+
+PAPER_TYPE             = a4wide
+
+# The EXTRA_PACKAGES tag can be used to specify one or more names of LaTeX
+# packages that should be included in the LaTeX output.
+
+EXTRA_PACKAGES         =
+
+# The LATEX_HEADER tag can be used to specify a personal LaTeX header for
+# the generated latex document. The header should contain everything until
+# the first chapter. If it is left blank doxygen will generate a
+# standard header. Notice: only use this tag if you know what you are doing!
+
+LATEX_HEADER           =
+
+# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated
+# is prepared for conversion to pdf (using ps2pdf). The pdf file will
+# contain links (just like the HTML output) instead of page references
+# This makes the output suitable for online browsing using a pdf viewer.
+
+PDF_HYPERLINKS         = NO
+
+# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of
+# plain latex in the generated Makefile. Set this option to YES to get a
+# higher quality PDF documentation.
+
+USE_PDFLATEX           = NO
+
+# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode
+# command to the generated LaTeX files. This will instruct LaTeX to keep
+# running if errors occur, instead of asking the user for help.
+# This option is also used when generating formulas in HTML.
+
+LATEX_BATCHMODE        = NO
+
+# If LATEX_HIDE_INDICES is set to YES then doxygen will not
+# include the index chapters (such as File Index, Compound Index, etc.)
+# in the output.
+
+LATEX_HIDE_INDICES     = NO
+
+# If LATEX_SOURCE_CODE is set to YES then doxygen will include source code
+# with syntax highlighting in the LaTeX output. Note that which sources are
+# shown also depends on other settings such as SOURCE_BROWSER.
+
+LATEX_SOURCE_CODE      = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the RTF output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output
+# The RTF output is optimized for Word 97 and may not look very pretty with
+# other RTF readers or editors.
+
+GENERATE_RTF           = NO
+
+# The RTF_OUTPUT tag is used to specify where the RTF docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `rtf' will be used as the default path.
+
+RTF_OUTPUT             = rtf
+
+# If the COMPACT_RTF tag is set to YES Doxygen generates more compact
+# RTF documents. This may be useful for small projects and may help to
+# save some trees in general.
+
+COMPACT_RTF            = NO
+
+# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated
+# will contain hyperlink fields. The RTF file will
+# contain links (just like the HTML output) instead of page references.
+# This makes the output suitable for online browsing using WORD or other
+# programs which support those fields.
+# Note: wordpad (write) and others do not support links.
+
+RTF_HYPERLINKS         = NO
+
+# Load stylesheet definitions from file. Syntax is similar to doxygen's
+# config file, i.e. a series of assignments. You only have to provide
+# replacements, missing definitions are set to their default value.
+
+RTF_STYLESHEET_FILE    =
+
+# Set optional variables used in the generation of an rtf document.
+# Syntax is similar to doxygen's config file.
+
+RTF_EXTENSIONS_FILE    =
+
+#---------------------------------------------------------------------------
+# configuration options related to the man page output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_MAN tag is set to YES (the default) Doxygen will
+# generate man pages.
+
+GENERATE_MAN           = NO
+
+# The MAN_OUTPUT tag is used to specify where the man pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `man' will be used as the default path.
+
+MAN_OUTPUT             = man
+
+# The MAN_EXTENSION tag determines the extension that is added to
+# the generated man pages (default is the subroutine's section .3)
+
+MAN_EXTENSION          = .3
+
+# If the MAN_LINKS tag is set to YES and Doxygen generates man output,
+# then it will generate one additional man file for each entity
+# documented in the real man page(s). These additional files
+# only source the real man page, but without them the man command
+# would be unable to find the correct page. The default is NO.
+
+MAN_LINKS              = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the XML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_XML tag is set to YES Doxygen will
+# generate an XML file that captures the structure of
+# the code including all documentation.
+
+GENERATE_XML           = NO
+
+# The XML_OUTPUT tag is used to specify where the XML pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `xml' will be used as the default path.
+
+XML_OUTPUT             = xml
+
+# The XML_SCHEMA tag can be used to specify an XML schema,
+# which can be used by a validating XML parser to check the
+# syntax of the XML files.
+
+XML_SCHEMA             =
+
+# The XML_DTD tag can be used to specify an XML DTD,
+# which can be used by a validating XML parser to check the
+# syntax of the XML files.
+
+XML_DTD                =
+
+# If the XML_PROGRAMLISTING tag is set to YES Doxygen will
+# dump the program listings (including syntax highlighting
+# and cross-referencing information) to the XML output. Note that
+# enabling this will significantly increase the size of the XML output.
+
+XML_PROGRAMLISTING     = YES
+
+#---------------------------------------------------------------------------
+# configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will
+# generate an AutoGen Definitions (see autogen.sf.net) file
+# that captures the structure of the code including all
+# documentation. Note that this feature is still experimental
+# and incomplete at the moment.
+
+GENERATE_AUTOGEN_DEF   = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_PERLMOD tag is set to YES Doxygen will
+# generate a Perl module file that captures the structure of
+# the code including all documentation. Note that this
+# feature is still experimental and incomplete at the
+# moment.
+
+GENERATE_PERLMOD       = NO
+
+# If the PERLMOD_LATEX tag is set to YES Doxygen will generate
+# the necessary Makefile rules, Perl scripts and LaTeX code to be able
+# to generate PDF and DVI output from the Perl module output.
+
+PERLMOD_LATEX          = NO
+
+# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be
+# nicely formatted so it can be parsed by a human reader.
+# This is useful
+# if you want to understand what is going on.
+# On the other hand, if this
+# tag is set to NO the size of the Perl module output will be much smaller
+# and Perl will parse it just the same.
+
+PERLMOD_PRETTY         = YES
+
+# The names of the make variables in the generated doxyrules.make file
+# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX.
+# This is useful so different doxyrules.make files included by the same
+# Makefile don't overwrite each other's variables.
+
+PERLMOD_MAKEVAR_PREFIX =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+
+# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will
+# evaluate all C-preprocessor directives found in the sources and include
+# files.
+
+ENABLE_PREPROCESSING   = YES
+
+# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro
+# names in the source code. If set to NO (the default) only conditional
+# compilation will be performed. Macro expansion can be done in a controlled
+# way by setting EXPAND_ONLY_PREDEF to YES.
+
+MACRO_EXPANSION        = NO
+
+# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES
+# then the macro expansion is limited to the macros specified with the
+# PREDEFINED and EXPAND_AS_DEFINED tags.
+
+EXPAND_ONLY_PREDEF     = NO
+
+# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files
+# in the INCLUDE_PATH (see below) will be searched if a #include is found.
+
+SEARCH_INCLUDES        = YES
+
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by
+# the preprocessor.
+
+INCLUDE_PATH           =
+
+# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
+# patterns (like *.h and *.hpp) to filter out the header-files in the
+# directories. If left blank, the patterns specified with FILE_PATTERNS will
+# be used.
+
+INCLUDE_FILE_PATTERNS  =
+
+# The PREDEFINED tag can be used to specify one or more macro names that
+# are defined before the preprocessor is started (similar to the -D option of
+# gcc). The argument of the tag is a list of macros of the form: name
+# or name=definition (no spaces). If the definition and the = are
+# omitted =1 is assumed. To prevent a macro definition from being
+# undefined via #undef or recursively expanded use the := operator
+# instead of the = operator.
+
+PREDEFINED             =
+
+# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then
+# this tag can be used to specify a list of macro names that should be expanded.
+# The macro definition that is found in the sources will be used.
+# Use the PREDEFINED tag if you want to use a different macro definition.
+
+EXPAND_AS_DEFINED      =
+
+# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then
+# doxygen's preprocessor will remove all function-like macros that are alone
+# on a line, have an all uppercase name, and do not end with a semicolon. Such
+# function macros are typically used for boiler-plate code, and will confuse
+# the parser if not removed.
+
+SKIP_FUNCTION_MACROS   = YES
+
+#---------------------------------------------------------------------------
+# Configuration::additions related to external references
+#---------------------------------------------------------------------------
+
+# The TAGFILES option can be used to specify one or more tagfiles.
+# Optionally an initial location of the external documentation
+# can be added for each tagfile. The format of a tag file without
+# this location is as follows:
+#
+# TAGFILES = file1 file2 ...
+# Adding location for the tag files is done as follows:
+#
+# TAGFILES = file1=loc1 "file2 = loc2" ...
+# where "loc1" and "loc2" can be relative or absolute paths or
+# URLs. If a location is present for each tag, the installdox tool
+# does not have to be run to correct the links.
+# Note that each tag file must have a unique name
+# (where the name does NOT include the path)
+# If a tag file is not located in the directory in which doxygen
+# is run, you must also specify the path to the tagfile here.
+
+TAGFILES               =
+
+# When a file name is specified after GENERATE_TAGFILE, doxygen will create
+# a tag file that is based on the input files it reads.
+
+GENERATE_TAGFILE       =
+
+# If the ALLEXTERNALS tag is set to YES all external classes will be listed
+# in the class index. If set to NO only the inherited external classes
+# will be listed.
+
+ALLEXTERNALS           = NO
+
+# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed
+# in the modules index. If set to NO, only the current project's groups will
+# be listed.
+
+EXTERNAL_GROUPS        = YES
+
+# The PERL_PATH should be the absolute path and name of the perl script
+# interpreter (i.e. the result of `which perl').
+
+PERL_PATH              = /usr/bin/perl
+
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+
+# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will
+# generate an inheritance diagram (in HTML, RTF and LaTeX) for classes with base
+# or super classes. Setting the tag to NO turns the diagrams off. Note that
+# this option is superseded by the HAVE_DOT option below. This is only a
+# fallback. It is recommended to install and use dot, since it yields more
+# powerful graphs.
+
+CLASS_DIAGRAMS         = YES
+
+# You can define message sequence charts within doxygen comments using the \msc
+# command. Doxygen will then run the mscgen tool (see
+# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
+# documentation. The MSCGEN_PATH tag allows you to specify the directory where
+# the mscgen tool resides. If left empty the tool is assumed to be found in the
+# default search path.
+
+MSCGEN_PATH            =
+
+# If set to YES, the inheritance and collaboration graphs will hide
+# inheritance and usage relations if the target is undocumented
+# or is not a class.
+
+HIDE_UNDOC_RELATIONS   = YES
+
+# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
+# available from the path. This tool is part of Graphviz, a graph visualization
+# toolkit from AT&T and Lucent Bell Labs. The other options in this section
+# have no effect if this option is set to NO (the default)
+
+HAVE_DOT               = NO
+
+# By default doxygen will write a font called FreeSans.ttf to the output
+# directory and reference it in all dot files that doxygen generates. This
+# font does not include all possible unicode characters however, so when you need
+# these (or just want a differently looking font) you can specify the font name
+# using DOT_FONTNAME. You need to make sure dot is able to find the font,
+# which can be done by putting it in a standard location or by setting the
+# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory
+# containing the font.
+
+DOT_FONTNAME           = FreeSans
+
+# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs.
+# The default size is 10pt.
+
+DOT_FONTSIZE           = 10
+
+# By default doxygen will tell dot to use the output directory to look for the
+# FreeSans.ttf font (which doxygen will put there itself). If you specify a
+# different font using DOT_FONTNAME you can set the path where dot
+# can find it using this tag.
+
+DOT_FONTPATH           =
+
+# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for each documented class showing the direct and
+# indirect inheritance relations. Setting this tag to YES will force the
+# CLASS_DIAGRAMS tag to NO.
+
+CLASS_GRAPH            = YES
+
+# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for each documented class showing the direct and
+# indirect implementation dependencies (inheritance, containment, and
+# class references variables) of the class with other documented classes.
+
+COLLABORATION_GRAPH    = YES
+
+# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for groups, showing the direct groups dependencies
+
+GROUP_GRAPHS           = YES
+
+# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
+# Language.
+
+UML_LOOK               = NO
+
+# If set to YES, the inheritance and collaboration graphs will show the
+# relations between templates and their instances.
+
+TEMPLATE_RELATIONS     = NO
+
+# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
+# tags are set to YES then doxygen will generate a graph for each documented
+# file showing the direct and indirect include dependencies of the file with
+# other documented files.
+
+INCLUDE_GRAPH          = YES
+
+# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
+# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
+# documented header file showing the documented files that directly or
+# indirectly include this file.
+
+INCLUDED_BY_GRAPH      = YES
+
+# If the CALL_GRAPH and HAVE_DOT options are set to YES then
+# doxygen will generate a call dependency graph for every global function
+# or class method. Note that enabling this option will significantly increase
+# the time of a run. So in most cases it will be better to enable call graphs
+# for selected functions only using the \callgraph command.
+
+CALL_GRAPH             = NO
+
+# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then
+# doxygen will generate a caller dependency graph for every global function
+# or class method. Note that enabling this option will significantly increase
+# the time of a run. So in most cases it will be better to enable caller
+# graphs for selected functions only using the \callergraph command.
+
+CALLER_GRAPH           = NO
+
+# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
+# will show a graphical hierarchy of all classes instead of a textual one.
+
+GRAPHICAL_HIERARCHY    = YES
+
+# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES
+# then doxygen will show the dependencies a directory has on other directories
+# in a graphical way. The dependency relations are determined by the #include
+# relations between the files in the directories.
+
+DIRECTORY_GRAPH        = YES
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot. Possible values are png, jpg, or gif
+# If left blank png will be used.
+
+DOT_IMAGE_FORMAT       = png
+
+# The tag DOT_PATH can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+
+DOT_PATH               =
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the
+# \dotfile command).
+
+DOTFILE_DIRS           =
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of
+# nodes that will be shown in the graph. If the number of nodes in a graph
+# becomes larger than this value, doxygen will truncate the graph, which is
+# visualized by representing a node as a red box. Note that if the
+# number of direct children of the root node in a graph is already larger than
+# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note
+# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+
+DOT_GRAPH_MAX_NODES    = 50
+
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the
+# graphs generated by dot. A depth value of 3 means that only nodes reachable
+# from the root by following a path via at most 3 edges will be shown. Nodes
+# that lie further from the root node will be omitted. Note that setting this
+# option to 1 or 2 may greatly reduce the computation time needed for large
+# code bases. Also note that the size of a graph can be further restricted by
+# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
+
+MAX_DOT_GRAPH_DEPTH    = 0
+
+# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
+# background. This is disabled by default, because dot on Windows does not
+# seem to support this out of the box. Warning: Depending on the platform used,
+# enabling this option may lead to badly anti-aliased labels on the edges of
+# a graph (i.e. they become hard to read).
+
+DOT_TRANSPARENT        = NO
+
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10)
+# support this, this feature is disabled by default.
+
+DOT_MULTI_TARGETS      = NO
+
+# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will
+# generate a legend page explaining the meaning of the various boxes and
+# arrows in the dot generated graphs.
+
+GENERATE_LEGEND        = YES
+
+# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will
+# remove the intermediate dot files that are used to generate
+# the various graphs.
+
+DOT_CLEANUP            = YES
diff --git a/lang/c++/MainPage.dox b/lang/c++/MainPage.dox
new file mode 100644
index 0000000..85d07a5
--- /dev/null
+++ b/lang/c++/MainPage.dox
@@ -0,0 +1,338 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*!
+\mainpage
+
+<h2>Introduction to Avro C++</h2>
+
+<p>Avro is a data serialization system. See
+<a href="http://avro.apache.org/docs/current/">http://avro.apache.org/docs/current/</a>
+for background information.</p>
+<p>Avro C++ is a C++ library which implements parts of the <a href="http://avro.apache.org/docs/current/spec.html">Avro Specification</a>. The library includes the following functionality:</p>
+<ul>
+	<li>Assembling schemas programmatically.
+    <li>A schema parser, which can parse an Avro schema (written in JSON) into a Schema object.
+    <li>Encoders and decoders to encode data into Avro format and decode it back using primitive functions. There are multiple implementations of encoders and decoders.
+    <ul>
+        <li>A binary encoder, which encodes into binary Avro data.
+        <li>A JSON encoder, which encodes into JSON Avro data.
+        <li>A validating encoder, an encoder proxy, which validates the call sequence to the encoder before sending the calls to another encoder.
+        <li>A binary decoder, which decodes binary Avro data.
+        <li>A JSON decoder, which decodes JSON Avro data.
+        <li>A validating decoder, a decoder proxy, which validates the call sequence to the decoder before sending the calls to another decoder.
+        <li>A resolving decoder, which accepts calls according to a reader's schema but decodes data corresponding to a different (writer's) schema, performing schema resolution according to the resolution rules in the Avro specification.
+    </ul>
+    <li>Streams for storing and reading data, which Encoders and Decoders use.
+    <li>Support for Avro DataFile.
+    <li>A code generator, which generates C++ classes and functions to encode and decode them. The code generator produces a C++ header file from a given schema file.
+</ul>
+Presently there is no support for the following features of the Avro specification:
+<ul>
+    <li>Avro RPC
+</ul>
+
+<b>Note:</b> Prior to Avro release 1.5, some of the functionality mentioned above was available through a somewhat different API and set of tools, which are partially incompatible with the present ones. They continue to be available but will be deprecated and discontinued sometime in the future. The documentation on that API can be found at <a href="http://avro.apache.org/docs/1.4.0/api/cpp/html/index.html">http://avro.apache.org/docs/1.4.0/api/cpp/html/index.html</a>
+
+<h2>Installing Avro C++</h2>
+<h3>Supported platforms and pre-requisites</h3>
+One should be able to build Avro C++ on (1) any UNIX flavor, including Cygwin for Windows, and (2) natively on Windows using Visual Studio. We have tested it on Linux systems (Ubuntu and RHEL), on Cygwin, and with Visual Studio 2010 Express Edition.
+
+In order to build Avro C++, one needs the following:
+<ul>
+    <li>A C++ compiler and runtime libraries.
+    <li>Boost library version 1.38 or later. Apart from the header-only libraries of Boost, Avro C++ requires filesystem, system and program_options libraries. Please see <a href="http://www.boost.org/">http://www.boost.org</a> or your platform's documentation for details on how to set up Boost for your platform. 
+    <li>CMake build tool version 2.6 or later. Please see <a href="http://www.cmake.org">http://www.cmake.org</a> or your platform's documentation for details on how to set up CMake for your system.
+    <li>Python. If not already present, please consult your platform-specific documentation on how to install Python on your system.
+</ul>
+
+For Ubuntu Linux, for example, you can install these by running
+<tt>apt-get install</tt> for the following packages:
+\li cmake
+\li g++
+\li libboost-dev
+\li libboost-filesystem-dev
+\li libboost-system-dev
+\li libboost-program-options-dev
+
+For Windows native builds, you need to install the following:
+\li cmake
+\li the Boost distribution
+\li Visual Studio
+
+<h3>Installing Avro C++</h3>
+<ol>
+    <li>Download the latest Avro distribution. The distribution is a compressed tarball.
+Please see the main documentation if you want to build anything more than Avro C++.
+</ol>
+
+<h4>On Unix systems and on Cygwin</h4>
+<ol>
+    <li>Expand the tarball into a directory.
+    <li>Change to <tt>lang/c++</tt> subdirectory.
+    <li>Type <tt>./build.sh test</tt>. This builds Avro C++ and runs tests on it.
+    <li>Type <tt>./build.sh install</tt>. This installs Avro C++ under /usr/local on your system.
+</ol>
+
+<h4>On native Windows</h4>
+<ol>
+    <li>Ensure that CMake's bin directory and Boost's lib directory are in the path.
+    <li>Expand the tarball into a directory.
+    <li>Change to <tt>lang/c++</tt> subdirectory.
+    <li>Create a subdirectory, say, build.win, and change to that directory.
+    <li>Type <tt>cmake -G "Visual Studio 10"</tt>. It creates, among other things, the <tt>Avro-cpp.sln</tt> file.
+    <li>Open the solution file using Visual Studio and build the projects from within Visual Studio.
+    <li>To run all unit tests, build the special project named "RUN_TESTS".
+    <li>After building all the projects, you can also execute the unit tests from the command line: <tt>ctest -C release</tt> or <tt>ctest -C debug</tt>.
+
+</ol>
+
+<h2>Getting started with Avro C++</h2>
+
+<p>Although Avro does not require use of code generation, that is the easiest
+way to get started with the Avro C++ library.
+The code generator reads a schema, and generates a C++
+header file that defines one or more C++ <tt>struct</tt>s to represent
+the data for the schema and functions to encode and decode those
+<tt>struct</tt>s. Even if you wish to write custom code to encode and decode
+your objects using the core functionality of Avro C++, the generated code
+can serve as an example of how to use the core functionality.
+
+<p>
+Let's walk through an example, using a simple schema. Use the
+schema that represents a complex number:</p>
+
+<b>File: cpx.json</b>
+
+\includelineno cpx.json
+<p>
+<b>Note:</b> All the example code given here can be found under
+the <tt>examples</tt> directory of the distribution.
+
+<p>
+Assume this JSON representation of the schema is stored in a file
+called <tt>cpx.json</tt>. To generate the code, issue the command:
+<pre>
+avrogencpp -i cpx.json -o cpx.hh -n c
+</pre>
+The <tt>-i</tt> flag specifies the input schema file and the <tt>-o</tt> flag
+specifies the output header file to generate. The generated C++ code will be
+in the namespace specified with the <tt>-n</tt> flag.
+
+<p>
+The generated file will, among other things, contain the following:
+
+<pre>
+
+...
+namespace c {
+...
+
+struct cpx {
+    double re;
+    double im;
+};
+
+
+...
+
+}
+
+</pre>
+<tt>cpx</tt> is a C++ representation of the Avro schema <tt>cpx</tt>.
+
+Now let's see how we can use the code generated to encode data into avro and decode it back.
+
+<b>File: generated.cc</b>
+
+\includelineno generated.cc
+
+In line 9, we construct a memory output stream. By this we indicate that we
+want to send the encoded Avro data into memory. In line 10, we construct a
+binary encoder, by which we mean the output should be encoded using the Avro
+binary standard. In line 11, we attach the output stream to the encoder. At
+any given time an encoder can write to only one output stream.
+<p>
+In line 14, we write the contents of c1 into the output stream using the
+encoder. Now the output stream contains the binary representation of
+the object. The rest of the code verifies that the data is indeed in the stream.
+<p>
+In line 17, we construct a memory input stream from the contents of the
+output stream. Thus the input stream has the binary representation of the
+object. In lines 18 and 19, we construct a binary decoder and attach the
+input stream to it. Line 22 decodes the contents of the stream into another
+object c2. Now c1 and c2 should have identical contents, which one can readily
+verify from the output of the program, which should be:
+
+<pre>
+(1, 2.13)
+</pre>
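+
+For readers without the <tt>examples</tt> directory at hand, a minimal
+program along the lines of <tt>generated.cc</tt> looks roughly like the
+following sketch (the line numbers discussed above refer to the shipped
+example, which may differ slightly from this):
+
+<pre>
+#include "cpx.hh"
+#include "avro/Encoder.hh"
+#include "avro/Decoder.hh"
+#include <iostream>
+
+int main()
+{
+    // Encode a cpx record into an in-memory stream.
+    std::auto_ptr<avro::OutputStream> out = avro::memoryOutputStream();
+    avro::EncoderPtr e = avro::binaryEncoder();
+    e->init(*out);
+
+    c::cpx c1;
+    c1.re = 1.0;
+    c1.im = 2.13;
+    avro::encode(*e, c1);
+
+    // Decode it back from the same bytes.
+    std::auto_ptr<avro::InputStream> in = avro::memoryInputStream(*out);
+    avro::DecoderPtr d = avro::binaryDecoder();
+    d->init(*in);
+
+    c::cpx c2;
+    avro::decode(*d, c2);
+    std::cout << '(' << c2.re << ", " << c2.im << ')' << std::endl;
+    return 0;
+}
+</pre>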
+
+Now, if you want to encode the data using Avro JSON encoding, you should use
+avro::jsonEncoder() instead of avro::binaryEncoder() in line 10
+and avro::jsonDecoder() instead of avro::binaryDecoder() in line 18.
+<p>
+
+On the other hand, if you want to write the contents to a file instead of
+memory, you should use avro::fileOutputStream() instead of
+avro::memoryOutputStream() in line 9 and avro::fileInputStream()
+instead of avro::memoryInputStream() in line 17.
+<p>
+
+<h2>Reading a JSON schema</h2>
+
+<p>The section above demonstrated pretty much all you need to know to
+get started reading and writing objects using the Avro C++
+code generator. The following sections will cover some more
+information.</p>
+<p>The library provides some utilities to read a schema that is
+stored in a JSON file:</p>
+
+<b>File: schemaload.cc</b>
+
+\includelineno schemaload.cc
+
+<p>
+This reads the file, and parses the JSON schema into an in-memory schema
+object of type avro::ValidSchema. If, for some reason, the schema is not valid,
+the <tt>cpxSchema</tt> object will not be set, and an exception will be
+thrown. 
+</p>
+If you always use the Avro code generator, you don't really need the
+in-memory schema objects. But if you use custom objects and routines to
+encode or decode Avro data, you will need the schema objects. Other uses of schema objects
+are generic data objects and schema resolution described in the following
+sections.
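+
+<p>
+A minimal sketch of such a loader, built on the compileJsonSchema function
+declared in Compiler.hh (the shipped <tt>schemaload.cc</tt> may differ
+slightly):
+</p>
+
+<pre>
+#include <fstream>
+#include "avro/Compiler.hh"
+#include "avro/ValidSchema.hh"
+
+avro::ValidSchema loadSchema(const char* filename)
+{
+    std::ifstream ifs(filename);
+    avro::ValidSchema result;
+    avro::compileJsonSchema(ifs, result);   // throws if the schema is invalid
+    return result;
+}
+</pre>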
+
+<h2>Custom encoding and decoding</h2>
+
+Suppose you want to encode objects of type std::complex<double> from
+the C++ standard library using the schema defined in cpx.json.
+Since std::complex<double> was not generated by Avro, Avro doesn't know how
+to encode or decode objects of that type. You have to tell Avro how to do that.
+
+The recommended way to tell Avro how to encode or decode is to specialize
+Avro's codec_traits template. For std::complex<double>, here is what you'd do:
+
+<b>File: custom.cc</b>
+
+\includelineno custom.cc
+
+Please notice that the main function is quite similar to the one we used
+for the generated class. Once <tt>codec_traits</tt> for a specific type is
+supplied, you do not really need to do anything special for your custom types.
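+
+<p>
+The heart of <tt>custom.cc</tt> is the specialization itself. A sketch of it
+follows (the shipped example may differ slightly):
+</p>
+
+<pre>
+#include <complex>
+#include "avro/Encoder.hh"
+#include "avro/Decoder.hh"
+#include "avro/Specific.hh"
+
+namespace avro {
+template<> struct codec_traits<std::complex<double> > {
+    static void encode(Encoder& e, const std::complex<double>& c) {
+        avro::encode(e, std::real(c));   // corresponds to the "re" field
+        avro::encode(e, std::imag(c));   // corresponds to the "im" field
+    }
+    static void decode(Decoder& d, std::complex<double>& c) {
+        double re = 0.0, im = 0.0;
+        avro::decode(d, re);
+        avro::decode(d, im);
+        c = std::complex<double>(re, im);
+    }
+};
+}
+</pre>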
+
+<p>
+
+But wait, how does Avro know that complex<double> represents the data for
+the schema in <tt>cpx.json</tt>? It doesn't. In fact, had you used
+<tt>std::complex<float></tt> instead of <tt>std::complex<double></tt>, the
+program would still have worked. But the data in memory would not have
+corresponded to the schema in <tt>cpx.json</tt>.
+
+<p>
+
+In order to ensure that you indeed use the correct type, you can use
+the validating encoders and decoders. Here is how:
+
+<b>File: validating.cc</b>
+
+\includelineno validating.cc
+
+Here, instead of using the plain binary encoder, you use a validating encoder
+backed by a binary encoder. Similarly, instead of using the plain binary
+decoder, you use a validating decoder backed by a binary decoder. Now,
+if you use <tt>std::complex<float></tt> instead of <tt>std::complex<double></tt>,
+the validating encoder and decoder will throw an exception stating that
+you are trying to encode or decode <tt>float</tt> instead of <tt>double</tt>.
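+
+<p>
+Constructing the validating pair is a one-liner each; for example, assuming
+<tt>cpxSchema</tt> holds the schema parsed from <tt>cpx.json</tt>:
+</p>
+
+<pre>
+avro::EncoderPtr e = avro::validatingEncoder(cpxSchema, avro::binaryEncoder());
+avro::DecoderPtr d = avro::validatingDecoder(cpxSchema, avro::binaryDecoder());
+</pre>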
+<p>
+You can use any encoder behind the validating encoder and any decoder
+behind the validating decoder. But in practice, only the binary encoder
+and the binary decoder have no knowledge of the underlying schema.
+All other encoders (JSON encoder) and decoders (JSON decoder,
+resolving decoder) do know about the schema and they validate internally. So,
+fronting them with a validating encoder or validating decoder is wasteful.
+
+<h2>Generic data objects</h2>
+
+A third way to encode and decode data is to use Avro's generic datum.
+Avro's generic datum allows you to read arbitrary data corresponding to
+an arbitrary schema into a generic object. One need not know anything
+about the schema or data at compile time.
+
+Here is an example of how one can use the generic datum.
+
+<b>File: generic.cc</b>
+
+\includelineno generic.cc
+
+In this example, we encode the data using generated code and decode it with
+generic datum. Then we examine the contents of the generic datum and extract
+them. Please see \ref avro::GenericDatum for more details on how to use it.
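+
+<p>
+The examination step looks roughly like the sketch below, assuming
+<tt>cpxSchema</tt> and an initialized decoder <tt>d</tt> as in the shipped
+example:
+</p>
+
+<pre>
+avro::GenericDatum datum(cpxSchema);
+avro::decode(*d, datum);
+if (datum.type() == avro::AVRO_RECORD) {
+    const avro::GenericRecord& r = datum.value<avro::GenericRecord>();
+    if (r.fieldCount() == 2) {
+        const avro::GenericDatum& f0 = r.fieldAt(0);   // the "re" field
+        if (f0.type() == avro::AVRO_DOUBLE) {
+            std::cout << "Real: " << f0.value<double>() << std::endl;
+        }
+    }
+}
+</pre>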
+
+<h2>Reading data with a schema different from that of the writer</h2>
+It is possible to read the data written according to one schema
+using a different schema, provided the reader's schema and the writer's
+schema are compatible according to Avro's schema resolution rules.
+<p>
+For example, suppose you have a reader which is interested only in the
+imaginary part of a complex number, while the writer writes both the real
+and imaginary parts. It is possible to do automatic schema resolution between
+the writer's schema and the reader's schema as shown below.
+
+<b>File: imaginary.json</b>
+
+\includelineno imaginary.json
+
+<pre>
+avrogencpp -i imaginary.json -o imaginary.hh -n i
+</pre>
+
+<b>File: resolving.cc</b>
+
+\includelineno resolving.cc
+
+In this example, the writer and reader deal with different schemas;
+both are records with the same name cpx. The writer's schema has two fields
+and the reader's has just one. We generated code for the writer's schema in
+namespace <tt>c</tt> and for the reader's in <tt>i</tt>.
+
+<p>
+Please notice how the reading part of the example at line 42 reads as if
+the stream contains the data corresponding to its schema. The schema resolution
+is automatically done by the resolving decoder.
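+
+<p>
+The key step is constructing a resolving decoder from the two schemas. A
+sketch, assuming <tt>writerSchema</tt> and <tt>readerSchema</tt> hold the
+schemas parsed from <tt>cpx.json</tt> and <tt>imaginary.json</tt>, and
+<tt>in</tt> is an input stream positioned at the encoded data:
+</p>
+
+<pre>
+avro::DecoderPtr d = avro::resolvingDecoder(writerSchema, readerSchema,
+    avro::binaryDecoder());
+d->init(*in);
+
+i::cpx c2;
+avro::decode(*d, c2);   // reads only the imaginary part
+</pre>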
+
+<p>
+In this example, we have used a simple (somewhat artificial) projection (where
+the set of fields in the reader's schema is a subset of the set of fields in
+the writer's). But more complex resolutions are allowed by the Avro
+specification.
+
+<h2>Using Avro data files</h2>
+The Avro specification defines a format for data files, and Avro C++
+implements that specification. The code below demonstrates how one can use an
+Avro data file to store and retrieve a collection of objects
+corresponding to a given schema.
+
+<b>File: datafile.cc</b>
+
+\includelineno datafile.cc
+
+Please see DataFile.hh for more details.
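+
+<p>
+In outline, writing and reading a data file looks like the sketch below,
+assuming the generated type <tt>c::cpx</tt> and its schema
+<tt>cpxSchema</tt> (the shipped <tt>datafile.cc</tt> may differ slightly):
+</p>
+
+<pre>
+{
+    // Write 100 records, then close the file.
+    avro::DataFileWriter<c::cpx> dfw("test.bin", cpxSchema);
+    c::cpx c1;
+    for (int i = 0; i < 100; i++) {
+        c1.re = i * 100;
+        c1.im = i + 100;
+        dfw.write(c1);
+    }
+    dfw.close();
+}
+
+{
+    // Read the records back until the file is exhausted.
+    avro::DataFileReader<c::cpx> dfr("test.bin", cpxSchema);
+    c::cpx c2;
+    while (dfr.read(c2)) {
+        std::cout << '(' << c2.re << ", " << c2.im << ')' << std::endl;
+    }
+}
+</pre>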
+*/
+
diff --git a/lang/c++/NEWS b/lang/c++/NEWS
new file mode 100644
index 0000000..e70f439
--- /dev/null
+++ b/lang/c++/NEWS
@@ -0,0 +1,5 @@
+
+For news, visit the Avro web site at
+http://avro.apache.org/
+
+
diff --git a/lang/c++/README b/lang/c++/README
new file mode 100644
index 0000000..4c8392f
--- /dev/null
+++ b/lang/c++/README
@@ -0,0 +1,56 @@
+Avro C++ README.txt
+
+The C++ port is thus far incomplete.  Currently, it contains:
+
+ - Serializer/Parser- objects for writing/reading raw binary.
+
+ - xxxSchema- objects for composing schemas.
+
+ - ValidSchema- a schema object that has been converted to a parse tree
+   (with some sanity checks). 
+
+ - ValidSchema.toJson() writes the schema as a json object.
+
+ - ValidatingSerializer/ValidatingParser- check that reads/writes
+   match the expected schema type (more expensive than the raw
+   serializer/parser but they detect errors, and allow dynamic
+   discovery of parsed data/attributes).
+
+ - Compiler (compileJsonSchema())- converts a Json string schema to a
+   ValidSchema.
+
+ - Code Generation (experimental) - given a schema it generates C++
+   objects of the same data types, and the code to serialize and parse
+   it.
+
+What's missing: Defaults are not yet supported, and the file and rpc
+containers are not yet implemented. Documentation is sparse.
+
+INSTRUCTIONS
+
+Compiling requires the Boost headers and the Boost regex library.
+Additionally, generating the Avro spec compiler requires flex and bison.
+Building requires CMake 2.6 or later.
+
+To generate a Makefile under Unix or Cygwin use:
+
+cmake -G "Unix Makefiles"
+
+If it doesn't work, either you are missing some packages (boost, flex or bison),
+or you need to help the configuration locate them.
+
+If the Makefile is configured correctly, then you can make and run tests:
+
+    make
+    ./build/unittest
+    ./build/buffertest
+    ./build/testgentest
+    ./build/CodecTests
+    ./build/StreamTests
+
+To install
+
+    make package
+
+and then untar the generated .tar.gz file.
+
diff --git a/lang/c++/api/AvroParse.hh b/lang/c++/api/AvroParse.hh
new file mode 100644
index 0000000..61d79ec
--- /dev/null
+++ b/lang/c++/api/AvroParse.hh
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_AvroParse_hh__
+#define avro_AvroParse_hh__
+
+#include "Config.hh"
+#include <boost/static_assert.hpp>
+#include "AvroTraits.hh"
+#include "ResolvingReader.hh"
+
+/// \file
+///
+/// Standalone parse functions for Avro types.
+
+namespace avro {
+    
+/// The main parse entry point function.  Takes a parser (either validating or
+/// plain) and the object that should receive the parsed data.
+
+template <typename Reader, typename T>
+void parse(Reader &p, T& val)
+{
+    parse(p, val, is_serializable<T>());
+}
+
+template <typename T>
+void parse(ResolvingReader &p, T& val)
+{
+    translatingParse(p, val, is_serializable<T>());
+}
+
+/// If the is_serializable type trait is not set for the type, force the compiler to complain.
+
+template <typename Reader, typename T>
+void parse(Reader &p, T& val, const boost::false_type &)
+{
+    BOOST_STATIC_ASSERT(sizeof(T)==0);
+}
+
+template <typename Reader, typename T>
+void translatingParse(Reader &p, T& val, const boost::false_type &)
+{
+    BOOST_STATIC_ASSERT(sizeof(T)==0);
+}
+
+// @{
+
+/// The remainder of the file includes default implementations for serializable types.
+
+
+template <typename Reader, typename T>
+void parse(Reader &p, T &val, const boost::true_type &) {
+    p.readValue(val);
+}
+
+template <typename Reader>
+void parse(Reader &p, std::vector<uint8_t> &val, const boost::true_type &) {
+    p.readBytes(val);
+}
+
+template<typename T>
+void translatingParse(ResolvingReader &p, T& val, const boost::true_type &) {
+    p.parse(val);
+}
+
+// @}
+
+} // namespace avro
+
+#endif
diff --git a/lang/c++/api/AvroSerialize.hh b/lang/c++/api/AvroSerialize.hh
new file mode 100644
index 0000000..d21232a
--- /dev/null
+++ b/lang/c++/api/AvroSerialize.hh
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_AvroSerialize_hh__
+#define avro_AvroSerialize_hh__
+
+#include "Config.hh"
+#include <boost/static_assert.hpp>
+#include "AvroTraits.hh"
+
+/// \file
+///
+/// Standalone serialize functions for Avro types.
+
+namespace avro {
+    
+/// The main serializer entry point function.  Takes a serializer (either validating or
+/// plain) and the object that should be serialized.
+
+template <typename Writer, typename T>
+void serialize(Writer &s, const T& val)
+{
+    serialize(s, val, is_serializable<T>());
+}
+
+/// If the is_serializable type trait is not set for the type, force the compiler to complain.
+
+template <typename Writer, typename T>
+void serialize(Writer &s, const T& val, const boost::false_type &)
+{
+    BOOST_STATIC_ASSERT(sizeof(T)==0);
+}
+
+/// The remainder of the file includes default implementations for serializable types.
+
+// @{
+
+template <typename Writer, typename T>
+void serialize(Writer &s, T val, const boost::true_type &) {
+    s.writeValue(val);
+}
+
+template <typename Writer>
+void serialize(Writer &s, const std::vector<uint8_t> &val, const boost::true_type &) {
+    s.writeBytes(&val[0], val.size());
+}
+
+// @}
+
+} // namespace avro
+
+#endif
diff --git a/lang/c++/api/AvroTraits.hh b/lang/c++/api/AvroTraits.hh
new file mode 100644
index 0000000..41db837
--- /dev/null
+++ b/lang/c++/api/AvroTraits.hh
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_AvroTraits_hh__
+#define avro_AvroTraits_hh__
+
+#include "Config.hh"
+#include "Boost.hh"
+#include "Types.hh"
+
+/// \file
+///
+/// Define an is_serializable trait for types we can serialize natively. 
+/// New types will need to define the trait as well.
+
+namespace avro {
+
+template <typename T>
+struct is_serializable : public boost::false_type{};
+
+template <typename T>
+struct is_promotable : public boost::false_type{};
+
+template <typename T>
+struct type_to_avro {
+    static const Type type = AVRO_NUM_TYPES;
+};
+
+#define DEFINE_PRIMITIVE(CTYPE, AVROTYPE) \
+template <> \
+struct is_serializable<CTYPE> : public boost::true_type{}; \
+\
+template <> \
+struct type_to_avro<CTYPE> { \
+    static const Type type = AVROTYPE; \
+};
+
+#define DEFINE_PROMOTABLE_PRIMITIVE(CTYPE, AVROTYPE) \
+template <> \
+struct is_promotable<CTYPE> : public boost::true_type{}; \
+\
+DEFINE_PRIMITIVE(CTYPE, AVROTYPE)
+
+DEFINE_PROMOTABLE_PRIMITIVE(int32_t, AVRO_INT)
+DEFINE_PROMOTABLE_PRIMITIVE(int64_t, AVRO_LONG)
+DEFINE_PROMOTABLE_PRIMITIVE(float, AVRO_FLOAT)
+DEFINE_PRIMITIVE(double, AVRO_DOUBLE)
+DEFINE_PRIMITIVE(bool, AVRO_BOOL)
+DEFINE_PRIMITIVE(Null, AVRO_NULL)
+DEFINE_PRIMITIVE(std::string, AVRO_STRING)
+DEFINE_PRIMITIVE(std::vector<uint8_t>, AVRO_BYTES)
+
+
+} // namespace avro
+
+#endif
diff --git a/lang/c++/api/Boost.hh b/lang/c++/api/Boost.hh
new file mode 100644
index 0000000..4a0bb9b
--- /dev/null
+++ b/lang/c++/api/Boost.hh
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Boost_hh__
+#define avro_Boost_hh__
+
+#include <boost/version.hpp>
+
+#define BOOST_MINOR_VERSION ( BOOST_VERSION / 100 % 1000 )
+
+#if (BOOST_MINOR_VERSION < 33)
+
+/* 
+ * In boost 1.33, boost introduced the type trait definitions for true_type and
+ * false_type, the pointer containers, and allowed any object to return
+ * references.
+ *
+ * In order to support earlier versions of boost, if these do not exist we just
+ * create them here.
+ */
+
+#define AVRO_BOOST_NO_ANYREF
+#define AVRO_BOOST_NO_TRAIT
+#define AVRO_BOOST_NO_PTRVECTOR
+
+#else
+#endif
+
+#include <boost/any.hpp>
+
+#ifdef AVRO_BOOST_NO_TRAIT
+// this is copied directly from boost documentation
+namespace boost {
+    template <class T, T val>
+    struct integral_constant {
+        typedef integral_constant<T, val>  type;
+        typedef T                          value_type;
+        static const T value = val;
+    };
+
+    typedef integral_constant<bool, true>  true_type;
+    typedef integral_constant<bool, false> false_type;
+} // namespace boost
+#else 
+#include <boost/type_traits.hpp>
+#endif // AVRO_BOOST_NO_TRAIT
+
+#ifdef AVRO_BOOST_NO_PTRVECTOR
+#include <vector>
+// this implements a minimal subset of ptr_vector (the parts of the API used by avro)
+namespace boost {
+    template <class T>
+    class ptr_vector {
+      public:
+        ptr_vector() : ptrs_() {}
+        ~ptr_vector() {
+            for(size_t i=0; i < ptrs_.size(); ++i) {
+                delete ptrs_[i];
+            }
+        }
+        void push_back(T *v) {
+            ptrs_.push_back(v);
+        }
+        void pop_back() {
+            T *toDelete = ptrs_.back();
+            ptrs_.pop_back();
+            delete toDelete;
+        }
+        const T& back() const {
+            return *ptrs_.back();
+        };
+        T& back() {
+            return *ptrs_.back();
+        };
+        bool empty() const {
+            return ptrs_.empty();
+        }
+        const T& at(size_t index) const {
+            return *(ptrs_.at(index));
+        }
+        const T& operator[](size_t index) const {
+            return *(ptrs_[index]);
+        }
+        size_t size() const {
+            return ptrs_.size();
+        }
+        void reserve(size_t elems) {
+            ptrs_.reserve(elems);
+        }
+      private:
+        std::vector<T *> ptrs_;
+    };
+} // namespace boost
+#else 
+#include <boost/ptr_container/ptr_vector.hpp>
+#endif // AVRO_BOOST_NO_PTRVECTOR
+
+#endif // avro_Boost_hh__
diff --git a/lang/c++/api/Compiler.hh b/lang/c++/api/Compiler.hh
new file mode 100644
index 0000000..28fbc46
--- /dev/null
+++ b/lang/c++/api/Compiler.hh
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Compiler_hh__
+#define avro_Compiler_hh__
+
+#include "Config.hh"
+#include <stdint.h>
+#include <istream>
+
+namespace avro {
+
+class AVRO_DECL InputStream;
+
+/// This class is used to implement an avro spec parser using a flex/bison
+/// compiler.  In order for the lexer to be reentrant, this class provides a
+/// lexer object for each parse.  The bison parser also uses this class to
+/// build up an avro parse tree as the avro spec is parsed.
+    
+class AVRO_DECL ValidSchema;
+
+/// Given a stream containing a JSON schema, compiles the schema to a
+/// ValidSchema object.  Throws if the schema cannot be compiled to a valid
+/// schema.
+
+AVRO_DECL void compileJsonSchema(std::istream &is, ValidSchema &schema);
+
+/// Non-throwing version of compileJsonSchema.  
+///
+/// \return True if no error, false if error (with the error string set)
+///
+
+AVRO_DECL bool compileJsonSchema(std::istream &is, ValidSchema &schema,
+    std::string &error);
+
+AVRO_DECL ValidSchema compileJsonSchemaFromStream(InputStream& is);
+
+AVRO_DECL ValidSchema compileJsonSchemaFromMemory(const uint8_t* input, size_t len);
+
+AVRO_DECL ValidSchema compileJsonSchemaFromString(const char* input);
+
+AVRO_DECL ValidSchema compileJsonSchemaFromString(const std::string& input);
+
+AVRO_DECL ValidSchema compileJsonSchemaFromFile(const char* filename);
+
+} // namespace avro
+
+#endif
diff --git a/lang/c++/api/Config.hh b/lang/c++/api/Config.hh
new file mode 100644
index 0000000..7dc871c
--- /dev/null
+++ b/lang/c++/api/Config.hh
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Config_hh
+#define avro_Config_hh
+
+// Windows DLL support
+
+#ifdef _WIN32
+#pragma warning (disable: 4275 4251)
+
+#if defined(AVRO_DYN_LINK)
+#ifdef AVRO_SOURCE
+# define AVRO_DECL __declspec(dllexport)
+#else
+# define AVRO_DECL __declspec(dllimport)
+#endif  // AVRO_SOURCE
+#endif  // AVRO_DYN_LINK
+#endif  // _WIN32
+
+#ifndef AVRO_DECL
+#define AVRO_DECL
+#endif
+
+#endif
+
diff --git a/lang/c++/api/DataFile.hh b/lang/c++/api/DataFile.hh
new file mode 100644
index 0000000..98779b6
--- /dev/null
+++ b/lang/c++/api/DataFile.hh
@@ -0,0 +1,331 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_DataFile_hh__
+#define avro_DataFile_hh__
+
+#include "Config.hh"
+#include "Encoder.hh"
+#include "buffer/Buffer.hh"
+#include "ValidSchema.hh"
+#include "Specific.hh"
+#include "Stream.hh"
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "boost/array.hpp"
+#include "boost/utility.hpp"
+#include <boost/iostreams/filtering_stream.hpp>
+#include <boost/scoped_ptr.hpp>
+
+namespace avro {
+
+/** Specify type of compression to use when writing data files. */
+enum Codec {
+  NULL_CODEC,
+  DEFLATE_CODEC
+};
+
+/**
+ * The sync value.
+ */
+typedef boost::array<uint8_t, 16> DataFileSync;
+
+/**
+ * Type-independent portion of DataFileWriter.
+ *  At any given point in time, at most one file can be written using
+ *  this object.
+ */
+class AVRO_DECL DataFileWriterBase : boost::noncopyable {
+    const std::string filename_;
+    const ValidSchema schema_;
+    const EncoderPtr encoderPtr_;
+    const size_t syncInterval_;
+    Codec codec_;
+
+    std::auto_ptr<OutputStream> stream_;
+    std::auto_ptr<OutputStream> buffer_;
+    const DataFileSync sync_;
+    int64_t objectCount_;
+
+    typedef std::map<std::string, std::vector<uint8_t> > Metadata;
+
+    Metadata metadata_;
+
+    static std::auto_ptr<OutputStream> makeStream(const char* filename);
+    static DataFileSync makeSync();
+
+    void writeHeader();
+    void setMetadata(const std::string& key, const std::string& value);
+
+    /**
+     * Generates a sync marker in the file.
+     */
+    void sync();
+
+public:
+    /**
+     * Returns the current encoder for this writer.
+     */
+    Encoder& encoder() const { return *encoderPtr_; }
+
+    /**
+     * Inserts a sync marker if the buffer has accumulated sufficient
+     * data since the last sync.
+     */
+    void syncIfNeeded();
+
+    /**
+     * Increments the object count.
+     */
+    void incr() {
+        ++objectCount_;
+    }
+    /**
+     * Constructs a data file writer with the given sync interval and name.
+     */
+    DataFileWriterBase(const char* filename, const ValidSchema& schema,
+        size_t syncInterval, Codec codec = NULL_CODEC);
+
+    ~DataFileWriterBase();
+    /**
+     * Closes the current file. Once closed this datafile object cannot be
+     * used for writing any more.
+     */
+    void close();
+
+    /**
+     * Returns the schema for this data file.
+     */
+    const ValidSchema& schema() const { return schema_; }
+
+    /**
+     * Flushes any unwritten data into the file.
+     */
+    void flush();
+};
+
+/**
+ *  An Avro datafile that can store objects of type T.
+ */
+template <typename T>
+class DataFileWriter : boost::noncopyable {
+    std::auto_ptr<DataFileWriterBase> base_;
+public:
+    /**
+     * Constructs a new data file.
+     */
+    DataFileWriter(const char* filename, const ValidSchema& schema,
+        size_t syncInterval = 16 * 1024, Codec codec = NULL_CODEC) :
+        base_(new DataFileWriterBase(filename, schema, syncInterval, codec)) { }
+
+    /**
+     * Writes the given piece of data into the file.
+     */
+    void write(const T& datum) {
+        base_->syncIfNeeded();
+        avro::encode(base_->encoder(), datum);
+        base_->incr();
+    }
+
+    /**
+     * Closes the current file. Once closed this datafile object cannot be
+     * used for writing any more.
+     */
+    void close() { base_->close(); }
+
+    /**
+     * Returns the schema for this data file.
+     */
+    const ValidSchema& schema() const { return base_->schema(); }
+
+    /**
+     * Flushes any unwritten data into the file.
+     */
+    void flush() { base_->flush(); }
+};
+
+/**
+ * The type-independent portion of DataFileReader.
+ */
+class AVRO_DECL DataFileReaderBase : boost::noncopyable {
+    const std::string filename_;
+    const std::auto_ptr<InputStream> stream_;
+    const DecoderPtr decoder_;
+    int64_t objectCount_;
+    bool eof_;
+    Codec codec_;
+
+    ValidSchema readerSchema_;
+    ValidSchema dataSchema_;
+    DecoderPtr dataDecoder_;
+    std::auto_ptr<InputStream> dataStream_;
+    typedef std::map<std::string, std::vector<uint8_t> > Metadata;
+
+    Metadata metadata_;
+    DataFileSync sync_;
+
+    // for compressed buffer
+    boost::scoped_ptr<boost::iostreams::filtering_istream> os_;
+    std::vector<char> compressed_;
+
+    void readHeader();
+
+    bool readDataBlock();
+public:
+    /**
+     * Returns the current decoder for this reader.
+     */
+    Decoder& decoder() { return *dataDecoder_; }
+
+    /**
+     * Returns true if and only if there is more to read.
+     */
+    bool hasMore();
+
+    /**
+     * Decrements the number of objects yet to read.
+     */
+    void decr() { --objectCount_; }
+
+    /**
+     * Constructs the reader for the given file; the reader is
+     * expected to use the schema stored with the data.
+     * One of the init() functions must be called exactly once after
+     * constructing the DataFileReaderBase object.
+     */
+    DataFileReaderBase(const char* filename);
+
+    /**
+     * Initializes the reader so that the reader and writer schemas
+     * are the same.
+     */
+    void init();
+
+    /**
+     * Initializes the reader to read objects according to the given
+     * schema. This gives the reader an opportunity to see the schema
+     * in the data file before deciding the right schema to use for reading.
+     * This must be called exactly once after constructing the
+     * DataFileReaderBase object.
+     */
+    void init(const ValidSchema& readerSchema);
+
+    /**
+     * Returns the schema for this object.
+     */
+    const ValidSchema& readerSchema() { return readerSchema_; }
+
+    /**
+     * Returns the schema stored with the data file.
+     */
+    const ValidSchema& dataSchema() { return dataSchema_; }
+
+    /**
+     * Closes the reader. No further operation is possible on this reader.
+     */
+    void close();
+};
+
+/**
+ * Reads the contents of data file one after another.
+ */
+template <typename T>
+class DataFileReader : boost::noncopyable {
+    std::auto_ptr<DataFileReaderBase> base_;
+public:
+    /**
+     * Constructs a reader for the given file; objects are read
+     * using the given reader schema.
+     */
+    DataFileReader(const char* filename, const ValidSchema& readerSchema) :
+        base_(new DataFileReaderBase(filename)) {
+        base_->init(readerSchema);
+    }
+
+    /**
+     * Constructs a reader for the given file; objects are read
+     * using the schema stored with the data.
+     */
+    DataFileReader(const char* filename) :
+        base_(new DataFileReaderBase(filename)) {
+        base_->init();
+    }
+
+
+    /**
+     * Constructs a reader using the reader base. This form of constructor
+     * allows the user to examine the schema of a given file and then
+     * decide the right type of data to deserialize. Without this,
+     * the user must know the type of data for the template _before_
+     * knowing the schema within the file.
+     * The schema present in the data file will be used for reading
+     * from this reader.
+     */
+    DataFileReader(std::auto_ptr<DataFileReaderBase> base) : base_(base) {
+        base_->init();
+    }
+
+    /**
+     * Constructs a reader using the reader base. This form of constructor
+     * allows the user to examine the schema of a given file and then
+     * decide the right type of data to deserialize. Without this,
+     * the user must know the type of data for the template _before_
+     * knowing the schema within the file.
+     * The argument readerSchema will be used for reading
+     * from this reader.
+     */
+    DataFileReader(std::auto_ptr<DataFileReaderBase> base,
+        const ValidSchema& readerSchema) : base_(base) {
+        base_->init(readerSchema);
+    }
+
+    /**
+     * Reads the next entry from the data file.
+     * \return true if an object has been successfully read into \p datum and
+     * false if there are no more entries in the file.
+     */
+    bool read(T& datum) {
+        if (base_->hasMore()) {
+            base_->decr();
+            avro::decode(base_->decoder(), datum);
+            return true;
+        }
+        return false;
+    }
+
+    /**
+     * Returns the schema for this object.
+     */
+    const ValidSchema& readerSchema() { return base_->readerSchema(); }
+
+    /**
+     * Returns the schema stored with the data file.
+     */
+    const ValidSchema& dataSchema() { return base_->dataSchema(); }
+
+    /**
+     * Closes the reader. No further operation is possible on this reader.
+     */
+    void close() { return base_->close(); }
+};
+
+}   // namespace avro
+#endif
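A minimal usage sketch for the writer/reader pair declared above, assuming the compileJsonSchemaFromString() helper from Compiler.hh and the library's built-in codec_traits for int64_t; the file name and loop are illustrative:

    // Sketch: write ten longs to an Avro data file and read them back.
    // Error handling omitted; DataFileWriter<T>/DataFileReader<T> rely on
    // a codec_traits<T> specialization (provided for int64_t).
    #include "DataFile.hh"
    #include "Compiler.hh"

    int main() {
        avro::ValidSchema schema = avro::compileJsonSchemaFromString("\"long\"");
        {
            avro::DataFileWriter<int64_t> writer("numbers.avro", schema);
            for (int64_t i = 0; i < 10; ++i) {
                writer.write(i);
            }
            writer.close();
        }
        avro::DataFileReader<int64_t> reader("numbers.avro");
        int64_t v;
        while (reader.read(v)) {
            // use v
        }
        reader.close();
        return 0;
    }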
diff --git a/lang/c++/api/Decoder.hh b/lang/c++/api/Decoder.hh
new file mode 100644
index 0000000..1d8f928
--- /dev/null
+++ b/lang/c++/api/Decoder.hh
@@ -0,0 +1,210 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Decoder_hh__
+#define avro_Decoder_hh__
+
+#include "Config.hh"
+#include <stdint.h>
+#include <string>
+#include <vector>
+
+#include "ValidSchema.hh"
+#include "Stream.hh"
+
+#include <boost/shared_ptr.hpp>
+
+/// \file
+///
+/// Low level support for decoding avro values.
+/// This class has two types of functions.  One type supports
+/// decoding of leaf values (for example, decodeLong and
+/// decodeString). These functions have analogs in Encoder.
+///
+/// The other type supports decoding of maps and arrays.
+/// These functions are arrayStart, startItem, and arrayEnd
+/// (and similar functions for maps).
+
+namespace avro {
+
+/**
+ * Decoder is an interface implemented by every decoder capable
+ * of decoding Avro data.
+ */
+class AVRO_DECL Decoder {
+public:
+    virtual ~Decoder() { };
+    /// All future decoding will come from is, which should be valid
+    /// until replaced by another call to init() or this Decoder is
+    /// destructed.
+    virtual void init(InputStream& is) = 0;
+
+    /// Decodes a null from the current stream.
+    virtual void decodeNull() = 0;
+
+    /// Decodes a bool from the current stream
+    virtual bool decodeBool() = 0;
+
+    /// Decodes a 32-bit int from the current stream.
+    virtual int32_t decodeInt() = 0;
+
+    /// Decodes a 64-bit signed int from the current stream.
+    virtual int64_t decodeLong() = 0;
+
+    /// Decodes a single-precision floating point number from the current stream.
+    virtual float decodeFloat() = 0;
+
+    /// Decodes a double-precision floating point number from the current stream.
+    virtual double decodeDouble() = 0;
+
+    /// Decodes a UTF-8 string from the current stream.
+    std::string decodeString() {
+        std::string result;
+        decodeString(result);
+        return result;
+    }
+
+    /**
+     * Decodes a UTF-8 string from the stream and assigns it to value.
+     */
+    virtual void decodeString(std::string& value) = 0;
+
+    /// Skips a string on the current stream.
+    virtual void skipString() = 0;
+
+    /// Decodes arbitrary binary data from the current stream.
+    std::vector<uint8_t> decodeBytes() {
+        std::vector<uint8_t> result;
+        decodeBytes(result);
+        return result;
+    }
+
+    /// Decodes arbitrary binary data from the current stream and puts it
+    /// in value.
+    virtual void decodeBytes(std::vector<uint8_t>& value) = 0;
+
+    /// Skips bytes on the current stream.
+    virtual void skipBytes() = 0;
+
+    /**
+     * Decodes fixed length binary from the current stream.
+     * \param[in] n The size (byte count) of the fixed being read.
+     * \return The fixed data that has been read. The size of the returned
+     * vector is guaranteed to be equal to \p n.
+     */
+    std::vector<uint8_t> decodeFixed(size_t n) {
+        std::vector<uint8_t> result;
+        decodeFixed(n, result);
+        return result;
+    }
+
+    /**
+     * Decodes a fixed from the current stream.
+     * \param[in] n The size (byte count) of the fixed being read.
+     * \param[out] value The value that receives the fixed. The vector will
+     * be size-adjusted based on the fixed's size.
+     */
+    virtual void decodeFixed(size_t n, std::vector<uint8_t>& value) = 0;
+
+    /// Skips fixed length binary on the current stream.
+    virtual void skipFixed(size_t n) = 0;
+
+    /// Decodes enum from the current stream.
+    virtual size_t decodeEnum() = 0;
+
+    /// Start decoding an array. Returns the number of entries in first chunk.
+    virtual size_t arrayStart() = 0;
+
+    /// Returns the number of entries in next chunk. 0 if last.
+    virtual size_t arrayNext() = 0;
+
+    /// Tries to skip an array. If it can, it returns 0. Otherwise
+    /// it returns the number of elements to be skipped. The client
+    /// should skip the individual items. In such cases, skipArray
+    /// is identical to arrayStart.
+    virtual size_t skipArray() = 0;
+
+    /// Start decoding a map. Returns the number of entries in first chunk.
+    virtual size_t mapStart() = 0;
+
+    /// Returns the number of entries in next chunk. 0 if last.
+    virtual size_t mapNext() = 0;
+
+    /// Tries to skip a map. If it can, it returns 0. Otherwise
+    /// it returns the number of elements to be skipped. The client
+    /// should skip the individual items. In such cases, skipMap
+    /// is identical to mapStart.
+    virtual size_t skipMap() = 0;
+
+    /// Decodes a branch of a union. The actual value is to follow.
+    virtual size_t decodeUnionIndex() = 0;
+};
+
+/**
+ * Shared pointer to Decoder.
+ */
+typedef boost::shared_ptr<Decoder> DecoderPtr;
+
+/**
+ * ResolvingDecoder is derived from \ref Decoder, with an additional
+ * function to obtain the field ordering of fields within a record.
+ */
+class AVRO_DECL ResolvingDecoder : public Decoder {
+public:
+    /// Returns the order of fields for records.
+    /// The order of fields could be different from their order
+    /// in the schema because the writer's field order could
+    /// be different. In order to avoid buffering and reordering,
+    /// we return the values in the writer's field order.
+    virtual const std::vector<size_t>& fieldOrder() = 0;
+};
+
+/**
+ * Shared pointer to ResolvingDecoder.
+ */
+typedef boost::shared_ptr<ResolvingDecoder> ResolvingDecoderPtr;
+/**
+ *  Returns a decoder that can decode the binary Avro standard.
+ */
+AVRO_DECL DecoderPtr binaryDecoder();
+
+/**
+ *  Returns a decoder that validates the sequence of calls to an underlying
+ *  Decoder against the given schema.
+ */
+AVRO_DECL DecoderPtr validatingDecoder(const ValidSchema& schema,
+    const DecoderPtr& base);
+
+/**
+ *  Returns a decoder that can decode the Avro standard for JSON.
+ */
+AVRO_DECL DecoderPtr jsonDecoder(const ValidSchema& schema);
+
+/**
+ *  Returns a decoder that decodes avro data from base written according to
+ *  writerSchema and resolves against readerSchema.
+ *  The client uses the decoder as if the data were written using readerSchema.
+ *  // FIXME: Handle out of order fields.
+ */
+AVRO_DECL ResolvingDecoderPtr resolvingDecoder(const ValidSchema& writer,
+    const ValidSchema& reader, const DecoderPtr& base);
+
+
+}   // namespace avro
+
+#endif
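A short sketch of driving the Decoder interface directly, assuming the memoryInputStream() factory from Stream.hh and input that was binary-encoded as a long followed by a string:

    // Sketch: decode a (long, string) pair from a binary-encoded buffer.
    #include "Decoder.hh"
    #include "Stream.hh"

    void readPair(const uint8_t* data, size_t len,
                  int64_t& id, std::string& name) {
        std::auto_ptr<avro::InputStream> in = avro::memoryInputStream(data, len);
        avro::DecoderPtr d = avro::binaryDecoder();
        d->init(*in);               // all decoding now reads from 'in'
        id = d->decodeLong();
        d->decodeString(name);
    }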
diff --git a/lang/c++/api/Encoder.hh b/lang/c++/api/Encoder.hh
new file mode 100644
index 0000000..a5dce71
--- /dev/null
+++ b/lang/c++/api/Encoder.hh
@@ -0,0 +1,165 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Encoder_hh__
+#define avro_Encoder_hh__
+
+#include "Config.hh"
+#include <stdint.h>
+#include <string>
+#include <vector>
+
+#include "ValidSchema.hh"
+#include "Stream.hh"
+
+#include <boost/shared_ptr.hpp>
+
+/// \file
+///
+/// Low level support for encoding avro values.
+/// This class has two types of functions.  One type supports
+/// the writing of leaf values (for example, encodeLong and
+/// encodeString).  These functions have analogs in Decoder.
+///
+/// The other type supports the writing of maps and arrays.
+/// These functions are arrayStart, startItem, and arrayEnd
+/// (and similar functions for maps).
+/// Some implementations of Encoder handle the
+/// buffering required to break large maps and arrays into blocks,
+/// which is necessary for applications that want to do streaming.
+
+namespace avro {
+
+/**
+ * The abstract base class for all Avro encoders. The implementations
+ * differ in the method of encoding (binary versus JSON) or in capabilities
+ * such as ability to verify the order of invocation of different functions.
+ */
+class AVRO_DECL Encoder {
+public:
+    virtual ~Encoder() { };
+    /// All future encodings will go to os, which should be valid until
+    /// it is reset with another call to init() or the encoder is
+    /// destructed.
+    virtual void init(OutputStream& os) = 0;
+
+    /// Flushes any data in internal buffers.
+    virtual void flush() = 0;
+
+    /// Encodes a null to the current stream.
+    virtual void encodeNull() = 0;
+
+    /// Encodes a bool to the current stream
+    virtual void encodeBool(bool b) = 0;
+
+    /// Encodes a 32-bit int to the current stream.
+    virtual void encodeInt(int32_t i) = 0;
+
+    /// Encodes a 64-bit signed int to the current stream.
+    virtual void encodeLong(int64_t l) = 0;
+
+    /// Encodes a single-precision floating point number to the current stream.
+    virtual void encodeFloat(float f) = 0;
+
+    /// Encodes a double-precision floating point number to the current stream.
+    virtual void encodeDouble(double d) = 0;
+
+    /// Encodes a UTF-8 string to the current stream.
+    virtual void encodeString(const std::string& s) = 0;
+
+    /**
+     * Encodes arbitrary binary data into the current stream as the Avro
+     * "bytes" data type.
+     * \param bytes Where the data is
+     * \param len Number of bytes at \p bytes.
+     */
+    virtual void encodeBytes(const uint8_t *bytes, size_t len) = 0;
+
+    /**
+     * Encodes arbitrary binary data into the current stream as the Avro
+     * "bytes" data type.
+     * \param bytes The data.
+     */
+    void encodeBytes(const std::vector<uint8_t>& bytes) {
+        uint8_t b = 0; 
+        encodeBytes(bytes.empty() ? &b : &bytes[0], bytes.size());
+    }
+
+    /// Encodes fixed length binary to the current stream.
+    virtual void encodeFixed(const uint8_t *bytes, size_t len) = 0;
+
+    /**
+     * Encodes an Avro data type Fixed.
+     * \param bytes The fixed, the length of which is taken as the size
+     * of fixed.
+     */
+    void encodeFixed(const std::vector<uint8_t>& bytes) {
+        encodeFixed(&bytes[0], bytes.size());
+    }
+
+    /// Encodes enum to the current stream.
+    virtual void encodeEnum(size_t e) = 0;
+
+    /// Indicates that an array of items is being encoded.
+    virtual void arrayStart() = 0;
+
+    /// Indicates that the current array of items has ended.
+    virtual void arrayEnd() = 0;
+
+    /// Indicates that a map of items is being encoded.
+    virtual void mapStart() = 0;
+
+    /// Indicates that the current map of items has ended.
+    virtual void mapEnd() = 0;
+
+    /// Indicates that \p count items are to follow in the current array
+    /// or map.
+    virtual void setItemCount(size_t count) = 0;
+
+    /// Marks a beginning of an item in the current array or map.
+    virtual void startItem() = 0;
+
+    /// Encodes a branch of a union. The actual value is to follow.
+    virtual void encodeUnionIndex(size_t e) = 0;
+};
+
+/**
+ * Shared pointer to Encoder.
+ */
+typedef boost::shared_ptr<Encoder> EncoderPtr;
+
+/**
+ *  Returns an encoder that can encode the binary Avro standard.
+ */
+AVRO_DECL EncoderPtr binaryEncoder();
+
+/**
+ *  Returns an encoder that validates the sequence of calls to an underlying
+ *  Encoder against the given schema.
+ */
+AVRO_DECL EncoderPtr validatingEncoder(const ValidSchema& schema,
+    const EncoderPtr& base);
+
+/**
+ *  Returns an encoder that can encode the Avro standard for JSON.
+ */
+AVRO_DECL EncoderPtr jsonEncoder(const ValidSchema& schema);
+
+}   // namespace avro
+
+#endif
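A sketch of the array protocol described in the file comment above (arrayStart, setItemCount, startItem per element, arrayEnd); the output stream could come from memoryOutputStream() or fileOutputStream() in Stream.hh:

    // Sketch: binary-encode a vector of longs as an Avro array.
    #include "Encoder.hh"
    #include "Stream.hh"
    #include <vector>

    void encodeLongs(const std::vector<int64_t>& values, avro::OutputStream& out) {
        avro::EncoderPtr e = avro::binaryEncoder();
        e->init(out);
        e->arrayStart();
        if (!values.empty()) {
            e->setItemCount(values.size());   // one block with all items
            for (size_t i = 0; i < values.size(); ++i) {
                e->startItem();
                e->encodeLong(values[i]);
            }
        }
        e->arrayEnd();
        e->flush();
    }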
diff --git a/lang/c++/api/Exception.hh b/lang/c++/api/Exception.hh
new file mode 100644
index 0000000..0a73592
--- /dev/null
+++ b/lang/c++/api/Exception.hh
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Exception_hh__
+#define avro_Exception_hh__
+
+#include "Config.hh"
+#include <stdexcept>
+#include <boost/format.hpp>
+
+namespace avro {
+
+/// Wrapper for std::runtime_error that provides a convenience constructor
+/// for boost::format objects.
+
+class AVRO_DECL Exception : public virtual std::runtime_error
+{
+  public:
+
+    Exception(const std::string &msg) :
+        std::runtime_error(msg)
+    { }
+
+    Exception(const boost::format &msg) :
+        std::runtime_error( boost::str(msg))
+    { }  
+};
+
+} // namespace avro
+
+#endif
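Both constructors above in use; the function and messages are illustrative:

    // Sketch: raising avro::Exception from a plain string and from a
    // boost::format (boost/format.hpp is pulled in by Exception.hh).
    #include "Exception.hh"

    void checkSize(size_t n, size_t max) {
        if (n == 0) {
            throw avro::Exception("size must be non-zero");
        }
        if (n > max) {
            throw avro::Exception(boost::format("size %1% exceeds maximum %2%") % n % max);
        }
    }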
diff --git a/lang/c++/api/Generic.hh b/lang/c++/api/Generic.hh
new file mode 100644
index 0000000..ae6cb09
--- /dev/null
+++ b/lang/c++/api/Generic.hh
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Generic_hh__
+#define avro_Generic_hh__
+
+#include <boost/utility.hpp>
+
+#include "Config.hh"
+#include "Types.hh"
+#include "Encoder.hh"
+#include "Decoder.hh"
+#include "GenericDatum.hh"
+
+namespace avro {
+/**
+ * A utility class to read generic datum from decoders.
+ */
+class AVRO_DECL GenericReader : boost::noncopyable {
+    const ValidSchema schema_;
+    const bool isResolving_;
+    const DecoderPtr decoder_;
+
+    static void read(GenericDatum& datum, Decoder& d, bool isResolving);
+public:
+    /**
+     * Constructs a reader for the given schema using the given decoder.
+     */
+    GenericReader(const ValidSchema& s, const DecoderPtr& decoder);
+
+    /**
+     * Constructs a reader for the given reader's schema \c readerSchema
+     * using the given decoder, which holds data matching the writer's
+     * schema \c writerSchema.
+     */
+    GenericReader(const ValidSchema& writerSchema,
+        const ValidSchema& readerSchema, const DecoderPtr& decoder);
+
+    /**
+     * Reads a value off the decoder.
+     */
+    void read(GenericDatum& datum) const;
+
+    /**
+     * Reads a generic datum from the decoder, using the schema
+     * already attached to the datum.
+     */
+    static void read(Decoder& d, GenericDatum& g);
+
+    /**
+     * Reads a generic datum from the stream, using the given schema.
+     */
+    static void read(Decoder& d, GenericDatum& g, const ValidSchema& s);
+};
+
+
+/**
+ * A utility class to write generic datum to encoders.
+ */
+class AVRO_DECL GenericWriter : boost::noncopyable {
+    const ValidSchema schema_;
+    const EncoderPtr encoder_;
+
+    static void write(const GenericDatum& datum, Encoder& e);
+public:
+    /**
+     * Constructs a writer for the given schema using the given encoder.
+     */
+    GenericWriter(const ValidSchema& s, const EncoderPtr& encoder);
+
+    /**
+     * Writes a value onto the encoder.
+     */
+    void write(const GenericDatum& datum) const;
+
+    /**
+     * Writes a generic datum onto the stream.
+     */
+    static void write(Encoder& e, const GenericDatum& g);
+
+    /**
+     * Writes a generic datum onto the stream, using the given schema.
+     * Retained for backward compatibility.
+     */
+    static void write(Encoder& e, const GenericDatum& g, const ValidSchema&) {
+        write(e, g);
+    }
+};
+
+template <typename T> struct codec_traits;
+
+/**
+ * Specialization of codec_traits for Generic datum along with its schema.
+ * This is maintained for compatibility with old code. Please use the
+ * cleaner codec_traits<GenericDatum> instead.
+ */
+template <> struct codec_traits<std::pair<ValidSchema, GenericDatum> > {
+    /** Encodes */
+    static void encode(Encoder& e,
+        const std::pair<ValidSchema, GenericDatum>& p) {
+        GenericWriter::write(e, p.second, p.first);
+    }
+
+    /** Decodes */
+    static void decode(Decoder& d, std::pair<ValidSchema, GenericDatum>& p) {
+        GenericReader::read(d, p.second, p.first);
+    }
+};
+
+/**
+ * Specialization of codec_traits for GenericDatum.
+ */
+template <> struct codec_traits<GenericDatum> {
+    /** Encodes */
+    static void encode(Encoder& e, const GenericDatum& g) {
+        GenericWriter::write(e, g);
+    }
+
+    /** Decodes */
+    static void decode(Decoder& d, GenericDatum& g) {
+        GenericReader::read(d, g);
+    }
+};
+    
+}   // namespace avro
+#endif
+
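Because of the codec_traits<GenericDatum> specialization above, a GenericDatum round-trips through the free avro::encode()/avro::decode() functions like any other type. A sketch, assuming the in-memory stream factories from Stream.hh and the encode/decode templates from Specific.hh:

    // Sketch: binary round-trip of a GenericDatum.
    #include "Generic.hh"
    #include "Specific.hh"   // avro::encode / avro::decode
    #include "Stream.hh"

    void roundTrip(const avro::ValidSchema& schema) {
        avro::GenericDatum datum(schema);   // default value for the schema
        std::auto_ptr<avro::OutputStream> out = avro::memoryOutputStream();
        avro::EncoderPtr e = avro::binaryEncoder();
        e->init(*out);
        avro::encode(*e, datum);
        e->flush();

        std::auto_ptr<avro::InputStream> in = avro::memoryInputStream(*out);
        avro::DecoderPtr d = avro::binaryDecoder();
        d->init(*in);
        avro::decode(*d, datum);
    }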
diff --git a/lang/c++/api/GenericDatum.hh b/lang/c++/api/GenericDatum.hh
new file mode 100644
index 0000000..5efcb7f
--- /dev/null
+++ b/lang/c++/api/GenericDatum.hh
@@ -0,0 +1,505 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_GenericDatum_hh__
+#define avro_GenericDatum_hh__
+
+#include <stdint.h>
+#include <vector>
+#include <map>
+#include <string>
+
+#include <boost/any.hpp>
+
+#include "Node.hh"
+#include "ValidSchema.hh"
+
+namespace avro {
+/**
+ * Generic datum which can hold any Avro type. The datum has a type
+ * and a value. The type is one of the Avro data types. The C++ type for
+ * value corresponds to the Avro type.
+ * \li An Avro <tt>null</tt> corresponds to no C++ type. It is illegal
+ * to try to access values for <tt>null</tt>.
+ * \li Avro <tt>boolean</tt> maps to C++ <tt>bool</tt>
+ * \li Avro <tt>int</tt> maps to C++ <tt>int32_t</tt>.
+ * \li Avro <tt>long</tt> maps to C++ <tt>int64_t</tt>.
+ * \li Avro <tt>float</tt> maps to C++ <tt>float</tt>.
+ * \li Avro <tt>double</tt> maps to C++ <tt>double</tt>.
+ * \li Avro <tt>string</tt> maps to C++ <tt>std::string</tt>.
+ * \li Avro <tt>bytes</tt> maps to C++ <tt>std::vector<uint8_t></tt>.
+ * \li Avro <tt>fixed</tt> maps to C++ class <tt>GenericFixed</tt>.
+ * \li Avro <tt>enum</tt> maps to C++ class <tt>GenericEnum</tt>.
+ * \li Avro <tt>array</tt> maps to C++ class <tt>GenericArray</tt>.
+ * \li Avro <tt>map</tt> maps to C++ class <tt>GenericMap</tt>.
+ * \li There is no C++ type corresponding to Avro <tt>union</tt>. The
+ * object should have the C++ type corresponding to one of the constituent
+ * types of the union.
+ *
+ */
+class AVRO_DECL GenericDatum {
+    Type type_;
+    boost::any value_;
+
+    GenericDatum(Type t) : type_(t) { }
+
+    template <typename T>
+    GenericDatum(Type t, const T& v) : type_(t), value_(v) { }
+
+    void init(const NodePtr& schema);
+public:
+    /**
+     * The avro data type this datum holds.
+     */
+    Type type() const {
+        return type_;
+    }
+
+    /**
+     * Returns the value held by this datum.
+     * T The type for the value. This must correspond to the
+     * avro type returned by type().
+     */
+    template<typename T> const T& value() const {
+        return *boost::any_cast<T>(&value_);
+    }
+
+    /**
+     * Returns the reference to the value held by this datum, which
+     * can be used to change the contents. Please note that only the
+     * value can be changed; the data type of the value held cannot
+     * be changed.
+     *
+     * T The type for the value. This must correspond to the
+     * avro type returned by type().
+     */
+    template<typename T> T& value() {
+        return *boost::any_cast<T>(&value_);
+    }
+
+    /**
+     * Returns true if and only if this datum is a union.
+     */
+    bool isUnion() const { return type_ == AVRO_UNION; }
+
+    /**
+     * Returns the index of the current branch, if this is a union.
+     * \sa isUnion().
+     */
+    size_t unionBranch() const;
+
+    /**
+     * Selects a new branch in the union if this is a union.
+     * \sa isUnion().
+     */
+    void selectBranch(size_t branch);
+
+    /// Makes a new AVRO_NULL datum.
+    GenericDatum() : type_(AVRO_NULL) { }
+
+    /// Makes a new AVRO_BOOL datum whose value is of type bool.
+    GenericDatum(bool v) : type_(AVRO_BOOL), value_(v) { }
+
+    /// Makes a new AVRO_INT datum whose value is of type int32_t.
+    GenericDatum(int32_t v) : type_(AVRO_INT), value_(v) { }
+
+    /// Makes a new AVRO_LONG datum whose value is of type int64_t.
+    GenericDatum(int64_t v) : type_(AVRO_LONG), value_(v) { }
+
+    /// Makes a new AVRO_FLOAT datum whose value is of type float.
+    GenericDatum(float v) : type_(AVRO_FLOAT), value_(v) { }
+
+    /// Makes a new AVRO_DOUBLE datum whose value is of type double.
+    GenericDatum(double v) : type_(AVRO_DOUBLE), value_(v) { }
+
+    /// Makes a new AVRO_STRING datum whose value is of type std::string.
+    GenericDatum(const std::string& v) : type_(AVRO_STRING), value_(v) { }
+
+    /// Makes a new AVRO_BYTES datum whose value is of type
+    /// std::vector<uint8_t>.
+    GenericDatum(const std::vector<uint8_t>& v) :
+        type_(AVRO_BYTES), value_(v) { }
+
+    /**
+     * Constructs a datum corresponding to the given avro type.
+     * The value will be the appropriate default corresponding to the
+     * data type.
+     * \param schema The schema that defines the avro type.
+     */
+    GenericDatum(const NodePtr& schema);
+
+    /**
+     * Constructs a datum corresponding to the given avro type and sets
+     * the value.
+     * \param schema The schema that defines the avro type.
+     * \param v The value for this type.
+     */
+    template<typename T>
+    GenericDatum(const NodePtr& schema, const T& v) :
+        type_(schema->type()) {
+        init(schema);
+        value<T>() = v;
+    }
+
+    /**
+     * Constructs a datum corresponding to the given avro type.
+     * The value will be the appropriate default corresponding to the
+     * data type.
+     * \param schema The schema that defines the avro type.
+     */
+    GenericDatum(const ValidSchema& schema);
+};
+
+/**
+ * The base class for all generic container types.
+ */
+class AVRO_DECL GenericContainer {
+    NodePtr schema_;
+    static void assertType(const NodePtr& schema, Type type);
+protected:
+    /**
+     * Constructs a container corresponding to the given schema.
+     */
+    GenericContainer(Type type, const NodePtr& s) : schema_(s) {
+        assertType(s, type);
+    }
+
+public:
+    /// Returns the schema for this object
+    const NodePtr& schema() const {
+        return schema_;
+    }
+};
+
+/**
+ * Generic container for unions.
+ */
+class AVRO_DECL GenericUnion : public GenericContainer {
+    size_t curBranch_;
+    GenericDatum datum_;
+
+public:
+    /**
+     * Constructs a generic union corresponding to the given schema
+     * \p schema, which should be of Avro type union. The datum starts
+     * out with no branch selected.
+     */
+    GenericUnion(const NodePtr& schema) :
+        GenericContainer(AVRO_UNION, schema), curBranch_(schema->leaves()) {
+    }
+
+    /**
+     * Returns the index of the current branch.
+     */
+    size_t currentBranch() const { return curBranch_; }
+
+    /**
+     * Selects a new branch. The type for the value is changed accordingly.
+     * \param branch The index for the selected branch.
+     */
+    void selectBranch(size_t branch) {
+        if (curBranch_ != branch) {
+            datum_ = GenericDatum(schema()->leafAt(branch));
+            curBranch_ = branch;
+        }
+    }
+
+    /**
+     * Returns the datum corresponding to the currently selected branch
+     * in this union.
+     */
+    GenericDatum& datum() {
+        return datum_;
+    }
+
+    /**
+     * Returns the datum corresponding to the currently selected branch
+     * in this union.
+     */
+    const GenericDatum& datum() const {
+        return datum_;
+    }
+};
+
+/**
+ * The generic container for Avro records.
+ */
+class AVRO_DECL GenericRecord : public GenericContainer {
+    std::vector<GenericDatum> fields_;
+public:
+    /**
+     * Constructs a generic record corresponding to the given schema \p schema,
+     * which should be of Avro type record.
+     */
+    GenericRecord(const NodePtr& schema);
+
+    /**
+     * Returns the number of fields in the current record.
+     */
+    size_t fieldCount() const {
+        return fields_.size();
+    }
+
+    /**
+     * Returns index of the field with the given name \p name
+     */
+    size_t fieldIndex(const std::string& name) const { 
+        size_t index = 0;
+        if (!schema()->nameIndex(name, index)) {
+            throw Exception("Invalid field name: " + name);
+        }
+        return index;
+    }
+
+    /**
+     * Returns true if a field with the given name \p name is located
+     * in this record, false otherwise.
+     */
+    bool hasField(const std::string& name) const {
+        size_t index = 0;
+        return schema()->nameIndex(name, index);
+    }
+
+    /**
+     * Returns the field with the given name \p name.
+     */
+    const GenericDatum& field(const std::string& name) const {
+        return fieldAt(fieldIndex(name));
+    }
+
+    /**
+     * Returns the reference to the field with the given name \p name,
+     * which can be used to change the contents.
+     */
+    GenericDatum& field(const std::string& name) {
+        return fieldAt(fieldIndex(name));
+    }
+
+    /**
+     * Returns the field at the given position \p pos.
+     */
+    const GenericDatum& fieldAt(size_t pos) const {
+        return fields_[pos];
+    }
+
+    /**
+     * Returns the reference to the field at the given position \p pos,
+     * which can be used to change the contents.
+     */
+    GenericDatum& fieldAt(size_t pos) {
+        return fields_[pos];
+    }
+
+    /**
+     * Replaces the field at the given position \p pos with \p v.
+     */
+    void setFieldAt(size_t pos, const GenericDatum& v) {
+        // assertSameType(v, schema()->leafAt(pos));    
+        fields_[pos] = v;
+    }
+};
+
+/**
+ * The generic container for Avro arrays.
+ */
+class AVRO_DECL GenericArray : public GenericContainer {
+public:
+    /**
+     * The contents type for the array.
+     */
+    typedef std::vector<GenericDatum> Value;
+
+    /**
+     * Constructs a generic array corresponding to the given schema \p schema,
+     * which should be of Avro type array.
+     */
+    GenericArray(const NodePtr& schema) : GenericContainer(AVRO_ARRAY, schema) {
+    }
+
+    /**
+     * Returns the contents of this array.
+     */
+    const Value& value() const {
+        return value_;
+    }
+
+    /**
+     * Returns the reference to the contents of this array.
+     */
+    Value& value() {
+        return value_;
+    }
+private:
+    Value value_;
+};
+
+/**
+ * The generic container for Avro maps.
+ */
+class AVRO_DECL GenericMap : public GenericContainer {
+public:
+    /**
+     * The contents type for the map.
+     */
+    typedef std::vector<std::pair<std::string, GenericDatum> > Value;
+
+    /**
+     * Constructs a generic map corresponding to the given schema \p schema,
+     * which should be of Avro type map.
+     */
+    GenericMap(const NodePtr& schema) : GenericContainer(AVRO_MAP, schema) {
+    }
+
+    /**
+     * Returns the contents of this map.
+     */
+    const Value& value() const {
+        return value_;
+    }
+
+    /**
+     * Returns the reference to the contents of this map.
+     */
+    Value& value() {
+        return value_;
+    }
+private:
+    Value value_;
+};
+
+/**
+ * Generic container for Avro enum.
+ */
+class AVRO_DECL GenericEnum : public GenericContainer {
+    size_t value_;
+
+    static size_t index(const NodePtr& schema, const std::string& symbol) {
+        size_t result;
+        if (schema->nameIndex(symbol, result)) {
+            return result;
+        }
+        throw Exception("No such symbol");
+    }
+
+public:
+    /**
+     * Constructs a generic enum corresponding to the given schema \p schema,
+     * which should be of Avro type enum.
+     */
+    GenericEnum(const NodePtr& schema) :
+        GenericContainer(AVRO_ENUM, schema), value_(0) {
+    }
+
+    GenericEnum(const NodePtr& schema, const std::string& symbol) :
+        GenericContainer(AVRO_ENUM, schema), value_(index(schema, symbol)) {
+    }
+
+    /**
+     * Returns the symbol corresponding to the cardinal \p n. If
+     * \p n is out of range, an exception is thrown.
+     */
+    const std::string& symbol(size_t n) {
+        if (n < schema()->names()) {
+            return schema()->nameAt(n);
+        }
+        throw Exception("Not as many symbols");
+    }
+
+    /**
+     * Returns the cardinal for the given symbol \c symbol. If the symbol
+     * is not defined for this enum, an exception is thrown.
+     */
+    size_t index(const std::string& symbol) const {
+        return index(schema(), symbol);
+    }
+
+    /**
+     * Sets the value for this enum corresponding to the given symbol \c symbol.
+     */
+    size_t set(const std::string& symbol) {
+        return value_ = index(symbol);
+    }
+
+    /**
+     * Sets the value for this enum corresponding to the given cardinal \c n.
+     */
+    void set(size_t n) {
+        if (n < schema()->names()) {
+            value_ = n;
+            return;
+        }
+        throw Exception("Not as many symbols");
+    }
+
+    /**
+     * Returns the cardinal for the current value of this enum.
+     */
+    size_t value() const {
+        return value_;
+    }
+
+    /**
+     * Returns the symbol for the current value of this enum.
+     */
+    const std::string& symbol() const {
+        return schema()->nameAt(value_);
+    }
+};
+
+/**
+ * Generic container for Avro fixed.
+ */
+class AVRO_DECL GenericFixed : public GenericContainer {
+    std::vector<uint8_t> value_;
+public:
+    /**
+     * Constructs a generic fixed value corresponding to the given schema
+     * \p schema, which should be of Avro type fixed.
+     */
+    GenericFixed(const NodePtr& schema) : GenericContainer(AVRO_FIXED, schema) {
+        value_.resize(schema->fixedSize());
+    }
+
+    GenericFixed(const NodePtr& schema, const std::vector<uint8_t>& v) :
+        GenericContainer(AVRO_FIXED, schema), value_(v) { }
+
+    /**
+     * Returns the contents of this fixed.
+     */
+    const std::vector<uint8_t>& value() const {
+        return value_;
+    }
+
+    /**
+     * Returns the reference to the contents of this fixed.
+     */
+    std::vector<uint8_t>& value() {
+        return value_;
+    }
+};
+
+inline size_t GenericDatum::unionBranch() const {
+    return boost::any_cast<GenericUnion>(&value_)->currentBranch();
+}
+
+inline void GenericDatum::selectBranch(size_t branch) {
+    boost::any_cast<GenericUnion>(&value_)->selectBranch(branch);
+}
+
+}   // namespace avro
+#endif // avro_GenericDatum_hh__
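A sketch of navigating a record datum with the accessors above; the field names "id" and "name" are illustrative and assume a matching record schema:

    // Sketch: fill the fields of a record-typed GenericDatum.
    #include "GenericDatum.hh"

    void fill(avro::GenericDatum& datum) {
        if (datum.type() == avro::AVRO_RECORD) {
            avro::GenericRecord& rec = datum.value<avro::GenericRecord>();
            rec.field("id").value<int64_t>() = 42;
            rec.field("name").value<std::string>() = "example";
        }
    }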
diff --git a/lang/c++/api/Layout.hh b/lang/c++/api/Layout.hh
new file mode 100644
index 0000000..5965c53
--- /dev/null
+++ b/lang/c++/api/Layout.hh
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Layout_hh__
+#define avro_Layout_hh__
+
+#include <boost/noncopyable.hpp>
+#include "Config.hh"
+#include "Boost.hh"
+
+/// \file Layout.hh
+///
+
+namespace avro {
+    
+class AVRO_DECL Layout : private boost::noncopyable {
+
+  protected:
+
+    Layout(size_t offset = 0) :
+        offset_(offset)
+    {}
+
+  public:
+
+    size_t offset() const {
+        return offset_;
+    }
+
+    virtual ~Layout() {}
+
+  private:
+
+    const size_t offset_;
+};
+
+class AVRO_DECL PrimitiveLayout : public Layout {
+
+  public:
+
+    PrimitiveLayout(size_t offset = 0) :
+        Layout(offset)
+    {}
+};
+
+class AVRO_DECL CompoundLayout : public Layout {
+
+  public:
+
+    CompoundLayout(size_t offset = 0) :
+        Layout(offset)
+    {}
+
+    void add(Layout *layout) {
+        layouts_.push_back(layout);
+    }
+
+    const Layout &at (size_t idx) const {
+        return layouts_.at(idx);
+    }
+
+  private:
+
+    boost::ptr_vector<Layout> layouts_;
+};
+
+} // namespace avro
+
+#endif
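A brief sketch of building a layout tree; CompoundLayout::add() takes ownership of the raw pointer through the underlying boost::ptr_vector, so the caller must not delete it. The offsets are illustrative:

    // Sketch: a compound layout with two primitive members.
    #include "Layout.hh"

    avro::CompoundLayout* makeRecordLayout() {
        avro::CompoundLayout* record = new avro::CompoundLayout(0);
        record->add(new avro::PrimitiveLayout(0));   // offset of first member
        record->add(new avro::PrimitiveLayout(8));   // offset of second member
        return record;
    }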
diff --git a/lang/c++/api/Node.hh b/lang/c++/api/Node.hh
new file mode 100644
index 0000000..ff227b9
--- /dev/null
+++ b/lang/c++/api/Node.hh
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Node_hh__
+#define avro_Node_hh__
+
+#include "Config.hh"
+
+#include <cassert>
+#include <boost/noncopyable.hpp>
+#include <boost/shared_ptr.hpp>
+
+#include "Exception.hh"
+#include "Types.hh"
+#include "SchemaResolution.hh"
+
+namespace avro {
+
+class Node;
+class GenericDatum;
+
+typedef boost::shared_ptr<Node> NodePtr;
+
+class AVRO_DECL Name {
+    std::string ns_;
+    std::string simpleName_;
+public:
+    Name() { }
+    Name(const std::string& fullname);
+    Name(const std::string& simpleName, const std::string& ns) : ns_(ns), simpleName_(simpleName) { check(); }
+
+    const std::string fullname() const;
+    const std::string& ns() const { return ns_; }
+    const std::string& simpleName() const { return simpleName_; }
+
+    void ns(const std::string& n) { ns_ = n; }
+    void simpleName(const std::string& n) { simpleName_ = n; }
+    void fullname(const std::string& n);
+
+    bool operator < (const Name& n) const;
+    void check() const;
+    bool operator == (const Name& n) const;
+    bool operator != (const Name& n) const { return !((*this) == n); }
+    void clear() {
+        ns_.clear();
+        simpleName_.clear();
+    }
+    operator std::string() const {
+        return fullname();
+    }
+};
+
+inline
+std::ostream& operator << (std::ostream& os, const Name& n) {
+    return os << n.fullname();
+}
+
+/// Node is the building block for parse trees.  Each node represents an avro
+/// type.  Compound types have leaf nodes that represent the types they are
+/// composed of.
+///
+/// The user does not use the Node object directly; they interface with Schema
+/// objects.
+///
+/// The Node object uses reference-counted pointers.  This is so that schemas
+/// may be reused in other schemas, without needing to worry about memory
+/// deallocation for nodes that are added to multiple schema parse trees.
+///
+/// Node has minimal implementation, serving as an abstract base class for
+/// different node types.
+///
+
+class AVRO_DECL Node : private boost::noncopyable
+{
+  public:
+
+    Node(Type type) :
+        type_(type),
+        locked_(false)
+    {}
+
+    virtual ~Node();
+
+    Type type() const {
+        return type_;
+    }
+
+    void lock() {
+        locked_ = true;
+    }
+
+    bool locked() const {
+        return locked_;
+    }
+
+    virtual bool hasName() const = 0;
+
+    void setName(const Name &name) {
+        checkLock();
+        checkName(name);
+        doSetName(name);
+    }
+    virtual const Name &name() const = 0;
+
+    void addLeaf(const NodePtr &newLeaf) {
+        checkLock();
+        doAddLeaf(newLeaf);
+    }
+    virtual size_t leaves() const = 0;
+    virtual const NodePtr& leafAt(int index) const = 0;
+    virtual const GenericDatum& defaultValueAt(int index) {
+        throw Exception(boost::format("No default value at: %1%") % index);
+    }
+
+    void addName(const std::string &name) {
+        checkLock();
+        checkName(name);
+        doAddName(name);
+    }
+    virtual size_t names() const = 0;
+    virtual const std::string &nameAt(int index) const = 0;
+    virtual bool nameIndex(const std::string &name, size_t &index) const = 0;
+
+    void setFixedSize(int size) {
+        checkLock();
+        doSetFixedSize(size);
+    }
+    virtual int fixedSize() const = 0;
+
+    virtual bool isValid() const = 0;
+
+    virtual SchemaResolution resolve(const Node &reader) const = 0;
+
+    virtual void printJson(std::ostream &os, int depth) const = 0;
+
+    virtual void printBasicInfo(std::ostream &os) const = 0;
+
+    virtual void setLeafToSymbolic(int index, const NodePtr &node) = 0;
+
+  protected:
+
+    void checkLock() const {
+        if(locked()) {
+            throw Exception("Cannot modify locked schema");
+        }
+    }
+
+    virtual void checkName(const Name &name) const {
+        name.check();
+    }
+
+    virtual void doSetName(const Name &name) = 0;
+    virtual void doAddLeaf(const NodePtr &newLeaf) = 0;
+    virtual void doAddName(const std::string &name) = 0;
+    virtual void doSetFixedSize(int size) = 0;
+
+  private:
+
+    const Type type_;
+    bool locked_;
+};
+
+} // namespace avro
+
+namespace std {
+inline std::ostream& operator<<(std::ostream& os, const avro::Node& n)
+{
+    n.printJson(os, 0);
+    return os;
+}
+}
+
+
+#endif
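A small sketch of the Name class above: a full name splits at the last dot into namespace and simple name, and check() validates the identifiers. The concrete name used is illustrative:

    // Sketch: how avro::Name decomposes a fullname.
    #include "Node.hh"
    #include <cassert>

    void nameDemo() {
        avro::Name n("org.example.Record");
        assert(n.ns() == "org.example");
        assert(n.simpleName() == "Record");
        assert(n.fullname() == "org.example.Record");
    }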
diff --git a/lang/c++/api/NodeConcepts.hh b/lang/c++/api/NodeConcepts.hh
new file mode 100644
index 0000000..63d4201
--- /dev/null
+++ b/lang/c++/api/NodeConcepts.hh
@@ -0,0 +1,224 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_NodeConcepts_hh__
+#define avro_NodeConcepts_hh__
+
+#include "Config.hh"
+
+#include <vector>
+#include <map>
+#include "Exception.hh"
+
+namespace avro {
+
+
+/// 
+/// The concept classes are used to simplify NodeImpl.  Different avro
+/// types carry different attributes, such as names, or field names for
+/// record members.  Using the concept class of NoAttribute vs Attribute, the
+/// NodeImpl object can enable/disable the attribute, but the code is the same
+/// in either case.
+///
+/// Furthermore, attributes may have different types; for example, most
+/// attributes are strings, but fixed types have a size attribute, which is
+/// an integer.
+///
+/// Since compound types are composed of other types, the leaf attribute
+/// concepts extend a NodeImpl to include leaf nodes, and attributes for leaf
+/// nodes, which are used to build parse trees.
+///
+///
+
+namespace concepts {
+
+template <typename Attribute>
+struct NoAttribute
+{
+    static const bool hasAttribute = false;
+
+    size_t size() const {
+        return 0;
+    }
+
+    void add(const Attribute &attr) {
+        // There must be an add function for the generic NodeImpl, but the
+        // Node APIs ensure that it is never called, the throw here is
+        // just in case
+        throw Exception("This type does not have attribute");
+    }
+
+    const Attribute &get(size_t index = 0) const {
+        // There must be a get function for the generic NodeImpl, but the
+        // Node APIs ensure that it is never called, the throw here is
+        // just in case
+        throw Exception("This type does not have attribute");
+        // even though this code is unreachable the compiler requires it
+        static const Attribute empty = Attribute();
+        return empty;
+    }
+
+    Attribute &get(size_t index = 0) {
+        // There must be a get function for the generic NodeImpl, but the
+        // Node APIs ensure that it is never called, the throw here is
+        // just in case
+        throw Exception("This type does not have attribute");
+    }
+
+};
+
+template<typename Attribute>
+struct SingleAttribute
+{
+    static const bool hasAttribute = true;
+
+    SingleAttribute() : attr_()
+    { }
+
+    SingleAttribute(const Attribute& a) : attr_(a) { }
+    // copy constructing from another single attribute is allowed
+    SingleAttribute(const SingleAttribute<Attribute> &rhs) : 
+        attr_(rhs.attr_)
+    { }
+
+    // copy constructing from a no attribute is allowed
+    SingleAttribute(const NoAttribute<Attribute> &rhs) : 
+        attr_()
+    { }
+
+    size_t size() const {
+        return 1;
+    }
+
+    void add(const Attribute &attr) {
+        attr_ = attr;
+    }
+
+    const Attribute &get(size_t index = 0) const {
+        if (index != 0) {
+            throw Exception("SingleAttribute has only 1 value");
+        }
+        return attr_;
+    }
+
+    Attribute &get(size_t index = 0) {
+        if (index != 0) {
+            throw Exception("SingleAttribute has only 1 value");
+        }
+        return attr_;
+    }
+
+private:
+    template<typename T> friend struct MultiAttribute;
+    Attribute attr_;
+};
+
+template<typename Attribute>
+struct MultiAttribute
+{
+    static const bool hasAttribute = true;
+
+    MultiAttribute() 
+    { }
+
+    // copy constructing from another single attribute is allowed; it
+    // pushes the attribute
+    MultiAttribute(const SingleAttribute<Attribute> &rhs) 
+    { 
+        // since map is the only type that does this we know its
+        // final size will be two, so reserve
+        attrs_.reserve(2);
+        attrs_.push_back(rhs.attr_);
+    }
+
+    MultiAttribute(const MultiAttribute<Attribute> &rhs)  :
+        attrs_(rhs.attrs_)
+    { }
+
+    MultiAttribute(const NoAttribute<Attribute> &rhs)
+    {}
+
+    size_t size() const {
+        return attrs_.size();
+    }
+
+    void add(const Attribute &attr) {
+        attrs_.push_back(attr); 
+    }
+
+    const Attribute &get(size_t index = 0) const {
+        return attrs_.at(index);
+    }
+
+    Attribute &get(size_t index) {
+        return attrs_.at(index);
+    }
+
+  private:
+
+    std::vector<Attribute> attrs_;
+};
+
+
+template<typename T>
+struct NameIndexConcept {
+
+    bool lookup(const std::string &name, size_t &index) const {
+        throw Exception("Name index does not exist");
+        return 0;
+    }
+
+    bool add(const std::string &name, size_t index) {
+        throw Exception("Name index does not exist");
+        return false;
+    }
+};
+
+template<>
+struct NameIndexConcept < MultiAttribute<std::string> > 
+{
+    typedef std::map<std::string, size_t> IndexMap;
+
+    bool lookup(const std::string &name, size_t &index) const {
+        IndexMap::const_iterator iter = map_.find(name); 
+        if(iter == map_.end()) {
+            return false;
+        }
+        index = iter->second;
+        return true;
+    }
+
+    bool add(const std::string &name, size_t index) {
+        bool added = false;
+        IndexMap::iterator lb = map_.lower_bound(name); 
+        if(lb == map_.end() || map_.key_comp()(name, lb->first)) {
+            map_.insert(lb, IndexMap::value_type(name, index));
+            added = true;
+        }
+        return added;
+    }
+
+  private:
+
+    IndexMap map_;
+};
+
+} // namespace concepts
+} // namespace avro
+
+#endif
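A sketch of the attribute concepts in action: MultiAttribute accumulates values while NoAttribute rejects any access, which is what lets one NodeImpl code path serve every avro type. The demo values are illustrative:

    // Sketch: exercising MultiAttribute and NoAttribute.
    #include "NodeConcepts.hh"
    #include <cassert>
    #include <string>

    void conceptsDemo() {
        avro::concepts::MultiAttribute<std::string> fields;
        fields.add("id");
        fields.add("name");
        assert(fields.size() == 2);
        assert(fields.get(1) == "name");

        avro::concepts::NoAttribute<std::string> none;
        assert(none.size() == 0);   // add() or get() would throw
    }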
diff --git a/lang/c++/api/NodeImpl.hh b/lang/c++/api/NodeImpl.hh
new file mode 100644
index 0000000..cbfcfb5
--- /dev/null
+++ b/lang/c++/api/NodeImpl.hh
@@ -0,0 +1,543 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_NodeImpl_hh__
+#define avro_NodeImpl_hh__
+
+#include "Config.hh"
+#include "GenericDatum.hh"
+
+#include <limits>
+#include <set>
+#include <boost/weak_ptr.hpp>
+
+#include "Node.hh"
+#include "NodeConcepts.hh"
+
+namespace avro {
+
+/// Implementation details for Node.  NodeImpl represents all the avro types,
+/// whose properties are enabled or disabled by selecting concept classes.
+
+template 
+< 
+    class NameConcept,
+    class LeavesConcept,
+    class LeafNamesConcept,
+    class SizeConcept
+>
+class NodeImpl : public Node
+{
+
+  protected:
+
+    NodeImpl(Type type) :
+        Node(type),
+        nameAttribute_(),
+        leafAttributes_(),
+        leafNameAttributes_(),
+        sizeAttribute_()
+    { }
+
+    NodeImpl(Type type, 
+             const NameConcept &name, 
+             const LeavesConcept &leaves, 
+             const LeafNamesConcept &leafNames,
+             const SizeConcept &size) :
+        Node(type),
+        nameAttribute_(name),
+        leafAttributes_(leaves),
+        leafNameAttributes_(leafNames),
+        sizeAttribute_(size)
+    { }
+
+    void swap(NodeImpl& impl) {
+        std::swap(nameAttribute_, impl.nameAttribute_);
+        std::swap(leafAttributes_, impl.leafAttributes_);
+        std::swap(leafNameAttributes_, impl.leafNameAttributes_);
+        std::swap(sizeAttribute_, impl.sizeAttribute_);
+        std::swap(nameIndex_, impl.nameIndex_);
+    }
+
+    bool hasName() const {
+        return NameConcept::hasAttribute;
+    }
+
+    void doSetName(const Name &name) {
+        nameAttribute_.add(name);
+    }
+    
+    const Name &name() const {
+        return nameAttribute_.get();
+    }
+
+    void doAddLeaf(const NodePtr &newLeaf) { 
+        leafAttributes_.add(newLeaf);
+    }
+
+    size_t leaves() const {
+        return leafAttributes_.size();
+    }
+
+    const NodePtr &leafAt(int index) const { 
+        return leafAttributes_.get(index);
+    }
+
+    void doAddName(const std::string &name) { 
+        if (! nameIndex_.add(name, leafNameAttributes_.size())) {
+            throw Exception(boost::format("Cannot add duplicate name: %1%") % name);
+        }
+        leafNameAttributes_.add(name);
+    }
+
+    size_t names() const {
+        return leafNameAttributes_.size();
+    }
+
+    const std::string &nameAt(int index) const { 
+        return leafNameAttributes_.get(index);
+    }
+
+    bool nameIndex(const std::string &name, size_t &index) const {
+        return nameIndex_.lookup(name, index);
+    }
+
+    void doSetFixedSize(int size) {
+        sizeAttribute_.add(size);
+    }
+
+    int fixedSize() const {
+        return sizeAttribute_.get();
+    }
+
+    virtual bool isValid() const = 0;
+
+    void printBasicInfo(std::ostream &os) const;
+
+    void setLeafToSymbolic(int index, const NodePtr &node);
+   
+    SchemaResolution furtherResolution(const Node &reader) const {
+        SchemaResolution match = RESOLVE_NO_MATCH;
+
+        if (reader.type() == AVRO_SYMBOLIC) {
+    
+            // resolve the symbolic type, and check again
+            const NodePtr &node = reader.leafAt(0);
+            match = resolve(*node);
+        }
+        else if(reader.type() == AVRO_UNION) {
+
+            // in this case, need to see if there is an exact match for the
+            // writer's type, or if not, the first one that can be promoted to a
+            // match
+        
+            for(size_t i= 0; i < reader.leaves(); ++i)  {
+
+                const NodePtr &node = reader.leafAt(i);
+                SchemaResolution thisMatch = resolve(*node);
+
+                // if matched then the search is done
+                if(thisMatch == RESOLVE_MATCH) {
+                    match = thisMatch;
+                    break;
+                }
+
+                // thisMatch is either no match or promotable; this sets match
+                // to promotable if it hasn't been set already
+                if (match == RESOLVE_NO_MATCH) {
+                    match = thisMatch;
+                }
+            }
+        }
+
+        return match;
+    }
+
+    NameConcept nameAttribute_;
+    LeavesConcept leafAttributes_;
+    LeafNamesConcept leafNameAttributes_;
+    SizeConcept sizeAttribute_;
+    concepts::NameIndexConcept<LeafNamesConcept> nameIndex_;
+};
+
+typedef concepts::NoAttribute<Name>     NoName;
+typedef concepts::SingleAttribute<Name> HasName;
+
+typedef concepts::NoAttribute<NodePtr>      NoLeaves;
+typedef concepts::SingleAttribute<NodePtr>  SingleLeaf;
+typedef concepts::MultiAttribute<NodePtr>   MultiLeaves;
+
+typedef concepts::NoAttribute<std::string>     NoLeafNames;
+typedef concepts::MultiAttribute<std::string>  LeafNames;
+
+typedef concepts::NoAttribute<int>     NoSize;
+typedef concepts::SingleAttribute<int> HasSize;
+
+typedef NodeImpl< NoName,  NoLeaves,    NoLeafNames,  NoSize  > NodeImplPrimitive;
+typedef NodeImpl< HasName, NoLeaves,    NoLeafNames,  NoSize  > NodeImplSymbolic;
+
+typedef NodeImpl< HasName, MultiLeaves, LeafNames,    NoSize  > NodeImplRecord;
+typedef NodeImpl< HasName, NoLeaves,    LeafNames,    NoSize  > NodeImplEnum;
+typedef NodeImpl< NoName,  SingleLeaf,  NoLeafNames,  NoSize  > NodeImplArray;
+typedef NodeImpl< NoName,  MultiLeaves, NoLeafNames,  NoSize  > NodeImplMap;
+typedef NodeImpl< NoName,  MultiLeaves, NoLeafNames,  NoSize  > NodeImplUnion;
+typedef NodeImpl< HasName, NoLeaves,    NoLeafNames,  HasSize > NodeImplFixed;
+
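+// Reading the table above: for example, NodeImplRecord carries a name, leaf
+// nodes (the field schemas) and leaf names (the field names) but no fixed
+// size, while NodeImplFixed carries a name and a size but no leaves.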
+class AVRO_DECL NodePrimitive : public NodeImplPrimitive
+{
+  public:
+
+    explicit NodePrimitive(Type type) :
+        NodeImplPrimitive(type)
+    { }
+
+    SchemaResolution resolve(const Node &reader)  const;
+
+    void printJson(std::ostream &os, int depth) const;
+
+    bool isValid() const {
+        return true;
+    }
+};
+
+class AVRO_DECL NodeSymbolic : public NodeImplSymbolic
+{
+    typedef boost::weak_ptr<Node> NodeWeakPtr;
+
+  public:
+
+    NodeSymbolic() :
+        NodeImplSymbolic(AVRO_SYMBOLIC)
+    { }
+
+    explicit NodeSymbolic(const HasName &name) :
+        NodeImplSymbolic(AVRO_SYMBOLIC, name, NoLeaves(), NoLeafNames(), NoSize())
+    { }
+
+    NodeSymbolic(const HasName &name, const NodePtr &n) :
+        NodeImplSymbolic(AVRO_SYMBOLIC, name, NoLeaves(), NoLeafNames(), NoSize()), actualNode_(n)
+    { }
+    SchemaResolution resolve(const Node &reader)  const;
+
+    void printJson(std::ostream &os, int depth) const;
+
+    bool isValid() const {
+        return (nameAttribute_.size() == 1);
+    }
+
+    bool isSet() const {
+         return (actualNode_.lock() != 0);
+    }
+
+    NodePtr getNode() const {
+        NodePtr node = actualNode_.lock();
+        if(!node) {
+            throw Exception(boost::format("Could not follow symbol %1%") % name());
+        }
+        return node;
+    }
+
+    void setNode(const NodePtr &node) {
+        actualNode_ = node;
+    }
+
+  protected:
+
+    NodeWeakPtr actualNode_;
+
+};
+
+class AVRO_DECL NodeRecord : public NodeImplRecord {
+    std::vector<GenericDatum> defaultValues;
+public:
+    NodeRecord() : NodeImplRecord(AVRO_RECORD) { } 
+    NodeRecord(const HasName &name, const MultiLeaves &fields,
+        const LeafNames &fieldsNames,
+        const std::vector<GenericDatum>& dv) :
+        NodeImplRecord(AVRO_RECORD, name, fields, fieldsNames, NoSize()),
+        defaultValues(dv) { 
+        for (size_t i = 0; i < leafNameAttributes_.size(); ++i) {
+            if (!nameIndex_.add(leafNameAttributes_.get(i), i)) {
+                throw Exception(boost::format(
+                    "Cannot add duplicate name: %1%") %
+                    leafNameAttributes_.get(i));
+            }
+        }
+    }
+
+    void swap(NodeRecord& r) {
+        NodeImplRecord::swap(r);
+        defaultValues.swap(r.defaultValues);
+    }
+
+    SchemaResolution resolve(const Node &reader)  const;
+
+    void printJson(std::ostream &os, int depth) const;
+
+    bool isValid() const {
+        return ((nameAttribute_.size() == 1) && 
+            (leafAttributes_.size() == leafNameAttributes_.size()));
+    }
+
+    const GenericDatum& defaultValueAt(int index) {
+        return defaultValues[index];
+    }
+};
+
+class AVRO_DECL NodeEnum : public NodeImplEnum
+{
+  public:
+
+    NodeEnum() :
+        NodeImplEnum(AVRO_ENUM) 
+    { }
+
+    NodeEnum(const HasName &name, const LeafNames &symbols) :
+        NodeImplEnum(AVRO_ENUM, name, NoLeaves(), symbols, NoSize())
+    { 
+        for(size_t i=0; i < leafNameAttributes_.size(); ++i) {
+            if(!nameIndex_.add(leafNameAttributes_.get(i), i)) {
+                 throw Exception(boost::format("Cannot add duplicate name: %1%") % leafNameAttributes_.get(i));
+            }
+        }
+    }
+        
+    SchemaResolution resolve(const Node &reader)  const;
+
+    void printJson(std::ostream &os, int depth) const;
+
+    bool isValid() const {
+        return (
+                (nameAttribute_.size() == 1) && 
+                (leafNameAttributes_.size() > 0) 
+               );
+    }
+};
+
+class AVRO_DECL NodeArray : public NodeImplArray
+{
+  public:
+
+    NodeArray() :
+        NodeImplArray(AVRO_ARRAY)
+    { }
+
+    explicit NodeArray(const SingleLeaf &items) :
+        NodeImplArray(AVRO_ARRAY, NoName(), items, NoLeafNames(), NoSize())
+    { }
+
+    SchemaResolution resolve(const Node &reader)  const;
+
+    void printJson(std::ostream &os, int depth) const;
+
+    bool isValid() const {
+        return (leafAttributes_.size() == 1);
+    }
+};
+
+class AVRO_DECL NodeMap : public NodeImplMap
+{
+  public:
+
+    NodeMap() :
+        NodeImplMap(AVRO_MAP)
+    { 
+         NodePtr key(new NodePrimitive(AVRO_STRING));
+         doAddLeaf(key);
+    }
+
+    explicit NodeMap(const SingleLeaf &values) :
+        NodeImplMap(AVRO_MAP, NoName(), values, NoLeafNames(), NoSize())
+    { 
+        // need to add the key for the map too
+        NodePtr key(new NodePrimitive(AVRO_STRING));
+        doAddLeaf(key);
+
+        // key goes before value
+        std::swap(leafAttributes_.get(0), leafAttributes_.get(1));
+    }
+
+    SchemaResolution resolve(const Node &reader)  const;
+
+    void printJson(std::ostream &os, int depth) const;
+
+    bool isValid() const {
+        return (leafAttributes_.size() == 2);
+    }
+};
+
+class AVRO_DECL NodeUnion : public NodeImplUnion
+{
+  public:
+
+    NodeUnion() :
+        NodeImplUnion(AVRO_UNION)
+    { }
+
+    explicit NodeUnion(const MultiLeaves &types) :
+        NodeImplUnion(AVRO_UNION, NoName(), types, NoLeafNames(), NoSize())
+    { }
+
+    SchemaResolution resolve(const Node &reader)  const;
+
+    void printJson(std::ostream &os, int depth) const;
+
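+    // A union is valid iff it has at least one branch and no two branches
+    // resolve to the same name below: ["int", "string"] passes, ["int", "int"]
+    // fails, and so do two array (or two map) branches, which both map to the
+    // name "array" (or "map") here.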
+    bool isValid() const {
+        std::set<std::string> seen;
+        if (leafAttributes_.size() >= 1) {
+            for (size_t i = 0; i < leafAttributes_.size(); ++i) {
+                std::string name;
+                const NodePtr& n = leafAttributes_.get(i);
+                switch (n->type()) {
+                case AVRO_STRING:
+                    name = "string";
+                    break;
+                case AVRO_BYTES:
+                    name = "bytes";
+                    break;
+                case AVRO_INT:
+                    name = "int";
+                    break;
+                case AVRO_LONG:
+                    name = "long";
+                    break;
+                case AVRO_FLOAT:
+                    name = "float";
+                    break;
+                case AVRO_DOUBLE:
+                    name = "double";
+                    break;
+                case AVRO_BOOL:
+                    name = "bool";
+                    break;
+                case AVRO_NULL:
+                    name = "null";
+                    break;
+                case AVRO_ARRAY:
+                    name = "array";
+                    break;
+                case AVRO_MAP:
+                    name = "map";
+                    break;
+                case AVRO_RECORD:
+                case AVRO_ENUM:
+                case AVRO_UNION:
+                case AVRO_FIXED:
+                case AVRO_SYMBOLIC:
+                    name = n->name().fullname();
+                    break;
+                default:
+                    return false;
+                }
+                if (seen.find(name) != seen.end()) {
+                    return false;
+                }
+                seen.insert(name);
+            }
+            return true;
+        }
+        return false;
+    }
+};
+
+class AVRO_DECL NodeFixed : public NodeImplFixed
+{
+  public:
+
+    NodeFixed() :
+        NodeImplFixed(AVRO_FIXED)
+    { }
+
+    NodeFixed(const HasName &name, const HasSize &size) :
+        NodeImplFixed(AVRO_FIXED, name, NoLeaves(), NoLeafNames(), size)
+    { }
+
+    SchemaResolution resolve(const Node &reader)  const;
+
+    void printJson(std::ostream &os, int depth) const;
+
+    bool isValid() const {
+        return (
+                (nameAttribute_.size() == 1) && 
+                (sizeAttribute_.size() == 1) 
+               );
+    }
+};
+
+template < class A, class B, class C, class D >
+inline void 
+NodeImpl<A,B,C,D>::setLeafToSymbolic(int index, const NodePtr &node)
+{
+    if(!B::hasAttribute) {
+        throw Exception("Cannot change leaf node for nonexistent leaf");
+    } 
+
+    NodePtr &replaceNode = const_cast<NodePtr &>(leafAttributes_.get(index));
+    if(replaceNode->name() != node->name()) {
+        throw Exception("Symbolic name does not match the name of the schema it references");
+    }
+
+    NodePtr symbol(new NodeSymbolic);
+    NodeSymbolic *ptr = static_cast<NodeSymbolic *> (symbol.get());
+
+    ptr->setName(node->name());
+    ptr->setNode(node);
+    replaceNode.swap(symbol);
+}
+
+template < class A, class B, class C, class D >
+inline void 
+NodeImpl<A,B,C,D>::printBasicInfo(std::ostream &os) const
+{
+    os << type();
+    if(hasName()) {
+        os << ' ' << nameAttribute_.get();
+    }
+
+    if(D::hasAttribute) {
+        os << " " << sizeAttribute_.get();
+    }
+    os << '\n';
+    int count = leaves();
+    count = count ? count : names();
+    for(int i= 0; i < count; ++i) {
+        if( C::hasAttribute ) {
+            os << "name " << nameAt(i) << '\n';
+        }
+        if( type() != AVRO_SYMBOLIC && leafAttributes_.hasAttribute) {
+            leafAt(i)->printBasicInfo(os);
+        }
+    }
+    if(isCompound(type())) {
+        os << "end " << type() << '\n';
+    }
+}
+
+
+inline NodePtr resolveSymbol(const NodePtr &node) 
+{
+    if(node->type() != AVRO_SYMBOLIC) {
+        throw Exception("Only symbolic nodes may be resolved");
+    }
+    boost::shared_ptr<NodeSymbolic> symNode = boost::static_pointer_cast<NodeSymbolic>(node);
+    return symNode->getNode();
+}
+
+} // namespace avro
+
+#endif
diff --git a/lang/c++/api/Parser.hh b/lang/c++/api/Parser.hh
new file mode 100644
index 0000000..45aecf5
--- /dev/null
+++ b/lang/c++/api/Parser.hh
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Parser_hh__
+#define avro_Parser_hh__
+
+#include "Config.hh"
+#include "Reader.hh"
+
+namespace avro {
+
+///
+/// Class that wraps a Reader or ValidatingReader with an interface that uses
+/// explicit read* names instead of readValue
+///
+
+template<class Reader>
+class Parser : private boost::noncopyable
+{
+
+  public:
+
+    /// Constructor only works with Reader
+    explicit Parser(const InputBuffer &in) :
+        reader_(in)
+    {}
+
+    /// Constructor only works with ValidatingReader
+    Parser(const ValidSchema &schema, const InputBuffer &in) :
+        reader_(schema, in)
+    {}
+
+    void readNull() {
+        Null null;
+        reader_.readValue(null);
+    }
+
+    bool readBool() {
+        bool val;
+        reader_.readValue(val);
+        return val;
+    }
+
+    int32_t readInt() {
+        int32_t val;
+        reader_.readValue(val);
+        return val;
+    }
+
+    int64_t readLong() {
+        int64_t val;
+        reader_.readValue(val);
+        return val;
+    }
+
+    float readFloat() {
+        float val;
+        reader_.readValue(val);
+        return val;
+    }
+
+    double readDouble() {
+        double val;
+        reader_.readValue(val);
+        return val;
+    }
+
+    void readString(std::string &val) {
+        reader_.readValue(val);
+    }
+
+    void readBytes(std::vector<uint8_t> &val) {
+        reader_.readBytes(val);
+    }
+
+    template <size_t N>
+    void readFixed(uint8_t (&val)[N]) {
+        reader_.readFixed(val);
+    }
+
+    template<size_t N>
+    void readFixed(boost::array<uint8_t, N> &val) {
+        reader_.readFixed(val);
+    }
+
+    void readRecord() { 
+        reader_.readRecord();
+    }
+
+    void readRecordEnd() { 
+        reader_.readRecordEnd();
+    }
+
+    int64_t readArrayBlockSize() {
+        return reader_.readArrayBlockSize();
+    }
+
+    int64_t readUnion() { 
+        return reader_.readUnion();
+    }
+
+    int64_t readEnum() {
+        return reader_.readEnum();
+    }
+
+    int64_t readMapBlockSize() {
+        return reader_.readMapBlockSize();
+    }
+
+  private:
+
+    friend Type nextType(Parser<ValidatingReader> &p);
+    friend bool currentRecordName(Parser<ValidatingReader> &p, std::string &name);
+    friend bool nextFieldName(Parser<ValidatingReader> &p, std::string &name);
+
+    Reader reader_;
+
+};
+
+inline Type nextType(Parser<ValidatingReader> &p) {
+    return p.reader_.nextType();
+}
+
+inline bool currentRecordName(Parser<ValidatingReader> &p, std::string &name) {
+    return p.reader_.currentRecordName(name);
+}
+
+inline bool nextFieldName(Parser<ValidatingReader> &p, std::string &name) {
+    return p.reader_.nextFieldName(name);
+}
+
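+// Illustrative sketch (not part of the upstream documentation): reading a
+// record with an int field followed by a string field.  Assumes `schema` is
+// a matching ValidSchema and `data` an InputBuffer produced by a Serializer.
+//
+//   avro::Parser<avro::ValidatingReader> p(schema, data);
+//   p.readRecord();
+//   int32_t id = p.readInt();
+//   std::string name;
+//   p.readString(name);
+//   p.readRecordEnd();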
+} // namespace avro
+
+#endif
diff --git a/lang/c++/api/Reader.hh b/lang/c++/api/Reader.hh
new file mode 100644
index 0000000..d4ab96f
--- /dev/null
+++ b/lang/c++/api/Reader.hh
@@ -0,0 +1,208 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Reader_hh__
+#define avro_Reader_hh__
+
+#include <stdint.h>
+#include <vector>
+#include <boost/noncopyable.hpp>
+
+#include "Config.hh"
+#include "Zigzag.hh"
+#include "Types.hh"
+#include "Validator.hh"
+#include "buffer/BufferReader.hh"
+
+namespace avro {
+
+///
+/// Parses from an avro encoding to the requested type.  Assumes the next item
+/// in the avro binary data is the expected type.
+///
+
+template<class ValidatorType>
+class ReaderImpl : private boost::noncopyable
+{
+
+  public:
+
+    explicit ReaderImpl(const InputBuffer &buffer) :
+        reader_(buffer)
+    {}
+
+    ReaderImpl(const ValidSchema &schema, const InputBuffer &buffer) :
+        validator_(schema),
+        reader_(buffer)
+    {}
+
+    void readValue(Null &) {
+        validator_.checkTypeExpected(AVRO_NULL);
+    }
+
+    void readValue(bool &val) {
+        validator_.checkTypeExpected(AVRO_BOOL);
+        uint8_t ival = 0;
+        reader_.read(ival);
+        val = (ival != 0);
+    }
+
+    void readValue(int32_t &val) {
+        validator_.checkTypeExpected(AVRO_INT);
+        uint32_t encoded = static_cast<uint32_t>(readVarInt());
+        val = decodeZigzag32(encoded);
+    }
+
+    void readValue(int64_t &val) {
+        validator_.checkTypeExpected(AVRO_LONG);
+        uint64_t encoded = readVarInt();
+        val = decodeZigzag64(encoded);
+    }
+
+    void readValue(float &val) {
+        validator_.checkTypeExpected(AVRO_FLOAT);
+        union { 
+            float f;
+            uint32_t i;
+        } v;
+        reader_.read(v.i);
+        val = v.f;
+    }
+
+    void readValue(double &val) {
+        validator_.checkTypeExpected(AVRO_DOUBLE);
+        union { 
+            double d;
+            uint64_t i;
+        } v;
+        reader_.read(v.i);
+        val = v.d;
+    }
+
+    void readValue(std::string &val) {
+        validator_.checkTypeExpected(AVRO_STRING);
+        size_t size = static_cast<size_t>(readSize());
+        reader_.read(val, size);
+    }
+
+    void readBytes(std::vector<uint8_t> &val) {
+        validator_.checkTypeExpected(AVRO_BYTES);
+        size_t size = static_cast<size_t>(readSize());
+        val.resize(size);
+        reader_.read(reinterpret_cast<char *>(&val[0]), size);
+    }
+
+    void readFixed(uint8_t *val, size_t size) {
+        validator_.checkFixedSizeExpected(size);
+        reader_.read(reinterpret_cast<char *>(val), size);
+    }
+
+    template <size_t N>
+    void readFixed(uint8_t (&val)[N]) {
+        this->readFixed(val, N);
+    }
+  
+    template <size_t N>
+    void readFixed(boost::array<uint8_t, N> &val) {
+        this->readFixed(val.c_array(), N);
+    }
+  
+    void readRecord() { 
+        validator_.checkTypeExpected(AVRO_RECORD);
+        validator_.checkTypeExpected(AVRO_LONG);
+        validator_.setCount(1);
+    }
+
+    void readRecordEnd() { 
+        validator_.checkTypeExpected(AVRO_RECORD);
+        validator_.checkTypeExpected(AVRO_LONG);
+        validator_.setCount(0);
+    }
+
+    int64_t readArrayBlockSize() {
+        validator_.checkTypeExpected(AVRO_ARRAY);
+        return readCount();
+    }
+
+    int64_t readUnion() { 
+        validator_.checkTypeExpected(AVRO_UNION);
+        return readCount();
+    }
+
+    int64_t readEnum() {
+        validator_.checkTypeExpected(AVRO_ENUM);
+        return readCount();
+    }
+
+    int64_t readMapBlockSize() {
+        validator_.checkTypeExpected(AVRO_MAP);
+        return readCount();
+    }
+
+    Type nextType() const {
+        return validator_.nextTypeExpected();
+    }
+
+    bool currentRecordName(std::string &name) const {
+        return validator_.getCurrentRecordName(name);
+    }
+
+    bool nextFieldName(std::string &name) const {
+        return validator_.getNextFieldName(name);
+    }
+
+  private:
+
+    uint64_t readVarInt() {
+        uint64_t encoded = 0;
+        uint8_t val = 0;
+        int shift = 0;
+        do {
+            reader_.read(val);
+            uint64_t newbits = static_cast<uint64_t>(val & 0x7f) << shift;
+            encoded |= newbits;
+            shift += 7;
+        } while (val & 0x80);
+
+        return encoded;
+    }
+
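+    // Zigzag maps signed values to unsigned ones so that small magnitudes
+    // stay small: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, and so on.  The varint
+    // then emits seven bits per byte, least-significant group first, with the
+    // high bit set on all but the last byte.  Worked example: -3 zigzags to 5
+    // and encodes as the single byte 0x05, while 300 zigzags to 600
+    // (binary 100 1011000) and encodes as the two bytes 0xD8 0x04.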
+    int64_t readSize() {
+        uint64_t encoded = readVarInt();
+        int64_t size = decodeZigzag64(encoded);
+        return size;
+    }
+
+    int64_t readCount() {
+        validator_.checkTypeExpected(AVRO_LONG);
+        int64_t count = readSize();
+        validator_.setCount(count);
+        return count;
+    }
+
+    ValidatorType validator_;
+    BufferReader  reader_;
+
+};
+
+typedef ReaderImpl<NullValidator> Reader;
+typedef ReaderImpl<Validator> ValidatingReader;
+
+} // namespace avro
+
+#endif
diff --git a/lang/c++/api/Resolver.hh b/lang/c++/api/Resolver.hh
new file mode 100644
index 0000000..e1664f0
--- /dev/null
+++ b/lang/c++/api/Resolver.hh
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Resolver_hh__
+#define avro_Resolver_hh__
+
+#include <boost/noncopyable.hpp>
+#include <stdint.h>
+
+#include "Config.hh"
+#include "Reader.hh"
+
+/// \file Resolver.hh
+///
+
+namespace avro {
+
+class ValidSchema;
+class Layout;
+    
+class AVRO_DECL Resolver : private boost::noncopyable
+{
+
+  public:
+
+    virtual void parse(Reader &reader, uint8_t *address) const = 0;
+    virtual ~Resolver() {}
+
+};
+
+Resolver *constructResolver(
+        const ValidSchema &writerSchema,
+        const ValidSchema &readerSchema,
+        const Layout &readerLayout
+    );
+
+
+
+} // namespace avro
+
+#endif
diff --git a/lang/c++/api/ResolverSchema.hh b/lang/c++/api/ResolverSchema.hh
new file mode 100644
index 0000000..8eb6ec4
--- /dev/null
+++ b/lang/c++/api/ResolverSchema.hh
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_ResolverSchema_hh__
+#define avro_ResolverSchema_hh__
+
+#include <boost/noncopyable.hpp>
+#include <boost/shared_ptr.hpp>
+#include <stdint.h>
+
+#include "Config.hh"
+#include "Boost.hh"
+#include "Reader.hh"
+
+/// \file ResolverSchema.hh
+///
+
+namespace avro {
+    
+class ValidSchema;
+class Layout;
+class Resolver;
+
+class AVRO_DECL ResolverSchema {
+
+  public:
+
+    ResolverSchema(const ValidSchema &writer, const ValidSchema &reader, const Layout &readerLayout);
+
+  private:
+
+    friend class ResolvingReader;
+
+    void parse(Reader &reader, uint8_t *address); 
+
+    boost::shared_ptr<Resolver> resolver_;
+
+};
+
+} // namespace avro
+
+#endif
diff --git a/lang/c++/api/ResolvingReader.hh b/lang/c++/api/ResolvingReader.hh
new file mode 100644
index 0000000..26aee6d
--- /dev/null
+++ b/lang/c++/api/ResolvingReader.hh
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_ResolvingReader_hh__
+#define avro_ResolvingReader_hh__
+
+#include <stdint.h>
+#include <boost/noncopyable.hpp>
+
+#include "Config.hh"
+#include "ResolverSchema.hh"
+#include "Reader.hh"
+
+namespace avro {
+
+class AVRO_DECL ResolvingReader : private boost::noncopyable
+{
+
+  public:
+
+    ResolvingReader(const ResolverSchema &schema, const InputBuffer &in) :
+        reader_(in),
+        schema_(schema)
+    {}
+
+    template<typename T>
+    void parse(T &object) {
+        schema_.parse(reader_, reinterpret_cast<uint8_t *>(&object));
+    }
+
+  private:
+
+    Reader reader_;
+    ResolverSchema schema_;
+};
+
+} // namespace avro
+
+#endif
diff --git a/lang/c++/api/Schema.hh b/lang/c++/api/Schema.hh
new file mode 100644
index 0000000..8ce5f8d
--- /dev/null
+++ b/lang/c++/api/Schema.hh
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Schema_hh__ 
+#define avro_Schema_hh__ 
+
+#include "Config.hh"
+#include "NodeImpl.hh"
+
+/// \file
+///
+/// Schemas for representing all the avro types.  The compound schema objects
+/// allow composition from other schemas.
+///
+
+namespace avro {
+
+
+/// The root Schema object is a base class.  Nobody constructs this class directly.
+
+class AVRO_DECL Schema {
+public:
+
+    virtual ~Schema();
+
+    Type type() const {
+        return node_->type();
+    }
+
+    const NodePtr &root() const {
+        return node_;
+    }
+
+    NodePtr &root() {
+        return node_;
+    }
+
+  protected:
+    Schema();
+    explicit Schema(const NodePtr &node);
+    explicit Schema(Node *node);
+
+    NodePtr node_;
+};
+
+class AVRO_DECL NullSchema : public Schema {
+public:
+    NullSchema(): Schema(new NodePrimitive(AVRO_NULL)) {}
+};
+
+class AVRO_DECL BoolSchema : public Schema {
+public:
+    BoolSchema(): Schema(new NodePrimitive(AVRO_BOOL)) {}
+};
+
+class AVRO_DECL IntSchema : public Schema {
+public:
+    IntSchema(): Schema(new NodePrimitive(AVRO_INT)) {}
+};
+
+class AVRO_DECL LongSchema : public Schema {
+public:
+    LongSchema(): Schema(new NodePrimitive(AVRO_LONG)) {}
+};
+
+class AVRO_DECL FloatSchema : public Schema {
+public:
+    FloatSchema(): Schema(new NodePrimitive(AVRO_FLOAT)) {}
+};
+
+class AVRO_DECL DoubleSchema : public Schema {
+public:
+    DoubleSchema(): Schema(new NodePrimitive(AVRO_DOUBLE)) {}
+};
+
+class AVRO_DECL StringSchema : public Schema {
+public:
+    StringSchema(): Schema(new NodePrimitive(AVRO_STRING)) {}
+};
+
+class AVRO_DECL BytesSchema : public Schema {
+public:
+    BytesSchema(): Schema(new NodePrimitive(AVRO_BYTES)) {}
+};
+
+class AVRO_DECL RecordSchema : public Schema {
+public:
+    RecordSchema(const std::string &name);
+    void addField(const std::string &name, const Schema &fieldSchema);
+};
+
+class AVRO_DECL EnumSchema : public Schema {
+public:
+    EnumSchema(const std::string &name);
+    void addSymbol(const std::string &symbol);
+};
+
+class AVRO_DECL ArraySchema : public Schema {
+public:
+    ArraySchema(const Schema &itemsSchema);
+};
+
+class AVRO_DECL MapSchema : public Schema {
+public:
+    MapSchema(const Schema &valuesSchema);
+};
+
+class AVRO_DECL UnionSchema : public Schema {
+public:
+    UnionSchema();
+    void addType(const Schema &typeSchema);
+};
+
+class AVRO_DECL FixedSchema : public Schema {
+public:
+    FixedSchema(int size, const std::string &name);
+};
+
+class AVRO_DECL SymbolicSchema : public Schema {
+public:
+    SymbolicSchema(const Name& name, const NodePtr& link);
+};
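+
+// Illustrative sketch (not part of the upstream documentation): composing a
+// record schema with an int field and a nullable string field using the
+// classes declared above.
+//
+//   avro::RecordSchema rec("Person");
+//   rec.addField("id", avro::IntSchema());
+//   avro::UnionSchema opt;
+//   opt.addType(avro::NullSchema());
+//   opt.addType(avro::StringSchema());
+//   rec.addField("name", opt);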
+} // namespace avro
+
+#endif
diff --git a/lang/c++/api/SchemaResolution.hh b/lang/c++/api/SchemaResolution.hh
new file mode 100644
index 0000000..c9c4190
--- /dev/null
+++ b/lang/c++/api/SchemaResolution.hh
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_SchemaResolution_hh__
+#define avro_SchemaResolution_hh__
+
+#include "Config.hh"
+
+namespace avro {
+
+
+enum SchemaResolution {
+
+    /// The schemas definitely do not match
+    
+    RESOLVE_NO_MATCH, 
+
+    /// The schemas match at a cursory level
+    ///
+    /// For records and enums, this means the name is the same, but it does not
+    /// necessarily mean that every symbol or field is an exact match.
+    
+    RESOLVE_MATCH,    
+
+    /// For primitives, a match may occur if the type is promotable.  This means
+    /// that the writer matches the reader if the writer's type is promoted to
+    /// the specified type.
+    
+    //@{
+    
+    RESOLVE_PROMOTABLE_TO_LONG,
+    RESOLVE_PROMOTABLE_TO_FLOAT,
+    RESOLVE_PROMOTABLE_TO_DOUBLE,
+
+    //@}
+
+};
+
+} // namespace avro 
+
+#endif
diff --git a/lang/c++/api/Serializer.hh b/lang/c++/api/Serializer.hh
new file mode 100644
index 0000000..b34a621
--- /dev/null
+++ b/lang/c++/api/Serializer.hh
@@ -0,0 +1,134 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Serializer_hh__
+#define avro_Serializer_hh__
+
+#include <boost/noncopyable.hpp>
+
+#include "Config.hh"
+#include "Writer.hh"
+
+namespace avro {
+
+/// Class that wraps a Writer or ValidatingWriter with an interface that uses
+/// explicit write* names instead of writeValue
+
+template<class Writer>
+class Serializer : private boost::noncopyable
+{
+
+  public:
+
+    /// Constructor only works with Writer
+    explicit Serializer() :
+        writer_()
+    {}
+
+    /// Constructor only works with ValidatingWriter
+    Serializer(const ValidSchema &schema) :
+        writer_(schema)
+    {}
+
+    void writeNull() {
+        writer_.writeValue(Null());
+    }
+
+    void writeBool(bool val) {
+        writer_.writeValue(val);
+    }
+
+    void writeInt(int32_t val) {
+        writer_.writeValue(val);
+    }
+
+    void writeLong(int64_t val) {
+        writer_.writeValue(val);
+    }
+
+    void writeFloat(float val) {
+        writer_.writeValue(val);
+    }
+
+    void writeDouble(double val) {
+        writer_.writeValue(val);
+    }
+
+    void writeBytes(const void *val, size_t size) {
+        writer_.writeBytes(val, size);
+    }
+
+    template <size_t N>
+    void writeFixed(const uint8_t (&val)[N]) {
+        writer_.writeFixed(val);
+    }
+
+    template <size_t N>
+    void writeFixed(const boost::array<uint8_t, N> &val) {
+        writer_.writeFixed(val);
+    }
+
+    void writeString(const std::string &val) {
+        writer_.writeValue(val);
+    }
+
+    void writeRecord() {
+        writer_.writeRecord();
+    }
+
+    void writeRecordEnd() {
+        writer_.writeRecordEnd();
+    }
+
+    void writeArrayBlock(int64_t size) {
+        writer_.writeArrayBlock(size);
+    }
+
+    void writeArrayEnd() {
+        writer_.writeArrayEnd();
+    }
+
+    void writeMapBlock(int64_t size) {
+        writer_.writeMapBlock(size);
+    }
+
+    void writeMapEnd() {
+        writer_.writeMapEnd();
+    }
+
+    void writeUnion(int64_t choice) {
+        writer_.writeUnion(choice);
+    }
+
+    void writeEnum(int64_t choice) {
+        writer_.writeEnum(choice);
+    }
+
+    InputBuffer buffer() const {
+        return writer_.buffer();
+    }
+
+  private:
+
+    Writer writer_;
+
+};
+
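+// Illustrative sketch (not part of the upstream documentation): writing one
+// record through the validated interface.  ValidatingWriter is the validating
+// counterpart declared in Writer.hh; `schema` is assumed to describe a record
+// with a single int field.
+//
+//   avro::Serializer<avro::ValidatingWriter> s(schema);
+//   s.writeRecord();
+//   s.writeInt(42);
+//   s.writeRecordEnd();
+//   avro::InputBuffer data = s.buffer();
+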
+} // namespace avro
+
+#endif
diff --git a/lang/c++/api/Specific.hh b/lang/c++/api/Specific.hh
new file mode 100644
index 0000000..ef50318
--- /dev/null
+++ b/lang/c++/api/Specific.hh
@@ -0,0 +1,312 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Codec_hh__
+#define avro_Codec_hh__
+
+#include <string>
+#include <vector>
+#include <map>
+#include <algorithm>
+
+#include "boost/array.hpp"
+
+#include "Config.hh"
+#include "Encoder.hh"
+#include "Decoder.hh"
+
+/**
+ * A bunch of templates and specializations for encoding and decoding
+ * specific types.
+ *
+ * Primitive AVRO types BOOLEAN, INT, LONG, FLOAT, DOUBLE, STRING and BYTES
+ * get decoded to and encoded from C++ types bool, int32_t, int64_t, float,
+ * double, std::string and std::vector<uint8_t> respectively. In addition,
+ * std::vector<T> for arbitrary type T gets encoded as an Avro array of T.
+ * Similarly, std::map<std::string, T> for arbitrary type T gets encoded
+ * as an Avro map with value type T.
+ * 
+ * Users can have their custom types encoded/decoded by specializing
+ * avro::codec_traits class for their types.
+ */
+namespace avro {
+
+template <typename T> void encode(Encoder& e, const T& t);
+template <typename T> void decode(Decoder& d, T& t);
+
+/**
+ * Codec_traits tells avro how to encode and decode an object of given type.
+ *
+ * The class is expected to have two static methods:
+ * \li static void encode(Encoder& e, const T& value);
+ * \li static void decode(Decoder& d, T& value);
+ * The default template is empty; specializations supply the real work.
+ */
+template <typename T>
+struct codec_traits {
+};
+
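+/**
+ * Illustrative sketch (hypothetical type, not part of this header): a
+ * user-defined specialization for a struct encodes its members in field
+ * order by delegating back to avro::encode / avro::decode.
+ * \code
+ * struct Point { int32_t x; int32_t y; };
+ *
+ * namespace avro {
+ * template <> struct codec_traits<Point> {
+ *     static void encode(Encoder& e, const Point& p) {
+ *         avro::encode(e, p.x);
+ *         avro::encode(e, p.y);
+ *     }
+ *     static void decode(Decoder& d, Point& p) {
+ *         avro::decode(d, p.x);
+ *         avro::decode(d, p.y);
+ *     }
+ * };
+ * }
+ * \endcode
+ */
+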
+/**
+ * codec_traits for Avro boolean.
+ */
+template <> struct codec_traits<bool> {
+    /**
+     * Encodes a given value.
+     */
+    static void encode(Encoder& e, bool b) {
+        e.encodeBool(b);
+    }
+
+    /**
+     * Decodes into a given value.
+     */
+    static void decode(Decoder& d, bool& b) {
+        b = d.decodeBool();
+    }
+};
+
+/**
+ * codec_traits for Avro int.
+ */
+template <> struct codec_traits<int32_t> {
+    /**
+     * Encodes a given value.
+     */
+    static void encode(Encoder& e, int32_t i) {
+        e.encodeInt(i);
+    }
+
+    /**
+     * Decodes into a given value.
+     */
+    static void decode(Decoder& d, int32_t& i) {
+        i = d.decodeInt();
+    }
+};
+
+/**
+ * codec_traits for Avro long.
+ */
+template <> struct codec_traits<int64_t> {
+    /**
+     * Encodes a given value.
+     */
+    static void encode(Encoder& e, int64_t l) {
+        e.encodeLong(l);
+    }
+
+    /**
+     * Decodes into a given value.
+     */
+    static void decode(Decoder& d, int64_t& l) {
+        l = d.decodeLong();
+    }
+};
+
+/**
+ * codec_traits for Avro float.
+ */
+template <> struct codec_traits<float> {
+    /**
+     * Encodes a given value.
+     */
+    static void encode(Encoder& e, float f) {
+        e.encodeFloat(f);
+    }
+
+    /**
+     * Decodes into a given value.
+     */
+    static void decode(Decoder& d, float& f) {
+        f = d.decodeFloat();
+    }
+};
+
+/**
+ * codec_traits for Avro double.
+ */
+template <> struct codec_traits<double> {
+    /**
+     * Encodes a given value.
+     */
+    static void encode(Encoder& e, double d) {
+        e.encodeDouble(d);
+    }
+
+    /**
+     * Decodes into a given value.
+     */
+    static void decode(Decoder& d, double& dbl) {
+        dbl = d.decodeDouble();
+    }
+};
+
+/**
+ * codec_traits for Avro string.
+ */
+template <> struct codec_traits<std::string> {
+    /**
+     * Encodes a given value.
+     */
+    static void encode(Encoder& e, const std::string& s) {
+        e.encodeString(s);
+    }
+
+    /**
+     * Decodes into a given value.
+     */
+    static void decode(Decoder& d, std::string& s) {
+        s = d.decodeString();
+    }
+};
+
+/**
+ * codec_traits for Avro bytes.
+ */
+template <> struct codec_traits<std::vector<uint8_t> > {
+    /**
+     * Encodes a given value.
+     */
+    static void encode(Encoder& e, const std::vector<uint8_t>& b) {
+        e.encodeBytes(b);
+    }
+
+    /**
+     * Decodes into a given value.
+     */
+    static void decode(Decoder& d, std::vector<uint8_t>& s) {
+        d.decodeBytes(s);
+    }
+};
+
+/**
+ * codec_traits for Avro fixed.
+ */
+template <size_t N> struct codec_traits<boost::array<uint8_t, N> > {
+    /**
+     * Encodes a given value.
+     */
+    static void encode(Encoder& e, const boost::array<uint8_t, N>& b) {
+        e.encodeFixed(&b[0], N);
+    }
+
+    /**
+     * Decodes into a given value.
+     */
+    static void decode(Decoder& d, boost::array<uint8_t, N>& s) {
+        std::vector<uint8_t> v(N);
+        d.decodeFixed(N, v);
+        std::copy(&v[0], &v[0] + N, &s[0]);
+    }
+};
+
+/**
+ * codec_traits for Avro arrays.
+ */
+template <typename T> struct codec_traits<std::vector<T> > {
+    /**
+     * Encodes a given value.
+     */
+    static void encode(Encoder& e, const std::vector<T>& b) {
+        e.arrayStart();
+        if (! b.empty()) {
+            e.setItemCount(b.size());
+            for (typename std::vector<T>::const_iterator it = b.begin();
+                it != b.end(); ++it) {
+                e.startItem();
+                avro::encode(e, *it);
+            }
+        }
+        e.arrayEnd();
+    }
+
+    /**
+     * Decodes into a given value.
+     */
+    static void decode(Decoder& d, std::vector<T>& s) {
+        s.clear();
+        for (size_t n = d.arrayStart(); n != 0; n = d.arrayNext()) {
+            for (size_t i = 0; i < n; ++i) {
+                T t;
+                avro::decode(d, t);
+                s.push_back(t);
+            }
+        }
+    }
+};
+
+/**
+ * codec_traits for Avro maps.
+ */
+template <typename T> struct codec_traits<std::map<std::string, T> > {
+    /**
+     * Encodes a given value.
+     */
+    static void encode(Encoder& e, const std::map<std::string, T>& b) {
+        e.mapStart();
+        if (! b.empty()) {
+            e.setItemCount(b.size());
+            for (typename std::map<std::string, T>::const_iterator
+                it = b.begin();
+                it != b.end(); ++it) {
+                e.startItem();
+                avro::encode(e, it->first);
+                avro::encode(e, it->second);
+            }
+        }
+        e.mapEnd();
+    }
+
+    /**
+     * Decodes into a given value.
+     */
+    static void decode(Decoder& d, std::map<std::string, T>& s) {
+        s.clear();
+        for (size_t n = d.mapStart(); n != 0; n = d.mapNext()) {
+            for (size_t i = 0; i < n; ++i) {
+                std::string k;
+                avro::decode(d, k);
+                T t;
+                avro::decode(d, t);
+                s[k] = t;
+            }
+        }
+    }
+};
+
+/**
+ * Generic encoder function that makes use of the codec_traits.
+ */
+template <typename T>
+void encode(Encoder& e, const T& t) {
+    codec_traits<T>::encode(e, t);
+}
+
+/**
+ * Generic decoder function that makes use of the codec_traits.
+ */
+template <typename T>
+void decode(Decoder& d, T& t) {
+    codec_traits<T>::decode(d, t);
+}
+
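+/**
+ * Illustrative sketch (not part of the upstream documentation): a round trip
+ * through a memory stream.  binaryEncoder()/binaryDecoder() and the memory
+ * streams are assumed from Encoder.hh, Decoder.hh and Stream.hh.
+ * \code
+ * std::map<std::string, int32_t> m;
+ * m["answer"] = 42;
+ * std::auto_ptr<avro::OutputStream> out = avro::memoryOutputStream();
+ * avro::EncoderPtr e = avro::binaryEncoder();
+ * e->init(*out);
+ * avro::encode(*e, m);
+ * e->flush();
+ * std::auto_ptr<avro::InputStream> in = avro::memoryInputStream(*out);
+ * avro::DecoderPtr d = avro::binaryDecoder();
+ * d->init(*in);
+ * std::map<std::string, int32_t> copy;
+ * avro::decode(*d, copy);
+ * \endcode
+ */
+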
+}   // namespace avro
+#endif // avro_Codec_hh__
+
+
+
diff --git a/lang/c++/api/Stream.hh b/lang/c++/api/Stream.hh
new file mode 100644
index 0000000..92b2334
--- /dev/null
+++ b/lang/c++/api/Stream.hh
@@ -0,0 +1,417 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Stream_hh__
+#define avro_Stream_hh__
+
+#include <memory>
+#include <string.h>
+#include <stdint.h>
+
+#include "boost/utility.hpp"
+
+#include "Config.hh"
+#include "Exception.hh"
+
+namespace avro {
+
+/**
+ * A no-copy input stream.
+ */
+class AVRO_DECL InputStream : boost::noncopyable {
+protected:
+
+    /**
+     * An empty constructor.
+     */
+    InputStream() { }
+
+public:
+    /**
+     * Destructor.
+     */
+    virtual ~InputStream() { }
+
+    /**
+     * Returns some of the available data.
+     *
+     * Returns true if some data is available, false if no more data is
+     * available or an error has occurred.
+     */
+    virtual bool next(const uint8_t** data, size_t* len) = 0;
+
+    /**
+     * "Returns" back some of the data to the stream. The returned
+     * data must be less than what was obtained in the last call to
+     * next().
+     */
+    virtual void backup(size_t len) = 0;
+
+    /**
+     * Skips number of bytes specified by len.
+     */
+    virtual void skip(size_t len) = 0;
+
+    /**
+     * Returns the number of bytes read from this stream so far.
+     * All the bytes made available through next() are considered
+     * consumed unless returned using backup().
+     */
+    virtual size_t byteCount() const = 0;
+};
+
+/**
+ * A no-copy output stream.
+ */
+class AVRO_DECL OutputStream : boost::noncopyable {
+protected:
+
+    /**
+     * An empty constructor.
+     */
+    OutputStream() { }
+public:
+
+    /**
+     * Destructor.
+     */
+    virtual ~OutputStream() { }
+
+    /**
+     * Returns a buffer that can be written into.
+     * On successful return, data has the pointer to the buffer
+     * and len has the number of bytes available at data.
+     */
+    virtual bool next(uint8_t** data, size_t* len) = 0;
+
+    /**
+     * "Returns" back to the stream some of the buffer obtained
+     * in the last call to next().
+     */
+    virtual void backup(size_t len) = 0;
+
+    /**
+     * Number of bytes written so far into this stream. The whole buffer
+     * returned by next() is assumed to be written unless some of
+     * it was returned using backup().
+     */
+    virtual uint64_t byteCount() const = 0;
+
+    /**
+     * Flushes any data remaining in the buffer to the stream's underlying
+     * store, if any.
+     */
+    virtual void flush() = 0;
+};
+
+/**
+ * Returns a new OutputStream, which grows in memory chunks of specified size.
+ */
+AVRO_DECL std::auto_ptr<OutputStream> memoryOutputStream(size_t chunkSize = 4 * 1024);
+
+/**
+ * Returns a new InputStream, with the data from the given byte array.
+ * It does not copy the data; the byte array must remain valid
+ * for as long as the InputStream is in use.
+ */
+AVRO_DECL std::auto_ptr<InputStream> memoryInputStream(const uint8_t* data, size_t len);
+
+/**
+ * Returns a new InputStream with the contents written into an
+ * outputstream. The output stream must have been returned by
+ * an earlier call to memoryOutputStream(). The contents for the new
+ * input stream are a snapshot of the output stream. One can construct
+ * any number of memory input streams from a single memory output stream.
+ */
+AVRO_DECL std::auto_ptr<InputStream> memoryInputStream(const OutputStream& source);
+
+/**
+ * Returns the contents written so far into the output stream, which should
+ * be a memory output stream. That is, it must have been returned by a previous
+ * call to memoryOutputStream().
+ */
+AVRO_DECL boost::shared_ptr<std::vector<uint8_t> > snapshot(const OutputStream& source);
+
+/**
+ * Returns a new OutputStream whose contents would be stored in a file.
+ * Data is written in chunks of given buffer size.
+ *
+ * If there is a file with the given name, it is truncated and overwritten.
+ * If there is no file with the given name, it is created.
+ */
+AVRO_DECL std::auto_ptr<OutputStream> fileOutputStream(const char* filename,
+    size_t bufferSize = 8 * 1024);
+
+/**
+ * Returns a new InputStream whose contents come from the given file.
+ * Data is read in chunks of given buffer size.
+ */
+AVRO_DECL std::auto_ptr<InputStream> fileInputStream(const char* filename,
+    size_t bufferSize = 8 * 1024);
+
+/**
+ * Returns a new OutputStream whose contents will be sent to the given
+ * std::ostream. The std::ostream object should outlive the returned
+ * OutputStream.
+ */
+AVRO_DECL std::auto_ptr<OutputStream> ostreamOutputStream(std::ostream& os,
+    size_t bufferSize = 8 * 1024);
+
+/**
+ * Returns a new InputStream whose contents come from the given
+ * std::istream. The std::istream object should outlive the returned
+ * InputStream.
+ */
+AVRO_DECL std::auto_ptr<InputStream> istreamInputStream(std::istream& in,
+    size_t bufferSize = 8 * 1024);
+
+/** A convenience class for reading from an InputStream */
+struct StreamReader {
+    /**
+     * The underlying input stream.
+     */
+    InputStream* in_;
+
+    /**
+     * The next location to read from.
+     */
+    const uint8_t* next_;
+
+    /**
+     * One past the last valid location.
+     */
+    const uint8_t* end_;
+
+    /**
+     * Constructs an empty reader.
+     */
+    StreamReader() : in_(0), next_(0), end_(0) { }
+
+    /**
+     * Constructs a reader with the given underlying stream.
+     */
+    StreamReader(InputStream& in) : in_(0), next_(0), end_(0) { reset(in); }
+
+    /**
+     * Replaces the current input stream with the given one after backing up
+     * the original one if required.
+     */
+    void reset(InputStream& is) {
+        if (in_ != 0 && end_ != next_) {
+            in_->backup(end_ - next_);
+        }
+        in_ = &is;
+        next_ = end_ = 0;
+    }
+
+    /**
+     * Reads just one byte from the underlying stream. If no more
+     * data is available, throws an exception.
+     */
+    uint8_t read() {
+        if (next_ == end_) {
+            more();
+        }
+        return *next_++;
+    }
+
+    /**
+     * Reads the given number of bytes from the underlying stream.
+     * If there are not that many bytes, throws an exception.
+     */
+    void readBytes(uint8_t* b, size_t n) {
+        while (n > 0) {
+            if (next_ == end_) {
+                more();
+            }
+            size_t q = end_ - next_;
+            if (q > n) {
+                q = n;
+            }
+            ::memcpy(b, next_, q);
+            next_ += q;
+            b += q;
+            n -= q;
+        }
+    }
+
+    /**
+     * Skips the given number of bytes. If there are not that many
+     * bytes available, throws an exception.
+     */
+    void skipBytes(size_t n) {
+        if (n > static_cast<size_t>(end_ - next_)) {
+            n -= end_ - next_;
+            next_ = end_;
+            in_->skip(n);
+        } else {
+            next_ += n;
+        }
+    }
+
+    /**
+     * Gets as many bytes from the underlying stream as possible in a single
+     * chunk.
+     * \return true if some data could be obtained; false if no more
+     * data is available on the stream.
+     */
+    bool fill() {
+        size_t n = 0;
+        while (in_->next(&next_, &n)) {
+            if (n != 0) {
+                end_ = next_ + n;
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Tries to get more data and if it cannot, throws an exception.
+     */
+    void more() {
+        if (! fill()) {
+            throw Exception("EOF reached");
+        }
+    }
+
+    /**
+     * Returns true if and only if the end of stream is not reached.
+     */
+    bool hasMore() {
+        return (next_ == end_) ? fill() : true;
+    }
+};
+
+/**
+ * A convenience class to write data into an OutputStream.
+ */
+struct StreamWriter {
+    /**
+     * The underlying output stream for this writer.
+     */
+    OutputStream* out_;
+
+    /**
+     * The next location to write to.
+     */
+    uint8_t* next_;
+    
+    /**
+     * One past the last location one can write to.
+     */
+    uint8_t* end_;
+
+    /**
+     * Constructs a writer with no underlying stream.
+     */
+    StreamWriter() : out_(0), next_(0), end_(0) { }
+
+    /**
+     * Constructs a new writer with the given underlying stream.
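+// Illustrative sketch (hypothetical struct and names, not from the upstream
+// docs): `rs` is a ResolverSchema built from writer/reader schemas and a
+// Layout describing MyRecord's in-memory offsets; `data` is an InputBuffer.
+//
+//   avro::ResolvingReader r(rs, data);
+//   MyRecord rec;
+//   r.parse(rec);
+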
+     */
+    StreamWriter(OutputStream& out) : out_(0), next_(0), end_(0) { reset(out); }
+
+    /**
+     * Replaces the current underlying stream with a new one.
+     * If required, it backs up unused bytes in the previous stream.
+     */
+    void reset(OutputStream& os) {
+        if (out_ != 0 && end_ != next_) {
+            out_->backup(end_ - next_);
+        }
+        out_ = &os;
+        next_ = end_;
+    }
+
+    /**
+     * Writes a single byte.
+     */
+    void write(uint8_t c) {
+        if (next_ == end_) {
+            more();
+        }
+        *next_++ = c;
+    }
+
+    /**
+     * Writes the specified number of bytes starting at \p b.
+     */
+    void writeBytes(const uint8_t* b, size_t n) {
+        while (n > 0) {
+            if (next_ == end_) {
+                more();
+            }
+            size_t q = end_ - next_;
+            if (q > n) {
+                q = n;
+            }
+            ::memcpy(next_, b, q);
+            next_ += q;
+            b += q;
+            n -= q;
+        }
+    }
+
+    /**
+     * Backs up any buffer space beyond the currently written data and
+     * flushes the underlying stream.
+     */
+    void flush() {
+        if (next_ != end_) {
+            out_->backup(end_ - next_);
+            next_ = end_;
+        }
+        out_->flush();
+    }
+
+    /**
+     * Gets more space to write to. Throws an exception if it cannot.
+     */
+    void more() {
+        size_t n = 0;
+        while (out_->next(&next_, &n)) {
+            if (n != 0) {
+                end_ = next_ + n;
+                return;
+            }
+        }
+        throw Exception("EOF reached");
+    }
+
+};
+
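+/**
+ * Illustrative sketch (not part of the upstream documentation): a byte
+ * written through a StreamWriter into a memory stream can be read back
+ * through a StreamReader.
+ * \code
+ * std::auto_ptr<avro::OutputStream> out = avro::memoryOutputStream();
+ * avro::StreamWriter w(*out);
+ * w.write(0x01);
+ * w.flush();
+ * std::auto_ptr<avro::InputStream> in = avro::memoryInputStream(*out);
+ * avro::StreamReader r(*in);
+ * uint8_t b = r.read();   // 0x01
+ * \endcode
+ */
+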
+/**
+ * A convenience function to copy all the contents of an input stream into
+ * an output stream.
+ */
+inline void copy(InputStream& in, OutputStream& out)
+{
+    const uint8_t *p = 0;
+    size_t n = 0;
+    StreamWriter w(out);
+    while (in.next(&p, &n)) {
+        w.writeBytes(p, n);
+    }
+    w.flush();
+}
+
+}   // namespace avro
+#endif
+
+
diff --git a/lang/c++/api/Types.hh b/lang/c++/api/Types.hh
new file mode 100644
index 0000000..46430d1
--- /dev/null
+++ b/lang/c++/api/Types.hh
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Types_hh__
+#define avro_Types_hh__
+
+#include <iostream>
+
+#include "Config.hh"
+
+namespace avro {
+
+/**
+ * The "type" for the schema.
+ */
+enum Type {
+
+    AVRO_STRING,    /*!< String */
+    AVRO_BYTES,     /*!< Sequence of variable-length byte data */
+    AVRO_INT,       /*!< 32-bit integer */
+    AVRO_LONG,      /*!< 64-bit integer */
+    AVRO_FLOAT,     /*!< Floating point number */
+    AVRO_DOUBLE,    /*!< Double precision floating point number */
+    AVRO_BOOL,      /*!< Boolean value */
+    AVRO_NULL,      /*!< Null */
+
+    AVRO_RECORD,    /*!< Record, a sequence of fields */
+    AVRO_ENUM,      /*!< Enumeration */
+    AVRO_ARRAY,     /*!< Homogeneous array of some specific type */
+    AVRO_MAP,       /*!< Homogeneous map from string to some specific type */
+    AVRO_UNION,     /*!< Union of one or more types */
+    AVRO_FIXED,     /*!< Fixed number of bytes */
+
+    AVRO_NUM_TYPES, /*!< Marker */
+    
+    // The following is a pseudo-type used in implementation
+    
+    AVRO_SYMBOLIC = AVRO_NUM_TYPES, /*!< Used internally to avoid circular references. */
+    AVRO_UNKNOWN  = -1 /*!< Used internally. */
+
+};
+
+/**
+ * Returns true if and only if the given type is a primitive.
+ * Primitive types are: string, bytes, int, long, float, double, boolean
+ * and null.
+ */
+inline bool isPrimitive(Type t) {
+    return (t >= AVRO_STRING) && (t < AVRO_RECORD);
+}
+
+/**
+ * Returns true if and only if the given type is a valid non-primitive type.
+ * Compound types are: record, enum, array, map, union and fixed.
+ */
+inline bool isCompound(Type t) {
+    return (t>= AVRO_RECORD) && (t < AVRO_NUM_TYPES);
+}
+
+/**
+ * Returns true if and only if the given type is a valid avro type.
+ */
+inline bool isAvroType(Type t) {
+    return (t >= AVRO_STRING) && (t < AVRO_NUM_TYPES);
+}
+
+/**
+ * Returns true if and only if the given type is within the valid range
+ * of enumeration.
+ */
+inline bool isAvroTypeOrPseudoType(Type t) {
+    return (t >= AVRO_STRING) && (t <= AVRO_NUM_TYPES);
+}
+
+/**
+ * Converts the given type into a string. Useful for generating messages.
+ */
+AVRO_DECL const std::string& toString(Type type);
+
+/**
+ * Writes a string form of the given type into the given ostream.
+ */
+AVRO_DECL std::ostream &operator<< (std::ostream &os, avro::Type type);
+
+/// Defines a type to identify Null in template functions.
+struct AVRO_DECL Null { };
+
+/**
+ * Writes schema for null \p null type to \p os.
+ * \param os The ostream to write to.
+ * \param null The value to be written.
+ */
+std::ostream& operator<< (std::ostream &os, const Null &null);
+
+} // namespace avro
+
+
+#endif
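[Annotation, not part of the patch: a small sketch exercising the helpers and
the operator<< declared above.]

    #include <iostream>
    #include "Types.hh"

    int main() {
        avro::Type t = avro::AVRO_MAP;
        std::cout << t                                    // prints via operator<<
                  << " primitive=" << avro::isPrimitive(t)  // false
                  << " compound="  << avro::isCompound(t)   // true
                  << '\n';
        return 0;
    }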
diff --git a/lang/c++/api/ValidSchema.hh b/lang/c++/api/ValidSchema.hh
new file mode 100644
index 0000000..30eb33e
--- /dev/null
+++ b/lang/c++/api/ValidSchema.hh
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_ValidSchema_hh__ 
+#define avro_ValidSchema_hh__ 
+
+#include "Config.hh"
+#include "Node.hh"
+
+namespace avro {
+
+class AVRO_DECL Schema;
+
+/// A ValidSchema is basically an immutable Schema that has passed some
+/// minimum of sanity checks.  Once validated, any Schema that is part of
+/// this ValidSchema is considered locked, and cannot be modified (an attempt
+/// to modify a locked Schema will throw).  Also, as it is validated, any
+/// recursive duplications of schemas are replaced with symbolic links to the
+/// original.
+///
+/// Once a Schema is converted to a valid schema it can be used in validating
+/// parsers/serializers, converted to a JSON schema, etc.
+///
+
+class AVRO_DECL ValidSchema {
+public:
+    explicit ValidSchema(const NodePtr &root);
+    explicit ValidSchema(const Schema &schema);
+    ValidSchema();
+
+    void setSchema(const Schema &schema);
+
+    const NodePtr &root() const {
+        return root_;
+    }
+
+    void toJson(std::ostream &os) const;
+
+    void toFlatList(std::ostream &os) const;
+
+  protected:
+    NodePtr root_;
+};
+
+} // namespace avro
+
+#endif
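[Annotation, not part of the patch: a sketch of building a ValidSchema from
JSON, assuming the compileJsonSchema() helper declared in Compiler.hh.]

    #include <sstream>
    #include <iostream>
    #include "Compiler.hh"
    #include "ValidSchema.hh"

    int main() {
        std::istringstream json(
            "{\"type\":\"record\",\"name\":\"r\","
            "\"fields\":[{\"name\":\"f\",\"type\":\"long\"}]}");
        avro::ValidSchema schema;
        avro::compileJsonSchema(json, schema); // throws on an invalid schema
        schema.toJson(std::cout);              // emit the validated schema
        return 0;
    }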
diff --git a/lang/c++/api/Validator.hh b/lang/c++/api/Validator.hh
new file mode 100644
index 0000000..1057330
--- /dev/null
+++ b/lang/c++/api/Validator.hh
@@ -0,0 +1,161 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Validating_hh__
+#define avro_Validating_hh__
+
+#include <boost/noncopyable.hpp>
+#include <vector>
+#include <stdint.h>
+
+#include "Config.hh"
+#include "Types.hh"
+#include "ValidSchema.hh"
+
+namespace avro {
+
+class AVRO_DECL NullValidator : private boost::noncopyable
+{
+  public:
+
+    explicit NullValidator(const ValidSchema &schema) {}
+    NullValidator() {}
+
+    void setCount(int64_t val) {}
+
+    bool typeIsExpected(Type type) const {
+        return true;
+    }
+
+    Type nextTypeExpected() const {
+        return AVRO_UNKNOWN;
+    }
+
+    int nextSizeExpected() const {
+        return 0;
+    }
+
+    bool getCurrentRecordName(std::string &name) const {
+        return true;
+    }
+
+    bool getNextFieldName(std::string &name) const {
+        return true;
+    }
+
+    void checkTypeExpected(Type type) { }
+    void checkFixedSizeExpected(int size) { }
+
+
+};
+
+/// This class is used by both the validating writer and parser objects.  It
+/// advances the parse tree (which contains the logic for how to advance
+/// through the various compound types; for example, a record must advance
+/// through all leaf nodes but a union only skips to one), and reports which
+/// type is next.
+
+class AVRO_DECL Validator : private boost::noncopyable
+{
+  public:
+
+    explicit Validator(const ValidSchema &schema);
+
+    void setCount(int64_t val);
+
+    bool typeIsExpected(Type type) const {
+        return (expectedTypesFlag_ & typeToFlag(type)) != 0;
+    }
+
+    Type nextTypeExpected() const {
+        return nextType_;
+    }
+
+    int nextSizeExpected() const;
+
+    bool getCurrentRecordName(std::string &name) const;
+    bool getNextFieldName(std::string &name) const;
+
+    void checkTypeExpected(Type type) {
+        if(! typeIsExpected(type)) {
+            throw Exception(
+                boost::format("Type %1% does not match schema %2%") 
+                    % type % nextType_
+            );
+        }
+        advance();
+    }
+
+    void checkFixedSizeExpected(int size) { 
+        if( nextSizeExpected() != size) {
+            throw Exception(
+                boost::format("Wrong size for fixed, got %1%, expected %2%") 
+                    % size % nextSizeExpected()
+            );
+        }
+        checkTypeExpected(AVRO_FIXED);
+    }
+
+  private:
+
+    typedef uint32_t flag_t;
+
+    flag_t typeToFlag(Type type) const {
+        flag_t flag = (1L << type);
+        return flag;
+    }
+
+    void setupOperation(const NodePtr &node);
+
+    void setWaitingForCount();
+
+    void advance();
+    void doAdvance();
+
+    void enumAdvance();
+    bool countingSetup();
+    void countingAdvance();
+    void unionAdvance();
+    void fixedAdvance();
+
+    void setupFlag(Type type);
+
+    const ValidSchema schema_;
+
+    Type nextType_; 
+    flag_t expectedTypesFlag_;
+    bool compoundStarted_;
+    bool waitingForCount_;
+    int64_t count_;
+
+    struct CompoundType {
+        explicit CompoundType(const NodePtr &n) :
+            node(n), pos(0)
+        {}
+        NodePtr node;  ///< save the node
+        size_t  pos;   ///< track the leaf position to visit
+    };
+
+    std::vector<CompoundType> compoundStack_;
+    std::vector<size_t> counters_;
+
+};
+
+} // namespace avro
+
+#endif
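[Annotation, not part of the patch: typeToFlag() gives each Type its own bit,
so the set of currently expected types is a single bitmask and
typeIsExpected() is one AND.  A fragment, assuming 'schema' was compiled from
the record schema in the previous sketch.]

    avro::Validator v(schema);                  // root of 'schema' is a record
    if (v.typeIsExpected(avro::AVRO_RECORD)) {
        v.checkTypeExpected(avro::AVRO_RECORD); // advances the parse tree
    }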
diff --git a/lang/c++/api/Writer.hh b/lang/c++/api/Writer.hh
new file mode 100644
index 0000000..34419dd
--- /dev/null
+++ b/lang/c++/api/Writer.hh
@@ -0,0 +1,185 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Writer_hh__
+#define avro_Writer_hh__
+
+#include <boost/noncopyable.hpp>
+
+#include "Config.hh"
+#include "buffer/Buffer.hh"
+#include "Zigzag.hh"
+#include "Types.hh"
+#include "Validator.hh"
+
+namespace avro {
+
+/// Class for writing avro data to a stream.
+
+template<class ValidatorType>
+class WriterImpl : private boost::noncopyable
+{
+
+  public:
+
+    WriterImpl() {}
+
+    explicit WriterImpl(const ValidSchema &schema) :
+        validator_(schema) 
+    {}
+
+    void writeValue(const Null &) {
+        validator_.checkTypeExpected(AVRO_NULL);
+    }
+
+    void writeValue(bool val) {
+        validator_.checkTypeExpected(AVRO_BOOL);
+        int8_t byte = (val != 0);
+        buffer_.writeTo(byte);
+    }
+
+    void writeValue(int32_t val) {
+        validator_.checkTypeExpected(AVRO_INT);
+        boost::array<uint8_t, 5> bytes;
+        size_t size = encodeInt32(val, bytes);
+        buffer_.writeTo(reinterpret_cast<const char *>(bytes.data()), size);
+    }
+
+    void writeValue(int64_t val) {
+        validator_.checkTypeExpected(AVRO_LONG);
+        putLong(val);
+    }
+
+    void writeValue(float val) {
+        validator_.checkTypeExpected(AVRO_FLOAT);
+        union {
+            float f;
+            int32_t i;
+        } v;
+    
+        v.f = val;
+        buffer_.writeTo(v.i);
+    }
+
+    void writeValue(double val) {
+        validator_.checkTypeExpected(AVRO_DOUBLE);
+        union {
+            double d;
+            int64_t i;
+        } v;
+        
+        v.d = val;
+        buffer_.writeTo(v.i);
+    }
+
+    void writeValue(const std::string &val) {
+        validator_.checkTypeExpected(AVRO_STRING);
+        putBytes(val.c_str(), val.size());
+    }
+
+    void writeBytes(const void *val, size_t size) {
+        validator_.checkTypeExpected(AVRO_BYTES);
+        putBytes(val, size);
+    }
+
+    template <size_t N>
+    void writeFixed(const uint8_t (&val)[N]) {
+        validator_.checkFixedSizeExpected(N);
+        buffer_.writeTo(reinterpret_cast<const char *>(val), N);
+    }
+
+    template <size_t N>
+    void writeFixed(const boost::array<uint8_t, N> &val) {
+        validator_.checkFixedSizeExpected(val.size());
+        buffer_.writeTo(reinterpret_cast<const char *>(val.data()), val.size());
+    }
+
+    void writeRecord() {
+        validator_.checkTypeExpected(AVRO_RECORD);
+        validator_.checkTypeExpected(AVRO_LONG);
+        validator_.setCount(1);
+    }
+
+    void writeRecordEnd() {
+        validator_.checkTypeExpected(AVRO_RECORD);
+        validator_.checkTypeExpected(AVRO_LONG);
+        validator_.setCount(0);
+    }
+
+    void writeArrayBlock(int64_t size) {
+        validator_.checkTypeExpected(AVRO_ARRAY);
+        writeCount(size);
+    }
+
+    void writeArrayEnd() {
+        writeArrayBlock(0);
+    }
+
+    void writeMapBlock(int64_t size) {
+        validator_.checkTypeExpected(AVRO_MAP);
+        writeCount(size);
+    }
+
+    void writeMapEnd() {
+        writeMapBlock(0);
+    }
+
+    void writeUnion(int64_t choice) {
+        validator_.checkTypeExpected(AVRO_UNION);
+        writeCount(choice);
+    }
+
+    void writeEnum(int64_t choice) {
+        validator_.checkTypeExpected(AVRO_ENUM);
+        writeCount(choice);
+    }
+
+    InputBuffer buffer() const {
+        return buffer_;
+    }
+
+  private:
+
+    void putLong(int64_t val) {
+        boost::array<uint8_t, 10> bytes;
+        size_t size = encodeInt64(val, bytes);
+        buffer_.writeTo(reinterpret_cast<const char *>(bytes.data()), size);
+    }
+
+    void putBytes(const void *val, size_t size) {
+        putLong(size);
+        buffer_.writeTo(reinterpret_cast<const char *>(val), size);
+    }
+
+    void writeCount(int64_t count) {
+        validator_.checkTypeExpected(AVRO_LONG);
+        validator_.setCount(count);
+        putLong(count);
+    }
+
+    ValidatorType validator_;
+    OutputBuffer buffer_;
+
+};
+
+typedef WriterImpl<NullValidator> Writer;
+typedef WriterImpl<Validator> ValidatingWriter;
+
+} // namespace avro
+
+#endif
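[Annotation, not part of the patch: the validated write path, as a fragment
assuming the record schema {"f": long} from the earlier sketch.]

    avro::ValidatingWriter writer(schema);
    writer.writeRecord();               // enter the record
    writer.writeValue(int64_t(42));     // field "f"; a mismatched type throws
    writer.writeRecordEnd();
    avro::InputBuffer serialized = writer.buffer();

With the Writer typedef (NullValidator), the same calls skip every schema
check and perform only the binary encoding.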
diff --git a/lang/c++/api/Zigzag.hh b/lang/c++/api/Zigzag.hh
new file mode 100644
index 0000000..67fa1a0
--- /dev/null
+++ b/lang/c++/api/Zigzag.hh
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Encoding_hh__
+#define avro_Encoding_hh__
+
+#include <stdint.h>
+#include <boost/array.hpp>
+
+#include "Config.hh"
+/// \file
+/// Functions for encoding and decoding integers with zigzag compression
+
+namespace avro {
+
+AVRO_DECL uint64_t encodeZigzag64(int64_t input);
+AVRO_DECL int64_t decodeZigzag64(uint64_t input);
+
+AVRO_DECL uint32_t encodeZigzag32(int32_t input);
+AVRO_DECL int32_t decodeZigzag32(uint32_t input);
+
+AVRO_DECL size_t encodeInt32(int32_t input, boost::array<uint8_t, 5> &output);
+AVRO_DECL size_t encodeInt64(int64_t input, boost::array<uint8_t, 10> &output);
+
+} // namespace avro
+
+#endif
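[Annotation, not part of the patch: zigzag maps signed integers to unsigned
ones so values near zero get short encodings -- per the Avro spec, n maps to
(n << 1) ^ (n >> 31) for 32 bits.  encodeInt32/encodeInt64 then emit the
result as a variable-length quantity, 7 bits per byte, which is why the
output arrays are 5 and 10 bytes wide.]

    #include <cassert>
    #include "Zigzag.hh"

    int main() {
        // 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4, ...
        assert(avro::encodeZigzag32(-1) == 1);
        assert(avro::encodeZigzag32( 2) == 4);
        assert(avro::decodeZigzag32(avro::encodeZigzag32(-7)) == -7);
        return 0;
    }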
diff --git a/lang/c++/api/buffer/Buffer.hh b/lang/c++/api/buffer/Buffer.hh
new file mode 100644
index 0000000..cf2ab79
--- /dev/null
+++ b/lang/c++/api/buffer/Buffer.hh
@@ -0,0 +1,525 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_Buffer_hh__
+#define avro_Buffer_hh__
+
+#ifndef _WIN32
+#include <sys/uio.h>
+#endif
+#include <boost/type_traits.hpp>
+#include <vector>
+
+#include "../Config.hh"
+#include "detail/BufferDetail.hh"
+#include "detail/BufferDetailIterator.hh"
+
+/** 
+ * \file Buffer.hh 
+ *
+ * \brief Definitions for InputBuffer and OutputBuffer classes
+ *
+ **/
+
+namespace avro {
+
+class OutputBuffer;
+class InputBuffer;
+
+
+/** 
+ * The OutputBuffer (write-only buffer)
+ *
+ * Use cases for OutputBuffer 
+ *
+ * - write message to buffer using ostream class or directly
+ * - append messages to headers
+ * - building up streams of messages via append
+ * - converting to read-only buffers for sending
+ * - extracting parts of the messages into read-only buffers
+ *   
+ * -# ASIO access:
+ *     - write to a buffer(s) by asio using iterator
+ *     - convert to read buffer for deserializing
+ *
+ * OutputBuffer is assignable and copy-constructible.  On copy or assignment,
+ * only a pointer is copied, so the two resulting copies are identical:
+ * modifying one will modify both.
+ **/
+
+class AVRO_DECL OutputBuffer 
+{
+
+  public:
+
+    typedef detail::size_type size_type;
+    typedef detail::data_type data_type;
+
+    /** 
+     * The asio library expects a const_iterator (the const-ness refers to the
+     * fact that the underlying list of buffers will not be modified, even
+     * though the data in those buffers is being modified).  The iterator
+     * provides the list of addresses an operation can write to.
+     **/
+
+    typedef detail::OutputBufferIterator const_iterator;
+
+    /**
+     * Default constructor.  Will pre-allocate at least the requested size, but
+     * can grow larger on demand.
+     *
+     * Destructor uses the default, which resets a shared pointer, deleting the
+     * underlying data if no other copies exist.
+     *
+     * Copy and assignment operators are not explicitly provided because the
+     * default ones work fine.  The default makes only a shallow copy, so the
+     * copies will refer to the same memory.  This is required by asio
+     * functions, which will implicitly make copies for asynchronous
+     * operations.  Therefore, the user must be careful that if they create
+     * multiple copies of the same OutputBuffer, only one is being modified;
+     * otherwise undefined behavior may occur.
+     *
+     **/
+
+    OutputBuffer(size_type reserveSize = 0) :
+        pimpl_(new detail::BufferImpl) 
+    { 
+        if(reserveSize) { 
+            reserve(reserveSize); 
+        }
+    }
+
+    /** 
+     * Reserve enough space for a wroteTo() operation.  When using writeTo(),
+     * the buffer will grow dynamically as needed.  But when using the iterator
+     * to write (followed by wroteTo()), data may only be written to the space
+     * available,  so this ensures there is enough room in the buffer before
+     * the write operation.
+     **/
+
+    void reserve(size_type reserveSize) 
+    {
+        pimpl_->reserveFreeSpace(reserveSize);
+    }
+
+    /** 
+     * Write a block of data to the buffer.  The buffer size will automatically
+     * grow if the size is larger than what is currently free.  
+     **/
+
+    size_type writeTo(const data_type *data, size_type size) {
+        return pimpl_->writeTo(data, size);
+    }
+
+    /** 
+     * Write a single value to the buffer. The buffer size will automatically
+     * grow if there is not room for the byte.  The value must be a
+     * "fundamental" type, e.g. int, float, etc.  (otherwise use the other
+     * writeTo tests).
+     **/
+
+    template<typename T>
+    void writeTo(T val) {
+        pimpl_->writeTo(val, boost::is_fundamental<T>());
+    }
+
+    /** 
+     * Update the state of the buffer after writing through the iterator
+     * interface.  This function exists primarily for the boost:asio which
+     * writes directly to the buffer using its iterator.  In this case, the
+     * internal state of the buffer does not reflect that the data was written
+     * This informs the buffer how much data was written.  
+     *
+     * The buffer does not automatically resize in this case; the bytes written
+     * cannot exceed the amount of free space.  Attempting to write more will
+     * throw a std::length_error exception.
+     **/
+
+    size_type wroteTo(size_type size) 
+    {
+        size_type wrote = 0;
+        if(size) {
+            if(size > freeSpace()) {
+                throw std::length_error("Impossible to write more data than free space");
+            }
+            wrote = pimpl_->wroteTo(size);
+        }
+        return wrote;
+    }
+
+    /**
+     * Does the buffer have any data? 
+     **/
+
+    bool empty() const {
+        return  (pimpl_->size()==0);
+    }
+
+    /** 
+     *  Returns the size of the buffer, in bytes. 
+     */
+
+    size_type size() const {
+        return  pimpl_->size();
+    }
+
+    /** 
+     * Returns the current free space that is available to write to in the
+     * buffer, in bytes.  This is not a strict limit in size, as writeTo() can
+     * automatically increase capacity if necessary.
+     **/
+
+    size_type freeSpace() const {
+        return  pimpl_->freeSpace();
+    }
+
+    /**
+     * Appends the data in the argument to the end of this buffer.  The
+     * argument can be either an InputBuffer or OutputBuffer.
+     *
+     **/
+
+    template <class BufferType>
+    void append(const BufferType &buf) {
+        // don't append an empty buffer
+        if(buf.size()) {
+            pimpl_->append(*(buf.pimpl_.get()));
+        }
+    }
+
+    /** 
+     * Return an iterator pointing to the first data chunk of this buffer
+     * that may be written to. 
+     **/
+
+    const_iterator begin() const {
+        return const_iterator(pimpl_->beginWrite());
+    }
+
+    /** 
+     * Return the end iterator for writing. 
+     **/
+
+    const_iterator end() const {
+        return const_iterator(pimpl_->endWrite());
+    }
+
+    /** 
+     * Discard any data in this buffer.
+     **/
+
+    void discardData()
+    {
+        pimpl_->discardData();
+    }
+
+    /** 
+     * Discard the specified number of bytes from this buffer, starting at the beginning.
+     * Throws if the size is greater than the number of bytes.
+     **/
+
+    void discardData(size_t bytes)
+    {
+        if(bytes > 0) {
+            if(bytes < pimpl_->size()) {
+                pimpl_->discardData(bytes);
+            }
+            else if(bytes == pimpl_->size()) {
+                pimpl_->discardData();
+            }
+            else {
+                throw std::out_of_range("trying to discard more data than exists");
+            }
+        }
+    }
+
+    /** 
+     * Remove bytes from this buffer, starting from the beginning, and place
+     * them into a new buffer.  Throws if the number of requested bytes exceeds
+     * the size of the buffer.  Any data beyond the extracted bytes, and any
+     * free space, remain in this buffer.
+     **/
+
+    InputBuffer extractData(size_type bytes);
+
+    /** 
+     * Remove all bytes from this buffer, returning them in a new buffer.
+     * After removing data, some freeSpace may remain in this buffer.
+     **/
+
+    InputBuffer extractData();
+
+    /** 
+     * Clone this buffer, creating a copy that contains the same data.
+     **/
+
+    OutputBuffer clone() const 
+    {
+        detail::BufferImpl::SharedPtr newImpl(new detail::BufferImpl(*pimpl_));
+        return OutputBuffer(newImpl);
+    }
+
+    /** 
+     * Add unmanaged data to the buffer.  The buffer will not automatically
+     * free the data, but it will call the supplied function when the data is
+     * no longer referenced by the buffer (or copies of the buffer).
+     **/
+
+    void appendForeignData(const data_type *data, size_type size, const detail::free_func &func) {
+        pimpl_->appendForeignData(data, size, func);
+    }
+
+    /** 
+     * Returns the number of chunks that contain free space.  
+     **/
+
+    int numChunks() const {
+        return  pimpl_->numFreeChunks();
+    }
+
+    /** 
+     * Returns the number of chunks that contain data
+     **/
+
+    int numDataChunks() const {
+        return  pimpl_->numDataChunks();
+    }
+
+  private:
+
+    friend class InputBuffer;
+    friend class BufferReader;
+
+    explicit OutputBuffer(const detail::BufferImpl::SharedPtr &pimpl) :
+        pimpl_(pimpl) 
+    { }
+
+    detail::BufferImpl::SharedPtr pimpl_; ///< Must never be null.
+};
+
+/** 
+ * The InputBuffer (read-only buffer)
+ *
+ * InputBuffer is an immutable buffer that may be constructed from an
+ * OutputBuffer, or by several of OutputBuffer's methods.  Once the data is
+ * transferred to an InputBuffer it cannot be modified, only read (via 
+ * BufferReader, istream, or its iterator).
+ *
+ * Assignments and copies are shallow copies.
+ *
+ * -# ASIO access: - iterate using const_iterator for sending messages
+ *
+ **/
+
+class AVRO_DECL InputBuffer 
+{
+
+  public:
+
+    typedef detail::size_type size_type;
+    typedef detail::data_type data_type;
+
+    // needed for asio
+    typedef detail::InputBufferIterator const_iterator;
+
+    /** 
+     * Default InputBuffer creates an empty buffer.
+     *
+     * Copy/assignment functions use the default ones.  They will do a shallow
+     * copy, and because InputBuffer is immutable, the copies will be
+     * identical.
+     *
+     * Destructor also uses the default, which resets a shared pointer,
+     * deleting the underlying data if no other copies exist.
+     **/
+
+    InputBuffer() :
+        pimpl_(new detail::BufferImpl)
+    { }
+
+    /** 
+     * Construct an InputBuffer that contains the contents of an OutputBuffer.
+     * The two buffers will have the same contents, but this copy will be
+     * immutable, while the OutputBuffer may still be written to.
+     *
+     * If you wish to move the data from the OutputBuffer to a new InputBuffer
+     * (leaving only free space in the OutputBuffer),
+     * OutputBuffer::extractData() will do this more efficiently.
+     *
+     * Implicit conversion is allowed.
+     **/
+
+    InputBuffer(const OutputBuffer &src) :
+        pimpl_(new detail::BufferImpl(*src.pimpl_))
+    { }
+
+    /** 
+     * Does the buffer have any data? 
+     **/
+
+    bool empty() const {
+        return (pimpl_->size() == 0);
+    }
+
+    /** 
+     * Returns the size of the buffer, in bytes. 
+     **/
+
+    size_type size() const {
+        return pimpl_->size();
+    }
+
+    /**
+     * Return an iterator pointing to the first data chunk of this buffer
+     * that contains data.
+     **/
+
+    const_iterator begin() const {
+        return const_iterator(pimpl_->beginRead());
+    }
+
+    /**
+     * Return the end iterator. 
+     **/
+
+    const_iterator end() const {
+        return const_iterator(pimpl_->endRead());
+    }
+
+    /** 
+     * Returns the number of chunks containing data.
+     **/
+
+    int numChunks() const {
+        return pimpl_->numDataChunks();
+    }
+
+
+  private:
+
+    friend class OutputBuffer; // for append function
+    friend class istreambuf;
+    friend class BufferReader;
+
+    explicit InputBuffer(const detail::BufferImpl::SharedPtr &pimpl) :
+        pimpl_(pimpl) 
+    { }
+
+    /**
+     * Class to indicate that a copy of an OutputBuffer to an InputBuffer
+     * should be a shallow copy, used to enable reading the contents of an
+     * OutputBuffer without needing to convert it to an InputBuffer via a deep
+     * copy.  It is private and only used by BufferReader and istreambuf
+     * classes.
+     *
+     * Writing to an OutputBuffer while it is being read may lead to undefined
+     * behavior.
+     **/
+
+    class ShallowCopy {};
+
+    /** 
+     * Make a shallow copy of an OutputBuffer in order to read it without 
+     * causing conversion overhead.
+     **/
+    InputBuffer(const OutputBuffer &src, const ShallowCopy &) : 
+        pimpl_(src.pimpl_)
+    { }
+
+    /** 
+     * Make a shallow copy of an InputBuffer.  The default copy constructor
+     * already provides shallow copy, this is just provided for generic
+     * algorithms that wish to treat InputBuffer and OutputBuffer in the same
+     * manner.
+     **/
+
+     InputBuffer(const InputBuffer &src, const ShallowCopy &) : 
+        pimpl_(src.pimpl_)
+    { }
+
+
+    detail::BufferImpl::ConstSharedPtr pimpl_; ///< Must never be null.
+};
+
+
+/* 
+ * Implementations of some OutputBuffer functions are inlined here
+ * because they require the definition of InputBuffer above.
+ */
+
+inline InputBuffer OutputBuffer::extractData() 
+{
+    detail::BufferImpl::SharedPtr newImpl(new detail::BufferImpl);
+    if(pimpl_->size()) {
+        pimpl_->extractData(*newImpl);
+    }
+    return InputBuffer(newImpl);
+}
+
+inline InputBuffer OutputBuffer::extractData(size_type bytes)
+{
+    if(bytes > pimpl_->size()) {
+        throw std::out_of_range("trying to extract more data than exists");
+    }
+
+    detail::BufferImpl::SharedPtr newImpl(new detail::BufferImpl);
+    if(bytes > 0) {
+        if(bytes < pimpl_->size()) {
+            pimpl_->extractData(*newImpl, bytes);
+        }
+        else {
+            pimpl_->extractData(*newImpl);
+        }
+    }
+
+    return InputBuffer(newImpl);
+}
+
+/** 
+ * Create an array of iovec structures from the buffer.  This utility is used
+ * to support writev and readv function calls.  The caller should ensure the
+ * buffer object is not deleted while using the iovec vector.
+ *
+ * If the BufferType is an InputBuffer, the iovec will point to the data that
+ * already exists in the buffer, for reading.
+ *
+ * If the BufferType is an OutputBuffer, the iovec will point to the free
+ * space, which may be written to.  Before writing, the caller should call
+ * OutputBuffer::reserve() to create enough room for the desired write (which
+ * can be verified by calling OutputBuffer::freeSpace()), and after writing,
+ * they MUST call OutputBuffer::wroteTo(), otherwise the buffer will not know
+ * the space is not free anymore.
+ *
+ **/
+
+template<class BufferType>
+inline void toIovec(BufferType &buf, std::vector<struct iovec> &iov) 
+{
+    const int chunks = buf.numChunks();
+    iov.resize(chunks);
+    typename BufferType::const_iterator iter = buf.begin();
+    for (int i = 0; i < chunks; ++i) {
+        iov[i].iov_base = const_cast<typename BufferType::data_type *>(iter->data());
+        iov[i].iov_len = iter->size();
+        ++iter;
+    }
+}
+
+} // namespace
+
+#endif
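[Annotation, not part of the patch: a short sketch tying the pieces above
together.]

    #include <stdint.h>
    #include "buffer/Buffer.hh"

    int main() {
        avro::OutputBuffer out;
        out.writeTo("abc", 3);      // block write; grows on demand
        int32_t n = 7;
        out.writeTo(n);             // fundamental-type overload
        avro::InputBuffer in = out.extractData(); // move data, keep free space
        // 'out' is now empty; 'in' holds the 7 bytes, read-only.
        return (in.size() == 3 + sizeof(n)) ? 0 : 1;
    }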
diff --git a/lang/c++/api/buffer/BufferPrint.hh b/lang/c++/api/buffer/BufferPrint.hh
new file mode 100644
index 0000000..7631027
--- /dev/null
+++ b/lang/c++/api/buffer/BufferPrint.hh
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_BufferPrint_hh__
+#define avro_BufferPrint_hh__
+
+#include <ctype.h>
+#include <iostream>
+#include <iomanip>
+#include "BufferReader.hh"
+
+/** 
+ * \file BufferPrint.hh
+ *
+ * \brief Convenience functions for printing buffer contents
+ **/
+
+namespace avro {
+
+namespace detail {
+
+/**
+ * \fn hexPrint
+ * 
+ * Prints a buffer to a stream in the canonical hex+ASCII format, 
+ * the same used by the program 'hexdump -C'
+ *
+ **/
+
+inline void
+hexPrint(std::ostream &os, BufferReader &reader)
+{
+    std::ios_base::fmtflags savedFlags = os.flags();
+
+    char sixteenBytes[16];
+    int offset = 0;
+
+    os << std::setfill('0');
+    os << std::hex;
+
+    while(reader.bytesRemaining()) {
+
+        os << std::setw(8) << offset << "  ";
+
+        size_t inBuffer = reader.read(sixteenBytes, sizeof(sixteenBytes));
+        offset += inBuffer;
+
+        // traverse 8 bytes or inBuffer, whatever is less
+        size_t cnt = std::min(inBuffer, static_cast<size_t>(8));
+
+        size_t i = 0;
+        for (; i < cnt; ++i) {
+            os << std::setw(2);
+            os << (static_cast<int>(sixteenBytes[i]) & 0xff) << ' ';
+        }
+        for (; i < 8; ++i) {
+            os << "   ";
+        }
+        os << ' ';
+
+        // traverse 16 bytes or inBuffer, whatever is less
+        cnt = std::min(inBuffer, static_cast<size_t>(16));
+
+        for (; i < cnt; ++i) {
+            os << std::setw(2);
+            os << (static_cast<int>(sixteenBytes[i]) & 0xff) << ' ';
+        }
+        for (; i < 16; ++i) {
+            os << "   ";
+        }
+        os << " |";
+        for(i = 0; i < inBuffer; ++i) {
+            os.put(isprint(sixteenBytes[i] & 0xff) ? sixteenBytes[i] : '.' );
+        }
+        os << "|\n";
+
+    }
+
+    // restore flags
+    os.flags( savedFlags);
+}
+
+} // namespace detail
+
+} // namespace
+
+inline
+std::ostream& operator<<(std::ostream& os, const avro::OutputBuffer& buffer)
+{
+    avro::BufferReader reader(buffer);
+    avro::detail::hexPrint(os, reader);
+    return os;
+}
+
+inline
+std::ostream& operator<<(std::ostream& os, const avro::InputBuffer& buffer)
+{
+    avro::BufferReader reader(buffer);
+    avro::detail::hexPrint(os, reader);
+    return os;
+}
+
+#endif 
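[Annotation, not part of the patch: dumping a buffer to stdout via the
inserters above.]

    #include <iostream>
    #include "buffer/BufferPrint.hh"

    int main() {
        avro::OutputBuffer buf;
        buf.writeTo("avro", 4);
        std::cout << buf; // hexdump -C style: 00000000  61 76 72 6f ... |avro|
        return 0;
    }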
diff --git a/lang/c++/api/buffer/BufferReader.hh b/lang/c++/api/buffer/BufferReader.hh
new file mode 100644
index 0000000..51af044
--- /dev/null
+++ b/lang/c++/api/buffer/BufferReader.hh
@@ -0,0 +1,288 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_BufferReader_hh__
+#define avro_BufferReader_hh__
+
+#include "Buffer.hh"
+
+#ifdef min
+#undef min
+#endif
+/** 
+ * \file BufferReader.hh 
+ *
+ * \brief Helper class for reading bytes from a buffer in a streaming manner,
+ * without the overhead of istreams.
+ *
+ **/
+
+namespace avro {
+
+/** 
+ * Helper class for reading bytes from a buffer without worrying about
+ * chunk boundaries.  May read from an InputBuffer or OutputBuffer.
+ *
+ **/
+class AVRO_DECL BufferReader : private boost::noncopyable 
+{
+
+  public:
+
+    typedef detail::data_type data_type;
+    typedef detail::size_type size_type;
+    
+  private:
+
+    size_type chunkRemaining() const {
+        return iter_->dataSize() - chunkPos_;
+    }
+
+    void incrementChunk(size_type howmuch) {
+        bytesRemaining_ -= howmuch;
+        chunkPos_ += howmuch;
+        if(chunkPos_ == iter_->dataSize()) {
+            chunkPos_ = 0;
+            ++iter_;
+        }
+    }
+
+    void rewind() {
+        iter_ = bufferImpl_->beginRead();
+        bytesRemaining_ = bytes_;
+        chunkPos_ = 0;
+    }
+
+    const data_type *addr() const {
+        return iter_->tellReadPos() + chunkPos_;
+    }
+
+  public:
+
+    BufferReader(const InputBuffer &buf) :
+        bufferImpl_(buf.pimpl_),
+        iter_(bufferImpl_->beginRead()),
+        bytes_(bufferImpl_->size()),
+        bytesRemaining_(bytes_),
+        chunkPos_(0)
+    { }
+
+    BufferReader(const OutputBuffer &buf) :
+        bufferImpl_(buf.pimpl_),
+        iter_(bufferImpl_->beginRead()),
+        bytes_(bufferImpl_->size()),
+        bytesRemaining_(bytes_),
+        chunkPos_(0)
+    { }
+
+    /** 
+     * How many bytes are still not read from this buffer.
+     **/
+
+    size_type bytesRemaining() const {
+        return bytesRemaining_;
+    }
+
+    /** 
+     * How many bytes have already been read from this buffer.
+     **/
+
+    size_type bytesRead() const {
+        return bytes_ - bytesRemaining_;
+    }
+
+    /** 
+     * Read a block of data from the buffer.
+     **/
+
+    size_type read(data_type *data, size_type size) { 
+
+        if(size > bytesRemaining_) {
+            size = bytesRemaining_;
+        }
+        size_type sizeToRead = size;
+
+        while(sizeToRead) {
+            const size_type toRead = std::min(sizeToRead, chunkRemaining());
+            memcpy(data, addr(), toRead);
+            sizeToRead -= toRead;
+            data += toRead;
+            incrementChunk(toRead);
+        }
+
+        return size;
+    }
+
+    /** 
+     * Read a block of data from the buffer.
+     **/
+
+    bool read(std::string &str, size_type size) { 
+        if(size > bytesRemaining_) {
+            return false;
+        }
+
+        if(size <= chunkRemaining()) {
+            fastStringRead(str, size);
+        }
+        else {
+            slowStringRead(str, size);
+        }
+
+        return true;
+    }
+
+
+    /** 
+     * Read a single value from the buffer.  The value must be a "fundamental"
+     * type, e.g. int, float, etc. (otherwise use the block read overloads).
+     *
+     **/
+
+    template<typename T>
+    bool read(T &val)  {
+        return read(val, boost::is_fundamental<T>());
+    }
+
+    /** 
+     * Skips a block of data from the buffer.
+     **/
+
+    bool skip(size_type bytes) {
+        bool skipped = false;
+        if(bytes <= bytesRemaining_) {
+            doSkip(bytes);
+            skipped = true;
+        }
+        return skipped;
+    }
+
+    /** 
+     * Seek to a position in the buffer.
+     **/
+
+    bool seek(size_type pos) {
+        if(pos > bytes_) {
+            return false;
+        }
+
+        size_type toSkip = pos;
+        size_type curPos = bytesRead();
+        // if the seek position is ahead, we can use skip to get there
+        if(pos >= curPos) {
+            toSkip -= curPos;
+        }
+        // if the seek position is ahead of the start of the chunk we can back up to
+        // start of the chunk
+        else if(pos >= (curPos - chunkPos_)) {
+            curPos -= chunkPos_;
+            bytesRemaining_ += chunkPos_;
+            chunkPos_ = 0;
+            toSkip -= curPos;
+        }
+        else {
+            rewind();
+        }
+        doSkip(toSkip);
+        return true;
+    }
+
+    bool peek(char &val) {
+        bool ret = (bytesRemaining_ > 0);
+        if(ret) {
+            val = *(addr());
+        }
+        return ret;
+    }
+
+    InputBuffer copyData(size_type bytes) {
+        if(bytes > bytesRemaining_) {
+            // force no copy
+            bytes = 0;
+        }
+        detail::BufferImpl::SharedPtr newImpl(new detail::BufferImpl);
+        if(bytes) {
+            bufferImpl_->copyData(*newImpl, iter_, chunkPos_, bytes);
+            doSkip(bytes);
+        }
+        return InputBuffer(newImpl);
+    }
+
+  private:
+
+    void doSkip(size_type sizeToSkip) {
+
+        while(sizeToSkip) {
+            const size_type toSkip = std::min(sizeToSkip, chunkRemaining());
+            sizeToSkip -= toSkip;
+            incrementChunk(toSkip);
+        }
+    }
+
+    template<typename T>
+    bool read(T &val, const boost::true_type&)
+    {
+        if(sizeof(T) > bytesRemaining_) {
+            return false;
+        }
+
+        if (sizeof(T) <= chunkRemaining()) {
+            val = *(reinterpret_cast<const T*> (addr()));
+            incrementChunk(sizeof(T));
+        }
+        else {
+            read(reinterpret_cast<data_type *>(&val), sizeof(T));
+        }
+        return true;
+    }
+
+    /// An uninstantiable function, selected when the boost::is_fundamental check fails
+    template<typename T>
+    bool read(T &val, const boost::false_type&)
+    {
+        BOOST_STATIC_ASSERT(sizeof(T)==0);
+        return false;
+    }
+
+    void fastStringRead(std::string &str, size_type sizeToCopy) {
+        str.assign(addr(), sizeToCopy);
+        incrementChunk(sizeToCopy);
+    }
+            
+    void slowStringRead(std::string &str, size_type sizeToCopy) {
+        str.clear();
+        str.reserve(sizeToCopy);
+        while(sizeToCopy) {
+            const size_type toCopy = std::min(sizeToCopy, chunkRemaining());
+            str.append(addr(), toCopy);
+            sizeToCopy -= toCopy;
+            incrementChunk(toCopy);
+        }
+    }
+
+    detail::BufferImpl::ConstSharedPtr bufferImpl_;
+    detail::BufferImpl::ChunkList::const_iterator iter_;
+    size_type bytes_;
+    size_type bytesRemaining_;
+    size_type chunkPos_;
+};
+
+
+} // namespace
+
+#endif
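[Annotation, not part of the patch: reading values back without istream
overhead.  Note that BufferReader can read from an OutputBuffer directly,
without first converting it to an InputBuffer.]

    #include <string>
    #include "buffer/BufferReader.hh"

    int main() {
        avro::OutputBuffer out;
        out.writeTo("abc", 3);
        avro::BufferReader reader(out);
        std::string s;
        bool ok = reader.read(s, 3);   // s == "abc"
        return ok ? 0 : 1;
    }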
diff --git a/lang/c++/api/buffer/BufferStream.hh b/lang/c++/api/buffer/BufferStream.hh
new file mode 100644
index 0000000..4987abb
--- /dev/null
+++ b/lang/c++/api/buffer/BufferStream.hh
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_BufferStream_hh__
+#define avro_BufferStream_hh__
+
+#include "BufferStreambuf.hh"
+
+/** 
+ * \file BufferStream.hh
+ *
+ * \brief Custom istream and ostream classes for use with buffers
+ **/
+
+namespace avro {
+
+/**
+ *
+ * \brief Custom ostream class for writing to an OutputBuffer
+ *
+ **/
+
+class AVRO_DECL ostream : public std::ostream {
+
+  public:
+
+    /// Default constructor, creates a new OutputBuffer.
+    ostream() : 
+        std::ostream(&obuf_) 
+    { }
+
+    /// Output to a specific buffer.
+    ostream(OutputBuffer &buf) : 
+        std::ostream(&obuf_),
+        obuf_(buf)
+    { }
+
+    /// Return the output buffer created by the write operations to this ostream.
+    const OutputBuffer &getBuffer() const {
+        return obuf_.getBuffer();
+    }
+
+  protected:
+
+    ostreambuf obuf_;
+};
+
+/** 
+ * \brief Custom istream class for reading from an InputBuffer.
+ *
+ * If the buffer contains binary data, then it is recommended to only use the
+ * read() and readsome() functions--get() or getline() may be confused if the
+ * binary data happens to contain an EOF character.
+ *
+ * For buffers containing text, the full implementation of istream is safe.
+ *
+ **/
+
+class AVRO_DECL istream : public std::istream {
+
+  public:
+
+    /// Constructor, requires an InputBuffer to read from.
+    explicit istream(const InputBuffer &buf) : 
+        std::istream(&ibuf_), ibuf_(buf)
+    { }
+
+    /// Constructor, takes an OutputBuffer to read from (by making a shallow copy to an InputBuffer).
+    /// Writing to the OutputBuffer while an istream is using it may lead to undefined behavior.
+    explicit istream(const OutputBuffer &buf) : 
+        std::istream(&ibuf_), ibuf_(buf)
+    { }
+
+    /// Return the InputBuffer this stream is reading from.
+    const InputBuffer &getBuffer() const {
+        return ibuf_.getBuffer();
+    }
+
+  protected:
+
+    istreambuf ibuf_;
+};
+
+} // namespace avro
+
+#endif 
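[Annotation, not part of the patch: the stream flavor of the same round trip.]

    #include <string>
    #include "buffer/BufferStream.hh"

    int main() {
        avro::ostream os;
        os << "count=" << 42;
        avro::istream is(os.getBuffer()); // shallow view; don't write meanwhile
        std::string token;
        is >> token;                      // token == "count=42"
        return 0;
    }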
diff --git a/lang/c++/api/buffer/BufferStreambuf.hh b/lang/c++/api/buffer/BufferStreambuf.hh
new file mode 100644
index 0000000..a34f071
--- /dev/null
+++ b/lang/c++/api/buffer/BufferStreambuf.hh
@@ -0,0 +1,255 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_BufferStreambuf_hh__
+#define avro_BufferStreambuf_hh__
+
+#include "Buffer.hh"
+
+/** \file BufferStreambuf.hh
+    \brief streambuf implementation for istream and ostream.
+*/
+
+#ifdef min
+#undef min
+#endif
+namespace avro {
+
+/**
+ * \brief Implementation of streambuf for use by the Buffer's ostream.
+ *
+ * This class derives from std::streambuf and implements the virtual functions
+ * needed to operate on OutputBuffer.  The overridden functions are overflow
+ * and xsputn.  Typically custom streambufs will also override sync for output,
+ * but we have no need since all writes are immediately stored in the buffer.
+ **/
+
+class AVRO_DECL ostreambuf : public std::streambuf {
+
+  public:
+
+    /// Default constructor creates a new OutputBuffer.
+    ostreambuf() : 
+        std::streambuf(),
+        buffer_()
+    { }
+
+    /// Construct using an existing OutputBuffer.
+    explicit ostreambuf(OutputBuffer &buffer) :
+        std::streambuf(),
+        buffer_( buffer )
+    { }
+
+    /// Return the buffer.
+    const OutputBuffer &getBuffer() const {
+        return buffer_;
+    }
+
+  protected:
+    
+    /// Write a single character to the stream.
+    virtual int_type overflow(int_type c) 
+    {
+        buffer_.writeTo(static_cast<OutputBuffer::data_type>(c));
+        return c;
+    }
+
+    /// Write a block of characters to the stream.
+    virtual std::streamsize xsputn(const char_type *s, std::streamsize n) 
+    {
+        return buffer_.writeTo(s, static_cast<size_t>(n));
+    }
+
+  private:
+
+    OutputBuffer buffer_;
+};
+
+/** 
+ * \brief Implementation of streambuf for use by the Buffer's istream.
+ *
+ * This class derives from std::streambuf and implements the virtual functions
+ * needed to operate on InputBuffer.  The overridden functions are underflow,
+ * xsgetn, seekoff, seekpos, and showmanyc.  This is considered a buffered
+ * streambuf,
+ * because it can access a chunk of the InputBuffer at a time, using the
+ * iterator interface.  Because the input is already buffered, uflow is not
+ * required.  pbackfail is not yet implemented but can be if necessary (the
+ * inherited behavior is to fail, and has yet to be a problem).
+ *
+ **/
+
+class AVRO_DECL istreambuf : public std::streambuf {
+
+  public:
+
+    /// Constructor, requires an InputBuffer to read from.
+    explicit istreambuf(const InputBuffer &buffer) :
+        std::streambuf(),
+        buffer_( buffer ),
+        basePos_(0),
+        iter_(buffer_.begin())
+    { 
+        setBuffer();
+    }
+
+    /// Constructor that converts an OutputBuffer to an InputBuffer via a shallow copy.
+    explicit istreambuf(const OutputBuffer &buffer) :
+        std::streambuf(),
+        buffer_( buffer, InputBuffer::ShallowCopy()),
+        basePos_(0),
+        iter_(buffer_.begin())
+    { 
+        setBuffer();
+    }
+
+    /// Return the buffer.
+    const InputBuffer &getBuffer() const {
+        return buffer_;
+    }
+
+  protected:
+
+    /// The current chunk of data is exhausted, read the next chunk.
+    virtual int_type underflow() {
+        if(iter_ != buffer_.end()) {
+            basePos_ += (egptr()-eback());
+            ++iter_;
+        }
+        return setBuffer();
+    }
+
+    /// Get a block of data from the stream.  Overrides default behavior
+    /// to ignore eof characters that may reside in the stream.
+    virtual std::streamsize xsgetn(char_type *c, std::streamsize len) 
+    {
+        std::streamsize bytesCopied = 0;
+
+        while (bytesCopied < len) {
+
+            size_t inBuffer = egptr() - gptr();
+
+            if (inBuffer) {
+                size_t remaining = static_cast<size_t>(len - bytesCopied);
+                size_t toCopy = std::min(inBuffer, remaining);
+                memcpy(c, gptr(), toCopy);
+                c += toCopy;
+                bytesCopied += toCopy;
+                gbump(toCopy);
+            }
+
+            if(bytesCopied < len) {
+                underflow();
+                if(iter_ == buffer_.end()) {
+                    break;
+                }
+            }
+        }
+
+        return bytesCopied;
+    }
+
+    /// Special seek override to navigate InputBuffer chunks.
+    virtual pos_type seekoff(off_type off, std::ios::seekdir dir, std::ios_base::openmode) {
+
+        off_type curpos = basePos_ + (gptr() - eback()); 
+        off_type newpos = off;
+
+        if(dir == std::ios::cur) {
+            newpos += curpos;
+        }
+        else if (dir == std::ios::end) {
+            newpos += buffer_.size();
+        }
+        // short circuit for tell()  
+        if(newpos == curpos) {
+            return curpos;
+        }
+
+        off_type endpos = basePos_ + (egptr() - eback());
+
+        // if the position is after our current buffer make
+        // sure it's not past the end of the buffer
+        if((newpos > endpos) && (newpos > static_cast<off_type>(buffer_.size()) )) {
+            return pos_type(-1);
+        }
+        // if the new position is before our current iterator
+        // reset the iterator to the beginning
+        else if (newpos < basePos_) {
+            iter_ = buffer_.begin();
+            basePos_ = 0;
+            setBuffer();
+            endpos = (egptr() -eback());
+        }
+
+        // now if the new position is after the end of the buffer
+        // increase the buffer until it is not
+        while (newpos > endpos) {
+            istreambuf::underflow();
+            endpos = basePos_ + (egptr() - eback()); 
+        }
+
+        setg(eback(), eback() + (newpos - basePos_), egptr());
+        return newpos;
+    }
+
+    /// Calls seekoff for its implementation.
+    virtual pos_type seekpos(pos_type pos, std::ios_base::openmode) {
+        return istreambuf::seekoff(pos, std::ios::beg, std::ios_base::openmode(0));
+    }
+    
+    /// Shows the number of bytes buffered in the current chunk, or in the
+    /// next chunk if the current one is exhausted.
+    virtual std::streamsize showmanyc() {
+
+        // this function only gets called when the current buffer has been
+        // completely read, verify this is the case, and if so, underflow to
+        // fetch the next buffer
+
+        if(egptr() - gptr() == 0) {
+            istreambuf::underflow();
+        }
+        return egptr() - gptr();
+    }
+
+  private:
+    
+    /// Setup the streambuf buffer pointers after updating
+    /// the value of the iterator.  Returns the first character
+    /// in the new buffer, or eof if there is no buffer.
+    int_type setBuffer() {
+        int_type ret = traits_type::eof();
+
+        if(iter_ != buffer_.end()) {
+            char *loc = const_cast <char *> (iter_->data()) ;
+            setg(loc, loc, loc + iter_->size());
+            ret = std::char_traits<char>::to_int_type(*gptr());
+        }
+        else {
+            setg(0,0,0);
+        }
+        return ret;
+    }
+
+    const InputBuffer buffer_;
+    off_type basePos_;
+    InputBuffer::const_iterator iter_;
+};
+
+} // namespace
+
+#endif 
diff --git a/lang/c++/api/buffer/detail/BufferDetail.hh b/lang/c++/api/buffer/detail/BufferDetail.hh
new file mode 100644
index 0000000..c481c07
--- /dev/null
+++ b/lang/c++/api/buffer/detail/BufferDetail.hh
@@ -0,0 +1,555 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_BufferDetail_hh__
+#define avro_BufferDetail_hh__
+
+#include <boost/shared_ptr.hpp>
+#include <boost/shared_array.hpp>
+#include <boost/static_assert.hpp>
+#include <boost/function.hpp>
+#include <boost/utility.hpp>
+#ifdef HAVE_BOOST_ASIO
+#include <boost/asio/buffer.hpp>
+#endif
+#include <exception>
+#include <cassert>
+#include <deque>
+
+/** 
+ * \file BufferDetail.hh
+ *
+ * \brief The implementation details for the Buffer class.
+ *
+ **/
+
+namespace avro {
+
+namespace detail {
+
+typedef char    data_type;
+typedef size_t  size_type;
+#ifdef HAVE_BOOST_ASIO
+typedef boost::asio::const_buffer   ConstAsioBuffer;
+typedef boost::asio::mutable_buffer MutableAsioBuffer;
+#endif
+
+/// The size in bytes for blocks backing buffer chunks. 
+const size_type kMinBlockSize = 4096;
+const size_type kMaxBlockSize = 16384;
+const size_type kDefaultBlockSize = kMinBlockSize;
+
+typedef boost::function<void(void)>  free_func;
+
+/**
+ * Simple class to hold a functor that executes on delete
+ **/
+class CallOnDestroy {
+  public:
+    CallOnDestroy(const free_func &func) : func_(func)
+    { }
+    ~CallOnDestroy() {
+        if (func_) {
+            func_();
+        }
+    }
+  private:
+    free_func func_;
+};
+
+/** 
+ * \brief A chunk is the building block for buffers.
+ *
+ * A chunk is backed by a memory block, and internally it maintains information
+ * about which area of the block it may use, and the portion of this area that
+ * contains valid data.  More than one chunk may share the same underlying
+ * block, but the areas should never overlap.  Chunk holds a shared pointer to
+ * an array of bytes so that shared blocks are reference counted.
+ *
+ * When a chunk is copied, the copy shares the same underlying buffer, but the
+ * copy receives its own copies of the start/cursor/end pointers, so each copy
+ * can be manipulated independently.  This allows different buffers to share
+ * the same non-overlapping parts of a chunk, or even overlapping parts of a
+ * chunk if the situation arises.
+ *
+ **/
+
+class Chunk 
+{
+
+  public:
+
+    typedef boost::shared_ptr<Chunk> SharedPtr;
+
+    /// Default constructor, allocates a new underlying block for this chunk.
+    Chunk(size_type size) :
+        underlyingBlock_(new data_type[size]), 
+        readPos_(underlyingBlock_.get()),
+        writePos_(readPos_),
+        endPos_(readPos_ + size)
+    { }
+
+    /// Foreign buffer constructor, uses the supplied data for this chunk, and
+    /// only for reading.
+    Chunk(const data_type *data, size_type size, const free_func &func) :
+        callOnDestroy_(new CallOnDestroy(func)),
+        readPos_(const_cast<data_type *>(data)),
+        writePos_(readPos_ + size),
+        endPos_(writePos_)
+    { }
+
+  private:
+    // reference counted object will call a functor when it's destroyed
+    boost::shared_ptr<CallOnDestroy> callOnDestroy_;
+
+  public:
+
+    /// Remove readable bytes from the front of the chunk by advancing the
+    /// chunk start position.
+    void truncateFront(size_type howMuch) {
+        readPos_ += howMuch;
+        assert(readPos_ <= writePos_);
+    }
+
+    /// Remove readable bytes from the back of the chunk by moving the
+    /// chunk cursor position.
+    void truncateBack(size_type howMuch) {
+        writePos_ -= howMuch;
+        assert(readPos_ <= writePos_);
+    }
+
+    /// Tell the position the next byte may be written to.
+    data_type *tellWritePos() const {
+        return writePos_;
+    }
+
+    /// Tell the position of the first byte containing valid data.
+    const data_type *tellReadPos() const {
+        return readPos_;
+    }
+
+    /// After a write operation, increment the write position.
+    void incrementCursor(size_type howMuch) {
+        writePos_  += howMuch; 
+        assert(writePos_ <= endPos_);
+    }
+
+    /// Tell how many bytes of data were written to this chunk.
+    size_type dataSize() const {
+        return (writePos_ - readPos_);
+    }
+
+    /// Tell how many bytes this chunk has available to write to.
+    size_type freeSize() const {
+        return (endPos_ - writePos_);
+    }
+
+    /// Tell how many bytes of data this chunk can hold (used and free).
+    size_type capacity() const {
+        return (endPos_ - readPos_);
+    }
+
+  private:
+
+    friend bool operator==(const Chunk &lhs, const Chunk &rhs);
+    friend bool operator!=(const Chunk &lhs, const Chunk &rhs);
+
+    // more than one buffer can share an underlying block, so use SharedPtr
+    boost::shared_array<data_type> underlyingBlock_;
+
+    data_type *readPos_;  ///< The first readable byte in the block 
+    data_type *writePos_; ///< The end of written data and start of free space
+    data_type *endPos_;   ///< Marks the end of the usable block area
+};
+
+/**
+ * Compare underlying buffers and return true if they are equal
+ **/
+inline bool operator==(const Chunk &lhs, const Chunk &rhs) {
+    return lhs.underlyingBlock_ == rhs.underlyingBlock_;
+}
+
+/**
+ * Compare underlying buffers and return true if they are unequal
+ **/
+inline bool operator!=(const Chunk &lhs, const Chunk &rhs) {
+    return lhs.underlyingBlock_ != rhs.underlyingBlock_;
+}
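+
+// Editor's sketch of the copy semantics described above, using only the
+// public Chunk interface declared in this file:
+//
+//     Chunk a(1024);          // allocates a fresh 1024-byte block
+//     a.incrementCursor(16);  // pretend 16 bytes were written
+//     Chunk b(a);             // b shares a's block via the shared_array
+//     assert(a == b);         // equality compares the underlying block
+//     b.truncateFront(8);     // narrows b's view; a's pointers are unchanged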
+
+
+/** 
+ * \brief Implementation details for Buffer class
+ *
+ * Internally, BufferImpl keeps two lists of chunks: one consisting entirely
+ * of chunks containing data, and one containing chunks with free space.
+ *
+ *
+ */
+
+class BufferImpl : boost::noncopyable
+{
+
+    /// Add a new chunk to the list of chunks for this buffer, growing the
+    /// buffer by the default block size.
+    void allocChunkChecked(size_type size = kDefaultBlockSize) 
+    {
+        writeChunks_.push_back(Chunk(size));
+        freeSpace_ += writeChunks_.back().freeSize();
+    }
+
+    /// Add a new chunk to the list of chunks for this buffer, growing the
+    /// buffer by the requested size, but within the range of a minimum and
+    /// maximum.
+    void allocChunk(size_type size) 
+    {
+        if(size < kMinBlockSize) {
+            size = kMinBlockSize;
+        }
+        else if (size > kMaxBlockSize) {
+            size = kMaxBlockSize;
+        }
+        allocChunkChecked(size);
+    }
+
+    /// Update the state of the chunks after a write operation.  This function
+    /// ensures the chunk states are consistent with the write.
+    void postWrite(size_type size) 
+    {
+
+        // precondition to this function is that writeChunks_.front()
+        // contains the data that was just written, so make sure writeChunks_
+        // is not empty:
+        
+        assert(size <= freeSpace_ && !writeChunks_.empty());
+
+        // This is probably the one tricky part of BufferImpl.  The data that
+        // was written now exists in writeChunks_.front().  Now we must make
+        // sure that same data exists in readChunks_.back().
+        //
+        // There are two cases:
+        //
+        // 1. readChunks_.back() and writeChunks_.front() refer to the same
+        // underlying block, in which case they both just need their cursors
+        // updated to reflect the new state.
+        //
+        // 2. readChunks_.back() is not the same block as writeChunks_.front(),
+        // in which case it should be, since writeChunks_.front() contains
+        // the next bit of data that will be appended to readChunks_, and
+        // therefore needs to be copied there so we can proceed with updating
+        // their state.
+        //
+
+        // if readChunks_.back() is not the same as writeChunks_.front(),
+        // push a copy of it there
+        
+        if(readChunks_.empty() || (readChunks_.back() != writeChunks_.front())) {
+            const Chunk &curChunk = writeChunks_.front();
+            readChunks_.push_back(curChunk);
+
+            // Any data that existed in the write chunk previously doesn't
+            // belong to this buffer (otherwise it would have already been
+            // added to the readChunk_ list).  Here, adjust the start of the
+            // readChunk to begin after any data already existing in curChunk
+            
+            readChunks_.back().truncateFront( curChunk.dataSize());
+        }
+
+        assert(readChunks_.back().freeSize() == writeChunks_.front().freeSize());
+
+        // update the states of both readChunks_ and writeChunks_ to indicate that they are
+        // holding the new data
+        
+        readChunks_.back().incrementCursor(size);
+        writeChunks_.front().incrementCursor(size);
+        size_ += size;
+        freeSpace_ -= size;
+
+        // if there is no more free space in writeChunks_.front(), the next
+        // write cannot use it, so dispose of it now
+        
+        if(writeChunks_.front().freeSize() == 0) {
+            writeChunks_.pop_front();
+        }
+    }
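+
+    // Editor's note: a concrete trace of case 2 above.  Starting from an
+    // empty buffer, allocChunkChecked() leaves one 4096-byte chunk in
+    // writeChunks_.  After 10 bytes are written, postWrite(10) copies that
+    // chunk into readChunks_, truncates the copy's front past any
+    // pre-existing data (none here), advances both cursors by 10, and
+    // leaves size_ == 10 and freeSpace_ == 4086.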
+
+  public:
+
+    typedef std::deque<Chunk> ChunkList;
+    typedef boost::shared_ptr<BufferImpl> SharedPtr;
+    typedef boost::shared_ptr<const BufferImpl> ConstSharedPtr;
+
+    /// Default constructor, creates a buffer without any chunks
+    BufferImpl() :
+        freeSpace_(0),
+        size_(0)
+    { }
+
+    /// Copy constructor, copies all the chunks containing data; chunks with
+    /// free space are not carried over to the copy.
+    explicit BufferImpl(const BufferImpl &src) :
+        readChunks_(src.readChunks_), 
+        freeSpace_(0),
+        size_(src.size_)
+    { }
+
+    /// Amount of data held in this buffer.
+    size_type size() const {
+        return size_;
+    }
+
+    /// Capacity that may be written before the buffer must allocate more memory.
+    size_type freeSpace() const {
+        return freeSpace_;
+    }
+
+    /// Add enough free chunks to make the reservation size available.
+    /// Actual amount may be more (rounded up to next chunk).
+    void reserveFreeSpace(size_type reserveSize) {
+        while(freeSpace_ < reserveSize) {
+            allocChunk(reserveSize - freeSpace_);
+        }
+    }
+
+    /// Return the chunk list's begin iterator for reading.
+    ChunkList::const_iterator beginRead() const {
+        return readChunks_.begin();
+    }
+    
+    /// Return the chunk list's end iterator for reading.
+    ChunkList::const_iterator endRead() const {
+        return readChunks_.end();
+    }
+
+    /// Return the chunk list's begin iterator for writing.
+    ChunkList::const_iterator beginWrite() const {
+        return writeChunks_.begin();
+    }
+    
+    /// Return the chunk list's end iterator for writing.
+    ChunkList::const_iterator endWrite() const {
+        return writeChunks_.end();
+    }
+
+    /// Write a single value to buffer, add a new chunk if necessary.
+    template<typename T>
+    void writeTo(T val, const boost::true_type&)
+    {
+        if(freeSpace_ && (sizeof(T) <= writeChunks_.front().freeSize())) {
+            // fast path, there's enough room in the writeable chunk to just
+            // straight out copy it
+            *(reinterpret_cast <T*> ( writeChunks_.front().tellWritePos()) ) = val;
+            postWrite(sizeof(T));
+        }
+        else {
+            // need to fixup chunks first, so use the regular memcpy 
+            // writeTo method
+            writeTo(reinterpret_cast<data_type*>(&val), sizeof(T));
+        }
+    }
+
+    /// An uninstantiable overload, selected when the boost::is_fundamental
+    /// check fails; it triggers a compile-time assertion.
+    template<typename T>
+    void writeTo(T val, const boost::false_type&) 
+    {
+        BOOST_STATIC_ASSERT(sizeof(T)==0);
+    }
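+
+    // Editor's note: callers select between the two overloads above with a
+    // boost type-trait tag, e.g. (a sketch, assuming a BufferImpl `impl`):
+    //
+    //     int32_t v = 42;
+    //     impl.writeTo(v, boost::is_fundamental<int32_t>());
+    //
+    // is_fundamental<T> derives from true_type for fundamental T, so the
+    // fast-path overload is chosen at compile time.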
+
+    /// Write a block of data to the buffer, adding new chunks if necessary.
+    size_type writeTo(const data_type *data, size_type size) 
+    {
+        size_type bytesLeft = size; 
+        while(bytesLeft) {
+
+            if(freeSpace_ == 0) {
+                allocChunkChecked();
+            }
+
+            Chunk &chunk = writeChunks_.front();
+            size_type toCopy = std::min<size_type>(chunk.freeSize(), bytesLeft);
+            assert(toCopy);
+            memcpy(chunk.tellWritePos(), data, toCopy);
+            postWrite(toCopy);
+            data      += toCopy; 
+            bytesLeft -= toCopy; 
+        }
+        return size;
+    }
+
+    /// Update internal status of chunks after data is written using iterator.
+    size_type wroteTo(size_type size) 
+    {
+        assert(size <= freeSpace_);
+        size_type bytesLeft = size;
+        while (bytesLeft) {
+
+            Chunk &chunk = writeChunks_.front();
+            size_type wrote = std::min<size_type>(chunk.freeSize(), bytesLeft);
+            assert(wrote);
+            postWrite(wrote);
+            bytesLeft -= wrote;
+        }
+        return size;
+    }
+
+    /// Append the chunks that have data in src to this buffer
+    void append(const BufferImpl &src) {
+        std::copy(src.readChunks_.begin(), src.readChunks_.end(), std::back_inserter(readChunks_));
+        size_ += src.size_;
+    }
+
+    /// Remove all the chunks that contain data from this buffer.
+    void discardData() {
+        readChunks_.clear();
+        size_ = 0;
+    }
+
+    /// Remove the specified amount of data from the chunks, starting at the front.
+    void discardData(size_type bytes)
+    {
+        assert(bytes && bytes <= size_);
+
+        size_type bytesToDiscard = bytes;
+        while( bytesToDiscard ) {
+          
+            size_t currentSize = readChunks_.front().dataSize();
+
+            // see if entire chunk is discarded
+            if(currentSize <= bytesToDiscard) {
+                readChunks_.pop_front();
+                bytesToDiscard -= currentSize;
+            }
+            else {
+                readChunks_.front().truncateFront(bytesToDiscard);
+                bytesToDiscard = 0;
+            }
+        }
+
+        size_ -= bytes;
+    }
+
+    /// Remove the specified amount of data from the chunks, moving the
+    /// data to dest's chunks
+    void extractData(BufferImpl &dest, size_type bytes)
+    {
+        assert(bytes && bytes <= size_);
+
+        size_type bytesToExtract = bytes;
+        while( bytesToExtract ) {
+          
+            size_t currentSize = readChunks_.front().dataSize();
+            dest.readChunks_.push_back(readChunks_.front());
+
+            // see if entire chunk was extracted 
+            if(currentSize <= bytesToExtract) {
+                readChunks_.pop_front();
+                bytesToExtract -= currentSize;
+            }
+            else {
+                readChunks_.front().truncateFront(bytesToExtract);
+                size_t excess = currentSize - bytesToExtract;
+                dest.readChunks_.back().truncateBack(excess);
+                bytesToExtract = 0;
+            }
+        }
+
+        size_ -= bytes;
+        dest.size_ += bytes;
+    }
+
+    /// Move data from this to the destination, leaving this buffer without data
+    void extractData(BufferImpl &dest) 
+    {
+        assert(dest.readChunks_.empty());
+        dest.readChunks_.swap(readChunks_);
+        dest.size_ = size_;
+        size_ = 0;
+    }
+
+    /// Copy data to a different buffer by copying the chunks.  It's
+    /// a bit like extract, but without modifying the source buffer.
+    void copyData(BufferImpl &dest, 
+                  ChunkList::const_iterator iter,
+                  size_type offset, 
+                  size_type bytes) const
+    {
+        // now we are positioned to start the copying, copy as many
+        // chunks as we need, the first chunk may have a non-zero offset 
+        // if the data to copy is not at the start of the chunk 
+        size_type copied = 0;
+        while(copied < bytes) {
+
+            dest.readChunks_.push_back(*iter);
+
+            // offset only applies in the first chunk, 
+            // all subsequent chunks are copied from the start
+            dest.readChunks_.back().truncateFront(offset);
+            offset = 0;
+
+            copied += dest.readChunks_.back().dataSize(); 
+            ++iter;
+        }
+
+        // if the last chunk copied has more bytes than we need, truncate it
+        size_type excess = copied - bytes;
+        dest.readChunks_.back().truncateBack(excess);
+
+        dest.size_ += bytes;
+    }
+
+    /// The number of chunks containing data.  Used for debugging.
+    int numDataChunks() const {
+        return readChunks_.size();
+    }
+
+    /// The number of chunks containing free space (note that an entire chunk
+    /// may not be free).  Used for debugging.
+    int numFreeChunks() const {
+        return writeChunks_.size();
+    }
+
+    /// Add unmanaged data to the buffer.  The buffer will not automatically
+    /// free the data, but it will call the supplied function when the data is
+    /// no longer referenced by the buffer (or copies of the buffer).
+    void appendForeignData(const data_type *data, size_type size, const free_func &func) {
+        readChunks_.push_back(Chunk(data, size, func));
+        size_ += size;
+    }
+
+  private:
+
+    /// Assignment not allowed
+    BufferImpl& operator=(const BufferImpl &src);
+    /* {
+        readChunks_.assign(src.readChunks_.begin(), src.readChunks_.end());
+        size_ = src.size();
+        return *this;
+    } */
+
+    ChunkList readChunks_;     ///< chunks of this buffer containing data
+    ChunkList writeChunks_;    ///< chunks of this buffer containing free space
+
+    size_type freeSpace_;  ///< capacity of buffer before allocation required
+    size_type size_;       ///< amount of data in buffer
+
+};
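+
+// Editor's sketch of the write path through BufferImpl, using only the
+// members defined above (real callers go through the public Buffer wrappers):
+//
+//     detail::BufferImpl impl;
+//     impl.reserveFreeSpace(100);      // rounds up to a kMinBlockSize chunk
+//     const char msg[] = "hello";
+//     impl.writeTo(msg, sizeof(msg));  // memcpy path; postWrite() syncs lists
+//     assert(impl.size() == sizeof(msg));
+//     impl.discardData(1);             // drop the leading byte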
+
+} // detail namespace
+
+} // namespace
+
+#endif
diff --git a/lang/c++/api/buffer/detail/BufferDetailIterator.hh b/lang/c++/api/buffer/detail/BufferDetailIterator.hh
new file mode 100644
index 0000000..ef4c997
--- /dev/null
+++ b/lang/c++/api/buffer/detail/BufferDetailIterator.hh
@@ -0,0 +1,230 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_BufferDetailIterator_hh__
+#define avro_BufferDetailIterator_hh__
+
+#include "BufferDetail.hh"
+
+/** 
+ * \file BufferDetailIterator.hh
+ *
+ * \brief The implementation details for the Buffer iterators.
+ **/
+
+namespace avro {
+
+namespace detail {
+
+/** 
+ * \brief Implements conversion from a chunk to asio::const_buffer
+ * 
+ * Iterators for an InputBuffer will iterate over the list of chunks, so
+ * internally they contain an iterator.  But the iterator needs to be
+ * convertible to an asio buffer for use in boost::asio functions.  This class
+ * wraps the iterator with a cast operator to do this conversion.
+ **/
+
+struct InputIteratorHelper
+{
+    /// Construct a helper with an unassigned iterator.
+    InputIteratorHelper() :
+        iter_()
+    {}
+
+    /// Construct a helper with an iterator.
+    InputIteratorHelper(const BufferImpl::ChunkList::const_iterator &iter) :
+        iter_(iter)
+    {}
+
+    /// The location of valid data in this chunk.
+    const data_type *data() const {
+        return iter_->tellReadPos();
+    }
+
+    /// The size of valid data in this chunk.
+    size_type size() const {
+        return iter_->dataSize();
+    }
+
+    /// Conversion operator.  It doesn't check for null, because the only
+    /// time the chunk should be null is when it's the iterator end(),
+    /// which should never be dereferenced anyway.
+#ifdef HAVE_BOOST_ASIO
+    operator ConstAsioBuffer() const {
+        return ConstAsioBuffer(data(), size());
+    }
+#endif
+
+    BufferImpl::ChunkList::const_iterator iter_; ///< the current iterator
+};
+
+/** 
+ * \brief Implements conversion from a chunk to asio::buffer 
+ * 
+ * Iterators for an OutputBuffer will iterate over the list of chunks, so
+ * internally they contain an iterator.  But the iterator needs to be
+ * convertible to an asio buffer for use in boost::asio functions.  This class
+ * wraps the iterator with a cast operator to do this conversion.
+ */
+
+struct OutputIteratorHelper
+{
+    /// Construct a helper with an unassigned iterator.
+    OutputIteratorHelper() :
+            iter_()
+    {}
+
+    /// Construct a helper with an iterator.
+    OutputIteratorHelper(const BufferImpl::ChunkList::const_iterator &iter) :
+            iter_(iter)
+    {}
+
+    /// The location of the first writable byte in this chunk.
+    data_type *data() const {
+        return iter_->tellWritePos();
+    }
+
+    /// The size of area that can be written in this chunk.
+    size_type size() const {
+        return iter_->freeSize();
+    }
+
+    /// Conversion operator.  It doesn't check for null, because the only
+    /// time the chunk should be null is when it's the iterator end(),
+    /// which should never be dereferenced anyway.
+#ifdef HAVE_BOOST_ASIO
+    operator MutableAsioBuffer() const {
+        return MutableAsioBuffer(data(), size());
+    }
+#endif
+
+    BufferImpl::ChunkList::const_iterator iter_; ///< the current iterator
+};
+
+/** 
+ * \brief Implements the iterator for Buffer, that iterates through the
+ * buffer's chunks.
+ **/
+
+template<typename Helper>
+class BufferIterator 
+{
+
+  public:
+
+    typedef BufferIterator<Helper> this_type;
+
+    /** 
+     * @name Typedefs
+     *
+     * STL iterators define the following declarations.  According to the
+     * boost::asio documentation, the library expects the iterator to be
+     * bidirectional; however, this implements only the forward iterator
+     * type.  So far this has not created any problems with asio, but that
+     * may change if future versions of asio require it.
+     **/
+
+    //@{
+    typedef std::forward_iterator_tag iterator_category; // this is a lie to appease asio
+    typedef Helper value_type;
+    typedef std::ptrdiff_t difference_type;
+    typedef value_type* pointer;
+    typedef value_type& reference;
+    //@}
+
+    /// Construct an uninitialized iterator.
+    BufferIterator() :
+        helper_()
+    { }
+
+    /* The default implementations are good here
+    /// Copy constructor.
+    BufferIterator(const BufferIterator &src) :
+        helper_(src.helper_)
+    { }
+    /// Assignment.
+    this_type& operator= (const this_type &rhs) {
+        helper_ = rhs.helper_;
+        return *this;
+    }
+    */
+
+    /// Construct iterator at the position in the buffer's chunk list.
+    explicit BufferIterator(BufferImpl::ChunkList::const_iterator iter) :
+        helper_(iter)
+    { }
+
+    /// Dereference iterator, returns InputIteratorHelper or OutputIteratorHelper wrapper.
+    reference operator *() {
+        return helper_;
+    }
+
+    /// Dereference iterator, returns const InputIteratorHelper or OutputIteratorHelper wrapper.
+    const value_type &operator *() const {
+        return helper_;
+    }
+
+    /// Dereference iterator, returns InputIteratorHelper or OutputIteratorHelper wrapper.
+    pointer operator->() {
+        return &helper_;
+    }
+
+    /// Dereference iterator, returns const InputIteratorHelper or OutputIteratorHelper wrapper.
+    const value_type *operator->() const {
+        return &helper_;
+    }
+    
+    /// Increment to next chunk in list, or to end() iterator.
+    this_type& operator++() 
+    {
+        ++helper_.iter_;
+        return *this;
+    }
+
+    /// Increment to next chunk in list, or to end() iterator.
+    this_type operator++(int)
+    {
+        this_type ret = *this;
+        ++helper_.iter_;
+        return ret;
+    }
+
+    /// True if iterators point to same chunks.
+    bool operator==(const this_type &rhs) const {
+        return (helper_.iter_ == rhs.helper_.iter_);
+    }
+
+    /// True if iterators point to different chunks.
+    bool operator!=(const this_type &rhs) const {
+        return (helper_.iter_ != rhs.helper_.iter_);
+    }
+
+  private:
+
+    Helper helper_;
+};
+
+typedef BufferIterator<InputIteratorHelper> InputBufferIterator;
+typedef BufferIterator<OutputIteratorHelper> OutputBufferIterator;
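+
+// Editor's sketch: walking a buffer chunk by chunk through the adapter
+// (`impl` and `process` are illustrative names, not part of this file):
+//
+//     InputBufferIterator it(impl.beginRead()), end(impl.endRead());
+//     for (; it != end; ++it) {
+//         process(it->data(), it->size());  // one contiguous chunk at a time
+//     }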
+
+} // detail namespace
+
+} // namespace
+
+#endif
diff --git a/lang/c++/build.sh b/lang/c++/build.sh
new file mode 100755
index 0000000..f0e4146
--- /dev/null
+++ b/lang/c++/build.sh
@@ -0,0 +1,108 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e # exit on error
+
+function usage {
+  echo "Usage: $0 {test|dist|clean}"
+  exit 1
+}
+
+if [ $# -eq 0 ]
+then
+  usage
+fi
+
+if [ -f VERSION.txt ]
+then
+VERSION=`cat VERSION.txt`
+else
+VERSION=`cat ../../share/VERSION.txt`
+fi
+
+BUILD_DIR=../../build
+AVRO_CPP=avro-cpp-$VERSION
+AVRO_DOC=avro-doc-$VERSION
+BUILD_CPP=$BUILD_DIR/$AVRO_CPP
+DOC_CPP=$BUILD_DIR/$AVRO_DOC/api/cpp
+DIST_DIR=../../dist/cpp
+TARFILE=../dist/cpp/$AVRO_CPP.tar.gz
+
+(mkdir -p build; cd build; cmake -G "Unix Makefiles" ..)
+for target in "$@"
+do
+
+function do_doc() {
+    doxygen
+    if [ -d doc ]
+    then
+        mkdir -p $DOC_CPP
+        cp -R doc/* $DOC_CPP
+    else
+        exit 1
+    fi
+}
+function do_dist() {
+    rm -rf $BUILD_CPP/
+    mkdir -p $BUILD_CPP
+    cp -r api AUTHORS build.sh CMakeLists.txt ChangeLog \
+        COPYING impl jsonschemas NEWS parser README scripts test examples \
+        $BUILD_CPP
+    find $BUILD_CPP -name '.svn' | xargs rm -rf
+    cp ../../share/VERSION.txt $BUILD_CPP
+    mkdir -p $DIST_DIR
+    (cd $BUILD_DIR; tar cvzf $TARFILE $AVRO_CPP && cp $TARFILE $AVRO_CPP )
+    if [ ! -f $DIST_DIR/$AVRO_CPP.tar.gz ]
+    then
+        exit 1
+    fi
+}
+
+case "$target" in
+    test)
+    (cd build && make && cd .. \
+        && ./build/buffertest \
+        && ./build/unittest \
+        && ./build/CodecTests \
+        && ./build/StreamTests \
+        && ./build/SpecificTests \
+        && ./build/AvrogencppTests \
+        && ./build/DataFileTests)
+	;;
+
+    dist)
+        do_dist
+        do_doc
+    ;;
+
+    clean)
+    (cd build && make clean)
+	;;
+
+    install)
+    (cd build && make install)
+    ;;
+
+    *)
+        usage
+esac
+
+done
+
+exit 0
diff --git a/lang/c++/config/.gitignore b/lang/c++/config/.gitignore
new file mode 100644
index 0000000..72e8ffc
--- /dev/null
+++ b/lang/c++/config/.gitignore
@@ -0,0 +1 @@
+*
diff --git a/lang/c++/examples/cpx.hh b/lang/c++/examples/cpx.hh
new file mode 100644
index 0000000..cb83aa6
--- /dev/null
+++ b/lang/c++/examples/cpx.hh
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef CPX_HH_1278398428__H_
+#define CPX_HH_1278398428__H_
+
+
+#include "avro/Specific.hh"
+#include "avro/Encoder.hh"
+#include "avro/Decoder.hh"
+
+namespace c {
+struct cpx {
+    double re;
+    double im;
+};
+
+}
+namespace avro {
+template<> struct codec_traits<c::cpx> {
+    static void encode(Encoder& e, const c::cpx& v) {
+        avro::encode(e, v.re);
+        avro::encode(e, v.im);
+    }
+    static void decode(Decoder& d, c::cpx& v) {
+        avro::decode(d, v.re);
+        avro::decode(d, v.im);
+    }
+};
+
+}
+#endif
diff --git a/lang/c++/examples/cpx.json b/lang/c++/examples/cpx.json
new file mode 100644
index 0000000..d20345c
--- /dev/null
+++ b/lang/c++/examples/cpx.json
@@ -0,0 +1,8 @@
+{
+    "type": "record", 
+    "name": "cpx",
+    "fields" : [
+        {"name": "re", "type": "double"},    
+        {"name": "im", "type" : "double"}
+    ]
+}
diff --git a/lang/c++/examples/custom.cc b/lang/c++/examples/custom.cc
new file mode 100644
index 0000000..cfdbeab
--- /dev/null
+++ b/lang/c++/examples/custom.cc
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <complex>
+#include <iostream>
+#include <memory>
+
+#include "avro/Encoder.hh"
+#include "avro/Decoder.hh"
+#include "avro/Specific.hh"
+
+namespace avro {
+template<typename T>
+struct codec_traits<std::complex<T> > {
+    static void encode(Encoder& e, const std::complex<T>& c) {
+        avro::encode(e, std::real(c));
+        avro::encode(e, std::imag(c));
+    }
+
+    static void decode(Decoder& d, std::complex<T>& c) {
+        T re, im;
+        avro::decode(d, re);
+        avro::decode(d, im);
+        c = std::complex<T>(re, im);
+    }
+};
+
+}
+int
+main()
+{
+    std::auto_ptr<avro::OutputStream> out = avro::memoryOutputStream();
+    avro::EncoderPtr e = avro::binaryEncoder();
+    e->init(*out);
+    std::complex<double> c1(1.0, 2.0);
+    avro::encode(*e, c1);
+
+    std::auto_ptr<avro::InputStream> in = avro::memoryInputStream(*out);
+    avro::DecoderPtr d = avro::binaryDecoder();
+    d->init(*in);
+
+    std::complex<double> c2;
+    avro::decode(*d, c2);
+    std::cout << '(' << std::real(c2) << ", " << std::imag(c2) << ')' << std::endl;
+    return 0;
+}
diff --git a/lang/c++/examples/datafile.cc b/lang/c++/examples/datafile.cc
new file mode 100644
index 0000000..2401015
--- /dev/null
+++ b/lang/c++/examples/datafile.cc
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+#include <iostream>
+
+#include "cpx.hh"
+#include "avro/Encoder.hh"
+#include "avro/Decoder.hh"
+#include "avro/ValidSchema.hh"
+#include "avro/Compiler.hh"
+#include "avro/DataFile.hh"
+
+
+avro::ValidSchema loadSchema(const char* filename)
+{
+    std::ifstream ifs(filename);
+    avro::ValidSchema result;
+    avro::compileJsonSchema(ifs, result);
+    return result;
+}
+
+int
+main()
+{
+    avro::ValidSchema cpxSchema = loadSchema("cpx.json");
+
+    {
+        avro::DataFileWriter<c::cpx> dfw("test.bin", cpxSchema);
+        c::cpx c1;
+        for (int i = 0; i < 100; i++) {
+            c1.re = i * 100;
+            c1.im = i + 100;
+            dfw.write(c1);
+        }
+        dfw.close();
+    }
+
+    {
+        avro::DataFileReader<c::cpx> dfr("test.bin", cpxSchema);
+        c::cpx c2;
+        while (dfr.read(c2)) {
+            std::cout << '(' << c2.re << ", " << c2.im << ')' << std::endl;
+        }
+    }
+    return 0;
+}
+
diff --git a/lang/c++/examples/generated.cc b/lang/c++/examples/generated.cc
new file mode 100644
index 0000000..ab93ad2
--- /dev/null
+++ b/lang/c++/examples/generated.cc
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "cpx.hh"
+#include "avro/Encoder.hh"
+#include "avro/Decoder.hh"
+
+
+int
+main()
+{
+    std::auto_ptr<avro::OutputStream> out = avro::memoryOutputStream();
+    avro::EncoderPtr e = avro::binaryEncoder();
+    e->init(*out);
+    c::cpx c1;
+    c1.re = 1.0;
+    c1.im = 2.13;
+    avro::encode(*e, c1);
+
+    std::auto_ptr<avro::InputStream> in = avro::memoryInputStream(*out);
+    avro::DecoderPtr d = avro::binaryDecoder();
+    d->init(*in);
+
+    c::cpx c2;
+    avro::decode(*d, c2);
+    std::cout << '(' << c2.re << ", " << c2.im << ')' << std::endl;
+    return 0;
+}
+
diff --git a/lang/c++/examples/generic.cc b/lang/c++/examples/generic.cc
new file mode 100644
index 0000000..12c171f
--- /dev/null
+++ b/lang/c++/examples/generic.cc
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+#include <complex>
+#include <iostream>
+#include <memory>
+
+#include "cpx.hh"
+
+#include "avro/Compiler.hh"
+#include "avro/Encoder.hh"
+#include "avro/Decoder.hh"
+#include "avro/Specific.hh"
+#include "avro/Generic.hh"
+
+int
+main()
+{
+    std::ifstream ifs("cpx.json");
+
+    avro::ValidSchema cpxSchema;
+    avro::compileJsonSchema(ifs, cpxSchema);
+
+    std::auto_ptr<avro::OutputStream> out = avro::memoryOutputStream();
+    avro::EncoderPtr e = avro::binaryEncoder();
+    e->init(*out);
+    c::cpx c1;
+    c1.re = 100.23;
+    c1.im = 105.77;
+    avro::encode(*e, c1);
+
+    std::auto_ptr<avro::InputStream> in = avro::memoryInputStream(*out);
+    avro::DecoderPtr d = avro::binaryDecoder();
+    d->init(*in);
+
+    avro::GenericDatum datum(cpxSchema);
+    avro::decode(*d, datum);
+    std::cout << "Type: " << datum.type() << std::endl;
+    if (datum.type() == avro::AVRO_RECORD) {
+        const avro::GenericRecord& r = datum.value<avro::GenericRecord>();
+        std::cout << "Field-count: " << r.fieldCount() << std::endl;
+        if (r.fieldCount() == 2) {
+            const avro::GenericDatum& f0 = r.fieldAt(0);
+            if (f0.type() == avro::AVRO_DOUBLE) {
+                std::cout << "Real: " << f0.value<double>() << std::endl;
+            }
+            const avro::GenericDatum& f1 = r.fieldAt(1);
+            if (f1.type() == avro::AVRO_DOUBLE) {
+                std::cout << "Imaginary: " << f1.value<double>() << std::endl;
+            }
+        }
+    }
+    return 0;
+}
diff --git a/lang/c++/examples/imaginary.hh b/lang/c++/examples/imaginary.hh
new file mode 100644
index 0000000..cfdcc70
--- /dev/null
+++ b/lang/c++/examples/imaginary.hh
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef IMAGINARY_HH_3460301992__H_
+#define IMAGINARY_HH_3460301992__H_
+
+
+#include "boost/any.hpp"
+#include "avro/Specific.hh"
+#include "avro/Encoder.hh"
+#include "avro/Decoder.hh"
+
+namespace i {
+struct cpx {
+    double im;
+};
+
+}
+namespace avro {
+template<> struct codec_traits<i::cpx> {
+    static void encode(Encoder& e, const i::cpx& v) {
+        avro::encode(e, v.im);
+    }
+    static void decode(Decoder& d, i::cpx& v) {
+        avro::decode(d, v.im);
+    }
+};
+
+}
+#endif
diff --git a/lang/c++/examples/imaginary.json b/lang/c++/examples/imaginary.json
new file mode 100644
index 0000000..ebaea51
--- /dev/null
+++ b/lang/c++/examples/imaginary.json
@@ -0,0 +1,7 @@
+{
+    "type": "record", 
+    "name": "cpx",
+    "fields" : [
+        {"name": "im", "type" : "double"}
+    ]
+}
diff --git a/lang/c++/examples/resolving.cc b/lang/c++/examples/resolving.cc
new file mode 100644
index 0000000..a35eb34
--- /dev/null
+++ b/lang/c++/examples/resolving.cc
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+#include <iostream>
+#include <memory>
+
+#include "cpx.hh"
+#include "imaginary.hh"
+
+#include "avro/Compiler.hh"
+#include "avro/Encoder.hh"
+#include "avro/Decoder.hh"
+#include "avro/Specific.hh"
+#include "avro/Generic.hh"
+
+
+
+avro::ValidSchema load(const char* filename)
+{
+    std::ifstream ifs(filename);
+    avro::ValidSchema result;
+    avro::compileJsonSchema(ifs, result);
+    return result;
+}
+
+int
+main()
+{
+    avro::ValidSchema cpxSchema = load("cpx.json");
+    avro::ValidSchema imaginarySchema = load("imaginary.json");
+
+    std::auto_ptr<avro::OutputStream> out = avro::memoryOutputStream();
+    avro::EncoderPtr e = avro::binaryEncoder();
+    e->init(*out);
+    c::cpx c1;
+    c1.re = 100.23;
+    c1.im = 105.77;
+    avro::encode(*e, c1);
+
+    std::auto_ptr<avro::InputStream> in = avro::memoryInputStream(*out);
+    avro::DecoderPtr d = avro::resolvingDecoder(cpxSchema, imaginarySchema,
+        avro::binaryDecoder());
+    d->init(*in);
+
+    i::cpx c2;
+    avro::decode(*d, c2);
+    std::cout << "Imaginary: " << c2.im << std::endl;
+
+}
diff --git a/lang/c++/examples/schemaload.cc b/lang/c++/examples/schemaload.cc
new file mode 100644
index 0000000..0557bf2
--- /dev/null
+++ b/lang/c++/examples/schemaload.cc
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+
+#include "avro/ValidSchema.hh"
+#include "avro/Compiler.hh"
+
+
+int
+main()
+{
+    std::ifstream in("cpx.json");
+
+    avro::ValidSchema cpxSchema;
+    avro::compileJsonSchema(in, cpxSchema);
+}
diff --git a/lang/c++/examples/validating.cc b/lang/c++/examples/validating.cc
new file mode 100644
index 0000000..b44555e
--- /dev/null
+++ b/lang/c++/examples/validating.cc
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+#include <complex>
+#include <iostream>
+#include <memory>
+
+#include "avro/Compiler.hh"
+#include "avro/Encoder.hh"
+#include "avro/Decoder.hh"
+#include "avro/Specific.hh"
+
+namespace avro {
+template<typename T>
+struct codec_traits<std::complex<T> > {
+    static void encode(Encoder& e, const std::complex<T>& c) {
+        avro::encode(e, std::real(c));
+        avro::encode(e, std::imag(c));
+    }
+
+    static void decode(Decoder& d, std::complex<T>& c) {
+        T re, im;
+        avro::decode(d, re);
+        avro::decode(d, im);
+        c = std::complex<T>(re, im);
+    }
+};
+
+}
+int
+main()
+{
+    std::ifstream ifs("cpx.json");
+
+    avro::ValidSchema cpxSchema;
+    avro::compileJsonSchema(ifs, cpxSchema);
+
+    std::auto_ptr<avro::OutputStream> out = avro::memoryOutputStream();
+    avro::EncoderPtr e = avro::validatingEncoder(cpxSchema,
+        avro::binaryEncoder());
+    e->init(*out);
+    std::complex<double> c1(1.0, 2.0);
+    avro::encode(*e, c1);
+
+    std::auto_ptr<avro::InputStream> in = avro::memoryInputStream(*out);
+    avro::DecoderPtr d = avro::validatingDecoder(cpxSchema,
+        avro::binaryDecoder());
+    d->init(*in);
+
+    std::complex<double> c2;
+    avro::decode(*d, c2);
+    std::cout << '(' << std::real(c2) << ", " << std::imag(c2) << ')' << std::endl;
+    return 0;
+}
diff --git a/lang/c++/impl/BinaryDecoder.cc b/lang/c++/impl/BinaryDecoder.cc
new file mode 100644
index 0000000..af71eac
--- /dev/null
+++ b/lang/c++/impl/BinaryDecoder.cc
@@ -0,0 +1,236 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define __STDC_LIMIT_MACROS
+
+#include "Decoder.hh"
+#include "Zigzag.hh"
+#include "Exception.hh"
+
+#include <boost/array.hpp>
+#include <boost/make_shared.hpp>
+
+namespace avro {
+
+using boost::make_shared;
+
+class BinaryDecoder : public Decoder {
+    StreamReader in_;
+    const uint8_t* next_;
+    const uint8_t* end_;
+
+    void init(InputStream& ib);
+    void decodeNull();
+    bool decodeBool();
+    int32_t decodeInt();
+    int64_t decodeLong();
+    float decodeFloat();
+    double decodeDouble();
+    void decodeString(std::string& value);
+    void skipString();
+    void decodeBytes(std::vector<uint8_t>& value);
+    void skipBytes();
+    void decodeFixed(size_t n, std::vector<uint8_t>& value);
+    void skipFixed(size_t n);
+    size_t decodeEnum();
+    size_t arrayStart();
+    size_t arrayNext();
+    size_t skipArray();
+    size_t mapStart();
+    size_t mapNext();
+    size_t skipMap();
+    size_t decodeUnionIndex();
+
+    int64_t doDecodeLong();
+    size_t doDecodeItemCount();
+    void more();
+};
+
+DecoderPtr binaryDecoder()
+{
+    return make_shared<BinaryDecoder>();
+}
+
+void BinaryDecoder::init(InputStream& is)
+{
+    in_.reset(is);
+}
+
+void BinaryDecoder::decodeNull()
+{
+}
+
+bool BinaryDecoder::decodeBool()
+{
+    uint8_t v = in_.read();
+    if (v == 0) {
+        return false;
+    } else if (v == 1) {
+        return true;
+    }
+    throw Exception("Invalid value for bool");
+}
+
+int32_t BinaryDecoder::decodeInt()
+{
+    int64_t val = doDecodeLong();
+    if (val < INT32_MIN || val > INT32_MAX) {
+        throw Exception(
+            boost::format("Value out of range for Avro int: %1%") % val);
+    }
+    return static_cast<int32_t>(val);
+}
+
+int64_t BinaryDecoder::decodeLong()
+{
+    return doDecodeLong();
+}
+
+float BinaryDecoder::decodeFloat()
+{
+    float result;
+    in_.readBytes(reinterpret_cast<uint8_t *>(&result), sizeof(float));
+    return result;
+}
+
+double BinaryDecoder::decodeDouble()
+{
+    double result;
+    in_.readBytes(reinterpret_cast<uint8_t *>(&result), sizeof(double));
+    return result;
+}
+
+void BinaryDecoder::decodeString(std::string& value)
+{
+    size_t len = decodeInt();
+    value.resize(len);
+    if (len > 0) {
+        in_.readBytes(reinterpret_cast<uint8_t*>(&value[0]), len);
+    }
+}
+
+void BinaryDecoder::skipString()
+{
+    size_t len = decodeInt();
+    in_.skipBytes(len);
+}
+
+void BinaryDecoder::decodeBytes(std::vector<uint8_t>& value)
+{
+    size_t len = decodeInt();
+    value.resize(len);
+    if (len > 0) {
+        in_.readBytes(&value[0], len);
+    }
+}
+
+void BinaryDecoder::skipBytes()
+{
+    size_t len = decodeInt();
+    in_.skipBytes(len);
+}
+
+void BinaryDecoder::decodeFixed(size_t n, std::vector<uint8_t>& value)
+{
+    value.resize(n);
+    if (n > 0) {
+        in_.readBytes(&value[0], n);
+    }
+}
+
+void BinaryDecoder::skipFixed(size_t n)
+{
+    in_.skipBytes(n);
+}
+
+size_t BinaryDecoder::decodeEnum()
+{
+    return static_cast<size_t>(doDecodeLong());
+}
+
+size_t BinaryDecoder::arrayStart()
+{
+    return doDecodeItemCount();
+}
+
+size_t BinaryDecoder::doDecodeItemCount()
+{
+    int64_t result = doDecodeLong();
+    if (result < 0) {
+        doDecodeLong();
+        return static_cast<size_t>(-result);
+    }
+    return static_cast<size_t>(result);
+}
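+
+// Editor's note: per the Avro spec, a negative block count is followed by
+// the block's size in bytes.  doDecodeItemCount() above reads and discards
+// that size, while skipArray() below uses it to skip whole blocks without
+// decoding individual items.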
+
+size_t BinaryDecoder::arrayNext()
+{
+    return static_cast<size_t>(doDecodeLong());
+}
+
+size_t BinaryDecoder::skipArray()
+{
+    for (; ;) {
+        int64_t r = doDecodeLong();
+        if (r < 0) {
+            size_t n = static_cast<size_t>(doDecodeLong()); 
+            in_.skipBytes(n);
+        } else {
+            return static_cast<size_t>(r);
+        }
+    }
+}
+
+size_t BinaryDecoder::mapStart()
+{
+    return doDecodeItemCount();
+}
+
+size_t BinaryDecoder::mapNext()
+{
+    return doDecodeItemCount();
+}
+
+size_t BinaryDecoder::skipMap()
+{
+    return skipArray();
+}
+
+size_t BinaryDecoder::decodeUnionIndex()
+{
+    return static_cast<size_t>(doDecodeLong());
+}
+
+int64_t BinaryDecoder::doDecodeLong() {
+    uint64_t encoded = 0;
+    int shift = 0;
+    uint8_t u;
+    do {
+        if (shift >= 64) {
+            throw Exception("Invalid Avro varint");
+        }
+        u = in_.read();
+        encoded |= static_cast<uint64_t>(u & 0x7f) << shift;
+        shift += 7;
+    } while (u & 0x80);
+
+    return decodeZigzag64(encoded);
+}
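+
+// Editor's note: a worked example of the decoding above.  The single byte
+// 0x04 has no continuation bit, so encoded == 4 and
+// decodeZigzag64(4) == (4 >> 1) ^ -(4 & 1) == 2.  The byte 0x03 gives
+// encoded == 3, which unzigzags to -2.  The two bytes {0x96, 0x01} give
+// encoded == 0x16 | (0x01 << 7) == 150, which unzigzags to 75.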
+
+}   // namespace avro
+
diff --git a/lang/c++/impl/BinaryEncoder.cc b/lang/c++/impl/BinaryEncoder.cc
new file mode 100644
index 0000000..dcb56af
--- /dev/null
+++ b/lang/c++/impl/BinaryEncoder.cc
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Encoder.hh"
+#include "Zigzag.hh"
+#include <boost/array.hpp>
+#include <boost/make_shared.hpp>
+
+namespace avro {
+
+using boost::make_shared;
+using boost::shared_ptr;
+
+class BinaryEncoder : public Encoder {
+    StreamWriter out_;
+
+    void init(OutputStream& os);
+    void flush();
+    void encodeNull();
+    void encodeBool(bool b);
+    void encodeInt(int32_t i);
+    void encodeLong(int64_t l);
+    void encodeFloat(float f);
+    void encodeDouble(double d);
+    void encodeString(const std::string& s);
+    void encodeBytes(const uint8_t *bytes, size_t len);
+    void encodeFixed(const uint8_t *bytes, size_t len);
+    void encodeEnum(size_t e);
+    void arrayStart();
+    void arrayEnd();
+    void mapStart();
+    void mapEnd();
+    void setItemCount(size_t count);
+    void startItem();
+    void encodeUnionIndex(size_t e);
+
+    void doEncodeLong(int64_t l);
+};
+
+EncoderPtr binaryEncoder()
+{
+    return make_shared<BinaryEncoder>();
+}
+
+void BinaryEncoder::init(OutputStream& os)
+{
+    out_.reset(os);
+}
+
+void BinaryEncoder::flush()
+{
+    out_.flush();
+}
+
+void BinaryEncoder::encodeNull()
+{
+}
+
+void BinaryEncoder::encodeBool(bool b)
+{
+    out_.write(b ? 1 : 0);
+}
+
+void BinaryEncoder::encodeInt(int32_t i)
+{
+    doEncodeLong(i);
+}
+
+void BinaryEncoder::encodeLong(int64_t l)
+{
+    doEncodeLong(l);
+}
+
+void BinaryEncoder::encodeFloat(float f)
+{
+    const uint8_t* p = reinterpret_cast<const uint8_t*>(&f);
+    out_.writeBytes(p, sizeof(float));
+}
+
+void BinaryEncoder::encodeDouble(double d)
+{
+    const uint8_t* p = reinterpret_cast<const uint8_t*>(&d);
+    out_.writeBytes(p, sizeof(double));
+}
+
+void BinaryEncoder::encodeString(const std::string& s)
+{
+    doEncodeLong(s.size());
+    out_.writeBytes(reinterpret_cast<const uint8_t*>(s.c_str()), s.size());
+}
+
+void BinaryEncoder::encodeBytes(const uint8_t *bytes, size_t len)
+{
+    doEncodeLong(len);
+    out_.writeBytes(bytes, len);
+}
+
+void BinaryEncoder::encodeFixed(const uint8_t *bytes, size_t len)
+{
+    out_.writeBytes(bytes, len);
+}
+
+void BinaryEncoder::encodeEnum(size_t e)
+{
+    doEncodeLong(e);
+}
+
+void BinaryEncoder::arrayStart()
+{
+}
+
+void BinaryEncoder::arrayEnd()
+{
+    doEncodeLong(0);
+}
+
+void BinaryEncoder::mapStart()
+{
+}
+
+void BinaryEncoder::mapEnd()
+{
+    doEncodeLong(0);
+}
+
+void BinaryEncoder::setItemCount(size_t count)
+{
+    if (count == 0) {
+        throw Exception("Count cannot be zero");
+    }
+    doEncodeLong(count);
+}
+
+void BinaryEncoder::startItem()
+{
+}
+
+void BinaryEncoder::encodeUnionIndex(size_t e)
+{
+    doEncodeLong(e);
+}
+
+void BinaryEncoder::doEncodeLong(int64_t l)
+{
+    boost::array<uint8_t, 10> bytes;
+    size_t size = encodeInt64(l, bytes);
+    out_.writeBytes(bytes.data(), size);
+}
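+
+// Editor's note: encodeInt64() (declared in Zigzag.hh) is the inverse of the
+// decoder's varint logic: it zigzag-maps the value and emits seven bits per
+// byte, least-significant group first, setting the high bit on every byte
+// except the last.  For example, -2 zigzags to 3 and is written as the
+// single byte 0x03, while 75 zigzags to 150 and is written as {0x96, 0x01}.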
+}   // namespace avro
diff --git a/lang/c++/impl/Compiler.cc b/lang/c++/impl/Compiler.cc
new file mode 100644
index 0000000..01bbde2
--- /dev/null
+++ b/lang/c++/impl/Compiler.cc
@@ -0,0 +1,531 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <sstream>
+
+#include "Compiler.hh"
+#include "Types.hh"
+#include "Schema.hh"
+#include "ValidSchema.hh"
+#include "Stream.hh"
+
+#include "json/JsonDom.hh"
+
+using std::string;
+using std::map;
+using std::vector;
+using std::pair;
+using std::make_pair;
+
+namespace avro {
+using json::Entity;
+using json::Object;
+using json::Array;
+using json::EntityType;
+
+typedef map<Name, NodePtr> SymbolTable;
+
+
+// #define DEBUG_VERBOSE
+
+static NodePtr makePrimitive(const std::string& t)
+{
+    if (t == "null") {
+        return NodePtr(new NodePrimitive(AVRO_NULL));
+    } else if (t == "boolean") {
+        return NodePtr(new NodePrimitive(AVRO_BOOL));
+    } else if (t == "int") {
+        return NodePtr(new NodePrimitive(AVRO_INT));
+    } else if (t == "long") {
+        return NodePtr(new NodePrimitive(AVRO_LONG));
+    } else if (t == "float") {
+        return NodePtr(new NodePrimitive(AVRO_FLOAT));
+    } else if (t == "double") {
+        return NodePtr(new NodePrimitive(AVRO_DOUBLE));
+    } else if (t == "string") {
+        return NodePtr(new NodePrimitive(AVRO_STRING));
+    } else if (t == "bytes") {
+        return NodePtr(new NodePrimitive(AVRO_BYTES));
+    } else {
+        return NodePtr();
+    }
+}
+
+static NodePtr makeNode(const json::Entity& e, SymbolTable& st, const string& ns);
+
+template <typename T>
+concepts::SingleAttribute<T> asSingleAttribute(const T& t)
+{
+    concepts::SingleAttribute<T> n;
+    n.add(t);
+    return n;
+}
+
+static bool isFullName(const string& s)
+{
+    return s.find('.') != string::npos;
+}
+    
+static Name getName(const string& name, const string& ns)
+{
+    return (isFullName(name)) ? Name(name) : Name(name, ns);
+}
+
+static NodePtr makeNode(const std::string& t, SymbolTable& st, const string& ns)
+{
+    NodePtr result = makePrimitive(t);
+    if (result) {
+        return result;
+    }
+    Name n = getName(t, ns);
+
+    SymbolTable::const_iterator it = st.find(n);
+    if (it != st.end()) {
+        return NodePtr(new NodeSymbolic(asSingleAttribute(n), it->second));
+    }
+    throw Exception(boost::format("Unknown type: %1%") % n.fullname());
+}
+
+json::Object::const_iterator findField(const Entity& e,
+    const Object& m, const string& fieldName)
+{
+    Object::const_iterator it = m.find(fieldName);
+    if (it == m.end()) {
+        throw Exception(boost::format("Missing Json field \"%1%\": %2%") %
+            fieldName % e.toString());
+    } else {
+        return it;
+    }
+}
+
+template <typename T> void ensureType(const Entity& e, const string& name)
+{
+    if (e.type() != json::type_traits<T>::type()) {
+        throw Exception(boost::format("Json field \"%1%\" is not a %2%: %3%") %
+            name % json::type_traits<T>::name() % e.toString());
+    }
+}
+
+const string& getStringField(const Entity& e, const Object& m,
+                             const string& fieldName)
+{
+    Object::const_iterator it = findField(e, m, fieldName);
+    ensureType<string>(it->second, fieldName);
+    return it->second.stringValue();
+}
+
+const Array& getArrayField(const Entity& e, const Object& m,
+                           const string& fieldName)
+{
+    Object::const_iterator it = findField(e, m, fieldName);
+    ensureType<Array >(it->second, fieldName);
+    return it->second.arrayValue();
+}
+
+int64_t getLongField(const Entity& e, const Object& m,
+                     const string& fieldName)
+{
+    Object::const_iterator it = findField(e, m, fieldName);
+    ensureType<int64_t>(it->second, fieldName);
+    return it->second.longValue();
+}
+    
+struct Field {
+    const string& name;
+    const NodePtr schema;
+    const GenericDatum defaultValue;
+    Field(const string& n, const NodePtr& v, GenericDatum dv) :
+        name(n), schema(v), defaultValue(dv) { }
+};
+
+static void assertType(const Entity& e, EntityType et)
+{
+    if (e.type() != et) {
+        throw Exception(boost::format("Unexpected type for default value: "
+            "Expected %1%, but found %2%") % et % e.type());
+    }
+}
+
+static vector<uint8_t> toBin(const std::string& s)
+{
+    // Range-construct instead of resize-and-copy; &result[0] on an empty
+    // vector would be undefined behavior for an empty string.
+    return vector<uint8_t>(s.begin(), s.end());
+}
+
+static string nameof(const NodePtr& n)
+{
+    Type t = n->type();
+    switch (t) {
+    case AVRO_STRING:
+        return "string";
+    case AVRO_BYTES:
+        return "bytes";
+    case AVRO_INT:
+        return "int";
+    case AVRO_LONG:
+        return "long";
+    case AVRO_FLOAT:
+        return "float";
+    case AVRO_DOUBLE:
+        return "double";
+    case AVRO_BOOL:
+        return "boolean";
+    case AVRO_NULL:
+        return "null";
+    case AVRO_RECORD:
+    case AVRO_ENUM:
+    case AVRO_FIXED:
+    case AVRO_SYMBOLIC:
+        return n->name().fullname();
+    case AVRO_ARRAY:
+        return "array";
+    case AVRO_MAP:
+        return "map";
+    case AVRO_UNION:
+        return "union";
+    default:
+        throw Exception(boost::format("Unknown type: %1%") % t);
+    }
+}
+
+static GenericDatum makeGenericDatum(NodePtr n, const Entity& e,
+    const SymbolTable& st)
+{
+    Type t = n->type();
+    if (t == AVRO_SYMBOLIC) {
+        n = st.find(n->name())->second;
+        t = n->type();
+    }
+    switch (t) {
+    case AVRO_STRING:
+        assertType(e, json::etString);
+        return GenericDatum(e.stringValue());
+    case AVRO_BYTES:
+        assertType(e, json::etString);
+        return GenericDatum(toBin(e.stringValue()));
+    case AVRO_INT:
+        assertType(e, json::etLong);
+        return GenericDatum(static_cast<int32_t>(e.longValue()));
+    case AVRO_LONG:
+        assertType(e, json::etLong);
+        return GenericDatum(e.longValue());
+    case AVRO_FLOAT:
+        assertType(e, json::etDouble);
+        return GenericDatum(static_cast<float>(e.doubleValue()));
+    case AVRO_DOUBLE:
+        assertType(e, json::etDouble);
+        return GenericDatum(e.doubleValue());
+    case AVRO_BOOL:
+        assertType(e, json::etBool);
+        return GenericDatum(e.boolValue());
+    case AVRO_NULL:
+        assertType(e, json::etNull);
+        return GenericDatum();
+    case AVRO_RECORD:
+    {
+        assertType(e, json::etObject);
+        GenericRecord result(n);
+        const map<string, Entity>& v = e.objectValue();
+        for (size_t i = 0; i < n->leaves(); ++i) {
+            map<string, Entity>::const_iterator it = v.find(n->nameAt(i));
+            if (it == v.end()) {
+                throw Exception(boost::format(
+                    "No value found in default for %1%") % n->nameAt(i));
+            }
+            result.setFieldAt(i,
+                makeGenericDatum(n->leafAt(i), it->second, st));
+        }
+        return GenericDatum(n, result);
+    }
+    case AVRO_ENUM:
+        assertType(e, json::etString);
+        return GenericDatum(n, GenericEnum(n, e.stringValue()));
+    case AVRO_ARRAY:
+    {
+        assertType(e, json::etArray);
+        GenericArray result(n);
+        const vector<Entity>& elements = e.arrayValue();
+        for (vector<Entity>::const_iterator it = elements.begin();
+            it != elements.end(); ++it) {
+            result.value().push_back(makeGenericDatum(n->leafAt(0), *it, st));
+        }
+        return GenericDatum(n, result);
+    }
+    case AVRO_MAP:
+    {
+        assertType(e, json::etObject);
+        GenericMap result(n);
+        const map<string, Entity>& v = e.objectValue();
+        for (map<string, Entity>::const_iterator it = v.begin();
+            it != v.end(); ++it) {
+            result.value().push_back(make_pair(it->first,
+                makeGenericDatum(n->leafAt(1), it->second, st)));
+        }
+        return GenericDatum(n, result);
+    }
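+    // A union default is JSON-encoded either as null or as a single-entry
+    // object {"branchName": value}; find and select the matching branch.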
+    case AVRO_UNION:
+    {
+        GenericUnion result(n);
+        string name;
+        Entity e2;
+        if (e.type() == json::etNull) {
+            name = "null";
+            e2 = e;
+        } else {
+            assertType(e, json::etObject);
+            const map<string, Entity>& v = e.objectValue();
+            if (v.size() != 1) {
+                throw Exception(boost::format("Default value for "
+                    "union has more than one field: %1%") % e.toString());
+            }
+            map<string, Entity>::const_iterator it = v.begin();
+            name = it->first;
+            e2 = it->second;
+        }
+        for (size_t i = 0; i < n->leaves(); ++i) {
+            const NodePtr& b = n->leafAt(i);
+            if (nameof(b) == name) {
+                result.selectBranch(i);
+                result.datum() = makeGenericDatum(b, e2, st);
+                return GenericDatum(n, result);
+            }
+        }
+        throw Exception(boost::format("Invalid default value %1%") %
+            e.toString());
+    }
+    case AVRO_FIXED:
+        assertType(e, json::etString);
+        return GenericDatum(n, GenericFixed(n, toBin(e.stringValue())));
+    default:
+        throw Exception(boost::format("Unknown type: %1%") % t);
+    }
+    return GenericDatum();
+}
+
+
+static Field makeField(const Entity& e, SymbolTable& st, const string& ns)
+{
+    const Object& m = e.objectValue();
+    const string& n = getStringField(e, m, "name");
+    Object::const_iterator it = findField(e, m, "type");
+    map<string, Entity>::const_iterator it2 = m.find("default");
+    NodePtr node = makeNode(it->second, st, ns);
+    GenericDatum d = (it2 == m.end()) ? GenericDatum() :
+        makeGenericDatum(node, it2->second, st);
+    return Field(n, node, d);
+}
+
+static NodePtr makeRecordNode(const Entity& e,
+    const Name& name, const Object& m, SymbolTable& st, const string& ns)
+{        
+    const Array& v = getArrayField(e, m, "fields");
+    concepts::MultiAttribute<string> fieldNames;
+    concepts::MultiAttribute<NodePtr> fieldValues;
+    vector<GenericDatum> defaultValues;
+    
+    for (Array::const_iterator it = v.begin(); it != v.end(); ++it) {
+        Field f = makeField(*it, st, ns);
+        fieldNames.add(f.name);
+        fieldValues.add(f.schema);
+        defaultValues.push_back(f.defaultValue);
+    }
+    return NodePtr(new NodeRecord(asSingleAttribute(name),
+        fieldValues, fieldNames, defaultValues));
+}
+
+static NodePtr makeEnumNode(const Entity& e,
+    const Name& name, const Object& m)
+{
+    const Array& v = getArrayField(e, m, "symbols");
+    concepts::MultiAttribute<string> symbols;
+    for (Array::const_iterator it = v.begin(); it != v.end(); ++it) {
+        if (it->type() != json::etString) {
+            throw Exception(boost::format("Enum symbol not a string: %1%") %
+                it->toString());
+        }
+        symbols.add(it->stringValue());
+    }
+    return NodePtr(new NodeEnum(asSingleAttribute(name), symbols));
+}
+
+static NodePtr makeFixedNode(const Entity& e,
+    const Name& name, const Object& m)
+{
+    int v = static_cast<int>(getLongField(e, m, "size"));
+    if (v <= 0) {
+        throw Exception(boost::format("Size for fixed is not positive: ") %
+            e.toString());
+    }
+    return NodePtr(new NodeFixed(asSingleAttribute(name),
+        asSingleAttribute(v)));
+}
+
+static NodePtr makeArrayNode(const Entity& e, const Object& m,
+    SymbolTable& st, const string& ns)
+{
+    Object::const_iterator it = findField(e, m, "items");
+    return NodePtr(new NodeArray(asSingleAttribute(
+        makeNode(it->second, st, ns))));
+}
+
+static NodePtr makeMapNode(const Entity& e, const Object& m,
+    SymbolTable& st, const string& ns)
+{
+    Object::const_iterator it = findField(e, m, "values");
+
+    return NodePtr(new NodeMap(asSingleAttribute(
+        makeNode(it->second, st, ns))));
+}
+
+static Name getName(const Entity& e, const Object& m, const string& ns)
+{
+    const string& name = getStringField(e, m, "name");
+
+    if (isFullName(name)) {
+        return Name(name);
+    } else {
+        Object::const_iterator it = m.find("namespace");
+        if (it != m.end()) {
+            if (it->second.type() != json::type_traits<string>::type()) {
+                throw Exception(boost::format(
+                    "Json field \"%1%\" is not a %2%: %3%") %
+                        "namespace" % json::type_traits<string>::name() %
+                        it->second.toString());
+            }
+            Name result = Name(name, it->second.stringValue());
+            return result;
+        }
+        return Name(name, ns);
+    }
+}
+
+static NodePtr makeNode(const Entity& e, const Object& m,
+    SymbolTable& st, const string& ns)
+{
+    const string& type = getStringField(e, m, "type");
+    if (NodePtr result = makePrimitive(type)) {
+        return result;
+    } else if (type == "record" || type == "error" ||
+        type == "enum" || type == "fixed") {
+        Name nm = getName(e, m, ns);
+        NodePtr result;
+        if (type == "record" || type == "error") {
+            result = NodePtr(new NodeRecord());
+            st[nm] = result;
+            NodePtr r = makeRecordNode(e, nm, m, st, nm.ns());
+            (boost::dynamic_pointer_cast<NodeRecord>(r))->swap(
+                *boost::dynamic_pointer_cast<NodeRecord>(result));
+        } else {
+            result = (type == "enum") ? makeEnumNode(e, nm, m) :
+                makeFixedNode(e, nm, m);
+            st[nm] = result;
+        }
+        return result;
+    } else if (type == "array") {
+        return makeArrayNode(e, m, st, ns);
+    } else if (type == "map") {
+        return makeMapNode(e, m, st, ns);
+    }
+    throw Exception(boost::format("Unknown type definition: %1%")
+        % e.toString());
+}
+
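+// A JSON array of schemas denotes a union of those schemas.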
+static NodePtr makeNode(const Entity& e, const Array& m,
+    SymbolTable& st, const string& ns)
+{
+    concepts::MultiAttribute<NodePtr> mm;
+    for (Array::const_iterator it = m.begin(); it != m.end(); ++it) {
+        mm.add(makeNode(*it, st, ns));
+    }
+    return NodePtr(new NodeUnion(mm));
+}
+
+static NodePtr makeNode(const json::Entity& e, SymbolTable& st, const string& ns)
+{
+    switch (e.type()) {
+    case json::etString:
+        return makeNode(e.stringValue(), st, ns);
+    case json::etObject:
+        return makeNode(e, e.objectValue(), st, ns);
+    case json::etArray:
+        return makeNode(e, e.arrayValue(), st, ns);
+    default:
+        throw Exception(boost::format("Invalid Avro type: %1%") % e.toString());
+    }
+}
+
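+// The compileJsonSchemaFrom* family parses a JSON schema text and returns
+// a ValidSchema. Usage sketch (illustrative only, not part of upstream):
+//
+//     avro::ValidSchema s = avro::compileJsonSchemaFromString(
+//         "{\"type\": \"record\", \"name\": \"R\", "
+//         "\"fields\": [{\"name\": \"f\", \"type\": \"int\"}]}");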
+AVRO_DECL ValidSchema compileJsonSchemaFromStream(InputStream& is)
+{
+    json::Entity e = json::loadEntity(is);
+    SymbolTable st;
+    NodePtr n = makeNode(e, st, "");
+    return ValidSchema(n);
+}
+
+AVRO_DECL ValidSchema compileJsonSchemaFromFile(const char* filename)
+{
+    std::auto_ptr<InputStream> s = fileInputStream(filename);
+    return compileJsonSchemaFromStream(*s);
+}
+
+AVRO_DECL ValidSchema compileJsonSchemaFromMemory(const uint8_t* input, size_t len)
+{
+    return compileJsonSchemaFromStream(*memoryInputStream(input, len));
+}
+
+AVRO_DECL ValidSchema compileJsonSchemaFromString(const char* input)
+{
+    return compileJsonSchemaFromMemory(reinterpret_cast<const uint8_t*>(input),
+        ::strlen(input));
+}
+
+AVRO_DECL ValidSchema compileJsonSchemaFromString(const std::string& input)
+{
+    return compileJsonSchemaFromMemory(
+        reinterpret_cast<const uint8_t*>(&input[0]), input.size());
+}
+
+static ValidSchema compile(std::istream& is)
+{
+    std::auto_ptr<InputStream> in = istreamInputStream(is);
+    return compileJsonSchemaFromStream(*in);
+}
+
+AVRO_DECL void compileJsonSchema(std::istream &is, ValidSchema &schema)
+{
+    if (!is.good()) {
+        throw Exception("Input stream is not good");
+    }
+
+    schema = compile(is);
+}
+
+AVRO_DECL bool compileJsonSchema(std::istream &is, ValidSchema &schema, std::string &error)
+{
+    try {
+        compileJsonSchema(is, schema);
+        return true;
+    } catch (const Exception &e) {
+        error = e.what();
+        return false;
+    }
+
+}
+
+} // namespace avro
diff --git a/lang/c++/impl/DataFile.cc b/lang/c++/impl/DataFile.cc
new file mode 100644
index 0000000..035dd27
--- /dev/null
+++ b/lang/c++/impl/DataFile.cc
@@ -0,0 +1,400 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "DataFile.hh"
+#include "Compiler.hh"
+#include "Exception.hh"
+
+#include <sstream>
+
+#include <boost/random/mersenne_twister.hpp>
+#include <boost/iostreams/device/file.hpp>
+#include <boost/iostreams/filter/gzip.hpp>
+#include <boost/iostreams/filter/zlib.hpp>
+
+namespace avro {
+using std::auto_ptr;
+using std::ostringstream;
+using std::istringstream;
+using std::vector;
+using std::copy;
+using std::string;
+
+using boost::array;
+
+namespace {
+const string AVRO_SCHEMA_KEY("avro.schema");
+const string AVRO_CODEC_KEY("avro.codec");
+const string AVRO_NULL_CODEC("null");
+const string AVRO_DEFLATE_CODEC("deflate");
+
+const size_t minSyncInterval = 32;
+const size_t maxSyncInterval = 1u << 30;
+const size_t defaultSyncInterval = 64 * 1024;
+
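+// The Avro "deflate" codec stores raw DEFLATE data (RFC 1951), so the
+// zlib header and checksum are suppressed here.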
+boost::iostreams::zlib_params get_zlib_params() {
+  boost::iostreams::zlib_params ret;
+  ret.method = boost::iostreams::zlib::deflated;
+  ret.noheader = true;
+  return ret;
+}
+}
+
+
+static string toString(const ValidSchema& schema)
+{
+    ostringstream oss;
+    schema.toJson(oss);
+    return oss.str();
+}
+
+DataFileWriterBase::DataFileWriterBase(const char* filename,
+    const ValidSchema& schema, size_t syncInterval, Codec codec) :
+    filename_(filename), schema_(schema), encoderPtr_(binaryEncoder()),
+    syncInterval_(syncInterval),
+    codec_(codec),
+    stream_(fileOutputStream(filename)),
+    buffer_(memoryOutputStream()),
+    sync_(makeSync()), objectCount_(0)
+{
+    if (syncInterval < minSyncInterval || syncInterval > maxSyncInterval) {
+        throw Exception(boost::format("Invalid sync interval: %1%. "
+            "Should be between %2% and %3%") % syncInterval %
+            minSyncInterval % maxSyncInterval);
+    }
+
+    if (codec_ == NULL_CODEC) {
+      setMetadata(AVRO_CODEC_KEY, AVRO_NULL_CODEC);
+    } else if (codec_ == DEFLATE_CODEC) {
+      setMetadata(AVRO_CODEC_KEY, AVRO_DEFLATE_CODEC);
+    } else {
+      throw Exception("Unknown codec codec");
+    }
+    setMetadata(AVRO_SCHEMA_KEY, toString(schema));
+
+    writeHeader();
+    encoderPtr_->init(*buffer_);
+}
+
+DataFileWriterBase::~DataFileWriterBase()
+{
+    if (stream_.get()) {
+        close();
+    }
+}
+
+void DataFileWriterBase::close()
+{
+    flush();
+    stream_.reset();
+}
+
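+// Writes out one data block: the object count, the byte count of the
+// (possibly compressed) payload, the payload itself, and finally the
+// 16-byte sync marker.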
+void DataFileWriterBase::sync()
+{
+    encoderPtr_->flush();
+
+    encoderPtr_->init(*stream_);
+    avro::encode(*encoderPtr_, objectCount_);
+    if (codec_ == NULL_CODEC) {
+        int64_t byteCount = buffer_->byteCount();
+        avro::encode(*encoderPtr_, byteCount);
+        encoderPtr_->flush();
+        std::auto_ptr<InputStream> in = memoryInputStream(*buffer_);
+        copy(*in, *stream_);
+    } else {
+        std::vector<char> buf;
+        {
+            boost::iostreams::filtering_ostream os;
+            if (codec_ == DEFLATE_CODEC) {
+                os.push(boost::iostreams::zlib_compressor(get_zlib_params()));
+            }
+            os.push(boost::iostreams::back_inserter(buf));
+            const uint8_t* data;
+            size_t len;
+
+            std::auto_ptr<InputStream> input = memoryInputStream(*buffer_);
+            while (input->next(&data, &len)) {
+                boost::iostreams::write(os, reinterpret_cast<const char*>(data), len);
+            }
+        } // scope ends: the filtering_ostream flushes the compressor into buf
+        std::auto_ptr<InputStream> in = memoryInputStream(
+           reinterpret_cast<const uint8_t*>(&buf[0]), buf.size());
+        int64_t byteCount = buf.size();
+        avro::encode(*encoderPtr_, byteCount);
+        encoderPtr_->flush();
+        copy(*in, *stream_);
+    }
+
+    encoderPtr_->init(*stream_);
+    avro::encode(*encoderPtr_, sync_);
+    encoderPtr_->flush();
+
+
+    buffer_ = memoryOutputStream();
+    encoderPtr_->init(*buffer_);
+    objectCount_ = 0;
+}
+
+void DataFileWriterBase::syncIfNeeded()
+{
+    encoderPtr_->flush();
+    if (buffer_->byteCount() >= syncInterval_) {
+        sync();
+    }
+}
+
+void DataFileWriterBase::flush()
+{
+    sync();
+}
+
+boost::mt19937 random(static_cast<uint32_t>(time(0)));
+
+DataFileSync DataFileWriterBase::makeSync()
+{
+    DataFileSync sync;
+    for (size_t i = 0; i < sync.size(); ++i) {
+        sync[i] = random();
+    }
+    return sync;
+}
+
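+// Every Avro object container file starts with the 4-byte magic "Obj\x01".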
+typedef array<uint8_t, 4> Magic;
+static Magic magic = { { 'O', 'b', 'j', '\x01' } };
+
+void DataFileWriterBase::writeHeader()
+{
+    encoderPtr_->init(*stream_);
+    avro::encode(*encoderPtr_, magic);
+    avro::encode(*encoderPtr_, metadata_);
+    avro::encode(*encoderPtr_, sync_);
+    encoderPtr_->flush();
+}
+
+void DataFileWriterBase::setMetadata(const string& key, const string& value)
+{
+    vector<uint8_t> v(value.size());
+    copy(value.begin(), value.end(), v.begin());
+    metadata_[key] = v;
+}
+
+DataFileReaderBase::DataFileReaderBase(const char* filename) :
+    filename_(filename), stream_(fileInputStream(filename)),
+    decoder_(binaryDecoder()), objectCount_(0), eof_(false)
+{
+    readHeader();
+}
+
+void DataFileReaderBase::init()
+{
+    readerSchema_ = dataSchema_;
+    dataDecoder_  = binaryDecoder();
+    readDataBlock();
+}
+
+void DataFileReaderBase::init(const ValidSchema& readerSchema)
+{
+    readerSchema_ = readerSchema;
+    dataDecoder_  = (toString(readerSchema_) != toString(dataSchema_)) ?
+        resolvingDecoder(dataSchema_, readerSchema_, binaryDecoder()) :
+        binaryDecoder();
+    readDataBlock();
+}
+
+static void drain(InputStream& in)
+{
+    const uint8_t *p = 0;
+    size_t n = 0;
+    while (in.next(&p, &n));
+}
+
+char hex(unsigned int x)
+{
+    return x + (x < 10 ? '0' :  ('a' - 10));
+}
+
+std::ostream& operator << (std::ostream& os, const DataFileSync& s)
+{
+    for (size_t i = 0; i < s.size(); ++i) {
+        os << hex(s[i] / 16)  << hex(s[i] % 16) << ' ';
+    }
+    os << std::endl;
+    return os;
+}
+
+
+bool DataFileReaderBase::hasMore()
+{
+    if (eof_) {
+        return false;
+    } else if (objectCount_ != 0) {
+        return true;
+    }
+
+    dataDecoder_->init(*dataStream_);
+    drain(*dataStream_);
+    DataFileSync s;
+    decoder_->init(*stream_);
+    avro::decode(*decoder_, s);
+    if (s != sync_) {
+        throw Exception("Sync mismatch");
+    }
+    return readDataBlock();
+}
+
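+// Presents at most limit_ bytes of the underlying stream, so a reader
+// cannot run past the end of the current data block.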
+class BoundedInputStream : public InputStream {
+    InputStream& in_;
+    size_t limit_;
+
+    bool next(const uint8_t** data, size_t* len) {
+        if (limit_ != 0 && in_.next(data, len)) {
+            if (*len > limit_) {
+                in_.backup(*len - limit_);
+                *len = limit_;
+            }
+            limit_ -= *len;
+            return true;
+        }
+        return false;
+    }
+
+    void backup(size_t len) {
+        in_.backup(len);
+        limit_ += len;
+    }
+
+    void skip(size_t len) {
+        if (len > limit_) {
+            len = limit_;
+        }
+        in_.skip(len);
+        limit_ -= len;
+    }
+
+    size_t byteCount() const {
+        return in_.byteCount();
+    }
+
+public:
+    BoundedInputStream(InputStream& in, size_t limit) :
+        in_(in), limit_(limit) { }
+};
+
+auto_ptr<InputStream> boundedInputStream(InputStream& in, size_t limit)
+{
+    return auto_ptr<InputStream>(new BoundedInputStream(in, limit));
+}
+
+bool DataFileReaderBase::readDataBlock()
+{
+    decoder_->init(*stream_);
+    const uint8_t* p = 0;
+    size_t n = 0;
+    if (! stream_->next(&p, &n)) {
+        eof_ = true;
+        return false;
+    }
+    stream_->backup(n);
+    avro::decode(*decoder_, objectCount_);
+    int64_t byteCount;
+    avro::decode(*decoder_, byteCount);
+    decoder_->init(*stream_);
+
+    auto_ptr<InputStream> st = boundedInputStream(*stream_, static_cast<size_t>(byteCount));
+    if (codec_ == NULL_CODEC) {
+        dataDecoder_->init(*st);
+        dataStream_ = st;
+    } else {
+        compressed_.clear();
+        const uint8_t* data;
+        size_t len;
+        while (st->next(&data, &len)) {
+            compressed_.insert(compressed_.end(), data, data + len);
+        }
+        os_.reset(new boost::iostreams::filtering_istream());
+        if (codec_ == DEFLATE_CODEC) {
+            os_->push(boost::iostreams::zlib_decompressor(get_zlib_params()));
+        } else {
+            throw Exception("Bad codec");
+        }
+        os_->push(boost::iostreams::basic_array_source<char>(
+            &compressed_[0], compressed_.size()));
+
+        std::auto_ptr<InputStream> in = istreamInputStream(*os_);
+        dataDecoder_->init(*in);
+        dataStream_ = in;
+    }
+    return true;
+}
+
+void DataFileReaderBase::close()
+{
+}
+
+static string toString(const vector<uint8_t>& v)
+{
+    string result;
+    result.resize(v.size());
+    copy(v.begin(), v.end(), result.begin());
+    return result;
+}
+
+static ValidSchema makeSchema(const vector<uint8_t>& v)
+{
+    istringstream iss(toString(v));
+    ValidSchema vs;
+    compileJsonSchema(iss, vs);
+    return ValidSchema(vs);
+}
+
+void DataFileReaderBase::readHeader()
+{
+    decoder_->init(*stream_);
+    Magic m;
+    avro::decode(*decoder_, m);
+    if (magic != m) {
+        throw Exception("Invalid data file. Magic does not match: "
+            + filename_);
+    }
+    avro::decode(*decoder_, metadata_);
+    Metadata::const_iterator it = metadata_.find(AVRO_SCHEMA_KEY);
+    if (it == metadata_.end()) {
+        throw Exception("No schema in metadata");
+    }
+
+    dataSchema_ = makeSchema(it->second);
+    if (! readerSchema_.root()) {
+        readerSchema_ = dataSchema();
+    }
+
+    it = metadata_.find(AVRO_CODEC_KEY);
+    if (it != metadata_.end() && toString(it->second) == AVRO_DEFLATE_CODEC) {
+        codec_ = DEFLATE_CODEC;
+    } else {
+        codec_ = NULL_CODEC;
+        if (it != metadata_.end() && toString(it->second) != AVRO_NULL_CODEC) {
+            throw Exception("Unknown codec in data file: " + toString(it->second));
+        }
+    }
+
+    avro::decode(*decoder_, sync_);
+}
+
+}   // namespace avro
diff --git a/lang/c++/impl/FileStream.cc b/lang/c++/impl/FileStream.cc
new file mode 100644
index 0000000..39c5af2
--- /dev/null
+++ b/lang/c++/impl/FileStream.cc
@@ -0,0 +1,355 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+#include "Stream.hh"
+#ifndef _WIN32
+#include "unistd.h"
+#include "fcntl.h"
+#include "errno.h"
+
+#ifndef O_BINARY
+#define O_BINARY 0
+#endif
+#else
+#include "Windows.h"
+
+#ifdef min
+#undef min
+#endif
+#endif
+
+using std::auto_ptr;
+using std::istream;
+using std::ostream;
+
+namespace avro {
+namespace {
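+// Minimal "read some bytes / skip forward" source abstraction, with
+// POSIX file, Win32 file and std::istream implementations below.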
+struct BufferCopyIn {
+    virtual ~BufferCopyIn() { }
+    virtual void seek(size_t len) = 0;
+    virtual bool read(uint8_t* b, size_t toRead, size_t& actual) = 0;
+
+};
+
+struct FileBufferCopyIn : public BufferCopyIn {
+#ifdef _WIN32
+    HANDLE h_;
+    FileBufferCopyIn(const char* filename) :
+        h_(::CreateFile(filename, GENERIC_READ, 0, NULL, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL)) {
+        if (h_ == INVALID_HANDLE_VALUE) {
+            throw Exception(boost::format("Cannot open file: %1%") % ::GetLastError());
+        }
+    }
+
+    ~FileBufferCopyIn() {
+        ::CloseHandle(h_);
+    }
+
+    void seek(size_t len) {
+        if (::SetFilePointer(h_, len, NULL, FILE_CURRENT) == INVALID_SET_FILE_POINTER) {
+            throw Exception(boost::format("Cannot skip file: %1%") % ::GetLastError());
+        }
+    }
+
+    bool read(uint8_t* b, size_t toRead, size_t& actual) {
+        DWORD dw = 0;
+        if (! ::ReadFile(h_, b, toRead, &dw, NULL)) {
+            throw Exception(boost::format("Cannot read file: %1%") % ::GetLastError());
+        }
+        actual = static_cast<size_t>(dw);
+        return actual != 0;
+    }
+#else
+    const int fd_;
+
+    FileBufferCopyIn(const char* filename) :
+        fd_(open(filename, O_RDONLY | O_BINARY)) {
+        if (fd_ < 0) {
+            throw Exception(boost::format("Cannot open file: %1%") %
+                ::strerror(errno));
+        }
+    }
+
+    ~FileBufferCopyIn() {
+        ::close(fd_);
+    }
+
+    void seek(size_t len) {
+        off_t r = ::lseek(fd_, len, SEEK_CUR);
+        if (r == static_cast<off_t>(-1)) {
+            throw Exception(boost::format("Cannot skip file: %1%") %
+                strerror(errno));
+        }
+    }
+
+    bool read(uint8_t* b, size_t toRead, size_t& actual) {
+        int n = ::read(fd_, b, toRead);
+        if (n > 0) {
+            actual = n;
+            return true;
+        }
+        return false;
+    }
+#endif
+  
+};
+
+struct IStreamBufferCopyIn : public BufferCopyIn {
+    istream& is_;
+
+    IStreamBufferCopyIn(istream& is) : is_(is) {
+    }
+
+    void seek(size_t len) {
+        if (! is_.seekg(len, std::ios_base::cur)) {
+            throw Exception("Cannot skip stream");
+        }
+    }
+
+    bool read(uint8_t* b, size_t toRead, size_t& actual) {
+        is_.read(reinterpret_cast<char*>(b), toRead);
+        if (is_.bad()) {
+            return false;
+        }
+        actual = static_cast<size_t>(is_.gcount());
+        return (! is_.eof() || actual != 0);
+    }
+
+};
+
+}
+
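+// Adapts any BufferCopyIn source to the InputStream interface through a
+// single internal buffer that is refilled on demand.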
+class BufferCopyInInputStream : public InputStream {
+    const size_t bufferSize_;
+    uint8_t* const buffer_;
+    auto_ptr<BufferCopyIn> in_;
+    size_t byteCount_;
+    uint8_t* next_;
+    size_t available_;
+
+    bool next(const uint8_t** data, size_t *size) {
+        if (available_ == 0 && ! fill()) {
+            return false;
+        }
+        *data = next_;
+        *size = available_;
+        next_ += available_;
+        byteCount_ += available_;
+        available_ = 0;
+        return true;
+    }
+
+    void backup(size_t len) {
+        next_ -= len;
+        available_ += len;
+        byteCount_ -= len;
+    }
+
+    void skip(size_t len) {
+        while (len > 0) {
+            if (available_ == 0) {
+                in_->seek(len);
+                byteCount_ += len;
+                return;
+            }
+            size_t n = std::min(available_, len);
+            available_ -= n;
+            next_ += n;
+            len -= n;
+            byteCount_ += n;
+        }
+    }
+
+    size_t byteCount() const { return byteCount_; }
+
+    bool fill() {
+        size_t n = 0;
+        if (in_->read(buffer_, bufferSize_, n)) {
+            next_ = buffer_;
+            available_ = n;
+            return true;
+        }
+        return false;
+    }
+
+
+public:
+    BufferCopyInInputStream(auto_ptr<BufferCopyIn>& in, size_t bufferSize) :
+        bufferSize_(bufferSize),
+        buffer_(new uint8_t[bufferSize]),
+        in_(in),
+        byteCount_(0),
+        next_(buffer_),
+        available_(0) { }
+
+    ~BufferCopyInInputStream() {
+        delete[] buffer_;
+    }
+};
+
+namespace {
+struct BufferCopyOut {
+    virtual ~BufferCopyOut() { }
+    virtual void write(const uint8_t* b, size_t len) = 0;
+};
+
+struct FileBufferCopyOut : public BufferCopyOut {
+#ifdef _WIN32
+    HANDLE h_;
+    FileBufferCopyOut(const char* filename) :
+        h_(::CreateFile(filename, GENERIC_WRITE, 0, NULL, CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL)) {
+        if (h_ == INVALID_HANDLE_VALUE) {
+            throw Exception(boost::format("Cannot open file: %1%") % ::GetLastError());
+        }
+    }
+
+    ~FileBufferCopyOut() {
+        ::CloseHandle(h_);
+    }
+
+    void write(const uint8_t* b, size_t len) {
+        while (len > 0) {
+            DWORD dw = 0;
+            if (! ::WriteFile(h_, b, len, &dw, NULL)) {
+                throw Exception(boost::format("Cannot read file: %1%") % ::GetLastError());
+            }
+            b += dw;
+            len -= dw;
+        }
+    }
+#else
+    const int fd_;
+
+    FileBufferCopyOut(const char* filename) :
+        fd_(::open(filename, O_WRONLY | O_CREAT | O_TRUNC | O_BINARY, 0644)) {
+
+        if (fd_ < 0) {
+            throw Exception(boost::format("Cannot open file: %1%") %
+                ::strerror(errno));
+        }
+    }
+
+    ~FileBufferCopyOut() {
+        ::close(fd_);
+    }
+
+    void write(const uint8_t* b, size_t len) {
+        if (::write(fd_, b, len) < 0) {
+            throw Exception(boost::format("Cannot write file: %1%") %
+                ::strerror(errno));
+        }
+    }
+#endif
+  
+};
+
+struct OStreamBufferCopyOut : public BufferCopyOut {
+    ostream& os_;
+
+    OStreamBufferCopyOut(ostream& os) : os_(os) {
+    }
+
+    void write(const uint8_t* b, size_t len) {
+        os_.write(reinterpret_cast<const char*>(b), len);
+    }
+
+};
+
+}
+
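+// Counterpart of BufferCopyInInputStream: hands out chunks of an
+// internal buffer and writes the filled part to the sink on flush().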
+class BufferCopyOutputStream : public OutputStream {
+    size_t bufferSize_;
+    uint8_t* const buffer_;
+    auto_ptr<BufferCopyOut> out_;
+    uint8_t* next_;
+    size_t available_;
+    size_t byteCount_;
+
+    // Invariant: byteCount_ == bytes written + bufferSize_ - available_
+    bool next(uint8_t** data, size_t* len) {
+        if (available_ == 0) {
+            flush();
+        }
+        *data = next_;
+        *len = available_;
+        next_ += available_;
+        byteCount_ += available_;
+        available_ = 0;
+        return true;
+    }
+
+    void backup(size_t len) {
+        available_ += len;
+        next_ -= len;
+        byteCount_ -= len;
+    }
+
+    uint64_t byteCount() const {
+        return byteCount_;
+    }
+
+    void flush() {
+        out_->write(buffer_, bufferSize_ - available_);
+        next_ = buffer_;
+        available_ = bufferSize_;
+    }
+
+public:
+    BufferCopyOutputStream(auto_ptr<BufferCopyOut> out, size_t bufferSize) :
+        bufferSize_(bufferSize),
+        buffer_(new uint8_t[bufferSize]),
+        out_(out),
+        next_(buffer_),
+        available_(bufferSize_), byteCount_(0) { }
+
+    ~BufferCopyOutputStream() {
+        delete[] buffer_;
+    }
+};
+
+auto_ptr<InputStream> fileInputStream(const char* filename,
+    size_t bufferSize)
+{
+    auto_ptr<BufferCopyIn> in(new FileBufferCopyIn(filename));
+    return auto_ptr<InputStream>( new BufferCopyInInputStream(in, bufferSize));
+}
+
+auto_ptr<InputStream> istreamInputStream(istream& is,
+    size_t bufferSize)
+{
+    auto_ptr<BufferCopyIn> in(new IStreamBufferCopyIn(is));
+    return auto_ptr<InputStream>( new BufferCopyInInputStream(in, bufferSize));
+}
+
+auto_ptr<OutputStream> fileOutputStream(const char* filename,
+    size_t bufferSize)
+{
+    auto_ptr<BufferCopyOut> out(new FileBufferCopyOut(filename));
+    return auto_ptr<OutputStream>(new BufferCopyOutputStream(out, bufferSize));
+}
+
+auto_ptr<OutputStream> ostreamOutputStream(ostream& os,
+    size_t bufferSize)
+{
+    auto_ptr<BufferCopyOut> out(new OStreamBufferCopyOut(os));
+    return auto_ptr<OutputStream>(new BufferCopyOutputStream(out, bufferSize));
+}
+
+
+}   // namespace avro
diff --git a/lang/c++/impl/Generic.cc b/lang/c++/impl/Generic.cc
new file mode 100644
index 0000000..884fadb
--- /dev/null
+++ b/lang/c++/impl/Generic.cc
@@ -0,0 +1,264 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Generic.hh"
+#include <sstream>
+
+namespace avro {
+
+using std::string;
+using std::vector;
+using std::ostringstream;
+
+typedef vector<uint8_t> bytes;
+
+void GenericContainer::assertType(const NodePtr& schema, Type type) {
+    if (schema->type() != type) {
+        throw Exception(boost::format("Schema type %1 expected %2") %
+            toString(schema->type()) % toString(type));
+    }
+}
+
+GenericReader::GenericReader(const ValidSchema& s, const DecoderPtr& decoder) :
+    schema_(s), isResolving_(dynamic_cast<ResolvingDecoder*>(&(*decoder)) != 0),
+    decoder_(decoder)
+{
+}
+
+GenericReader::GenericReader(const ValidSchema& writerSchema,
+    const ValidSchema& readerSchema, const DecoderPtr& decoder) :
+    schema_(readerSchema),
+    isResolving_(true),
+    decoder_(resolvingDecoder(writerSchema, readerSchema, decoder))
+{
+}
+
+void GenericReader::read(GenericDatum& datum) const
+{
+    datum = GenericDatum(schema_.root());
+    read(datum, *decoder_, isResolving_);
+}
+
+void GenericReader::read(GenericDatum& datum, Decoder& d, bool isResolving)
+{
+    if (datum.isUnion()) {
+        datum.selectBranch(d.decodeUnionIndex());
+        read(datum.value<GenericUnion>().datum(), d, isResolving);
+        return;
+    }
+    switch (datum.type()) {
+    case AVRO_NULL:
+        d.decodeNull();
+        break;
+    case AVRO_BOOL:
+        datum.value<bool>() = d.decodeBool();
+        break;
+    case AVRO_INT:
+        datum.value<int32_t>() = d.decodeInt();
+        break;
+    case AVRO_LONG:
+        datum.value<int64_t>() = d.decodeLong();
+        break;
+    case AVRO_FLOAT:
+        datum.value<float>() = d.decodeFloat();
+        break;
+    case AVRO_DOUBLE:
+        datum.value<double>() = d.decodeDouble();
+        break;
+    case AVRO_STRING:
+        d.decodeString(datum.value<string>());
+        break;
+    case AVRO_BYTES:
+        d.decodeBytes(datum.value<bytes>());
+        break;
+    case AVRO_FIXED:
+        {
+            GenericFixed& f = datum.value<GenericFixed>();
+            d.decodeFixed(f.schema()->fixedSize(), f.value());
+        }
+        break;
+    case AVRO_RECORD:
+        {
+            GenericRecord& r = datum.value<GenericRecord>();
+            size_t c = r.schema()->leaves();
+            if (isResolving) {
+                std::vector<size_t> fo =
+                    static_cast<ResolvingDecoder&>(d).fieldOrder();
+                for (size_t i = 0; i < c; ++i) {
+                    read(r.fieldAt(fo[i]), d, isResolving);
+                }
+            } else {
+                for (size_t i = 0; i < c; ++i) {
+                    read(r.fieldAt(i), d, isResolving);
+                }
+            }
+        }
+        break;
+    case AVRO_ENUM:
+        datum.value<GenericEnum>().set(d.decodeEnum());
+        break;
+    case AVRO_ARRAY:
+        {
+            GenericArray& v = datum.value<GenericArray>();
+            vector<GenericDatum>& r = v.value();
+            const NodePtr& nn = v.schema()->leafAt(0);
+            r.resize(0);
+            size_t start = 0;
+            for (size_t m = d.arrayStart(); m != 0; m = d.arrayNext()) {
+                r.resize(r.size() + m);
+                for (; start < r.size(); ++start) {
+                    r[start] = GenericDatum(nn);
+                    read(r[start], d, isResolving);
+                }
+            }
+        }
+        break;
+    case AVRO_MAP:
+        {
+            GenericMap& v = datum.value<GenericMap>();
+            GenericMap::Value& r = v.value();
+            const NodePtr& nn = v.schema()->leafAt(1);
+            r.resize(0);
+            size_t start = 0;
+            for (size_t m = d.mapStart(); m != 0; m = d.mapNext()) {
+                r.resize(r.size() + m);
+                for (; start < r.size(); ++start) {
+                    d.decodeString(r[start].first);
+                    r[start].second = GenericDatum(nn);
+                    read(r[start].second, d, isResolving);
+                }
+            }
+        }
+        break;
+    default:
+        throw Exception(boost::format("Unknown schema type %1%") %
+            toString(datum.type()));
+    }
+}
+
+void GenericReader::read(Decoder& d, GenericDatum& g, const ValidSchema& s)
+{
+    g = GenericDatum(s);
+    read(d, g);
+}
+
+void GenericReader::read(Decoder& d, GenericDatum& g)
+{
+    read(g, d, dynamic_cast<ResolvingDecoder*>(&d) != 0);
+}
+
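+// Usage sketch (illustrative only; "decoder" and "schema" are assumed to
+// be a DecoderPtr already initialized with an InputStream and a
+// ValidSchema, respectively):
+//
+//     avro::GenericDatum d;
+//     avro::GenericReader::read(*decoder, d, schema);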
+GenericWriter::GenericWriter(const ValidSchema& s, const EncoderPtr& encoder) :
+    schema_(s), encoder_(encoder)
+{
+}
+
+void GenericWriter::write(const GenericDatum& datum) const
+{
+    write(datum, *encoder_);
+}
+
+void GenericWriter::write(const GenericDatum& datum, Encoder& e)
+{
+    if (datum.isUnion()) {
+        e.encodeUnionIndex(datum.unionBranch());
+        write(datum.value<GenericUnion>().datum(), e);
+        return;
+    }
+    switch (datum.type()) {
+    case AVRO_NULL:
+        e.encodeNull();
+        break;
+    case AVRO_BOOL:
+        e.encodeBool(datum.value<bool>());
+        break;
+    case AVRO_INT:
+        e.encodeInt(datum.value<int32_t>());
+        break;
+    case AVRO_LONG:
+        e.encodeLong(datum.value<int64_t>());
+        break;
+    case AVRO_FLOAT:
+        e.encodeFloat(datum.value<float>());
+        break;
+    case AVRO_DOUBLE:
+        e.encodeDouble(datum.value<double>());
+        break;
+    case AVRO_STRING:
+        e.encodeString(datum.value<string>());
+        break;
+    case AVRO_BYTES:
+        e.encodeBytes(datum.value<bytes>());
+        break;
+    case AVRO_FIXED:
+        e.encodeFixed(datum.value<GenericFixed>().value());
+        break;
+    case AVRO_RECORD:
+        {
+            const GenericRecord& r = datum.value<GenericRecord>();
+            size_t c = r.schema()->leaves();
+            for (size_t i = 0; i < c; ++i) {
+                write(r.fieldAt(i), e);
+            }
+        }
+        break;
+    case AVRO_ENUM:
+        e.encodeEnum(datum.value<GenericEnum>().value());
+        break;
+    case AVRO_ARRAY:
+        {
+            const GenericArray::Value& r = datum.value<GenericArray>().value();
+            e.arrayStart();
+            if (! r.empty()) {
+                e.setItemCount(r.size());
+                for (GenericArray::Value::const_iterator it = r.begin();
+                    it != r.end(); ++it) {
+                    e.startItem();
+                    write(*it, e);
+                }
+            }
+            e.arrayEnd();
+        }
+        break;
+    case AVRO_MAP:
+        {
+            const GenericMap::Value& r = datum.value<GenericMap>().value();
+            e.mapStart();
+            if (! r.empty()) {
+                e.setItemCount(r.size());
+                for (GenericMap::Value::const_iterator it = r.begin();
+                    it != r.end(); ++it) {
+                    e.startItem();
+                    e.encodeString(it->first);
+                    write(it->second, e);
+                }
+            }
+            e.mapEnd();
+        }
+        break;
+    default:
+        throw Exception(boost::format("Unknown schema type %1%") %
+            toString(datum.type()));
+    }
+}
+
+void GenericWriter::write(Encoder& e, const GenericDatum& g)
+{
+    write(g, e);
+}
+
+}   // namespace avro
diff --git a/lang/c++/impl/GenericDatum.cc b/lang/c++/impl/GenericDatum.cc
new file mode 100644
index 0000000..b5998a8
--- /dev/null
+++ b/lang/c++/impl/GenericDatum.cc
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "GenericDatum.hh"
+#include "NodeImpl.hh"
+
+using std::string;
+using std::vector;
+
+namespace avro {
+
+GenericDatum::GenericDatum(const ValidSchema& schema) :
+    type_(schema.root()->type())
+{
+    init(schema.root());
+}
+
+GenericDatum::GenericDatum(const NodePtr& schema) : type_(schema->type())
+{
+    init(schema);
+}
+
+void GenericDatum::init(const NodePtr& schema)
+{
+    NodePtr sc = schema;
+    if (type_ == AVRO_SYMBOLIC) {
+        sc = resolveSymbol(schema);
+        type_ = sc->type();
+    }
+    switch (type_) {
+    case AVRO_NULL:
+        break;
+    case AVRO_BOOL:
+        value_ = bool();
+        break;
+    case AVRO_INT:
+        value_ = int32_t();
+        break;
+    case AVRO_LONG:
+        value_ = int64_t();
+        break;
+    case AVRO_FLOAT:
+        value_ = float();
+        break;
+    case AVRO_DOUBLE:
+        value_ = double();
+        break;
+    case AVRO_STRING:
+        value_ = string();
+        break;
+    case AVRO_BYTES:
+        value_ = vector<uint8_t>();
+        break;
+    case AVRO_FIXED:
+        value_ = GenericFixed(sc);
+        break;
+    case AVRO_RECORD:
+        value_ = GenericRecord(sc);
+        break;
+    case AVRO_ENUM:
+        value_ = GenericEnum(sc);
+        break;
+    case AVRO_ARRAY:
+        value_ = GenericArray(sc);
+        break;
+    case AVRO_MAP:
+        value_ = GenericMap(sc);
+        break;
+    case AVRO_UNION:
+        value_ = GenericUnion(sc);
+        break;
+    default:
+        throw Exception(boost::format("Unknown schema type %1%") %
+            toString(type_));
+    }
+}
+
+GenericRecord::GenericRecord(const NodePtr& schema) :
+    GenericContainer(AVRO_RECORD, schema) {
+    fields_.resize(schema->leaves());
+    for (size_t i = 0; i < schema->leaves(); ++i) {
+        fields_[i] = GenericDatum(schema->leafAt(i));
+    }
+}
+
+}   // namespace avro
diff --git a/lang/c++/impl/Node.cc b/lang/c++/impl/Node.cc
new file mode 100644
index 0000000..5fa965f
--- /dev/null
+++ b/lang/c++/impl/Node.cc
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Node.hh"
+
+namespace avro {
+
+using std::string;
+
+Node::~Node()
+{ }
+
+Name::Name(const std::string& name)
+{
+    fullname(name);
+}
+
+const string Name::fullname() const
+{
+    return (ns_.empty()) ? simpleName_ : ns_ + "." + simpleName_;
+}
+
+void Name::fullname(const string& name)
+{
+    string::size_type n = name.find_last_of('.');
+    if (n == string::npos) {
+        simpleName_ = name;
+        ns_.clear();
+    } else {
+        ns_ = name.substr(0, n);
+        simpleName_ = name.substr(n + 1);
+    }
+    check();
+}
+
+bool Name::operator < (const Name& n) const
+{
+    return (ns_ < n.ns_) ? true :
+        (n.ns_ < ns_) ? false :
+        (simpleName_ < n.simpleName_);
+}
+
+static bool invalidChar1(char c)
+{
+    return !isalnum(c) && c != '_' && c != '.';
+}
+
+static bool invalidChar2(char c)
+{
+    return !isalnum(c) && c != '_';
+}
+
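+// Checks the rules this implementation enforces: a namespace may contain
+// only alphanumerics, '_' and '.', with no leading or trailing dot; the
+// simple name is a non-empty run of alphanumerics and '_'.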
+void Name::check() const
+{
+    if (! ns_.empty() && (ns_[0] == '.' || ns_[ns_.size() - 1] == '.' || std::find_if(ns_.begin(), ns_.end(), invalidChar1) != ns_.end())) {
+        throw Exception("Invalid namespace: " + ns_);
+    }
+    if (simpleName_.empty() || std::find_if(simpleName_.begin(), simpleName_.end(), invalidChar2) != simpleName_.end()) {
+        throw Exception("Invalid name: " + simpleName_);
+    }
+}
+
+bool Name::operator == (const Name& n) const
+{
+    return ns_ == n.ns_ && simpleName_ == n.simpleName_;
+}
+
+} // namespace avro
diff --git a/lang/c++/impl/NodeImpl.cc b/lang/c++/impl/NodeImpl.cc
new file mode 100644
index 0000000..aba2a73
--- /dev/null
+++ b/lang/c++/impl/NodeImpl.cc
@@ -0,0 +1,282 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include "NodeImpl.hh"
+
+namespace avro {
+
+SchemaResolution 
+NodePrimitive::resolve(const Node &reader) const
+{
+    if(type() == reader.type()) {
+        return RESOLVE_MATCH;
+    }
+
+    switch ( type() ) {
+
+      case AVRO_INT:
+
+        if( reader.type() == AVRO_LONG ) { 
+            return RESOLVE_PROMOTABLE_TO_LONG;
+        }   
+
+        // fall-through intentional
+
+      case AVRO_LONG:
+ 
+        if (reader.type() == AVRO_FLOAT) {
+            return RESOLVE_PROMOTABLE_TO_FLOAT;
+        }   
+
+        // fall-through intentional
+
+      case AVRO_FLOAT:
+
+        if (reader.type() == AVRO_DOUBLE) {
+            return RESOLVE_PROMOTABLE_TO_DOUBLE;
+        }   
+
+      default:
+        break;
+    }   
+
+    return furtherResolution(reader);
+}
+
+SchemaResolution 
+NodeRecord::resolve(const Node &reader) const
+{
+    if(reader.type() == AVRO_RECORD) {
+        if(name() == reader.name()) {
+            return RESOLVE_MATCH;
+        }
+    }
+    return furtherResolution(reader);
+}
+
+SchemaResolution 
+NodeEnum::resolve(const Node &reader) const
+{
+    if(reader.type() == AVRO_ENUM) {
+        return (name() == reader.name()) ? RESOLVE_MATCH : RESOLVE_NO_MATCH;
+    }
+    return furtherResolution(reader);
+}
+
+SchemaResolution 
+NodeArray::resolve(const Node &reader) const
+{
+    if(reader.type() == AVRO_ARRAY) {
+        const NodePtr &arrayType = leafAt(0);
+        return arrayType->resolve(*reader.leafAt(0));
+    }
+    return furtherResolution(reader);
+}
+
+SchemaResolution 
+NodeMap::resolve(const Node &reader) const
+{
+    if(reader.type() == AVRO_MAP) {
+        const NodePtr &mapType = leafAt(1);
+        return mapType->resolve(*reader.leafAt(1));
+    }
+    return furtherResolution(reader);
+}
+
+SchemaResolution
+NodeUnion::resolve(const Node &reader) const 
+{
+
+    // When the writer is a union, resolution can only be decided once the
+    // writer's selected branch is known, so this function cannot give a
+    // definitive answer. Instead it reports whether any branch could match
+    // the reader, searching branch by branch and returning the best match
+    // found.
+    
+    SchemaResolution match = RESOLVE_NO_MATCH;
+    for(size_t i=0; i < leaves(); ++i) {
+        const NodePtr &node = leafAt(i);
+        SchemaResolution thisMatch = node->resolve(reader);
+        if(thisMatch == RESOLVE_MATCH) {
+            match = thisMatch;
+            break;
+        }
+        if(match == RESOLVE_NO_MATCH) {
+            match = thisMatch;
+        }
+    }
+    return match;
+}
+
+SchemaResolution 
+NodeFixed::resolve(const Node &reader) const
+{
+    if(reader.type() == AVRO_FIXED) {
+        return (
+                (reader.fixedSize() == fixedSize()) &&
+                (reader.name() == name()) 
+            ) ? 
+            RESOLVE_MATCH : RESOLVE_NO_MATCH;
+    }
+    return furtherResolution(reader);
+}
+
+SchemaResolution 
+NodeSymbolic::resolve(const Node &reader) const
+{
+    const NodePtr &node = leafAt(0);
+    return node->resolve(reader);
+}
+
+// Wrap an indentation in a struct for ostream operator<< 
+struct indent { 
+    indent(int depth) :
+        d(depth)
+    { }
+    int d; 
+};
+
+/// ostream operator for indent
+std::ostream& operator <<(std::ostream &os, indent x)
+{
+    static const std::string spaces("    ");
+    while(x.d--) {
+        os << spaces; 
+    }
+    return os;
+}
+
+void 
+NodePrimitive::printJson(std::ostream &os, int depth) const
+{
+    os << '\"' << type() << '\"';
+}
+
+void 
+NodeSymbolic::printJson(std::ostream &os, int depth) const
+{
+    os << '\"' << nameAttribute_.get() << '\"';
+}
+
+static void printName(std::ostream& os, const Name& n, int depth)
+{
+    if (!n.ns().empty()) {
+        os << indent(depth) << "\"namespace\": \"" << n.ns() << "\",\n";
+    }
+    os << indent(depth) << "\"name\": \"" << n.simpleName() << "\",\n";
+}
+
+void 
+NodeRecord::printJson(std::ostream &os, int depth) const
+{
+    os << "{\n";
+    os << indent(++depth) << "\"type\": \"record\",\n";
+    printName(os, nameAttribute_.get(), depth);
+    os << indent(depth) << "\"fields\": [\n";
+
+    int fields = leafAttributes_.size();
+    ++depth;
+    for(int i = 0; i < fields; ++i) {
+        if(i > 0) {
+            os << indent(depth) << "},\n";
+        }
+        os << indent(depth) << "{\n";
+        os << indent(++depth) << "\"name\": \"" << leafNameAttributes_.get(i) << "\",\n";
+        os << indent(depth) << "\"type\": ";
+        leafAttributes_.get(i)->printJson(os, depth);
+        os << '\n';
+        --depth;
+    }
+    os << indent(depth) << "}\n";
+    os << indent(--depth) << "]\n";
+    os << indent(--depth) << '}';
+}
+
+void 
+NodeEnum::printJson(std::ostream &os, int depth) const
+{
+    os << "{\n";
+    os << indent(++depth) << "\"type\": \"enum\",\n";
+    printName(os, nameAttribute_.get(), depth);
+    os << indent(depth) << "\"symbols\": [\n";
+
+    int names = leafNameAttributes_.size();
+    ++depth;
+    for(int i = 0; i < names; ++i) {
+        if(i > 0) {
+            os << ",\n";
+        }
+        os << indent(depth) << '\"' << leafNameAttributes_.get(i) << '\"';
+    }
+    os << '\n';
+    os << indent(--depth) << "]\n";
+    os << indent(--depth) << '}';
+}
+
+void 
+NodeArray::printJson(std::ostream &os, int depth) const
+{
+    os << "{\n";
+    os << indent(depth+1) << "\"type\": \"array\",\n";
+    os << indent(depth+1) <<  "\"items\": ";
+    leafAttributes_.get()->printJson(os, depth+1);
+    os << '\n';
+    os << indent(depth) << '}';
+}
+
+void 
+NodeMap::printJson(std::ostream &os, int depth) const
+{
+    os << "{\n";
+    os << indent(depth+1) <<"\"type\": \"map\",\n";
+    os << indent(depth+1) << "\"values\": ";
+    leafAttributes_.get(1)->printJson(os, depth+1);
+    os << '\n';
+    os << indent(depth) << '}';
+}
+
+void 
+NodeUnion::printJson(std::ostream &os, int depth) const
+{
+    os << "[\n";
+    int fields = leafAttributes_.size();
+    ++depth;
+    for(int i = 0; i < fields; ++i) {
+        if(i > 0) {
+            os << ",\n";
+        }
+        os << indent(depth);
+        leafAttributes_.get(i)->printJson(os, depth);
+    }
+    os << '\n';
+    os << indent(--depth) << ']';
+}
+
+void 
+NodeFixed::printJson(std::ostream &os, int depth) const
+{
+    os << "{\n";
+    os << indent(++depth) << "\"type\": \"fixed\",\n";
+    printName(os, nameAttribute_.get(), depth);
+    os << indent(depth) << "\"size\": " << sizeAttribute_.get() << "\n";
+    os << indent(--depth) << '}';
+}
+
+} // namespace avro
diff --git a/lang/c++/impl/Resolver.cc b/lang/c++/impl/Resolver.cc
new file mode 100644
index 0000000..61f6780
--- /dev/null
+++ b/lang/c++/impl/Resolver.cc
@@ -0,0 +1,870 @@
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "boost/scoped_array.hpp"
+#include "Resolver.hh"
+#include "Layout.hh"
+#include "NodeImpl.hh"
+#include "ValidSchema.hh"
+#include "Reader.hh"
+#include "Boost.hh"
+#include "AvroTraits.hh"
+
+namespace avro {
+
+class ResolverFactory;
+typedef boost::shared_ptr<Resolver> ResolverPtr;
+typedef boost::ptr_vector<Resolver> ResolverPtrVector;
+
+// #define DEBUG_VERBOSE
+
+#ifdef DEBUG_VERBOSE
+#define DEBUG_OUT(str) std::cout << str << '\n'
+#else
+class NoOp {};
+template<typename T> NoOp& operator<<(NoOp &noOp, const T&) {
+    return noOp;
+}
+NoOp noop;
+#define DEBUG_OUT(str) noop << str 
+#endif
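+// In non-verbose builds DEBUG_OUT streams into a NoOp sink that accepts
+// and discards anything, avoiding #ifdefs at every call site.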
+
+template<typename T>
+class PrimitiveSkipper : public Resolver
+{
+  public:
+
+    PrimitiveSkipper() : 
+        Resolver()
+    {}
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        T val;
+        reader.readValue(val);
+        DEBUG_OUT("Skipping " << val);
+    }
+};
+
+template<typename T>
+class PrimitiveParser : public Resolver
+{
+  public:
+
+    PrimitiveParser(const PrimitiveLayout &offset) : 
+        Resolver(),
+        offset_(offset.offset())
+    {}
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        T* location = reinterpret_cast<T *> (address + offset_);
+        reader.readValue(*location);
+        DEBUG_OUT("Reading " << *location);
+    }
+
+  private:
+
+    size_t offset_;
+};
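+
+// Illustrative sketch (hypothetical struct, for exposition only): given
+//
+//   struct Rec { int32_t a; double b; };
+//
+// a PrimitiveParser<int32_t> built from a PrimitiveLayout whose offset is
+// offsetof(Rec, a) decodes the writer's value straight into rec.a when
+// parse() is handed reinterpret_cast<uint8_t*>(&rec).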
+
+template<typename WT, typename RT>
+class PrimitivePromoter : public Resolver
+{
+  public:
+
+    PrimitivePromoter(const PrimitiveLayout &offset) : 
+        Resolver(),
+        offset_(offset.offset())
+    {}
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        parseIt<WT>(reader, address);
+    }
+
+  private:
+
+    void parseIt(Reader &reader, uint8_t *address, const boost::true_type &) const
+    {
+        WT val;
+        reader.readValue(val);
+        RT *location = reinterpret_cast<RT *> (address + offset_);
+        *location = static_cast<RT>(val);
+        DEBUG_OUT("Promoting " << val);
+    }
+
+    void parseIt(Reader &reader, uint8_t *address, const boost::false_type &) const
+    { }
+
+    template<typename T>
+    void parseIt(Reader &reader, uint8_t *address) const
+    {
+        parseIt(reader, address, is_promotable<T>());
+    }
+
+    size_t offset_;
+};
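+
+// For example, when the writer wrote an int but the reader declares a double,
+// the factory below builds a PrimitivePromoter<int32_t, double>, which reads
+// the int and widens it with static_cast before storing it at the reader's
+// offset. Only Avro's legal promotions reach this path, via the
+// RESOLVE_PROMOTABLE_* match results.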
+
+template <>
+class PrimitiveSkipper<std::vector<uint8_t> > : public Resolver
+{
+  public:
+
+    PrimitiveSkipper() : 
+        Resolver()
+    {}
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        std::vector<uint8_t> val;
+        reader.readBytes(val);
+        DEBUG_OUT("Skipping bytes");
+    }
+};
+
+template <>
+class PrimitiveParser<std::vector<uint8_t> > : public Resolver
+{
+  public:
+
+    PrimitiveParser(const PrimitiveLayout &offset) : 
+        Resolver(),
+        offset_(offset.offset()) 
+    {}
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        std::vector<uint8_t> *location = reinterpret_cast<std::vector<uint8_t> *> (address + offset_);
+        reader.readBytes(*location);
+        DEBUG_OUT("Reading bytes");
+    }
+
+  private:
+
+    size_t offset_;
+};
+
+class RecordSkipper : public Resolver
+{
+  public:
+
+    RecordSkipper(ResolverFactory &factory, const NodePtr &writer);
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        DEBUG_OUT("Skipping record");
+
+        reader.readRecord();
+        size_t steps = resolvers_.size();
+        for(size_t i = 0; i < steps; ++i) {
+            resolvers_[i].parse(reader, address);
+        }
+    }
+
+  protected:
+    
+    ResolverPtrVector resolvers_;
+
+};
+
+class RecordParser : public Resolver
+{
+  public:
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        DEBUG_OUT("Reading record");
+
+        reader.readRecord();
+        size_t steps = resolvers_.size();
+        for(size_t i = 0; i < steps; ++i) {
+            resolvers_[i].parse(reader, address);
+        }
+    }
+
+    RecordParser(ResolverFactory &factory, const NodePtr &writer, const NodePtr &reader, const CompoundLayout &offsets);
+
+  protected:
+    
+    ResolverPtrVector resolvers_;
+
+};
+
+
+class MapSkipper : public Resolver
+{
+  public:
+
+    MapSkipper(ResolverFactory &factory, const NodePtr &writer);
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        DEBUG_OUT("Skipping map");
+
+        std::string key;
+        int64_t size = 0;
+        do {
+            size = reader.readMapBlockSize();
+            for(int64_t i = 0; i < size; ++i) {
+                reader.readValue(key);
+                resolver_->parse(reader, address);
+            }
+        } while (size != 0);
+    }
+
+  protected:
+
+    ResolverPtr resolver_;
+};
+
+
+class MapParser : public Resolver
+{
+  public:
+
+    typedef uint8_t *(*GenericMapSetter)(uint8_t *map, const std::string &key);
+
+    MapParser(ResolverFactory &factory, const NodePtr &writer, const NodePtr &reader, const CompoundLayout &offsets);
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        DEBUG_OUT("Reading map");
+
+        uint8_t *mapAddress = address + offset_;
+
+        std::string key;
+        GenericMapSetter* setter = reinterpret_cast<GenericMapSetter *> (address + setFuncOffset_);
+
+        int64_t size = 0;
+        do {
+            size = reader.readMapBlockSize();
+            for(int64_t i = 0; i < size; ++i) {
+                reader.readValue(key);
+
+                // create a new map entry and get the address
+                uint8_t *location = (*setter)(mapAddress, key);
+                resolver_->parse(reader, location);
+            }
+        } while (size != 0);
+    }
+
+  protected:
+    
+    ResolverPtr  resolver_;
+    size_t          offset_;
+    size_t          setFuncOffset_;
+};
+
+class ArraySkipper : public Resolver
+{
+  public:
+
+    ArraySkipper(ResolverFactory &factory, const NodePtr &writer);
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        DEBUG_OUT("Skipping array");
+
+        int64_t size = 0;
+        do {
+            size = reader.readArrayBlockSize();
+            for(int64_t i = 0; i < size; ++i) {
+                resolver_->parse(reader, address);
+            }
+        } while (size != 0);
+    }
+
+  protected:
+   
+    ResolverPtr resolver_;
+};
+
+typedef uint8_t *(*GenericArraySetter)(uint8_t *array);
+
+class ArrayParser : public Resolver
+{
+  public:
+
+    ArrayParser(ResolverFactory &factory, const NodePtr &writer, const NodePtr &reader, const CompoundLayout &offsets);
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        DEBUG_OUT("Reading array");
+
+        uint8_t *arrayAddress = address + offset_;
+
+        GenericArraySetter* setter = reinterpret_cast<GenericArraySetter *> (address + setFuncOffset_);
+
+        int64_t size = 0;
+        do {
+            size = reader.readArrayBlockSize();
+            for(int64_t i = 0; i < size; ++i) {
+                // create a new array entry and get the address
+                uint8_t *location = (*setter)(arrayAddress);
+                resolver_->parse(reader, location);
+            }
+        } while (size != 0);
+    }
+
+  protected:
+    
+    ArrayParser() :
+        Resolver()
+    {}
+    
+    ResolverPtr resolver_;
+    size_t         offset_;
+    size_t         setFuncOffset_;
+};
+
+class EnumSkipper : public Resolver
+{
+  public:
+
+    EnumSkipper(ResolverFactory &factory, const NodePtr &writer) :
+        Resolver()
+    { }
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        int64_t val = reader.readEnum();
+        DEBUG_OUT("Skipping enum" << val);
+    }
+};
+
+class EnumParser : public Resolver
+{
+  public:
+
+    enum EnumRepresentation {
+        VAL
+    };
+
+    EnumParser(ResolverFactory &factory, const NodePtr &writer, const NodePtr &reader, const CompoundLayout &offsets) :
+        Resolver(),
+        offset_(offsets.at(0).offset()),
+        readerSize_(reader->names())
+    { 
+        const size_t writerSize = writer->names();
+
+        mapping_.reserve(writerSize);
+
+        for(size_t i = 0; i < writerSize; ++i) {
+            const std::string &name = writer->nameAt(i);
+            size_t readerIndex = readerSize_;
+            reader->nameIndex(name, readerIndex);
+            mapping_.push_back(readerIndex);
+        }
+    }
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        size_t val = static_cast<size_t>(reader.readEnum());
+        assert(val < mapping_.size());
+
+        if(mapping_[val] < readerSize_) {
+            EnumRepresentation* location = reinterpret_cast<EnumRepresentation *> (address + offset_);
+            *location = static_cast<EnumRepresentation>(mapping_[val]);
+            DEBUG_OUT("Setting enum" << *location);
+        }
+    }
+
+protected:
+
+    size_t offset_;
+    size_t readerSize_;
+    std::vector<size_t> mapping_;
+    
+};
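+
+// Worked example of the mapping above: a writer enum {A, B, C} read with a
+// reader enum {C, A} gives readerSize_ == 2 and mapping_ == {1, 2, 0}: A maps
+// to reader index 1, C to reader index 0, and B maps to the sentinel value 2
+// (== readerSize_), so values of B are silently dropped by parse().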
+
+class UnionSkipper : public Resolver
+{
+  public:
+
+    UnionSkipper(ResolverFactory &factory, const NodePtr &writer);
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        DEBUG_OUT("Skipping union");
+        size_t choice = static_cast<size_t>(reader.readUnion());
+        resolvers_[choice].parse(reader, address);
+    }
+
+  protected:
+    
+    ResolverPtrVector resolvers_;
+};
+
+
+class UnionParser : public Resolver
+{
+  public:
+
+    typedef uint8_t *(*GenericUnionSetter)(uint8_t *, int64_t);
+
+    UnionParser(ResolverFactory &factory, const NodePtr &writer, const NodePtr &reader, const CompoundLayout &offsets);
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        DEBUG_OUT("Reading union");
+        size_t writerChoice = static_cast<size_t>(reader.readUnion());
+        int64_t *readerChoice = reinterpret_cast<int64_t *>(address + choiceOffset_);
+
+        *readerChoice = choiceMapping_[writerChoice];
+        GenericUnionSetter* setter = reinterpret_cast<GenericUnionSetter *> (address + setFuncOffset_);
+        uint8_t *value = reinterpret_cast<uint8_t *> (address + offset_);
+        uint8_t *location = (*setter)(value, *readerChoice);
+
+        resolvers_[writerChoice].parse(reader, location);
+    }
+
+  protected:
+    
+    ResolverPtrVector resolvers_;
+    std::vector<int64_t> choiceMapping_;
+    size_t offset_;
+    size_t choiceOffset_;
+    size_t setFuncOffset_;
+};
+
+class UnionToNonUnionParser : public Resolver
+{
+  public:
+
+    typedef uint8_t *(*GenericUnionSetter)(uint8_t *, int64_t);
+
+    UnionToNonUnionParser(ResolverFactory &factory, const NodePtr &writer, const NodePtr &reader, const Layout &offsets);
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        DEBUG_OUT("Reading union to non-union");
+        size_t choice = static_cast<size_t>(reader.readUnion());
+        resolvers_[choice].parse(reader, address);
+    }
+
+  protected:
+    
+    ResolverPtrVector resolvers_;
+};
+
+class NonUnionToUnionParser : public Resolver
+{
+  public:
+
+    typedef uint8_t *(*GenericUnionSetter)(uint8_t *, int64_t);
+
+    NonUnionToUnionParser(ResolverFactory &factory, const NodePtr &writer, const NodePtr &reader, const CompoundLayout &offsets);
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        DEBUG_OUT("Reading non-union to union");
+
+        int64_t *choice = reinterpret_cast<int64_t *>(address + choiceOffset_);
+        *choice = choice_;
+        GenericUnionSetter* setter = reinterpret_cast<GenericUnionSetter *> (address + setFuncOffset_);
+        uint8_t *value = reinterpret_cast<uint8_t *> (address + offset_);
+        uint8_t *location = (*setter)(value, choice_);
+
+        resolver_->parse(reader, location);
+    }
+
+  protected:
+    
+    ResolverPtr resolver_;
+    size_t choice_;
+    size_t offset_;
+    size_t choiceOffset_;
+    size_t setFuncOffset_;
+};
+
+class FixedSkipper : public Resolver
+{
+  public:
+
+    FixedSkipper(ResolverFactory &factory, const NodePtr &writer) :
+        Resolver() 
+    {
+        size_ = writer->fixedSize();
+    }
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        DEBUG_OUT("Skipping fixed");
+        boost::scoped_array<uint8_t> val(new uint8_t[size_]);
+        reader.readFixed(&val[0], size_);
+    }
+
+  protected:
+
+    int size_;
+    
+};
+
+class FixedParser : public Resolver
+{
+  public:
+
+    FixedParser(ResolverFactory &factory, const NodePtr &writer, const NodePtr &reader, const CompoundLayout &offsets) :
+        Resolver() 
+    {
+        size_ = writer->fixedSize();
+        offset_ = offsets.at(0).offset();
+    }
+
+    virtual void parse(Reader &reader, uint8_t *address) const
+    {
+        DEBUG_OUT("Reading fixed");
+        uint8_t *location = reinterpret_cast<uint8_t *> (address + offset_);
+        reader.readFixed(location, size_);
+    }
+
+  protected:
+
+    int size_;
+    size_t offset_;
+    
+};
+
+
+class ResolverFactory : private boost::noncopyable {
+
+    template<typename T>
+    Resolver*
+    constructPrimitiveSkipper(const NodePtr &writer) 
+    {
+        return new PrimitiveSkipper<T>();
+    }
+
+    template<typename T>
+    Resolver*
+    constructPrimitive(const NodePtr &writer, const NodePtr &reader, const Layout &offset)
+    {
+        Resolver *instruction = 0;
+
+        SchemaResolution match = writer->resolve(*reader);
+
+        if (match == RESOLVE_NO_MATCH) {
+            instruction = new PrimitiveSkipper<T>();
+        } 
+        else if (reader->type() == AVRO_UNION) {
+            const CompoundLayout &compoundLayout = static_cast<const CompoundLayout &>(offset);
+            instruction = new NonUnionToUnionParser(*this, writer, reader, compoundLayout);
+        }
+        else if (match == RESOLVE_MATCH) {
+            const PrimitiveLayout &primitiveLayout = static_cast<const PrimitiveLayout &>(offset);
+            instruction = new PrimitiveParser<T>(primitiveLayout);
+        }
+        else if(match == RESOLVE_PROMOTABLE_TO_LONG) {
+            const PrimitiveLayout &primitiveLayout = static_cast<const PrimitiveLayout &>(offset);
+            instruction = new PrimitivePromoter<T, int64_t>(primitiveLayout);
+        }
+        else if(match == RESOLVE_PROMOTABLE_TO_FLOAT) {
+            const PrimitiveLayout &primitiveLayout = static_cast<const PrimitiveLayout &>(offset);
+            instruction = new PrimitivePromoter<T, float>(primitiveLayout);
+        }
+        else if(match == RESOLVE_PROMOTABLE_TO_DOUBLE) {
+            const PrimitiveLayout &primitiveLayout = static_cast<const PrimitiveLayout &>(offset);
+            instruction = new PrimitivePromoter<T, double>(primitiveLayout);
+        }
+        else {
+            assert(0);
+        }
+        return instruction;
+    }
+
+    template<typename Skipper>
+    Resolver*
+    constructCompoundSkipper(const NodePtr &writer) 
+    {
+        return new Skipper(*this, writer);
+    }
+
+
+    template<typename Parser, typename Skipper>
+    Resolver*
+    constructCompound(const NodePtr &writer, const NodePtr &reader, const Layout &offset)
+    {
+        Resolver *instruction;
+
+        SchemaResolution match = RESOLVE_NO_MATCH;
+
+        match = writer->resolve(*reader);
+
+        if (match == RESOLVE_NO_MATCH) {
+            instruction = new Skipper(*this, writer);
+        }
+        else if(writer->type() != AVRO_UNION && reader->type() == AVRO_UNION) {
+            const CompoundLayout &compoundLayout = dynamic_cast<const CompoundLayout &>(offset);
+            instruction = new NonUnionToUnionParser(*this, writer, reader, compoundLayout);
+        }
+        else if(writer->type() == AVRO_UNION && reader->type() != AVRO_UNION) {
+            instruction = new UnionToNonUnionParser(*this, writer, reader, offset);
+        }
+        else {
+            const CompoundLayout &compoundLayout = dynamic_cast<const CompoundLayout &>(offset);
+            instruction = new Parser(*this, writer, reader, compoundLayout);
+        } 
+
+        return instruction;
+    }
+
+  public:
+
+    Resolver *
+    construct(const NodePtr &writer, const NodePtr &reader, const Layout &offset)
+    {
+
+        typedef Resolver* (ResolverFactory::*BuilderFunc)(const NodePtr &writer, const NodePtr &reader, const Layout &offset);
+
+        NodePtr currentWriter = (writer->type() == AVRO_SYMBOLIC) ?
+            resolveSymbol(writer) : writer;
+
+        NodePtr currentReader = (reader->type() == AVRO_SYMBOLIC) ?
+            resolveSymbol(reader) : reader;
+
+        static const BuilderFunc funcs[] = {
+            &ResolverFactory::constructPrimitive<std::string>, 
+            &ResolverFactory::constructPrimitive<std::vector<uint8_t> >,
+            &ResolverFactory::constructPrimitive<int32_t>,
+            &ResolverFactory::constructPrimitive<int64_t>,
+            &ResolverFactory::constructPrimitive<float>,
+            &ResolverFactory::constructPrimitive<double>,
+            &ResolverFactory::constructPrimitive<bool>,
+            &ResolverFactory::constructPrimitive<Null>,
+            &ResolverFactory::constructCompound<RecordParser, RecordSkipper>,
+            &ResolverFactory::constructCompound<EnumParser, EnumSkipper>,
+            &ResolverFactory::constructCompound<ArrayParser, ArraySkipper>,
+            &ResolverFactory::constructCompound<MapParser, MapSkipper>,
+            &ResolverFactory::constructCompound<UnionParser, UnionSkipper>,
+            &ResolverFactory::constructCompound<FixedParser, FixedSkipper>
+        };
+
+        BOOST_STATIC_ASSERT( (sizeof(funcs)/sizeof(BuilderFunc)) == (AVRO_NUM_TYPES) );
+
+        BuilderFunc func = funcs[currentWriter->type()];
+        assert(func);
+
+        return  ((this)->*(func))(currentWriter, currentReader, offset);
+    }
+
+    Resolver *
+    skipper(const NodePtr &writer) 
+    {
+
+        typedef Resolver* (ResolverFactory::*BuilderFunc)(const NodePtr &writer);
+
+        NodePtr currentWriter = (writer->type() == AVRO_SYMBOLIC) ?
+            writer->leafAt(0) : writer;
+
+        static const BuilderFunc funcs[] = {
+            &ResolverFactory::constructPrimitiveSkipper<std::string>, 
+            &ResolverFactory::constructPrimitiveSkipper<std::vector<uint8_t> >,
+            &ResolverFactory::constructPrimitiveSkipper<int32_t>,
+            &ResolverFactory::constructPrimitiveSkipper<int64_t>,
+            &ResolverFactory::constructPrimitiveSkipper<float>,
+            &ResolverFactory::constructPrimitiveSkipper<double>,
+            &ResolverFactory::constructPrimitiveSkipper<bool>,
+            &ResolverFactory::constructPrimitiveSkipper<Null>,
+            &ResolverFactory::constructCompoundSkipper<RecordSkipper>,
+            &ResolverFactory::constructCompoundSkipper<EnumSkipper>,
+            &ResolverFactory::constructCompoundSkipper<ArraySkipper>,
+            &ResolverFactory::constructCompoundSkipper<MapSkipper>,
+            &ResolverFactory::constructCompoundSkipper<UnionSkipper>,
+            &ResolverFactory::constructCompoundSkipper<FixedSkipper>
+        };
+
+        BOOST_STATIC_ASSERT( (sizeof(funcs)/sizeof(BuilderFunc)) == (AVRO_NUM_TYPES) );
+
+        BuilderFunc func = funcs[currentWriter->type()];
+        assert(func);
+
+        return  ((this)->*(func))(currentWriter);
+    }
+};
+
+
+RecordSkipper::RecordSkipper(ResolverFactory &factory, const NodePtr &writer) :
+    Resolver() 
+{
+    size_t leaves = writer->leaves();
+    resolvers_.reserve(leaves);
+    for(size_t i = 0; i < leaves; ++i) {
+        const NodePtr &w = writer->leafAt(i);
+        resolvers_.push_back(factory.skipper(w));
+    }
+}
+
+RecordParser::RecordParser(ResolverFactory &factory, const NodePtr &writer, const NodePtr &reader, const CompoundLayout &offsets) :
+    Resolver()
+{
+    size_t leaves = writer->leaves();
+    resolvers_.reserve(leaves);
+    for(size_t i = 0; i < leaves; ++i) {
+    
+        const NodePtr &w = writer->leafAt(i);
+
+        const std::string &name = writer->nameAt(i);
+
+        size_t readerIndex = 0;
+        bool found = reader->nameIndex(name, readerIndex);
+
+        if(found) {
+            const NodePtr &r = reader->leafAt(readerIndex);
+            resolvers_.push_back(factory.construct(w, r, offsets.at(readerIndex)));
+        }
+        else {
+            resolvers_.push_back(factory.skipper(w));
+        }
+    }
+}
+
+MapSkipper::MapSkipper(ResolverFactory &factory, const NodePtr &writer) :
+    Resolver(),
+    resolver_(factory.skipper(writer->leafAt(1)))
+{ }
+
+MapParser::MapParser(ResolverFactory &factory, const NodePtr &writer, const NodePtr &reader, const CompoundLayout &offsets) :
+    Resolver(),
+    resolver_(factory.construct(writer->leafAt(1), reader->leafAt(1), offsets.at(1))),
+    offset_(offsets.offset()),
+    setFuncOffset_( offsets.at(0).offset())
+{ }
+
+ArraySkipper::ArraySkipper(ResolverFactory &factory, const NodePtr &writer) :
+    Resolver(),
+    resolver_(factory.skipper(writer->leafAt(0)))
+{ }
+
+ArrayParser::ArrayParser(ResolverFactory &factory, const NodePtr &writer, const NodePtr &reader, const CompoundLayout &offsets) :
+    Resolver(),
+    resolver_(factory.construct(writer->leafAt(0), reader->leafAt(0), offsets.at(1))),
+    offset_(offsets.offset()),
+    setFuncOffset_(offsets.at(0).offset())
+{ }
+
+UnionSkipper::UnionSkipper(ResolverFactory &factory, const NodePtr &writer) :
+    Resolver() 
+{
+    size_t leaves = writer->leaves();
+    resolvers_.reserve(leaves);
+    for(size_t i = 0; i < leaves; ++i) {
+        const NodePtr &w = writer->leafAt(i);
+        resolvers_.push_back(factory.skipper(w));
+    }
+}
+
+namespace {
+
+// assumes the writer is NOT a union, and the reader IS a union
+
+SchemaResolution    
+checkUnionMatch(const NodePtr &writer, const NodePtr &reader, size_t &index)
+{
+    SchemaResolution bestMatch = RESOLVE_NO_MATCH;
+ 
+    index = 0;
+    size_t leaves = reader->leaves();
+
+    for(size_t i=0; i < leaves; ++i) {
+
+        const NodePtr &leaf = reader->leafAt(i);
+        SchemaResolution newMatch = writer->resolve(*leaf);
+
+        if(newMatch == RESOLVE_MATCH) {
+            bestMatch = newMatch;
+            index = i;
+            break;
+        }
+        if(bestMatch == RESOLVE_NO_MATCH) {
+            bestMatch = newMatch;
+            index = i;
+        }
+    }
+
+    return bestMatch;
+}
+
+} // anonymous namespace
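+
+// Example of the preference order in checkUnionMatch: matching a writer "int"
+// against a reader union ["null", "long", "int"] first records the promotable
+// match at index 1, then finds the exact match at index 2 and stops, so an
+// exact RESOLVE_MATCH always beats an earlier promotion.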
+
+UnionParser::UnionParser(ResolverFactory &factory, const NodePtr &writer, const NodePtr &reader, const CompoundLayout &offsets) :
+    Resolver(),
+    offset_(offsets.offset()),
+    choiceOffset_(offsets.at(0).offset()),
+    setFuncOffset_(offsets.at(1).offset())
+{
+
+    size_t leaves = writer->leaves();
+    resolvers_.reserve(leaves);
+    choiceMapping_.reserve(leaves);
+    for(size_t i = 0; i < leaves; ++i) {
+
+        // for each writer, we need a schema match for the reader
+        const NodePtr &w = writer->leafAt(i);
+        size_t index = 0;
+
+        SchemaResolution match = checkUnionMatch(w, reader, index);
+
+        if(match == RESOLVE_NO_MATCH) {
+            resolvers_.push_back(factory.skipper(w));
+            // push back an out-of-range index to mark "no match"
+            choiceMapping_.push_back(reader->leaves());
+        }
+        else {
+            const NodePtr &r = reader->leafAt(index);
+            resolvers_.push_back(factory.construct(w, r, offsets.at(index+2)));
+            choiceMapping_.push_back(index);
+        }
+    }
+}
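+
+// For instance, a writer union ["int", "string"] resolved against a reader
+// union ["string", "int"] yields choiceMapping_ == {1, 0}: each writer branch
+// is re-pointed at the reader branch it best matches, and parse() stores the
+// reader-side index before dispatching to the per-branch resolver.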
+
+NonUnionToUnionParser::NonUnionToUnionParser(ResolverFactory &factory, const NodePtr &writer, const NodePtr &reader, const CompoundLayout &offsets) :
+    Resolver(),
+    offset_(offsets.offset()),
+    choiceOffset_(offsets.at(0).offset()),
+    setFuncOffset_(offsets.at(1).offset())
+{
+#ifndef NDEBUG
+    SchemaResolution bestMatch =
+#endif
+    checkUnionMatch(writer, reader, choice_);
+    assert(bestMatch != RESOLVE_NO_MATCH);
+    resolver_.reset(factory.construct(writer, reader->leafAt(choice_), offsets.at(choice_+2)));
+}
+
+UnionToNonUnionParser::UnionToNonUnionParser(ResolverFactory &factory, const NodePtr &writer, const NodePtr &reader, const Layout &offsets) :
+    Resolver()
+{
+    size_t leaves = writer->leaves();
+    resolvers_.reserve(leaves);
+    for(size_t i = 0; i < leaves; ++i) {
+        const NodePtr &w = writer->leafAt(i);
+        resolvers_.push_back(factory.construct(w, reader, offsets));
+    }
+}
+
+Resolver *constructResolver(const ValidSchema &writerSchema,
+                                    const ValidSchema &readerSchema,
+                                    const Layout &readerLayout)
+{
+    ResolverFactory factory;
+    return factory.construct(writerSchema.root(), readerSchema.root(), readerLayout);
+}
+
+} // namespace avro
diff --git a/lang/c++/impl/ResolverSchema.cc b/lang/c++/impl/ResolverSchema.cc
new file mode 100644
index 0000000..e18ecc7
--- /dev/null
+++ b/lang/c++/impl/ResolverSchema.cc
@@ -0,0 +1,39 @@
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ResolverSchema.hh"
+#include "Resolver.hh"
+#include "ValidSchema.hh"
+
+namespace avro {
+
+ResolverSchema::ResolverSchema(
+        const ValidSchema &writerSchema, 
+        const ValidSchema &readerSchema, 
+        const Layout &readerLayout) :
+    resolver_(constructResolver(writerSchema, readerSchema, readerLayout))
+{ }
+
+void
+ResolverSchema::parse(Reader &reader, uint8_t *address) 
+{
+    resolver_->parse(reader, address);
+}
+
+} // namespace avro
diff --git a/lang/c++/impl/Schema.cc b/lang/c++/impl/Schema.cc
new file mode 100644
index 0000000..b5457ae
--- /dev/null
+++ b/lang/c++/impl/Schema.cc
@@ -0,0 +1,118 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include "Schema.hh"
+
+namespace avro {
+
+Schema::Schema() 
+{ }
+
+Schema::~Schema() 
+{ }
+
+Schema::Schema(const NodePtr &node) :
+    node_(node)
+{ }
+
+Schema::Schema(Node *node) :
+    node_(node)
+{ }
+
+RecordSchema::RecordSchema(const std::string &name) :
+    Schema(new NodeRecord)
+{
+    node_->setName(name);
+}
+
+void
+RecordSchema::addField(const std::string &name, const Schema &fieldSchema) 
+{
+    // add the name first. it will throw if the name is a duplicate, preventing
+    // the leaf from being added
+    node_->addName(name);
+
+    node_->addLeaf(fieldSchema.root());
+}
+
+EnumSchema::EnumSchema(const std::string &name) :
+    Schema(new NodeEnum)
+{
+    node_->setName(name);
+}
+
+void
+EnumSchema::addSymbol(const std::string &symbol)
+{
+    node_->addName(symbol);
+}
+
+ArraySchema::ArraySchema(const Schema &itemsSchema) :
+    Schema(new NodeArray)
+{
+    node_->addLeaf(itemsSchema.root());
+}
+
+MapSchema::MapSchema(const Schema &valuesSchema) :
+    Schema(new NodeMap)
+{
+    node_->addLeaf(valuesSchema.root());
+}
+
+UnionSchema::UnionSchema() :
+    Schema(new NodeUnion)
+{ }
+
+void
+UnionSchema::addType(const Schema &typeSchema) 
+{
+    if(typeSchema.type() == AVRO_UNION) {
+        throw Exception("Cannot add unions to unions");
+    }
+
+    if(typeSchema.type() == AVRO_RECORD) {
+        // check for duplicate records
+        size_t types = node_->leaves();
+        for(size_t i = 0; i < types; ++i) {
+            const NodePtr &leaf = node_->leafAt(i);
+            // TODO, more checks?
+            if(leaf->type() == AVRO_RECORD && leaf->name() == typeSchema.root()->name()) {
+                throw Exception("Records in unions cannot have duplicate names");
+            }
+        }
+    }
+
+    node_->addLeaf(typeSchema.root());
+}
+
+FixedSchema::FixedSchema(int size, const std::string &name) :
+    Schema(new NodeFixed)
+{
+    node_->setFixedSize(size);
+    node_->setName(name);
+}
+
+SymbolicSchema::SymbolicSchema(const Name &name, const NodePtr& link) :
+    Schema(new NodeSymbolic(HasName(name), link))
+{
+}
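+
+// Usage sketch (illustrative only; assumes the primitive builder classes such
+// as StringSchema and IntSchema declared alongside these in Schema.hh):
+//
+//   RecordSchema rec("Person");
+//   rec.addField("name", StringSchema());
+//   rec.addField("age", IntSchema());
+//   ValidSchema valid(rec);   // validates the tree and resolves names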
+
+
+
+} // namespace avro
diff --git a/lang/c++/impl/Stream.cc b/lang/c++/impl/Stream.cc
new file mode 100644
index 0000000..5da5edb
--- /dev/null
+++ b/lang/c++/impl/Stream.cc
@@ -0,0 +1,198 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Stream.hh"
+#include <vector>
+
+namespace avro {
+
+using std::vector;
+
+class MemoryInputStream : public InputStream {
+    const std::vector<uint8_t*>& data_;
+    const size_t chunkSize_;
+    const size_t size_;
+    const size_t available_;
+    size_t cur_;
+    size_t curLen_;
+
+    size_t maxLen() {
+        size_t n = (cur_ == (size_ - 1)) ? available_ : chunkSize_;
+        if (n == curLen_) {
+            if (cur_ == (size_ - 1)) {
+                return 0;
+            }
+            ++cur_;
+            n = (cur_ == (size_ - 1)) ? available_ : chunkSize_;
+            curLen_ = 0;
+        }
+        return n;
+    }
+
+public:
+    MemoryInputStream(const std::vector<uint8_t*>& b,
+        size_t chunkSize, size_t available) :
+            data_(b), chunkSize_(chunkSize), size_(b.size()),
+            available_(available), cur_(0), curLen_(0) { }
+
+    bool next(const uint8_t** data, size_t* len) {
+        if (size_t n = maxLen()) {
+            *data = data_[cur_] + curLen_;
+            *len = n - curLen_;
+            curLen_ = n;
+            return true;
+        }
+        return false;
+    }
+
+    void backup(size_t len) {
+        curLen_ -= len;
+    }
+
+    void skip(size_t len) {
+        while (len > 0) {
+            if (size_t n = maxLen()) {
+                if ((curLen_ + len) < n) {
+                    n = curLen_ + len;
+                }
+                len -= n - curLen_;
+                curLen_ = n;
+            } else {
+                break;
+            }
+        }
+    }
+
+    size_t byteCount() const {
+        return cur_ * chunkSize_ + curLen_;
+    }
+};
+
+class MemoryInputStream2 : public InputStream {
+    const uint8_t* const data_;
+    const size_t size_;
+    size_t curLen_;
+public:
+    MemoryInputStream2(const uint8_t *data, size_t len)
+        : data_(data), size_(len), curLen_(0) { }
+
+    bool next(const uint8_t** data, size_t* len) {
+        if (curLen_ == size_) {
+            return false;
+        }
+        *data = &data_[curLen_];
+        *len = size_ - curLen_;
+        curLen_ = size_;
+        return true;
+    }
+
+    void backup(size_t len) {
+        curLen_ -= len;
+    }
+
+    void skip(size_t len) {
+        if (len > (size_ - curLen_)) {
+            len = size_ - curLen_;
+        }
+        curLen_ += len;
+    }
+
+    size_t byteCount() const {
+        return curLen_;
+    }
+};
+
+class MemoryOutputStream : public OutputStream {
+public:
+    const size_t chunkSize_;
+    std::vector<uint8_t*> data_;
+    size_t available_;
+    size_t byteCount_;
+
+    MemoryOutputStream(size_t chunkSize) : chunkSize_(chunkSize),
+        available_(0), byteCount_(0) { }
+    ~MemoryOutputStream() {
+        for (std::vector<uint8_t*>::const_iterator it = data_.begin();
+            it != data_.end(); ++it) {
+            delete[] *it;
+        }
+    }
+
+    bool next(uint8_t** data, size_t* len) {
+        if (available_ == 0) {
+            data_.push_back(new uint8_t[chunkSize_]);
+            available_ = chunkSize_;
+        }
+        *data = &data_.back()[chunkSize_ - available_];
+        *len = available_;
+        byteCount_ += available_;
+        available_ = 0;
+        return true;
+    }
+
+    void backup(size_t len) {
+        available_ += len;
+        byteCount_ -= len;
+    }
+
+    uint64_t byteCount() const {
+        return byteCount_;
+    }
+
+    void flush() { }
+};
+
+std::auto_ptr<OutputStream> memoryOutputStream(size_t chunkSize)
+{
+    return std::auto_ptr<OutputStream>(new MemoryOutputStream(chunkSize));
+}
+
+std::auto_ptr<InputStream> memoryInputStream(const uint8_t* data, size_t len)
+{
+    return std::auto_ptr<InputStream>(new MemoryInputStream2(data, len));
+}
+
+std::auto_ptr<InputStream> memoryInputStream(const OutputStream& source)
+{
+    const MemoryOutputStream& mos =
+        dynamic_cast<const MemoryOutputStream&>(source);
+    return (mos.data_.empty()) ?
+        std::auto_ptr<InputStream>(new MemoryInputStream2(0, 0)) :
+        std::auto_ptr<InputStream>(new MemoryInputStream(mos.data_,
+            mos.chunkSize_,
+            (mos.chunkSize_ - mos.available_)));
+}
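+
+// Round-trip usage sketch (illustrative): bytes appended through an
+// OutputStream created by memoryOutputStream() can be replayed by handing the
+// same object to memoryInputStream():
+//
+//   std::auto_ptr<OutputStream> out = memoryOutputStream(4096);
+//   // ... fill *out via next()/backup() or an Encoder ...
+//   std::auto_ptr<InputStream> in = memoryInputStream(*out);
+//
+// Note that memoryInputStream(const OutputStream&) only accepts streams
+// created by memoryOutputStream(); the dynamic_cast above throws
+// std::bad_cast for anything else.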
+
+boost::shared_ptr<std::vector<uint8_t> > snapshot(const OutputStream& source)
+{
+    const MemoryOutputStream& mos =
+        dynamic_cast<const MemoryOutputStream&>(source);
+    boost::shared_ptr<std::vector<uint8_t> > result(new std::vector<uint8_t>());
+    size_t c = mos.byteCount_;
+    result->reserve(mos.byteCount_);
+    for (vector<uint8_t*>::const_iterator it = mos.data_.begin();
+        it != mos.data_.end(); ++it) {
+        size_t n = std::min(c, mos.chunkSize_);
+        std::copy(*it, *it + n, std::back_inserter(*result));
+        c -= n;
+    }
+    return result;
+}
+
+}   // namespace avro
+
diff --git a/lang/c++/impl/Types.cc b/lang/c++/impl/Types.cc
new file mode 100644
index 0000000..1578117
--- /dev/null
+++ b/lang/c++/impl/Types.cc
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+#include <string>
+#include <boost/static_assert.hpp>
+#include "Types.hh"
+
+namespace avro {
+
+namespace strings {
+const std::string typeToString[] = {
+    "string",
+    "bytes",
+    "int",
+    "long",
+    "float",
+    "double",
+    "boolean",
+    "null",
+    "record",
+    "enum",
+    "array",
+    "map",
+    "union",
+    "fixed",
+    "symbolic"
+};
+
+BOOST_STATIC_ASSERT( (sizeof(typeToString)/sizeof(std::string)) == (AVRO_NUM_TYPES+1) );
+
+} // namespace strings
+
+
+// this static assert exists because a 32-bit integer is used as a bit flag
+// for each type, which would break if we ever supported more than 32 types
+BOOST_STATIC_ASSERT( AVRO_NUM_TYPES < 32 );
+
+const std::string& toString(Type type)
+{
+    static std::string undefinedType = "Undefined type";
+    if (isAvroTypeOrPseudoType(type)) {
+        return strings::typeToString[type];
+    } else {
+        return undefinedType;
+    }
+}
+
+std::ostream &operator<< (std::ostream &os, Type type)
+{
+    if(isAvroTypeOrPseudoType(type)) {
+        os << strings::typeToString[type];
+    }
+    else {
+        os << static_cast<int>(type);
+    }
+    return os;
+}
+
+std::ostream &operator<< (std::ostream &os, const Null &)
+{
+    os << "(null value)";
+    return os;
+}
+
+} // namespace avro
+
diff --git a/lang/c++/impl/ValidSchema.cc b/lang/c++/impl/ValidSchema.cc
new file mode 100644
index 0000000..bd28079
--- /dev/null
+++ b/lang/c++/impl/ValidSchema.cc
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <boost/format.hpp>
+#include <sstream>
+
+#include "ValidSchema.hh"
+#include "Schema.hh"
+#include "Node.hh"
+
+using std::string;
+using std::make_pair;
+using boost::format;
+using boost::shared_ptr;
+using boost::static_pointer_cast;
+
+namespace avro {
+
+typedef std::map<Name, NodePtr> SymbolMap;
+
+static bool validate(const NodePtr &node, SymbolMap &symbolMap) 
+{
+    if (! node->isValid()) {
+        throw Exception(format("Schema is invalid, due to bad node of type %1%")
+            % node->type());
+    }
+
+    if (node->hasName()) {
+        const Name& nm = node->name();
+        SymbolMap::iterator it = symbolMap.find(nm);
+        bool found = it != symbolMap.end() && nm == it->first;
+
+        if (node->type() == AVRO_SYMBOLIC) {
+            if (! found) {
+                throw Exception(format("Symbolic name \"%1%\" is unknown") %
+                    node->name());
+            }
+
+            shared_ptr<NodeSymbolic> symNode =
+                static_pointer_cast<NodeSymbolic>(node);
+
+            // if the symbolic link is already resolved, we return true,
+            // otherwise returning false will force it to be resolved
+            return symNode->isSet();
+        }
+
+        if (found) {
+            return false;
+        }
+        symbolMap.insert(it, make_pair(nm, node));
+    }
+
+    node->lock();
+    size_t leaves = node->leaves();
+    for (size_t i = 0; i < leaves; ++i) {
+        const NodePtr &leaf(node->leafAt(i));
+
+        if (! validate(leaf, symbolMap)) {
+
+            // If validate() returns false, a node with this name already
+            // exists in the map. Instead of keeping this node twice (which
+            // could create circular shared-pointer links that could not be
+            // easily freed), replace it with a symbolic link to the
+            // original one.
+            
+            node->setLeafToSymbolic(i, symbolMap.find(leaf->name())->second);
+        }
+    }
+
+    return true;
+}
+
+static void validate(const NodePtr& p)
+{
+    SymbolMap m;
+    validate(p, m);
+}
+
+ValidSchema::ValidSchema(const NodePtr &root) : root_(root)
+{
+    validate(root_);
+}
+
+ValidSchema::ValidSchema(const Schema &schema) : root_(schema.root())
+{
+    validate(root_);
+}
+
+ValidSchema::ValidSchema() : root_(NullSchema().root()) 
+{
+    validate(root_);
+}
+
+void
+ValidSchema::setSchema(const Schema &schema)
+{
+    root_ = schema.root();
+    validate(root_);
+}
+
+void 
+ValidSchema::toJson(std::ostream &os) const
+{ 
+    root_->printJson(os, 0);
+    os << '\n';
+}
+
+void 
+ValidSchema::toFlatList(std::ostream &os) const
+{ 
+    root_->printBasicInfo(os);
+}
+
+} // namespace avro
+
diff --git a/lang/c++/impl/Validator.cc b/lang/c++/impl/Validator.cc
new file mode 100644
index 0000000..0a2c68e
--- /dev/null
+++ b/lang/c++/impl/Validator.cc
@@ -0,0 +1,301 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <boost/static_assert.hpp>
+
+#include "Validator.hh"
+#include "ValidSchema.hh"
+#include "NodeImpl.hh"
+
+namespace avro {
+
+Validator::Validator(const ValidSchema &schema) :
+    schema_(schema),
+    nextType_(AVRO_NULL),
+    expectedTypesFlag_(0),
+    compoundStarted_(false),
+    waitingForCount_(false),
+    count_(0)
+{
+    setupOperation(schema_.root());
+}
+
+void 
+Validator::setWaitingForCount()
+{
+    waitingForCount_ = true;
+    count_ = 0;
+    expectedTypesFlag_ = typeToFlag(AVRO_INT) | typeToFlag(AVRO_LONG);
+    nextType_ = AVRO_LONG;
+}
+
+void
+Validator::enumAdvance()
+{
+    if(compoundStarted_) {
+        setWaitingForCount();
+        compoundStarted_ = false;
+    }
+    else {
+        waitingForCount_ = false;
+        compoundStack_.pop_back();
+    }
+}
+
+bool 
+Validator::countingSetup()
+{
+    bool proceed = true;
+    if(compoundStarted_) {
+        setWaitingForCount();
+        compoundStarted_ = false;
+        proceed = false;
+    }
+    else if(waitingForCount_) {
+        waitingForCount_ = false;
+        if(count_ == 0) {
+            compoundStack_.pop_back();
+            proceed = false;
+        }
+        else {
+            counters_.push_back(static_cast<size_t>(count_));
+        }
+    }
+
+    return proceed;
+}
+
+void
+Validator::countingAdvance()
+{
+    if(countingSetup()) {
+    
+        size_t index = (compoundStack_.back().pos)++;
+        const NodePtr &node = compoundStack_.back().node;
+
+        if(index < node->leaves() ) {
+            setupOperation(node->leafAt(index));
+        }
+        else {
+            compoundStack_.back().pos = 0;
+            int count = --counters_.back();
+            if(count == 0) {
+                counters_.pop_back();
+                compoundStarted_ = true;
+                nextType_ = node->type();
+                expectedTypesFlag_ = typeToFlag(nextType_);
+            }
+            else {
+                size_t index = (compoundStack_.back().pos)++;
+                setupOperation(node->leafAt(index));
+            }
+        }
+    }
+}
+
+void
+Validator::unionAdvance()
+{
+    if(compoundStarted_) {
+        setWaitingForCount();
+        compoundStarted_ = false;
+    }
+    else {
+        waitingForCount_ = false;
+        NodePtr node = compoundStack_.back().node;
+
+        if(count_ < static_cast<int64_t>(node->leaves())) {
+            compoundStack_.pop_back();
+            setupOperation(node->leafAt(static_cast<int>(count_)));
+        }
+        else {
+            throw Exception(
+                boost::format("Union selection out of range, got %1%," \
+                    " expecting 0-%2%")
+                    % count_ % (node->leaves() -1) 
+            );
+        }
+    }
+}
+
+void
+Validator::fixedAdvance()
+{
+    compoundStarted_ = false;
+    compoundStack_.pop_back();
+}
+
+int 
+Validator::nextSizeExpected() const
+{
+    return compoundStack_.back().node->fixedSize();
+}
+
+void
+Validator::doAdvance()
+{
+    typedef void (Validator::*AdvanceFunc)();
+
+    // only the compound types need advance functions here
+    static const AdvanceFunc funcs[] = {
+        0, // string
+        0, // bytes
+        0, // int
+        0, // long
+        0, // float
+        0, // double
+        0, // bool
+        0, // null
+        &Validator::countingAdvance, // Record is treated like counting with count == 1
+        &Validator::enumAdvance,
+        &Validator::countingAdvance,
+        &Validator::countingAdvance,
+        &Validator::unionAdvance,
+        &Validator::fixedAdvance
+    };
+    BOOST_STATIC_ASSERT( (sizeof(funcs)/sizeof(AdvanceFunc)) == (AVRO_NUM_TYPES) );
+
+    expectedTypesFlag_ = 0;
+    // loop until we encounter a next expected type, or we've exited all compound types 
+    while(!expectedTypesFlag_ && !compoundStack_.empty() ) {
+    
+        Type type = compoundStack_.back().node->type();
+
+        AdvanceFunc func = funcs[type];
+
+        // only compound types are pushed onto the compound stack, so it is
+        // safe to assume that func is not null here
+        assert(func);
+
+        ((this)->*(func))();
+    }
+
+    if(compoundStack_.empty()) {
+        nextType_ = AVRO_NULL;
+    }
+}
+
+void Validator::advance()
+{
+    if(!waitingForCount_) {
+        doAdvance();
+    }
+}
+
+void
+Validator::setCount(int64_t count) 
+{
+    if(!waitingForCount_) {
+        throw Exception("Not expecting count");
+    }
+    else if(count < 0) {
+        throw Exception("Count cannot be negative");
+    }
+    count_ = count;
+
+    doAdvance();
+}
+
+void
+Validator::setupFlag(Type type)
+{
+    // use flags rather than exact types so that we can be lax about the
+    // writer's type (for example, a long field should accept an int, but not
+    // vice versa)
+    static const flag_t flags[] = {
+        typeToFlag(AVRO_STRING) | typeToFlag(AVRO_BYTES),
+        typeToFlag(AVRO_STRING) | typeToFlag(AVRO_BYTES),
+        typeToFlag(AVRO_INT),
+        typeToFlag(AVRO_INT) | typeToFlag(AVRO_LONG),
+        typeToFlag(AVRO_FLOAT),
+        typeToFlag(AVRO_DOUBLE),
+        typeToFlag(AVRO_BOOL),
+        typeToFlag(AVRO_NULL),
+        typeToFlag(AVRO_RECORD),
+        typeToFlag(AVRO_ENUM),
+        typeToFlag(AVRO_ARRAY),
+        typeToFlag(AVRO_MAP),
+        typeToFlag(AVRO_UNION),
+        typeToFlag(AVRO_FIXED)
+    };
+    BOOST_STATIC_ASSERT( (sizeof(flags)/sizeof(flag_t)) == (AVRO_NUM_TYPES) );
+
+    expectedTypesFlag_ = flags[type];
+}
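+
+// Example of the laxness encoded above: when a long is expected, the flag set
+// is typeToFlag(AVRO_INT) | typeToFlag(AVRO_LONG), so a writer may validly
+// supply an int where the schema says long, while an int slot accepts only
+// AVRO_INT.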
+
+void
+Validator::setupOperation(const NodePtr &node)
+{
+    nextType_ = node->type();
+
+    if(nextType_ == AVRO_SYMBOLIC) {
+        NodePtr actualNode = resolveSymbol(node);
+        assert(actualNode);
+        setupOperation(actualNode);
+        return;
+    }
+
+    assert(nextType_ < AVRO_SYMBOLIC);
+
+    setupFlag(nextType_);
+
+    if(!isPrimitive(nextType_)) {
+        compoundStack_.push_back(CompoundType(node));
+        compoundStarted_ = true;
+    }
+}
+
+bool 
+Validator::getCurrentRecordName(std::string &name) const
+{
+    bool found = false;
+    name.clear();
+
+    int idx = -1;
+    // if the top of the stack is a record I want this record name
+    if(!compoundStack_.empty() && (isPrimitive(nextType_) || nextType_ == AVRO_RECORD)) {
+        idx = compoundStack_.size() -1;
+    }
+    else {
+        idx = compoundStack_.size() -2;
+    }
+    
+    if(idx >= 0 && compoundStack_[idx].node->type() == AVRO_RECORD) {
+        name = compoundStack_[idx].node->name().simpleName();
+        found = true;
+    }
+    return found;
+}
+
+bool 
+Validator::getNextFieldName(std::string &name) const
+{
+    bool found = false;
+    name.clear();
+    int idx = isCompound(nextType_) ? compoundStack_.size()-2 : compoundStack_.size()-1;
+    if(idx >= 0 && compoundStack_[idx].node->type() == AVRO_RECORD) {
+        size_t pos = compoundStack_[idx].pos-1;
+        const NodePtr &node = compoundStack_[idx].node;
+        if(pos < node->leaves()) {
+            name = node->nameAt(pos);
+            found = true;
+        }
+    }
+    return found;
+}
+
+} // namespace avro
diff --git a/lang/c++/impl/Zigzag.cc b/lang/c++/impl/Zigzag.cc
new file mode 100644
index 0000000..b31f6b9
--- /dev/null
+++ b/lang/c++/impl/Zigzag.cc
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include "Zigzag.hh"
+
+namespace avro {
+
+uint64_t 
+encodeZigzag64(int64_t input)
+{
+    return ((input << 1) ^ (input >> 63));
+}
+
+int64_t 
+decodeZigzag64(uint64_t input)
+{
+    return static_cast<int64_t>(((input >> 1) ^ -(static_cast<int64_t>(input) & 1)));
+}
+
+uint32_t 
+encodeZigzag32(int32_t input)
+{
+    return ((input << 1) ^ (input >> 31));
+}
+
+int32_t 
+decodeZigzag32(uint32_t input)
+{
+    return static_cast<int32_t>(((input >> 1) ^ -(static_cast<int64_t>(input) & 1)));
+}
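+
+// The zigzag mapping interleaves signed values onto unsigned ones so that
+// small magnitudes stay small: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4, and
+// decodeZigzag*(encodeZigzag*(n)) == n for every representable n.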
+
+size_t 
+encodeInt64(int64_t input, boost::array<uint8_t, 10> &output)
+{
+    // get the zigzag encoding 
+    uint64_t val = encodeZigzag64(input);
+
+    // put values in an array of bytes with variable length encoding
+    const int mask = 0x7F;
+    output[0] = val & mask;
+    size_t bytesOut = 1;
+    while (val >>= 7) {
+        output[bytesOut-1] |= 0x80;
+        output[bytesOut++] = (val & mask);
+    }
+
+    return bytesOut;
+}
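+
+// Worked example of the base-128 varint layout above: encodeInt64(64, out)
+// zigzags 64 to 128, then emits two bytes, out[0] == 0x80 (low seven bits,
+// continuation bit set) and out[1] == 0x01, and returns 2.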
+
+size_t 
+encodeInt32(int32_t input, boost::array<uint8_t, 5> &output)
+{
+    // get the zigzag encoding 
+    uint32_t val = encodeZigzag32(input);
+
+    // put values in an array of bytes with variable length encoding
+    const int mask = 0x7F;
+    output[0] = val & mask;
+    size_t bytesOut = 1;
+    while (val >>= 7) {
+        output[bytesOut-1] |= 0x80;
+        output[bytesOut++] = (val & mask);
+    }
+
+    return bytesOut;
+}
+
+} // namespace avro
diff --git a/lang/c++/impl/avrogencpp.cc b/lang/c++/impl/avrogencpp.cc
new file mode 100644
index 0000000..a6a858d
--- /dev/null
+++ b/lang/c++/impl/avrogencpp.cc
@@ -0,0 +1,828 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <ctype.h>
+#include <time.h>
+#ifndef _WIN32
+#include <sys/time.h>
+#endif
+#include <iostream>
+#include <fstream>
+#include <map>
+#include <set>
+
+#include <boost/algorithm/string.hpp>
+#include <boost/lexical_cast.hpp>
+#include <boost/program_options.hpp>
+
+#include <boost/random/mersenne_twister.hpp>
+#include <boost/random/uniform_int.hpp>
+#include <boost/random/variate_generator.hpp>
+
+#include "Compiler.hh"
+#include "ValidSchema.hh"
+#include "NodeImpl.hh"
+
+using std::ostream;
+using std::ifstream;
+using std::ofstream;
+using std::map;
+using std::set;
+using std::string;
+using std::vector;
+using avro::NodePtr;
+using avro::resolveSymbol;
+
+using boost::lexical_cast;
+
+using avro::ValidSchema;
+using avro::compileJsonSchema;
+
+struct PendingSetterGetter {
+    string structName;
+    string type;
+    string name;
+    size_t idx;
+
+    PendingSetterGetter(const string& sn, const string& t,
+        const string& n, size_t i) :
+        structName(sn), type(t), name(n), idx(i) { }
+};
+
+struct PendingConstructor {
+    string structName;
+    string memberName;
+    bool initMember;
+    PendingConstructor(const string& sn, const string& n, bool im) :
+        structName(sn), memberName(n), initMember(im) { }
+};
+
+class CodeGen {
+    size_t unionNumber_;
+    std::ostream& os_;
+    bool inNamespace_;
+    const std::string ns_;
+    const std::string schemaFile_;
+    const std::string headerFile_;
+    const std::string includePrefix_;
+    const bool noUnion_;
+    const std::string guardString_;
+    boost::mt19937 random_;
+
+    vector<PendingSetterGetter> pendingGettersAndSetters;
+    vector<PendingConstructor> pendingConstructors;
+
+    map<NodePtr, string> done;
+    set<NodePtr> doing;
+
+    std::string guard();
+    std::string fullname(const string& name) const;
+    std::string generateEnumType(const NodePtr& n);
+    std::string cppTypeOf(const NodePtr& n);
+    std::string generateRecordType(const NodePtr& n);
+    std::string unionName();
+    std::string generateUnionType(const NodePtr& n);
+    std::string generateType(const NodePtr& n);
+    std::string generateDeclaration(const NodePtr& n);
+    std::string doGenerateType(const NodePtr& n);
+    void generateEnumTraits(const NodePtr& n);
+    void generateTraits(const NodePtr& n);
+    void generateRecordTraits(const NodePtr& n);
+    void generateUnionTraits(const NodePtr& n);
+    void emitCopyright();
+public:
+    CodeGen(std::ostream& os, const std::string& ns,
+        const std::string& schemaFile, const std::string& headerFile,
+        const std::string& guardString,
+        const std::string& includePrefix, bool noUnion) :
+        unionNumber_(0), os_(os), inNamespace_(false), ns_(ns),
+        schemaFile_(schemaFile), headerFile_(headerFile),
+        includePrefix_(includePrefix), noUnion_(noUnion),
+        guardString_(guardString),
+        random_(static_cast<uint32_t>(::time(0))) { }
+    void generate(const ValidSchema& schema);
+};
+
+static string decorate(const avro::Name& name)
+{
+    return name.simpleName();
+}
+
+string CodeGen::fullname(const string& name) const
+{
+    return ns_.empty() ? name : (ns_ + "::" + name);
+}
+
+string CodeGen::generateEnumType(const NodePtr& n)
+{
+    string s = decorate(n->name());
+    os_ << "enum " << s << " {\n";
+    size_t c = n->names();
+    for (size_t i = 0; i < c; ++i) {
+        os_ << "    " << n->nameAt(i) << ",\n";
+    }
+    os_ << "};\n\n";
+    return s;
+}
+
+string CodeGen::cppTypeOf(const NodePtr& n)
+{
+    switch (n->type()) {
+    case avro::AVRO_STRING:
+        return "std::string";
+    case avro::AVRO_BYTES:
+        return "std::vector<uint8_t>";
+    case avro::AVRO_INT:
+        return "int32_t";
+    case avro::AVRO_LONG:
+        return "int64_t";
+    case avro::AVRO_FLOAT:
+        return "float";
+    case avro::AVRO_DOUBLE:
+        return "double";
+    case avro::AVRO_BOOL:
+        return "bool";
+    case avro::AVRO_RECORD:
+    case avro::AVRO_ENUM:
+        {
+            string nm = decorate(n->name());
+            return inNamespace_ ? nm : fullname(nm);
+        }
+    case avro::AVRO_ARRAY:
+        return "std::vector<" + cppTypeOf(n->leafAt(0)) + " >";
+    case avro::AVRO_MAP:
+        return "std::map<std::string, " + cppTypeOf(n->leafAt(1)) + " >";
+    case avro::AVRO_FIXED:
+        return "boost::array<uint8_t, " +
+            lexical_cast<string>(n->fixedSize()) + ">";
+    case avro::AVRO_SYMBOLIC:
+        return cppTypeOf(resolveSymbol(n));
+    case avro::AVRO_UNION:
+        return fullname(done[n]);
+    default:
+        return "$Undefined$";
+    }
+}
+
+static string cppNameOf(const NodePtr& n)
+{
+    switch (n->type()) {
+    case avro::AVRO_NULL:
+        return "null";
+    case avro::AVRO_STRING:
+        return "string";
+    case avro::AVRO_BYTES:
+        return "bytes";
+    case avro::AVRO_INT:
+        return "int";
+    case avro::AVRO_LONG:
+        return "long";
+    case avro::AVRO_FLOAT:
+        return "float";
+    case avro::AVRO_DOUBLE:
+        return "double";
+    case avro::AVRO_BOOL:
+        return "bool";
+    case avro::AVRO_RECORD:
+    case avro::AVRO_ENUM:
+    case avro::AVRO_FIXED:
+        return decorate(n->name());
+    case avro::AVRO_ARRAY:
+        return "array";
+    case avro::AVRO_MAP:
+        return "map";
+    case avro::AVRO_SYMBOLIC:
+        return cppNameOf(resolveSymbol(n));
+    default:
+        return "$Undefined$";
+    }
+}
+
+string CodeGen::generateRecordType(const NodePtr& n)
+{
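+    // Generate the field types first; for a recursive schema this can emit
+    // this very record along the way, hence the 'done' lookup below before
+    // emitting anything.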
+    size_t c = n->leaves();
+    vector<string> types;
+    for (size_t i = 0; i < c; ++i) {
+        types.push_back(generateType(n->leafAt(i)));
+    }
+
+    map<NodePtr, string>::const_iterator it = done.find(n);
+    if (it != done.end()) {
+        return it->second;
+    }
+
+    string decoratedName = decorate(n->name());
+    os_ << "struct " << decoratedName << " {\n";
+    if (! noUnion_) {
+        for (size_t i = 0; i < c; ++i) {
+            if (n->leafAt(i)->type() == avro::AVRO_UNION) {
+                os_ << "    typedef " << types[i]
+                    << ' ' << n->nameAt(i) << "_t;\n";
+            }
+        }
+    }
+    for (size_t i = 0; i < c; ++i) {
+        if (! noUnion_ && n->leafAt(i)->type() == avro::AVRO_UNION) {
+            os_ << "    " << n->nameAt(i) << "_t";
+        } else {
+            os_ << "    " << types[i];
+        }
+        os_ << ' ' << n->nameAt(i) << ";\n";
+    }
+
+    os_ << "    " << decoratedName << "()";
+    if (c > 0) {
+        os_ << " :";
+    }
+    os_ << "\n";
+    for (size_t i = 0; i < c; ++i) {
+        os_ << "        " << n->nameAt(i) << "(";
+        if (! noUnion_ && n->leafAt(i)->type() == avro::AVRO_UNION) {
+            os_ << n->nameAt(i) << "_t";
+        } else {
+            os_ << types[i];
+        }
+        os_ << "())";
+        if (i != (c - 1)) {
+            os_ << ',';
+        }
+        os_ << "\n";
+    }
+    os_ << "        { }\n";
+    os_ << "};\n\n";
+    return decorate(n->name());
+}
+
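+// Rewrites s in place so it can serve as a C++ identifier or macro name:
+// every character that is not a letter or digit becomes '_', and letters
+// are upper-cased when foldCase is set.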
+void makeCanonical(string& s, bool foldCase)
+{
+    for (string::iterator it = s.begin(); it != s.end(); ++it) {
+        if (isalpha(*it)) {
+            if (foldCase) {
+                *it = toupper(*it);
+            }
+        } else if (! isdigit(*it)) {
+            *it = '_';
+        }
+    }
+}
+
+string CodeGen::unionName()
+{
+    string s = schemaFile_;
+    string::size_type n = s.find_last_of("/\\");
+    if (n != string::npos) {
+        s = s.substr(n);
+    }
+    makeCanonical(s, false);
+
+    return s + "_Union__" + boost::lexical_cast<string>(unionNumber_++) + "__";
+}
+
+static void generateGetterAndSetter(ostream& os,
+    const string& structName, const string& type, const string& name,
+    size_t idx)
+{
+    string sn = " " + structName + "::";
+
+    os << "inline\n";
+
+    os << type << sn << "get_" << name << "() const {\n"
+        << "    if (idx_ != " << idx << ") {\n"
+        << "        throw avro::Exception(\"Invalid type for "
+            << "union\");\n"
+        << "    }\n"
+        << "    return boost::any_cast<" << type << " >(value_);\n"
+        << "}\n\n";
+
+    os << "inline\n"
+        << "void" << sn << "set_" << name
+        << "(const " << type << "& v) {\n"
+        << "    idx_ = " << idx << ";\n"
+        << "    value_ = v;\n"
+        << "}\n\n";
+}
+
+static void generateConstructor(ostream& os,
+    const string& structName, bool initMember,
+    const string& type) {
+    os << "inline " << structName  << "::" << structName << "() : idx_(0)";
+    if (initMember) {
+        os << ", value_(" << type << "())";
+    }
+    os << " { }\n";
+}
+
+/**
+ * Generates a type for the given union and emits the code.
+ * Since a union can reference names that are not yet fully defined, such
+ * names are forward-declared here and the corresponding inline functions
+ * are deferred until all types are fully defined.
+ */
+string CodeGen::generateUnionType(const NodePtr& n)
+{
+    size_t c = n->leaves();
+    vector<string> types;
+    vector<string> names;
+
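+    // 'doing' holds unions currently being generated; if this union is
+    // re-entered recursively, emit forward declarations only, to break
+    // the cycle.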
+    set<NodePtr>::const_iterator it = doing.find(n);
+    if (it != doing.end()) {
+        for (size_t i = 0; i < c; ++i) {
+            const NodePtr& nn = n->leafAt(i);
+            types.push_back(generateDeclaration(nn));
+            names.push_back(cppNameOf(nn));
+        }
+    } else {
+        doing.insert(n);
+        for (size_t i = 0; i < c; ++i) {
+            const NodePtr& nn = n->leafAt(i);
+            types.push_back(generateType(nn));
+            names.push_back(cppNameOf(nn));
+        }
+        doing.erase(n);
+    }
+    if (done.find(n) != done.end()) {
+        return done[n];
+    }
+
+    const string result = unionName();
+
+    os_ << "struct " << result << " {\n"
+        << "private:\n"
+        << "    size_t idx_;\n"
+        << "    boost::any value_;\n"
+        << "public:\n"
+        << "    size_t idx() const { return idx_; }\n";
+
+    for (size_t i = 0; i < c; ++i) {
+        const NodePtr& nn = n->leafAt(i);
+        if (nn->type() == avro::AVRO_NULL) {
+            os_ << "    bool is_null() const {\n"
+                << "        return (idx_ == " << i << ");\n"
+                << "    }\n"
+                << "    void set_null() {\n"
+                << "        idx_ = " << i << ";\n"
+                << "        value_ = boost::any();\n"
+                << "    }\n";
+        } else {
+            const string& type = types[i];
+            const string& name = names[i];
+            os_ << "    " << type << " get_" << name << "() const;\n"
+                   "    void set_" << name << "(const " << type << "& v);\n";
+            pendingGettersAndSetters.push_back(
+                PendingSetterGetter(result, type, name, i));
+        }
+    }
+
+    os_ << "    " << result << "();\n";
+    pendingConstructors.push_back(PendingConstructor(result, types[0],
+        n->leafAt(0)->type() != avro::AVRO_NULL));
+    os_ << "};\n\n";
+    
+    return result;
+}
+
+/**
+ * Returns the C++ type for the given schema node and emits any needed code to os_.
+ */
+string CodeGen::generateType(const NodePtr& n)
+{
+    NodePtr nn = (n->type() == avro::AVRO_SYMBOLIC) ?  resolveSymbol(n) : n;
+
+    map<NodePtr, string>::const_iterator it = done.find(nn);
+    if (it != done.end()) {
+        return it->second;
+    }
+    string result = doGenerateType(nn);
+    done[nn] = result;
+    return result;
+}
+
+string CodeGen::doGenerateType(const NodePtr& n)
+{
+    switch (n->type()) {
+    case avro::AVRO_STRING:
+    case avro::AVRO_BYTES:
+    case avro::AVRO_INT:
+    case avro::AVRO_LONG:
+    case avro::AVRO_FLOAT:
+    case avro::AVRO_DOUBLE:
+    case avro::AVRO_BOOL:
+    case avro::AVRO_NULL:
+    case avro::AVRO_FIXED:
+        return cppTypeOf(n);
+    case avro::AVRO_ARRAY:
+        return "std::vector<" + generateType(n->leafAt(0)) + " >";
+    case avro::AVRO_MAP:
+        return "std::map<std::string, " + generateType(n->leafAt(1)) + " >";
+    case avro::AVRO_RECORD:
+        return generateRecordType(n);
+    case avro::AVRO_ENUM:
+        return generateEnumType(n);
+    case avro::AVRO_UNION:
+        return generateUnionType(n);
+    default:
+        break;
+    }
+    return "$Undefuned$";
+}
+
+string CodeGen::generateDeclaration(const NodePtr& n)
+{
+    NodePtr nn = (n->type() == avro::AVRO_SYMBOLIC) ?  resolveSymbol(n) : n;
+    switch (nn->type()) {
+    case avro::AVRO_STRING:
+    case avro::AVRO_BYTES:
+    case avro::AVRO_INT:
+    case avro::AVRO_LONG:
+    case avro::AVRO_FLOAT:
+    case avro::AVRO_DOUBLE:
+    case avro::AVRO_BOOL:
+    case avro::AVRO_NULL:
+    case avro::AVRO_FIXED:
+        return cppTypeOf(nn);
+    case avro::AVRO_ARRAY:
+        return "std::vector<" + generateDeclaration(nn->leafAt(0)) + " >";
+    case avro::AVRO_MAP:
+        return "std::map<std::string, " +
+            generateDeclaration(nn->leafAt(1)) + " >";
+    case avro::AVRO_RECORD:
+        os_ << "struct " << cppTypeOf(nn) << ";\n";
+        return cppTypeOf(nn);
+    case avro::AVRO_ENUM:
+        return generateEnumType(nn);
+    case avro::AVRO_UNION:
+        // FIXME: When can this happen?
+        return generateUnionType(nn);
+    default:
+        break;
+    }
+    return "$Undefuned$";
+}
+
+void CodeGen::generateEnumTraits(const NodePtr& n)
+{
+    string dname = decorate(n->name());
+    string fn = fullname(dname);
+    size_t c = n->names();
+    string first;
+    string last;
+    if (!ns_.empty()) {
+        first = ns_ + "::" + n->nameAt(0);
+        last = ns_ + "::" + n->nameAt(c - 1);
+    } else {
+        first = n->nameAt(0);
+        last = n->nameAt(c - 1);
+    }
+    os_ << "template<> struct codec_traits<" << fn << "> {\n"
+        << "    static void encode(Encoder& e, " << fn << " v) {\n"
+        << "        if (v < " << first << " || v > " << last << ") {\n"
+        << "            std::ostringstream error;\n"
+        << "            error << \"enum value \" << v << \" is out of bounds for " << fn << " and cannot be encoded\";\n"
+        << "            throw avro::Exception(error.str());\n"
+        << "        }\n"
+        << "        e.encodeEnum(v);\n"
+        << "    }\n"
+        << "    static void decode(Decoder& d, " << fn << "& v) {\n"
+        << "        size_t index = d.decodeEnum();\n"
+        << "        if (index < " << first << " || index > " << last << ") {\n"
+        << "            std::ostringstream error;\n"
+        << "            error << \"enum value \" << index << \" is out of bounds for " << fn << " and cannot be decoded\";\n"
+        << "            throw avro::Exception(error.str());\n"
+        << "        }\n"
+        << "        v = static_cast<" << fn << ">(index);\n"
+        << "    }\n"
+        << "};\n\n";
+}
+
+void CodeGen::generateRecordTraits(const NodePtr& n)
+{
+    size_t c = n->leaves();
+    for (size_t i = 0; i < c; ++i) {
+        generateTraits(n->leafAt(i));
+    }
+
+    string fn = fullname(decorate(n->name()));
+    os_ << "template<> struct codec_traits<" << fn << "> {\n"
+        << "    static void encode(Encoder& e, const " << fn << "& v) {\n";
+
+    for (size_t i = 0; i < c; ++i) {
+        os_ << "        avro::encode(e, v." << n->nameAt(i) << ");\n";
+    }
+
+    os_ << "    }\n"
+        << "    static void decode(Decoder& d, " << fn << "& v) {\n";
+    os_ << "        if (avro::ResolvingDecoder *rd =\n";
+    os_ << "            dynamic_cast<avro::ResolvingDecoder *>(&d)) {\n";
+    os_ << "            const std::vector<size_t> fo = rd->fieldOrder();\n";
+    os_ << "            for (std::vector<size_t>::const_iterator it = fo.begin();\n";
+    os_ << "                it != fo.end(); ++it) {\n";
+    os_ << "                switch (*it) {\n";
+    for (size_t i = 0; i < c; ++i) {
+        os_ << "                case " << i << ":\n";
+        os_ << "                    avro::decode(d, v." << n->nameAt(i) << ");\n";
+        os_ << "                    break;\n";
+    }
+    os_ << "                default:\n";
+    os_ << "                    break;\n";
+    os_ << "                }\n";
+    os_ << "            }\n";
+    os_ << "        } else {\n";
+
+    for (size_t i = 0; i < c; ++i) {
+        os_ << "            avro::decode(d, v." << n->nameAt(i) << ");\n";
+    }
+    os_ << "        }\n";
+
+    os_ << "    }\n"
+        << "};\n\n";
+}
+
+void CodeGen::generateUnionTraits(const NodePtr& n)
+{
+    size_t c = n->leaves();
+
+    for (size_t i = 0; i < c; ++i) {
+        const NodePtr& nn = n->leafAt(i);
+        generateTraits(nn);
+    }
+
+    string name = done[n];
+    string fn = fullname(name);
+
+    os_ << "template<> struct codec_traits<" << fn << "> {\n"
+        << "    static void encode(Encoder& e, " << fn << " v) {\n"
+        << "        e.encodeUnionIndex(v.idx());\n"
+        << "        switch (v.idx()) {\n";
+
+    for (size_t i = 0; i < c; ++i) {
+        const NodePtr& nn = n->leafAt(i);
+        os_ << "        case " << i << ":\n";
+        if (nn->type() == avro::AVRO_NULL) {
+            os_ << "            e.encodeNull();\n";
+        } else {
+            os_ << "            avro::encode(e, v.get_" << cppNameOf(nn)
+                << "());\n";
+        }
+        os_ << "            break;\n";
+    }
+
+    os_ << "        }\n"
+        << "    }\n"
+        << "    static void decode(Decoder& d, " << fn << "& v) {\n"
+        << "        size_t n = d.decodeUnionIndex();\n"
+        << "        if (n >= " << c << ") { throw avro::Exception(\""
+            "Union index too big\"); }\n"
+        << "        switch (n) {\n";
+
+    for (size_t i = 0; i < c; ++i) {
+        const NodePtr& nn = n->leafAt(i);
+        os_ << "        case " << i << ":\n";
+        if (nn->type() == avro::AVRO_NULL) {
+            os_ << "            d.decodeNull();\n"
+                << "            v.set_null();\n";
+        } else {
+            os_ << "            {\n"
+                << "                " << cppTypeOf(nn) << " vv;\n"
+                << "                avro::decode(d, vv);\n"
+                << "                v.set_" << cppNameOf(nn) << "(vv);\n"
+                << "            }\n";
+        }
+        os_ << "            break;\n";
+    }
+    os_ << "        }\n"
+        << "    }\n"
+        << "};\n\n";
+}
+
+void CodeGen::generateTraits(const NodePtr& n)
+{
+    switch (n->type()) {
+    case avro::AVRO_STRING:
+    case avro::AVRO_BYTES:
+    case avro::AVRO_INT:
+    case avro::AVRO_LONG:
+    case avro::AVRO_FLOAT:
+    case avro::AVRO_DOUBLE:
+    case avro::AVRO_BOOL:
+    case avro::AVRO_NULL:
+        break;
+    case avro::AVRO_RECORD:
+        generateRecordTraits(n);
+        break;
+    case avro::AVRO_ENUM:
+        generateEnumTraits(n);
+        break;
+    case avro::AVRO_ARRAY:
+    case avro::AVRO_MAP:
+        generateTraits(n->leafAt(n->type() == avro::AVRO_ARRAY ? 0 : 1));
+        break;
+    case avro::AVRO_UNION:
+        generateUnionTraits(n);
+        break;
+    case avro::AVRO_FIXED:
+        break;
+    default:
+        break;
+    }
+}
+
+void CodeGen::emitCopyright()
+{
+    os_ << 
+        "/**\n"
+        " * Licensed to the Apache Software Foundation (ASF) under one\n"
+        " * or more contributor license agreements.  See the NOTICE file\n"
+        " * distributed with this work for additional information\n"
+        " * regarding copyright ownership.  The ASF licenses this file\n"
+        " * to you under the Apache License, Version 2.0 (the\n"
+        " * \"License\"); you may not use this file except in compliance\n"
+        " * with the License.  You may obtain a copy of the License at\n"
+        " *\n"
+        " *     http://www.apache.org/licenses/LICENSE-2.0\n"
+        " *\n"
+        " * Unless required by applicable law or agreed to in writing, "
+            "software\n"
+        " * distributed under the License is distributed on an "
+            "\"AS IS\" BASIS,\n"
+        " * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express "
+            "or implied.\n"
+        " * See the License for the specific language governing "
+            "permissions and\n"
+        " * limitations under the License.\n"
+        " */\n\n\n";
+}
+
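+// Fallback include guard, used when none could be recovered from an existing
+// header: the canonicalized header file name plus a random suffix.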
+string CodeGen::guard()
+{
+    string h = headerFile_;
+    makeCanonical(h, true);
+    return h + "_" + lexical_cast<string>(random_()) + "__H_";
+}
+
+void CodeGen::generate(const ValidSchema& schema)
+{
+    emitCopyright();
+
+    string h = guardString_.empty() ? guard() : guardString_;
+
+    os_ << "#ifndef " << h << "\n";
+    os_ << "#define " << h << "\n\n\n";
+
+    os_ << "#include <sstream>\n"
+        << "#include \"boost/any.hpp\"\n"
+        << "#include \"" << includePrefix_ << "Specific.hh\"\n"
+        << "#include \"" << includePrefix_ << "Encoder.hh\"\n"
+        << "#include \"" << includePrefix_ << "Decoder.hh\"\n"
+        << "\n";
+
+    if (! ns_.empty()) {
+        os_ << "namespace " << ns_ << " {\n";
+        inNamespace_ = true;
+    }
+
+    const NodePtr& root = schema.root();
+    generateType(root);
+
+    for (vector<PendingSetterGetter>::const_iterator it =
+        pendingGettersAndSetters.begin();
+        it != pendingGettersAndSetters.end(); ++it) {
+        generateGetterAndSetter(os_, it->structName, it->type, it->name,
+            it->idx);
+    }
+
+    for (vector<PendingConstructor>::const_iterator it =
+        pendingConstructors.begin();
+        it != pendingConstructors.end(); ++it) {
+        generateConstructor(os_, it->structName,
+            it->initMember, it->memberName);
+    }
+
+    if (! ns_.empty()) {
+        inNamespace_ = false;
+        os_ << "}\n";
+    }
+
+    os_ << "namespace avro {\n";
+
+    unionNumber_ = 0;
+
+    generateTraits(root);
+
+    os_ << "}\n";
+
+    os_ << "#endif\n";
+    os_.flush();
+
+}
+
+namespace po = boost::program_options;
+
+static const string NS("namespace");
+static const string OUT("output");
+static const string IN("input");
+static const string INCLUDE_PREFIX("include-prefix");
+static const string NO_UNION_TYPEDEF("no-union-typedef");
+
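+// Recovers the include-guard macro from a previously generated header by
+// finding the first matching "#ifndef X" / "#define X" pair, so that
+// regenerating the file keeps its guard stable.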
+static string readGuard(const string& filename)
+{
+    std::ifstream ifs(filename.c_str());
+    string buf;
+    string candidate;
+    while (std::getline(ifs, buf)) {
+        boost::algorithm::trim(buf);
+        if (candidate.empty()) {
+            if (boost::algorithm::starts_with(buf, "#ifndef ")) {
+                candidate = buf.substr(8);
+            }
+        } else if (boost::algorithm::starts_with(buf, "#define ")) {
+            if (candidate == buf.substr(8)) {
+                break;
+            }
+        } else {
+            candidate.erase();
+        }
+    }
+    return candidate;
+}
+
+int main(int argc, char** argv)
+{
+    po::options_description desc("Allowed options");
+    desc.add_options()
+        ("help,h", "produce help message")
+        ("include-prefix,p", po::value<string>()->default_value("avro"),
+            "prefix for include headers, - for none, default: avro")
+        ("no-union-typedef,U", "do not generate typedefs for unions in records")
+        ("namespace,n", po::value<string>(), "set namespace for generated code")
+        ("input,i", po::value<string>(), "input file")
+        ("output,o", po::value<string>(), "output file to generate");
+
+    po::variables_map vm;
+    po::store(po::parse_command_line(argc, argv, desc), vm);
+    po::notify(vm);
+
+
+    if (vm.count("help") || vm.count(IN) == 0 || vm.count(OUT) == 0) {
+        std::cout << desc << std::endl;
+        return 1;
+    }
+
+    string ns = vm.count(NS) > 0 ? vm[NS].as<string>() : string();
+    string outf = vm.count(OUT) > 0 ? vm[OUT].as<string>() : string();
+    string inf = vm.count(IN) > 0 ? vm[IN].as<string>() : string();
+    string incPrefix = vm[INCLUDE_PREFIX].as<string>();
+    bool noUnion = vm.count(NO_UNION_TYPEDEF) != 0;
+    if (incPrefix == "-") {
+        incPrefix.clear();
+    } else if (!incPrefix.empty() && *incPrefix.rbegin() != '/') {
+        incPrefix += "/";
+    }
+
+    try {
+        ValidSchema schema;
+
+        if (! inf.empty()) {
+            ifstream in(inf.c_str());
+            compileJsonSchema(in, schema);
+        } else {
+            compileJsonSchema(std::cin, schema);
+        }
+
+        if (! outf.empty()) {
+            string g = readGuard(outf);
+            ofstream out(outf.c_str());
+            CodeGen(out, ns, inf, outf, g, incPrefix, noUnion).generate(schema);
+        } else {
+            CodeGen(std::cout, ns, inf, outf, "", incPrefix, noUnion).
+                generate(schema);
+        }
+        return 0;
+    } catch (std::exception &e) {
+        std::cerr << "Failed to parse or compile schema: "
+            << e.what() << std::endl;
+        return 1;
+    }
+
+}
diff --git a/lang/c++/impl/json/JsonDom.cc b/lang/c++/impl/json/JsonDom.cc
new file mode 100644
index 0000000..4c70b32
--- /dev/null
+++ b/lang/c++/impl/json/JsonDom.cc
@@ -0,0 +1,193 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "JsonDom.hh"
+
+#include <stdexcept>
+
+#include <string.h>
+#include <boost/make_shared.hpp>
+
+#include "Stream.hh"
+#include "JsonIO.hh"
+
+using std::string;
+using boost::format;
+
+namespace avro {
+namespace json {
+static const char* typeToString(EntityType t)
+{
+    switch (t) {
+    case etNull: return "null";
+    case etBool: return "bool";
+    case etLong: return "long";
+    case etDouble: return "double";
+    case etString: return "string";
+    case etArray: return "array";
+    case etObject: return "object";
+    default: return "unknown";
+    }
+}
+
+Entity readEntity(JsonParser& p)
+{
+    switch (p.peek()) {
+    case JsonParser::tkNull:
+        p.advance();
+        return Entity();
+    case JsonParser::tkBool:
+        p.advance();
+        return Entity(p.boolValue());
+    case JsonParser::tkLong:
+        p.advance();
+        return Entity(p.longValue());
+    case JsonParser::tkDouble:
+        p.advance();
+        return Entity(p.doubleValue());
+    case JsonParser::tkString:
+        p.advance();
+        return Entity(boost::make_shared<String>(p.stringValue()));
+    case JsonParser::tkArrayStart:
+        {
+            p.advance();
+            boost::shared_ptr<Array> v = boost::make_shared<Array>();
+            while (p.peek() != JsonParser::tkArrayEnd) {
+                v->push_back(readEntity(p));
+            }
+            p.advance();
+            return Entity(v);
+        }
+    case JsonParser::tkObjectStart:
+        {
+            p.advance();
+            boost::shared_ptr<Object> v = boost::make_shared<Object>();
+            while (p.peek() != JsonParser::tkObjectEnd) {
+                p.advance();
+                std::string k = p.stringValue();
+                Entity n = readEntity(p);
+                v->insert(std::make_pair(k, n));
+            }
+            p.advance();
+            return Entity(v);
+        }
+    default:
+        throw std::domain_error(JsonParser::toString(p.peek()));
+    }
+    
+}
+
+Entity loadEntity(const char* text)
+{
+    return loadEntity(reinterpret_cast<const uint8_t*>(text), ::strlen(text));
+}
+
+Entity loadEntity(InputStream& in)
+{
+    JsonParser p;
+    p.init(in);
+    return readEntity(p);
+}
+
+Entity loadEntity(const uint8_t* text, size_t len)
+{
+    std::auto_ptr<InputStream> in = memoryInputStream(text, len);
+    return loadEntity(*in);
+}
+
+void writeEntity(JsonGenerator& g, const Entity& n)
+{
+    switch (n.type()) {
+    case etNull:
+        g.encodeNull();
+        break;
+    case etBool:
+        g.encodeBool(n.boolValue());
+        break;
+    case etLong:
+        g.encodeNumber(n.longValue());
+        break;
+    case etDouble:
+        g.encodeNumber(n.doubleValue());
+        break;
+    case etString:
+        g.encodeString(n.stringValue());
+        break;
+    case etArray:
+        {
+            g.arrayStart();
+            const Array& v = n.arrayValue();
+            for (Array::const_iterator it = v.begin();
+                it != v.end(); ++it) {
+                writeEntity(g, *it);
+            }
+            g.arrayEnd();
+        }
+        break;
+    case etObject:
+        {
+            g.objectStart();
+            const Object& v = n.objectValue();
+            for (Object::const_iterator it = v.begin(); it != v.end(); ++it) {
+                g.encodeString(it->first);
+                writeEntity(g, it->second);
+            }
+            g.objectEnd();
+        }
+        break;
+    }
+}
+
+void Entity::ensureType(EntityType type) const
+{
+    if (type_ != type) {
+        format msg = format("Invalid type. Expected \"%1%\", actual \"%2%\"") %
+            typeToString(type) % typeToString(type_);
+        throw Exception(msg);
+    }
+}
+    
+
+std::string Entity::toString() const
+{
+    std::auto_ptr<OutputStream> out = memoryOutputStream();
+    JsonGenerator g;
+    g.init(*out);
+    writeEntity(g, *this);
+    g.flush();
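+    // Two passes over the serialized output: the first computes the total
+    // size, the second copies the bytes into the result string.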
+    std::auto_ptr<InputStream> in = memoryInputStream(*out);
+    const uint8_t *p = 0;
+    size_t n = 0;
+    size_t c = 0;
+    while (in->next(&p, &n)) {
+        c += n;
+    }
+    std::string result;
+    result.resize(c);
+    c = 0;
+    std::auto_ptr<InputStream> in2 = memoryInputStream(*out);
+    while (in2->next(&p, &n)) {
+        ::memcpy(&result[c], p, n);
+        c += n;
+    }
+    return result;
+}
+
+}
+}
+
diff --git a/lang/c++/impl/json/JsonDom.hh b/lang/c++/impl/json/JsonDom.hh
new file mode 100644
index 0000000..ae99d4f
--- /dev/null
+++ b/lang/c++/impl/json/JsonDom.hh
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_json_JsonDom_hh__
+#define avro_json_JsonDom_hh__
+
+#include <iostream>
+#include <stdint.h>
+#include <map>
+#include <string>
+#include <vector>
+#include <boost/shared_ptr.hpp>
+
+#include "boost/any.hpp"
+#include "Config.hh"
+
+namespace avro {
+
+class AVRO_DECL InputStream;
+
+namespace json {
+class Entity;
+    
+typedef bool Bool;
+typedef int64_t Long;
+typedef double Double;
+typedef std::string String;
+typedef std::vector<Entity> Array;
+typedef std::map<std::string, Entity> Object;
+    
+class AVRO_DECL JsonParser;
+class AVRO_DECL JsonGenerator;
+
+enum EntityType {
+    etNull,
+    etBool,
+    etLong,
+    etDouble,
+    etString,
+    etArray,
+    etObject
+};
+
+class AVRO_DECL Entity {
+    EntityType type_;
+    boost::any value_;
+    void ensureType(EntityType) const;
+public:
+    Entity() : type_(etNull) { }
+    Entity(Bool v) : type_(etBool), value_(v) { }
+    Entity(Long v) : type_(etLong), value_(v) { }
+    Entity(Double v) : type_(etDouble), value_(v) { }
+    Entity(const boost::shared_ptr<String>& v) : type_(etString), value_(v) { }
+    Entity(const boost::shared_ptr<Array>& v) : type_(etArray), value_(v) { }
+    Entity(const boost::shared_ptr<Object>& v) : type_(etObject), value_(v) { }
+    
+    EntityType type() const { return type_; }
+
+    Bool boolValue() const {
+        ensureType(etBool);
+        return boost::any_cast<Bool>(value_);
+    }
+
+    Long longValue() const {
+        ensureType(etLong);
+        return boost::any_cast<Long>(value_);
+    }
+    
+    Double doubleValue() const {
+        ensureType(etDouble);
+        return boost::any_cast<Double>(value_);
+    }
+
+    const String& stringValue() const {
+        ensureType(etString);
+        return **boost::any_cast<boost::shared_ptr<String> >(&value_);
+    }
+    
+    const Array& arrayValue() const {
+        ensureType(etArray);
+        return **boost::any_cast<boost::shared_ptr<Array> >(&value_);
+    }
+
+    const Object& objectValue() const {
+        ensureType(etObject);
+        return **boost::any_cast<boost::shared_ptr<Object> >(&value_);
+    }
+
+    std::string toString() const;
+};
+
+template <typename T>
+struct type_traits {
+};
+
+template <> struct type_traits<bool> {
+    static EntityType type() { return etBool; }
+    static const char* name() { return "bool"; }
+};
+
+template <> struct type_traits<int64_t> {
+    static EntityType type() { return etLong; }
+    static const char* name() { return "long"; }
+};
+
+template <> struct type_traits<double> {
+    static EntityType type() { return etDouble; }
+    static const char* name() { return "double"; }
+};
+    
+template <> struct type_traits<std::string> {
+    static EntityType type() { return etString; }
+    static const char* name() { return "string"; }
+};
+
+template <> struct type_traits<std::vector<Entity> > {
+    static EntityType type() { return etArray; }
+    static const char* name() { return "array"; }
+};
+
+template <> struct type_traits<std::map<std::string, Entity> > {
+    static EntityType type() { return etObject; }
+    static const char* name() { return "object"; }
+};
+
+AVRO_DECL Entity readEntity(JsonParser& p);
+
+AVRO_DECL Entity loadEntity(InputStream& in);
+AVRO_DECL Entity loadEntity(const char* text);
+AVRO_DECL Entity loadEntity(const uint8_t* text, size_t len);
+
+void writeEntity(JsonGenerator& g, const Entity& n);
+
+}
+}
+
+#endif
+
+
diff --git a/lang/c++/impl/json/JsonIO.cc b/lang/c++/impl/json/JsonIO.cc
new file mode 100644
index 0000000..f21e822
--- /dev/null
+++ b/lang/c++/impl/json/JsonIO.cc
@@ -0,0 +1,374 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "boost/math/special_functions/fpclassify.hpp"
+#include "JsonIO.hh"
+
+namespace avro {
+namespace json {
+
+using std::ostringstream;
+
+const char* const
+JsonParser::tokenNames[] = {
+    "Null",
+    "Bool",
+    "Integer",
+    "Double",
+    "String",
+    "Array start",
+    "Array end",
+    "Object start",
+    "Object end",
+};
+
+char JsonParser::next()
+{
+    char ch = hasNext ? nextChar : ' ';
+    while (isspace(ch)) {
+        ch = in_.read();
+    }
+    hasNext = false;
+    return ch;
+}
+
+void JsonParser::expectToken(Token tk)
+{
+    if (advance() != tk) {
+        if (tk == tkDouble) {
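+            // JSON has no literals for NaN or +/-Infinity, so those arrive
+            // as strings; a long token is also accepted where a double is
+            // expected and is promoted below.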
+            if (cur() == tkString
+                && (sv == "Infinity" || sv == "-Infinity" || sv == "NaN")) {
+                curToken = tkDouble;
+                dv = sv == "Infinity" ?
+                    std::numeric_limits<double>::infinity() :
+                    sv == "-Infinity" ?
+                        -std::numeric_limits<double>::infinity() :
+                    std::numeric_limits<double>::quiet_NaN();
+                return;
+            } else if (cur() == tkLong) {
+                dv = double(lv);
+                return;
+            }
+        }
+        ostringstream oss;
+        oss << "Incorrect token in the stream. Expected: "
+            << JsonParser::toString(tk) << ", found "
+            << JsonParser::toString(cur());
+        throw Exception(oss.str());
+    }
+}
+
+JsonParser::Token JsonParser::doAdvance()
+{
+    char ch = next();
+    if (ch == ']') {
+        if (curState == stArray0 || curState == stArrayN) {
+            curState = stateStack.top();
+            stateStack.pop();
+            return tkArrayEnd;
+        } else {
+            throw unexpected(ch);
+        }
+    } else if (ch == '}') {
+        if (curState == stObject0 || curState == stObjectN) {
+            curState = stateStack.top();
+            stateStack.pop();
+            return tkObjectEnd;
+        } else {
+            throw unexpected(ch);
+        }
+    } else if (ch == ',') {
+        if (curState != stObjectN && curState != stArrayN) {
+            throw unexpected(ch);
+        }
+        if (curState == stObjectN) {
+            curState = stObject0;
+        }
+        ch = next();
+    } else if (ch == ':') {
+        if (curState != stKey) {
+            throw unexpected(ch);
+        }
+        curState = stObjectN;
+        ch = next();
+    }
+
+    if (curState == stObject0) {
+        if (ch != '"') {
+            throw unexpected(ch);
+        }
+        curState = stKey;
+    } else if (curState == stArray0) {
+        curState = stArrayN;
+    }
+
+    switch (ch) {
+    case '[':
+        stateStack.push(curState);
+        curState = stArray0;
+        return tkArrayStart;
+    case '{':
+        stateStack.push(curState);
+        curState = stObject0;
+        return tkObjectStart;
+    case '"':
+        return tryString();
+    case 't':
+        bv = true;
+        return tryLiteral("rue", 3, tkBool);
+    case 'f':
+        bv = false;
+        return tryLiteral("alse", 4, tkBool);
+    case 'n':
+        return tryLiteral("ull", 3, tkNull);
+    default:
+        if (isdigit(ch) || ch == '-') {
+            return tryNumber(ch);
+        } else {
+            throw unexpected(ch);
+        }
+    }
+}
+
+JsonParser::Token JsonParser::tryNumber(char ch)
+{
+    sv.clear();
+    sv.push_back(ch);
+
+    hasNext = false;
+    int state = (ch == '-') ? 0 : (ch == '0') ? 1 : 2;
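+    // Hand-rolled DFA over the JSON number grammar. States: 0 after '-',
+    // 1 after a leading '0', 2 in the integer part, 3 after '.', 4 in the
+    // fraction, 5 after 'e'/'E', 6 after an exponent sign, 7 in the
+    // exponent. States 1 and 2 accept as long; 4 and 7 accept as double.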
+    for (; ;) {
+        switch (state) {
+        case 0:
+            if (in_.hasMore()) {
+                ch = in_.read();
+                if (isdigit(ch)) {
+                    state = (ch == '0') ? 1 : 2;
+                    sv.push_back(ch);
+                    continue;
+                }
+                hasNext = true;
+            }
+            break;
+        case 1:
+            if (in_.hasMore()) {
+                ch = in_.read();
+                if (ch == '.') {
+                    state = 3;
+                    sv.push_back(ch);
+                    continue;
+                } else if (ch == 'e' || ch == 'E') {
+                    // An exponent may follow the integer part directly
+                    // (e.g. "0e5").
+                    sv.push_back(ch);
+                    state = 5;
+                    continue;
+                }
+                hasNext = true;
+            }
+            break;
+        case 2:
+            if (in_.hasMore()) {
+                ch = in_.read();
+                if (isdigit(ch)) {
+                    sv.push_back(ch);
+                    continue;
+                } else if (ch == '.') {
+                    state = 3;
+                    sv.push_back(ch);
+                    continue;
+                } else if (ch == 'e' || ch == 'E') {
+                    // An exponent may follow the integer part directly
+                    // (e.g. "12e3").
+                    sv.push_back(ch);
+                    state = 5;
+                    continue;
+                }
+                hasNext = true;
+            }
+            break;
+        case 3:
+        case 6:
+            if (in_.hasMore()) {
+                ch = in_.read();
+                if (isdigit(ch)) {
+                    sv.push_back(ch);
+                    state++;
+                    continue;
+                }
+                hasNext = true;
+            }
+            break;
+        case 4:
+            if (in_.hasMore()) {
+                ch = in_.read();
+                if (isdigit(ch)) {
+                    sv.push_back(ch);
+                    continue;
+                } else if (ch == 'e' || ch == 'E') {
+                    sv.push_back(ch);
+                    state = 5;
+                    continue;
+                }
+                hasNext = true;
+            }
+            break;
+        case 5:
+            if (in_.hasMore()) {
+                ch = in_.read();
+                if (ch == '+' || ch == '-') {
+                    sv.push_back(ch);
+                    state = 6;
+                    continue;
+                } else if (isdigit(ch)) {
+                    sv.push_back(ch);
+                    state = 7;
+                    continue;
+                }
+                hasNext = true;
+            }
+            break;
+        case 7:
+            if (in_.hasMore()) {
+                ch = in_.read();
+                if (isdigit(ch)) {
+                    sv.push_back(ch);
+                    continue;
+                }
+                hasNext = true;
+            }
+            break;
+        }
+        if (state == 1 || state == 2 || state == 4 || state == 7) {
+            if (hasNext) {
+                nextChar = ch;
+            }
+            std::istringstream iss(sv);
+            if (state == 1 || state == 2) {
+                iss >> lv;
+                return tkLong;
+            } else {
+                iss >> dv;
+                return tkDouble;
+            }
+        } else {
+            if (hasNext) {
+                throw unexpected(ch);
+            } else {
+                throw Exception("Unexpected EOF");
+            }
+        }
+    }
+}
+
+JsonParser::Token JsonParser::tryString()
+{
+    sv.clear();
+    for ( ; ;) {
+        char ch = in_.read();
+        if (ch == '"') {
+            return tkString;
+        } else if (ch == '\\') {
+            ch = in_.read();
+            switch (ch) {
+            case '"':
+            case '\\':
+            case '/':
+                sv.push_back(ch);
+                continue;
+            case 'b':
+                sv.push_back('\b');
+                continue;
+            case 'f':
+                sv.push_back('\f');
+                continue;
+            case 'n':
+                sv.push_back('\n');
+                continue;
+            case 'r':
+                sv.push_back('\r');
+                continue;
+            case 't':
+                sv.push_back('\t');
+                continue;
+            case 'u':
+            case 'U':
+                {
+                    unsigned int n = 0;
+                    char e[4];
+                    in_.readBytes(reinterpret_cast<uint8_t*>(e), 4);
+                    for (int i = 0; i < 4; i++) {
+                        n *= 16;
+                        char c = e[i];
+                        if (isdigit(c)) {
+                            n += c - '0';
+                        } else if (c >= 'a' && c <= 'f') {
+                            n += c - 'a' + 10;
+                        } else if (c >= 'A' && c <= 'F') {
+                            n += c - 'A' + 10;
+                        } else {
+                            throw unexpected(c);
+                        }
+                    }
+                    sv.push_back(n);
+                }
+                break;
+            default:
+                throw unexpected(ch);
+            }
+        } else {
+            sv.push_back(ch);
+        }
+    }
+}
+
+Exception JsonParser::unexpected(unsigned char c)
+{
+    std::ostringstream oss;
+    oss << "Unexpected character in json " << toHex(c / 16) << toHex(c % 16);
+    return Exception(oss.str());
+}
+
+JsonParser::Token JsonParser::tryLiteral(const char exp[], size_t n, Token tk)
+{
+    char c[100];
+    in_.readBytes(reinterpret_cast<uint8_t*>(c), n);
+    for (size_t i = 0; i < n; ++i) {
+        if (c[i] != exp[i]) {
+            throw unexpected(c[i]);
+        }
+    }
+    if (in_.hasMore()) {
+        nextChar = in_.read();
+        if (isdigit(nextChar) || isalpha(nextChar)) {
+            throw unexpected(nextChar);
+        }
+        hasNext = true;
+    }
+    return tk;
+}
+
+void JsonGenerator::encodeNumber(double t) {
+    sep();
+    std::ostringstream oss;
+    if (boost::math::isfinite(t)) {
+        oss << t;
+    } else if (boost::math::isnan(t)) {
+        oss << "NaN";
+    } else if (t == std::numeric_limits<double>::infinity()) {
+        oss << "Infinity";
+    } else {
+        oss << "-Infinity";
+    }
+    const std::string& s = oss.str();
+    out_.writeBytes(reinterpret_cast<const uint8_t*>(&s[0]), s.size());
+    sep2();
+}
+
+
+}
+}
+
diff --git a/lang/c++/impl/json/JsonIO.hh b/lang/c++/impl/json/JsonIO.hh
new file mode 100644
index 0000000..33da0df
--- /dev/null
+++ b/lang/c++/impl/json/JsonIO.hh
@@ -0,0 +1,323 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_json_JsonIO_hh__
+#define avro_json_JsonIO_hh__
+
+#include <stack>
+#include <string>
+#include <sstream>
+#include <boost/utility.hpp>
+
+#include "Config.hh"
+#include "Stream.hh"
+
+namespace avro {
+namespace json {
+
+inline char toHex(unsigned int n) {
+    return (n < 10) ? (n + '0') : (n + 'a' - 10);
+}
+
+
+class AVRO_DECL JsonParser : boost::noncopyable {
+public:
+    enum Token {
+        tkNull,
+        tkBool,
+        tkLong,
+        tkDouble,
+        tkString,
+        tkArrayStart,
+        tkArrayEnd,
+        tkObjectStart,
+        tkObjectEnd
+    };
+
+private:
+    enum State {
+        stValue,    // Expect a data type
+        stArray0,   // Expect a data type or ']'
+        stArrayN,   // Expect a ',' or ']'
+        stObject0,  // Expect a string or a '}'
+        stObjectN,  // Expect a ',' or '}'
+        stKey       // Expect a ':'
+    };
+    std::stack<State> stateStack;
+    State curState;
+    bool hasNext;
+    char nextChar;
+    bool peeked;
+
+    StreamReader in_;
+    Token curToken;
+    bool bv;
+    int64_t lv;
+    double dv;
+    std::string sv;
+
+    Token doAdvance();
+    Token tryLiteral(const char exp[], size_t n, Token tk);
+    Token tryNumber(char ch);
+    Token tryString();
+    Exception unexpected(unsigned char ch);
+    char next();
+
+public:
+    JsonParser() : curState(stValue), hasNext(false), peeked(false) { }
+
+    void init(InputStream& is) {
+        in_.reset(is);
+    }
+
+    Token advance() {
+        if (! peeked) {
+            curToken = doAdvance();
+        } else {
+            peeked = false;
+        }
+        return curToken;
+    }
+
+    Token peek() {
+        if (! peeked) {
+            curToken = doAdvance();
+            peeked = true;
+        }
+        return curToken;
+    }
+
+    void expectToken(Token tk);
+
+    bool boolValue() {
+        return bv;
+    }
+
+    Token cur() {
+        return curToken;
+    }
+
+    double doubleValue() {
+        return dv;
+    }
+
+    int64_t longValue() {
+        return lv;
+    }
+
+    std::string stringValue() {
+        return sv;
+    }
+
+    static const char* const tokenNames[];
+
+    static const char* toString(Token tk) {
+        return tokenNames[tk];
+    }
+};
+
+class AVRO_DECL JsonGenerator {
+    StreamWriter out_;
+    enum State {
+        stStart,
+        stArray0,
+        stArrayN,
+        stMap0,
+        stMapN,
+        stKey
+    };
+
+    std::stack<State> stateStack;
+    State top;
+
+    void write(const char *b, const char* p) {
+        if (b != p) {
+            out_.writeBytes(reinterpret_cast<const uint8_t*>(b), p - b);
+        }
+    }
+
+    void escape(char c, const char* b, const char *p) {
+        write(b, p);
+        out_.write('\\');
+        out_.write(c);
+    }
+
+    void escapeCtl(char c) {
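+        // Control characters are emitted as a two-digit hex escape. Note the
+        // uppercase 'U'; the parser in this file accepts both 'u' and 'U'.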
+        out_.write('\\');
+        out_.write('U');
+        out_.write('0');
+        out_.write('0');
+        out_.write(toHex((static_cast<unsigned char>(c)) / 16));
+        out_.write(toHex((static_cast<unsigned char>(c)) % 16));
+    }
+
+    void doEncodeString(const std::string& s) {
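+        // b marks the start of the current run of characters that need no
+        // escaping; each run is flushed in bulk when an escape (or the end
+        // of the string) is reached.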
+        const char* b = &s[0];
+        const char* e = b + s.size();
+        out_.write('"');
+        for (const char* p = b; p != e; p++) {
+            switch (*p) {
+            case '\\':
+            case '"':
+            case '/':
+                escape(*p, b, p);
+                break;
+            case '\b':
+                escape('b', b, p);
+                break;
+            case '\f':
+                escape('f', b, p);
+                break;
+            case '\n':
+                escape('n', b, p);
+                break;
+            case '\r':
+                escape('r', b, p);
+                break;
+            case '\t':
+                escape('t', b, p);
+                break;
+            default:
+                if (! iscntrl(*p)) {
+                    continue;
+                }
+                write(b, p);
+                escapeCtl(*p);
+                break;
+            }
+            b = p + 1;
+        }
+        write(b, e);
+        out_.write('"');
+    }
+
+    void sep() {
+        if (top == stArrayN) {
+            out_.write(',');
+        } else if (top == stArray0) {
+            top = stArrayN;
+        }
+    }
+
+    void sep2() {
+        if (top == stKey) {
+            top = stMapN;
+        }
+    }
+
+public:
+    JsonGenerator() : top(stStart) { }
+
+    void init(OutputStream& os) {
+        out_.reset(os);
+    }
+
+    void flush() {
+        out_.flush();
+    }
+
+    void encodeNull() {
+        sep();
+        out_.writeBytes(reinterpret_cast<const uint8_t*>("null"), 4);
+        sep2();
+    }
+
+    void encodeBool(bool b) {
+        sep();
+        if (b) {
+            out_.writeBytes(reinterpret_cast<const uint8_t*>("true"), 4);
+        } else {
+            out_.writeBytes(reinterpret_cast<const uint8_t*>("false"), 5);
+        }
+        sep2();
+    }
+
+    template <typename T>
+    void encodeNumber(T t) {
+        sep();
+        std::ostringstream oss;
+        oss << t;
+        const std::string& s = oss.str();
+        out_.writeBytes(reinterpret_cast<const uint8_t*>(&s[0]), s.size());
+        sep2();
+    }
+
+    void encodeNumber(double t);
+
+    void encodeString(const std::string& s) {
+        if (top == stMap0) {
+            top = stKey;
+        } else if (top == stMapN) {
+            out_.write(',');
+            top = stKey;
+        } else if (top == stKey) {
+            top = stMapN;
+        } else {
+            sep();
+        }
+        doEncodeString(s);
+        if (top == stKey) {
+            out_.write(':');
+        }
+    }
+
+    void encodeBinary(const uint8_t* bytes, size_t len) {
+        sep();
+        out_.write('"');
+        const uint8_t* e = bytes + len;
+        while (bytes != e) {
+            escapeCtl(*bytes++);
+        }
+        out_.write('"');
+        sep2();
+    }
+
+    void arrayStart() {
+        sep();
+        stateStack.push(top);
+        top = stArray0;
+        out_.write('[');
+    }
+
+    void arrayEnd() {
+        top = stateStack.top();
+        stateStack.pop();
+        out_.write(']');
+        sep2();
+    }
+
+    void objectStart() {
+        sep();
+        stateStack.push(top);
+        top = stMap0;
+        out_.write('{');
+    }
+
+    void objectEnd() {
+        top = stateStack.top();
+        stateStack.pop();
+        out_.write('}');
+        sep2();
+    }
+
+};
+
+}
+}
+
+#endif
diff --git a/lang/c++/impl/parsing/JsonCodec.cc b/lang/c++/impl/parsing/JsonCodec.cc
new file mode 100644
index 0000000..f18a1c9
--- /dev/null
+++ b/lang/c++/impl/parsing/JsonCodec.cc
@@ -0,0 +1,696 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define __STDC_LIMIT_MACROS
+
+#include <string>
+#include <map>
+#include <algorithm>
+#include <ctype.h>
+#include <boost/shared_ptr.hpp>
+#include <boost/make_shared.hpp>
+#include <boost/weak_ptr.hpp>
+#include <boost/any.hpp>
+#include <boost/math/special_functions/fpclassify.hpp>
+
+#include "ValidatingCodec.hh"
+#include "Symbol.hh"
+#include "ValidSchema.hh"
+#include "Decoder.hh"
+#include "Encoder.hh"
+#include "NodeImpl.hh"
+
+#include "../json/JsonIO.hh"
+
+namespace avro {
+
+namespace parsing {
+
+using boost::shared_ptr;
+using boost::make_shared;
+using boost::static_pointer_cast;
+
+using std::map;
+using std::vector;
+using std::string;
+using std::reverse;
+using std::ostringstream;
+using std::istringstream;
+
+using avro::json::JsonParser;
+using avro::json::JsonGenerator;
+
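+// Grammar generator for the JSON codec: extends the validating grammar with
+// record start/end and field symbols plus name lists, so the decoder can
+// match JSON object keys and union branch names against the schema.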
+class JsonGrammarGenerator : public ValidatingGrammarGenerator {
+    ProductionPtr doGenerate(const NodePtr& n,
+        std::map<NodePtr, ProductionPtr> &m);
+};
+
+static std::string nameOf(const NodePtr& n)
+{
+    if (n->hasName()) {
+        return n->name();
+    }
+    std::ostringstream oss;
+    oss << n->type();
+    return oss.str();
+}
+
+ProductionPtr JsonGrammarGenerator::doGenerate(const NodePtr& n,
+    std::map<NodePtr, ProductionPtr> &m) {
+    switch (n->type()) {
+    case AVRO_NULL:
+    case AVRO_BOOL:
+    case AVRO_INT:
+    case AVRO_LONG:
+    case AVRO_FLOAT:
+    case AVRO_DOUBLE:
+    case AVRO_STRING:
+    case AVRO_BYTES:
+    case AVRO_FIXED:
+    case AVRO_ARRAY:
+    case AVRO_MAP:
+    case AVRO_SYMBOLIC:
+        return ValidatingGrammarGenerator::doGenerate(n, m);
+    case AVRO_RECORD:
+        {
+            ProductionPtr result = make_shared<Production>();
+
+            m.erase(n);
+
+            size_t c = n->leaves();
+            result->reserve(2 + 2 * c);
+            result->push_back(Symbol::recordStartSymbol());
+            for (size_t i = 0; i < c; ++i) {
+                const NodePtr& leaf = n->leafAt(i);
+                ProductionPtr v = doGenerate(leaf, m);
+                result->push_back(Symbol::fieldSymbol(n->nameAt(i)));
+                copy(v->rbegin(), v->rend(), back_inserter(*result));
+            }
+            result->push_back(Symbol::recordEndSymbol());
+            reverse(result->begin(), result->end());
+
+            m[n] = result;
+            return result;
+        }
+    case AVRO_ENUM:
+        {
+            vector<string> nn;
+            size_t c = n->names();
+            nn.reserve(c);
+            for (size_t i = 0; i < c; ++i) {
+                nn.push_back(n->nameAt(i));
+            }
+            ProductionPtr result = make_shared<Production>();
+            result->push_back(Symbol::nameListSymbol(nn));
+            result->push_back(Symbol::enumSymbol());
+            m[n] = result;
+            return result;
+        }
+    case AVRO_UNION:
+        {
+            size_t c = n->leaves();
+
+            vector<ProductionPtr> vv;
+            vv.reserve(c);
+
+            vector<string> names;
+            names.reserve(c);
+
+            for (size_t i = 0; i < c; ++i) {
+                const NodePtr& nn = n->leafAt(i);
+                ProductionPtr v = doGenerate(nn, m);
+                if (nn->type() != AVRO_NULL) {
+                    ProductionPtr v2 = make_shared<Production>();
+                    v2->push_back(Symbol::recordEndSymbol());
+                    copy(v->begin(), v->end(), back_inserter(*v2));
+                    v.swap(v2);
+                }
+                vv.push_back(v);
+                names.push_back(nameOf(nn));
+            }
+            ProductionPtr result = make_shared<Production>();
+            result->push_back(Symbol::alternative(vv));
+            result->push_back(Symbol::nameListSymbol(names));
+            result->push_back(Symbol::unionSymbol());
+            return result;
+        }
+    default:
+        throw Exception("Unknown node type");
+    }
+}
+
+static void expectToken(JsonParser& in, JsonParser::Token tk)
+{
+    in.expectToken(tk);
+}
+
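+// Parser callback: consumes the structural JSON tokens (object start/end and
+// field names) that correspond to implicit record symbols in the grammar.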
+class JsonDecoderHandler {
+    JsonParser& in_;
+public:
+    JsonDecoderHandler(JsonParser& p) : in_(p) { }
+    size_t handle(const Symbol& s) {
+        switch (s.kind()) {
+        case Symbol::sRecordStart:
+            expectToken(in_, JsonParser::tkObjectStart);
+            break;
+        case Symbol::sRecordEnd:
+            expectToken(in_, JsonParser::tkObjectEnd);
+            break;
+        case Symbol::sField:
+            expectToken(in_, JsonParser::tkString);
+            if (s.extra<string>() != in_.stringValue()) {
+                throw Exception("Incorrect field");
+            }
+            break;
+        default:
+            break;
+        }
+        return 0;
+    }
+};
+
+template <typename P>
+class JsonDecoder : public Decoder {
+    JsonParser in_;
+    JsonDecoderHandler handler_;
+    P parser_;
+
+    void init(InputStream& is);
+    void decodeNull();
+    bool decodeBool();
+    int32_t decodeInt();
+    int64_t decodeLong();
+    float decodeFloat();
+    double decodeDouble();
+    void decodeString(string& value);
+    void skipString();
+    void decodeBytes(vector<uint8_t>& value);
+    void skipBytes();
+    void decodeFixed(size_t n, vector<uint8_t>& value);
+    void skipFixed(size_t n);
+    size_t decodeEnum();
+    size_t arrayStart();
+    size_t arrayNext();
+    size_t skipArray();
+    size_t mapStart();
+    size_t mapNext();
+    size_t skipMap();
+    size_t decodeUnionIndex();
+
+    void expect(JsonParser::Token tk);
+    void skipComposite();
+public:
+
+    JsonDecoder(const ValidSchema& s) :
+        handler_(in_),
+        parser_(JsonGrammarGenerator().generate(s), NULL, handler_) { }
+
+};
+
+template <typename P>
+void JsonDecoder<P>::init(InputStream& is)
+{
+    in_.init(is);
+}
+
+template <typename P>
+void JsonDecoder<P>::expect(JsonParser::Token tk)
+{
+    expectToken(in_, tk);
+}
+
+template <typename P>
+void JsonDecoder<P>::decodeNull()
+{
+    parser_.advance(Symbol::sNull);
+    expect(JsonParser::tkNull);
+}
+
+template <typename P>
+bool JsonDecoder<P>::decodeBool()
+{
+    parser_.advance(Symbol::sBool);
+    expect(JsonParser::tkBool);
+    bool result = in_.boolValue();
+    return result;
+}
+
+template <typename P>
+int32_t JsonDecoder<P>::decodeInt()
+{
+    parser_.advance(Symbol::sInt);
+    expect(JsonParser::tkLong);
+    int64_t result = in_.longValue();
+    if (result < INT32_MIN || result > INT32_MAX) {
+        throw Exception(boost::format("Value out of range for Avro int: %1%")
+            % result);
+    }
+    return static_cast<int32_t>(result);
+}
+
+template <typename P>
+int64_t JsonDecoder<P>::decodeLong()
+{
+    parser_.advance(Symbol::sLong);
+    expect(JsonParser::tkLong);
+    int64_t result = in_.longValue();
+    return result;
+}
+
+template <typename P>
+float JsonDecoder<P>::decodeFloat()
+{
+    parser_.advance(Symbol::sFloat);
+    expect(JsonParser::tkDouble);
+    double result = in_.doubleValue();
+    return static_cast<float>(result);
+}
+
+template <typename P>
+double JsonDecoder<P>::decodeDouble()
+{
+    parser_.advance(Symbol::sDouble);
+    expect(JsonParser::tkDouble);
+    double result = in_.doubleValue();
+    return result;
+}
+
+template <typename P>
+void JsonDecoder<P>::decodeString(string& value)
+{
+    parser_.advance(Symbol::sString);
+    expect(JsonParser::tkString);
+    value = in_.stringValue();
+}
+
+template <typename P>
+void JsonDecoder<P>::skipString()
+{
+    parser_.advance(Symbol::sString);
+    expect(JsonParser::tkString);
+}
+
+static vector<uint8_t> toBytes(const string& s)
+{
+    return vector<uint8_t>(s.begin(), s.end());
+}
+
+template <typename P>
+void JsonDecoder<P>::decodeBytes(vector<uint8_t>& value )
+{
+    parser_.advance(Symbol::sBytes);
+    expect(JsonParser::tkString);
+    value = toBytes(in_.stringValue());
+}
+
+template <typename P>
+void JsonDecoder<P>::skipBytes()
+{
+    parser_.advance(Symbol::sBytes);
+    expect(JsonParser::tkString);
+}
+
+template <typename P>
+void JsonDecoder<P>::decodeFixed(size_t n, vector<uint8_t>& value)
+{
+    parser_.advance(Symbol::sFixed);
+    parser_.assertSize(n);
+    expect(JsonParser::tkString);
+    value = toBytes(in_.stringValue());
+    if (value.size() != n) {
+        throw Exception("Incorrect value for fixed");
+    }
+}
+
+template <typename P>
+void JsonDecoder<P>::skipFixed(size_t n)
+{
+    parser_.advance(Symbol::sFixed);
+    parser_.assertSize(n);
+    expect(JsonParser::tkString);
+    vector<uint8_t> result = toBytes(in_.stringValue());
+    if (result.size() != n) {
+        throw Exception("Incorrect value for fixed");
+    }
+}
+
+template <typename P>
+size_t JsonDecoder<P>::decodeEnum()
+{
+    parser_.advance(Symbol::sEnum);
+    expect(JsonParser::tkString);
+    size_t result = parser_.indexForName(in_.stringValue());
+    return result;
+}
+
+template <typename P>
+size_t JsonDecoder<P>::arrayStart()
+{
+    parser_.advance(Symbol::sArrayStart);
+    expect(JsonParser::tkArrayStart);
+    return arrayNext();
+}
+
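+/*
+ * Unlike the binary encoding, JSON arrays carry no block counts: each call
+ * peeks for the closing bracket and otherwise reports exactly one more item
+ * to the grammar via setRepeatCount(1).
+ */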
+template <typename P>
+size_t JsonDecoder<P>::arrayNext()
+{
+    parser_.processImplicitActions();
+    if (in_.peek() == JsonParser::tkArrayEnd) {
+        in_.advance();
+        parser_.popRepeater();
+        parser_.advance(Symbol::sArrayEnd);
+        return 0;
+    }
+    parser_.setRepeatCount(1);
+    return 1;
+}
+
+template<typename P>
+void JsonDecoder<P>::skipComposite()
+{
+    size_t level = 0;
+    for (; ;) {
+        switch (in_.advance()) {
+        case JsonParser::tkArrayStart:
+        case JsonParser::tkObjectStart:
+            ++level;
+            continue;
+        case JsonParser::tkArrayEnd:
+        case JsonParser::tkObjectEnd:
+            if (level == 0) {
+                return;
+            }
+            --level;
+            continue;
+        default:
+            continue;
+        }
+    }
+}
+
+template <typename P>
+size_t JsonDecoder<P>::skipArray()
+{
+    parser_.advance(Symbol::sArrayStart);
+    parser_.pop();
+    parser_.advance(Symbol::sArrayEnd);
+    expect(JsonParser::tkArrayStart);
+    skipComposite();
+    return 0;
+}
+
+template <typename P>
+size_t JsonDecoder<P>::mapStart()
+{
+    parser_.advance(Symbol::sMapStart);
+    expect(JsonParser::tkObjectStart);
+    return mapNext();
+}
+
+template <typename P>
+size_t JsonDecoder<P>::mapNext()
+{
+    parser_.processImplicitActions();
+    if (in_.peek() == JsonParser::tkObjectEnd) {
+        in_.advance();
+        parser_.popRepeater();
+        parser_.advance(Symbol::sMapEnd);
+        return 0;
+    }
+    parser_.setRepeatCount(1);
+    return 1;
+}
+
+template <typename P>
+size_t JsonDecoder<P>::skipMap()
+{
+    parser_.advance(Symbol::sMapStart);
+    parser_.pop();
+    parser_.advance(Symbol::sMapEnd);
+    expect(JsonParser::tkObjectStart);
+    skipComposite();
+    return 0;
+}
+
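+/*
+ * In the Avro JSON encoding a union value is either the literal null or a
+ * single-pair object whose key names the selected branch. For example, for
+ * a hypothetical union schema ["null", "int"], the value 5 appears as
+ * {"int": 5}.
+ */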
+template <typename P>
+size_t JsonDecoder<P>::decodeUnionIndex()
+{
+    parser_.advance(Symbol::sUnion);
+
+    size_t result;
+    if (in_.peek() == JsonParser::tkNull) {
+        result = parser_.indexForName("null");
+    } else {
+        expect(JsonParser::tkObjectStart);
+        expect(JsonParser::tkString);
+        result = parser_.indexForName(in_.stringValue());
+    }
+    parser_.selectBranch(result);
+    return result;
+}
+
+
+class JsonHandler {
+    JsonGenerator& generator_;
+public:
+    JsonHandler(JsonGenerator& g) : generator_(g) { }
+    size_t handle(const Symbol& s) {
+        switch (s.kind()) {
+        case Symbol::sRecordStart:
+            generator_.objectStart();
+            break;
+        case Symbol::sRecordEnd:
+            generator_.objectEnd();
+            break;
+        case Symbol::sField:
+            generator_.encodeString(s.extra<string>());
+            break;
+        default:
+            break;
+        }
+        return 0;
+    }
+};
+
+template <typename P>
+class JsonEncoder : public Encoder {
+    JsonGenerator out_;
+    JsonHandler handler_;
+    P parser_;
+
+    void init(OutputStream& os);
+    void flush();
+    void encodeNull();
+    void encodeBool(bool b);
+    void encodeInt(int32_t i);
+    void encodeLong(int64_t l);
+    void encodeFloat(float f);
+    void encodeDouble(double d);
+    void encodeString(const std::string& s);
+    void encodeBytes(const uint8_t *bytes, size_t len);
+    void encodeFixed(const uint8_t *bytes, size_t len);
+    void encodeEnum(size_t e);
+    void arrayStart();
+    void arrayEnd();
+    void mapStart();
+    void mapEnd();
+    void setItemCount(size_t count);
+    void startItem();
+    void encodeUnionIndex(size_t e);
+public:
+    JsonEncoder(const ValidSchema& schema) :
+        handler_(out_),
+        parser_(JsonGrammarGenerator().generate(schema), NULL, handler_) { }
+};
+
+template<typename P>
+void JsonEncoder<P>::init(OutputStream& os)
+{
+    out_.init(os);
+}
+
+template<typename P>
+void JsonEncoder<P>::flush()
+{
+    parser_.processImplicitActions();
+    out_.flush();
+}
+
+template<typename P>
+void JsonEncoder<P>::encodeNull()
+{
+    parser_.advance(Symbol::sNull);
+    out_.encodeNull();
+}
+
+template<typename P>
+void JsonEncoder<P>::encodeBool(bool b)
+{
+    parser_.advance(Symbol::sBool);
+    out_.encodeBool(b);
+}
+
+template<typename P>
+void JsonEncoder<P>::encodeInt(int32_t i)
+{
+    parser_.advance(Symbol::sInt);
+    out_.encodeNumber(i);
+}
+
+template<typename P>
+void JsonEncoder<P>::encodeLong(int64_t l)
+{
+    parser_.advance(Symbol::sLong);
+    out_.encodeNumber(l);
+}
+
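+/*
+ * JSON has no literals for non-finite numbers, so this encoder represents
+ * them with the strings "Infinity", "-Infinity" and "NaN".
+ */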
+template<typename P>
+void JsonEncoder<P>::encodeFloat(float f)
+{
+    parser_.advance(Symbol::sFloat);
+    if (f == std::numeric_limits<float>::infinity()) {
+        out_.encodeString("Infinity");
+    } else if (f == -std::numeric_limits<float>::infinity()) {
+        out_.encodeString("-Infinity");
+    } else if (boost::math::isnan(f)) {
+        out_.encodeString("NaN");
+    } else {
+        out_.encodeNumber(f);
+    }
+}
+
+template<typename P>
+void JsonEncoder<P>::encodeDouble(double d)
+{
+    parser_.advance(Symbol::sDouble);
+    if (d == std::numeric_limits<double>::infinity()) {
+        out_.encodeString("Infinity");
+    } else if (d == -std::numeric_limits<double>::infinity()) {
+        out_.encodeString("-Infinity");
+    } else if (boost::math::isnan(d)) {
+        out_.encodeString("NaN");
+    } else {
+        out_.encodeNumber(d);
+    }
+}
+
+template<typename P>
+void JsonEncoder<P>::encodeString(const std::string& s)
+{
+    parser_.advance(Symbol::sString);
+    out_.encodeString(s);
+}
+
+template<typename P>
+void JsonEncoder<P>::encodeBytes(const uint8_t *bytes, size_t len)
+{
+    parser_.advance(Symbol::sBytes);
+    out_.encodeBinary(bytes, len);
+}
+
+template<typename P>
+void JsonEncoder<P>::encodeFixed(const uint8_t *bytes, size_t len)
+{
+    parser_.advance(Symbol::sFixed);
+    parser_.assertSize(len);
+    out_.encodeBinary(bytes, len);
+}
+
+template<typename P>
+void JsonEncoder<P>::encodeEnum(size_t e)
+{
+    parser_.advance(Symbol::sEnum);
+    const string& s = parser_.nameForIndex(e);
+    out_.encodeString(s);
+}
+
+template<typename P>
+void JsonEncoder<P>::arrayStart()
+{
+    parser_.advance(Symbol::sArrayStart);
+    out_.arrayStart();
+}
+
+template<typename P>
+void JsonEncoder<P>::arrayEnd()
+{
+    parser_.popRepeater();
+    parser_.advance(Symbol::sArrayEnd);
+    out_.arrayEnd();
+}
+
+template<typename P>
+void JsonEncoder<P>::mapStart()
+{
+    parser_.advance(Symbol::sMapStart);
+    out_.objectStart();
+}
+
+template<typename P>
+void JsonEncoder<P>::mapEnd()
+{
+    parser_.popRepeater();
+    parser_.advance(Symbol::sMapEnd);
+    out_.objectEnd();
+}
+
+template<typename P>
+void JsonEncoder<P>::setItemCount(size_t count)
+{
+    parser_.setRepeatCount(count);
+}
+
+template<typename P>
+void JsonEncoder<P>::startItem()
+{
+    parser_.processImplicitActions();
+    if (parser_.top() != Symbol::sRepeater) {
+        throw Exception("startItem called when not at an item boundary");
+    }
+}
+
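+/*
+ * Counterpart of JsonDecoder::decodeUnionIndex: the null branch is emitted
+ * as a bare null, while any other branch opens an object keyed by the
+ * branch name.
+ */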
+template<typename P>
+void JsonEncoder<P>::encodeUnionIndex(size_t e)
+{
+    parser_.advance(Symbol::sUnion);
+
+    const std::string name = parser_.nameForIndex(e);
+
+    if (name != "null") {
+        out_.objectStart();
+        out_.encodeString(name);
+    }
+    parser_.selectBranch(e);
+}
+
+}   // namespace parsing
+
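+/*
+ * A usage sketch (illustrative only; `schema`, `data` and `len` are assumed
+ * to be supplied by the caller):
+ *
+ *   avro::DecoderPtr d = avro::jsonDecoder(schema);
+ *   std::auto_ptr<avro::InputStream> in = avro::memoryInputStream(data, len);
+ *   d->init(*in);
+ *   int32_t v = d->decodeInt();   // if the next value in `schema` is an int
+ */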
+DecoderPtr jsonDecoder(const ValidSchema& s)
+{
+    return boost::make_shared<parsing::JsonDecoder<
+        parsing::SimpleParser<parsing::JsonDecoderHandler> > >(s);
+}
+
+EncoderPtr jsonEncoder(const ValidSchema& schema)
+{
+    return boost::make_shared<parsing::JsonEncoder<
+        parsing::SimpleParser<parsing::JsonHandler> > >(schema);
+}
+
+}   // namespace avro
+
diff --git a/lang/c++/impl/parsing/ResolvingDecoder.cc b/lang/c++/impl/parsing/ResolvingDecoder.cc
new file mode 100644
index 0000000..e0d25ed
--- /dev/null
+++ b/lang/c++/impl/parsing/ResolvingDecoder.cc
@@ -0,0 +1,744 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define __STDC_LIMIT_MACROS
+
+#include <string>
+#include <stack>
+#include <map>
+#include <algorithm>
+#include <ctype.h>
+#include <boost/shared_ptr.hpp>
+#include <boost/make_shared.hpp>
+#include <boost/weak_ptr.hpp>
+#include <boost/any.hpp>
+#include <boost/utility.hpp>
+
+#include "ValidatingCodec.hh"
+#include "Symbol.hh"
+#include "Types.hh"
+#include "ValidSchema.hh"
+#include "Decoder.hh"
+#include "Encoder.hh"
+#include "NodeImpl.hh"
+#include "Generic.hh"
+#include "Stream.hh"
+
+namespace avro {
+
+using boost::make_shared;
+
+namespace parsing {
+
+using boost::shared_ptr;
+using boost::static_pointer_cast;
+using boost::make_shared;
+
+using std::auto_ptr;
+using std::map;
+using std::pair;
+using std::vector;
+using std::string;
+using std::reverse;
+using std::ostringstream;
+using std::istringstream;
+using std::stack;
+using std::find_if;
+using std::make_pair;
+
+typedef pair<NodePtr, NodePtr> NodePair;
+
+class ResolvingGrammarGenerator : public ValidatingGrammarGenerator {
+    ProductionPtr doGenerate2(const NodePtr& writer,
+        const NodePtr& reader, map<NodePair, ProductionPtr> &m,
+        map<NodePtr, ProductionPtr> &m2);
+    ProductionPtr resolveRecords(const NodePtr& writer,
+        const NodePtr& reader, map<NodePair, ProductionPtr> &m,
+        map<NodePtr, ProductionPtr> &m2);
+    ProductionPtr resolveUnion(const NodePtr& writer,
+        const NodePtr& reader, map<NodePair, ProductionPtr> &m,
+        map<NodePtr, ProductionPtr> &m2);
+
+    static vector<pair<string, size_t> > fields(const NodePtr& n) {
+        vector<pair<string, size_t> > result;
+        size_t c = n->names();
+        for (size_t i = 0; i < c; ++i) {
+            result.push_back(make_pair(n->nameAt(i), i));
+        }
+        return result;
+    }
+
+    static int bestBranch(const NodePtr& writer, const NodePtr& reader);
+
+    ProductionPtr getWriterProduction(const NodePtr& n,
+        map<NodePtr, ProductionPtr>& m2);
+
+public:
+    Symbol generate(
+        const ValidSchema& writer, const ValidSchema& reader);
+};
+
+Symbol ResolvingGrammarGenerator::generate(
+    const ValidSchema& writer, const ValidSchema& reader) {
+    map<NodePtr, ProductionPtr> m2;
+
+    const NodePtr& rr = reader.root();
+    const NodePtr& rw = writer.root();
+    ProductionPtr backup = ValidatingGrammarGenerator::doGenerate(rw, m2);
+    fixup(backup, m2);
+
+    map<NodePair, ProductionPtr> m;
+    ProductionPtr main = doGenerate2(rw, rr, m, m2);
+    fixup(main, m);
+    return Symbol::rootSymbol(main, backup);
+}
+
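+/*
+ * Picks the reader-union branch that best matches the writer schema: an
+ * exact type match (with matching names for named types) is preferred;
+ * failing that, a branch reachable by numeric promotion is chosen. For
+ * example, with hypothetical schemas, a writer "int" against the reader
+ * union ["null", "long"] selects the "long" branch. Returns -1 when no
+ * branch matches.
+ */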
+int ResolvingGrammarGenerator::bestBranch(const NodePtr& writer,
+    const NodePtr& reader)
+{
+    Type t = writer->type();
+
+    const size_t c = reader->leaves();
+    for (size_t j = 0; j < c; ++j) {
+        NodePtr r = reader->leafAt(j);
+        if (r->type() == AVRO_SYMBOLIC) {
+            r = resolveSymbol(r);
+        }
+        if (t == r->type()) {
+            if (r->hasName()) {
+                if (r->name() == writer->name()) {
+                    return j;
+                }
+            } else {
+                return j;
+            }
+        }
+    }
+
+    for (size_t j = 0; j < c; ++j) {
+        const NodePtr& r = reader->leafAt(j);
+        Type rt = r->type();
+        switch (t) {
+        case AVRO_INT:
+            if (rt == AVRO_LONG || rt == AVRO_DOUBLE || rt == AVRO_FLOAT) {
+                return j;
+            }
+            break;
+        case AVRO_LONG:
+        case AVRO_FLOAT:
+            if (rt == AVRO_DOUBLE) {
+                return j;
+            }
+            break;
+        default:
+            break;
+        }
+    }
+    return -1;
+}
+
+static shared_ptr<vector<uint8_t> > getAvroBinary(
+    const GenericDatum& defaultValue)
+{
+    EncoderPtr e = binaryEncoder();
+    auto_ptr<OutputStream> os = memoryOutputStream();
+    e->init(*os);
+    GenericWriter::write(*e, defaultValue);
+    e->flush();
+    return snapshot(*os);
+}
+
+template<typename T1, typename T2>
+struct equalsFirst
+{
+    const T1& v_;
+    equalsFirst(const T1& v) : v_(v) { }
+    bool operator()(const pair<T1, T2>& p) {
+        return p.first == v_;
+    }
+};
+
+ProductionPtr ResolvingGrammarGenerator::getWriterProduction(
+    const NodePtr& n, map<NodePtr, ProductionPtr>& m2)
+{
+    const NodePtr& nn = (n->type() == AVRO_SYMBOLIC) ?
+        static_cast<const NodeSymbolic& >(*n).getNode() : n;
+    map<NodePtr, ProductionPtr>::const_iterator it2 = m2.find(nn);
+    if (it2 != m2.end()) {
+        return it2->second;
+    } else {
+        ProductionPtr result = ValidatingGrammarGenerator::doGenerate(nn, m2);
+        fixup(result, m2);
+        return result;
+    }
+}
+
+ProductionPtr ResolvingGrammarGenerator::resolveRecords(
+    const NodePtr& writer, const NodePtr& reader,
+    map<NodePair, ProductionPtr>& m,
+    map<NodePtr, ProductionPtr>& m2)
+{
+    ProductionPtr result = make_shared<Production>();
+
+    vector<pair<string, size_t> > wf = fields(writer);
+    vector<pair<string, size_t> > rf = fields(reader);
+    vector<size_t> fieldOrder;
+    fieldOrder.reserve(reader->names());
+
+    /*
+     * We look for each writer field among the reader's fields. If a match
+     * is found, we recursively resolve the pair of fields and then erase
+     * the reader field. If no matching reader field exists, we arrange to
+     * skip the writer field.
+     */
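+    /*
+     * For example, with hypothetical schemas: writer record fields {a, b}
+     * and reader record fields {b, c} resolve b recursively, skip a, and
+     * later fill c from its default value (see the loop over the remaining
+     * reader fields below).
+     */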
+    for (vector<pair<string, size_t> >::const_iterator it = wf.begin();
+        it != wf.end(); ++it) {
+        vector<pair<string, size_t> >::iterator it2 =
+            find_if(rf.begin(), rf.end(),
+                equalsFirst<string, size_t>(it->first));
+        if (it2 != rf.end()) {
+            ProductionPtr p = doGenerate2(writer->leafAt(it->second),
+                reader->leafAt(it2->second), m, m2);
+            copy(p->rbegin(), p->rend(), back_inserter(*result));
+            fieldOrder.push_back(it2->second);
+            rf.erase(it2);
+        } else {
+            ProductionPtr p = getWriterProduction(
+                writer->leafAt(it->second), m2);
+            result->push_back(Symbol::skipStart());
+            if (p->size() == 1) {
+                result->push_back((*p)[0]);
+            } else {
+                result->push_back(Symbol::indirect(p));
+            }
+        }
+    }
+
+    /*
+     * Examine the reader fields that were left out, i.e. those that had no
+     * corresponding writer field; each is filled from its default value.
+     */
+    for (vector<pair<string, size_t> >::const_iterator it = rf.begin();
+        it != rf.end(); ++it) {
+
+        NodePtr s = reader->leafAt(it->second);
+        fieldOrder.push_back(it->second);
+
+        if (s->type() == AVRO_SYMBOLIC) {
+            s = resolveSymbol(s);
+        }
+        shared_ptr<vector<uint8_t> > defaultBinary =
+            getAvroBinary(reader->defaultValueAt(it->second));
+        result->push_back(Symbol::defaultStartAction(defaultBinary));
+        map<NodePair, shared_ptr<Production> >::const_iterator it2 =
+            m.find(NodePair(s, s));
+        ProductionPtr p = (it2 == m.end()) ?
+            doGenerate2(s, s, m, m2) : it2->second;
+        copy(p->rbegin(), p->rend(), back_inserter(*result));
+        result->push_back(Symbol::defaultEndAction());
+    }
+    reverse(result->begin(), result->end());
+    result->push_back(Symbol::sizeListAction(fieldOrder));
+    result->push_back(Symbol::recordAction());
+
+    return result;
+
+}
+
+ProductionPtr ResolvingGrammarGenerator::resolveUnion(
+    const NodePtr& writer, const NodePtr& reader,
+    map<NodePair, ProductionPtr>& m,
+    map<NodePtr, ProductionPtr>& m2)
+{
+    vector<ProductionPtr> v;
+    size_t c = writer->leaves();
+    v.reserve(c);
+    for (size_t i = 0; i < c; ++i) {
+        ProductionPtr p = doGenerate2(writer->leafAt(i), reader, m, m2);
+        v.push_back(p);
+    }
+    ProductionPtr result = make_shared<Production>();
+    result->push_back(Symbol::alternative(v));
+    result->push_back(Symbol::writerUnionAction());
+    return result;
+}
+
+ProductionPtr ResolvingGrammarGenerator::doGenerate2(
+    const NodePtr& w, const NodePtr& r,
+    map<NodePair, ProductionPtr> &m,
+    map<NodePtr, ProductionPtr> &m2)
+{
+    const NodePtr writer = w->type() == AVRO_SYMBOLIC ? resolveSymbol(w) : w;
+    const NodePtr reader = r->type() == AVRO_SYMBOLIC ? resolveSymbol(r) : r;
+    Type writerType = writer->type();
+    Type readerType = reader->type();
+
+    if (writerType == readerType) {
+        switch (writerType) {
+        case AVRO_NULL:
+            return make_shared<Production>(1, Symbol::nullSymbol());
+        case AVRO_BOOL:
+            return make_shared<Production>(1, Symbol::boolSymbol());
+        case AVRO_INT:
+            return make_shared<Production>(1, Symbol::intSymbol());
+        case AVRO_LONG:
+            return make_shared<Production>(1, Symbol::longSymbol());
+        case AVRO_FLOAT:
+            return make_shared<Production>(1, Symbol::floatSymbol());
+        case AVRO_DOUBLE:
+            return make_shared<Production>(1, Symbol::doubleSymbol());
+        case AVRO_STRING:
+            return make_shared<Production>(1, Symbol::stringSymbol());
+        case AVRO_BYTES:
+            return make_shared<Production>(1, Symbol::bytesSymbol());
+        case AVRO_FIXED:
+            if (writer->name() == reader->name() &&
+                writer->fixedSize() == reader->fixedSize()) {
+                ProductionPtr result = make_shared<Production>();
+                result->push_back(Symbol::sizeCheckSymbol(reader->fixedSize()));
+                result->push_back(Symbol::fixedSymbol());
+                m[make_pair(writer, reader)] = result;
+                return result;
+            }
+            break;
+        case AVRO_RECORD:
+            if (writer->name() == reader->name()) {
+                const pair<NodePtr, NodePtr> key(writer, reader);
+                map<NodePair, ProductionPtr>::const_iterator kp = m.find(key);
+                if (kp != m.end()) {
+                    return (kp->second) ? kp->second :
+                        make_shared<Production>(1, Symbol::placeholder(key));
+                }
+                m[key] = ProductionPtr();
+                ProductionPtr result = resolveRecords(writer, reader, m, m2);
+                m[key] = result;
+                return result;
+            }
+            break;
+
+        case AVRO_ENUM:
+            if (writer->name() == reader->name()) {
+                ProductionPtr result = make_shared<Production>();
+                result->push_back(Symbol::enumAdjustSymbol(writer, reader));
+                result->push_back(Symbol::enumSymbol());
+                m[make_pair(writer, reader)] = result;
+                return result;
+            }
+            break;
+
+        case AVRO_ARRAY:
+            {
+                ProductionPtr p = getWriterProduction(writer->leafAt(0), m2);
+                ProductionPtr p2 = doGenerate2(writer->leafAt(0), reader->leafAt(0), m, m2);
+                ProductionPtr result = make_shared<Production>();
+                result->push_back(Symbol::arrayEndSymbol());
+                result->push_back(Symbol::repeater(p2, p, true));
+                result->push_back(Symbol::arrayStartSymbol());
+                return result;
+            }
+        case AVRO_MAP:
+            {
+                ProductionPtr pp =
+                    doGenerate2(writer->leafAt(1),reader->leafAt(1), m, m2);
+                ProductionPtr v(new Production(*pp));
+                v->push_back(Symbol::stringSymbol());
+
+                ProductionPtr pp2 = getWriterProduction(writer->leafAt(1), m2);
+                ProductionPtr v2(new Production(*pp2));
+
+                v2->push_back(Symbol::stringSymbol());
+
+                ProductionPtr result = make_shared<Production>();
+                result->push_back(Symbol::mapEndSymbol());
+                result->push_back(Symbol::repeater(v, v2, false));
+                result->push_back(Symbol::mapStartSymbol());
+                return result;
+            }
+        case AVRO_UNION:
+            return resolveUnion(writer, reader, m, m2);
+        case AVRO_SYMBOLIC:
+            {
+                shared_ptr<NodeSymbolic> w =
+                    static_pointer_cast<NodeSymbolic>(writer);
+                shared_ptr<NodeSymbolic> r =
+                    static_pointer_cast<NodeSymbolic>(reader);
+                NodePair p(w->getNode(), r->getNode());
+                map<NodePair, ProductionPtr>::iterator it = m.find(p);
+                if (it != m.end() && it->second) {
+                    return it->second;
+                } else {
+                    m[p] = ProductionPtr();
+                    return make_shared<Production>(1, Symbol::placeholder(p));
+                }
+            }
+        default:
+            throw Exception("Unknown node type");
+        }
+    } else if (writerType == AVRO_UNION) {
+        return resolveUnion(writer, reader, m, m2);
+    } else {
+        switch (readerType) {
+        case AVRO_LONG:
+            if (writerType == AVRO_INT) {
+                return make_shared<Production>(1,
+                    Symbol::resolveSymbol(Symbol::sInt, Symbol::sLong));
+            }
+            break;
+        case AVRO_FLOAT:
+            if (writerType == AVRO_INT || writerType == AVRO_LONG) {
+                return make_shared<Production>(1,
+                    Symbol::resolveSymbol(writerType == AVRO_INT ?
+                    Symbol::sInt : Symbol::sLong, Symbol::sFloat));
+            }
+            break;
+        case AVRO_DOUBLE:
+            if (writerType == AVRO_INT || writerType == AVRO_LONG
+                || writerType == AVRO_FLOAT) {
+                return make_shared<Production>(1,
+                    Symbol::resolveSymbol(writerType == AVRO_INT ?
+                    Symbol::sInt : writerType == AVRO_LONG ?
+                    Symbol::sLong : Symbol::sFloat, Symbol::sDouble));
+            }
+            break;
+
+        case AVRO_UNION:
+            {
+                int j = bestBranch(writer, reader);
+                if (j >= 0) {
+                    ProductionPtr p = doGenerate2(writer, reader->leafAt(j), m, m2);
+                    ProductionPtr result = make_shared<Production>();
+                    result->push_back(Symbol::unionAdjustSymbol(j, p));
+                    result->push_back(Symbol::unionSymbol());
+                    return result;
+                }
+            }
+            break;
+        case AVRO_NULL:
+        case AVRO_BOOL:
+        case AVRO_INT:
+        case AVRO_STRING:
+        case AVRO_BYTES:
+        case AVRO_ENUM:
+        case AVRO_ARRAY:
+        case AVRO_MAP:
+        case AVRO_RECORD:
+            break;
+        default:
+            throw Exception("Unknown node type");
+        }
+    }
+    return make_shared<Production>(1, Symbol::error(writer, reader));
+}
+
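+/*
+ * While a field default is consumed (between sDefaultStart and sDefaultEnd),
+ * the handler swaps the caller's decoder for a binary decoder reading the
+ * default value, pre-encoded in Avro binary form, from an in-memory stream;
+ * the original decoder is restored afterwards.
+ */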
+class ResolvingDecoderHandler {
+    shared_ptr<vector<uint8_t> > defaultData_;
+    auto_ptr<InputStream> inp_;
+    DecoderPtr backup_;
+    DecoderPtr& base_;
+    const DecoderPtr binDecoder;
+  public:
+    ResolvingDecoderHandler(DecoderPtr& base) : base_(base),
+         binDecoder(binaryDecoder()) { }
+    size_t handle(const Symbol& s) {
+        switch (s.kind()) {
+        case Symbol::sWriterUnion:
+            return base_->decodeUnionIndex();
+        case Symbol::sDefaultStart:
+            defaultData_ = s.extra<shared_ptr<vector<uint8_t> > >();
+            backup_ = base_;
+            inp_ = memoryInputStream(&(*defaultData_)[0], defaultData_->size());
+            base_ = binDecoder;
+            base_->init(*inp_);
+            return 0;
+        case Symbol::sDefaultEnd:
+            base_ = backup_;
+            backup_.reset();
+            return 0;
+        default:
+            return 0;
+        }
+    }
+
+    void reset()
+    {
+        if (backup_ != NULL)
+        {
+            base_ = backup_;
+            backup_.reset();
+        }
+    }
+};
+
+template <typename Parser>
+class ResolvingDecoderImpl : public ResolvingDecoder
+{
+    DecoderPtr base_;
+    ResolvingDecoderHandler handler_;
+    Parser parser_;
+
+    void init(InputStream& is);
+    void decodeNull();
+    bool decodeBool();
+    int32_t decodeInt();
+    int64_t decodeLong();
+    float decodeFloat();
+    double decodeDouble();
+    void decodeString(string& value);
+    void skipString();
+    void decodeBytes(vector<uint8_t>& value);
+    void skipBytes();
+    void decodeFixed(size_t n, vector<uint8_t>& value);
+    void skipFixed(size_t n);
+    size_t decodeEnum();
+    size_t arrayStart();
+    size_t arrayNext();
+    size_t skipArray();
+    size_t mapStart();
+    size_t mapNext();
+    size_t skipMap();
+    size_t decodeUnionIndex();
+    const vector<size_t>& fieldOrder();
+public:
+    ResolvingDecoderImpl(const ValidSchema& writer, const ValidSchema& reader,
+        const DecoderPtr& base) :
+        base_(base),
+        handler_(base_),
+        parser_(ResolvingGrammarGenerator().generate(writer, reader),
+            &(*base_), handler_)
+    {
+    }
+};
+
+template <typename P>
+void ResolvingDecoderImpl<P>::init(InputStream& is)
+{
+    handler_.reset();
+    base_->init(is);
+    parser_.reset();
+}
+
+template <typename P>
+void ResolvingDecoderImpl<P>::decodeNull()
+{
+    parser_.advance(Symbol::sNull);
+    base_->decodeNull();
+}
+
+template <typename P>
+bool ResolvingDecoderImpl<P>::decodeBool()
+{
+    parser_.advance(Symbol::sBool);
+    return base_->decodeBool();
+}
+
+template <typename P>
+int32_t ResolvingDecoderImpl<P>::decodeInt()
+{
+    parser_.advance(Symbol::sInt);
+    return base_->decodeInt();
+}
+
+template <typename P>
+int64_t ResolvingDecoderImpl<P>::decodeLong()
+{
+    Symbol::Kind k = parser_.advance(Symbol::sLong);
+    return k == Symbol::sInt ? base_->decodeInt() : base_->decodeLong();
+}
+
+template <typename P>
+float ResolvingDecoderImpl<P>::decodeFloat()
+{
+    Symbol::Kind k = parser_.advance(Symbol::sFloat);
+    return k == Symbol::sInt ? base_->decodeInt() :
+        k == Symbol::sLong ? base_->decodeLong() :
+        base_->decodeFloat();
+}
+
+template <typename P>
+double ResolvingDecoderImpl<P>::decodeDouble()
+{
+    Symbol::Kind k = parser_.advance(Symbol::sDouble);
+    return k == Symbol::sInt ? base_->decodeInt() :
+        k == Symbol::sLong ? base_->decodeLong() :
+        k == Symbol::sFloat ? base_->decodeFloat() :
+        base_->decodeDouble();
+}
+
+template <typename P>
+void ResolvingDecoderImpl<P>::decodeString(string& value)
+{
+    parser_.advance(Symbol::sString);
+    base_->decodeString(value);
+}
+
+template <typename P>
+void ResolvingDecoderImpl<P>::skipString()
+{
+    parser_.advance(Symbol::sString);
+    base_->skipString();
+}
+
+template <typename P>
+void ResolvingDecoderImpl<P>::decodeBytes(vector<uint8_t>& value)
+{
+    parser_.advance(Symbol::sBytes);
+    base_->decodeBytes(value);
+}
+
+template <typename P>
+void ResolvingDecoderImpl<P>::skipBytes()
+{
+    parser_.advance(Symbol::sBytes);
+    base_->skipBytes();
+}
+
+template <typename P>
+void ResolvingDecoderImpl<P>::decodeFixed(size_t n, vector<uint8_t>& value)
+{
+    parser_.advance(Symbol::sFixed);
+    parser_.assertSize(n);
+    return base_->decodeFixed(n, value);
+}
+
+template <typename P>
+void ResolvingDecoderImpl<P>::skipFixed(size_t n)
+{
+    parser_.advance(Symbol::sFixed);
+    parser_.assertSize(n);
+    base_->skipFixed(n);
+}
+
+template <typename P>
+size_t ResolvingDecoderImpl<P>::decodeEnum()
+{
+    parser_.advance(Symbol::sEnum);
+    size_t n = base_->decodeEnum();
+    return parser_.enumAdjust(n);
+}
+
+template <typename P>
+size_t ResolvingDecoderImpl<P>::arrayStart()
+{
+    parser_.advance(Symbol::sArrayStart);
+    size_t result = base_->arrayStart();
+    if (result == 0) {
+        parser_.popRepeater();
+        parser_.advance(Symbol::sArrayEnd);
+    } else {
+        parser_.setRepeatCount(result);
+    }
+    return result;
+}
+
+template <typename P>
+size_t ResolvingDecoderImpl<P>::arrayNext()
+{
+    parser_.processImplicitActions();
+    size_t result = base_->arrayNext();
+    if (result == 0) {
+        parser_.popRepeater();
+        parser_.advance(Symbol::sArrayEnd);
+    } else {
+        parser_.setRepeatCount(result);
+    }
+    return result;
+}
+
+template <typename P>
+size_t ResolvingDecoderImpl<P>::skipArray()
+{
+    parser_.advance(Symbol::sArrayStart);
+    size_t n = base_->skipArray();
+    if (n == 0) {
+        parser_.pop();
+    } else {
+        parser_.setRepeatCount(n);
+        parser_.skip(*base_);
+    }
+    parser_.advance(Symbol::sArrayEnd);
+    return 0;
+}
+
+template <typename P>
+size_t ResolvingDecoderImpl<P>::mapStart()
+{
+    parser_.advance(Symbol::sMapStart);
+    size_t result = base_->mapStart();
+    if (result == 0) {
+        parser_.popRepeater();
+        parser_.advance(Symbol::sMapEnd);
+    } else {
+        parser_.setRepeatCount(result);
+    }
+    return result;
+}
+
+template <typename P>
+size_t ResolvingDecoderImpl<P>::mapNext()
+{
+    parser_.processImplicitActions();
+    size_t result = base_->mapNext();
+    if (result == 0) {
+        parser_.popRepeater();
+        parser_.advance(Symbol::sMapEnd);
+    } else {
+        parser_.setRepeatCount(result);
+    }
+    return result;
+}
+
+template <typename P>
+size_t ResolvingDecoderImpl<P>::skipMap()
+{
+    parser_.advance(Symbol::sMapStart);
+    size_t n = base_->skipMap();
+    if (n == 0) {
+        parser_.pop();
+    } else {
+        parser_.setRepeatCount(n);
+        parser_.skip(*base_);
+    }
+    parser_.advance(Symbol::sMapEnd);
+    return 0;
+}
+
+template <typename P>
+size_t ResolvingDecoderImpl<P>::decodeUnionIndex()
+{
+    parser_.advance(Symbol::sUnion);
+    return parser_.unionAdjust();
+}
+
+template <typename P>
+const vector<size_t>& ResolvingDecoderImpl<P>::fieldOrder()
+{
+    parser_.advance(Symbol::sRecord);
+    return parser_.sizeList();
+}
+
+}   // namespace parsing
+
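+/*
+ * A usage sketch (illustrative only; `writerSchema`, `readerSchema`, `data`
+ * and `len` are assumed to be supplied by the caller):
+ *
+ *   avro::ResolvingDecoderPtr r = avro::resolvingDecoder(
+ *       writerSchema, readerSchema, avro::binaryDecoder());
+ *   std::auto_ptr<avro::InputStream> in = avro::memoryInputStream(data, len);
+ *   r->init(*in);
+ *   int64_t v = r->decodeLong();   // also accepts a writer int, promoted
+ */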
+ResolvingDecoderPtr resolvingDecoder(const ValidSchema& writer,
+    const ValidSchema& reader, const DecoderPtr& base) {
+    return make_shared<parsing::ResolvingDecoderImpl
+        <parsing::SimpleParser<parsing::ResolvingDecoderHandler> > >(
+        writer, reader, base);
+}
+
+}   // namespace avro
+
diff --git a/lang/c++/impl/parsing/Symbol.cc b/lang/c++/impl/parsing/Symbol.cc
new file mode 100644
index 0000000..a651f40
--- /dev/null
+++ b/lang/c++/impl/parsing/Symbol.cc
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include "Symbol.hh"
+
+namespace avro {
+namespace parsing {
+
+using std::vector;
+using std::string;
+using std::ostringstream;
+
+const char* Symbol::stringValues[] = {
+    "TerminalLow",
+    "Null",
+    "Bool",
+    "Int",
+    "Long",
+    "Float",
+    "Double",
+    "String",
+    "Bytes",
+    "ArrayStart",
+    "ArrayEnd",
+    "MapStart",
+    "MapEnd",
+    "Fixed",
+    "Enum",
+    "Union",
+    "TerminalHigh",
+    "SizeCheck",
+    "NameList",
+    "Root",
+    "Repeater",
+    "Alternative",
+    "Placeholder",
+    "Indirect",
+    "Symbolic",
+    "EnumAdjust",
+    "UnionAdjust",
+    "SkipStart",
+    "Resolve",
+    "ImplicitActionLow",
+    "RecordStart",
+    "RecordEnd",
+    "Field",
+    "Record",
+    "SizeList",
+    "WriterUnion",
+    "DefaultStart",
+    "DefaultEnd",
+    "ImplicitActionHigh",
+    "Error"
+};
+
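+/*
+ * Builds the writer-index to reader-index translation consumed by
+ * SimpleParser::enumAdjust. For example, with hypothetical enums, writer
+ * symbols {A, B} against reader symbols {B} yield adj = {-1, 0}: writer
+ * index 1 maps to reader index 0, while writer index 0 is unresolvable and
+ * decoding it raises an error naming "A".
+ */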
+Symbol Symbol::enumAdjustSymbol(const NodePtr& writer, const NodePtr& reader)
+{
+    vector<string> rs;
+    size_t rc = reader->names();
+    for (size_t i = 0; i < rc; ++i) {
+        rs.push_back(reader->nameAt(i));
+    }
+
+    size_t wc = writer->names();
+    vector<int> adj;
+    adj.reserve(wc);
+
+    vector<string> err;
+
+    for (size_t i = 0; i < wc; ++i) {
+        const string& s = writer->nameAt(i);
+        vector<string>::const_iterator it = find(rs.begin(), rs.end(), s);
+        if (it == rs.end()) {
+            int pos = err.size() + 1;
+            adj.push_back(-pos);
+            err.push_back(s);
+        } else {
+            adj.push_back(it - rs.begin());
+        }
+    }
+    return Symbol(sEnumAdjust, make_pair(adj, err));
+}
+
+Symbol Symbol::error(const NodePtr& writer, const NodePtr& reader)
+{
+    ostringstream oss;
+    oss << "Cannot resolve: " << std::endl;
+    writer->printJson(oss, 0);
+    oss << std::endl << "with" << std::endl;
+    reader->printJson(oss, 0);
+    return Symbol(sError, oss.str());
+}
+
+}   // namespace parsing
+}   // namespace avro
diff --git a/lang/c++/impl/parsing/Symbol.hh b/lang/c++/impl/parsing/Symbol.hh
new file mode 100644
index 0000000..a7c0997
--- /dev/null
+++ b/lang/c++/impl/parsing/Symbol.hh
@@ -0,0 +1,798 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_parsing_Symbol_hh__
+#define avro_parsing_Symbol_hh__
+
+#include <vector>
+#include <map>
+#include <set>
+#include <stack>
+#include <sstream>
+
+#include <boost/any.hpp>
+#include <boost/shared_ptr.hpp>
+#include <boost/weak_ptr.hpp>
+#include <boost/make_shared.hpp>
+#include <boost/tuple/tuple.hpp>
+
+#include "Node.hh"
+#include "Decoder.hh"
+#include "Exception.hh"
+
+namespace avro {
+namespace parsing {
+
+class Symbol;
+
+typedef std::vector<Symbol> Production;
+typedef boost::shared_ptr<Production> ProductionPtr;
+typedef boost::tuple<size_t, bool, ProductionPtr, ProductionPtr> RepeaterInfo;
+typedef boost::tuple<ProductionPtr, ProductionPtr> RootInfo;
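+/*
+ * RepeaterInfo holds, in order: the remaining item count, whether the
+ * repeater is an array (true) or a map (false), and the productions used to
+ * read and to skip one item. RootInfo pairs the main production with the
+ * backup (writer-only) production.
+ */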
+
+class Symbol {
+public:
+    enum Kind {
+        sTerminalLow,   // extra has nothing
+        sNull,
+        sBool,
+        sInt,
+        sLong,
+        sFloat,
+        sDouble,
+        sString,
+        sBytes,
+        sArrayStart,
+        sArrayEnd,
+        sMapStart,
+        sMapEnd,
+        sFixed,
+        sEnum,
+        sUnion,
+        sTerminalHigh,
+        sSizeCheck,     // Extra has size
+        sNameList,      // Extra has a vector<string>
+        sRoot,          // Root for a schema, extra is RootInfo
+        sRepeater,      // Array or Map, extra is RepeaterInfo
+        sAlternative,   // One of many (union), extra is vector<ProductionPtr>
+        sPlaceholder,   // To be fixed up later.
+        sIndirect,      // extra is shared_ptr<Production>
+        sSymbolic,      // extra is weak_ptr<Production>
+        sEnumAdjust,
+        sUnionAdjust,
+        sSkipStart,
+        sResolve,
+
+        sImplicitActionLow,
+        sRecordStart,
+        sRecordEnd,
+        sField,         // extra is string
+        sRecord,
+        sSizeList,
+        sWriterUnion,
+        sDefaultStart,  // extra has default value in Avro binary encoding
+        sDefaultEnd,
+        sImplicitActionHigh,
+        sError
+    };
+
+private:
+    Kind kind_;
+    boost::any extra_;
+
+
+    explicit Symbol(Kind k) : kind_(k) { }
+    template <typename T> Symbol(Kind k, T t) : kind_(k), extra_(t) { }
+public:
+
+    Kind kind() const {
+        return kind_;
+    }
+
+    template <typename T> T extra() const {
+        return boost::any_cast<T>(extra_);
+    }
+
+    template <typename T> T* extrap() {
+        return boost::any_cast<T>(&extra_);
+    }
+
+    template <typename T> const T* extrap() const {
+        return boost::any_cast<T>(&extra_);
+    }
+
+    template <typename T> void extra(const T& t) {
+        extra_ = t;
+    }
+
+    bool isTerminal() const {
+        return kind_ > sTerminalLow && kind_ < sTerminalHigh;
+    }
+
+    bool isImplicitAction() const {
+        return kind_ > sImplicitActionLow && kind_ < sImplicitActionHigh;
+    }
+
+    static const char* stringValues[];
+    static const char* toString(Kind k) {
+        return stringValues[k];
+    }
+
+    static Symbol rootSymbol(ProductionPtr& s)
+    {
+        return Symbol(Symbol::sRoot, RootInfo(s, boost::make_shared<Production>()));
+    }
+
+    static Symbol rootSymbol(const ProductionPtr& main,
+                             const ProductionPtr& backup)
+    {
+        return Symbol(Symbol::sRoot, RootInfo(main, backup));
+    }
+
+    static Symbol nullSymbol() {
+        return Symbol(sNull);
+    }
+
+    static Symbol boolSymbol() {
+        return Symbol(sBool);
+    }
+
+    static Symbol intSymbol() {
+        return Symbol(sInt);
+    }
+
+    static Symbol longSymbol() {
+        return Symbol(sLong);
+    }
+
+    static Symbol floatSymbol() {
+        return Symbol(sFloat);
+    }
+
+    static Symbol doubleSymbol() {
+        return Symbol(sDouble);
+    }
+
+    static Symbol stringSymbol() {
+        return Symbol(sString);
+    }
+
+    static Symbol bytesSymbol() {
+        return Symbol(sBytes);
+    }
+
+    static Symbol sizeCheckSymbol(size_t s) {
+        return Symbol(sSizeCheck, s);
+    }
+
+    static Symbol fixedSymbol() {
+        return Symbol(sFixed);
+    }
+
+    static Symbol enumSymbol() {
+        return Symbol(sEnum);
+    }
+
+    static Symbol arrayStartSymbol() {
+        return Symbol(sArrayStart);
+    }
+
+    static Symbol arrayEndSymbol() {
+        return Symbol(sArrayEnd);
+    }
+
+    static Symbol mapStartSymbol() {
+        return Symbol(sMapStart);
+    }
+
+    static Symbol mapEndSymbol() {
+        return Symbol(sMapEnd);
+    }
+
+    static Symbol repeater(const ProductionPtr& p,
+                           bool isArray) {
+        size_t s = 0;
+        return Symbol(sRepeater, boost::make_tuple(s, isArray, p, p));
+    }
+
+    static Symbol repeater(const ProductionPtr& read,
+                           const ProductionPtr& skip,
+                           bool isArray) {
+        size_t s = 0;
+        return Symbol(sRepeater, boost::make_tuple(s, isArray, read, skip));
+    }
+
+    static Symbol defaultStartAction(boost::shared_ptr<std::vector<uint8_t> > bb)
+    {
+        return Symbol(sDefaultStart, bb);
+    }
+
+    static Symbol defaultEndAction()
+    {
+        return Symbol(sDefaultEnd);
+    }
+
+    static Symbol alternative(
+        const std::vector<ProductionPtr>& branches)
+    {
+        return Symbol(Symbol::sAlternative, branches);
+    }
+
+    static Symbol unionSymbol() {
+        return Symbol(sUnion);
+    }
+
+    static Symbol recordStartSymbol() {
+        return Symbol(sRecordStart);
+    }
+
+    static Symbol recordEndSymbol() {
+        return Symbol(sRecordEnd);
+    }
+
+    static Symbol fieldSymbol(const std::string& name) {
+        return Symbol(sField, name);
+    }
+
+    static Symbol writerUnionAction() {
+        return Symbol(sWriterUnion);
+    }
+
+    static Symbol nameListSymbol(
+        const std::vector<std::string>& v) {
+        return Symbol(sNameList, v);
+    }
+
+    template <typename T>
+    static Symbol placeholder(const T& n) {
+        return Symbol(sPlaceholder, n);
+    }
+
+    static Symbol indirect(const ProductionPtr& p) {
+        return Symbol(sIndirect, p);
+    }
+
+    static Symbol symbolic(const boost::weak_ptr<Production>& p) {
+        return Symbol(sSymbolic, p);
+    }
+
+    static Symbol enumAdjustSymbol(const NodePtr& writer,
+        const NodePtr& reader);
+
+    static Symbol unionAdjustSymbol(size_t branch,
+                                    const ProductionPtr& p) {
+        return Symbol(sUnionAdjust, std::make_pair(branch, p));
+    }
+
+    static Symbol sizeListAction(std::vector<size_t> order) {
+        return Symbol(sSizeList, order);
+    }
+
+    static Symbol recordAction() {
+        return Symbol(sRecord);
+    }
+
+    static Symbol error(const NodePtr& writer, const NodePtr& reader);
+
+    static Symbol resolveSymbol(Kind w, Kind r) {
+        return Symbol(sResolve, std::make_pair(w, r));
+    }
+
+    static Symbol skipStart() {
+        return Symbol(sSkipStart);
+    }
+
+};
+
+/**
+ * Recursively replaces all placeholders in the production with the
+ * corresponding values.
+ */
+template<typename T>
+void fixup(const ProductionPtr& p,
+           const std::map<T, ProductionPtr> &m)
+{
+    std::set<ProductionPtr> seen;
+    for (Production::iterator it = p->begin(); it != p->end(); ++it) {
+        fixup(*it, m, seen);
+    }
+}
+
+/**
+ * Recursively replaces all placeholders in the production with the
+ * corresponding values, skipping productions that have already been visited.
+ * Placeholders arise for recursive types (e.g. a record that refers to
+ * itself): generation leaves a placeholder at the self-reference, which is
+ * later replaced by a weak reference to the finished production.
+ */
+template<typename T>
+void fixup_internal(const ProductionPtr& p,
+                    const std::map<T, ProductionPtr> &m,
+                    std::set<ProductionPtr>& seen)
+{
+    if (seen.find(p) == seen.end()) {
+        seen.insert(p);
+        for (Production::iterator it = p->begin(); it != p->end(); ++it) {
+            fixup(*it, m, seen);
+        }
+    }
+}
+
+template<typename T>
+void fixup(Symbol& s, const std::map<T, ProductionPtr> &m,
+           std::set<ProductionPtr>& seen)
+{
+    switch (s.kind()) {
+    case Symbol::sIndirect:
+        fixup_internal(s.extra<ProductionPtr>(), m, seen);
+        break;
+    case Symbol::sAlternative:
+        {
+            const std::vector<ProductionPtr> *vv =
+            s.extrap<std::vector<ProductionPtr> >();
+            for (std::vector<ProductionPtr>::const_iterator it = vv->begin();
+                it != vv->end(); ++it) {
+                fixup_internal(*it, m, seen);
+            }
+        }
+        break;
+    case Symbol::sRepeater:
+        {
+            const RepeaterInfo& ri = *s.extrap<RepeaterInfo>();
+            fixup_internal(boost::tuples::get<2>(ri), m, seen);
+            fixup_internal(boost::tuples::get<3>(ri), m, seen);
+        }
+        break;
+    case Symbol::sPlaceholder:
+        {
+            typename std::map<T, boost::shared_ptr<Production> >::const_iterator it =
+                m.find(s.extra<T>());
+            if (it == m.end()) {
+                throw Exception("Placeholder symbol cannot be resolved");
+            }
+            s = Symbol::symbolic(boost::weak_ptr<Production>(it->second));
+        }
+        break;
+    case Symbol::sUnionAdjust:
+        fixup_internal(s.extrap<std::pair<size_t, ProductionPtr> >()->second,
+                       m, seen);
+        break;
+    default:
+        break;
+    }
+}
+
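+/*
+ * A pushdown automaton over grammar symbols: advance(k) pops terminals and
+ * expands non-terminals from the parsing stack until the expected terminal
+ * k is matched, invoking Handler::handle() for each implicit action
+ * encountered along the way.
+ */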
+template<typename Handler>
+class SimpleParser {
+    Decoder* decoder_;
+    Handler& handler_;
+    std::stack<Symbol> parsingStack;
+
+    static void throwMismatch(Symbol::Kind expected, Symbol::Kind actual)
+    {
+        std::ostringstream oss;
+        oss << "Invalid operation. Expected: " <<
+            Symbol::toString(expected) << " got " <<
+            Symbol::toString(actual);
+        throw Exception(oss.str());
+    }
+
+    static void assertMatch(Symbol::Kind expected, Symbol::Kind actual)
+    {
+        if (expected != actual) {
+            throwMismatch(expected, actual);
+        }
+
+    }
+
+    void append(const ProductionPtr& ss) {
+        for (Production::const_iterator it = ss->begin();
+            it != ss->end(); ++it) {
+            parsingStack.push(*it);
+        }
+    }
+
+    size_t popSize() {
+        const Symbol& s = parsingStack.top();
+        assertMatch(Symbol::sSizeCheck, s.kind());
+        size_t result = s.extra<size_t>();
+        parsingStack.pop();
+        return result;
+    }
+
+    static void assertLessThan(size_t n, size_t s) {
+        if (n >= s) {
+            std::ostringstream oss;
+            oss << "Size max value. Upper bound: " << s << " found " << n;
+            throw Exception(oss.str());
+        }
+    }
+
+public:
+    Symbol::Kind advance(Symbol::Kind k) {
+        for (; ;) {
+            Symbol& s = parsingStack.top();
+            if (s.kind() == k) {
+                parsingStack.pop();
+                return k;
+            } else if (s.isTerminal()) {
+                throwMismatch(k, s.kind());
+            } else {
+                switch (s.kind()) {
+                case Symbol::sRoot:
+                    append(boost::tuples::get<0>(*s.extrap<RootInfo>()));
+                    continue;
+                case Symbol::sIndirect:
+                    {
+                        ProductionPtr pp =
+                            s.extra<ProductionPtr>();
+                        parsingStack.pop();
+                        append(pp);
+                    }
+                    continue;
+                case Symbol::sSymbolic:
+                    {
+                        ProductionPtr pp(
+                            s.extra<boost::weak_ptr<Production> >());
+                        parsingStack.pop();
+                        append(pp);
+                    }
+                    continue;
+                case Symbol::sRepeater:
+                    {
+                        RepeaterInfo *p = s.extrap<RepeaterInfo>();
+                        --boost::tuples::get<0>(*p);
+                        append(boost::tuples::get<2>(*p));
+                    }
+                    continue;
+                case Symbol::sError:
+                    throw Exception(s.extra<std::string>());
+                case Symbol::sResolve:
+                    {
+                        const std::pair<Symbol::Kind, Symbol::Kind>* p =
+                            s.extrap<std::pair<Symbol::Kind, Symbol::Kind> >();
+                        assertMatch(p->second, k);
+                        Symbol::Kind result = p->first;
+                        parsingStack.pop();
+                        return result;
+                    }
+                case Symbol::sSkipStart:
+                    parsingStack.pop();
+                    skip(*decoder_);
+                    break;
+                default:
+                    if (s.isImplicitAction()) {
+                        size_t n = handler_.handle(s);
+                        if (s.kind() == Symbol::sWriterUnion) {
+                            parsingStack.pop();
+                            selectBranch(n); 
+                        } else {
+                            parsingStack.pop();
+                        }
+                    } else {
+                        std::ostringstream oss;
+                        oss << "Encountered " << Symbol::toString(s.kind())
+                            << " while looking for " << Symbol::toString(k);
+                        throw Exception(oss.str());
+                    }
+                }
+            }
+        }
+    }
+
+    void skip(Decoder& d) {
+        const size_t sz = parsingStack.size();
+        if (sz == 0) {
+            throw Exception("Nothing to skip!");
+        }
+        while (parsingStack.size() >= sz) {
+            Symbol& t = parsingStack.top();
+            switch (t.kind()) {
+            case Symbol::sNull:
+                d.decodeNull();
+                break;
+            case Symbol::sBool:
+                d.decodeBool();
+                break;
+            case Symbol::sInt:
+                d.decodeInt();
+                break;
+            case Symbol::sLong:
+                d.decodeLong();
+                break;
+            case Symbol::sFloat:
+                d.decodeFloat();
+                break;
+            case Symbol::sDouble:
+                d.decodeDouble();
+                break;
+            case Symbol::sString:
+                d.skipString();
+                break;
+            case Symbol::sBytes:
+                d.skipBytes();
+                break;
+            case Symbol::sArrayStart:
+                {
+                    parsingStack.pop();
+                    size_t n = d.skipArray();
+                    assertMatch(Symbol::sRepeater, parsingStack.top().kind());
+                    if (n == 0) {
+                        break;
+                    }
+                    Symbol& t = parsingStack.top();
+                    RepeaterInfo *p = t.extrap<RepeaterInfo>();
+                    boost::tuples::get<0>(*p) = n;
+                    continue;
+                }
+            case Symbol::sArrayEnd:
+                break;
+            case Symbol::sMapStart:
+                {
+                    parsingStack.pop();
+                    size_t n = d.skipMap();
+                    if (n == 0) {
+                        break;
+                    }
+                    assertMatch(Symbol::sRepeater, parsingStack.top().kind());
+                    Symbol& t = parsingStack.top();
+                    RepeaterInfo *p = t.extrap<RepeaterInfo>();
+                    boost::tuples::get<0>(*p) = n;
+                    continue;
+                }
+            case Symbol::sMapEnd:
+                break;
+            case Symbol::sFixed:
+                {
+                    parsingStack.pop();
+                    Symbol& t = parsingStack.top();
+                    d.decodeFixed(t.extra<size_t>());
+                }
+                break;
+            case Symbol::sEnum:
+                parsingStack.pop();
+                d.decodeEnum();
+                break;
+            case Symbol::sUnion:
+                {
+                    parsingStack.pop();
+                    size_t n = d.decodeUnionIndex();
+                    selectBranch(n);
+                    continue;
+                }
+            case Symbol::sRepeater:
+                {
+                    RepeaterInfo *p = t.extrap<RepeaterInfo>();
+                    if (boost::tuples::get<0>(*p) == 0) {
+                        boost::tuples::get<0>(*p) =
+                            boost::tuples::get<1>(*p) ? d.arrayNext() :
+                                d.mapNext();
+                    }
+                    if (boost::tuples::get<0>(*p) != 0) {
+                        --boost::tuples::get<0>(*p);
+                        append(boost::tuples::get<3>(*p));
+                        continue;
+                    }
+                }
+                break;
+            case Symbol::sIndirect:
+                {
+                    ProductionPtr pp =
+                        t.extra<ProductionPtr>();
+                    parsingStack.pop();
+                    append(pp);
+                }
+                continue;
+            case Symbol::sSymbolic:
+                {
+                    ProductionPtr pp(
+                        t.extra<boost::weak_ptr<Production> >());
+                    parsingStack.pop();
+                    append(pp);
+                }
+                continue;
+            default:
+                {
+                    std::ostringstream oss;
+                    oss << "Don't know how to skip "
+                        << Symbol::toString(t.kind());
+                    throw Exception(oss.str());
+                }
+            }
+            parsingStack.pop();
+        }
+    }
+
+    void assertSize(size_t n) {
+        size_t s = popSize();
+        if (s != n) {
+            std::ostringstream oss;
+            oss << "Incorrect size. Expected: " << s << " found " << n;
+            throw Exception(oss.str());
+        }
+    }
+
+    void assertLessThanSize(size_t n) {
+        assertLessThan(n, popSize());
+    }
+
+    size_t enumAdjust(size_t n) {
+        const Symbol& s = parsingStack.top();
+        assertMatch(Symbol::sEnumAdjust, s.kind());
+        const std::pair<std::vector<int>, std::vector<std::string> >* v =
+            s.extrap<std::pair<std::vector<int>, std::vector<std::string> > >();
+        assertLessThan(n, v->first.size());
+
+        int result = v->first[n];
+        if (result < 0) {
+            std::ostringstream oss;
+            oss << "Cannot resolve symbol: " << v->second[-result - 1]
+                << std::endl;
+            throw Exception(oss.str());
+        }
+        parsingStack.pop();
+        return result;
+    }
+
+    size_t unionAdjust() {
+        const Symbol& s = parsingStack.top();
+        assertMatch(Symbol::sUnionAdjust, s.kind());
+        std::pair<size_t, ProductionPtr> p =
+        s.extra<std::pair<size_t, ProductionPtr> >();
+        parsingStack.pop();
+        append(p.second);
+        return p.first;
+    }
+
+    std::string nameForIndex(size_t e) {
+        const Symbol& s = parsingStack.top();
+        assertMatch(Symbol::sNameList, s.kind());
+        const std::vector<std::string> names =
+            s.extra<std::vector<std::string> >();
+        if (e >= names.size()) {
+            throw Exception("Not that many names");
+        }
+        std::string result = names[e];
+        parsingStack.pop();
+        return result;
+    }
+
+    size_t indexForName(const std::string &name) {
+        const Symbol& s = parsingStack.top();
+        assertMatch(Symbol::sNameList, s.kind());
+        const std::vector<std::string> names =
+            s.extra<std::vector<std::string> >();
+        std::vector<std::string>::const_iterator it =
+            std::find(names.begin(), names.end(), name);
+        if (it == names.end()) {
+            throw Exception("No such enum symbol");
+        }
+        size_t result = it - names.begin();
+        parsingStack.pop();
+        return result;
+    }
+
+    void setRepeatCount(size_t n) {
+        Symbol& s = parsingStack.top();
+        assertMatch(Symbol::sRepeater, s.kind());
+        size_t& nn = boost::tuples::get<0>(*s.extrap<RepeaterInfo>());
+        if (nn != 0) {
+            throw Exception("Wrong number of items");
+        }
+        nn = n;
+    }
+
+    void popRepeater() {
+        processImplicitActions();
+        const Symbol& s = parsingStack.top();
+        assertMatch(Symbol::sRepeater, s.kind());
+        if (boost::tuples::get<0>(*s.extrap<RepeaterInfo>()) != 0) {
+            throw Exception("Incorrect number of items");
+        }
+        parsingStack.pop();
+    }
+
+    void selectBranch(size_t n) {
+        const Symbol& s = parsingStack.top();
+        assertMatch(Symbol::sAlternative, s.kind());
+        std::vector<ProductionPtr> v =
+            s.extra<std::vector<ProductionPtr> >();
+        if (n >= v.size()) {
+            throw Exception("Not that many branches");
+        }
+        parsingStack.pop();
+        append(v[n]);
+    }
+
+    const std::vector<size_t>& sizeList() {
+        const Symbol& s = parsingStack.top();
+        assertMatch(Symbol::sSizeList, s.kind());
+        return *s.extrap<std::vector<size_t> >();
+    }
+
+    Symbol::Kind top() const {
+        return parsingStack.top().kind();
+    }
+
+    void pop() {
+        parsingStack.pop();
+    }
+
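+    // Executes and pops any implicit actions on top of the parsing stack,
+    // stopping at the first symbol that requires input from the caller.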
+    void processImplicitActions() {
+        for (; ;) {
+            Symbol& s = parsingStack.top();
+            if (s.isImplicitAction()) {
+                handler_.handle(s);
+                parsingStack.pop();
+            } else {
+                break;
+            }
+        }
+    }
+
+    SimpleParser(const Symbol& s, Decoder* d, Handler& h) :
+        decoder_(d), handler_(h) {
+        parsingStack.push(s);
+    }
+
+    void reset() {
+        while (parsingStack.size() > 1) {
+            parsingStack.pop();
+        }
+    }
+
+};
+
+inline std::ostream& operator<<(std::ostream& os, const Symbol& s);
+
+inline std::ostream& operator<<(std::ostream& os, const Production& p)
+{
+    os << '(';
+    for (Production::const_iterator it = p.begin(); it != p.end(); ++it) {
+        os << *it << ", ";
+    }
+    os << ')';
+    return os;
+}
+
+inline std::ostream& operator<<(std::ostream& os, const Symbol& s)
+{
+    switch (s.kind()) {
+    case Symbol::sRepeater:
+        {
+            const RepeaterInfo& ri = *s.extrap<RepeaterInfo>();
+            os << '(' << Symbol::toString(s.kind())
+                << boost::tuples::get<2>(ri)
+                << boost::tuples::get<3>(ri)
+                << ')';
+        }
+        break;
+    case Symbol::sIndirect:
+        {
+            os << '(' << Symbol::toString(s.kind()) << ' '
+                << *s.extra<boost::shared_ptr<Production> >() << ')';
+        }
+        break;
+    default:
+        os << Symbol::toString(s.kind());
+        break;
+    }
+    return os;
+}
+}   // namespace parsing
+}   // namespace avro
+
+#endif
diff --git a/lang/c++/impl/parsing/ValidatingCodec.cc b/lang/c++/impl/parsing/ValidatingCodec.cc
new file mode 100644
index 0000000..46670e1
--- /dev/null
+++ b/lang/c++/impl/parsing/ValidatingCodec.cc
@@ -0,0 +1,594 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ValidatingCodec.hh"
+
+#include <string>
+#include <map>
+#include <algorithm>
+#include <boost/shared_ptr.hpp>
+#include <boost/make_shared.hpp>
+#include <boost/weak_ptr.hpp>
+#include <boost/any.hpp>
+
+#include "ValidSchema.hh"
+#include "Decoder.hh"
+#include "Encoder.hh"
+#include "NodeImpl.hh"
+
+namespace avro {
+
+namespace parsing {
+
+using boost::shared_ptr;
+using boost::weak_ptr;
+using boost::static_pointer_cast;
+using boost::make_shared;
+
+using std::map;
+using std::vector;
+using std::pair;
+using std::string;
+using std::reverse;
+using std::ostringstream;
+
+/** Follows the design of Avro Parser in Java. */
+ProductionPtr ValidatingGrammarGenerator::generate(const NodePtr& n)
+{
+    map<NodePtr, ProductionPtr> m;
+    ProductionPtr result = doGenerate(n, m);
+    fixup(result, m);
+    return result;
+}
+
+Symbol ValidatingGrammarGenerator::generate(const ValidSchema& schema)
+{
+    ProductionPtr r = generate(schema.root());
+    return Symbol::rootSymbol(r);
+}
+
+ProductionPtr ValidatingGrammarGenerator::doGenerate(const NodePtr& n,
+    map<NodePtr, ProductionPtr> &m) {
+    switch (n->type()) {
+    case AVRO_NULL:
+        return make_shared<Production>(1, Symbol::nullSymbol());
+    case AVRO_BOOL:
+        return make_shared<Production>(1, Symbol::boolSymbol());
+    case AVRO_INT:
+        return make_shared<Production>(1, Symbol::intSymbol());
+    case AVRO_LONG:
+        return make_shared<Production>(1, Symbol::longSymbol());
+    case AVRO_FLOAT:
+        return make_shared<Production>(1, Symbol::floatSymbol());
+    case AVRO_DOUBLE:
+        return make_shared<Production>(1, Symbol::doubleSymbol());
+    case AVRO_STRING:
+        return make_shared<Production>(1, Symbol::stringSymbol());
+    case AVRO_BYTES:
+        return make_shared<Production>(1, Symbol::bytesSymbol());
+    case AVRO_FIXED:
+        {
+            ProductionPtr result = make_shared<Production>();
+            result->push_back(Symbol::sizeCheckSymbol(n->fixedSize()));
+            result->push_back(Symbol::fixedSymbol());
+            m[n] = result;
+            return result;
+        }
+    case AVRO_RECORD:
+        {
+            ProductionPtr result = make_shared<Production>();
+
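+            // Productions are stored in reverse of matching order (the symbol
+            // to match first sits at the back), so each field's production is
+            // appended back-to-front and the concatenation reversed once more.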
+            m.erase(n);
+            size_t c = n->leaves();
+            for (size_t i = 0; i < c; ++i) {
+                const NodePtr& leaf = n->leafAt(i);
+                ProductionPtr v = doGenerate(leaf, m);
+                copy(v->rbegin(), v->rend(), back_inserter(*result));
+            }
+            reverse(result->begin(), result->end());
+
+            m[n] = result;
+            return result;
+        }
+    case AVRO_ENUM:
+        {
+            ProductionPtr result = make_shared<Production>();
+            result->push_back(Symbol::sizeCheckSymbol(n->names()));
+            result->push_back(Symbol::enumSymbol());
+            m[n] = result;
+            return result;
+        }
+    case AVRO_ARRAY:
+        {
+            ProductionPtr result = make_shared<Production>();
+            result->push_back(Symbol::arrayEndSymbol());
+            result->push_back(Symbol::repeater(doGenerate(n->leafAt(0), m), true));
+            result->push_back(Symbol::arrayStartSymbol());
+            return result;
+        }
+    case AVRO_MAP:
+        {
+            ProductionPtr pp = doGenerate(n->leafAt(1), m);
+            ProductionPtr v(new Production(*pp));
+            v->push_back(Symbol::stringSymbol());
+            ProductionPtr result = make_shared<Production>();
+            result->push_back(Symbol::mapEndSymbol());
+            result->push_back(Symbol::repeater(v, false));
+            result->push_back(Symbol::mapStartSymbol());
+            return result;
+        }
+    case AVRO_UNION:
+        {
+            vector<ProductionPtr> vv;
+            size_t c = n->leaves();
+            vv.reserve(c);
+            for (size_t i = 0; i < c; ++i) {
+                vv.push_back(doGenerate(n->leafAt(i), m));
+            }
+            ProductionPtr result = make_shared<Production>();
+            result->push_back(Symbol::alternative(vv));
+            result->push_back(Symbol::unionSymbol());
+            return result;
+        }
+    case AVRO_SYMBOLIC:
+        {
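+            // A symbolic node references a named type: reuse its production
+            // when already generated, otherwise emit a placeholder for
+            // fixup() to patch later; this lets recursive schemas terminate.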
+            shared_ptr<NodeSymbolic> ns = static_pointer_cast<NodeSymbolic>(n);
+            NodePtr nn = ns->getNode();
+            map<NodePtr, ProductionPtr>::iterator it =
+                m.find(nn);
+            if (it != m.end() && it->second) {
+                return it->second;
+            } else {
+                m[nn] = ProductionPtr();
+                return make_shared<Production>(1, Symbol::placeholder(nn));
+            }
+        }
+    default:
+        throw Exception("Unknown node type");
+    }
+}
+
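+// A handler with no semantic actions; validating codecs need only the
+// parser's own stack bookkeeping.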
+struct DummyHandler {
+    size_t handle(const Symbol& s) {
+        return 0;
+    }
+};
+
+template <typename P>
+class ValidatingDecoder : public Decoder {
+    const shared_ptr<Decoder> base;
+    DummyHandler handler_;
+    P parser;
+
+    void init(InputStream& is);
+    void decodeNull();
+    bool decodeBool();
+    int32_t decodeInt();
+    int64_t decodeLong();
+    float decodeFloat();
+    double decodeDouble();
+    void decodeString(string& value);
+    void skipString();
+    void decodeBytes(vector<uint8_t>& value);
+    void skipBytes();
+    void decodeFixed(size_t n, vector<uint8_t>& value);
+    void skipFixed(size_t n);
+    size_t decodeEnum();
+    size_t arrayStart();
+    size_t arrayNext();
+    size_t skipArray();
+    size_t mapStart();
+    size_t mapNext();
+    size_t skipMap();
+    size_t decodeUnionIndex();
+
+public:
+
+    ValidatingDecoder(const ValidSchema& s, const shared_ptr<Decoder> b) :
+        base(b),
+        parser(ValidatingGrammarGenerator().generate(s), NULL, handler_) { }
+
+};
+
+template <typename P>
+void ValidatingDecoder<P>::init(InputStream& is)
+{
+    base->init(is);
+}
+
+template <typename P>
+void ValidatingDecoder<P>::decodeNull()
+{
+    parser.advance(Symbol::sNull);
+}
+
+template <typename P>
+bool ValidatingDecoder<P>::decodeBool()
+{
+    parser.advance(Symbol::sBool);
+    return base->decodeBool();
+}
+
+template <typename P>
+int32_t ValidatingDecoder<P>::decodeInt()
+{
+    parser.advance(Symbol::sInt);
+    return base->decodeInt();
+}
+
+template <typename P>
+int64_t ValidatingDecoder<P>::decodeLong()
+{
+    parser.advance(Symbol::sLong);
+    return base->decodeLong();
+}
+
+template <typename P>
+float ValidatingDecoder<P>::decodeFloat()
+{
+    parser.advance(Symbol::sFloat);
+    return base->decodeFloat();
+}
+
+template <typename P>
+double ValidatingDecoder<P>::decodeDouble()
+{
+    parser.advance(Symbol::sDouble);
+    return base->decodeDouble();
+}
+
+template <typename P>
+void ValidatingDecoder<P>::decodeString(string& value)
+{
+    parser.advance(Symbol::sString);
+    base->decodeString(value);
+}
+
+template <typename P>
+void ValidatingDecoder<P>::skipString()
+{
+    parser.advance(Symbol::sString);
+    base->skipString();
+}
+
+template <typename P>
+void ValidatingDecoder<P>::decodeBytes(vector<uint8_t>& value)
+{
+    parser.advance(Symbol::sBytes);
+    base->decodeBytes(value);
+}
+
+template <typename P>
+void ValidatingDecoder<P>::skipBytes()
+{
+    parser.advance(Symbol::sBytes);
+    base->skipBytes();
+}
+
+template <typename P>
+void ValidatingDecoder<P>::decodeFixed(size_t n, vector<uint8_t>& value)
+{
+    parser.advance(Symbol::sFixed);
+    parser.assertSize(n);
+    base->decodeFixed(n, value);
+}
+
+template <typename P>
+void ValidatingDecoder<P>::skipFixed(size_t n)
+{
+    parser.advance(Symbol::sFixed);
+    parser.assertSize(n);
+    base->skipFixed(n);
+}
+
+template <typename P>
+size_t ValidatingDecoder<P>::decodeEnum()
+{
+    parser.advance(Symbol::sEnum);
+    size_t result = base->decodeEnum();
+    parser.assertLessThanSize(result);
+    return result;
+}
+
+template <typename P>
+size_t ValidatingDecoder<P>::arrayStart()
+{
+    parser.advance(Symbol::sArrayStart);
+    size_t result = base->arrayStart();
+    if (result == 0) {
+        parser.popRepeater();
+        parser.advance(Symbol::sArrayEnd);
+    } else {
+        parser.setRepeatCount(result);
+    }
+    return result;
+}
+
+template <typename P>
+size_t ValidatingDecoder<P>::arrayNext()
+{
+    size_t result = base->arrayNext();
+    if (result == 0) {
+        parser.popRepeater();
+        parser.advance(Symbol::sArrayEnd);
+    } else {
+        parser.setRepeatCount(result);
+    }
+    return result;
+}
+
+template <typename P>
+size_t ValidatingDecoder<P>::skipArray()
+{
+    parser.advance(Symbol::sArrayStart);
+    size_t n = base->skipArray();
+    if (n == 0) {
+        parser.pop();
+    } else {
+        parser.setRepeatCount(n);
+        parser.skip(*base);
+    }
+    parser.advance(Symbol::sArrayEnd);
+    return 0;
+}
+
+template <typename P>
+size_t ValidatingDecoder<P>::mapStart()
+{
+    parser.advance(Symbol::sMapStart);
+    size_t result = base->mapStart();
+    if (result == 0) {
+        parser.popRepeater();
+        parser.advance(Symbol::sMapEnd);
+    } else {
+        parser.setRepeatCount(result);
+    }
+    return result;
+}
+
+template <typename P>
+size_t ValidatingDecoder<P>::mapNext()
+{
+    size_t result = base->mapNext();
+    if (result == 0) {
+        parser.popRepeater();
+        parser.advance(Symbol::sMapEnd);
+    } else {
+        parser.setRepeatCount(result);
+    }
+    return result;
+}
+
+template <typename P>
+size_t ValidatingDecoder<P>::skipMap()
+{
+    parser.advance(Symbol::sMapStart);
+    size_t n = base->skipMap();
+    if (n == 0) {
+        parser.pop();
+    } else {
+        parser.setRepeatCount(n);
+        parser.skip(*base);
+    }
+    parser.advance(Symbol::sMapEnd);
+    return 0;
+}
+
+template <typename P>
+size_t ValidatingDecoder<P>::decodeUnionIndex()
+{
+    parser.advance(Symbol::sUnion);
+    size_t result = base->decodeUnionIndex();
+    parser.selectBranch(result);
+    return result;
+}
+
+template <typename P>
+class ValidatingEncoder : public Encoder {
+    DummyHandler handler_;
+    P parser_;
+    EncoderPtr base_;
+
+    void init(OutputStream& os);
+    void flush();
+    void encodeNull();
+    void encodeBool(bool b);
+    void encodeInt(int32_t i);
+    void encodeLong(int64_t l);
+    void encodeFloat(float f);
+    void encodeDouble(double d);
+    void encodeString(const std::string& s);
+    void encodeBytes(const uint8_t *bytes, size_t len);
+    void encodeFixed(const uint8_t *bytes, size_t len);
+    void encodeEnum(size_t e);
+    void arrayStart();
+    void arrayEnd();
+    void mapStart();
+    void mapEnd();
+    void setItemCount(size_t count);
+    void startItem();
+    void encodeUnionIndex(size_t e);
+public:
+    ValidatingEncoder(const ValidSchema& schema, const EncoderPtr& base) :
+        parser_(ValidatingGrammarGenerator().generate(schema), NULL, handler_),
+        base_(base) { }
+};
+
+template<typename P>
+void ValidatingEncoder<P>::init(OutputStream& os)
+{
+    base_->init(os);
+}
+
+template<typename P>
+void ValidatingEncoder<P>::flush()
+{
+    base_->flush();
+}
+
+template<typename P>
+void ValidatingEncoder<P>::encodeNull()
+{
+    parser_.advance(Symbol::sNull);
+    base_->encodeNull();
+}
+
+template<typename P>
+void ValidatingEncoder<P>::encodeBool(bool b)
+{
+    parser_.advance(Symbol::sBool);
+    base_->encodeBool(b);
+}
+
+template<typename P>
+void ValidatingEncoder<P>::encodeInt(int32_t i)
+{
+    parser_.advance(Symbol::sInt);
+    base_->encodeInt(i);
+}
+
+template<typename P>
+void ValidatingEncoder<P>::encodeLong(int64_t l)
+{
+    parser_.advance(Symbol::sLong);
+    base_->encodeLong(l);
+}
+
+template<typename P>
+void ValidatingEncoder<P>::encodeFloat(float f)
+{
+    parser_.advance(Symbol::sFloat);
+    base_->encodeFloat(f);
+}
+
+template<typename P>
+void ValidatingEncoder<P>::encodeDouble(double d)
+{
+    parser_.advance(Symbol::sDouble);
+    base_->encodeDouble(d);
+}
+
+template<typename P>
+void ValidatingEncoder<P>::encodeString(const std::string& s)
+{
+    parser_.advance(Symbol::sString);
+    base_->encodeString(s);
+}
+
+template<typename P>
+void ValidatingEncoder<P>::encodeBytes(const uint8_t *bytes, size_t len)
+{
+    parser_.advance(Symbol::sBytes);
+    base_->encodeBytes(bytes, len);
+}
+
+template<typename P>
+void ValidatingEncoder<P>::encodeFixed(const uint8_t *bytes, size_t len)
+{
+    parser_.advance(Symbol::sFixed);
+    parser_.assertSize(len);
+    base_->encodeFixed(bytes, len);
+}
+
+template<typename P>
+void ValidatingEncoder<P>::encodeEnum(size_t e)
+{
+    parser_.advance(Symbol::sEnum);
+    parser_.assertLessThanSize(e);
+    base_->encodeEnum(e);
+}
+
+template<typename P>
+void ValidatingEncoder<P>::arrayStart()
+{
+    parser_.advance(Symbol::sArrayStart);
+    base_->arrayStart();
+}
+
+template<typename P>
+void ValidatingEncoder<P>::arrayEnd()
+{
+    parser_.popRepeater();
+    parser_.advance(Symbol::sArrayEnd);
+    base_->arrayEnd();
+}
+
+template<typename P>
+void ValidatingEncoder<P>::mapStart()
+{
+    parser_.advance(Symbol::sMapStart);
+    base_->mapStart();
+}
+
+template<typename P>
+void ValidatingEncoder<P>::mapEnd()
+{
+    parser_.popRepeater();
+    parser_.advance(Symbol::sMapEnd);
+    base_->mapEnd();
+}
+
+template<typename P>
+void ValidatingEncoder<P>::setItemCount(size_t count)
+{
+    parser_.setRepeatCount(count);
+    base_->setItemCount(count);
+}
+
+template<typename P>
+void ValidatingEncoder<P>::startItem()
+{
+    if (parser_.top() != Symbol::sRepeater) {
+        throw Exception("startItem at not an item boundary");
+    }
+    base_->startItem();
+}
+
+template<typename P>
+void ValidatingEncoder<P>::encodeUnionIndex(size_t e)
+{
+    parser_.advance(Symbol::sUnion);
+    parser_.selectBranch(e);
+    base_->encodeUnionIndex(e);
+}
+
+}   // namespace parsing
+
+DecoderPtr validatingDecoder(const ValidSchema& s,
+    const DecoderPtr& base)
+{
+    return boost::make_shared<parsing::ValidatingDecoder<
+        parsing::SimpleParser<parsing::DummyHandler> > >(s, base);
+}
+
+EncoderPtr validatingEncoder(const ValidSchema& schema, const EncoderPtr& base)
+{
+    return boost::make_shared<parsing::ValidatingEncoder<
+        parsing::SimpleParser<parsing::DummyHandler> > >(schema, base);
+}
+
+}   // namespace avro
+
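
The validatingDecoder and validatingEncoder factories above wrap any
Decoder or Encoder so that every call is checked against a schema before
being forwarded. A minimal usage sketch, assuming the
compileJsonSchemaFromString, memoryOutputStream and binaryEncoder entry
points declared in Compiler.hh, Stream.hh and Encoder.hh elsewhere in this
commit:

    #include "Compiler.hh"
    #include "Encoder.hh"
    #include "Stream.hh"

    avro::ValidSchema schema = avro::compileJsonSchemaFromString("\"int\"");
    std::auto_ptr<avro::OutputStream> out = avro::memoryOutputStream();
    avro::EncoderPtr e = avro::validatingEncoder(schema, avro::binaryEncoder());
    e->init(*out);
    e->encodeInt(42);   // matches the schema; forwarded to the base encoder
    e->flush();
    // e->encodeString("x") would instead throw avro::Exception, because the
    // parser cannot advance on Symbol::sString when expecting Symbol::sInt.
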
diff --git a/lang/c++/impl/parsing/ValidatingCodec.hh b/lang/c++/impl/parsing/ValidatingCodec.hh
new file mode 100644
index 0000000..5a52e20
--- /dev/null
+++ b/lang/c++/impl/parsing/ValidatingCodec.hh
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef avro_parsing_ValidatingCodec_hh__
+#define avro_parsing_ValidatingCodec_hh__
+
+#include <map>
+#include <vector>
+#include "boost/make_shared.hpp"
+
+#include "Symbol.hh"
+#include "ValidSchema.hh"
+#include "NodeImpl.hh"
+
+namespace avro {
+namespace parsing {
+
+class ValidatingGrammarGenerator {
+protected:
+    template<typename T>
+    static void doFixup(Production& p, const std::map<T, ProductionPtr> &m);
+
+    template<typename T>
+    static void doFixup(Symbol &s, const std::map<T, ProductionPtr> &m);
+    virtual ProductionPtr doGenerate(const NodePtr& n,
+        std::map<NodePtr, ProductionPtr> &m);
+
+    ProductionPtr generate(const NodePtr& schema);
+public:
+    Symbol generate(const ValidSchema& schema);
+
+};
+
+}   // namespace parsing
+}   // namespace avro
+
+#endif
diff --git a/lang/c++/jsonschemas/array b/lang/c++/jsonschemas/array
new file mode 100644
index 0000000..e5c3761
--- /dev/null
+++ b/lang/c++/jsonschemas/array
@@ -0,0 +1 @@
+{ "type" : "array", "items" : "int" , "name":"test" }
diff --git a/lang/c++/jsonschemas/bigrecord b/lang/c++/jsonschemas/bigrecord
new file mode 100644
index 0000000..02dbccb
--- /dev/null
+++ b/lang/c++/jsonschemas/bigrecord
@@ -0,0 +1,107 @@
+{
+    "type": "record",
+    "name": "RootRecord",
+    "fields": [
+        {
+            "name": "mylong",
+            "type": "long"
+        },
+        {
+            "name": "nestedrecord",
+            "type": {
+                "type": "record",
+                "name": "Nested",
+                "fields": [
+                    {
+                        "name": "inval1",
+                        "type": "double"
+                    },
+                    {
+                        "name": "inval2",
+                        "type": "string"
+                    },
+                    {
+                        "name": "inval3",
+                        "type": "int"
+                    }
+                ]
+            }
+        },
+        {
+            "name": "mymap",
+            "type": {
+                "type": "map",
+                "values": "int"
+            }
+        },
+        {
+            "name": "recordmap",
+            "type": {
+                "type": "map",
+                "values": "Nested"
+            }
+        },
+        {
+            "name": "myarray",
+            "type": {
+                "type": "array",
+                "items": "double"
+            }
+        },
+        {
+            "name": "myenum",
+            "type": {
+                "type": "enum",
+                "name": "ExampleEnum",
+                "symbols": [
+                    "zero",
+                    "one",
+                    "two",
+                    "three"
+                ]
+            }
+        },
+        {
+            "name": "myunion",
+            "type": [
+                "null",
+                {
+                    "type": "map",
+                    "values": "int"
+                },
+                "float"
+            ]
+        },
+        {
+            "name": "anotherunion",
+            "type": [
+                "bytes",
+                "null"
+            ]
+        },
+        {
+            "name": "mybool",
+            "type": "boolean"
+        },
+        {
+            "name": "anothernested",
+            "type": "Nested"
+        },
+        {
+            "name": "myfixed",
+            "type": {
+                "type": "fixed",
+                "size": 16,
+                "name": "md5"
+            }
+        },
+        {
+            "name": "anotherint",
+            "type": "int"
+        },
+        {
+            "name": "bytes",
+            "type": "bytes"
+        }
+    ]
+}
diff --git a/lang/c++/jsonschemas/bigrecord2 b/lang/c++/jsonschemas/bigrecord2
new file mode 100644
index 0000000..73b9a99
--- /dev/null
+++ b/lang/c++/jsonschemas/bigrecord2
@@ -0,0 +1,100 @@
+{
+    "type": "record",
+    "name": "RootRecord",
+    "fields": [
+        {
+            "name": "mylong",
+            "type": "double"
+        },
+        {
+            "name": "anotherint",
+            "type": "int"
+        },
+        {
+            "name": "bytes",
+            "type": "bytes"
+        },
+        {
+            "name": "nestedrecord",
+            "type": {
+                "type": "record",
+                "name": "Nested",
+                "fields": [
+                    {
+                        "name": "inval3",
+                        "type": "int"
+                    },
+                    {
+                        "name": "inval2",
+                        "type": "string"
+                    },
+                    {
+                        "name": "inval1",
+                        "type": "double"
+                    }
+                ]
+            }
+        },
+        {
+            "name": "mymap",
+            "type": {
+                "type": "map",
+                "values": "long"
+            }
+        },
+        {
+            "name": "myarray",
+            "type": {
+                "type": "array",
+                "items": "double"
+            }
+        },
+        {
+            "name": "myenum",
+            "type": {
+                "type": "enum",
+                "name": "ExampleEnum",
+                "symbols": [
+                    "three",
+                    "two",
+                    "one",
+                    "zero"
+                ]
+            }
+        },
+        {
+            "name": "myunion",
+            "type": [
+                "null",
+                "float",
+                {
+                    "type": "map",
+                    "values": "float"
+                }
+            ]
+        },
+        {
+            "name": "anotherunion",
+            "type": "bytes"
+        },
+        {
+            "name": "anothernested",
+            "type": "Nested"
+        },
+        {
+            "name": "newbool",
+            "type": "boolean"
+        },
+        {
+            "name": "myfixed",
+            "type": [
+                "float",
+                {
+                    "type": "fixed",
+                    "size": 16,
+                    "name": "md5"
+                }
+            ]
+        }
+    ]
+}
diff --git a/lang/c++/jsonschemas/bigrecord_r b/lang/c++/jsonschemas/bigrecord_r
new file mode 100644
index 0000000..f079162
--- /dev/null
+++ b/lang/c++/jsonschemas/bigrecord_r
@@ -0,0 +1,166 @@
+{
+    "type": "record",
+    "name": "RootRecord",
+    "fields": [
+        {
+            "name": "mylong",
+            "type": "long"
+        },
+        {
+            "name": "mymap",
+            "type": {
+                "type": "map",
+                "values": "int"
+            }
+        },
+        {
+            "name": "nestedrecord",
+            "type": {
+                "type": "record",
+                "name": "Nested",
+                "fields": [
+                    {
+                        "name": "inval2",
+                        "type": "string"
+                    },
+                    {
+                        "name": "inval1",
+                        "type": "double"
+                    },
+                    {
+                        "name": "inval3",
+                        "type": "int"
+                    }
+                ]
+            }
+        },
+        {
+            "name": "recordmap",
+            "type": {
+                "type": "map",
+                "values": "Nested"
+            }
+        },
+        {
+            "name": "withDefaultValue",
+            "type": {
+                "type": "record",
+                "name": "WithDefaultValue",
+                "fields": [
+                    {
+                        "name": "s1",
+                        "type": "string"
+                    },
+                    {
+                        "name": "d1",
+                        "type": "double"
+                    },
+                    {
+                        "name": "i1",
+                        "type": "int"
+                    }
+                ]
+            },
+            "default": {
+                "s1": "sval",
+                "d1": 5.67,
+                "i1": 99
+            }
+        },
+        {
+            "name": "union1WithDefaultValue",
+            "type": [ "string", "int" ],
+            "default": {
+                "string": "sval"
+            }
+        },
+        {
+            "name": "union2WithDefaultValue",
+            "type": [ "string", "null" ],
+            "default": null
+        },
+        {
+            "name": "myarray",
+            "type": {
+                "type": "array",
+                "items": "double"
+            }
+        },
+        {
+            "name": "myenum",
+            "type": {
+                "type": "enum",
+                "name": "ExampleEnum",
+                "symbols": [
+                    "zero",
+                    "one",
+                    "two",
+                    "three"
+                ]
+            }
+        },
+        {
+            "name": "myunion",
+            "type": [
+                "null",
+                {
+                    "type": "map",
+                    "values": "int"
+                },
+                "float"
+            ],
+            "default": null
+        },
+        {
+            "name": "anotherunion",
+            "type": [
+                "bytes",
+                "null"
+            ]
+        },
+        {
+            "name": "mybool",
+            "type": "boolean"
+        },
+        {
+            "name": "anothernested",
+            "type": "Nested"
+        },
+        {
+            "name": "rwd",
+            "type": {
+                "type": "record",
+                "name": "RecordWithDefault",
+                "fields": [
+                    {
+                        "name": "rwd_f1",
+                        "type": "Nested"
+                    }
+                ]
+            },
+            "default": {
+                "rwd_f1": {
+                    "inval2": "hello",
+                    "inval1": 4.23,
+                    "inval3": 100
+                }
+            }
+        },
+        {
+            "name": "myfixed",
+            "type": {
+                "type": "fixed",
+                "size": 16,
+                "name": "md5"
+            }
+        },
+        {
+            "name": "anotherint",
+            "type": "int"
+        },
+        {
+            "name": "bytes",
+            "type": "bytes"
+        }
+    ]
+}
diff --git a/lang/c++/jsonschemas/circulardep b/lang/c++/jsonschemas/circulardep
new file mode 100644
index 0000000..a5d1a48
--- /dev/null
+++ b/lang/c++/jsonschemas/circulardep
@@ -0,0 +1,35 @@
+{
+    "type" : "record",
+    "name" : "Item",
+    "fields" : [
+        {
+            "name" : "id",
+            "type" : "string",
+            "ctor" : "true"
+        },
+        {
+            "name" : "entities",
+            "type" : [
+                "null",
+                {
+                    "type" : "record",
+                    "name" : "Information",
+                    "fields" : [
+                        {
+                            "name" : "id",
+                            "type" : "string"
+                        },
+                        {
+                            "name" : "externalItem",
+                            "type" : "Item"
+                        },
+                        {
+                            "name" : "innerUnion",
+                            "type" : ["int", "double"]
+                        }
+                    ]
+                }
+            ]
+        }
+    ]
+}
diff --git a/lang/c++/jsonschemas/empty_record b/lang/c++/jsonschemas/empty_record
new file mode 100644
index 0000000..60b92a5
--- /dev/null
+++ b/lang/c++/jsonschemas/empty_record
@@ -0,0 +1,5 @@
+{
+    "type": "record",
+    "name": "Empty",
+    "fields": []
+}
diff --git a/lang/c++/jsonschemas/enum b/lang/c++/jsonschemas/enum
new file mode 100644
index 0000000..d979c86
--- /dev/null
+++ b/lang/c++/jsonschemas/enum
@@ -0,0 +1,10 @@
+{
+    "type": "enum",
+    "symbols": [
+        "zero",
+        "int",
+        "two",
+        "three"
+    ],
+    "name": "myenum"
+}
diff --git a/lang/c++/jsonschemas/fixed b/lang/c++/jsonschemas/fixed
new file mode 100644
index 0000000..2fe4cbc
--- /dev/null
+++ b/lang/c++/jsonschemas/fixed
@@ -0,0 +1 @@
+{"type": "fixed", "size" : 16, "name":"md5" }
diff --git a/lang/c++/jsonschemas/int b/lang/c++/jsonschemas/int
new file mode 100644
index 0000000..a21274f
--- /dev/null
+++ b/lang/c++/jsonschemas/int
@@ -0,0 +1 @@
+"int"
diff --git a/lang/c++/jsonschemas/large_schema.avsc b/lang/c++/jsonschemas/large_schema.avsc
new file mode 100644
index 0000000..c5819b6
--- /dev/null
+++ b/lang/c++/jsonschemas/large_schema.avsc
@@ -0,0 +1 @@
+["null",{"type":"record","name":"El","namespace":"foo.e.f.g.h.ac","fields":[{"name":"ref1","type":{"type":"record","name":"J","namespace":"foo.a.b.c.d","fields":[{"name":"attr11","type":"long","declared":"true"},{"name":"attr12","type":"long","declared":"true"}]}},{"name":"attr13","type":["null","string"],"default":null},{"name":"attr1","type":{"type":"long","date":"true"}},{"name":"attr2","type":["null","string"],"default":null},{"name":"attr3","type":["null","string"],"default":null},{ [...]
\ No newline at end of file
diff --git a/lang/c++/jsonschemas/map b/lang/c++/jsonschemas/map
new file mode 100644
index 0000000..6c84e95
--- /dev/null
+++ b/lang/c++/jsonschemas/map
@@ -0,0 +1,5 @@
+{
+    "type": "map",
+    "values": {"type":"int"},
+    "name": "noname"
+}
diff --git a/lang/c++/jsonschemas/nested b/lang/c++/jsonschemas/nested
new file mode 100644
index 0000000..5aa6a75
--- /dev/null
+++ b/lang/c++/jsonschemas/nested
@@ -0,0 +1,17 @@
+{
+    "type": "record",
+    "name": "LongList",
+    "fields": [
+        {
+            "name": "value",
+            "type": "long"
+        },
+        {
+            "name": "next",
+            "type": [
+                "LongList",
+                "null"
+            ]
+        }
+    ]
+}
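
The LongList schema above is self-referential: its "next" field names the
record being defined. Compiling it exercises the AVRO_SYMBOLIC
placeholder-and-fixup path in ValidatingCodec.cc above; a brief sketch,
assuming the compileJsonSchema entry point declared in Compiler.hh:

    #include <fstream>
    #include "Compiler.hh"
    #include "ValidSchema.hh"

    std::ifstream in("jsonschemas/nested");
    avro::ValidSchema s;
    avro::compileJsonSchema(in, s);   // throws avro::Exception on schemas
                                      // with unresolved names, such as the
                                      // "LongLister" in nested.error below
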
diff --git a/lang/c++/jsonschemas/nested.error b/lang/c++/jsonschemas/nested.error
new file mode 100644
index 0000000..5c82308
--- /dev/null
+++ b/lang/c++/jsonschemas/nested.error
@@ -0,0 +1,17 @@
+{
+    "type": "record",
+    "name": "LongList",
+    "fields": [
+        {
+            "name": "value",
+            "type": "long"
+        },
+        {
+            "name": "next",
+            "type": [
+                "LongLister",
+                "null"
+            ]
+        }
+    ]
+}
diff --git a/lang/c++/jsonschemas/recinrec b/lang/c++/jsonschemas/recinrec
new file mode 100644
index 0000000..84b673e
--- /dev/null
+++ b/lang/c++/jsonschemas/recinrec
@@ -0,0 +1,18 @@
+{
+  "type": "record", 
+  "name": "Rec1",
+  "fields" : [
+    {"name": "val1", "type": "long"},           
+    {"name": "val2", "type": {
+          "type": "record", 
+          "name": "Rec2",
+          "fields" : [
+            {"name": "inval1", "type": "double"},           
+            {"name": "inval2", "type": "int" }
+          ]
+      }
+    },
+    {"name": "val3", "type": "float"}
+  ]
+}
+
diff --git a/lang/c++/jsonschemas/record b/lang/c++/jsonschemas/record
new file mode 100644
index 0000000..f2be606
--- /dev/null
+++ b/lang/c++/jsonschemas/record
@@ -0,0 +1,9 @@
+{
+  "type": "record", 
+  "name": "LongList",
+  "fields" : [
+    {"name": "value", "type": "long"},           
+    {"type": "int", "name": "next", "metadata" : "two"}
+  ]
+}
+
diff --git a/lang/c++/jsonschemas/record2 b/lang/c++/jsonschemas/record2
new file mode 100644
index 0000000..788478f
--- /dev/null
+++ b/lang/c++/jsonschemas/record2
@@ -0,0 +1,10 @@
+{
+  "type": "record", 
+  "name": "LongList",
+  "fields" : [
+    {"name": "value", "type": "long"},           
+    {"name": "next", "type": ["int", "float"] },
+    {"name": "hello", "type": {"type" : "array" , "items" :"float"}}
+  ]
+}
+
diff --git a/lang/c++/jsonschemas/recursive b/lang/c++/jsonschemas/recursive
new file mode 100644
index 0000000..6542701
--- /dev/null
+++ b/lang/c++/jsonschemas/recursive
@@ -0,0 +1,9 @@
+{
+    "type": "record",
+    "name": "LongList",
+    "aliases": ["LinkedLongs"],
+    "fields" : [
+        { "name": "value", "type": "long" },
+        { "name": "next", "type": ["LongList", "null"] }
+    ]
+}
diff --git a/lang/c++/jsonschemas/reuse b/lang/c++/jsonschemas/reuse
new file mode 100644
index 0000000..d6487f4
--- /dev/null
+++ b/lang/c++/jsonschemas/reuse
@@ -0,0 +1,14 @@
+{
+    "type": "record",
+    "name": "outer",
+    "fields" : [
+        { "name": "f1", "type":
+            {"type": "record", "name": "F",
+                "fields": [
+                    { "name": "g1", "type": "boolean" },
+                    { "name": "g2", "type": "int" }
+                ]
+            } },
+        { "name": "f2", "type": "F" }
+    ]
+}
diff --git a/lang/c++/jsonschemas/tweet b/lang/c++/jsonschemas/tweet
new file mode 100644
index 0000000..1fd3957
--- /dev/null
+++ b/lang/c++/jsonschemas/tweet
@@ -0,0 +1,152 @@
+{
+    "type": "record",
+    "name": "AvroTweet",
+    "namespace": "com.bifflabs.grok.model.twitter.avro",
+    "fields": [{
+        "name": "ID",
+        "type": "long"
+    },
+    {
+        "name": "text",
+        "type": "string"
+    },
+    {
+        "name": "authorScreenName",
+        "type": "string"
+    },
+    {
+        "name": "authorProfileImageURL",
+        "type": "string"
+    },
+    {
+        "name": "authorUserID",
+        "type": ["null", "long"]
+    },
+    {
+        "name": "location",
+        "type": ["null", {
+            "type": "record",
+            "name": "AvroPoint",
+            "namespace": "com.bifflabs.grok.model.common.avro",
+            "fields": [{
+                "name": "latitude",
+                "type": "double"
+            },
+            {
+                "name": "longitude",
+                "type": "double"
+            }]
+        }]
+    },
+    {
+        "name": "placeID",
+        "type": ["null", "string"]
+    },
+    {
+        "name": "createdAt",
+        "type": {
+            "type": "record",
+            "name": "AvroDateTime",
+            "namespace": "com.bifflabs.grok.model.common.avro",
+            "fields": [{
+                "name": "dateTimeString",
+                "type": "string"
+            }]
+        }
+    },
+    {
+        "name": "metadata",
+        "type": {
+            "type": "record",
+            "name": "AvroTweetMetadata",
+            "fields": [{
+                "name": "inReplyToScreenName",
+                "type": {
+                    "type": "record",
+                    "name": "AvroKnowableOptionString",
+                    "namespace": "com.bifflabs.grok.model.common.avro",
+                    "fields": [{
+                        "name": "known",
+                        "type": "boolean"
+                    },
+                    {
+                        "name": "data",
+                        "type": ["null", "string"]
+                    }]
+                }
+            },
+            {
+                "name": "mentionedScreenNames",
+                "type": {
+                    "type": "record",
+                    "name": "AvroKnowableListString",
+                    "namespace": "com.bifflabs.grok.model.common.avro",
+                    "fields": [{
+                        "name": "known",
+                        "type": "boolean"
+                    },
+                    {
+                        "name": "data",
+                        "type": {
+                            "type": "array",
+                            "items": "string"
+                        }
+                    }]
+                }
+            },
+            {
+                "name": "links",
+                "type": "com.bifflabs.grok.model.common.avro.AvroKnowableListString"
+            },            
+            {
+                "name": "hashtags",
+                "type": "com.bifflabs.grok.model.common.avro.AvroKnowableListString"
+            },
+            {
+                "name": "isBareCheckin",
+                "type": {
+                    "type": "record",
+                    "name": "AvroKnowableBoolean",
+                    "namespace": "com.bifflabs.grok.model.common.avro",
+                    "fields": [{
+                        "name": "known",
+                        "type": "boolean"
+                    },
+                    {
+                        "name": "data",
+                        "type": "boolean"
+                    }]
+                }
+            },
+            {
+                "name": "isBareRetweet",
+                "type": "com.bifflabs.grok.model.common.avro.AvroKnowableBoolean"
+            },
+            {
+                "name": "isRetweet",
+                "type": "com.bifflabs.grok.model.common.avro.AvroKnowableBoolean"
+            },
+            {
+                "name": "venueID",
+                "type": "com.bifflabs.grok.model.common.avro.AvroKnowableOptionString"
+            },
+            {
+                "name": "venuePoint",
+                "type": {
+                    "type": "record",
+                    "name": "AvroKnowableOptionPoint",
+                    "namespace": "com.bifflabs.grok.model.common.avro",
+                    "fields": [{
+                        "name": "known",
+                        "type": "boolean"
+                    },
+                    {
+                        "name": "data",
+                        "type": ["null", "AvroPoint"]
+                    }]
+                }
+            }
+            ]
+        }
+    }]
+}
\ No newline at end of file
diff --git a/lang/c++/jsonschemas/union b/lang/c++/jsonschemas/union
new file mode 100644
index 0000000..ebf26d0
--- /dev/null
+++ b/lang/c++/jsonschemas/union
@@ -0,0 +1 @@
+[ "int" , "long" , "float" ]
diff --git a/lang/c++/jsonschemas/union_array_union b/lang/c++/jsonschemas/union_array_union
new file mode 100644
index 0000000..459c5e2
--- /dev/null
+++ b/lang/c++/jsonschemas/union_array_union
@@ -0,0 +1,14 @@
+{
+    "type": "record",
+    "name": "r1",
+    "fields" : [
+        {
+            "name": "f1",
+            "type":
+            [
+                "null",
+                { "type":"array", "items":[ "null", "int" ] }
+            ]
+        }
+    ]
+}
diff --git a/lang/c++/jsonschemas/union_conflict b/lang/c++/jsonschemas/union_conflict
new file mode 100644
index 0000000..0fffa41
--- /dev/null
+++ b/lang/c++/jsonschemas/union_conflict
@@ -0,0 +1,9 @@
+{
+  "type": "record",
+  "name": "uc",
+  "fields" : [
+    {"name": "rev_t", "type": "string"},
+    {"name": "data", "type": "bytes"},
+    {"name": "rev", "type": ["string", "null"]}
+  ]
+}
diff --git a/lang/c++/jsonschemas/union_map_union b/lang/c++/jsonschemas/union_map_union
new file mode 100644
index 0000000..5258c09
--- /dev/null
+++ b/lang/c++/jsonschemas/union_map_union
@@ -0,0 +1,8 @@
+{
+  "type": "record",
+  "name": "r1",
+  "fields" : [
+    {"name": "id", "type": "string"},
+    {"name": "val", "type": [{"type": "map", "values": {"name": "r3", "type": "record", "fields": [{"name": "name", "type": "string"}, {"name": "data", "type": "bytes"}, {"name": "rev", "type": ["string", "null"]}]}}, "null"]}
+  ]
+}
diff --git a/lang/c++/jsonschemas/unionwithmap b/lang/c++/jsonschemas/unionwithmap
new file mode 100644
index 0000000..33e0c6d
--- /dev/null
+++ b/lang/c++/jsonschemas/unionwithmap
@@ -0,0 +1 @@
+[ "int" , "long" , {"type":"map", "values":[ "int", "long" ] } ]
diff --git a/lang/c++/jsonschemas/verboseint b/lang/c++/jsonschemas/verboseint
new file mode 100644
index 0000000..5dccae9
--- /dev/null
+++ b/lang/c++/jsonschemas/verboseint
@@ -0,0 +1 @@
+{ "type": "int", "metadata1" : "ju\"nk", metadata2: 123, metadata3 : {"ju{nk"}, "name":"hello", metadata4: ["he]ll}o"]}
diff --git a/lang/c++/m4/README b/lang/c++/m4/README
new file mode 100644
index 0000000..7129fcb
--- /dev/null
+++ b/lang/c++/m4/README
@@ -0,0 +1,3 @@
+The macros in this directory came from http://www.nongnu.org/autoconf-archive/index.html
+
+Please refer to the files for their licensing info.
diff --git a/lang/c++/m4/m4_ax_boost_asio.m4 b/lang/c++/m4/m4_ax_boost_asio.m4
new file mode 100644
index 0000000..434096d
--- /dev/null
+++ b/lang/c++/m4/m4_ax_boost_asio.m4
@@ -0,0 +1,108 @@
+# ===========================================================================
+#       http://www.gnu.org/software/autoconf-archive/ax_boost_asio.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+#   AX_BOOST_ASIO
+#
+# DESCRIPTION
+#
+#   Test for Asio library from the Boost C++ libraries. The macro requires a
+#   preceding call to AX_BOOST_BASE. Further documentation is available at
+#   <http://randspringer.de/boost/index.html>.
+#
+#   This macro calls:
+#
+#     AC_SUBST(BOOST_ASIO_LIB)
+#
+#   And sets:
+#
+#     HAVE_BOOST_ASIO
+#
+# LICENSE
+#
+#   Copyright (c) 2008 Thomas Porschberg <thomas at randspringer.de>
+#   Copyright (c) 2008 Pete Greenwell <pete at mu.org>
+#
+#   Copying and distribution of this file, with or without modification, are
+#   permitted in any medium without royalty provided the copyright notice
+#   and this notice are preserved. This file is offered as-is, without any
+#   warranty.
+
+#serial 7
+
+AC_DEFUN([AX_BOOST_ASIO],
+[
+    AC_ARG_WITH([boost-asio],
+    AS_HELP_STRING([--with-boost-asio@<:@=special-lib@:>@],
+                   [use the ASIO library from boost - it is possible to specify a certain library for the linker
+                        e.g. --with-boost-asio=boost_system-gcc41-mt-1_34 ]),
+        [
+        if test "$withval" = "no"; then
+            want_boost="no"
+        elif test "$withval" = "yes"; then
+            want_boost="yes"
+            ax_boost_user_asio_lib=""
+        else
+            want_boost="yes"
+            ax_boost_user_asio_lib="$withval"
+        fi
+        ],
+        [want_boost="yes"]
+    )
+
+    if test "x$want_boost" = "xyes"; then
+        AC_REQUIRE([AC_PROG_CC])
+        CPPFLAGS_SAVED="$CPPFLAGS"
+        CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
+        export CPPFLAGS
+
+        LDFLAGS_SAVED="$LDFLAGS"
+        LDFLAGS="$LDFLAGS $BOOST_LDFLAGS"
+        export LDFLAGS
+
+        AC_CACHE_CHECK(whether the Boost::ASIO library is available,
+                       ax_cv_boost_asio,
+        [AC_LANG_PUSH([C++])
+         AC_COMPILE_IFELSE(AC_LANG_PROGRAM([[ @%:@include <boost/asio.hpp>
+                                            ]],
+                                  [[
+
+                                    boost::asio::io_service io;
+                                    boost::system::error_code timer_result;
+                                    boost::asio::deadline_timer t(io);
+                                    t.cancel();
+                                    io.run_one();
+                                    return 0;
+                                   ]]),
+                             ax_cv_boost_asio=yes, ax_cv_boost_asio=no)
+         AC_LANG_POP([C++])
+        ])
+        if test "x$ax_cv_boost_asio" = "xyes"; then
+            AC_DEFINE(HAVE_BOOST_ASIO,,[define if the Boost::ASIO library is available])
+            BN=boost_system
+            if test "x$ax_boost_user_asio_lib" = "x"; then
+                for ax_lib in $BN $BN-$CC $BN-$CC-mt $BN-$CC-mt-s $BN-$CC-s \
+                              lib$BN lib$BN-$CC lib$BN-$CC-mt lib$BN-$CC-mt-s lib$BN-$CC-s \
+                              $BN-mgw $BN-mgw $BN-mgw-mt $BN-mgw-mt-s $BN-mgw-s ; do
+                    AC_CHECK_LIB($ax_lib, main, [BOOST_ASIO_LIB="-l$ax_lib" AC_SUBST(BOOST_ASIO_LIB) link_asio="yes" break],
+                                 [link_asio="no"])
+                done
+            else
+               for ax_lib in $ax_boost_user_asio_lib $BN-$ax_boost_user_asio_lib; do
+                      AC_CHECK_LIB($ax_lib, main,
+                                   [BOOST_ASIO_LIB="-l$ax_lib" AC_SUBST(BOOST_ASIO_LIB) link_asio="yes" break],
+                                   [link_asio="no"])
+                  done
+
+            fi
+            if test "x$link_asio" = "xno"; then
+                AC_MSG_ERROR(Could not link against $ax_lib !)
+            fi
+        fi
+
+        CPPFLAGS="$CPPFLAGS_SAVED"
+        LDFLAGS="$LDFLAGS_SAVED"
+    fi
+])
diff --git a/lang/c++/m4/m4_ax_boost_base.m4 b/lang/c++/m4/m4_ax_boost_base.m4
new file mode 100644
index 0000000..ff81ac6
--- /dev/null
+++ b/lang/c++/m4/m4_ax_boost_base.m4
@@ -0,0 +1,219 @@
+# ===========================================================================
+#          http://www.nongnu.org/autoconf-archive/ax_boost_base.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+#   AX_BOOST_BASE([MINIMUM-VERSION])
+#
+# DESCRIPTION
+#
+#   Test for the Boost C++ libraries of a particular version (or newer)
+#
+#   If no path to the installed boost library is given the macro searches
+#   under /usr, /usr/local, /opt and /opt/local and evaluates the
+#   $BOOST_ROOT environment variable. Further documentation is available at
+#   <http://randspringer.de/boost/index.html>.
+#
+#   This macro calls:
+#
+#     AC_SUBST(BOOST_CPPFLAGS) / AC_SUBST(BOOST_LDFLAGS)
+#
+#   And sets:
+#
+#     HAVE_BOOST
+#
+# LICENSE
+#
+#   Copyright (c) 2008 Thomas Porschberg <thomas at randspringer.de>
+#
+#   Copying and distribution of this file, with or without modification, are
+#   permitted in any medium without royalty provided the copyright notice
+#   and this notice are preserved.
+
+AC_DEFUN([AX_BOOST_BASE],
+[
+AC_ARG_WITH([boost],
+	AS_HELP_STRING([--with-boost@<:@=DIR@:>@], [use boost (default is yes) - it is possible to specify the root directory for boost (optional)]),
+	[
+    if test "$withval" = "no"; then
+		want_boost="no"
+    elif test "$withval" = "yes"; then
+        want_boost="yes"
+        ac_boost_path=""
+    else
+	    want_boost="yes"
+        ac_boost_path="$withval"
+	fi
+    ],
+    [want_boost="yes"])
+
+
+AC_ARG_WITH([boost-libdir],
+        AS_HELP_STRING([--with-boost-libdir=LIB_DIR],
+        [Force given directory for boost libraries. Note that this will overwrite library path detection, so use this parameter only if default library detection fails and you know exactly where your boost libraries are located.]),
+        [
+        if test -d $withval
+        then
+                ac_boost_lib_path="$withval"
+        else
+                AC_MSG_ERROR(--with-boost-libdir expected directory name)
+        fi
+        ],
+        [ac_boost_lib_path=""]
+)
+
+if test "x$want_boost" = "xyes"; then
+	boost_lib_version_req=ifelse([$1], ,1.20.0,$1)
+	boost_lib_version_req_shorten=`expr $boost_lib_version_req : '\([[0-9]]*\.[[0-9]]*\)'`
+	boost_lib_version_req_major=`expr $boost_lib_version_req : '\([[0-9]]*\)'`
+	boost_lib_version_req_minor=`expr $boost_lib_version_req : '[[0-9]]*\.\([[0-9]]*\)'`
+	boost_lib_version_req_sub_minor=`expr $boost_lib_version_req : '[[0-9]]*\.[[0-9]]*\.\([[0-9]]*\)'`
+	if test "x$boost_lib_version_req_sub_minor" = "x" ; then
+		boost_lib_version_req_sub_minor="0"
+    	fi
+	WANT_BOOST_VERSION=`expr $boost_lib_version_req_major \* 100000 \+  $boost_lib_version_req_minor \* 100 \+ $boost_lib_version_req_sub_minor`
+	AC_MSG_CHECKING(for boostlib >= $boost_lib_version_req)
+	succeeded=no
+
+	dnl first we check the system location for boost libraries
+	dnl this location is chosen if boost libraries are installed with the --layout=system option
+	dnl or if you install boost with RPM
+	if test "$ac_boost_path" != ""; then
+		BOOST_LDFLAGS="-L$ac_boost_path/lib"
+		BOOST_CPPFLAGS="-I$ac_boost_path/include"
+	else
+		for ac_boost_path_tmp in /usr /usr/local /opt /opt/local ; do
+			if test -d "$ac_boost_path_tmp/include/boost" && test -r "$ac_boost_path_tmp/include/boost"; then
+				BOOST_LDFLAGS="-L$ac_boost_path_tmp/lib"
+				BOOST_CPPFLAGS="-I$ac_boost_path_tmp/include"
+				break;
+			fi
+		done
+	fi
+
+    dnl overwrite ld flags if we have required special directory with
+    dnl --with-boost-libdir parameter
+    if test "$ac_boost_lib_path" != ""; then
+       BOOST_LDFLAGS="-L$ac_boost_lib_path"
+    fi
+
+	CPPFLAGS_SAVED="$CPPFLAGS"
+	CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
+	export CPPFLAGS
+
+	LDFLAGS_SAVED="$LDFLAGS"
+	LDFLAGS="$LDFLAGS $BOOST_LDFLAGS"
+	export LDFLAGS
+
+	AC_LANG_PUSH(C++)
+     	AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+	@%:@include <boost/version.hpp>
+	]], [[
+	#if BOOST_VERSION >= $WANT_BOOST_VERSION
+	// Everything is okay
+	#else
+	#  error Boost version is too old
+	#endif
+	]])],[
+        AC_MSG_RESULT(yes)
+	succeeded=yes
+	found_system=yes
+       	],[
+       	])
+	AC_LANG_POP([C++])
+
+
+
+	dnl if we found no boost with system layout we search for boost libraries
+	dnl built and installed without the --layout=system option or for a staged(not installed) version
+	if test "x$succeeded" != "xyes"; then
+		_version=0
+		if test "$ac_boost_path" != ""; then
+			if test -d "$ac_boost_path" && test -r "$ac_boost_path"; then
+				for i in `ls -d $ac_boost_path/include/boost-* 2>/dev/null`; do
+					_version_tmp=`echo $i | sed "s#$ac_boost_path##" | sed 's/\/include\/boost-//' | sed 's/_/./'`
+					V_CHECK=`expr $_version_tmp \> $_version`
+					if test "$V_CHECK" = "1" ; then
+						_version=$_version_tmp
+					fi
+					VERSION_UNDERSCORE=`echo $_version | sed 's/\./_/'`
+					BOOST_CPPFLAGS="-I$ac_boost_path/include/boost-$VERSION_UNDERSCORE"
+				done
+			fi
+		else
+			for ac_boost_path in /usr /usr/local /opt /opt/local ; do
+				if test -d "$ac_boost_path" && test -r "$ac_boost_path"; then
+					for i in `ls -d $ac_boost_path/include/boost-* 2>/dev/null`; do
+						_version_tmp=`echo $i | sed "s#$ac_boost_path##" | sed 's/\/include\/boost-//' | sed 's/_/./'`
+						V_CHECK=`expr $_version_tmp \> $_version`
+						if test "$V_CHECK" = "1" ; then
+							_version=$_version_tmp
+	               					best_path=$ac_boost_path
+						fi
+					done
+				fi
+			done
+
+			VERSION_UNDERSCORE=`echo $_version | sed 's/\./_/'`
+			BOOST_CPPFLAGS="-I$best_path/include/boost-$VERSION_UNDERSCORE"
+            if test "$ac_boost_lib_path" = ""
+            then
+               BOOST_LDFLAGS="-L$best_path/lib"
+            fi
+
+	    		if test "x$BOOST_ROOT" != "x"; then
+				if test -d "$BOOST_ROOT" && test -r "$BOOST_ROOT" && test -d "$BOOST_ROOT/stage/lib" && test -r "$BOOST_ROOT/stage/lib"; then
+					version_dir=`expr //$BOOST_ROOT : '.*/\(.*\)'`
+					stage_version=`echo $version_dir | sed 's/boost_//' | sed 's/_/./g'`
+			        	stage_version_shorten=`expr $stage_version : '\([[0-9]]*\.[[0-9]]*\)'`
+					V_CHECK=`expr $stage_version_shorten \>\= $_version`
+                    if test "$V_CHECK" = "1" -a "$ac_boost_lib_path" = "" ; then
+						AC_MSG_NOTICE(We will use a staged boost library from $BOOST_ROOT)
+						BOOST_CPPFLAGS="-I$BOOST_ROOT"
+						BOOST_LDFLAGS="-L$BOOST_ROOT/stage/lib"
+					fi
+				fi
+	    		fi
+		fi
+
+		CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
+		export CPPFLAGS
+		LDFLAGS="$LDFLAGS $BOOST_LDFLAGS"
+		export LDFLAGS
+
+		AC_LANG_PUSH(C++)
+	     	AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
+		@%:@include <boost/version.hpp>
+		]], [[
+		#if BOOST_VERSION >= $WANT_BOOST_VERSION
+		// Everything is okay
+		#else
+		#  error Boost version is too old
+		#endif
+		]])],[
+        	AC_MSG_RESULT(yes)
+		succeeded=yes
+		found_system=yes
+       		],[
+	       	])
+		AC_LANG_POP([C++])
+	fi
+
+	if test "$succeeded" != "yes" ; then
+		if test "$_version" = "0" ; then
+			AC_MSG_ERROR([[We could not detect the boost libraries (version $boost_lib_version_req_shorten or higher). If you have a staged boost library (still not installed) please specify \$BOOST_ROOT in your environment and do not give a PATH to --with-boost option.  If you are sure you have boost installed, then check your version number looking in <boost/version.hpp>. See http://randspringer.de/boost for more documentation.]])
+		else
+			AC_MSG_ERROR([Your boost libraries seem to be too old (version $_version).])
+		fi
+	else
+		AC_SUBST(BOOST_CPPFLAGS)
+		AC_SUBST(BOOST_LDFLAGS)
+		AC_DEFINE(HAVE_BOOST,,[define if the Boost library is available])
+	fi
+
+        CPPFLAGS="$CPPFLAGS_SAVED"
+       	LDFLAGS="$LDFLAGS_SAVED"
+fi
+
+])
diff --git a/lang/c++/m4/m4_ax_boost_filesystem.m4 b/lang/c++/m4/m4_ax_boost_filesystem.m4
new file mode 100644
index 0000000..8f8a5d9
--- /dev/null
+++ b/lang/c++/m4/m4_ax_boost_filesystem.m4
@@ -0,0 +1,115 @@
+# ===========================================================================
+#    http://www.gnu.org/software/autoconf-archive/ax_boost_filesystem.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+#   AX_BOOST_FILESYSTEM
+#
+# DESCRIPTION
+#
+#   Test for Filesystem library from the Boost C++ libraries. The macro
+#   requires a preceding call to AX_BOOST_BASE. Further documentation is
+#   available at <http://randspringer.de/boost/index.html>.
+#
+#   This macro calls:
+#
+#     AC_SUBST(BOOST_FILESYSTEM_LIB)
+#
+#   And sets:
+#
+#     HAVE_BOOST_FILESYSTEM
+#
+# LICENSE
+#
+#   Copyright (c) 2009 Thomas Porschberg <thomas at randspringer.de>
+#   Copyright (c) 2009 Michael Tindal
+#   Copyright (c) 2009 Roman Rybalko <libtorrent at romanr.info>
+#
+#   Copying and distribution of this file, with or without modification, are
+#   permitted in any medium without royalty provided the copyright notice
+#   and this notice are preserved. This file is offered as-is, without any
+#   warranty.
+
+#serial 13
+
+AC_DEFUN([AX_BOOST_FILESYSTEM],
+[
+	AC_ARG_WITH([boost-filesystem],
+	AS_HELP_STRING([--with-boost-filesystem@<:@=special-lib@:>@],
+                   [use the Filesystem library from boost - it is possible to specify a certain library for the linker
+                        e.g. --with-boost-filesystem=boost_filesystem-gcc-mt ]),
+        [
+        if test "$withval" = "no"; then
+			want_boost="no"
+        elif test "$withval" = "yes"; then
+            want_boost="yes"
+            ax_boost_user_filesystem_lib=""
+        else
+		    want_boost="yes"
+        	ax_boost_user_filesystem_lib="$withval"
+		fi
+        ],
+        [want_boost="yes"]
+	)
+
+	if test "x$want_boost" = "xyes"; then
+        AC_REQUIRE([AC_PROG_CC])
+		CPPFLAGS_SAVED="$CPPFLAGS"
+		CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
+		export CPPFLAGS
+
+		LDFLAGS_SAVED="$LDFLAGS"
+		LDFLAGS="$LDFLAGS $BOOST_LDFLAGS"
+		export LDFLAGS
+
+		LIBS_SAVED=$LIBS
+		LIBS="$LIBS $BOOST_SYSTEM_LIB"
+		export LIBS
+
+        AC_CACHE_CHECK(whether the Boost::Filesystem library is available,
+					   ax_cv_boost_filesystem,
+        [AC_LANG_PUSH([C++])
+         AC_COMPILE_IFELSE(AC_LANG_PROGRAM([[@%:@include <boost/filesystem/path.hpp>]],
+                                   [[using namespace boost::filesystem;
+                                   path my_path( "foo/bar/data.txt" );
+                                   return 0;]]),
+            				       ax_cv_boost_filesystem=yes, ax_cv_boost_filesystem=no)
+         AC_LANG_POP([C++])
+		])
+		if test "x$ax_cv_boost_filesystem" = "xyes"; then
+			AC_DEFINE(HAVE_BOOST_FILESYSTEM,,[define if the Boost::Filesystem library is available])
+            BOOSTLIBDIR=`echo $BOOST_LDFLAGS | sed -e 's/@<:@^\/@:>@*//'`
+            if test "x$ax_boost_user_filesystem_lib" = "x"; then
+                for libextension in `ls $BOOSTLIBDIR/libboost_filesystem*.{so,dylib,a}* 2>/dev/null | sed 's,.*/,,' | sed -e 's;^lib\(boost_filesystem.*\)\.so.*$;\1;' -e 's;^lib\(boost_filesystem.*\)\.a*$;\1;' -e 's;^lib\(boost_filesystem.*\)\.dylib$;\1;'` ; do
+                     ax_lib=${libextension}
+				    AC_CHECK_LIB($ax_lib, exit,
+                                 [BOOST_FILESYSTEM_LIB="-l$ax_lib"; AC_SUBST(BOOST_FILESYSTEM_LIB) link_filesystem="yes"; break],
+                                 [link_filesystem="no"])
+  				done
+                if test "x$link_filesystem" != "xyes"; then
+                for libextension in `ls $BOOSTLIBDIR/boost_filesystem*.{dll,a}* 2>/dev/null | sed 's,.*/,,' | sed -e 's;^\(boost_filesystem.*\)\.dll.*$;\1;' -e 's;^\(boost_filesystem.*\)\.a*$;\1;'` ; do
+                     ax_lib=${libextension}
+				    AC_CHECK_LIB($ax_lib, exit,
+                                 [BOOST_FILESYSTEM_LIB="-l$ax_lib"; AC_SUBST(BOOST_FILESYSTEM_LIB) link_filesystem="yes"; break],
+                                 [link_filesystem="no"])
+  				done
+	            fi
+            else
+               for ax_lib in $ax_boost_user_filesystem_lib boost_filesystem-$ax_boost_user_filesystem_lib; do
+				      AC_CHECK_LIB($ax_lib, exit,
+                                   [BOOST_FILESYSTEM_LIB="-l$ax_lib"; AC_SUBST(BOOST_FILESYSTEM_LIB) link_filesystem="yes"; break],
+                                   [link_filesystem="no"])
+                  done
+
+            fi
+			if test "x$link_filesystem" != "xyes"; then
+				AC_MSG_ERROR(Could not link against $ax_lib !)
+			fi
+		fi
+
+		CPPFLAGS="$CPPFLAGS_SAVED"
+    		LDFLAGS="$LDFLAGS_SAVED"
+		LIBS="$LIBS_SAVED"
+	fi
+])
diff --git a/lang/c++/m4/m4_ax_boost_regex.m4 b/lang/c++/m4/m4_ax_boost_regex.m4
new file mode 100644
index 0000000..faebd13
--- /dev/null
+++ b/lang/c++/m4/m4_ax_boost_regex.m4
@@ -0,0 +1,105 @@
+# ===========================================================================
+#         http://www.nongnu.org/autoconf-archive/ax_boost_regex.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+#   AX_BOOST_REGEX
+#
+# DESCRIPTION
+#
+#   Test for Regex library from the Boost C++ libraries. The macro requires
+#   a preceding call to AX_BOOST_BASE. Further documentation is available at
+#   <http://randspringer.de/boost/index.html>.
+#
+#   This macro calls:
+#
+#     AC_SUBST(BOOST_REGEX_LIB)
+#
+#   And sets:
+#
+#     HAVE_BOOST_REGEX
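+#
+#   A hedged Makefile.am sketch (the target name is hypothetical) consuming
+#   the substituted variables, with BOOST_CPPFLAGS coming from the required
+#   preceding AX_BOOST_BASE call:
+#
+#     AM_CPPFLAGS  = $(BOOST_CPPFLAGS)
+#     myprog_LDADD = $(BOOST_REGEX_LIB)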
+#
+# LICENSE
+#
+#   Copyright (c) 2008 Thomas Porschberg <thomas at randspringer.de>
+#   Copyright (c) 2008 Michael Tindal
+#
+#   Copying and distribution of this file, with or without modification, are
+#   permitted in any medium without royalty provided the copyright notice
+#   and this notice are preserved.
+
+AC_DEFUN([AX_BOOST_REGEX],
+[
+	AC_ARG_WITH([boost-regex],
+	AS_HELP_STRING([--with-boost-regex@<:@=special-lib@:>@],
+                   [use the Regex library from boost - it is possible to specify a certain library for the linker
+                        e.g. --with-boost-regex=boost_regex-gcc-mt-d-1_33_1 ]),
+        [
+        if test "$withval" = "no"; then
+			want_boost="no"
+        elif test "$withval" = "yes"; then
+            want_boost="yes"
+            ax_boost_user_regex_lib=""
+        else
+		    want_boost="yes"
+        	ax_boost_user_regex_lib="$withval"
+		fi
+        ],
+        [want_boost="yes"]
+	)
+
+	if test "x$want_boost" = "xyes"; then
+        AC_REQUIRE([AC_PROG_CC])
+		CPPFLAGS_SAVED="$CPPFLAGS"
+		CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
+		export CPPFLAGS
+
+		LDFLAGS_SAVED="$LDFLAGS"
+		LDFLAGS="$LDFLAGS $BOOST_LDFLAGS"
+		export LDFLAGS
+
+        AC_CACHE_CHECK(whether the Boost::Regex library is available,
+					   ax_cv_boost_regex,
+        [AC_LANG_PUSH([C++])
+			 AC_COMPILE_IFELSE(AC_LANG_PROGRAM([[@%:@include <boost/regex.hpp>
+												]],
+                                   [[boost::regex r; return 0;]]),
+                   ax_cv_boost_regex=yes, ax_cv_boost_regex=no)
+         AC_LANG_POP([C++])
+		])
+		if test "x$ax_cv_boost_regex" = "xyes"; then
+			AC_DEFINE(HAVE_BOOST_REGEX,,[define if the Boost::Regex library is available])
+            BOOSTLIBDIR=`echo $BOOST_LDFLAGS | sed -e 's/@<:@^\/@:>@*//'`
+            if test "x$ax_boost_user_regex_lib" = "x"; then
+                for libextension in `ls $BOOSTLIBDIR/libboost_regex*.{so,a}* 2>/dev/null | sed 's,.*/,,' | sed -e 's;^lib\(boost_regex.*\)\.so.*$;\1;' -e 's;^lib\(boost_regex.*\)\.a*$;\1;'` ; do
+                     ax_lib=${libextension}
+				    AC_CHECK_LIB($ax_lib, exit,
+                                 [BOOST_REGEX_LIB="-l$ax_lib"; AC_SUBST(BOOST_REGEX_LIB) link_regex="yes"; break],
+                                 [link_regex="no"])
+  				done
+                if test "x$link_regex" != "xyes"; then
+                for libextension in `ls $BOOSTLIBDIR/boost_regex*.{dll,a}* 2>/dev/null | sed 's,.*/,,' | sed -e 's;^\(boost_regex.*\)\.dll.*$;\1;' -e 's;^\(boost_regex.*\)\.a*$;\1;'` ; do
+                     ax_lib=${libextension}
+				    AC_CHECK_LIB($ax_lib, exit,
+                                 [BOOST_REGEX_LIB="-l$ax_lib"; AC_SUBST(BOOST_REGEX_LIB) link_regex="yes"; break],
+                                 [link_regex="no"])
+  				done
+                fi
+
+            else
+               for ax_lib in $ax_boost_user_regex_lib boost_regex-$ax_boost_user_regex_lib; do
+				      AC_CHECK_LIB($ax_lib, exit,
+                                   [BOOST_REGEX_LIB="-l$ax_lib"; AC_SUBST(BOOST_REGEX_LIB) link_regex="yes"; break],
+                                   [link_regex="no"])
+               done
+            fi
+			if test "x$link_regex" != "xyes"; then
+				AC_MSG_ERROR(Could not link against $ax_lib !)
+			fi
+		fi
+
+		CPPFLAGS="$CPPFLAGS_SAVED"
+    	LDFLAGS="$LDFLAGS_SAVED"
+	fi
+])
diff --git a/lang/c++/m4/m4_ax_boost_system.m4 b/lang/c++/m4/m4_ax_boost_system.m4
new file mode 100644
index 0000000..9fe784b
--- /dev/null
+++ b/lang/c++/m4/m4_ax_boost_system.m4
@@ -0,0 +1,117 @@
+# ===========================================================================
+#      http://www.gnu.org/software/autoconf-archive/ax_boost_system.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+#   AX_BOOST_SYSTEM
+#
+# DESCRIPTION
+#
+#   Test for System library from the Boost C++ libraries. The macro requires
+#   a preceding call to AX_BOOST_BASE. Further documentation is available at
+#   <http://randspringer.de/boost/index.html>.
+#
+#   This macro calls:
+#
+#     AC_SUBST(BOOST_SYSTEM_LIB)
+#
+#   And sets:
+#
+#     HAVE_BOOST_SYSTEM
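+#
+#   The probe result is cached in ax_cv_boost_system, so a hedged
+#   configure.ac sketch can make the library mandatory after the call:
+#
+#     AX_BOOST_SYSTEM
+#     AS_IF([test "x$ax_cv_boost_system" != "xyes"],
+#           [AC_MSG_ERROR([Boost.System is required])])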
+#
+# LICENSE
+#
+#   Copyright (c) 2008 Thomas Porschberg <thomas at randspringer.de>
+#   Copyright (c) 2008 Michael Tindal
+#   Copyright (c) 2008 Daniel Casimiro <dan.casimiro at gmail.com>
+#
+#   Copying and distribution of this file, with or without modification, are
+#   permitted in any medium without royalty provided the copyright notice
+#   and this notice are preserved. This file is offered as-is, without any
+#   warranty.
+
+#serial 7
+
+AC_DEFUN([AX_BOOST_SYSTEM],
+[
+	AC_ARG_WITH([boost-system],
+	AS_HELP_STRING([--with-boost-system@<:@=special-lib@:>@],
+                   [use the System library from boost - it is possible to specify a certain library for the linker
+                        e.g. --with-boost-system=boost_system-gcc-mt ]),
+        [
+        if test "$withval" = "no"; then
+			want_boost="no"
+        elif test "$withval" = "yes"; then
+            want_boost="yes"
+            ax_boost_user_system_lib=""
+        else
+		    want_boost="yes"
+        	ax_boost_user_system_lib="$withval"
+		fi
+        ],
+        [want_boost="yes"]
+	)
+
+	if test "x$want_boost" = "xyes"; then
+        AC_REQUIRE([AC_PROG_CC])
+        AC_REQUIRE([AC_CANONICAL_BUILD])
+		CPPFLAGS_SAVED="$CPPFLAGS"
+		CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
+		export CPPFLAGS
+
+		LDFLAGS_SAVED="$LDFLAGS"
+		LDFLAGS="$LDFLAGS $BOOST_LDFLAGS"
+		export LDFLAGS
+
+        AC_CACHE_CHECK(whether the Boost::System library is available,
+					   ax_cv_boost_system,
+        [AC_LANG_PUSH([C++])
+			 CXXFLAGS_SAVE=$CXXFLAGS
+
+			 AC_COMPILE_IFELSE(AC_LANG_PROGRAM([[@%:@include <boost/system/error_code.hpp>]],
+                                   [[boost::system::system_category]]),
+                   ax_cv_boost_system=yes, ax_cv_boost_system=no)
+			 CXXFLAGS=$CXXFLAGS_SAVE
+             AC_LANG_POP([C++])
+		])
+		if test "x$ax_cv_boost_system" = "xyes"; then
+			AC_SUBST(BOOST_CPPFLAGS)
+
+			AC_DEFINE(HAVE_BOOST_SYSTEM,,[define if the Boost::System library is available])
+            BOOSTLIBDIR=`echo $BOOST_LDFLAGS | sed -e 's/@<:@^\/@:>@*//'`
+
+			LDFLAGS_SAVE=$LDFLAGS
+            if test "x$ax_boost_user_system_lib" = "x"; then
+                for libextension in `ls $BOOSTLIBDIR/libboost_system*.{so,a}* 2>/dev/null | sed 's,.*/,,' | sed -e 's;^lib\(boost_system.*\)\.so.*$;\1;' -e 's;^lib\(boost_system.*\)\.a*$;\1;'` ; do
+                     ax_lib=${libextension}
+				    AC_CHECK_LIB($ax_lib, exit,
+                                 [BOOST_SYSTEM_LIB="-l$ax_lib"; AC_SUBST(BOOST_SYSTEM_LIB) link_system="yes"; break],
+                                 [link_system="no"])
+  				done
+                if test "x$link_system" != "xyes"; then
+                for libextension in `ls $BOOSTLIBDIR/boost_system*.{dll,a}* 2>/dev/null | sed 's,.*/,,' | sed -e 's;^\(boost_system.*\)\.dll.*$;\1;' -e 's;^\(boost_system.*\)\.a*$;\1;'` ; do
+                     ax_lib=${libextension}
+				    AC_CHECK_LIB($ax_lib, exit,
+                                 [BOOST_SYSTEM_LIB="-l$ax_lib"; AC_SUBST(BOOST_SYSTEM_LIB) link_system="yes"; break],
+                                 [link_system="no"])
+  				done
+                fi
+
+            else
+               for ax_lib in $ax_boost_user_system_lib boost_system-$ax_boost_user_system_lib; do
+				      AC_CHECK_LIB($ax_lib, exit,
+                                   [BOOST_SYSTEM_LIB="-l$ax_lib"; AC_SUBST(BOOST_SYSTEM_LIB) link_system="yes"; break],
+                                   [link_system="no"])
+                  done
+
+            fi
+			if test "x$link_system" = "xno"; then
+				AC_MSG_ERROR(Could not link against $ax_lib !)
+			fi
+		fi
+
+		CPPFLAGS="$CPPFLAGS_SAVED"
+    	LDFLAGS="$LDFLAGS_SAVED"
+	fi
+])
diff --git a/lang/c++/m4/m4_ax_boost_thread.m4 b/lang/c++/m4/m4_ax_boost_thread.m4
new file mode 100644
index 0000000..ef3a68e
--- /dev/null
+++ b/lang/c++/m4/m4_ax_boost_thread.m4
@@ -0,0 +1,146 @@
+# ===========================================================================
+#      http://www.gnu.org/software/autoconf-archive/ax_boost_thread.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+#   AX_BOOST_THREAD
+#
+# DESCRIPTION
+#
+#   Test for Thread library from the Boost C++ libraries. The macro requires
+#   a preceding call to AX_BOOST_BASE. Further documentation is available at
+#   <http://randspringer.de/boost/index.html>.
+#
+#   This macro calls:
+#
+#     AC_SUBST(BOOST_THREAD_LIB)
+#
+#   And sets:
+#
+#     HAVE_BOOST_THREAD
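+#
+#   On success the macro also prepends the platform thread flag (-pthread,
+#   -pthreads on Solaris, -mthreads on MinGW) to BOOST_CPPFLAGS, so a
+#   consuming Makefile.am sketch (hypothetical target) needs both
+#   substitutions:
+#
+#     AM_CPPFLAGS  = $(BOOST_CPPFLAGS)
+#     myprog_LDADD = $(BOOST_THREAD_LIB)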
+#
+# LICENSE
+#
+#   Copyright (c) 2009 Thomas Porschberg <thomas at randspringer.de>
+#   Copyright (c) 2009 Michael Tindal
+#
+#   Copying and distribution of this file, with or without modification, are
+#   permitted in any medium without royalty provided the copyright notice
+#   and this notice are preserved. This file is offered as-is, without any
+#   warranty.
+
+#serial 15
+
+AC_DEFUN([AX_BOOST_THREAD],
+[
+	AC_ARG_WITH([boost-thread],
+	AS_HELP_STRING([--with-boost-thread@<:@=special-lib@:>@],
+                   [use the Thread library from boost - it is possible to specify a certain library for the linker
+                        e.g. --with-boost-thread=boost_thread-gcc-mt ]),
+        [
+        if test "$withval" = "no"; then
+			want_boost="no"
+        elif test "$withval" = "yes"; then
+            want_boost="yes"
+            ax_boost_user_thread_lib=""
+        else
+		    want_boost="yes"
+        	ax_boost_user_thread_lib="$withval"
+		fi
+        ],
+        [want_boost="yes"]
+	)
+
+	if test "x$want_boost" = "xyes"; then
+        AC_REQUIRE([AC_PROG_CC])
+        AC_REQUIRE([AC_CANONICAL_BUILD])
+		CPPFLAGS_SAVED="$CPPFLAGS"
+		CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS"
+		export CPPFLAGS
+
+		LDFLAGS_SAVED="$LDFLAGS"
+		LDFLAGS="$LDFLAGS $BOOST_LDFLAGS"
+		export LDFLAGS
+
+        AC_CACHE_CHECK(whether the Boost::Thread library is available,
+					   ax_cv_boost_thread,
+        [AC_LANG_PUSH([C++])
+			 CXXFLAGS_SAVE=$CXXFLAGS
+
+			 if test "x$build_os" = "xsolaris" ; then
+  				 CXXFLAGS="-pthreads $CXXFLAGS"
+			 elif test "x$build_os" = "xmingw32" ; then
+				 CXXFLAGS="-mthreads $CXXFLAGS"
+			 else
+				CXXFLAGS="-pthread $CXXFLAGS"
+			 fi
+			 AC_COMPILE_IFELSE(AC_LANG_PROGRAM([[@%:@include <boost/thread/thread.hpp>]],
+                                   [[boost::thread_group thrds;
+                                   return 0;]]),
+                   ax_cv_boost_thread=yes, ax_cv_boost_thread=no)
+			 CXXFLAGS=$CXXFLAGS_SAVE
+             AC_LANG_POP([C++])
+		])
+		if test "x$ax_cv_boost_thread" = "xyes"; then
+           if test "x$build_os" = "xsolaris" ; then
+			  BOOST_CPPFLAGS="-pthreads $BOOST_CPPFLAGS"
+		   elif test "x$build_os" = "xmingw32" ; then
+			  BOOST_CPPFLAGS="-mthreads $BOOST_CPPFLAGS"
+		   else
+			  BOOST_CPPFLAGS="-pthread $BOOST_CPPFLAGS"
+		   fi
+
+			AC_SUBST(BOOST_CPPFLAGS)
+
+			AC_DEFINE(HAVE_BOOST_THREAD,,[define if the Boost::Thread library is available])
+            BOOSTLIBDIR=`echo $BOOST_LDFLAGS | sed -e 's/@<:@^\/@:>@*//'`
+
+			LDFLAGS_SAVE=$LDFLAGS
+                        case "x$build_os" in
+                          *bsd* )
+                               LDFLAGS="-pthread $LDFLAGS"
+                               ;;
+                        esac
+            if test "x$ax_boost_user_thread_lib" = "x"; then
+                for libextension in `ls $BOOSTLIBDIR/libboost_thread*.so* 2>/dev/null | sed 's,.*/,,' | sed -e 's;^lib\(boost_thread.*\)\.so.*$;\1;'` `ls $BOOSTLIBDIR/libboost_thread*.a* 2>/dev/null | sed 's,.*/,,' | sed -e 's;^lib\(boost_thread.*\)\.a*$;\1;'`; do
+                     ax_lib=${libextension}
+				    AC_CHECK_LIB($ax_lib, exit,
+                                 [BOOST_THREAD_LIB="-l$ax_lib"; AC_SUBST(BOOST_THREAD_LIB) link_thread="yes"; break],
+                                 [link_thread="no"])
+  				done
+                if test "x$link_thread" != "xyes"; then
+                for libextension in `ls $BOOSTLIBDIR/boost_thread*.dll* 2>/dev/null | sed 's,.*/,,' | sed -e 's;^\(boost_thread.*\)\.dll.*$;\1;'` `ls $BOOSTLIBDIR/boost_thread*.a* 2>/dev/null | sed 's,.*/,,' | sed -e 's;^\(boost_thread.*\)\.a*$;\1;'` ; do
+                     ax_lib=${libextension}
+				    AC_CHECK_LIB($ax_lib, exit,
+                                 [BOOST_THREAD_LIB="-l$ax_lib"; AC_SUBST(BOOST_THREAD_LIB) link_thread="yes"; break],
+                                 [link_thread="no"])
+  				done
+                fi
+
+            else
+               for ax_lib in $ax_boost_user_thread_lib boost_thread-$ax_boost_user_thread_lib; do
+				      AC_CHECK_LIB($ax_lib, exit,
+                                   [BOOST_THREAD_LIB="-l$ax_lib"; AC_SUBST(BOOST_THREAD_LIB) link_thread="yes"; break],
+                                   [link_thread="no"])
+                  done
+
+            fi
+			if test "x$link_thread" = "xno"; then
+				AC_MSG_ERROR(Could not link against $ax_lib !)
+                        else
+                           case "x$build_os" in
+                              *bsd* )
+			        BOOST_LDFLAGS="-pthread $BOOST_LDFLAGS"
+			        ;;
+                           esac
+
+			fi
+		fi
+
+		CPPFLAGS="$CPPFLAGS_SAVED"
+    	LDFLAGS="$LDFLAGS_SAVED"
+	fi
+])
diff --git a/lang/c++/parser/AvroLex.ll b/lang/c++/parser/AvroLex.ll
new file mode 100644
index 0000000..a666d86
--- /dev/null
+++ b/lang/c++/parser/AvroLex.ll
@@ -0,0 +1,203 @@
+%{
+/*
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+ 
+ http://www.apache.org/licenses/LICENSE-2.0
+ 
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+*/
+
+// On some systems an EOF definition won't be found, so provide one here.
+#ifndef EOF
+#define EOF (-1)
+#endif
+
+#include "AvroYacc.hh"
+
+// this undef is a hack for my mac implementation
+#undef yyFlexLexer
+#include "Compiler.hh"
+
+#define YY_STACK_USED 1
+
+using std::cin;
+using std::cout;
+using std::cerr;
+
+%}
+
+%option c++
+%option noyywrap
+
+%{
+
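+// Glue between the flex scanner and the yacc parser: token codes falling
+// strictly between AVRO_LEX_OUTPUT_TEXT_BEGIN and AVRO_LEX_OUTPUT_TEXT_END
+// (names, named types, field names, symbols, sizes -- see AvroYacc.yy)
+// carry text, which is copied into the CompilerContext so that the grammar
+// actions can retrieve it.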
+int yylex(int *val, void *ctx)
+{
+    avro::CompilerContext *c = static_cast<avro::CompilerContext *>(ctx);
+    int ret = c->lexer().yylex();
+    if( ret > AVRO_LEX_OUTPUT_TEXT_BEGIN && ret < AVRO_LEX_OUTPUT_TEXT_END ) { 
+        c->setText( c->lexer().YYText()) ;
+    }
+    return ret;
+}
+
+%}
+
+%x READTYPE
+%x STARTTYPE
+%x STARTSCHEMA
+%x READNAME
+%x READFIELD
+%x READFIELDS
+%x READFIELDNAME
+%x READSYMBOLS
+%x READSYMBOL
+%x READSIZE
+%x INUNION
+%x INOBJECT
+%x READMETADATA
+%x SKIPJSONSTRING
+%x SKIPJSONARRAY
+%x SKIPJSONOBJECT
+
+ws [ \t\r\n]
+nonws [^ \t\r\n]
+delim {ws}*:{ws}*
+avrotext [a-zA-Z_][a-zA-Z0-9_.]*
+startunion \[
+startobject \{
+integer [0-9]+
+anytext .*
+
+%%
+<READTYPE>int                   return AVRO_LEX_INT;
+<READTYPE>long                  return AVRO_LEX_LONG;
+<READTYPE>null                  return AVRO_LEX_NULL;
+<READTYPE>boolean               return AVRO_LEX_BOOL;
+<READTYPE>float                 return AVRO_LEX_FLOAT;
+<READTYPE>double                return AVRO_LEX_DOUBLE;
+<READTYPE>string                return AVRO_LEX_STRING;
+<READTYPE>bytes                 return AVRO_LEX_BYTES;
+<READTYPE>record                return AVRO_LEX_RECORD;
+<READTYPE>enum                  return AVRO_LEX_ENUM;
+<READTYPE>map                   return AVRO_LEX_MAP;
+<READTYPE>array                 return AVRO_LEX_ARRAY;
+<READTYPE>fixed                 return AVRO_LEX_FIXED;
+<READTYPE>{avrotext}            return AVRO_LEX_NAMED_TYPE;
+<READTYPE>\"                    yy_pop_state(); 
+
+<READNAME>{avrotext}            return AVRO_LEX_NAME;
+<READNAME>\"                    yy_pop_state();
+
+<READSYMBOL>{avrotext}          return AVRO_LEX_SYMBOL;
+<READSYMBOL>\"                  yy_pop_state();
+
+<READFIELDNAME>{avrotext}       return AVRO_LEX_FIELD_NAME;
+<READFIELDNAME>\"               yy_pop_state();
+
+<READFIELD>\"type\"{delim}      yy_push_state(STARTSCHEMA); 
+<READFIELD>\"name\"{delim}\"    yy_push_state(READFIELDNAME); 
+<READFIELD>\}                   yy_pop_state(); return AVRO_LEX_FIELD_END;
+<READFIELD>,                    return yytext[0];
+<READFIELD>\"{avrotext}\"+{delim}      yy_push_state(READMETADATA); return AVRO_LEX_METADATA;
+<READFIELD>{ws}                 ;
+
+<READFIELDS>\{                  yy_push_state(READFIELD); return AVRO_LEX_FIELD;
+<READFIELDS>\]                  yy_pop_state(); return AVRO_LEX_FIELDS_END;
+<READFIELDS>,                   return yytext[0];
+<READFIELDS>{ws}                ;
+
+<READSYMBOLS>\"                 yy_push_state(READSYMBOL); 
+<READSYMBOLS>,                  return yytext[0];
+<READSYMBOLS>\]                 yy_pop_state(); return AVRO_LEX_SYMBOLS_END;
+<READSYMBOLS>{ws}               ;
+
+<READSIZE>{integer}             yy_pop_state(); return AVRO_LEX_SIZE;
+
+<INUNION>\"                     yy_push_state(READTYPE); return AVRO_LEX_SIMPLE_TYPE;
+<INUNION>{startobject}          yy_push_state(INOBJECT); return yytext[0];
+<INUNION>\]                     yy_pop_state(); return yytext[0];
+<INUNION>,                      return yytext[0];
+<INUNION>{ws}                   ;
+
+<SKIPJSONSTRING>\"              yy_pop_state();
+<SKIPJSONSTRING>\\.             ;
+<SKIPJSONSTRING>[^\"\\]+        ;
+
+<SKIPJSONOBJECT>\}              yy_pop_state();
+<SKIPJSONOBJECT>\{              yy_push_state(SKIPJSONOBJECT);
+<SKIPJSONOBJECT>\"              yy_push_state(SKIPJSONSTRING);
+<SKIPJSONOBJECT>[^\{\}\"]+      ;
+
+<SKIPJSONARRAY>\]               yy_pop_state();
+<SKIPJSONARRAY>\[               yy_push_state(SKIPJSONARRAY);
+<SKIPJSONARRAY>\"               yy_push_state(SKIPJSONSTRING);
+<SKIPJSONARRAY>[^\[\]\"]+       ;  
+
+<READMETADATA>\"                yy_pop_state(); yy_push_state(SKIPJSONSTRING);
+<READMETADATA>\{                yy_pop_state(); yy_push_state(SKIPJSONOBJECT);
+<READMETADATA>\[                yy_pop_state(); yy_push_state(SKIPJSONARRAY);
+<READMETADATA>[^\"\{\[,\}]+     yy_pop_state();
+
+<INOBJECT>\"type\"{delim}       yy_push_state(STARTTYPE); return AVRO_LEX_TYPE;
+<INOBJECT>\"name\"{delim}\"     yy_push_state(READNAME); 
+<INOBJECT>\"size\"{delim}       yy_push_state(READSIZE);
+<INOBJECT>\"items\"{delim}      yy_push_state(STARTSCHEMA); return AVRO_LEX_ITEMS;
+<INOBJECT>\"values\"{delim}     yy_push_state(STARTSCHEMA); return AVRO_LEX_VALUES;
+<INOBJECT>\"fields\"{delim}\[   yy_push_state(READFIELDS); return AVRO_LEX_FIELDS; 
+<INOBJECT>\"symbols\"{delim}\[  yy_push_state(READSYMBOLS); return AVRO_LEX_SYMBOLS;
+<INOBJECT>,                     return yytext[0];
+<INOBJECT>\}                    yy_pop_state(); return yytext[0];
+<INOBJECT>\"{avrotext}+\"{delim}       yy_push_state(READMETADATA); return AVRO_LEX_METADATA;
+<INOBJECT>{ws}                  ;
+
+<STARTTYPE>\"                   yy_pop_state(); yy_push_state(READTYPE); 
+<STARTTYPE>{startunion}         yy_pop_state(); yy_push_state(INUNION); return yytext[0];
+<STARTTYPE>{startobject}        yy_pop_state(); yy_push_state(INOBJECT); return yytext[0];
+
+<STARTSCHEMA>\"                 yy_pop_state(); yy_push_state(READTYPE); return AVRO_LEX_SIMPLE_TYPE;
+<STARTSCHEMA>{startunion}       yy_pop_state(); yy_push_state(INUNION); return yytext[0];
+<STARTSCHEMA>{startobject}      yy_pop_state(); yy_push_state(INOBJECT); return yytext[0];
+
+{startobject}                   yy_push_state(INOBJECT); return yytext[0];
+{startunion}                    yy_push_state(INUNION); return yytext[0];
+\"                              yy_push_state(READTYPE); return AVRO_LEX_SIMPLE_TYPE;
+{ws}                            ;
+<<EOF>>                         {
+#if !YY_FLEX_SUBMINOR_VERSION || YY_FLEX_SUBMINOR_VERSION < 27
+// Versions of flex before 2.5.27 do not free their start-state stack when done, so explicitly free it.
+// Note that versions before that did not actually define a subminor version macro.
+                                    if (yy_start_stack) {
+                                        yy_flex_free(yy_start_stack);
+                                        yy_start_stack = 0;
+                                    }
+#endif
+#if YY_FLEX_SUBMINOR_VERSION > 35
+// At this time, 2.5.35 is the latest version.
+#warning "Warning:  untested version of flex"
+#endif
+#if YY_FLEX_SUBMINOR_VERSION >= 31 && YY_FLEX_SUBMINOR_VERSION < 34
+// Versions of flex starting with 2.5.31 do not free yy_buffer_stack, so do so
+// explicitly (yy_delete_buffer must be called first to free the pointers stored on the stack;
+// then it is safe to free the stack itself).  This was fixed in flex 2.5.34.
+                                    if(yy_buffer_stack) {
+                                        yy_delete_buffer(YY_CURRENT_BUFFER);
+                                        yyfree(yy_buffer_stack);
+                                        yy_buffer_stack = 0;
+                                    }
+#endif
+                                    yyterminate();
+                                }
+
+%%
+
diff --git a/lang/c++/parser/AvroYacc.yy b/lang/c++/parser/AvroYacc.yy
new file mode 100644
index 0000000..20bd629
--- /dev/null
+++ b/lang/c++/parser/AvroYacc.yy
@@ -0,0 +1,200 @@
+%{
+/*
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+ 
+ http://www.apache.org/licenses/LICENSE-2.0
+ 
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+*/
+
+#include <boost/format.hpp>
+#include "Compiler.hh"
+#include "Exception.hh"
+
+#define YYLEX_PARAM ctx
+#define YYPARSE_PARAM ctx
+
+void yyerror(const char *str)
+{
+    throw avro::Exception(boost::format("Parser error: %1%") % str);
+}
+ 
+extern void *lexer; 
+extern int yylex(int *, void *);
+  
+avro::CompilerContext &context(void *ctx) { 
+    return *static_cast<avro::CompilerContext *>(ctx);
+};
+  
+%}
+
+%pure-parser
+%error-verbose
+
+%token AVRO_LEX_INT AVRO_LEX_LONG 
+%token AVRO_LEX_FLOAT AVRO_LEX_DOUBLE
+%token AVRO_LEX_BOOL AVRO_LEX_NULL 
+%token AVRO_LEX_BYTES AVRO_LEX_STRING 
+%token AVRO_LEX_RECORD AVRO_LEX_ENUM AVRO_LEX_ARRAY AVRO_LEX_MAP AVRO_LEX_UNION AVRO_LEX_FIXED
+
+%token AVRO_LEX_METADATA
+
+%token AVRO_LEX_SYMBOLS AVRO_LEX_SYMBOLS_END
+%token AVRO_LEX_FIELDS AVRO_LEX_FIELDS_END AVRO_LEX_FIELD AVRO_LEX_FIELD_END
+
+%token AVRO_LEX_TYPE AVRO_LEX_ITEMS AVRO_LEX_VALUES 
+
+// Tokens that output text:
+%token AVRO_LEX_OUTPUT_TEXT_BEGIN
+%token AVRO_LEX_NAME
+%token AVRO_LEX_NAMED_TYPE
+%token AVRO_LEX_FIELD_NAME
+%token AVRO_LEX_SYMBOL
+%token AVRO_LEX_SIZE
+%token AVRO_LEX_OUTPUT_TEXT_END
+
+%token AVRO_LEX_SIMPLE_TYPE
+
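+/*
+ * Orientation sketch: a JSON schema such as
+ *   {"type": "record", "name": "Test", "fields": [...]}
+ * arrives from AvroLex.ll as '{' followed by AVRO_LEX_TYPE, AVRO_LEX_RECORD,
+ * AVRO_LEX_NAME, AVRO_LEX_FIELDS... and reduces here through
+ * object -> attributelist -> type_attribute / name_attribute / fields_attribute.
+ */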
+%%
+
+avroschema: 
+        simpleprimitive | object | union_t
+        ;
+ 
+primitive:
+        AVRO_LEX_INT    { context(ctx).addType(avro::AVRO_INT); }
+        |
+        AVRO_LEX_LONG   { context(ctx).addType(avro::AVRO_LONG); }
+        |
+        AVRO_LEX_FLOAT  { context(ctx).addType(avro::AVRO_FLOAT); }
+        |
+        AVRO_LEX_DOUBLE { context(ctx).addType(avro::AVRO_DOUBLE); }
+        |
+        AVRO_LEX_BOOL   { context(ctx).addType(avro::AVRO_BOOL); }
+        |
+        AVRO_LEX_NULL   { context(ctx).addType(avro::AVRO_NULL); }
+        |
+        AVRO_LEX_BYTES  { context(ctx).addType(avro::AVRO_BYTES); }
+        |
+        AVRO_LEX_STRING { context(ctx).addType(avro::AVRO_STRING); }
+        |
+        AVRO_LEX_NAMED_TYPE { context(ctx).addNamedType(); }
+        ;
+
+simpleprimitive:
+        AVRO_LEX_SIMPLE_TYPE { context(ctx).startType(); } primitive { context(ctx).stopType(); }
+        ;
+
+primitive_t:
+        AVRO_LEX_TYPE primitive
+        ;
+
+array_t:
+        AVRO_LEX_TYPE AVRO_LEX_ARRAY { context(ctx).addType(avro::AVRO_ARRAY); }
+        ;
+
+enum_t: 
+        AVRO_LEX_TYPE AVRO_LEX_ENUM { context(ctx).addType(avro::AVRO_ENUM); }
+        ;
+
+fixed_t:
+        AVRO_LEX_TYPE AVRO_LEX_FIXED { context(ctx).addType(avro::AVRO_FIXED); }
+        ;
+
+map_t: 
+        AVRO_LEX_TYPE AVRO_LEX_MAP { context(ctx).addType(avro::AVRO_MAP); }
+        ;
+
+record_t: 
+        AVRO_LEX_TYPE AVRO_LEX_RECORD { context(ctx).addType(avro::AVRO_RECORD); }
+        ;
+
+type_attribute:
+        array_t | enum_t | fixed_t | map_t | record_t | primitive_t
+        ;
+
+union_t:
+        '[' { context(ctx).startType(); context(ctx).addType(avro::AVRO_UNION); context(ctx).setTypesAttribute(); } 
+        unionlist
+        ']' { context(ctx).stopType(); }
+        ;
+
+object: 
+        '{' { context(ctx).startType(); } 
+         attributelist
+        '}' { context(ctx).stopType(); }
+        ;
+        
+name_attribute:
+        AVRO_LEX_NAME { context(ctx).setNameAttribute(); }
+        ;
+
+size_attribute:
+        AVRO_LEX_SIZE { context(ctx).setSizeAttribute(); }
+        ;
+
+values_attribute:
+        AVRO_LEX_VALUES { context(ctx).setValuesAttribute(); } avroschema 
+        ;
+
+fields_attribute:
+        AVRO_LEX_FIELDS { context(ctx).setFieldsAttribute(); } fieldslist AVRO_LEX_FIELDS_END
+        ;
+
+items_attribute:
+        AVRO_LEX_ITEMS { context(ctx).setItemsAttribute(); } avroschema
+        ;
+
+symbols_attribute:
+        AVRO_LEX_SYMBOLS symbollist AVRO_LEX_SYMBOLS_END
+        ;
+
+attribute:
+        type_attribute | name_attribute | fields_attribute | items_attribute | size_attribute | values_attribute | symbols_attribute | AVRO_LEX_METADATA
+        ;
+
+attributelist: 
+        attribute | attributelist ',' attribute
+        ;
+
+symbol:
+        AVRO_LEX_SYMBOL { context(ctx).setSymbolsAttribute(); }
+        ;
+
+symbollist:
+        symbol | symbollist ',' symbol
+        ;
+
+fieldsetting:
+        fieldname | avroschema | AVRO_LEX_METADATA
+        ;
+
+fieldsettinglist:
+        fieldsetting | fieldsettinglist ',' fieldsetting 
+        ;
+
+fields:
+        AVRO_LEX_FIELD fieldsettinglist AVRO_LEX_FIELD_END
+        ;   
+
+fieldname:
+        AVRO_LEX_FIELD_NAME { context(ctx).textContainsFieldName(); }
+        ;
+
+fieldslist:
+        fields | fieldslist ',' fields
+        ;
+
+unionlist: 
+        avroschema | unionlist ',' avroschema
+        ;
diff --git a/lang/c++/scripts/gen-cppcode.py b/lang/c++/scripts/gen-cppcode.py
new file mode 100644
index 0000000..7fbd321
--- /dev/null
+++ b/lang/c++/scripts/gen-cppcode.py
@@ -0,0 +1,685 @@
+#!/usr/bin/python
+
+license = '''/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+'''
+
+headers = '''
+#include <stdint.h>
+#include <string>
+#include <vector>
+#include <map>
+#include "Boost.hh"
+#include "Exception.hh"
+#include "AvroSerialize.hh"
+#include "AvroParse.hh"
+#include "Layout.hh"
+'''
+
+done = False
+
+typeToC= { 'int' : 'int32_t', 'long' :'int64_t', 'float' : 'float', 'double' : 'double', 
+'boolean' : 'bool', 'null': 'avro::Null', 'string' : 'std::string', 'bytes' : 'std::vector<uint8_t>'} 
+
+structList = []
+structNames = {} 
+forwardDeclareList = []
+
+def addStruct(name, declaration) :
+    if not structNames.has_key(name) :
+        structNames[name] = True
+        structList.append(declaration)
+
+def addForwardDeclare(declaration) :
+    code = 'struct ' + declaration + ';'
+    forwardDeclareList.append(code)
+
+def doPrimitive(type):
+    return (typeToC[type], type)
+
+def doSymbolic(args):
+    addForwardDeclare(args[1])
+    return (args[1], args[1])
+
+def addLayout(name, type, var) :
+    result = '        add(new $offsetType$(offset + offsetof($name$, $var$)));\n'
+    result = result.replace('$name$', name)
+    if typeToC.has_key(type) : 
+        offsetType = 'avro::PrimitiveLayout'
+    else :
+        offsetType = type+ '_Layout'
+    result = result.replace('$offsetType$', offsetType)
+    result = result.replace('$var$', var)
+    return result;
+
+def addSimpleLayout(type) :
+    result = '        add(new $offsetType$);\n'
+    if typeToC.has_key(type) : 
+        offsetType = 'avro::PrimitiveLayout'
+    else :
+        offsetType = type+ '_Layout'
+    return result.replace('$offsetType$', offsetType)
+
+recordfieldTemplate = '$type$ $name$\n'
+recordTemplate = '''struct $name$ {
+
+    $name$ () :
+$initializers$
+    { }
+
+$recordfields$};
+
+template <typename Serializer>
+inline void serialize(Serializer &s, const $name$ &val, const boost::true_type &) {
+    s.writeRecord();
+$serializefields$    s.writeRecordEnd();
+}
+
+template <typename Parser>
+inline void parse(Parser &p, $name$ &val, const boost::true_type &) {
+    p.readRecord();
+$parsefields$    p.readRecordEnd();
+}
+
+class $name$_Layout : public avro::CompoundLayout {
+  public:
+    $name$_Layout(size_t offset = 0) :
+        CompoundLayout(offset)
+    {
+$offsetlist$    }
+}; 
+'''
+
+def doRecord(args):
+    structDef = recordTemplate;
+    typename = args[1];
+    structDef = structDef.replace('$name$', typename);
+    fields = ''
+    serializefields = ''
+    parsefields = ''
+    initlist = ''
+    offsetlist = ''
+    end = False
+    while not end:
+        line = getNextLine()
+        if line[0] == 'end': 
+            end = True
+            initlist = initlist.rstrip(',\n')
+        elif line[0] == 'name':
+            fieldname = line[1]
+            fieldline = getNextLine()
+            fieldtypename, fieldtype = processType(fieldline)
+            fields += '    ' +  fieldtypename + ' ' + fieldname + ';\n'
+            serializefields += '    serialize(s, val.' + fieldname + ');\n'
+            initlist += '        ' + fieldname + '(),\n'
+            parsefields += '    parse(p, val.' + fieldname + ');\n'
+            offsetlist += addLayout(typename, fieldtype, fieldname)
+    structDef = structDef.replace('$initializers$', initlist)
+    structDef = structDef.replace('$recordfields$', fields)
+    structDef = structDef.replace('$serializefields$', serializefields)
+    structDef = structDef.replace('$parsefields$', parsefields)
+    structDef = structDef.replace('$offsetlist$', offsetlist)
+    addStruct(typename, structDef)
+    return (typename,typename)
+
+uniontypestemplate = 'typedef $type$ Choice$N$Type'
+unionTemplate = '''struct $name$ {
+
+$typedeflist$
+    typedef void* (*GenericSetter)($name$ *, int64_t);
+
+    $name$() : 
+        choice(0), 
+        value(T0()),
+        genericSetter(&$name$::genericSet)
+    { }
+
+$setfuncs$
+#ifdef AVRO_BOOST_NO_ANYREF
+    template<typename T>
+    const T &getValue() const {
+        const T *ptr = boost::any_cast<T>(&value);
+        return *ptr;
+    }
+#else
+    template<typename T>
+    const T &getValue() const {
+        return boost::any_cast<const T&>(value);
+    }
+#endif
+
+    static void *genericSet($name$ *u, int64_t choice) {
+        boost::any *val = &(u->value);
+        void *data = NULL;
+        switch (choice) {$switch$
+        }
+        return data;
+    }
+
+    int64_t choice; 
+    boost::any value;
+    GenericSetter genericSetter;
+};
+
+template <typename Serializer>
+inline void serialize(Serializer &s, const $name$ &val, const boost::true_type &) {
+    s.writeUnion(val.choice);
+    switch(val.choice) {
+$switchserialize$      default :
+        throw avro::Exception("Unrecognized union choice");
+    }
+}
+
+template <typename Parser>
+inline void parse(Parser &p, $name$ &val, const boost::true_type &) {
+    val.choice = p.readUnion();
+    switch(val.choice) {
+$switchparse$      default :
+        throw avro::Exception("Unrecognized union choice");
+    }
+}
+
+class $name$_Layout : public avro::CompoundLayout {
+  public:
+    $name$_Layout(size_t offset = 0) :
+        CompoundLayout(offset)
+    {
+        add(new avro::PrimitiveLayout(offset + offsetof($name$, choice)));
+        add(new avro::PrimitiveLayout(offset + offsetof($name$, genericSetter)));
+$offsetlist$    }
+}; 
+'''
+
+unionser = '      case $choice$:\n        serialize(s, val.getValue< $type$ >());\n        break;\n'
+unionpar = '      case $choice$:\n        { $type$ chosenVal; parse(p, chosenVal); val.value = chosenVal; }\n        break;\n'
+
+setfunc =  '''    void set_$name$(const $type$ &val) {
+        choice = $N$;
+        value =  val;
+    };\n'''
+
+switcher = '''\n          case $N$:
+            *val = T$N$();
+            data = boost::any_cast<T$N$>(val);
+            break;'''
+
+
+def doUnion(args):
+    structDef = unionTemplate
+    uniontypes = ''
+    switchserialize= ''
+    switchparse= ''
+    typename = 'Union_of'
+    setters = ''
+    switches = ''
+    offsetlist = ''
+    i = 0
+    end = False
+    while not end:
+        line = getNextLine()
+        if line[0] == 'end': end = True
+        else :
+            uniontype, name = processType(line)
+            typename += '_' + name
+            uniontypes += '    ' + 'typedef ' + uniontype + ' T' + str(i) + ';\n'
+            switch = unionser
+            switch = switch.replace('$choice$', str(i))
+            switch = switch.replace('$type$', uniontype)
+            switchserialize += switch 
+            switch = unionpar
+            switch = switch.replace('$choice$', str(i))
+            switch = switch.replace('$type$', uniontype)
+            switchparse += switch 
+            setter = setfunc
+            setter = setter.replace('$name$', name)
+            setter = setter.replace('$type$', uniontype)
+            setter = setter.replace('$N$', str(i))
+            setters += setter
+            switch = switcher
+            switches += switch.replace('$N$', str(i))
+            offsetlist += addSimpleLayout(name)
+        i += 1
+    structDef = structDef.replace('$name$', typename)
+    structDef = structDef.replace('$typedeflist$', uniontypes)
+    structDef = structDef.replace('$switchserialize$', switchserialize)
+    structDef = structDef.replace('$switchparse$', switchparse)
+    structDef = structDef.replace('$setfuncs$', setters)
+    structDef = structDef.replace('$switch$', switches)
+    structDef = structDef.replace('$offsetlist$', offsetlist)
+    addStruct(typename, structDef)
+    return (typename,typename)
+
+enumTemplate = '''struct $name$ {
+
+    enum EnumSymbols {
+        $enumsymbols$
+    };
+
+    $name$() : 
+        value($firstsymbol$) 
+    { }
+
+    EnumSymbols value;
+};
+
+template <typename Serializer>
+inline void serialize(Serializer &s, const $name$ &val, const boost::true_type &) {
+    s.writeEnum(val.value);
+}
+
+template <typename Parser>
+inline void parse(Parser &p, $name$ &val, const boost::true_type &) {
+    val.value = static_cast<$name$::EnumSymbols>(p.readEnum());
+}
+
+class $name$_Layout : public avro::CompoundLayout {
+  public:
+    $name$_Layout(size_t offset = 0) :
+        CompoundLayout(offset)
+    {
+        add(new avro::PrimitiveLayout(offset + offsetof($name$, value)));
+    }
+}; 
+'''
+
+def doEnum(args):
+    structDef = enumTemplate;
+    typename = args[1]
+    structDef = structDef.replace('$name$', typename)
+    end = False
+    symbols = '';
+    firstsymbol = '';
+    while not end:
+        line = getNextLine()
+        if line[0] == 'end': end = True
+        elif line[0] == 'name':
+            if symbols == '':
+                firstsymbol = line[1]
+            else :
+                symbols += ', '
+            symbols += line[1]
+        else: print "error: unexpected entry in enum definition: " + ' '.join(line)
+    structDef = structDef.replace('$enumsymbols$', symbols);
+    structDef = structDef.replace('$firstsymbol$', firstsymbol);
+    addStruct(typename, structDef)
+    return (typename,typename)
+
+arrayTemplate = '''struct $name$ {
+    typedef $valuetype$ ValueType;
+    typedef std::vector<ValueType> ArrayType;
+    typedef ValueType* (*GenericSetter)($name$ *);
+    
+    $name$() :
+        value(),
+        genericSetter(&$name$::genericSet)
+    { }
+
+    static ValueType *genericSet($name$ *array) {
+        array->value.push_back(ValueType());
+        return &array->value.back();
+    }
+
+    void addValue(const ValueType &val) {
+        value.push_back(val);
+    }
+
+    ArrayType value;
+    GenericSetter genericSetter;
+
+};
+
+template <typename Serializer>
+inline void serialize(Serializer &s, const $name$ &val, const boost::true_type &) {
+    const size_t size = val.value.size();
+    if(size) {
+        s.writeArrayBlock(size);
+        for(size_t i = 0; i < size; ++i) {
+            serialize(s, val.value[i]);
+        }
+    }
+    s.writeArrayEnd();
+}
+
+template <typename Parser>
+inline void parse(Parser &p, $name$ &val, const boost::true_type &) {
+    val.value.clear();
+    while(1) {
+        int size = p.readArrayBlockSize();
+        if(size > 0) {
+            val.value.reserve(val.value.size() + size);
+            while (size-- > 0) { 
+                val.value.push_back($name$::ValueType());
+                parse(p, val.value.back());
+            }
+        }
+        else {
+            break;
+        }
+    } 
+}
+
+class $name$_Layout : public avro::CompoundLayout {
+  public:
+    $name$_Layout(size_t offset = 0) :
+        CompoundLayout(offset)
+    {
+        add(new avro::PrimitiveLayout(offset + offsetof($name$, genericSetter)));
+$offsetlist$    }
+}; 
+'''
+
+def doArray(args):
+    structDef = arrayTemplate
+    line = getNextLine()
+    arraytype, typename = processType(line)
+    offsetlist = addSimpleLayout(typename)
+    typename = 'Array_of_' + typename
+
+    structDef = structDef.replace('$name$', typename)
+    structDef = structDef.replace('$valuetype$', arraytype)
+    structDef = structDef.replace('$offsetlist$', offsetlist)
+
+    line = getNextLine()
+    if line[0] != 'end': print 'error: expected end of array definition'
+
+    addStruct(typename, structDef)
+    return (typename,typename)
+
+mapTemplate = '''struct $name$ {
+    typedef $valuetype$ ValueType;
+    typedef std::map<std::string, ValueType> MapType;
+    typedef ValueType* (*GenericSetter)($name$ *, const std::string &);
+    
+    $name$() :
+        value(),
+        genericSetter(&$name$::genericSet)
+    { }
+
+    void addValue(const std::string &key, const ValueType &val) {
+        value.insert(MapType::value_type(key, val));
+    }
+
+    static ValueType *genericSet($name$ *map, const std::string &key) { 
+        map->value[key] = ValueType();
+        return &(map->value[key]);
+    }
+
+    MapType value;
+    GenericSetter genericSetter;
+
+};
+
+template <typename Serializer>
+inline void serialize(Serializer &s, const $name$ &val, const boost::true_type &) {
+    if(val.value.size()) {
+        s.writeMapBlock(val.value.size());
+        $name$::MapType::const_iterator iter = val.value.begin();
+        $name$::MapType::const_iterator end  = val.value.end();
+        while(iter!=end) {
+            serialize(s, iter->first);
+            serialize(s, iter->second);
+            ++iter;
+        }
+    }
+    s.writeMapEnd();
+}
+
+template <typename Parser>
+inline void parse(Parser &p, $name$ &val, const boost::true_type &) {
+    val.value.clear();
+    while(1) {
+        int size = p.readMapBlockSize();
+        if(size > 0) {
+            while (size-- > 0) { 
+                std::string key;
+                parse(p, key);
+                $name$::ValueType m;
+                parse(p, m);
+                val.value.insert($name$::MapType::value_type(key, m));
+            }
+        }
+        else {
+            break;
+        }
+    } 
+}
+
+class $name$_Layout : public avro::CompoundLayout {
+  public:
+    $name$_Layout(size_t offset = 0) :
+        CompoundLayout(offset)
+    {
+        add(new avro::PrimitiveLayout(offset + offsetof($name$, genericSetter)));
+$offsetlist$    }
+}; 
+'''
+
+def doMap(args):
+    structDef = mapTemplate
+    line = getNextLine() # the key type line, which must be 'string'
+    line = getNextLine()
+    maptype, typename = processType(line);
+
+    offsetlist = addSimpleLayout(typename)
+    typename = 'Map_of_' + typename
+
+    structDef = structDef.replace('$name$', typename)
+    structDef = structDef.replace('$valuetype$', maptype)
+    structDef = structDef.replace('$offsetlist$', offsetlist)
+
+    line = getNextLine()
+    if line[0] != 'end': print 'error: expected end of map definition'
+    addStruct(typename, structDef)
+    return (typename,typename)
+    
+fixedTemplate = '''struct $name$ {
+    enum {
+        fixedSize = $N$
+    };
+
+    $name$() {
+        memset(value, 0, sizeof(value));
+    }
+    
+    uint8_t value[fixedSize];
+};
+
+template <typename Serializer>
+inline void serialize(Serializer &s, const $name$ &val, const boost::true_type &) {
+    s.writeFixed(val.value);
+}
+
+template <typename Parser>
+inline void parse(Parser &p, $name$ &val, const boost::true_type &) {
+    p.readFixed(val.value);
+}
+
+class $name$_Layout : public avro::CompoundLayout {
+  public:
+    $name$_Layout(size_t offset = 0) :
+        CompoundLayout(offset)
+    {
+        add(new avro::PrimitiveLayout(offset + offsetof($name$, value)));
+    }
+}; 
+'''
+
+def doFixed(args):
+    structDef = fixedTemplate
+    typename = args[1]
+    size = args[2]
+
+    line = getNextLine()
+    if line[0] != 'end': print 'error: expected end of fixed definition'
+
+    structDef = structDef.replace('$name$', typename)
+    structDef = structDef.replace('$N$', size)
+    addStruct(typename, structDef)
+    return (typename,typename)
+
+primitiveTemplate = '''struct $name$ {
+    $type$ value;
+};
+
+template <typename Serializer>
+inline void serialize(Serializer &s, const $name$ &val, const boost::true_type &) {
+    s.writeValue(val.value);
+}
+
+template <typename Parser>
+inline void parse(Parser &p, $name$ &val, const boost::true_type &) {
+    p.readValue(val.value);
+}
+
+class $name$_Layout : public avro::CompoundLayout {
+  public:
+    $name$_Layout(size_t offset = 0) :
+        CompoundLayout(offset)
+    {
+        add(new avro::PrimitiveLayout(offset + offsetof($name$, value)));
+    }
+}; 
+'''
+
+def doPrimitiveStruct(type):
+    structDef = primitiveTemplate
+    name =  type.capitalize()
+    structDef = structDef.replace('$name$', name);
+    structDef = structDef.replace('$type$', typeToC[type]);
+    addStruct(name, structDef)
+
+compoundBuilder= { 'record' : doRecord, 'union' : doUnion, 'enum' : doEnum, 
+'map' : doMap, 'array' : doArray, 'fixed' : doFixed, 'symbolic' : doSymbolic } 
+
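+# The generator consumes a flattened schema description on stdin: one
+# whitespace-separated directive per line, with compound types closed by an
+# 'end' line. A hedged example of the expected input for a simple record:
+#
+#   record MyRecord
+#   name myfield
+#   long
+#   end
+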
+def processType(inputs) :
+    type = inputs[0]
+    if typeToC.has_key(type) : 
+        result = doPrimitive(type)
+    else :
+        func = compoundBuilder[type]
+        result = func(inputs)
+    return result
+
+def generateCode() :
+    inputs = getNextLine()
+    type = inputs[0]
+    if typeToC.has_key(type) : 
+        doPrimitiveStruct(type)
+    else :
+        func = compoundBuilder[type]
+        func(inputs)
+
+def getNextLine():
+    try:
+        line = raw_input()
+    except EOFError:
+        line = ''
+        globals()["done"] = True
+
+    if line == '':
+        globals()["done"] = True
+    return line.split(' ')
+    
+def writeHeader(filebase, namespace):
+    headerstring = "%s_%s_hh__" % (namespace, filebase)
+
+    print license
+    print "#ifndef %s" % headerstring
+    print "#define %s" % headerstring 
+    print headers
+    print "namespace %s {\n" % namespace
+
+    for x in forwardDeclareList:
+        print "%s\n" % x
+
+    for x in structList:
+        print "/*----------------------------------------------------------------------------------*/\n"
+        print "%s\n" % x
+
+    print "\n} // namespace %s\n" % namespace
+
+    print "namespace avro {\n"
+    for x in structNames:
+        print 'template <> struct is_serializable<%s::%s> : public boost::true_type{};' % (namespace, x)
+
+    print "\n} // namespace avro\n"
+
+    print "#endif // %s" % headerstring
+
+
+def usage():
+    print "-h, --help            print this helpful message"
+    print "-i, --input=FILE      input file to read (default is stdin)"
+    print "-o, --output=PATH     output file to generate (default is stdout)"
+    print "-n, --namespace=LABEL namespace for schema (default is avrouser)"
+
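+# Example invocation (file names are hypothetical); the input is the
+# flattened schema dump described above, not raw JSON:
+#
+#   python gen-cppcode.py --input=schema.flat --namespace=myns --output=MyTypes.hh
+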
+if __name__ == "__main__":
+    from sys import argv
+    import getopt, re, sys
+
+    try:
+        opts, args = getopt.getopt(argv[1:], "hi:o:n:", ["help", "input=", "output=", "namespace="])
+
+    except getopt.GetoptError, err:
+        print str(err) 
+        usage()
+        sys.exit(2)
+
+    namespace = 'avrouser'
+
+    savein = sys.stdin              
+    saveout = sys.stdout              
+    inputFile = False
+    outputFile = False
+    outputFileBase = 'AvroGenerated'
+
+    for o, a in opts:
+        if o in ("-i", "--input"):
+            try:
+                inputFile = open(a, 'r')
+                sys.stdin = inputFile
+            except IOError:
+                print "Could not open file " + a
+                sys.exit() 
+        elif o in ("-o", "--output"):
+            try:
+                outputFile = open(a, 'w')
+                sys.stdout = outputFile
+            except IOError:
+                print "Could not open file " + a
+            outputFileBase = re.sub(r'\.(h|hh|hpp)$', '', a)  # strip a trailing .h/.hh/.hpp extension
+        elif o in ("-n", "--namespace"):
+            namespace = a
+        elif o in ("-h", "--help"):
+            usage()
+            sys.exit()
+        else:
+            print "Unhandled option: " + o
+            usage()
+            sys.exit()
+
+    generateCode()
+    writeHeader(outputFileBase, namespace)
+
+    sys.stdin = savein
+    sys.stdout = saveout
+    if inputFile:
+        inputFile.close()
+    if outputFile:
+        outputFile.close()
+
diff --git a/lang/c++/test/AvrogencppTests.cc b/lang/c++/test/AvrogencppTests.cc
new file mode 100644
index 0000000..2312676
--- /dev/null
+++ b/lang/c++/test/AvrogencppTests.cc
@@ -0,0 +1,271 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "empty_record.hh"
+#include "bigrecord.hh"
+#include "bigrecord_r.hh"
+#include "bigrecord2.hh"
+#include "tweet.hh"
+#include "union_array_union.hh"
+#include "union_map_union.hh"
+#include "union_conflict.hh"
+#include "recursive.hh"
+#include "circulardep.hh"
+#include "reuse.hh"
+#include "Compiler.hh"
+
+#include <fstream>
+#include <boost/test/included/unit_test_framework.hpp>
+
+#ifdef min
+#undef min
+#endif
+
+#ifdef max
+#undef max
+#endif
+
+
+using std::auto_ptr;
+using std::map;
+using std::string;
+using std::vector;
+using std::ifstream;
+
+using avro::ValidSchema;
+using avro::OutputStream;
+using avro::InputStream;
+using avro::Encoder;
+using avro::Decoder;
+using avro::EncoderPtr;
+using avro::DecoderPtr;
+using avro::memoryInputStream;
+using avro::memoryOutputStream;
+using avro::binaryEncoder;
+using avro::validatingEncoder;
+using avro::binaryDecoder;
+using avro::validatingDecoder;
+
+void setRecord(testgen::RootRecord &myRecord)
+{
+    uint8_t fixed[] =  {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15};
+
+    myRecord.mylong = 212;
+    myRecord.nestedrecord.inval1 = std::numeric_limits<double>::min();
+    myRecord.nestedrecord.inval2 = "hello world";
+    myRecord.nestedrecord.inval3 = std::numeric_limits<int32_t>::max();
+
+    myRecord.mymap["one"] = 100;
+    myRecord.mymap["two"] = 200;
+
+    myRecord.myarray.push_back(3434.9);
+    myRecord.myarray.push_back(7343.9);
+    myRecord.myarray.push_back(-63445.9);
+    myRecord.myenum = testgen::one;
+
+    map<string, int32_t> m;
+    m["one"] = 1;
+    m["two"] = 2;
+    myRecord.myunion.set_map(m);
+
+    vector<uint8_t> v;
+    v.push_back(1);
+    v.push_back(2);
+    myRecord.anotherunion.set_bytes(v);
+
+    myRecord.mybool = true;
+    myRecord.anothernested.inval1 = std::numeric_limits<double>::max();
+    myRecord.anothernested.inval2 = "goodbye world";
+    myRecord.anothernested.inval3 = std::numeric_limits<int32_t>::min();
+    memcpy(&myRecord.myfixed[0], fixed, myRecord.myfixed.size());
+    myRecord.anotherint = 4534;
+    myRecord.bytes.push_back(10);
+    myRecord.bytes.push_back(20);
+}
+
+template <typename T1, typename T2>
+void checkRecord(const T1& r1, const T2& r2)
+{
+    BOOST_CHECK_EQUAL(r1.mylong, r2.mylong);
+    BOOST_CHECK_EQUAL(r1.nestedrecord.inval1, r2.nestedrecord.inval1);
+    BOOST_CHECK_EQUAL(r1.nestedrecord.inval2, r2.nestedrecord.inval2);
+    BOOST_CHECK_EQUAL(r1.nestedrecord.inval3, r2.nestedrecord.inval3);
+    BOOST_CHECK(r1.mymap == r2.mymap);
+    BOOST_CHECK(r1.myarray == r2.myarray);
+    BOOST_CHECK_EQUAL(r1.myunion.idx(), r2.myunion.idx());
+    BOOST_CHECK(r1.myunion.get_map() == r2.myunion.get_map());
+    BOOST_CHECK_EQUAL(r1.anotherunion.idx(), r2.anotherunion.idx());
+    BOOST_CHECK(r1.anotherunion.get_bytes() == r2.anotherunion.get_bytes());
+    BOOST_CHECK_EQUAL(r1.mybool, r2.mybool);
+    BOOST_CHECK_EQUAL(r1.anothernested.inval1, r2.anothernested.inval1);
+    BOOST_CHECK_EQUAL(r1.anothernested.inval2, r2.anothernested.inval2);
+    BOOST_CHECK_EQUAL(r1.anothernested.inval3, r2.anothernested.inval3);
+    BOOST_CHECK_EQUAL_COLLECTIONS(r1.myfixed.begin(), r1.myfixed.end(),
+        r2.myfixed.begin(), r2.myfixed.end());
+    BOOST_CHECK_EQUAL(r1.anotherint, r2.anotherint);
+    BOOST_CHECK_EQUAL(r1.bytes.size(), r2.bytes.size());
+    BOOST_CHECK_EQUAL_COLLECTIONS(r1.bytes.begin(), r1.bytes.end(),
+        r2.bytes.begin(), r2.bytes.end());
+    BOOST_CHECK_EQUAL(r1.myenum, r2.myenum);
+}
+
+void checkDefaultValues(const testgen_r::RootRecord& r)
+{
+    BOOST_CHECK_EQUAL(r.withDefaultValue.s1, "sval");
+    BOOST_CHECK_EQUAL(r.withDefaultValue.i1, 99);
+    BOOST_CHECK_CLOSE(r.withDefaultValue.d1, 5.67, 1e-10);
+}
+
+
+void testEncoding()
+{
+    ValidSchema s;
+    ifstream ifs("jsonschemas/bigrecord");
+    compileJsonSchema(ifs, s);
+    auto_ptr<OutputStream> os = memoryOutputStream();
+    EncoderPtr e = validatingEncoder(s, binaryEncoder());
+    e->init(*os);
+    testgen::RootRecord t1;
+    setRecord(t1);
+    avro::encode(*e, t1);
+    e->flush();
+
+    DecoderPtr d = validatingDecoder(s, binaryDecoder());
+    auto_ptr<InputStream> is = memoryInputStream(*os);
+    d->init(*is);
+    testgen::RootRecord t2;
+    avro::decode(*d, t2);
+
+    checkRecord(t2, t1);
+}
+
+void testResolution()
+{
+    ValidSchema s_w;
+    ifstream ifs_w("jsonschemas/bigrecord");
+    compileJsonSchema(ifs_w, s_w);
+    auto_ptr<OutputStream> os = memoryOutputStream();
+    EncoderPtr e = validatingEncoder(s_w, binaryEncoder());
+    e->init(*os);
+    testgen::RootRecord t1;
+    setRecord(t1);
+    avro::encode(*e, t1);
+    e->flush();
+
+    ValidSchema s_r;
+    ifstream ifs_r("jsonschemas/bigrecord_r");
+    compileJsonSchema(ifs_r, s_r);
+    DecoderPtr dd = binaryDecoder();
+    auto_ptr<InputStream> is = memoryInputStream(*os);
+    dd->init(*is);
+    DecoderPtr rd = resolvingDecoder(s_w, s_r, dd);
+    testgen_r::RootRecord t2;
+    avro::decode(*rd, t2);
+
+    checkRecord(t2, t1);
+    checkDefaultValues(t2);
+
+    // Re-use the resolving decoder to decode again.
+    auto_ptr<InputStream> is1 = memoryInputStream(*os);
+    rd->init(*is1);
+    testgen_r::RootRecord t3;
+    avro::decode(*rd, t3);
+    checkRecord(t3, t1);
+    checkDefaultValues(t3);
+}
+
+void testNamespace()
+{
+    ValidSchema s;
+    ifstream ifs("jsonschemas/tweet");
+    // basic compilation should work
+    compileJsonSchema(ifs, s);
+    // an AvroPoint was defined and then referred to from within a namespace
+    testgen3::AvroPoint point;
+    point.latitude = 42.3570;
+    point.longitude = -71.1109;
+    // set it in something that referred to it in the schema
+    testgen3::_tweet_Union__1__ twPoint;
+    twPoint.set_AvroPoint(point);
+}
+
+// The four overloads below are intentionally empty: for the
+// union_array_union (uau) and union_map_union (umu) records, testEncoding2
+// only verifies that encoding and decoding complete without throwing.
+void setRecord(uau::r1& r)
+{
+}
+
+void check(const uau::r1& r1, const uau::r1& r2)
+{
+}
+
+void setRecord(umu::r1& r)
+{
+}
+
+void check(const umu::r1& r1, const umu::r1& r2)
+{
+}
+
+template <typename T> struct schemaFilename { };
+template <> struct schemaFilename<uau::r1> {
+    static const char value[];
+};
+const char schemaFilename<uau::r1>::value[] = "jsonschemas/union_array_union";
+template <> struct schemaFilename<umu::r1> {
+    static const char value[];
+};
+const char schemaFilename<umu::r1>::value[] = "jsonschemas/union_map_union";
+
+template<typename T>
+void testEncoding2()
+{
+    ValidSchema s;
+    ifstream ifs(schemaFilename<T>::value);
+    compileJsonSchema(ifs, s);
+
+    auto_ptr<OutputStream> os = memoryOutputStream();
+    EncoderPtr e = validatingEncoder(s, binaryEncoder());
+    e->init(*os);
+    T t1;
+    setRecord(t1);
+    avro::encode(*e, t1);
+    e->flush();
+
+    DecoderPtr d = validatingDecoder(s, binaryDecoder());
+    auto_ptr<InputStream> is = memoryInputStream(*os);
+    d->init(*is);
+    T t2;
+    avro::decode(*d, t2);
+
+    check(t2, t1);
+}
+
+boost::unit_test::test_suite*
+init_unit_test_suite(int argc, char* argv[])
+{
+    boost::unit_test::test_suite* ts = BOOST_TEST_SUITE("Code generator tests");
+    ts->add(BOOST_TEST_CASE(testEncoding));
+    ts->add(BOOST_TEST_CASE(testResolution));
+    ts->add(BOOST_TEST_CASE(testEncoding2<uau::r1>));
+    ts->add(BOOST_TEST_CASE(testEncoding2<umu::r1>));
+    ts->add(BOOST_TEST_CASE(testNamespace));
+    return ts;
+}
+
diff --git a/lang/c++/test/CodecTests.cc b/lang/c++/test/CodecTests.cc
new file mode 100644
index 0000000..61d29a2
--- /dev/null
+++ b/lang/c++/test/CodecTests.cc
@@ -0,0 +1,1544 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+
+#include "Encoder.hh"
+#include "Decoder.hh"
+#include "Compiler.hh"
+#include "ValidSchema.hh"
+#include "Generic.hh"
+#include "Specific.hh"
+
+#include <stdint.h>
+#include <vector>
+#include <stack>
+#include <string>
+#include <functional>
+#include <boost/bind.hpp>
+
+#include <boost/test/included/unit_test_framework.hpp>
+#include <boost/test/unit_test.hpp>
+#include <boost/test/parameterized_test.hpp>
+#include <boost/random/mersenne_twister.hpp>
+#include <boost/math/special_functions/fpclassify.hpp>
+
+namespace avro {
+
+/*
+void dump(const OutputStream& os)
+{
+    std::auto_ptr<InputStream> in = memoryInputStream(os);
+    const char *b;
+    size_t n;
+    std::cout << os.byteCount() << std::endl;
+    while (in->next(reinterpret_cast<const uint8_t**>(&b), &n)) {
+        std::cout << std::string(b, n);
+    }
+    std::cout << std::endl;
+}
+*/
+
+namespace parsing {
+
+static const unsigned int count = 10;
+
+/**
+ * A bunch of tests that share quite a lot of infrastructure between them.
+ * The basic idea is to generate avro data according to a schema and
+ * then read it back and compare it with the original. But quite a few
+ * variations are possible:
+ * 1. While reading back, one can skip different data elements.
+ * 2. While reading, resolve against a reader's schema. The resolver may
+ * promote a data type, convert between a union and a plain data type,
+ * insert or remove fields in records, or reorder the fields of a record.
+ *
+ * To test the Json encoder and decoder, we use the same technique with only
+ * one difference - we use JsonEncoder and JsonDecoder.
+ *
+ * We also use the same infrastructure to test GenericReader and GenericWriter.
+ * In this case, avro binary is generated in the standard way. It is read
+ * into a GenericDatum, which in turn is written out. This newly serialized
+ * data is decoded in the standard way to check that it matches what was
+ * written. The last step won't work if the schema for reading differs from
+ * the schema for writing, because any reordering of fields would have been
+ * fixed by the GenericDatum's decoding and encoding step.
+ *
+ * For most tests, the data is generated at random.
+ */
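+
+/*
+ * Illustrative sketch (editorial note, not itself one of the tests): a
+ * single round trip through the helpers defined below, for the calls
+ * "[c1sI]", would look like
+ *
+ *   vector<string> v = randomValues("[c1sI]");
+ *   auto_ptr<OutputStream> p = generate(*binaryEncoder(), "[c1sI]", v);
+ *   auto_ptr<InputStream> in = memoryInputStream(*p);
+ *   DecoderPtr d = binaryDecoder();
+ *   d->init(*in);
+ *   check(*d, 2, "[c1sI]", v);  // skip-level 2: decode and compare everything
+ *
+ * Lower skip-levels make check() skip containers and string-like values
+ * (skipArray(), skipMap(), skipString(), ...) instead of decoding them.
+ */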
+
+using std::string;
+using std::vector;
+using std::stack;
+using std::pair;
+using std::make_pair;
+using std::istringstream;
+using std::ostringstream;
+using std::back_inserter;
+using std::copy;
+using std::auto_ptr;
+
+template <typename T>
+T from_string(const std::string& s)
+{
+    istringstream iss(s);
+    T result;
+    iss >> result;
+    return result;
+}
+
+template <>
+vector<uint8_t> from_string(const std::string& s)
+{
+    vector<uint8_t> result;
+    result.reserve(s.size());
+    copy(s.begin(), s.end(), back_inserter(result));
+    return result;
+}
+
+template <typename T>
+std::string to_string(const T& t)
+{
+    ostringstream oss;
+    oss << t;
+    return oss.str();
+}
+
+template <>
+std::string to_string(const vector<uint8_t>& t)
+{
+    string result;
+    copy(t.begin(), t.end(), back_inserter(result));
+    return result;
+}
+
+class Scanner {
+    const char *p;
+    const char * const end;
+public:
+    Scanner(const char* calls) : p(calls), end(calls + strlen(calls)) { }
+    Scanner(const char* calls, size_t len) : p(calls), end(calls + len) { }
+    char advance() {
+        return *p++;
+    }
+
+    int extractInt() {
+        int result = 0;
+        while (p < end) {
+            if (isdigit(*p)) {
+                result *= 10;
+                result += *p++ - '0';
+            } else {
+                break;
+            }
+        }
+        return result;
+    }
+
+    bool isDone() const { return p == end; }
+};
+
+boost::mt19937 rnd;
+
+static string randomString(size_t len)
+{
+    std::string result;
+    result.reserve(len + 1);
+    for (size_t i = 0; i < len; ++i) {
+        char c = static_cast<char>(rnd()) & 0x7f;
+        if (c == '\0') {
+            c = '\x7f';
+        }
+        result.push_back(c);
+    }
+    return result;
+}
+
+static vector<uint8_t> randomBytes(size_t len)
+{
+    vector<uint8_t> result;
+    result.reserve(len);
+    for (size_t i = 0; i < len; ++i) {
+        result.push_back(rnd());
+    }
+    return result;
+}
+
+static vector<string> randomValues(const char* calls)
+{
+    Scanner sc(calls);
+    vector<string> result;
+    while (! sc.isDone()) {
+        char c = sc.advance();
+        switch (c) {
+        case 'B':
+            result.push_back(to_string(rnd() % 2 == 0));
+            break;
+        case 'I':
+            result.push_back(to_string(static_cast<int32_t>(rnd())));
+            break;
+        case 'L':
+            result.push_back(to_string(rnd() | static_cast<int64_t>(rnd()) << 32));
+            break;
+        case 'F':
+            result.push_back(
+                to_string(static_cast<float>(rnd()) / static_cast<float>(rnd())));
+            break;
+        case 'D':
+            result.push_back(
+                to_string(static_cast<double>(rnd()) / static_cast<double>(rnd())));
+            break;
+        case 'S':
+        case 'K':
+            result.push_back(to_string(randomString(sc.extractInt())));
+            break;
+        case 'b':
+        case 'f':
+            result.push_back(to_string(randomBytes(sc.extractInt())));
+            break;
+        case 'e':
+        case 'c':
+        case 'U':
+            sc.extractInt();
+            break;
+        case 'N':
+        case '[':
+        case ']':
+        case '{':
+        case '}':
+        case 's':
+            break;
+        default:
+            BOOST_FAIL("Unknown mnemonic: " << c);
+        }
+    }
+    return result;
+}
+
+static auto_ptr<OutputStream> generate(Encoder& e, const char* calls,
+    const vector<string>& values)
+{
+    Scanner sc(calls);
+    vector<string>::const_iterator it = values.begin();
+    auto_ptr<OutputStream> ob = memoryOutputStream();
+    e.init(*ob);
+
+    while (! sc.isDone()) {
+        char c = sc.advance();
+
+        switch (c) {
+        case 'N':
+            e.encodeNull();
+            break;
+        case 'B':
+            e.encodeBool(from_string<bool>(*it++));
+            break;
+        case 'I':
+            e.encodeInt(from_string<int32_t>(*it++));
+            break;
+        case 'L':
+            e.encodeLong(from_string<int64_t>(*it++));
+            break;
+        case 'F':
+            e.encodeFloat(from_string<float>(*it++));
+            break;
+        case 'D':
+            e.encodeDouble(from_string<double>(*it++));
+            break;
+        case 'S':
+        case 'K':
+            sc.extractInt();
+            e.encodeString(from_string<string>(*it++));
+            break;
+        case 'b':
+            sc.extractInt();
+            e.encodeBytes(from_string<vector<uint8_t> >(*it++));
+            break;
+        case 'f':
+            sc.extractInt();
+            e.encodeFixed(from_string<vector<uint8_t> >(*it++));
+            break;
+        case 'e':
+            e.encodeEnum(sc.extractInt());
+            break;
+        case '[':
+            e.arrayStart();
+            break;
+        case ']':
+            e.arrayEnd();
+            break;
+        case '{':
+            e.mapStart();
+            break;
+        case '}':
+            e.mapEnd();
+            break;
+        case 'c':
+            e.setItemCount(sc.extractInt());
+            break;
+        case 's':
+            e.startItem();
+            break;
+        case 'U':
+            e.encodeUnionIndex(sc.extractInt());
+            break;
+        default:
+            BOOST_FAIL("Unknown mnemonic: " << c);
+        }
+    }
+    e.flush();
+    return ob;
+}
+
+namespace {
+struct StackElement {
+    size_t size;
+    size_t count;
+    bool isArray;
+    StackElement(size_t s, bool a) : size(s), count(0), isArray(a) { }
+};
+}
+
+static vector<string>::const_iterator skipCalls(Scanner& sc, Decoder& d,
+    vector<string>::const_iterator it, bool isArray)
+{
+    char end = isArray ? ']' : '}';
+    int level = 0;
+    while (! sc.isDone()) {
+        char c = sc.advance();
+        switch (c) {
+        case '[':
+        case '{':
+            ++level;
+            break;
+        case ']':
+        case '}':
+            if (c == end && level == 0) {
+                return it;
+            }
+            --level;
+            break;
+        case 'B':
+        case 'I':
+        case 'L':
+        case 'F':
+        case 'D':
+            ++it;
+            break;
+        case 'S':
+        case 'K':
+        case 'b':
+        case 'f':
+        case 'e':
+            ++it;       // Fall through.
+        case 'c':
+        case 'U':
+            sc.extractInt();
+            break;
+        case 's':
+        case 'N':
+        case 'R':
+            break;
+        default:
+            BOOST_FAIL("Don't know how to skip: " << c);
+        }
+    }
+    BOOST_FAIL("End reached while trying to skip");
+    throw 0;    // Just to keep the compiler happy.
+}
+
+static void check(Decoder& d, unsigned int skipLevel,
+    const char* calls, const vector<string>& values)
+{
+    const size_t zero = 0;
+    Scanner sc(calls);
+    stack<StackElement> containerStack;
+    vector<string>::const_iterator it = values.begin();
+    while (! sc.isDone()) {
+        char c = sc.advance();
+        switch (c) {
+        case 'N':
+            d.decodeNull();
+            break;
+        case 'B':
+            {
+                bool b1 = d.decodeBool();
+                bool b2 = from_string<bool>(*it++);
+                BOOST_CHECK_EQUAL(b1, b2);
+            }
+            break;
+        case 'I':
+            {
+                int32_t b1 = d.decodeInt();
+                int32_t b2 = from_string<int32_t>(*it++);
+                BOOST_CHECK_EQUAL(b1, b2);
+            }
+            break;
+        case 'L':
+            {
+                int64_t b1 = d.decodeLong();
+                int64_t b2 = from_string<int64_t>(*it++);
+                BOOST_CHECK_EQUAL(b1, b2);
+            }
+            break;
+        case 'F':
+            {
+                float b1 = d.decodeFloat();
+                float b2 = from_string<float>(*it++);
+                BOOST_CHECK_CLOSE(b1, b2, 0.001f);
+            }
+            break;
+        case 'D':
+            {
+                double b1 = d.decodeDouble();
+                double b2 = from_string<double>(*it++);
+                BOOST_CHECK_CLOSE(b1, b2, 0.001f);
+            }
+            break;
+        case 'S':
+        case 'K':
+            sc.extractInt();
+            if (containerStack.size() >= skipLevel) {
+                d.skipString();
+            } else {
+                string b1 = d.decodeString();
+                string b2 = from_string<string>(*it);
+                BOOST_CHECK_EQUAL(b1, b2);
+            }
+            ++it;
+            break;
+        case 'b':
+            sc.extractInt();
+            if (containerStack.size() >= skipLevel) {
+                d.skipBytes();
+            } else {
+                vector<uint8_t> b1 = d.decodeBytes();
+                vector<uint8_t> b2 = from_string<vector<uint8_t> >(*it);
+                BOOST_CHECK_EQUAL_COLLECTIONS(b1.begin(), b1.end(),
+                    b2.begin(), b2.end());
+            }
+            ++it;
+            break;
+        case 'f':
+            {
+                size_t len = sc.extractInt();
+                if (containerStack.size() >= skipLevel) {
+                    d.skipFixed(len);
+                } else {
+                    vector<uint8_t> b1 = d.decodeFixed(len);
+                    vector<uint8_t> b2 = from_string<vector<uint8_t> >(*it);
+                    BOOST_CHECK_EQUAL_COLLECTIONS(b1.begin(), b1.end(),
+                        b2.begin(), b2.end());
+                }
+            }
+            ++it;
+            break;
+        case 'e':
+            {
+                size_t b1 = d.decodeEnum();
+                size_t b2 = sc.extractInt();
+                BOOST_CHECK_EQUAL(b1, b2);
+            }
+            break;
+        case '[':
+            if (containerStack.size() >= skipLevel) {
+                size_t n = d.skipArray();
+                if (n == 0) {
+                    it = skipCalls(sc, d, it, true);
+                } else {
+                    containerStack.push(StackElement(n, true));
+                }
+            } else {
+                containerStack.push(StackElement(d.arrayStart(), true));
+            }
+            break;
+        case '{':
+            if (containerStack.size() >= skipLevel) {
+                size_t n = d.skipMap();
+                if (n == 0) {
+                    it = skipCalls(sc, d, it, false);
+                } else {
+                    containerStack.push(StackElement(n, false));
+                }
+            } else {
+                containerStack.push(StackElement(d.mapStart(), false));
+            }
+            break;
+        case ']':
+            {
+                const StackElement& se = containerStack.top();
+                BOOST_CHECK_EQUAL(se.size, se.count);
+                if (se.size != 0) {
+                    BOOST_CHECK_EQUAL(zero, d.arrayNext());
+                }
+                containerStack.pop();
+            }
+            break;
+        case '}':
+            {
+                const StackElement& se = containerStack.top();
+                BOOST_CHECK_EQUAL(se.size, se.count);
+                if (se.size != 0) {
+                    BOOST_CHECK_EQUAL(zero, d.mapNext());
+                }
+                containerStack.pop();
+            }
+            break;
+        case 's':
+            {
+                StackElement& se = containerStack.top();
+                if (se.size == se.count) {
+                    se.size += (se.isArray ?
+                        d.arrayNext() : d.mapNext());
+                }
+                ++se.count;
+            }
+            break;
+        case 'c':
+            sc.extractInt();
+            break;
+        case 'U':
+            {
+                size_t idx = sc.extractInt();
+                BOOST_CHECK_EQUAL(idx, d.decodeUnionIndex());
+            }
+            break;
+        case 'R':
+            static_cast<ResolvingDecoder&>(d).fieldOrder();
+            continue;
+        default:
+            BOOST_FAIL("Unknown mnemonic: " << c);
+        }
+    }
+    BOOST_CHECK(it == values.end());
+}
+
+ValidSchema makeValidSchema(const char* schema)
+{
+    istringstream iss(schema);
+    ValidSchema vs;
+    compileJsonSchema(iss, vs);
+    return ValidSchema(vs);
+}
+
+void testEncoder(const EncoderPtr& e, const char* writerCalls,
+    vector<string>& v, auto_ptr<OutputStream>& p)
+{
+    v = randomValues(writerCalls);
+    p = generate(*e, writerCalls, v);
+}
+
+static void testDecoder(const DecoderPtr& d, 
+    const vector<string>& values, InputStream& data,
+    const char* readerCalls, unsigned int skipLevel)
+{
+    d->init(data);
+    check(*d, skipLevel, readerCalls, values);
+}
+
+/**
+ * The first member is a schema.
+ * The second one is a sequence of (single character) mnemonics:
+ * N  null
+ * B  boolean
+ * I  int
+ * L  long
+ * F  float
+ * D  double
+ * K followed by integer - key-name (and its length) in a map
+ * S followed by integer - string and its length
+ * b followed by integer - bytes and length
+ * f followed by integer - fixed and length
+ * c followed by integer - number of items to follow in an array/map
+ * U followed by integer - Union and its branch
+ * e followed by integer - Enum and its value
+ * [  Start array
+ * ]  End array
+ * {  Start map
+ * }  End map
+ * s  start item
+ * R  Start of record in resolving situations. Client may call fieldOrder()
+ */
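+
+/*
+ * For example, in the test data below the schema
+ * {"type":"array", "items":"int"} is paired with the calls "[c2sIsI]":
+ * start an array, announce a block of two items (c2), and encode an int (I)
+ * after each item marker (s).
+ */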
+
+struct TestData {
+    const char* schema;
+    const char* calls;
+    unsigned int depth;
+};
+
+struct TestData2 {
+    const char* schema;
+    const char* correctCalls;
+    const char* incorrectCalls;
+    unsigned int depth;
+};
+
+struct TestData3 {
+    const char* writerSchema;
+    const char* writerCalls;
+    const char* readerSchema;
+    const char* readerCalls;
+    unsigned int depth;
+};
+
+struct TestData4 {
+    const char* writerSchema;
+    const char* writerCalls;
+    const char* writerValues[100];
+    const char* readerSchema;
+    const char* readerCalls;
+    const char* readerValues[100];
+    unsigned int depth;
+};
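+
+// writerValues and readerValues are NULL-terminated arrays; mkValues()
+// below converts them into vector<string> for the encoder and decoder.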
+
+template<typename CodecFactory>
+void testCodec(const TestData& td) {
+    static int testNo = 0;
+    testNo++;
+
+    ValidSchema vs = makeValidSchema(td.schema);
+
+    for (unsigned int i = 0; i < count; ++i) {
+        vector<string> v;
+        auto_ptr<OutputStream> p;
+        testEncoder(CodecFactory::newEncoder(vs), td.calls, v, p);
+        // dump(*p);
+
+        for (unsigned int i = 0; i <= td.depth; ++i) {
+            unsigned int skipLevel = td.depth - i;
+            /*
+            std::cout << "Test: " << testNo << ' '
+                << " schema: " << td.schema
+                << " calls: " << td.calls
+                << " skip-level: " << skipLevel << std::endl;
+                */
+            BOOST_TEST_CHECKPOINT("Test: " << testNo << ' '
+                << " schema: " << td.schema
+                << " calls: " << td.calls
+                << " skip-level: " << skipLevel);
+            auto_ptr<InputStream> in = memoryInputStream(*p);
+            testDecoder(CodecFactory::newDecoder(vs), v, *in,
+                td.calls, skipLevel);
+        }
+    }
+}
+
+template<typename CodecFactory>
+void testCodecResolving(const TestData3& td) {
+    static int testNo = 0;
+    testNo++;
+
+    BOOST_TEST_CHECKPOINT("Test: " << testNo << ' '
+        << " writer schema: " << td.writerSchema
+        << " writer calls: " << td.writerCalls
+        << " reader schema: " << td.readerSchema
+        << " reader calls: " << td.readerCalls);
+
+    ValidSchema vs = makeValidSchema(td.writerSchema);
+
+    for (unsigned int i = 0; i < count; ++i) {
+        vector<string> v;
+        auto_ptr<OutputStream> p;
+        testEncoder(CodecFactory::newEncoder(vs), td.writerCalls, v, p);
+        // dump(*p);
+
+        ValidSchema rvs = makeValidSchema(td.readerSchema);
+        for (unsigned int i = 0; i <= td.depth; ++i) {
+            unsigned int skipLevel = td.depth - i;
+            BOOST_TEST_CHECKPOINT("Test: " << testNo << ' '
+                << " writer schema: " << td.writerSchema
+                << " writer calls: " << td.writerCalls
+                << " reader schema: " << td.readerSchema
+                << " reader calls: " << td.readerCalls
+                << " skip-level: " << skipLevel);
+            auto_ptr<InputStream> in = memoryInputStream(*p);
+            testDecoder(CodecFactory::newDecoder(vs, rvs), v, *in,
+                td.readerCalls, skipLevel);
+        }
+    }
+}
+
+static vector<string> mkValues(const char* const values[])
+{
+    vector<string> result;
+    for (const char* const* p = values; *p; ++p) {
+        result.push_back(*p);
+    }
+    return result;
+}
+
+template<typename CodecFactory>
+void testCodecResolving2(const TestData4& td) {
+    static int testNo = 0;
+    testNo++;
+
+    BOOST_TEST_CHECKPOINT("Test: " << testNo << ' '
+        << " writer schema: " << td.writerSchema
+        << " writer calls: " << td.writerCalls
+        << " reader schema: " << td.readerSchema
+        << " reader calls: " << td.readerCalls);
+
+    ValidSchema vs = makeValidSchema(td.writerSchema);
+
+    vector<string> wd = mkValues(td.writerValues);
+    auto_ptr<OutputStream> p =
+        generate(*CodecFactory::newEncoder(vs), td.writerCalls, wd);
+    // dump(*p);
+
+    ValidSchema rvs = makeValidSchema(td.readerSchema);
+    vector<string> rd = mkValues(td.readerValues);
+    for (unsigned int i = 0; i <= td.depth; ++i) {
+        unsigned int skipLevel = td.depth - i;
+        BOOST_TEST_CHECKPOINT("Test: " << testNo << ' '
+            << " writer schema: " << td.writerSchema
+            << " writer calls: " << td.writerCalls
+            << " reader schema: " << td.readerSchema
+            << " reader calls: " << td.readerCalls
+            << " skip-level: " << skipLevel);
+        auto_ptr<InputStream> in = memoryInputStream(*p);
+        testDecoder(CodecFactory::newDecoder(vs, rvs), rd, *in,
+            td.readerCalls, skipLevel);
+    }
+}
+
+template<typename CodecFactory>
+void testReaderFail(const TestData2& td) {
+    static int testNo = 0;
+    testNo++;
+    BOOST_TEST_CHECKPOINT("Test: " << testNo << ' '
+        << " schema: " << td.schema
+        << " correctCalls: " << td.correctCalls
+        << " incorrectCalls: " << td.incorrectCalls
+        << " skip-level: " << td.depth);
+    ValidSchema vs = makeValidSchema(td.schema);
+
+    vector<string> v;
+    auto_ptr<OutputStream> p;
+    testEncoder(CodecFactory::newEncoder(vs), td.correctCalls, v, p);
+    auto_ptr<InputStream> in = memoryInputStream(*p);
+    BOOST_CHECK_THROW(
+        testDecoder(CodecFactory::newDecoder(vs), v, *in,
+            td.incorrectCalls, td.depth), Exception);
+}
+
+template<typename CodecFactory>
+void testWriterFail(const TestData2& td) {
+    static int testNo = 0;
+    testNo++;
+    BOOST_TEST_CHECKPOINT("Test: " << testNo << ' '
+        << " schema: " << td.schema
+        << " incorrectCalls: " << td.incorrectCalls);
+    ValidSchema vs = makeValidSchema(td.schema);
+
+    vector<string> v;
+    auto_ptr<OutputStream> p;
+    BOOST_CHECK_THROW(testEncoder(CodecFactory::newEncoder(vs),
+        td.incorrectCalls, v, p), Exception);
+}
+
+template<typename CodecFactory>
+void testGeneric(const TestData& td) {
+    static int testNo = 0;
+    testNo++;
+    BOOST_TEST_CHECKPOINT("Test: " << testNo << ' '
+        << " schema: " << td.schema
+        << " calls: " << td.calls);
+
+    ValidSchema vs = makeValidSchema(td.schema);
+
+    for (unsigned int i = 0; i < count; ++i) {
+        vector<string> v;
+        auto_ptr<OutputStream> p;
+        testEncoder(CodecFactory::newEncoder(vs), td.calls, v, p);
+        // dump(*p);
+        DecoderPtr d1 = CodecFactory::newDecoder(vs);
+        auto_ptr<InputStream> in1 = memoryInputStream(*p);
+        d1->init(*in1);
+        GenericDatum datum(vs);
+        avro::decode(*d1, datum);
+
+        EncoderPtr e2 = CodecFactory::newEncoder(vs);
+        auto_ptr<OutputStream> ob = memoryOutputStream();
+        e2->init(*ob);
+
+        avro::encode(*e2, datum);
+        e2->flush();
+
+        BOOST_TEST_CHECKPOINT("Test: " << testNo << ' '
+            << " schema: " << td.schema
+            << " calls: " << td.calls);
+        auto_ptr<InputStream> in2 = memoryInputStream(*ob);
+        testDecoder(CodecFactory::newDecoder(vs), v, *in2,
+            td.calls, td.depth);
+    }
+}
+
+template<typename CodecFactory>
+void testGenericResolving(const TestData3& td) {
+    static int testNo = 0;
+    testNo++;
+
+    BOOST_TEST_CHECKPOINT("Test: " << testNo << ' '
+        << " writer schema: " << td.writerSchema
+        << " writer calls: " << td.writerCalls
+        << " reader schema: " << td.readerSchema
+        << " reader calls: " << td.readerCalls);
+
+    ValidSchema wvs = makeValidSchema(td.writerSchema);
+    ValidSchema rvs = makeValidSchema(td.readerSchema);
+
+    for (unsigned int i = 0; i < count; ++i) {
+        vector<string> v;
+        auto_ptr<OutputStream> p;
+        testEncoder(CodecFactory::newEncoder(wvs), td.writerCalls, v, p);
+        // dump(*p);
+        DecoderPtr d1 = CodecFactory::newDecoder(wvs);
+        auto_ptr<InputStream> in1 = memoryInputStream(*p);
+        d1->init(*in1);
+
+        GenericReader gr(wvs, rvs, d1);
+        GenericDatum datum;
+        gr.read(datum);
+
+        EncoderPtr e2 = CodecFactory::newEncoder(rvs);
+        auto_ptr<OutputStream> ob = memoryOutputStream();
+        e2->init(*ob);
+        avro::encode(*e2, datum);
+        e2->flush();
+
+        BOOST_TEST_CHECKPOINT("Test: " << testNo << ' '
+            << " writer-schemai " << td.writerSchema
+            << " writer-calls: " << td.writerCalls 
+            << " reader-schema: " << td.readerSchema
+            << " calls: " << td.readerCalls);
+        auto_ptr<InputStream> in2 = memoryInputStream(*ob);
+        testDecoder(CodecFactory::newDecoder(rvs), v, *in2,
+            td.readerCalls, td.depth);
+    }
+}
+
+template<typename CodecFactory>
+void testGenericResolving2(const TestData4& td) {
+    static int testNo = 0;
+    testNo++;
+
+    BOOST_TEST_CHECKPOINT("Test: " << testNo << ' '
+        << " writer schema: " << td.writerSchema
+        << " writer calls: " << td.writerCalls
+        << " reader schema: " << td.readerSchema
+        << " reader calls: " << td.readerCalls);
+
+    ValidSchema wvs = makeValidSchema(td.writerSchema);
+    ValidSchema rvs = makeValidSchema(td.readerSchema);
+
+    const vector<string> wd = mkValues(td.writerValues);
+
+    auto_ptr<OutputStream> p = generate(*CodecFactory::newEncoder(wvs),
+        td.writerCalls, wd);
+    // dump(*p);
+    DecoderPtr d1 = CodecFactory::newDecoder(wvs);
+    auto_ptr<InputStream> in1 = memoryInputStream(*p);
+    d1->init(*in1);
+
+    GenericReader gr(wvs, rvs, d1);
+    GenericDatum datum;
+    gr.read(datum);
+
+    EncoderPtr e2 = CodecFactory::newEncoder(rvs);
+    auto_ptr<OutputStream> ob = memoryOutputStream();
+    e2->init(*ob);
+    avro::encode(*e2, datum);
+    e2->flush();
+    // We cannot verify with the reader calls because they are for
+    // the resolving decoder and hence could be in a different order than
+    // the "normal" data.
+}
+
+
+static const TestData data[] = {
+    { "\"null\"", "N", 1 },
+    { "\"boolean\"", "B", 1 },
+    { "\"int\"", "I", 1 },
+    { "\"long\"", "L", 1 },
+    { "\"float\"", "F", 1 },
+    { "\"double\"", "D", 1 },
+    { "\"string\"", "S0", 1 },
+    { "\"string\"", "S10", 1 },
+    { "\"bytes\"", "b0", 1 },
+    { "\"bytes\"", "b10", 1 },
+
+    { "{\"type\":\"fixed\", \"name\":\"fi\", \"size\": 1}", "f1", 1 },
+    { "{\"type\":\"fixed\", \"name\":\"fi\", \"size\": 10}", "f10", 1 },
+    { "{\"type\":\"enum\", \"name\":\"en\", \"symbols\":[\"v1\", \"v2\"]}",
+        "e1", 1 },
+
+    { "{\"type\":\"array\", \"items\": \"boolean\"}", "[]", 2 },
+    { "{\"type\":\"array\", \"items\": \"int\"}", "[]", 2 },
+    { "{\"type\":\"array\", \"items\": \"long\"}", "[]", 2 },
+    { "{\"type\":\"array\", \"items\": \"float\"}", "[]", 2 },
+    { "{\"type\":\"array\", \"items\": \"double\"}", "[]", 2 },
+    { "{\"type\":\"array\", \"items\": \"string\"}", "[]", 2 },
+    { "{\"type\":\"array\", \"items\": \"bytes\"}", "[]", 2 },
+    { "{\"type\":\"array\", \"items\":{\"type\":\"fixed\", "
+      "\"name\":\"fi\", \"size\": 10}}", "[]", 2 },
+
+    { "{\"type\":\"array\", \"items\": \"boolean\"}", "[c1sB]", 2 },
+    { "{\"type\":\"array\", \"items\": \"int\"}", "[c1sI]", 2 },
+    { "{\"type\":\"array\", \"items\": \"long\"}", "[c1sL]", 2 },
+    { "{\"type\":\"array\", \"items\": \"float\"}", "[c1sF]", 2 },
+    { "{\"type\":\"array\", \"items\": \"double\"}", "[c1sD]", 2 },
+    { "{\"type\":\"array\", \"items\": \"string\"}", "[c1sS10]", 2 },
+    { "{\"type\":\"array\", \"items\": \"bytes\"}", "[c1sb10]", 2 },
+    { "{\"type\":\"array\", \"items\": \"int\"}", "[c1sIc1sI]", 2 },
+    { "{\"type\":\"array\", \"items\": \"int\"}", "[c2sIsI]", 2 },
+    { "{\"type\":\"array\", \"items\":{\"type\":\"fixed\", "
+      "\"name\":\"fi\", \"size\": 10}}", "[c2sf10sf10]", 2 },
+
+    { "{\"type\":\"map\", \"values\": \"boolean\"}", "{}", 2 },
+    { "{\"type\":\"map\", \"values\": \"int\"}", "{}", 2 },
+    { "{\"type\":\"map\", \"values\": \"long\"}", "{}", 2 },
+    { "{\"type\":\"map\", \"values\": \"float\"}", "{}", 2 },
+    { "{\"type\":\"map\", \"values\": \"double\"}", "{}", 2 },
+    { "{\"type\":\"map\", \"values\": \"string\"}", "{}", 2 },
+    { "{\"type\":\"map\", \"values\": \"bytes\"}", "{}", 2 },
+    { "{\"type\":\"map\", \"values\": "
+      "{\"type\":\"array\", \"items\":\"int\"}}", "{}", 2 },
+
+    { "{\"type\":\"map\", \"values\": \"boolean\"}", "{c1sK5B}", 2 },
+    { "{\"type\":\"map\", \"values\": \"int\"}", "{c1sK5I}", 2 },
+    { "{\"type\":\"map\", \"values\": \"long\"}", "{c1sK5L}", 2 },
+    { "{\"type\":\"map\", \"values\": \"float\"}", "{c1sK5F}", 2 },
+    { "{\"type\":\"map\", \"values\": \"double\"}", "{c1sK5D}", 2 },
+    { "{\"type\":\"map\", \"values\": \"string\"}", "{c1sK5S10}", 2 },
+    { "{\"type\":\"map\", \"values\": \"bytes\"}", "{c1sK5b10}", 2 },
+    { "{\"type\":\"map\", \"values\": "
+      "{\"type\":\"array\", \"items\":\"int\"}}", "{c1sK5[c3sIsIsI]}", 2 },
+
+    { "{\"type\":\"map\", \"values\": \"boolean\"}",
+        "{c1sK5Bc2sK5BsK5B}", 2 },
+
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f\", \"type\":\"boolean\"}]}", "B", 1 },
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f\", \"type\":\"int\"}]}", "I", 1 },
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f\", \"type\":\"long\"}]}", "L", 1 },
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f\", \"type\":\"float\"}]}", "F", 1 },
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f\", \"type\":\"double\"}]}", "D", 1 },
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f\", \"type\":\"string\"}]}", "S10", 1 },
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f\", \"type\":\"bytes\"}]}", "b10", 1 },
+
+    // multi-field records
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f1\", \"type\":\"int\"},"
+      "{\"name\":\"f2\", \"type\":\"double\"},"
+      "{\"name\":\"f3\", \"type\":\"string\"}]}", "IDS10", 1 },
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f0\", \"type\":\"null\"},"
+      "{\"name\":\"f1\", \"type\":\"boolean\"},"
+      "{\"name\":\"f2\", \"type\":\"int\"},"
+      "{\"name\":\"f3\", \"type\":\"long\"},"
+      "{\"name\":\"f4\", \"type\":\"float\"},"
+      "{\"name\":\"f5\", \"type\":\"double\"},"
+      "{\"name\":\"f6\", \"type\":\"string\"},"
+      "{\"name\":\"f7\", \"type\":\"bytes\"}]}",
+        "NBILFDS10b25", 1 },
+    
+    // record of records
+    { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+      "{\"name\":\"f1\", \"type\":{\"type\":\"record\", "
+      "\"name\":\"inner\", \"fields\":["
+      "{\"name\":\"g1\", \"type\":\"int\"}, {\"name\":\"g2\", "
+      "\"type\":\"double\"}]}},"
+      "{\"name\":\"f2\", \"type\":\"string\"},"
+      "{\"name\":\"f3\", \"type\":\"inner\"}]}",
+      "IDS10ID", 1 },
+
+    // record with name references
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f1\", \"type\":{\"type\":\"fixed\", "
+      "\"name\":\"f\", \"size\":10 }}," 
+      "{\"name\":\"f2\", \"type\":\"f\"},"
+      "{\"name\":\"f3\", \"type\":\"f\"}]}",
+      "f10f10f10", 1 },
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f1\", \"type\":{\"type\":\"enum\", "
+      "\"name\": \"e\", \"symbols\":[\"s1\", \"s2\"] }}," 
+      "{\"name\":\"f2\", \"type\":\"e\"},"
+      "{\"name\":\"f3\", \"type\":\"e\"}]}",
+      "e1e0e1", 1 },
+
+    // record with array
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f1\", \"type\":\"long\"},"
+      "{\"name\":\"f2\", "
+      "\"type\":{\"type\":\"array\", \"items\":\"int\"}}]}",
+      "L[c1sI]", 2 },
+
+    // record with map
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f1\", \"type\":\"long\"},"
+      "{\"name\":\"f2\", "
+      "\"type\":{\"type\":\"map\", \"values\":\"int\"}}]}",
+      "L{c1sK5I}", 2 },
+
+    // array of records
+    { "{\"type\":\"array\", \"items\":"
+        "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f1\", \"type\":\"long\"},"
+      "{\"name\":\"f2\", \"type\":\"null\"}]}}",
+        "[c2sLNsLN]", 2 },
+        
+
+    { "{\"type\":\"array\", \"items\":"
+        "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f1\", \"type\":\"long\"},"
+      "{\"name\":\"f2\", "
+      "\"type\":{\"type\":\"array\", \"items\":\"int\"}}]}}",
+        "[c2sL[c1sI]sL[c2sIsI]]", 3 },
+    { "{\"type\":\"array\", \"items\":"
+        "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f1\", \"type\":\"long\"},"
+      "{\"name\":\"f2\", "
+      "\"type\":{\"type\":\"map\", \"values\":\"int\"}}]}}",
+        "[c2sL{c1sK5I}sL{c2sK5IsK5I}]", 3 },
+    { "{\"type\":\"array\", \"items\":"
+        "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+      "{\"name\":\"f1\", \"type\":\"long\"},"
+      "{\"name\":\"f2\", "
+      "\"type\":[\"null\", \"int\"]}]}}",
+        "[c2sLU0NsLU1I]", 2 },
+
+    { "[\"boolean\", \"null\" ]", "U0B", 1 },
+    { "[\"int\", \"null\" ]", "U0I", 1 },
+    { "[\"long\", \"null\" ]", "U0L", 1 },
+    { "[\"float\", \"null\" ]", "U0F", 1 },
+    { "[\"double\", \"null\" ]", "U0D", 1 },
+    { "[\"string\", \"null\" ]", "U0S10", 1 },
+    { "[\"bytes\", \"null\" ]", "U0b10", 1 },
+
+    { "[\"null\", \"int\"]", "U0N", 1 },
+    { "[\"boolean\", \"int\"]", "U0B", 1 },
+    { "[\"boolean\", \"int\"]", "U1I", 1 },
+    { "[\"boolean\", {\"type\":\"array\", \"items\":\"int\"} ]",
+      "U0B", 1 },
+
+    { "[\"boolean\", {\"type\":\"array\", \"items\":\"int\"} ]",
+        "U1[c1sI]", 2 },
+      
+    // Recursion
+    { "{\"type\": \"record\", \"name\": \"Node\", \"fields\": ["
+      "{\"name\":\"label\", \"type\":\"string\"},"
+      "{\"name\":\"children\", \"type\":"
+      "{\"type\": \"array\", \"items\": \"Node\" }}]}",
+      "S10[c1sS10[]]", 3 },
+      
+    { "{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
+      "{\"name\":\"value\", \"type\":[\"null\", \"string\","
+      "{\"type\": \"record\", \"name\": \"Cons\", \"fields\": ["
+      "{\"name\":\"car\", \"type\":\"Lisp\"},"
+      "{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}",
+      "U0N", 1 },
+    { "{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
+      "{\"name\":\"value\", \"type\":[\"null\", \"string\","
+      "{\"type\": \"record\", \"name\": \"Cons\", \"fields\": ["
+      "{\"name\":\"car\", \"type\":\"Lisp\"},"
+      "{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}",
+      "U1S10", 1},
+    { "{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
+      "{\"name\":\"value\", \"type\":[\"null\", \"string\","
+      "{\"type\": \"record\", \"name\": \"Cons\", \"fields\": ["
+      "{\"name\":\"car\", \"type\":\"Lisp\"},"
+      "{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}",
+      "U2U1S10U0N", 1},
+};
+
+static const TestData2 data2[] = {
+    { "\"int\"", "I", "B", 1 },
+    { "\"boolean\"", "B", "I", 1 },
+    { "\"boolean\"", "B", "L", 1 },
+    { "\"boolean\"", "B", "F", 1 },
+    { "\"boolean\"", "B", "D", 1 },
+    { "\"boolean\"", "B", "S10", 1 },
+    { "\"boolean\"", "B", "b10", 1 },
+    { "\"boolean\"", "B", "[]", 1 },
+    { "\"boolean\"", "B", "{}", 1 },
+    { "\"boolean\"", "B", "U0", 1 },
+    { "{\"type\":\"fixed\", \"name\":\"fi\", \"size\": 1}", "f1", "f2", 1 },
+};
+
+static const TestData3 data3[] = {
+    { "\"int\"", "I", "\"float\"", "F", 1 },
+    { "\"int\"", "I", "\"double\"", "D", 1 },
+    { "\"int\"", "I", "\"long\"", "L", 1 },
+    { "\"long\"", "L", "\"float\"", "F", 1 },
+    { "\"long\"", "L", "\"double\"", "D", 1 },
+    { "\"float\"", "F", "\"double\"", "D", 1 },
+
+    { "{\"type\":\"array\", \"items\": \"int\"}", "[]",
+      "{\"type\":\"array\", \"items\": \"long\"}", "[]", 2 },
+    { "{\"type\":\"array\", \"items\": \"int\"}", "[]",
+      "{\"type\":\"array\", \"items\": \"double\"}", "[]", 2 },
+    { "{\"type\":\"array\", \"items\": \"long\"}", "[]",
+      "{\"type\":\"array\", \"items\": \"double\"}", "[]", 2 },
+    { "{\"type\":\"array\", \"items\": \"float\"}", "[]",
+      "{\"type\":\"array\", \"items\": \"double\"}", "[]", 2 },
+
+    { "{\"type\":\"array\", \"items\": \"int\"}", "[c1sI]",
+      "{\"type\":\"array\", \"items\": \"long\"}", "[c1sL]", 2 },
+    { "{\"type\":\"array\", \"items\": \"int\"}", "[c1sI]",
+      "{\"type\":\"array\", \"items\": \"double\"}", "[c1sD]", 2 },
+    { "{\"type\":\"array\", \"items\": \"long\"}", "[c1sL]",
+      "{\"type\":\"array\", \"items\": \"double\"}", "[c1sD]", 2 },
+    { "{\"type\":\"array\", \"items\": \"float\"}", "[c1sF]",
+      "{\"type\":\"array\", \"items\": \"double\"}", "[c1sD]", 2 },
+
+    { "{\"type\":\"map\", \"values\": \"int\"}", "{}",
+      "{\"type\":\"map\", \"values\": \"long\"}", "{}", 2 },
+    { "{\"type\":\"map\", \"values\": \"int\"}", "{}",
+      "{\"type\":\"map\", \"values\": \"double\"}", "{}", 2 },
+    { "{\"type\":\"map\", \"values\": \"long\"}", "{}",
+      "{\"type\":\"map\", \"values\": \"double\"}", "{}", 2 },
+    { "{\"type\":\"map\", \"values\": \"float\"}", "{}",
+      "{\"type\":\"map\", \"values\": \"double\"}", "{}", 2 },
+
+    { "{\"type\":\"map\", \"values\": \"int\"}", "{c1sK5I}",
+      "{\"type\":\"map\", \"values\": \"long\"}", "{c1sK5L}", 2 },
+    { "{\"type\":\"map\", \"values\": \"int\"}", "{c1sK5I}",
+      "{\"type\":\"map\", \"values\": \"double\"}", "{c1sK5D}", 2 },
+    { "{\"type\":\"map\", \"values\": \"long\"}", "{c1sK5L}",
+      "{\"type\":\"map\", \"values\": \"double\"}", "{c1sK5D}", 2 },
+    { "{\"type\":\"map\", \"values\": \"float\"}", "{c1sK5F}",
+      "{\"type\":\"map\", \"values\": \"double\"}", "{c1sK5D}", 2 },
+
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f\", \"type\":\"int\"}]}", "I",
+        "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f\", \"type\":\"long\"}]}", "L", 1 },
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f\", \"type\":\"int\"}]}", "I",
+        "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f\", \"type\":\"double\"}]}", "D", 1 },
+
+    // multi-field record with promotions
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f0\", \"type\":\"boolean\"},"
+        "{\"name\":\"f1\", \"type\":\"int\"},"
+        "{\"name\":\"f2\", \"type\":\"float\"},"
+        "{\"name\":\"f3\", \"type\":\"string\"}]}", "BIFS",
+        "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f0\", \"type\":\"boolean\"},"
+        "{\"name\":\"f1\", \"type\":\"long\"},"
+        "{\"name\":\"f2\", \"type\":\"double\"},"
+        "{\"name\":\"f3\", \"type\":\"string\"}]}", "BLDS", 1 },
+
+    { "[\"int\", \"long\"]", "U0I", "[\"long\", \"string\"]", "U0L", 1 },
+    { "[\"int\", \"long\"]", "U0I", "[\"double\", \"string\"]", "U0D", 1 },
+    { "[\"long\", \"double\"]", "U0L", "[\"double\", \"string\"]", "U0D", 1 },
+    { "[\"float\", \"double\"]", "U0F", "[\"double\", \"string\"]", "U0D", 1 },
+
+    { "\"int\"", "I", "[\"int\", \"string\"]", "U0I", 1 },
+
+    { "[\"int\", \"double\"]", "U0I", "\"int\"", "I", 1 },
+    { "[\"int\", \"double\"]", "U0I", "\"long\"", "L", 1 },
+
+    { "[\"boolean\", \"int\"]", "U1I", "[\"boolean\", \"long\"]", "U1L", 1 },
+    { "[\"boolean\", \"int\"]", "U1I", "[\"long\", \"boolean\"]", "U0L", 1 },
+};
+
+static const TestData4 data4[] = {
+    // Projection
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f1\", \"type\":\"string\"},"
+        "{\"name\":\"f2\", \"type\":\"string\"},"
+        "{\"name\":\"f3\", \"type\":\"int\"}]}", "S10S10IS10S10I",
+        { "s1", "s2", "100", "t1", "t2", "200", NULL },
+        "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f1\", \"type\":\"string\" },"
+        "{\"name\":\"f2\", \"type\":\"string\"}]}", "RS10S10RS10S10",
+        { "s1", "s2", "t1", "t2", NULL }, 1 },
+
+    // Reordered fields
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f1\", \"type\":\"int\"},"
+        "{\"name\":\"f2\", \"type\":\"string\"}]}", "IS10",
+        { "10", "hello", NULL },
+        "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f2\", \"type\":\"string\" },"
+        "{\"name\":\"f1\", \"type\":\"long\"}]}", "RLS10",
+        { "10", "hello", NULL }, 1 },
+
+    // Default values
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[]}", "",
+        { NULL },
+        "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f\", \"type\":\"int\", \"default\": 100}]}", "RI",
+        { "100", NULL }, 1 },
+
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f2\", \"type\":\"int\"}]}", "I",
+        { "10", NULL },
+        "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f1\", \"type\":\"int\", \"default\": 101},"
+        "{\"name\":\"f2\", \"type\":\"int\"}]}", "RII",
+        { "10", "101", NULL }, 1 },
+
+    { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+        "{\"name\": \"g1\", " 
+            "\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
+            "{\"name\":\"f2\", \"type\":\"int\"}]}}, "
+            "{\"name\": \"g2\", \"type\": \"long\"}]}", "IL",
+        { "10", "11", NULL },
+        "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+        "{\"name\": \"g1\", "
+            "\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
+            "{\"name\":\"f1\", \"type\":\"int\", \"default\": 101},"
+            "{\"name\":\"f2\", \"type\":\"int\"}]}}, "
+            "{\"name\": \"g2\", \"type\": \"long\"}]}}", "RRIIL",
+        { "10", "101", "11", NULL }, 1 },
+
+    // Default value for a record.
+    { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+        "{\"name\": \"g1\", "
+        "\"type\":{\"type\":\"record\",\"name\":\"inner1\",\"fields\":["
+        "{\"name\":\"f1\", \"type\":\"long\" },"
+        "{\"name\":\"f2\", \"type\":\"int\"}] } }, "
+        "{\"name\": \"g2\", \"type\": \"long\"}]}", "LIL",
+        { "10", "12", "13", NULL },
+        "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+            "{\"name\": \"g1\", "
+            "\"type\":{\"type\":\"record\",\"name\":\"inner1\",\"fields\":["
+            "{\"name\":\"f1\", \"type\":\"long\" },"
+            "{\"name\":\"f2\", \"type\":\"int\"}] } }, "
+            "{\"name\": \"g2\", \"type\": \"long\"},"
+            "{\"name\": \"g3\", "
+            "\"type\":{\"type\":\"record\",\"name\":\"inner2\",\"fields\":["
+            "{\"name\":\"f1\", \"type\":\"long\" },"
+            "{\"name\":\"f2\", \"type\":\"int\"}] }, "
+            "\"default\": { \"f1\": 15, \"f2\": 101 } }] } ",
+            "RRLILRLI",
+        { "10", "12", "13", "15", "101", NULL}, 1 },
+
+    { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+        "{\"name\": \"g1\", "
+        "\"type\":{\"type\":\"record\",\"name\":\"inner1\",\"fields\":["
+        "{\"name\":\"f1\", \"type\":\"long\" },"
+        "{\"name\":\"f2\", \"type\":\"int\"}] } }, "
+        "{\"name\": \"g2\", \"type\": \"long\"}]}", "LIL",
+        { "10", "12", "13", NULL },
+        "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+            "{\"name\": \"g1\", "
+            "\"type\":{\"type\":\"record\",\"name\":\"inner1\",\"fields\":["
+            "{\"name\":\"f1\", \"type\":\"long\" },"
+            "{\"name\":\"f2\", \"type\":\"int\"}] } }, "
+            "{\"name\": \"g2\", \"type\": \"long\"},"
+            "{\"name\": \"g3\", "
+            "\"type\":\"inner1\", "
+            "\"default\": { \"f1\": 15, \"f2\": 101 } }] } ",
+            "RRLILRLI",
+        { "10", "12", "13", "15", "101", NULL}, 1 },
+
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[]}", "",
+        { NULL },
+        "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f\", \"type\":{ \"type\": \"array\", \"items\": \"int\" },"
+        "\"default\": [100]}]}", "[c1sI]",
+        { "100", NULL }, 1 },
+
+    { "{ \"type\": \"array\", \"items\": {\"type\":\"record\","
+        "\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f0\", \"type\": \"int\"}]} }", "[c1sI]",
+        { "99", NULL },
+        "{ \"type\": \"array\", \"items\": {\"type\":\"record\","
+        "\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f\", \"type\":\"int\", \"default\": 100}]} }",
+        "[Rc1sI]",
+        { "100", NULL }, 1 },
+
+    // Enum resolution
+    { "{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[\"x\",\"y\",\"z\"]}",
+        "e2",
+        { NULL },
+        "{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[ \"y\", \"z\" ]}",
+        "e1",
+        { NULL }, 1 },
+
+    { "{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[ \"x\", \"y\" ]}",
+        "e1",
+        { NULL },
+        "{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[ \"y\", \"z\" ]}",
+        "e0",
+        { NULL }, 1 },
+
+
+    // Union
+    { "\"int\"", "I", { "100", NULL },
+        "[ \"long\", \"int\"]", "U1I", { "100", NULL }, 1 },
+
+    { "[ \"long\", \"int\"]", "U1I", { "100", NULL } ,
+        "\"int\"", "I", { "100", NULL }, 1 },
+
+    // Array of unions
+    { "{\"type\":\"array\", \"items\":[ \"long\", \"int\"]}",
+        "[c2sU1IsU1I]", { "100", "100", NULL } ,
+        "{\"type\":\"array\", \"items\": \"int\"}",
+            "[c2sIsI]", { "100", "100", NULL }, 2 },
+    { "{\"type\":\"array\", \"items\":[ \"long\", \"int\"]}",
+        "[c1sU1Ic1sU1I]", { "100", "100", NULL } ,
+        "{\"type\":\"array\", \"items\": \"int\"}",
+            "[c1sIc1sI]", { "100", "100", NULL }, 2 },
+
+    // Map of unions
+    { "{\"type\":\"map\", \"values\":[ \"long\", \"int\"]}",
+        "{c2sS10U1IsS10U1I}", { "k1", "100", "k2", "100", NULL } ,
+        "{\"type\":\"map\", \"values\": \"int\"}",
+            "{c2sS10IsS10I}", { "k1", "100", "k2", "100", NULL }, 2 },
+    { "{\"type\":\"map\", \"values\":[ \"long\", \"int\"]}",
+        "{c1sS10U1Ic1sS10U1I}", { "k1", "100", "k2", "100", NULL } ,
+        "{\"type\":\"map\", \"values\": \"int\"}",
+            "{c1sS10Ic1sS10I}", { "k1", "100", "k2", "100", NULL }, 2 },
+
+    // Union + promotion
+    { "\"int\"", "I", { "100", NULL },
+        "[ \"long\", \"string\"]", "U0L", { "100", NULL }, 1 },
+
+    { "[ \"int\", \"string\"]", "U0I", { "100", NULL },
+        "\"long\"", "L", { "100", NULL }, 1 },
+
+    // Record where union field is skipped.
+    { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f0\", \"type\":\"boolean\"},"
+        "{\"name\":\"f1\", \"type\":\"int\"},"
+        "{\"name\":\"f2\", \"type\":[\"int\", \"long\"]},"
+        "{\"name\":\"f3\", \"type\":\"float\"}"
+        "]}", "BIU0IF",
+        { "1", "100", "121", "10.75", NULL },
+        "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+        "{\"name\":\"f0\", \"type\":\"boolean\"},"
+        "{\"name\":\"f1\", \"type\":\"long\"},"
+        "{\"name\":\"f3\", \"type\":\"double\"}]}", "BLD",
+        { "1", "100", "10.75", NULL }, 1 },
+};
+
+#define COUNTOF(x)  sizeof(x) / sizeof(x[0])
+#define ENDOF(x)    (x) + COUNTOF(x)
+
+#define ADD_TESTS(testSuite, Factory, testFunc, data)           \
+testSuite.add(BOOST_PARAM_TEST_CASE(&testFunc<Factory>,         \
+    data, data + COUNTOF(data)))
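+
+// For example, ADD_TESTS(ts, BinaryCodecFactory, testCodec, data) expands to
+// ts.add(BOOST_PARAM_TEST_CASE(&testCodec<BinaryCodecFactory>,
+//     data, data + COUNTOF(data)));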
+
+struct BinaryEncoderFactory {
+    static EncoderPtr newEncoder(const ValidSchema& schema) {
+        return binaryEncoder();
+    }
+};
+
+struct BinaryDecoderFactory {
+    static DecoderPtr newDecoder(const ValidSchema& schema) {
+        return binaryDecoder();
+    }
+};
+
+struct BinaryCodecFactory : public BinaryEncoderFactory,
+    public BinaryDecoderFactory { };
+
+struct ValidatingEncoderFactory {
+    static EncoderPtr newEncoder(const ValidSchema& schema) {
+        return validatingEncoder(schema, binaryEncoder());
+    }
+};
+
+struct ValidatingDecoderFactory {
+    static DecoderPtr newDecoder(const ValidSchema& schema) {
+        return validatingDecoder(schema, binaryDecoder());
+    }
+};
+
+struct ValidatingCodecFactory : public ValidatingEncoderFactory,
+    public ValidatingDecoderFactory { };
+
+struct JsonCodec {
+    static EncoderPtr newEncoder(const ValidSchema& schema) {
+        return jsonEncoder(schema);
+    }
+    static DecoderPtr newDecoder(const ValidSchema& schema) {
+        return jsonDecoder(schema);
+    }
+};
+
+struct BinaryEncoderResolvingDecoderFactory : public BinaryEncoderFactory {
+    static DecoderPtr newDecoder(const ValidSchema& schema) {
+        return resolvingDecoder(schema, schema, binaryDecoder());
+    }
+
+    static DecoderPtr newDecoder(const ValidSchema& writer,
+        const ValidSchema& reader) {
+        return resolvingDecoder(writer, reader, binaryDecoder());
+    }
+};
+
+struct ValidatingEncoderResolvingDecoderFactory :
+    public ValidatingEncoderFactory {
+    static DecoderPtr newDecoder(const ValidSchema& schema) {
+        return resolvingDecoder(schema, schema,
+            validatingDecoder(schema, binaryDecoder()));
+    }
+
+    static DecoderPtr newDecoder(const ValidSchema& writer,
+        const ValidSchema& reader) {
+        return resolvingDecoder(writer, reader,
+            validatingDecoder(writer, binaryDecoder()));
+    }
+};
+
+void add_tests(boost::unit_test::test_suite& ts)
+{
+    ADD_TESTS(ts, BinaryCodecFactory, testCodec, data);
+    ADD_TESTS(ts, ValidatingCodecFactory, testCodec, data);
+    ADD_TESTS(ts, JsonCodec, testCodec, data);
+    ADD_TESTS(ts, BinaryEncoderResolvingDecoderFactory, testCodec, data);
+    ADD_TESTS(ts, ValidatingCodecFactory, testReaderFail, data2);
+    ADD_TESTS(ts, ValidatingCodecFactory, testWriterFail, data2);
+    ADD_TESTS(ts, BinaryEncoderResolvingDecoderFactory,
+        testCodecResolving, data3);
+    ADD_TESTS(ts, BinaryEncoderResolvingDecoderFactory,
+        testCodecResolving2, data4);
+    ADD_TESTS(ts, ValidatingEncoderResolvingDecoderFactory,
+        testCodecResolving2, data4);
+
+    ADD_TESTS(ts, ValidatingCodecFactory, testGeneric, data);
+    ADD_TESTS(ts, ValidatingCodecFactory, testGenericResolving, data3);
+    ADD_TESTS(ts, ValidatingCodecFactory, testGenericResolving2, data4);
+}
+
+}   // namespace parsing
+
+static void testStreamLifetimes()
+{
+    EncoderPtr e = binaryEncoder();
+    {
+        std::auto_ptr<OutputStream> s1 = memoryOutputStream();
+        e->init(*s1);
+        e->encodeInt(100);
+        e->encodeDouble(4.73);
+        e->flush();
+    }
+
+    {
+        std::auto_ptr<OutputStream> s2 = memoryOutputStream();
+        e->init(*s2);
+        e->encodeDouble(3.14);
+        e->flush();
+    }
+}
+
+static void testLimits(const EncoderPtr& e, const DecoderPtr& d)
+{
+    std::auto_ptr<OutputStream> s1 = memoryOutputStream();
+    {
+        e->init(*s1);
+        e->encodeDouble(std::numeric_limits<double>::infinity());
+        e->encodeDouble(-std::numeric_limits<double>::infinity());
+        e->encodeDouble(std::numeric_limits<double>::quiet_NaN());
+        e->encodeFloat(std::numeric_limits<float>::infinity());
+        e->encodeFloat(-std::numeric_limits<float>::infinity());
+        e->encodeFloat(std::numeric_limits<float>::quiet_NaN());
+        e->flush();
+    }
+
+    {
+        std::auto_ptr<InputStream> s2 = memoryInputStream(*s1);
+        d->init(*s2);
+        BOOST_CHECK_EQUAL(d->decodeDouble(),
+            std::numeric_limits<double>::infinity());
+        BOOST_CHECK_EQUAL(d->decodeDouble(),
+            -std::numeric_limits<double>::infinity());
+        BOOST_CHECK(boost::math::isnan(d->decodeDouble()));
+        BOOST_CHECK_EQUAL(d->decodeFloat(),
+            std::numeric_limits<float>::infinity());
+        BOOST_CHECK_EQUAL(d->decodeFloat(),
+            -std::numeric_limits<float>::infinity());
+        BOOST_CHECK(boost::math::isnan(d->decodeFloat()));
+    }
+}
+
+static void testLimitsBinaryCodec()
+{
+    testLimits(binaryEncoder(), binaryDecoder());
+}
+
+static void testLimitsJsonCodec()
+{
+    const char* s = "{ \"type\": \"record\", \"name\": \"r\", \"fields\": ["
+        "{ \"name\": \"d1\", \"type\": \"double\" },"
+        "{ \"name\": \"d2\", \"type\": \"double\" },"
+        "{ \"name\": \"d3\", \"type\": \"double\" },"
+        "{ \"name\": \"f1\", \"type\": \"float\" },"
+        "{ \"name\": \"f2\", \"type\": \"float\" },"
+        "{ \"name\": \"f3\", \"type\": \"float\" }"
+    "]}";
+    ValidSchema schema = parsing::makeValidSchema(s);
+    testLimits(jsonEncoder(schema), jsonDecoder(schema));
+}
+
+struct JsonData {
+    const char *schema;
+    const char *json;
+    const char* calls;
+    int depth;
+};
+
+const JsonData jsonData[] = {
+    { "{\"type\": \"double\"}", " 10 ", "D", 1 },
+    { "{\"type\": \"double\"}", " 10.0 ", "D", 1 },
+    { "{\"type\": \"double\"}", " \"Infinity\"", "D", 1 },
+    { "{\"type\": \"double\"}", " \"-Infinity\"", "D", 1 },
+    { "{\"type\": \"double\"}", " \"NaN\"", "D", 1 },
+    { "{\"type\": \"long\"}", " 10 ", "L", 1 },
+};
+
+static void testJson(const JsonData& data)
+{
+    // At present this only verifies that a JSON encoder can be constructed
+    // for the schema; the calls/depth expectations are not yet exercised.
+    ValidSchema schema = parsing::makeValidSchema(data.schema);
+    EncoderPtr e = jsonEncoder(schema);
+}
+
+}   // namespace avro
+
+boost::unit_test::test_suite*
+init_unit_test_suite( int argc, char* argv[] ) 
+{
+    using namespace boost::unit_test;
+
+    test_suite* ts= BOOST_TEST_SUITE("Avro C++ unit tests for codecs");
+    avro::parsing::add_tests(*ts);
+    ts->add(BOOST_TEST_CASE(avro::testStreamLifetimes));
+    ts->add(BOOST_TEST_CASE(avro::testLimitsBinaryCodec));
+    ts->add(BOOST_TEST_CASE(avro::testLimitsJsonCodec));
+    ts->add(BOOST_PARAM_TEST_CASE(&avro::testJson, avro::jsonData,
+        ENDOF(avro::jsonData)));
+
+    return ts;
+}
diff --git a/lang/c++/test/DataFileTests.cc b/lang/c++/test/DataFileTests.cc
new file mode 100644
index 0000000..95e80b1
--- /dev/null
+++ b/lang/c++/test/DataFileTests.cc
@@ -0,0 +1,501 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <boost/test/included/unit_test_framework.hpp>
+#include <boost/test/unit_test.hpp>
+#include <boost/filesystem.hpp>
+
+#include <sstream>
+#include <cstring>
+
+#include "DataFile.hh"
+#include "Generic.hh"
+#include "Stream.hh"
+#include "Compiler.hh"
+
+using std::auto_ptr;
+using std::string;
+using std::pair;
+using std::vector;
+using std::map;
+using std::istringstream;
+using std::ostringstream;
+
+using boost::array;
+using boost::shared_ptr;
+using boost::unit_test::test_suite;
+
+
+using avro::ValidSchema;
+using avro::GenericDatum;
+using avro::GenericRecord;
+
+const int count = 1000;
+
+template <typename T>
+struct Complex {
+    T re;
+    T im;
+    Complex() : re(0), im(0) { }
+    Complex(T r, T i) : re(r), im(i) { }
+};
+
+struct Integer {
+    int64_t re;
+    Integer() : re(0) { }
+    Integer(int64_t r) : re(r) { }
+};
+
+typedef Complex<int64_t> ComplexInteger;
+typedef Complex<double> ComplexDouble;
+
+struct Double {
+    double re;
+    Double() : re(0) { }
+    Double(double r) : re(r) { }
+};
+
+namespace avro {
+
+template <typename T> struct codec_traits<Complex<T> > {
+    static void encode(Encoder& e, const Complex<T>& c) {
+        avro::encode(e, c.re);
+        avro::encode(e, c.im);
+    }
+
+    static void decode(Decoder& d, Complex<T>& c) {
+        avro::decode(d, c.re);
+        avro::decode(d, c.im);
+    }
+};
+
+template <> struct codec_traits<Integer> {
+    static void decode(Decoder& d, Integer& c) {
+        avro::decode(d, c.re);
+    }
+};
+
+template <> struct codec_traits<Double> {
+    static void decode(Decoder& d, Double& c) {
+        avro::decode(d, c.re);
+    }
+};
+
+template<> struct codec_traits<uint32_t> {
+    static void encode(Encoder& e, const uint32_t& v) {
+        e.encodeFixed(reinterpret_cast<const uint8_t*>(&v), sizeof(uint32_t));
+    }
+
+    static void decode(Decoder& d, uint32_t& v) {
+        std::vector<uint8_t> value;
+        d.decodeFixed(sizeof(uint32_t), value);
+        memcpy(&v, &value[0], sizeof(uint32_t));
+    }
+};
+
+}
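+
+// Illustrative sketch (not part of the upstream test): the codec_traits
+// specializations above are the hooks that avro::encode/avro::decode
+// dispatch to, so round-tripping a ComplexInteger through a binary stream
+// looks like:
+//
+//     std::auto_ptr<avro::OutputStream> out = avro::memoryOutputStream();
+//     avro::EncoderPtr e = avro::binaryEncoder();
+//     e->init(*out);
+//     avro::encode(*e, ComplexInteger(3, 5));  // codec_traits<Complex<int64_t> >::encode
+//     e->flush();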
+
+static ValidSchema makeValidSchema(const char* schema)
+{
+    istringstream iss(schema);
+    ValidSchema vs;
+    compileJsonSchema(iss, vs);
+    return vs;
+}
+
+static const char sch[] = "{\"type\": \"record\","
+    "\"name\":\"ComplexInteger\", \"fields\": ["
+        "{\"name\":\"re\", \"type\":\"long\"},"
+        "{\"name\":\"im\", \"type\":\"long\"}"
+    "]}";
+static const char isch[] = "{\"type\": \"record\","
+    "\"name\":\"ComplexInteger\", \"fields\": ["
+        "{\"name\":\"re\", \"type\":\"long\"}"
+    "]}";
+static const char dsch[] = "{\"type\": \"record\","
+    "\"name\":\"ComplexDouble\", \"fields\": ["
+        "{\"name\":\"re\", \"type\":\"double\"},"
+        "{\"name\":\"im\", \"type\":\"double\"}"
+    "]}";
+static const char dblsch[] = "{\"type\": \"record\","
+    "\"name\":\"ComplexDouble\", \"fields\": ["
+        "{\"name\":\"re\", \"type\":\"double\"}"
+    "]}";
+static const char fsch[] = "{\"type\": \"fixed\","
+    "\"name\":\"Fixed_32\", \"size\":4}";
+
+
+string toString(const ValidSchema& s)
+{
+    ostringstream oss;
+    s.toJson(oss);
+    return oss.str();
+}
+
+class DataFileTest {
+    const char* filename;
+    const ValidSchema writerSchema;
+    const ValidSchema readerSchema;
+
+public:
+    DataFileTest(const char* f, const char* wsch, const char* rsch) :
+        filename(f), writerSchema(makeValidSchema(wsch)),
+        readerSchema(makeValidSchema(rsch)) { }
+
+    typedef pair<ValidSchema, GenericDatum> Pair;
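+    // A Pair bundles the schema with a GenericDatum; this is the form the
+    // DataFileWriter/DataFileReader templates expect for generic data.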
+
+    void testCleanup() {
+        BOOST_CHECK(boost::filesystem::remove(filename));
+    }
+
+    void testWrite() {
+        avro::DataFileWriter<ComplexInteger> df(filename, writerSchema, 100);
+        int64_t re = 3;
+        int64_t im = 5;
+        for (int i = 0; i < count; ++i, re *= im, im += 3) {
+            ComplexInteger c(re, im);
+            df.write(c);
+        }
+        df.close();
+    }
+
+    void testWriteGeneric() {
+        avro::DataFileWriter<Pair> df(filename, writerSchema, 100);
+        int64_t re = 3;
+        int64_t im = 5;
+        Pair p(writerSchema, GenericDatum());
+
+        GenericDatum& c = p.second;
+        c = GenericDatum(writerSchema.root());
+        GenericRecord& r = c.value<GenericRecord>();
+
+        for (int i = 0; i < count; ++i, re *= im, im += 3) {
+            r.fieldAt(0) = re;
+            r.fieldAt(1) = im;
+            df.write(p);
+        }
+        df.close();
+    }
+
+    void testWriteGenericByName() {
+        avro::DataFileWriter<Pair> df(filename, writerSchema, 100);
+        int64_t re = 3;
+        int64_t im = 5;
+        Pair p(writerSchema, GenericDatum());
+
+        GenericDatum& c = p.second;
+        c = GenericDatum(writerSchema.root());
+        GenericRecord& r = c.value<GenericRecord>();
+
+        for (int i = 0; i < count; ++i, re *= im, im += 3) {
+            r.field("re") = re;
+            r.field("im") = im;
+            df.write(p);
+        }
+        df.close();
+    }
+
+    void testWriteDouble() {
+        avro::DataFileWriter<ComplexDouble> df(filename, writerSchema, 100);
+        double re = 3.0;
+        double im = 5.0;
+        for (int i = 0; i < count; ++i, re += im - 0.7, im += 3.1) {
+            ComplexDouble c(re, im);
+            df.write(c);
+        }
+        df.close();
+    }
+
+    void testTruncate() {
+        testWriteDouble();
+        uintmax_t size = boost::filesystem::file_size(filename);
+        {
+            avro::DataFileWriter<Pair> df(filename, writerSchema, 100);
+            df.close();
+        }
+        uintmax_t new_size = boost::filesystem::file_size(filename);
+        BOOST_CHECK(size > new_size);
+    }
+
+    void testReadFull() {
+        avro::DataFileReader<ComplexInteger> df(filename, writerSchema);
+        int i = 0;
+        ComplexInteger ci;
+        int64_t re = 3;
+        int64_t im = 5;
+        while (df.read(ci)) {
+            BOOST_CHECK_EQUAL(ci.re, re);
+            BOOST_CHECK_EQUAL(ci.im, im);
+            re *= im;
+            im += 3;
+            ++i;
+        }
+        BOOST_CHECK_EQUAL(i, count);
+    }
+
+    void testReadProjection() {
+        avro::DataFileReader<Integer> df(filename, readerSchema);
+        int i = 0;
+        Integer integer;
+        int64_t re = 3;
+        int64_t im = 5;
+        while (df.read(integer)) {
+            BOOST_CHECK_EQUAL(integer.re, re);
+            re *= im;
+            im += 3;
+            ++i;
+        }
+        BOOST_CHECK_EQUAL(i, count);
+    }
+
+    void testReaderGeneric() {
+        avro::DataFileReader<Pair> df(filename, writerSchema);
+        int i = 0;
+        Pair p(writerSchema, GenericDatum());
+        int64_t re = 3;
+        int64_t im = 5;
+
+        const GenericDatum& ci = p.second;
+        while (df.read(p)) {
+            BOOST_REQUIRE_EQUAL(ci.type(), avro::AVRO_RECORD);
+            const GenericRecord& r = ci.value<GenericRecord>();
+            const size_t n = 2;
+            BOOST_REQUIRE_EQUAL(r.fieldCount(), n);
+            const GenericDatum& f0 = r.fieldAt(0);
+            BOOST_REQUIRE_EQUAL(f0.type(), avro::AVRO_LONG);
+            BOOST_CHECK_EQUAL(f0.value<int64_t>(), re);
+
+            const GenericDatum& f1 = r.fieldAt(1);
+            BOOST_REQUIRE_EQUAL(f1.type(), avro::AVRO_LONG);
+            BOOST_CHECK_EQUAL(f1.value<int64_t>(), im);
+            re *= im;
+            im += 3;
+            ++i;
+        }
+        BOOST_CHECK_EQUAL(i, count);
+    }
+
+    void testReaderGenericByName() {
+        avro::DataFileReader<Pair> df(filename, writerSchema);
+        int i = 0;
+        Pair p(writerSchema, GenericDatum());
+        int64_t re = 3;
+        int64_t im = 5;
+
+        const GenericDatum& ci = p.second;
+        while (df.read(p)) {
+            BOOST_REQUIRE_EQUAL(ci.type(), avro::AVRO_RECORD);
+            const GenericRecord& r = ci.value<GenericRecord>();
+            const size_t n = 2;
+            BOOST_REQUIRE_EQUAL(r.fieldCount(), n);
+            const GenericDatum& f0 = r.field("re");
+            BOOST_REQUIRE_EQUAL(f0.type(), avro::AVRO_LONG);
+            BOOST_CHECK_EQUAL(f0.value<int64_t>(), re);
+
+            const GenericDatum& f1 = r.field("im");
+            BOOST_REQUIRE_EQUAL(f1.type(), avro::AVRO_LONG);
+            BOOST_CHECK_EQUAL(f1.value<int64_t>(), im);
+            re *= im;
+            im += 3;
+            ++i;
+        }
+        BOOST_CHECK_EQUAL(i, count);
+    }
+
+    void testReaderGenericProjection() {
+        avro::DataFileReader<Pair> df(filename, readerSchema);
+        int i = 0;
+        Pair p(readerSchema, GenericDatum());
+        int64_t re = 3;
+        int64_t im = 5;
+
+        const GenericDatum& ci = p.second;
+        while (df.read(p)) {
+            BOOST_REQUIRE_EQUAL(ci.type(), avro::AVRO_RECORD);
+            const GenericRecord& r = ci.value<GenericRecord>();
+            const size_t n = 1;
+            BOOST_REQUIRE_EQUAL(r.fieldCount(), n);
+            const GenericDatum& f0 = r.fieldAt(0);
+            BOOST_REQUIRE_EQUAL(f0.type(), avro::AVRO_LONG);
+            BOOST_CHECK_EQUAL(f0.value<int64_t>(), re);
+
+            re *= im;
+            im += 3;
+            ++i;
+        }
+        BOOST_CHECK_EQUAL(i, count);
+    }
+
+    void testReadDouble() {
+        avro::DataFileReader<ComplexDouble> df(filename, writerSchema);
+        int i = 0;
+        ComplexDouble ci;
+        double re = 3.0;
+        double im = 5.0;
+        while (df.read(ci)) {
+            BOOST_CHECK_CLOSE(ci.re, re, 0.0001);
+            BOOST_CHECK_CLOSE(ci.im, im, 0.0001);
+            re += (im - 0.7);
+            im += 3.1;
+            ++i;
+        }
+        BOOST_CHECK_EQUAL(i, count);
+    }
+
+    /**
+     * Constructs the DataFileReader in two steps.
+     */
+    void testReadDoubleTwoStep() {
+        auto_ptr<avro::DataFileReaderBase>
+            base(new avro::DataFileReaderBase(filename));
+        avro::DataFileReader<ComplexDouble> df(base);
+        BOOST_CHECK_EQUAL(toString(writerSchema), toString(df.readerSchema()));
+        BOOST_CHECK_EQUAL(toString(writerSchema), toString(df.dataSchema()));
+        int i = 0;
+        ComplexDouble ci;
+        double re = 3.0;
+        double im = 5.0;
+        while (df.read(ci)) {
+            BOOST_CHECK_CLOSE(ci.re, re, 0.0001);
+            BOOST_CHECK_CLOSE(ci.im, im, 0.0001);
+            re += (im - 0.7);
+            im += 3.1;
+            ++i;
+        }
+        BOOST_CHECK_EQUAL(i, count);
+    }
+
+    /**
+     * Constructs the DataFileReader in two steps using a different
+     * reader schema.
+     */
+    void testReadDoubleTwoStepProject() {
+        auto_ptr<avro::DataFileReaderBase>
+            base(new avro::DataFileReaderBase(filename));
+        avro::DataFileReader<Double> df(base, readerSchema);
+
+        BOOST_CHECK_EQUAL(toString(readerSchema), toString(df.readerSchema()));
+        BOOST_CHECK_EQUAL(toString(writerSchema), toString(df.dataSchema()));
+        int i = 0;
+        Double ci;
+        double re = 3.0;
+        double im = 5.0;
+        while (df.read(ci)) {
+            BOOST_CHECK_CLOSE(ci.re, re, 0.0001);
+            re += (im - 0.7);
+            im += 3.1;
+            ++i;
+        }
+        BOOST_CHECK_EQUAL(i, count);
+    }
+    /**
+     * Tests writing and reading a data file compressed with the
+     * deflate codec.
+     */
+    void testZip() {
+        const size_t number_of_objects = 100;
+        // first create a large file
+        ValidSchema dschema = avro::compileJsonSchemaFromString(sch);
+        {
+            avro::DataFileWriter<ComplexInteger> writer(
+              filename, dschema, 16 * 1024, avro::DEFLATE_CODEC);
+
+            for (size_t i = 0; i < number_of_objects; ++i) {
+                ComplexInteger d;
+                d.re = i;
+                d.im = 2 * i;
+                writer.write(d);
+            }
+        }
+        {
+            avro::DataFileReader<ComplexInteger> reader(filename, dschema);
+            std::vector<int> found;
+            ComplexInteger record;
+            while (reader.read(record)) {
+                found.push_back(record.re);
+            }
+            BOOST_CHECK_EQUAL(found.size(), number_of_objects);
+            for (unsigned int i = 0; i < found.size(); ++i) {
+                BOOST_CHECK_EQUAL(found[i], i);
+            }
+        }
+    }
+
+    void testSchemaReadWrite() {
+        uint32_t a = 42;
+        {
+            avro::DataFileWriter<uint32_t> df(filename, writerSchema);
+            df.write(a);
+        }
+
+        {
+            avro::DataFileReader<uint32_t> df(filename);
+            uint32_t b;
+            df.read(b);
+            BOOST_CHECK_EQUAL(b, a);
+        }
+    }
+};
+
+void addReaderTests(test_suite* ts, const shared_ptr<DataFileTest>& t)
+{
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testReadFull, t));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testReadProjection, t));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testReaderGeneric, t));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testReaderGenericByName, t));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testReaderGenericProjection,
+        t));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testCleanup, t));
+
+}
+
+test_suite*
+init_unit_test_suite( int argc, char* argv[] )
+{
+    test_suite* ts= BOOST_TEST_SUITE("DataFile tests");
+    shared_ptr<DataFileTest> t1(new DataFileTest("test1.df", sch, isch));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testWrite, t1));
+    addReaderTests(ts, t1);
+
+    shared_ptr<DataFileTest> t2(new DataFileTest("test2.df", sch, isch));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testWriteGeneric, t2));
+    addReaderTests(ts, t2);
+
+    shared_ptr<DataFileTest> t3(new DataFileTest("test3.df", dsch, dblsch));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testWriteDouble, t3));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testReadDouble, t3));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testReadDoubleTwoStep, t3));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testReadDoubleTwoStepProject,
+        t3));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testCleanup, t3));
+
+    shared_ptr<DataFileTest> t4(new DataFileTest("test4.df", dsch, dblsch));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testTruncate, t4));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testCleanup, t4));
+
+    shared_ptr<DataFileTest> t5(new DataFileTest("test5.df", sch, isch));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testWriteGenericByName, t5));
+    addReaderTests(ts, t5);
+
+    shared_ptr<DataFileTest> t6(new DataFileTest("test6.df", dsch, dblsch));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testZip, t6));
+
+    shared_ptr<DataFileTest> t7(new DataFileTest("test7.df", fsch, fsch));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testSchemaReadWrite, t7));
+    ts->add(BOOST_CLASS_TEST_CASE(&DataFileTest::testCleanup, t7));
+
+    return ts;
+}
diff --git a/lang/c++/test/JsonTests.cc b/lang/c++/test/JsonTests.cc
new file mode 100644
index 0000000..823f15f
--- /dev/null
+++ b/lang/c++/test/JsonTests.cc
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include <limits>
+
+#include <boost/test/included/unit_test_framework.hpp>
+#include <boost/test/unit_test.hpp>
+#include <boost/test/parameterized_test.hpp>
+
+#include "../impl/json/JsonDom.hh"
+
+#define S(x) #x
+
+namespace avro {
+namespace json {
+
+template <typename T>
+struct TestData {
+    const char *input;
+    EntityType type;
+    T value;
+};
+
+TestData<bool> boolData[] = {
+    { "true", etBool, true },
+    { "false", etBool, false },
+};
+
+TestData<int64_t> longData[] = {
+    { "0", etLong, 0 },
+    { "-1", etLong, -1 },
+    { "1", etLong, 1 },
+    { "9223372036854775807", etLong, 9223372036854775807LL },
+    { "-9223372036854775807", etLong, -9223372036854775807LL },
+};
+
+TestData<double> doubleData[] = {
+    { "0.0", etDouble, 0.0 },
+    { "-1.0", etDouble, -1.0 },
+    { "1.0", etDouble, 1.0 },
+    { "4.7e3", etDouble, 4700.0 },
+    { "-7.2e-4", etDouble, -0.00072 },
+};
+
+TestData<const char*> stringData[] = {
+    { "\"\"", etString, "" },
+    { "\"a\"", etString, "a" },
+    { "\"\\U000a\"", etString, "\n" },
+    { "\"\\u000a\"", etString, "\n" },
+    { "\"\\\"\"", etString, "\"" },
+    { "\"\\/\"", etString, "/" },
+};
+
+void testBool(const TestData<bool>& d)
+{
+    Entity n = loadEntity(d.input);
+    BOOST_CHECK_EQUAL(n.type(), d.type);
+    BOOST_CHECK_EQUAL(n.boolValue(), d.value);
+}
+
+void testLong(const TestData<int64_t>& d)
+{
+    Entity n = loadEntity(d.input);
+    BOOST_CHECK_EQUAL(n.type(), d.type);
+    BOOST_CHECK_EQUAL(n.longValue(), d.value);
+}
+
+void testDouble(const TestData<double>& d)
+{
+    Entity n = loadEntity(d.input);
+    BOOST_CHECK_EQUAL(n.type(), d.type);
+    BOOST_CHECK_CLOSE(n.doubleValue(), d.value, 1e-10);
+}
+
+void testString(const TestData<const char*>& d)
+{
+    Entity n = loadEntity(d.input);
+    BOOST_CHECK_EQUAL(n.type(), d.type);
+    BOOST_CHECK_EQUAL(n.stringValue(), d.value);
+}
+
+static void testNull()
+{
+    Entity n = loadEntity("null");
+    BOOST_CHECK_EQUAL(n.type(), etNull);
+
+}
+
+static void testArray0()
+{
+    Entity n = loadEntity("[]");
+    BOOST_CHECK_EQUAL(n.type(), etArray);
+    const Array& a = n.arrayValue();
+    BOOST_CHECK_EQUAL(a.size(), 0);
+}
+
+static void testArray1()
+{
+    Entity n = loadEntity("[200]");
+    BOOST_CHECK_EQUAL(n.type(), etArray);
+    const Array& a = n.arrayValue();
+    BOOST_CHECK_EQUAL(a.size(), 1);
+    BOOST_CHECK_EQUAL(a[0].type(), etLong);
+    BOOST_CHECK_EQUAL(a[0].longValue(), 200ll);
+}
+
+static void testArray2()
+{
+    Entity n = loadEntity("[200, \"v100\"]");
+    BOOST_CHECK_EQUAL(n.type(), etArray);
+    const Array& a = n.arrayValue();
+    BOOST_CHECK_EQUAL(a.size(), 2);
+    BOOST_CHECK_EQUAL(a[0].type(), etLong);
+    BOOST_CHECK_EQUAL(a[0].longValue(), 200ll);
+    BOOST_CHECK_EQUAL(a[1].type(), etString);
+    BOOST_CHECK_EQUAL(a[1].stringValue(), "v100");
+}
+
+static void testObject0()
+{
+    Entity n = loadEntity("{}");
+    BOOST_CHECK_EQUAL(n.type(), etObject);
+    const Object& m = n.objectValue();
+    BOOST_CHECK_EQUAL(m.size(), 0);
+}
+
+static void testObject1()
+{
+    Entity n = loadEntity("{\"k1\": 100}");
+    BOOST_CHECK_EQUAL(n.type(), etObject);
+    const Object& m = n.objectValue();
+    BOOST_CHECK_EQUAL(m.size(), 1);
+    BOOST_CHECK_EQUAL(m.begin()->first, "k1");
+    BOOST_CHECK_EQUAL(m.begin()->second.type(), etLong);
+    BOOST_CHECK_EQUAL(m.begin()->second.longValue(), 100ll);
+}
+
+static void testObject2()
+{
+    Entity n = loadEntity("{\"k1\": 100, \"k2\": [400, \"v0\"]}");
+    BOOST_CHECK_EQUAL(n.type(), etObject);
+    const Object& m = n.objectValue();
+    BOOST_CHECK_EQUAL(m.size(), 2);
+
+    Object::const_iterator it = m.find("k1");
+    BOOST_CHECK(it != m.end());
+    BOOST_CHECK_EQUAL(it->second.type(), etLong);
+    BOOST_CHECK_EQUAL(m.begin()->second.longValue(), 100ll);
+
+    it = m.find("k2");
+    BOOST_CHECK(it != m.end());
+    BOOST_CHECK_EQUAL(it->second.type(), etArray);
+    const Array& a = it->second.arrayValue();
+    BOOST_CHECK_EQUAL(a.size(), 2);
+    BOOST_CHECK_EQUAL(a[0].type(), etLong);
+    BOOST_CHECK_EQUAL(a[0].longValue(), 400ll);
+    BOOST_CHECK_EQUAL(a[1].type(), etString);
+    BOOST_CHECK_EQUAL(a[1].stringValue(), "v0");
+}
+
+}
+}
+
+#define COUNTOF(x)  (sizeof(x) / sizeof(x[0]))
+
+boost::unit_test::test_suite*
+init_unit_test_suite( int argc, char* argv[] ) 
+{
+    using namespace boost::unit_test;
+
+    test_suite* ts= BOOST_TEST_SUITE("Avro C++ unit tests for json routines");
+
+    ts->add(BOOST_TEST_CASE(&avro::json::testNull));
+    ts->add(BOOST_PARAM_TEST_CASE(&avro::json::testBool,
+        avro::json::boolData,
+        avro::json::boolData + COUNTOF(avro::json::boolData)));
+    ts->add(BOOST_PARAM_TEST_CASE(&avro::json::testLong,
+        avro::json::longData,
+        avro::json::longData + COUNTOF(avro::json::longData)));
+    ts->add(BOOST_PARAM_TEST_CASE(&avro::json::testDouble,
+        avro::json::doubleData,
+        avro::json::doubleData + COUNTOF(avro::json::doubleData)));
+    ts->add(BOOST_PARAM_TEST_CASE(&avro::json::testString,
+        avro::json::stringData,
+        avro::json::stringData + COUNTOF(avro::json::stringData)));
+
+    ts->add(BOOST_TEST_CASE(&avro::json::testArray0));
+    ts->add(BOOST_TEST_CASE(&avro::json::testArray1));
+    ts->add(BOOST_TEST_CASE(&avro::json::testArray2));
+
+    ts->add(BOOST_TEST_CASE(&avro::json::testObject0));
+    ts->add(BOOST_TEST_CASE(&avro::json::testObject1));
+    ts->add(BOOST_TEST_CASE(&avro::json::testObject2));
+
+    return ts;
+}
diff --git a/lang/c++/test/LargeSchemaTests.cc b/lang/c++/test/LargeSchemaTests.cc
new file mode 100644
index 0000000..3d19cf2
--- /dev/null
+++ b/lang/c++/test/LargeSchemaTests.cc
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <fstream>
+#include "Compiler.hh"
+#include "ValidSchema.hh"
+#include "Decoder.hh"
+
+#include <boost/test/included/unit_test_framework.hpp>
+#include <boost/test/unit_test.hpp>
+#include <boost/test/parameterized_test.hpp>
+
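+// The decoders are never exercised; the point is that compiling the schema
+// and constructing plain, validating and resolving decoders for a very
+// large schema completes at all.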
+void testLargeSchema()
+{
+    std::ifstream in("jsonschemas/large_schema.avsc");
+    avro::ValidSchema vs;
+    avro::compileJsonSchema(in, vs);
+    avro::DecoderPtr d = avro::binaryDecoder();
+    avro::DecoderPtr vd = avro::validatingDecoder(vs, d);
+    avro::DecoderPtr rd = avro::resolvingDecoder(vs, vs, d);
+}
+
+boost::unit_test::test_suite*
+init_unit_test_suite(int argc, char* argv[]) 
+{
+    using namespace boost::unit_test;
+
+    test_suite* ts= BOOST_TEST_SUITE("Avro C++ unit tests for schemas");
+    ts->add(BOOST_TEST_CASE(&testLargeSchema));
+    return ts;
+}
diff --git a/lang/c++/test/SchemaTests.cc b/lang/c++/test/SchemaTests.cc
new file mode 100644
index 0000000..3d483b4
--- /dev/null
+++ b/lang/c++/test/SchemaTests.cc
@@ -0,0 +1,178 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Compiler.hh"
+#include "ValidSchema.hh"
+
+#include <boost/test/included/unit_test_framework.hpp>
+#include <boost/test/unit_test.hpp>
+#include <boost/test/parameterized_test.hpp>
+
+
+namespace avro {
+namespace schema {
+
+const char* basicSchemas[] = {
+    "\"null\"",
+    "\"boolean\"",
+    "\"int\"",
+    "\"long\"",
+    "\"float\"",
+    "\"double\"",
+    "\"bytes\"",
+    "\"string\"",
+
+    // Primitive types - longer
+    "{ \"type\": \"null\" }",
+    "{ \"type\": \"boolean\" }",
+    "{ \"type\": \"int\" }",
+    "{ \"type\": \"long\" }",
+    "{ \"type\": \"float\" }",
+    "{ \"type\": \"double\" }",
+    "{ \"type\": \"bytes\" }",
+    "{ \"type\": \"string\" }",
+
+    // Record
+    "{\"type\": \"record\",\"name\": \"Test\",\"fields\": "
+        "[{\"name\": \"f\",\"type\": \"long\"}]}",
+    "{\"type\": \"record\",\"name\": \"Test\",\"fields\": "
+        "[{\"name\": \"f1\",\"type\": \"long\"},"
+        "{\"name\": \"f2\", \"type\": \"int\"}]}",
+    "{\"type\": \"error\",\"name\": \"Test\",\"fields\": "
+        "[{\"name\": \"f1\",\"type\": \"long\"},"
+        "{\"name\": \"f2\", \"type\": \"int\"}]}",
+
+    // Recursive.
+    "{\"type\":\"record\",\"name\":\"LongList\","
+        "\"fields\":[{\"name\":\"value\",\"type\":\"long\"},"
+        "{\"name\":\"next\",\"type\":[\"LongList\",\"null\"]}]}",
+    // Enum
+    "{\"type\": \"enum\", \"name\": \"Test\", \"symbols\": [\"A\", \"B\"]}",
+
+    // Array
+    "{\"type\": \"array\", \"items\": \"long\"}",
+    "{\"type\": \"array\",\"items\": {\"type\": \"enum\", "
+        "\"name\": \"Test\", \"symbols\": [\"A\", \"B\"]}}",
+
+    // Map
+    "{\"type\": \"map\", \"values\": \"long\"}",
+    "{\"type\": \"map\",\"values\": {\"type\": \"enum\", "
+        "\"name\": \"Test\", \"symbols\": [\"A\", \"B\"]}}",
+
+    // Union
+    "[\"string\", \"null\", \"long\"]",
+
+    // Fixed
+    "{ \"type\": \"fixed\", \"name\": \"Test\", \"size\": 1}",
+    "{\"type\": \"fixed\", \"name\": \"MyFixed\", "
+        "\"namespace\": \"org.apache.hadoop.avro\", \"size\": 1}",
+    "{ \"type\": \"fixed\", \"name\": \"Test\", \"size\": 1}",
+    "{ \"type\": \"fixed\", \"name\": \"Test\", \"size\": 1}",
+
+    // Extra attributes (should be ignored)
+    "{\"type\": \"null\", \"extra attribute\": \"should be ignored\"}",
+    "{\"type\": \"boolean\", \"extra1\": 1, \"extra2\": 2, \"extra3\": 3}",
+    "{\"type\": \"record\",\"name\": \"Test\",\"fields\": "
+        "[{\"name\": \"f\",\"type\": \"long\"}], \"extra attribute\": 1}",
+    "{\"type\": \"enum\", \"name\": \"Test\", \"symbols\": [\"A\", \"B\"],"
+        "\"extra attribute\": 1}",
+    "{\"type\": \"array\", \"items\": \"long\", \"extra attribute\": 1}",
+    "{\"type\": \"map\", \"values\": \"long\", \"extra attribute\": 1}",
+    "{\"type\": \"fixed\", \"name\": \"Test\", \"size\": 1, \"extra attribute\": 1}",
+};
+
+const char* basicSchemaErrors[] = {
+    // Record
+    // No fields
+    "{\"type\":\"record\",\"name\":\"LongList\"}",
+    // Fields not an array
+    "{\"type\":\"record\",\"name\":\"LongList\", \"fields\": \"hi\"}",
+
+    // Undefined name
+    "{\"type\":\"record\",\"name\":\"LongList\","
+        "\"fields\":[{\"name\":\"value\",\"type\":\"long\"},"
+        "{\"name\":\"next\",\"type\":[\"LongListA\",\"null\"]}]}",
+
+    // Enum
+    // Symbols not an array
+    "{\"type\": \"enum\", \"name\": \"Status\", \"symbols\": "
+        "\"Normal Caution Critical\"}",
+    // Name not a string
+    "{\"type\": \"enum\", \"name\": [ 0, 1, 1, 2, 3, 5, 8 ], "
+        "\"symbols\": [\"Golden\", \"Mean\"]}",
+    // No name
+    "{\"type\": \"enum\", \"symbols\" : [\"I\", \"will\", "
+        "\"fail\", \"no\", \"name\"]}",
+    // Duplicate symbol
+    "{\"type\": \"enum\", \"name\": \"Test\","
+        "\"symbols\" : [\"AA\", \"AA\"]}",
+
+    // Union
+    // Duplicate type
+    "[\"string\", \"long\", \"long\"]",
+    // Duplicate type
+    "[{\"type\": \"array\", \"items\": \"long\"}, "
+        "{\"type\": \"array\", \"items\": \"string\"}]",
+        
+    // Fixed
+    // No size
+    "{\"type\": \"fixed\", \"name\": \"Missing size\"}",
+    // No name
+    "{\"type\": \"fixed\", \"size\": 314}",
+};
+
+static void testBasic(const char* schema)
+{
+    BOOST_CHECKPOINT(schema);
+    compileJsonSchemaFromString(schema);
+}
+
+static void testBasic_fail(const char* schema)
+{
+    BOOST_CHECKPOINT(schema);
+    BOOST_CHECK_THROW(compileJsonSchemaFromString(schema), Exception);
+}
+
+static void testCompile(const char* schema)
+{
+    BOOST_CHECKPOINT(schema);
+    compileJsonSchemaFromString(std::string(schema));
+}
+
+}
+}
+
+#define ENDOF(x)  (x + sizeof(x) / sizeof(x[0]))
+
+#define ADD_PARAM_TEST(ts, func, data) \
+    ts->add(BOOST_PARAM_TEST_CASE(&func, data, ENDOF(data)))
+    
+
+boost::unit_test::test_suite*
+init_unit_test_suite(int argc, char* argv[]) 
+{
+    using namespace boost::unit_test;
+
+    test_suite* ts= BOOST_TEST_SUITE("Avro C++ unit tests for schemas");
+    ADD_PARAM_TEST(ts, avro::schema::testBasic, avro::schema::basicSchemas);
+    ADD_PARAM_TEST(ts, avro::schema::testBasic_fail,
+        avro::schema::basicSchemaErrors);
+    ADD_PARAM_TEST(ts, avro::schema::testCompile, avro::schema::basicSchemas);
+
+    return ts;
+}
diff --git a/lang/c++/test/SpecificTests.cc b/lang/c++/test/SpecificTests.cc
new file mode 100644
index 0000000..aec338c
--- /dev/null
+++ b/lang/c++/test/SpecificTests.cc
@@ -0,0 +1,200 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <boost/test/included/unit_test_framework.hpp>
+#include <boost/test/unit_test.hpp>
+
+#include "Specific.hh"
+#include "Stream.hh"
+
+using std::auto_ptr;
+using std::string;
+using std::vector;
+using std::map;
+using boost::array;
+
+namespace avro {
+
+class C {
+    int32_t i_;
+    int64_t l_;
+public:
+    C() : i_(0), l_(0) { }
+    C(int32_t i, int64_t l) : i_(i), l_(l) { }
+    int32_t i() const { return i_; }
+    int64_t l() const { return l_; }
+    void i(int32_t ii) { i_ = ii; }
+    void l(int64_t ll) { l_ = ll; }
+
+    bool operator==(const C& oth) const {
+        return i_ == oth.i_ && l_ == oth.l_;
+    }
+};
+
+template <> struct codec_traits<C> {
+    static void encode(Encoder& e, const C& c) {
+        e.encodeInt(c.i());
+        e.encodeLong(c.l());
+    }
+    
+    static void decode(Decoder& d, C& c) {
+        c.i(d.decodeInt());
+        c.l(d.decodeLong());
+    }
+};
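+
+// With this specialization in place, avro::encode() and avro::decode()
+// accept C just like the built-in types; specific::testCustom below
+// depends on it.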
+
+namespace specific {
+
+class Test {
+    auto_ptr<OutputStream> os;
+    EncoderPtr e;
+    DecoderPtr d;
+public:
+    Test() : os(memoryOutputStream()), e(binaryEncoder()), d(binaryDecoder()) {
+        e->init(*os);
+    }
+
+    template <typename T> void encode(const T& t) {
+        avro::encode(*e, t);
+        e->flush();
+    }
+
+    template <typename T> void decode(T& t) {
+        auto_ptr<InputStream> is = memoryInputStream(*os);
+        d->init(*is);
+        avro::decode(*d, t);
+    }
+};
+
+template <typename T> T encodeAndDecode(const T& t)
+{
+    Test tst;
+
+    tst.encode(t);
+
+    T actual = T();
+    
+    tst.decode(actual);
+    return actual;
+}
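+
+// encodeAndDecode round-trips a value through the binary codec via the
+// codec_traits machinery; each test below applies it to one Avro type.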
+
+void testBool()
+{
+    bool b = encodeAndDecode(true);
+    BOOST_CHECK_EQUAL(b, true);
+}
+
+void testInt()
+{
+    int32_t n = 10;
+    int32_t b = encodeAndDecode(n);
+    BOOST_CHECK_EQUAL(b, n);
+}
+
+void testLong()
+{
+    int64_t n = -109;
+    int64_t b = encodeAndDecode(n);
+    BOOST_CHECK_EQUAL(b, n);
+}
+
+void testFloat()
+{
+    float n = 10.19f;
+    float b = encodeAndDecode(n);
+    BOOST_CHECK_CLOSE(b, n, 0.00001f);
+}
+
+void testDouble()
+{
+    double n = 10.00001;
+    double b = encodeAndDecode(n);
+    BOOST_CHECK_CLOSE(b, n, 0.00000001);
+}
+
+void testString()
+{
+    string n = "abc";
+    string b = encodeAndDecode(n);
+    BOOST_CHECK_EQUAL(b, n);
+}
+
+void testBytes()
+{
+    uint8_t values[] = { 1, 7, 23, 47, 83 };
+    vector<uint8_t> n(values, values + 5);
+    vector<uint8_t> b = encodeAndDecode(n);
+    BOOST_CHECK_EQUAL_COLLECTIONS(b.begin(), b.end(), n.begin(), n.end());
+}
+
+void testFixed()
+{
+    array<uint8_t, 5> n = { { 1, 7, 23, 47, 83 } };
+    array<uint8_t, 5> b = encodeAndDecode(n);
+    BOOST_CHECK_EQUAL_COLLECTIONS(b.begin(), b.end(), n.begin(), n.end());
+}
+
+void testArray()
+{
+    int32_t values[] = { 101, 709, 409, 34 };
+    vector<int32_t> n(values, values + 4);
+    vector<int32_t> b = encodeAndDecode(n);
+    
+    BOOST_CHECK_EQUAL_COLLECTIONS(b.begin(), b.end(), n.begin(), n.end());
+}
+
+void testMap()
+{
+    map<string, int32_t> n;
+    n["a"] = 1;
+    n["b"] = 101;
+
+    map<string, int32_t> b = encodeAndDecode(n);
+    
+    BOOST_CHECK(b == n);
+}
+
+void testCustom()
+{
+    C n(10, 1023);
+    C b = encodeAndDecode(n);
+    BOOST_CHECK(b == n);
+}
+
+}
+}
+
+boost::unit_test::test_suite*
+init_unit_test_suite( int argc, char* argv[] ) 
+{
+    using namespace boost::unit_test;
+
+    test_suite* ts= BOOST_TEST_SUITE("Specific tests");
+    ts->add(BOOST_TEST_CASE(avro::specific::testBool));
+    ts->add(BOOST_TEST_CASE(avro::specific::testInt));
+    ts->add(BOOST_TEST_CASE(avro::specific::testLong));
+    ts->add(BOOST_TEST_CASE(avro::specific::testFloat));
+    ts->add(BOOST_TEST_CASE(avro::specific::testDouble));
+    ts->add(BOOST_TEST_CASE(avro::specific::testString));
+    ts->add(BOOST_TEST_CASE(avro::specific::testBytes));
+    ts->add(BOOST_TEST_CASE(avro::specific::testFixed));
+    ts->add(BOOST_TEST_CASE(avro::specific::testArray));
+    ts->add(BOOST_TEST_CASE(avro::specific::testMap));
+    ts->add(BOOST_TEST_CASE(avro::specific::testCustom));
+    return ts;
+}
diff --git a/lang/c++/test/StreamTests.cc b/lang/c++/test/StreamTests.cc
new file mode 100644
index 0000000..2504207
--- /dev/null
+++ b/lang/c++/test/StreamTests.cc
@@ -0,0 +1,239 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "boost/filesystem.hpp"
+#include "Stream.hh"
+#include "Exception.hh"
+#include <boost/test/included/unit_test_framework.hpp>
+#include <boost/test/parameterized_test.hpp>
+
+namespace avro {
+namespace stream {
+
+struct CheckEmpty1 {
+    void operator()(InputStream& is) {
+        const uint8_t* d;
+        size_t n;
+        BOOST_CHECK(! is.next(&d, &n));
+    }
+};
+
+struct CheckEmpty2 {
+    void operator()(InputStream& is) {
+        StreamReader r;
+        r.reset(is);
+        BOOST_CHECK_THROW(r.read(), Exception);
+    }
+};
+
+struct TestData {
+    size_t chunkSize;
+    size_t dataSize;
+};
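+
+// chunkSize is the stream's internal buffer size and dataSize the number of
+// bytes a test writes; the data[] table at the end of this namespace picks
+// sizes below, at and beyond the chunk boundary.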
+
+struct Fill1 {
+    void operator()(OutputStream& os, size_t len) {
+        StreamWriter w;
+        w.reset(os);
+        for (size_t i = 0; i < len; ++i) {
+            w.write(i % 10 + '0');
+        }
+        w.flush();
+    }
+};
+
+struct Fill2 {
+    void operator()(OutputStream& os, size_t len) {
+        for (size_t i = 0; i < len;) {
+            uint8_t *b;
+            size_t n;
+            os.next(&b, &n);
+            size_t j = 0;
+            for (; i < len && j < n; ++j, ++i, ++b) {
+                *b = i % 10 + '0';
+            }
+            if (i == len) {
+                os.backup(n - j);
+            }
+        }
+        os.flush();
+    }
+};
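+
+// A sketch of the OutputStream next()/backup() protocol that Fill2 relies
+// on: next() lends out a raw block of n writable bytes, and any bytes that
+// were not actually used must be handed back with backup() before flushing.
+//
+//     uint8_t* b;
+//     size_t n;
+//     os.next(&b, &n);   // borrow a block of n bytes
+//     *b = 'x';          // write only the first byte
+//     os.backup(n - 1);  // return the n - 1 unused bytes
+//     os.flush();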
+
+struct Verify1 {
+    void operator()(InputStream& is, size_t dataSize) {
+        StreamReader r;
+        r.reset(is);
+        for (size_t i = 0; i < dataSize; ++i) {
+            BOOST_CHECK_EQUAL(i % 10 + '0', r.read());
+        }
+        BOOST_CHECK_THROW(r.read(), Exception);
+    }
+};
+
+struct Verify2 {
+    void operator()(InputStream& is, size_t len) {
+        const uint8_t *b;
+        size_t n;
+
+        for (size_t i = 0; i < len;) {
+            BOOST_REQUIRE(is.next(&b, &n));
+            size_t j = 0;
+            for (; i < len && j < n; ++j, ++i, ++b) {
+                BOOST_CHECK_EQUAL(*b, i % 10 + '0');
+            }
+            BOOST_CHECK_EQUAL(j, n);
+        }
+        BOOST_CHECK(! is.next(&b, &n));
+    }
+};
+
+template <typename V>
+void testEmpty_memoryStream() {
+    std::auto_ptr<OutputStream> os = memoryOutputStream();
+    std::auto_ptr<InputStream> is = memoryInputStream(*os);
+    V()(*is);
+}
+
+template <typename F, typename V>
+void testNonEmpty_memoryStream(const TestData& td)
+{
+    std::auto_ptr<OutputStream> os = memoryOutputStream(td.chunkSize);
+    F()(*os, td.dataSize);
+
+    std::auto_ptr<InputStream> is = memoryInputStream(*os);
+    V()(*is, td.dataSize);
+}
+
+void testNonEmpty2(const TestData& td) {
+    std::vector<uint8_t> v;
+    for (size_t i = 0; i < td.dataSize; ++i) {
+        v.push_back(i % 10 + '0');
+    }
+
+    uint8_t v2 = 0;
+    std::auto_ptr<InputStream> is =
+        memoryInputStream(v.empty() ? &v2 : &v[0], v.size());
+    Verify1()(*is, td.dataSize);
+}
+
+static const char filename[] = "test_str.bin";
+
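+// RAII guard that removes the test file when the enclosing scope exits,
+// even if a check fails and the function returns early.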
+struct FileRemover {
+    const boost::filesystem::path file;
+    FileRemover(const char* filename) : file(filename) { }
+    ~FileRemover() { boost::filesystem::remove(file); }
+};
+
+template <typename V>
+void testEmpty_fileStream() {
+    FileRemover fr(filename);
+    {
+        std::auto_ptr<OutputStream> os = fileOutputStream(filename);
+    }
+    std::auto_ptr<InputStream> is = fileInputStream(filename);
+    V()(*is);
+}
+
+template <typename F, typename V>
+void testNonEmpty_fileStream(const TestData& td)
+{
+    FileRemover fr(filename);
+    {
+        std::auto_ptr<OutputStream> os = fileOutputStream(filename,
+            td.chunkSize);
+        F()(*os, td.dataSize);
+    }
+
+    std::auto_ptr<InputStream> is = fileInputStream(filename, td.chunkSize);
+    V()(*is, td.dataSize);
+}
+
+TestData data[] = {
+    { 100, 0 },
+    { 100, 1 },
+    { 100, 10 },
+    { 100, 100 },
+    { 100, 101 },
+    { 100, 1000 },
+    { 100, 1024 }
+};
+
+}   // namespace stream
+
+}   // namespace
+    
+boost::unit_test::test_suite*
+init_unit_test_suite( int argc, char* argv[] ) 
+{
+    boost::unit_test::test_suite* ts =
+        BOOST_TEST_SUITE("Avro C++ unit test suite for streams");
+
+    ts->add(BOOST_TEST_CASE(
+        &avro::stream::testEmpty_memoryStream<avro::stream::CheckEmpty1>));
+    ts->add(BOOST_TEST_CASE(
+        &avro::stream::testEmpty_memoryStream<avro::stream::CheckEmpty2>));
+
+    ts->add(BOOST_PARAM_TEST_CASE(
+        (&avro::stream::testNonEmpty_memoryStream<avro::stream::Fill1,
+            avro::stream::Verify1>),
+        avro::stream::data,
+        avro::stream::data +
+        sizeof(avro::stream::data) / sizeof(avro::stream::data[0])));
+    ts->add(BOOST_PARAM_TEST_CASE(
+        (&avro::stream::testNonEmpty_memoryStream<avro::stream::Fill2,
+            avro::stream::Verify1>),
+        avro::stream::data,
+        avro::stream::data +
+        sizeof(avro::stream::data) / sizeof(avro::stream::data[0])));
+    ts->add(BOOST_PARAM_TEST_CASE(
+        (&avro::stream::testNonEmpty_memoryStream<avro::stream::Fill2,
+            avro::stream::Verify2>),
+        avro::stream::data,
+        avro::stream::data +
+        sizeof(avro::stream::data) / sizeof(avro::stream::data[0])));
+
+    ts->add(BOOST_PARAM_TEST_CASE(&avro::stream::testNonEmpty2,
+        avro::stream::data,
+        avro::stream::data +
+        sizeof(avro::stream::data) / sizeof(avro::stream::data[0])));
+
+    ts->add(BOOST_TEST_CASE(
+        &avro::stream::testEmpty_fileStream<avro::stream::CheckEmpty1>));
+    ts->add(BOOST_TEST_CASE(
+        &avro::stream::testEmpty_fileStream<avro::stream::CheckEmpty2>));
+
+    ts->add(BOOST_PARAM_TEST_CASE(
+        (&avro::stream::testNonEmpty_fileStream<avro::stream::Fill1,
+            avro::stream::Verify1>),
+        avro::stream::data,
+        avro::stream::data +
+        sizeof(avro::stream::data) / sizeof(avro::stream::data[0])));
+    ts->add(BOOST_PARAM_TEST_CASE(
+        (&avro::stream::testNonEmpty_fileStream<avro::stream::Fill2,
+            avro::stream::Verify1>),
+        avro::stream::data,
+        avro::stream::data +
+        sizeof(avro::stream::data) / sizeof(avro::stream::data[0])));
+    ts->add(BOOST_PARAM_TEST_CASE(
+        (&avro::stream::testNonEmpty_fileStream<avro::stream::Fill2,
+            avro::stream::Verify2>),
+        avro::stream::data,
+        avro::stream::data +
+        sizeof(avro::stream::data) / sizeof(avro::stream::data[0])));
+    return ts;
+}
diff --git a/lang/c++/test/buffertest.cc b/lang/c++/test/buffertest.cc
new file mode 100644
index 0000000..ac445aa
--- /dev/null
+++ b/lang/c++/test/buffertest.cc
@@ -0,0 +1,1153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <boost/test/included/unit_test_framework.hpp>
+
+#include <boost/thread.hpp>
+#include <boost/bind.hpp>
+#include <boost/scoped_array.hpp>
+
+#ifdef HAVE_BOOST_ASIO
+#include <boost/asio.hpp>
+#endif
+#include <fstream>
+#include <iostream>
+
+#define BUFFER_UNITTEST
+#include "buffer/BufferStream.hh"
+#include "buffer/BufferReader.hh"
+#include "buffer/BufferPrint.hh"
+
+using namespace avro;
+using std::cout;
+using std::endl;
+using detail::kDefaultBlockSize;
+using detail::kMinBlockSize;
+using detail::kMaxBlockSize;
+
+std::string makeString(size_t len)
+{
+    std::string newstring;
+    newstring.reserve(len);
+
+    for(size_t i=0; i < len; ++i) {
+        char newchar = '0' + i%16;
+        if(newchar > '9') {
+            newchar += 7;
+        }
+        newstring.push_back(newchar);
+    }
+
+    return newstring;
+}
+
+void printBuffer(const InputBuffer &buf) 
+{
+    avro::istream is(buf);
+    cout << is.rdbuf() << endl;
+}
+
+void TestReserve()
+{
+    BOOST_MESSAGE( "TestReserve");
+    {
+        OutputBuffer ob;
+        BOOST_CHECK_EQUAL(ob.size(), 0U);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), 0U);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 0);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 0);
+    }
+
+    {
+        size_t reserveSize = kMinBlockSize/2;
+
+        OutputBuffer ob (reserveSize);
+        BOOST_CHECK_EQUAL(ob.size(), 0U);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kMinBlockSize);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 0);
+
+        // reserve should add a single block
+        reserveSize += 8192;
+
+        ob.reserve(reserveSize);
+        BOOST_CHECK_EQUAL(ob.size(), 0U);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), reserveSize);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 2);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 0);
+
+        // reserve should add two blocks, one of the maximum size and
+        // one of the minimum size
+        reserveSize += (kMaxBlockSize + kMinBlockSize/2);
+
+        ob.reserve(reserveSize);
+        BOOST_CHECK_EQUAL(ob.size(), 0U);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), reserveSize + kMinBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 4);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 0);
+    }
+}
+
+void addDataToBuffer(OutputBuffer &buf, size_t size)
+{
+    std::string data = makeString(size);
+    buf.writeTo(data.c_str(), data.size());
+}
+
+void TestGrow()
+{
+    BOOST_MESSAGE( "TestGrow");
+    { 
+        OutputBuffer ob;
+
+        // add exactly one block
+        addDataToBuffer(ob, kDefaultBlockSize);
+
+        BOOST_CHECK_EQUAL(ob.size(), kDefaultBlockSize);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), 0U);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 0);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 1);
+
+        // add another block, half full
+        addDataToBuffer(ob, kDefaultBlockSize/2);
+
+        BOOST_CHECK_EQUAL(ob.size(), kDefaultBlockSize + kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 2);
+
+        // reserve more capacity
+        size_t reserveSize = ob.freeSpace() + 8192;
+        ob.reserve(reserveSize);
+
+        BOOST_CHECK_EQUAL(ob.size(), kDefaultBlockSize + kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), reserveSize);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 2);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 2);
+
+        // fill beyond capacity
+        addDataToBuffer(ob, reserveSize + 1);
+        BOOST_CHECK_EQUAL(ob.size(), kDefaultBlockSize + kDefaultBlockSize/2 + reserveSize +1);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize - 1);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 4);
+    }
+}
+
+void TestDiscard()
+{
+    BOOST_MESSAGE( "TestDiscard");
+    {
+        OutputBuffer ob;
+        size_t dataSize = kDefaultBlockSize*2 + kDefaultBlockSize/2;
+        addDataToBuffer(ob, dataSize);
+
+        BOOST_CHECK_EQUAL(ob.size(), dataSize);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 3);
+
+        ob.discardData();
+
+        BOOST_CHECK_EQUAL(ob.size(), 0U);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 0);
+    }
+
+    {
+        // discard no bytes
+        OutputBuffer ob;
+        size_t dataSize = kDefaultBlockSize*2 + kDefaultBlockSize/2;
+        addDataToBuffer(ob, dataSize);
+
+        BOOST_CHECK_EQUAL(ob.size(), dataSize);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 3);
+
+        ob.discardData(0);
+
+        BOOST_CHECK_EQUAL(ob.size(), dataSize);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 3);
+    }
+
+    { 
+        // discard exactly one block
+        OutputBuffer ob;
+        size_t dataSize = kDefaultBlockSize*2 + kDefaultBlockSize/2;
+        addDataToBuffer(ob, dataSize);
+
+        BOOST_CHECK_EQUAL(ob.size(), dataSize);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 3);
+
+        ob.discardData(kDefaultBlockSize);
+
+        BOOST_CHECK_EQUAL(ob.size(), dataSize - kDefaultBlockSize);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 2);
+    }
+
+    {
+        OutputBuffer ob;
+        size_t dataSize = kDefaultBlockSize*2 + kDefaultBlockSize/2;
+        addDataToBuffer(ob, dataSize);
+
+        BOOST_CHECK_EQUAL(ob.size(), dataSize);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 3);
+
+        size_t remainder = dataSize % 100;
+
+        // discard data 100 bytes at a time
+        size_t discarded = 0;
+        while(ob.size() > 100) {
+            ob.discardData(100);
+            dataSize -= 100;
+            discarded += 100;
+
+            BOOST_CHECK_EQUAL(ob.size(), dataSize);
+            BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+            BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+
+            int chunks = 3 - (discarded / kDefaultBlockSize);
+            BOOST_CHECK_EQUAL(ob.numDataChunks(), chunks);
+        }
+
+        BOOST_CHECK_EQUAL(ob.size(), remainder);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 1);
+
+        try {
+            ob.discardData(ob.size()+1);
+        }
+        catch (std::exception &e) {
+            std::cout << "Intentionally triggered exception: " << e.what() << std::endl; 
+        }
+        ob.discardData(ob.size());
+
+        BOOST_CHECK_EQUAL(ob.size(), 0U);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 0);
+    }
+}
+
+void TestConvertToInput()
+{
+    BOOST_MESSAGE( "TestConvertToInput");
+    {
+        OutputBuffer ob;
+        size_t dataSize = kDefaultBlockSize*2 + kDefaultBlockSize/2;
+        addDataToBuffer(ob, dataSize);
+
+        InputBuffer ib(ob);
+
+        BOOST_CHECK_EQUAL(ib.size(), dataSize);
+        BOOST_CHECK_EQUAL(ib.numChunks(), 3);
+
+        BOOST_CHECK_EQUAL(ob.size(), dataSize);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 3);
+    }
+}
+
+void TestExtractToInput()
+{
+    BOOST_MESSAGE( "TestExtractToInput");
+    {
+        OutputBuffer ob;
+        size_t dataSize = kDefaultBlockSize*2 + kDefaultBlockSize/2;
+        addDataToBuffer(ob, dataSize);
+
+        InputBuffer ib = ob.extractData();
+
+        BOOST_CHECK_EQUAL(ib.size(), dataSize);
+        BOOST_CHECK_EQUAL(ib.numChunks(), 3);
+
+        BOOST_CHECK_EQUAL(ob.size(), 0U);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 0);
+    }
+
+    {
+        // extract no bytes
+        OutputBuffer ob;
+        size_t dataSize = kDefaultBlockSize*2 + kDefaultBlockSize/2;
+        addDataToBuffer(ob, dataSize);
+
+        InputBuffer ib = ob.extractData(0);
+
+        BOOST_CHECK_EQUAL(ib.size(), 0U);
+        BOOST_CHECK_EQUAL(ib.numChunks(), 0);
+
+        BOOST_CHECK_EQUAL(ob.size(), dataSize);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 3);
+    }
+    
+    {
+        // extract exactly one block
+        OutputBuffer ob;
+        size_t dataSize = kDefaultBlockSize*2 + kDefaultBlockSize/2;
+        addDataToBuffer(ob, dataSize);
+
+        InputBuffer ib = ob.extractData(kDefaultBlockSize);
+
+        BOOST_CHECK_EQUAL(ib.size(), kDefaultBlockSize);
+        BOOST_CHECK_EQUAL(ib.numChunks(), 1);
+
+        BOOST_CHECK_EQUAL(ob.size(), dataSize - kDefaultBlockSize);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 2);
+    }
+
+    {
+        OutputBuffer ob;
+        size_t dataSize = kDefaultBlockSize*2 + kDefaultBlockSize/2;
+        addDataToBuffer(ob, dataSize);
+
+        size_t remainder = dataSize % 100;
+
+        // extract data 100 bytes at a time
+        size_t extracted = 0;
+        while(ob.size() > 100) {
+            ob.extractData(100);
+            dataSize -= 100;
+            extracted += 100;
+
+            BOOST_CHECK_EQUAL(ob.size(), dataSize);
+            BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+            BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+
+            int chunks = 3 - (extracted / kDefaultBlockSize);
+            BOOST_CHECK_EQUAL(ob.numDataChunks(), chunks);
+        }
+
+        BOOST_CHECK_EQUAL(ob.size(), remainder);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 1);
+
+        try {
+            ob.extractData(ob.size()+1);
+        }
+        catch (std::exception &e) {
+            std::cout << "Intentionally triggered exception: " << e.what() << std::endl; 
+        }
+
+        InputBuffer ib = ob.extractData(remainder);
+
+        BOOST_CHECK_EQUAL(ib.size(), remainder);
+        BOOST_CHECK_EQUAL(ib.numChunks(), 1);
+
+        BOOST_CHECK_EQUAL(ob.size(), 0U);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kDefaultBlockSize/2);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 0);
+    }
+}
+
+void TestAppend()
+{
+    BOOST_MESSAGE( "TestAppend");
+    {
+        OutputBuffer ob;
+        size_t dataSize = kDefaultBlockSize + kDefaultBlockSize/2;
+        addDataToBuffer(ob, dataSize);
+
+        OutputBuffer a;
+        a.append(ob);
+
+        BOOST_CHECK_EQUAL(a.size(), dataSize);
+        BOOST_CHECK_EQUAL(a.freeSpace(), 0U);
+        BOOST_CHECK_EQUAL(a.numChunks(), 0);
+        BOOST_CHECK_EQUAL(a.numDataChunks(), 2);
+
+        // reserve on a, then append from an input buffer
+        a.reserve(7000);
+
+        InputBuffer ib(ob);
+        a.append(ib);
+
+        BOOST_CHECK_EQUAL(a.size(), dataSize*2);
+        BOOST_CHECK_EQUAL(a.freeSpace(), 7000U);
+        BOOST_CHECK_EQUAL(a.numChunks(), 1);
+        BOOST_CHECK_EQUAL(a.numDataChunks(), 4);
+    }
+}
+
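+// avro::ostream writes into an OutputBuffer that is retrievable via
+// getBuffer(); the buffer grows block by block as data is streamed in.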
+void TestBufferStream()
+{
+    BOOST_MESSAGE( "TestBufferStream");
+
+    {
+        // write enough bytes to the buffer to create at least 3 blocks
+        std::string junk = makeString(kDefaultBlockSize);
+        ostream os;
+        int i = 0;
+        for(; i < 3; ++i) {
+            os << junk;
+        }
+
+        const OutputBuffer &buf = os.getBuffer();
+        cout << "Buffer has " << buf.size() << " bytes\n";
+        BOOST_CHECK_EQUAL(buf.size(), junk.size() * i);
+    }
+}
+
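+// A 0xFF byte has the same value as the EOF sentinel when mishandled as a
+// plain char. These reads place 0xFF bytes exactly on a chunk boundary and
+// check that eof() is reported only once the data genuinely runs out.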
+template<typename T>
+void TestEof()
+{
+    // create a message full of 0xFF bytes, which collide with the EOF value
+    std::vector<char> eofs(sizeof(T) * 3 / 2, -1);
+
+    OutputBuffer buf1;
+    buf1.writeTo(&eofs[0], eofs.size());
+
+    OutputBuffer buf2;
+    buf2.writeTo(&eofs[0], eofs.size());
+
+    // append the buffers, so the first 
+    // character on a buffer boundary is eof
+    buf1.append(buf2);
+
+    avro::istream is(buf1);
+
+    for(int i = 0; i < 3; ++i) {
+        T d;
+        char *addr = reinterpret_cast<char *>(&d);
+        is.read(addr, sizeof(T));
+        BOOST_CHECK_EQUAL(is.gcount(), static_cast<std::streamsize>(sizeof(T)));
+        BOOST_CHECK_EQUAL(is.eof(), false);
+    }
+
+    char c;
+    is.read(&c, sizeof(c));
+    BOOST_CHECK_EQUAL(is.gcount(), 0);
+    BOOST_CHECK_EQUAL(is.eof(), true);
+}
+
+void TestBufferStreamEof()
+{
+    BOOST_MESSAGE( "TestBufferStreamEof");
+
+    TestEof<int32_t>();
+
+    TestEof<int64_t>();
+
+    TestEof<float>();
+
+    TestEof<double>();
+}
+
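+// seekg() and tellg() must work across chunk boundaries; seeking past the
+// end has to fail, leaving tellg() at -1, as the final assertion verifies.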
+void TestSeekAndTell()
+{
+    BOOST_MESSAGE( "TestSeekAndTell");
+
+    {
+        std::string junk = makeString(kDefaultBlockSize/2);
+
+        ostream os;
+
+        // write enough bytes to the buffer to create at least 3 blocks
+        int i = 0;
+        for(; i < 5; ++i) {
+            os << junk;
+        }
+
+        const OutputBuffer &buf = os.getBuffer();
+        cout << "Buffer has " << buf.size() << " bytes\n";
+
+        istream is(os.getBuffer());
+        BOOST_CHECK_EQUAL(is.getBuffer().size(), junk.size() * i);
+        is.seekg(2000);
+        BOOST_CHECK_EQUAL(is.tellg(), static_cast<std::streampos>(2000));
+        is.seekg(6000);
+        BOOST_CHECK_EQUAL(is.tellg(), static_cast<std::streampos>(6000));
+        is.seekg(is.getBuffer().size());
+        BOOST_CHECK_EQUAL(is.tellg(), static_cast<std::streampos>(is.getBuffer().size()));
+        is.seekg(is.getBuffer().size()+1);
+        BOOST_CHECK_EQUAL(is.tellg(), static_cast<std::streampos>(-1));
+
+    }
+}
+
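+// readsome() should return exactly the bytes in_avail() reports -- what the
+// stream buffer currently exposes -- rather than blocking for the full
+// request.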
+void TestReadSome()
+{
+    BOOST_MESSAGE( "TestReadSome");
+    {
+        std::string junk = makeString(kDefaultBlockSize/2);
+
+        ostream os;
+
+        // write enough bytes to the buffer to create at least 3 blocks
+        int i = 0;
+        for(; i < 5; ++i) {
+            os << junk;
+        }
+
+        cout << "Buffer has " << os.getBuffer().size() << " bytes\n";
+
+        istream is(os.getBuffer());
+
+        char datain[5000];
+
+        while(is.rdbuf()->in_avail()) {
+            size_t bytesAvail = static_cast<size_t>(is.rdbuf()->in_avail());
+            cout << "Bytes avail = " << bytesAvail << endl;
+            size_t in = static_cast<size_t>(is.readsome(datain, sizeof(datain)));
+            cout << "Bytes read = " << in << endl;
+            BOOST_CHECK_EQUAL(bytesAvail, in);
+        }
+    }
+}
+
+void TestSeek()
+{
+    BOOST_MESSAGE( "TestSeek");
+    {
+        const std::string str = "SampleMessage";
+
+        avro::OutputBuffer tmp1, tmp2, tmp3;
+        tmp1.writeTo(str.c_str(), 3); // Sam
+        tmp2.writeTo(str.c_str()+3, 7);  // pleMess
+        tmp3.writeTo(str.c_str()+10, 3); // age
+        
+        tmp2.append(tmp3);
+        tmp1.append(tmp2);
+
+        BOOST_CHECK_EQUAL(tmp3.numDataChunks(), 1);
+        BOOST_CHECK_EQUAL(tmp2.numDataChunks(), 2);
+        BOOST_CHECK_EQUAL(tmp1.numDataChunks(), 3);
+
+        avro::InputBuffer buf(tmp1);
+
+        cout << "Starting string: " << str << '\n';
+        BOOST_CHECK_EQUAL(static_cast<std::string::size_type>(buf.size()), str.size());
+
+        avro::istream is(buf);
+
+        const std::string part1 = "Sample";
+        char buffer[16];
+        is.read(buffer, part1.size());
+        std::string sample1(buffer, part1.size());
+        cout << "After reading bytes: " << sample1 << '\n';
+        BOOST_CHECK_EQUAL(sample1, part1);
+
+        const std::string part2 = "Message";
+        is.read(buffer, part2.size());
+        std::string sample2(buffer, part2.size());
+        cout << "After reading remaining bytes: " << sample2 << '\n';
+        BOOST_CHECK_EQUAL(sample2, part2);
+
+        cout << "Seeking back " << '\n';
+        is.seekg( - static_cast<std::streamoff>(part2.size()), std::ios_base::cur);
+
+        std::streampos loc = is.tellg();
+        cout << "Saved loc = " << loc << '\n';
+        BOOST_CHECK_EQUAL(static_cast<std::string::size_type>( loc ), (str.size()-part2.size()));
+
+        cout << "Reading remaining bytes: " << is.rdbuf() << '\n';
+        cout << "bytes avail = " << is.rdbuf()->in_avail() << '\n';
+        BOOST_CHECK_EQUAL(is.rdbuf()->in_avail(), 0);
+
+        cout << "Moving to saved loc = " << loc << '\n';
+        is.seekg(loc);
+        cout << "bytes avail = " << is.rdbuf()->in_avail() << '\n';
+
+        std::ostringstream oss;
+        oss << is.rdbuf();
+        cout << "After reading bytes: " << oss.str() << '\n';
+        BOOST_CHECK_EQUAL(oss.str(), part2);
+        
+    }
+}
+
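+// Iterating an OutputBuffer visits its writable (free-space) chunks, while
+// iterating an InputBuffer visits data chunks. The per-chunk size checks also
+// pin down the allocation policy: a request larger than kMaxBlockSize is
+// split into kMaxBlockSize blocks plus a smaller trailing block
+// (kMinBlockSize in this case).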
+void TestIterator() 
+{
+    BOOST_MESSAGE( "TestIterator");
+    {
+        OutputBuffer ob(2 * kMaxBlockSize + 10);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 3);
+        BOOST_CHECK_EQUAL(ob.size(), 0U);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), 2 * kMaxBlockSize + kMinBlockSize);
+
+        BOOST_CHECK_EQUAL (std::distance(ob.begin(), ob.end()), 3);
+
+        OutputBuffer::const_iterator iter = ob.begin();
+        BOOST_CHECK_EQUAL( iter->size(), kMaxBlockSize);
+        ++iter;
+        BOOST_CHECK_EQUAL( iter->size(), kMaxBlockSize);
+        ++iter;
+        BOOST_CHECK_EQUAL( iter->size(), kMinBlockSize);
+        ++iter;
+        BOOST_CHECK( iter == ob.end());
+
+        size_t toWrite = kMaxBlockSize + kMinBlockSize;
+        ob.wroteTo(toWrite);
+        BOOST_CHECK_EQUAL(ob.size(), toWrite);
+        BOOST_CHECK_EQUAL(ob.freeSpace(), kMaxBlockSize);
+        BOOST_CHECK_EQUAL(ob.numChunks(), 2);
+        BOOST_CHECK_EQUAL(ob.numDataChunks(), 2);
+
+        InputBuffer ib = ob;
+        BOOST_CHECK_EQUAL (std::distance(ib.begin(), ib.end()), 2);
+
+        size_t acc = 0;
+        for(OutputBuffer::const_iterator iter = ob.begin();
+            iter != ob.end(); 
+            ++iter) {
+            acc += iter->size();
+        }
+        BOOST_CHECK_EQUAL(ob.freeSpace(), acc);
+
+        // claiming more bytes written than the free space must throw
+        bool caught = false;
+        try {
+            ob.wroteTo(acc+1);
+        }
+        catch (std::exception &e) {
+            std::cout << "Intentionally triggered exception: " << e.what() << std::endl;
+            caught = true;
+        }
+        BOOST_CHECK_EQUAL(caught, true);
+    }
+}
+
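+// The buffer classes satisfy the Boost.Asio buffer-sequence concepts, so an
+// OutputBuffer can be passed straight to socket.receive() and an InputBuffer
+// to boost::asio::write(); the client/server pair below round-trips a string
+// over a loopback TCP connection to demonstrate it.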
+#ifdef HAVE_BOOST_ASIO
+void server(boost::barrier &b) 
+{
+    using boost::asio::ip::tcp;
+    boost::asio::io_service io_service;
+    tcp::acceptor a(io_service, tcp::endpoint(tcp::v4(), 33333));
+    tcp::socket sock(io_service);
+    a.listen();
+
+    b.wait();
+
+    a.accept(sock);
+    avro::OutputBuffer buf(100);
+
+    size_t length = sock.receive(buf);
+    buf.wroteTo(length);
+    cout << "Server got " << length << " bytes\n";
+
+    InputBuffer rbuf(buf);
+
+    std::string res;
+
+    avro::InputBuffer::const_iterator iter = rbuf.begin();
+    while(iter != rbuf.end() ) {
+        res.append(boost::asio::buffer_cast<const char *>(*iter), boost::asio::buffer_size(*iter));
+        cout << "Received Buffer size: " << boost::asio::buffer_size(*iter) << endl;
+        BOOST_CHECK_EQUAL(length, boost::asio::buffer_size(*iter));
+        cout << "Received Buffer: \"" << res << '"' << endl;
+        ++iter;
+    }
+
+    BOOST_CHECK_EQUAL(res, "hello world");
+}
+
+void TestAsioBuffer()
+{
+    using boost::asio::ip::tcp;
+    BOOST_MESSAGE( "TestAsioBuffer");
+    {
+        boost::barrier b(2);
+
+        boost::thread t(boost::bind(server, boost::ref(b)));
+
+        b.wait();
+
+        // set up the client side of the connection
+        boost::asio::io_service io_service;
+
+        tcp::resolver resolver(io_service);
+        tcp::resolver::query query(tcp::v4(), "localhost", "33333");
+        tcp::resolver::iterator endpoint_iterator = resolver.resolve(query);
+        tcp::resolver::iterator end;
+
+        tcp::socket socket(io_service);
+        boost::system::error_code error = boost::asio::error::host_not_found;
+        while (error && endpoint_iterator != end)
+        {
+          socket.close();
+          socket.connect(*endpoint_iterator++, error);
+        }
+        if (error) {
+          throw boost::system::system_error(error);
+        }
+
+        std::string hello = "hello ";
+        std::string world = "world";
+        avro::OutputBuffer buf;
+        buf.writeTo(hello.c_str(), hello.size());
+        
+        BOOST_CHECK_EQUAL(buf.size(), hello.size());
+
+        avro::OutputBuffer buf2;
+        buf2.writeTo(world.c_str(), world.size());
+        BOOST_CHECK_EQUAL(buf2.size(), world.size());
+
+        buf.append(buf2);
+        BOOST_CHECK_EQUAL(buf.size(), hello.size() + world.size());
+
+        cout << "Distance " << std::distance(buf.begin(), buf.end()) << endl;
+        BOOST_CHECK_EQUAL(std::distance(buf.begin(), buf.end()), 1);
+
+        const avro::InputBuffer rbuf(buf);
+
+        avro::InputBuffer::const_iterator iter = rbuf.begin();
+        while(iter != rbuf.end() ) {
+            std::string str(boost::asio::buffer_cast<const char *>(*iter), boost::asio::buffer_size(*iter));
+            cout << "Buffer size: " << boost::asio::buffer_size(*iter) << endl;
+            cout << "Buffer: \"" << str << '"' << endl;
+            ++iter;
+        }
+
+        cout << "Buffer size " << rbuf.size() << endl;
+
+        std::size_t wrote = boost::asio::write(socket, rbuf);
+        cout << "Wrote " << wrote << endl;
+        BOOST_CHECK_EQUAL(wrote, rbuf.size());
+
+        t.join();
+    }
+}
+#else
+void TestAsioBuffer() 
+{
+    cout << "Skipping asio test\n";
+}
+#endif // HAVE_BOOST_ASIO
+
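+// The split tests read part of a buffer through an istream, then use tellg()
+// with extractData() or discardData() to peel off the consumed prefix while
+// keeping the remainder intact.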
+void TestSplit()
+{
+    BOOST_MESSAGE( "TestSplit");
+    {
+        const std::string str = "This message is to be split";
+
+        avro::OutputBuffer buf;
+        buf.writeTo(str.c_str(), str.size()); 
+
+        char datain[12];
+        avro::istream is(buf);
+        size_t in = static_cast<size_t>(is.readsome(datain, sizeof(datain)));
+        BOOST_CHECK_EQUAL(in, sizeof(datain));
+        BOOST_CHECK_EQUAL(static_cast<size_t>(is.tellg()), sizeof(datain));
+
+        OutputBuffer part2;
+        part2.append(is.getBuffer());
+        BOOST_CHECK_EQUAL(part2.size(), buf.size());
+        InputBuffer part1 = part2.extractData(static_cast<size_t>(is.tellg()));
+
+        BOOST_CHECK_EQUAL(part2.size(), str.size() - in);
+
+        printBuffer(part1);
+        printBuffer(part2);
+    }
+}
+
+void TestSplitOnBorder()
+{
+    BOOST_MESSAGE( "TestSplitOnBorder");
+    {
+
+        const std::string part1 = "This message";
+        const std::string part2 = " is to be split";
+
+        avro::OutputBuffer buf;
+        buf.writeTo(part1.c_str(), part1.size()); 
+        size_t firstChunkSize = buf.size();
+
+        {
+            avro::OutputBuffer tmp;
+            tmp.writeTo(part2.c_str(), part2.size()); 
+            buf.append(tmp);
+            printBuffer(InputBuffer(buf));
+        }
+
+        BOOST_CHECK_EQUAL(buf.numDataChunks(), 2);
+        size_t bufsize = buf.size();
+    
+        boost::scoped_array<char> datain(new char[firstChunkSize]);
+        avro::istream is(buf);
+        size_t in = static_cast<size_t>(is.readsome(&datain[0], firstChunkSize));
+        BOOST_CHECK_EQUAL(in, firstChunkSize);
+
+        OutputBuffer newBuf;
+        newBuf.append(is.getBuffer());
+        newBuf.discardData(static_cast<size_t>(is.tellg()));
+        BOOST_CHECK_EQUAL(newBuf.numDataChunks(), 1);
+
+        BOOST_CHECK_EQUAL(newBuf.size(), bufsize - in);
+
+        cout << is.rdbuf() << endl;
+        printBuffer(newBuf);
+    }
+}
+
+void TestSplitTwice() 
+{
+    BOOST_MESSAGE( "TestSplitTwice");
+    {
+        const std::string msg1 = makeString(30);
+
+        avro::OutputBuffer buf1;
+        buf1.writeTo(msg1.c_str(), msg1.size());
+
+        BOOST_CHECK_EQUAL(buf1.size(), msg1.size());
+
+        printBuffer(buf1);
+
+        avro::istream is(buf1);
+        char buffer[6];
+        is.readsome(buffer, 5);
+        buffer[5] = 0;
+        std::cout << "buffer =" << buffer << std::endl;
+        
+        buf1.discardData(static_cast<size_t>(is.tellg()));
+        printBuffer(buf1);
+
+        avro::istream is2(buf1);
+        is2.seekg(15);
+
+        buf1.discardData(static_cast<size_t>(is2.tellg()));
+        printBuffer(buf1);
+    }
+}
+
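+// BufferReader::copyData() copies a byte range into a fresh InputBuffer
+// without consuming the source; each case below re-checks that the source's
+// size, chunk counts, and free space are untouched afterwards.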
+void TestCopy() 
+{
+    BOOST_MESSAGE( "TestCopy");
+
+    const std::string msg = makeString(30);
+    // Test1, small data, small buffer
+    {
+        std::cout << "Test1\n";
+        // put a small amount of data in the buffer
+        avro::OutputBuffer wb;
+
+        wb.writeTo(msg.c_str(), msg.size());
+
+        BOOST_CHECK_EQUAL(msg.size(), wb.size());
+        BOOST_CHECK_EQUAL(wb.numDataChunks(), 1);
+        BOOST_CHECK_EQUAL(kDefaultBlockSize - msg.size(), 
+                wb.freeSpace());
+
+        // copy starting at offset 5 and copying 10 less bytes
+        BufferReader br(wb);
+        br.seek(5);
+        avro::InputBuffer ib = br.copyData(msg.size() - 10);
+
+        printBuffer(ib);
+
+        BOOST_CHECK_EQUAL(ib.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ib.size(), msg.size()-10);
+
+        // buf 1 should be unchanged
+        BOOST_CHECK_EQUAL(msg.size(), wb.size());
+        BOOST_CHECK_EQUAL(wb.numDataChunks(), 1);
+        BOOST_CHECK_EQUAL(kDefaultBlockSize - msg.size(), 
+                wb.freeSpace());
+
+        // make sure wb is still functional
+        wb.reserve(kDefaultBlockSize);
+        BOOST_CHECK_EQUAL(wb.size(), msg.size());
+        BOOST_CHECK_EQUAL(wb.numChunks(), 2);
+        BOOST_CHECK_EQUAL(kDefaultBlockSize * 2 - msg.size(), 
+                wb.freeSpace());
+    }
+
+    // Test2, small data, large buffer
+    {
+        std::cout << "Test2\n";
+        // put a small amount of data in the buffer
+        const OutputBuffer::size_type bufsize = 3 * kMaxBlockSize;
+
+        avro::OutputBuffer wb(bufsize);
+        BOOST_CHECK_EQUAL(wb.numChunks(), 3);
+        BOOST_CHECK_EQUAL(wb.freeSpace(), bufsize);
+
+        wb.writeTo(msg.c_str(), msg.size());
+
+        BOOST_CHECK_EQUAL(wb.size(), msg.size());
+        BOOST_CHECK_EQUAL(wb.numDataChunks(), 1);
+        BOOST_CHECK_EQUAL(bufsize - msg.size(), 
+                wb.freeSpace());
+
+        BufferReader br(wb);
+        br.seek(5);
+        avro::InputBuffer ib = br.copyData(msg.size() - 10);
+
+        printBuffer(ib);
+
+        BOOST_CHECK_EQUAL(ib.numChunks(), 1);
+        BOOST_CHECK_EQUAL(ib.size(), msg.size()-10);
+
+        // wb should be unchanged
+        BOOST_CHECK_EQUAL(msg.size(), wb.size());
+        BOOST_CHECK_EQUAL(wb.numChunks(), 3);
+        BOOST_CHECK_EQUAL(wb.numDataChunks(), 1);
+        BOOST_CHECK_EQUAL(bufsize - msg.size(), wb.freeSpace());
+
+        // reserving a small amount should have no effect
+        wb.reserve(1);
+        BOOST_CHECK_EQUAL(msg.size(), wb.size());
+        BOOST_CHECK_EQUAL(wb.numChunks(), 3);
+        BOOST_CHECK_EQUAL(bufsize - msg.size(), wb.freeSpace());
+
+        // reserve more (will get extra block)
+        wb.reserve(bufsize);
+        BOOST_CHECK_EQUAL(msg.size(), wb.size());
+        BOOST_CHECK_EQUAL(wb.numChunks(), 4);
+        BOOST_CHECK_EQUAL(kMaxBlockSize * 3 - msg.size() + kMinBlockSize, 
+                wb.freeSpace());
+    }
+
+    // Test3 Border case, buffer is exactly full
+    {
+        std::cout << "Test3\n";
+        const OutputBuffer::size_type bufsize = 2 * kDefaultBlockSize;
+        avro::OutputBuffer wb;
+
+        for(unsigned i = 0; i < bufsize; ++i) {
+            wb.writeTo('a');
+        }
+
+        BOOST_CHECK_EQUAL(wb.size(), bufsize);
+        BOOST_CHECK_EQUAL(wb.freeSpace(), 0U);
+        BOOST_CHECK_EQUAL(wb.numChunks(), 0);
+        BOOST_CHECK_EQUAL(wb.numDataChunks(), 2);
+
+        // copy where the chunks overlap
+        BufferReader br(wb);
+        br.seek(bufsize/2 - 10);
+        avro::InputBuffer ib = br.copyData(20);
+
+        printBuffer(ib);
+
+        BOOST_CHECK_EQUAL(ib.size(), 20U);
+        BOOST_CHECK_EQUAL(ib.numChunks(), 2);
+
+        // wb should be unchanged
+        BOOST_CHECK_EQUAL(wb.size(), bufsize);
+        BOOST_CHECK_EQUAL(wb.freeSpace(), 0U);
+        BOOST_CHECK_EQUAL(wb.numDataChunks(), 2);
+    }
+
+    // Test4, no data 
+    {
+        const OutputBuffer::size_type bufsize = 2 * kMaxBlockSize;
+        std::cout << "Test4\n";
+        avro::OutputBuffer wb(bufsize);
+        BOOST_CHECK_EQUAL(wb.numChunks(), 2);
+        BOOST_CHECK_EQUAL(wb.size(), 0U);
+        BOOST_CHECK_EQUAL(wb.freeSpace(), bufsize);
+
+        avro::InputBuffer ib;
+        // seeking into or copying from an empty buffer must throw
+        bool caught = false;
+        try {
+            BufferReader br(wb);
+            br.seek(10);
+        }
+        catch (std::exception &e) {
+            cout << "Intentionally triggered exception: " << e.what() << endl;
+            caught = true;
+        }
+        BOOST_CHECK_EQUAL(caught, true);
+
+        caught = false;
+        try {
+            BufferReader br(wb);
+            ib = br.copyData(10);
+        }
+        catch (std::exception &e) {
+            cout << "Intentionally triggered exception: " << e.what() << endl;
+            caught = true;
+        }
+        BOOST_CHECK_EQUAL(caught, true);
+
+        BOOST_CHECK_EQUAL(ib.numChunks(), 0);
+        BOOST_CHECK_EQUAL(ib.size(), 0U);
+
+        // wb should keep all blocks remaining
+        BOOST_CHECK_EQUAL(wb.numChunks(), 2);
+        BOOST_CHECK_EQUAL(wb.size(), 0U);
+        BOOST_CHECK_EQUAL(wb.freeSpace(), bufsize);
+    }
+}
+
+// reproduces a sequence of buffer operations that once caused a crash
+void TestBug()  
+{
+    BOOST_MESSAGE( "TestBug");
+    {
+        OutputBuffer rxBuf;
+        OutputBuffer  buf;
+        rxBuf.reserve(64 * 1024);
+
+        rxBuf.wroteTo(2896);
+
+        {
+            avro::InputBuffer ib(rxBuf.extractData());
+            buf.append(ib);
+        }
+        
+        buf.discardData(61);
+
+        rxBuf.reserve(64 * 1024);
+        rxBuf.wroteTo(381);
+
+        {
+            avro::InputBuffer ib(rxBuf.extractData());
+            buf.append(ib);
+        }
+
+        buf.discardData(3216);
+
+
+        rxBuf.reserve(64 * 1024);
+    }
+}
+
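+// appendForeignData() lets a buffer reference caller-owned memory and takes a
+// callback to invoke once no buffer references that memory any more. The
+// safeToDelete flag asserts the callback fires only after the last copy goes
+// away (TestForeign) or after discardData() drops the foreign chunk
+// (TestForeignDiscard).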
+bool safeToDelete = false;
+
+void deleteForeign(const std::string &val) 
+{
+    std::cout << "Deleting foreign string containing " << val << '\n';
+    BOOST_CHECK(safeToDelete);
+}
+
+void TestForeign ()  
+{
+    BOOST_MESSAGE( "TestForeign");
+    {
+        std::string hello = "hello ";
+        std::string there = "there ";
+        std::string world = "world ";
+
+        OutputBuffer copy;
+
+        {
+            OutputBuffer buf;
+            buf.writeTo(hello.c_str(), hello.size());
+            buf.appendForeignData(there.c_str(), there.size(), boost::bind(&deleteForeign, there));
+            buf.writeTo(world.c_str(), world.size());
+        
+            printBuffer(buf);
+            BOOST_CHECK_EQUAL(buf.size(), 18U);
+            copy = buf;
+        }
+        std::cout << "Leaving inner scope\n";
+        safeToDelete = true;
+    }
+    std::cout << "Leaving outer scope\n";
+    safeToDelete = false;
+}
+
+void TestForeignDiscard ()  
+{
+    BOOST_MESSAGE( "TestForeign");
+    {
+        std::string hello = "hello ";
+        std::string again = "again ";
+        std::string there = "there ";
+        std::string world = "world ";
+
+        OutputBuffer buf;
+        buf.writeTo(hello.c_str(), hello.size());
+        buf.appendForeignData(again.c_str(), again.size(), boost::bind(&deleteForeign, again));
+        buf.appendForeignData(there.c_str(), there.size(), boost::bind(&deleteForeign, there));
+        buf.writeTo(world.c_str(), world.size());
+        
+        printBuffer(buf);
+        BOOST_CHECK_EQUAL(buf.size(), 24U);
+
+        // discard some data including half the foreign buffer
+        buf.discardData(9);
+        printBuffer(buf);
+        BOOST_CHECK_EQUAL(buf.size(), 15U);
+        
+        // discard some more data, which will lop off the first foreign buffer
+        safeToDelete = true;
+        buf.discardData(6);
+        safeToDelete = false;
+        printBuffer(buf);
+        BOOST_CHECK_EQUAL(buf.size(), 9U);
+
+        // discard some more data, which will lop off the second foreign buffer
+        safeToDelete = true;
+        buf.discardData(3);
+        safeToDelete = false;
+        printBuffer(buf);
+        BOOST_CHECK_EQUAL(buf.size(), 6U);
+    }
+}
+
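+// Buffers can be streamed to std::cout via the printing support in
+// BufferPrint.hh; this test just exercises the formatted dump.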
+void TestPrinter()
+{
+    BOOST_MESSAGE( "TestPrinter");
+    {
+        OutputBuffer ob;
+        addDataToBuffer(ob, 128);
+
+        std::cout << ob << std::endl;
+    }
+}
+
+struct BufferTestSuite : public boost::unit_test::test_suite
+{
+    BufferTestSuite()  : 
+        boost::unit_test::test_suite("BufferTestSuite")
+    {
+        add (BOOST_TEST_CASE( TestReserve ));
+        add (BOOST_TEST_CASE( TestGrow ));
+        add (BOOST_TEST_CASE( TestDiscard ));
+        add (BOOST_TEST_CASE( TestConvertToInput ));
+        add (BOOST_TEST_CASE( TestExtractToInput ));
+        add (BOOST_TEST_CASE( TestAppend ));
+        add (BOOST_TEST_CASE( TestBufferStream ));
+        add (BOOST_TEST_CASE( TestBufferStreamEof ));
+        add (BOOST_TEST_CASE( TestSeekAndTell ));
+        add (BOOST_TEST_CASE( TestReadSome ));
+        add (BOOST_TEST_CASE( TestSeek));
+        add (BOOST_TEST_CASE( TestIterator));
+        add (BOOST_TEST_CASE( TestAsioBuffer));
+        add (BOOST_TEST_CASE( TestSplit));
+        add (BOOST_TEST_CASE( TestSplitOnBorder));
+        add (BOOST_TEST_CASE( TestSplitTwice));
+        add (BOOST_TEST_CASE( TestCopy));
+        add (BOOST_TEST_CASE( TestBug));
+        add (BOOST_TEST_CASE( TestForeign));
+        add (BOOST_TEST_CASE( TestForeignDiscard));
+        add (BOOST_TEST_CASE( TestPrinter));
+    }
+};
+
+boost::unit_test::test_suite*
+init_unit_test_suite( int, char* [] ) 
+{
+    boost::unit_test::test_suite *test (BOOST_TEST_SUITE ("Buffer Unit Tests"));
+    test->add (new BufferTestSuite() );
+
+    return test;
+}
+
diff --git a/lang/c++/test/precompile.cc b/lang/c++/test/precompile.cc
new file mode 100644
index 0000000..8ee9263
--- /dev/null
+++ b/lang/c++/test/precompile.cc
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+#include <fstream>
+
+#include "Compiler.hh"
+#include "ValidSchema.hh"
+
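+// Reads an Avro JSON schema (argv[1] or stdin), compiles it, and writes the
+// flattened node listing (argv[2] or stdout), presumably as input for the
+// code-generation step.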
+int main(int argc, char** argv)
+{
+    int ret = 0;
+    try {
+        avro::ValidSchema schema;
+        if (argc > 1) {
+            std::ifstream in(argv[1]);
+            avro::compileJsonSchema(in, schema);
+        } else {
+            avro::compileJsonSchema(std::cin, schema);
+        }
+
+        if (argc > 2) {
+            std::ofstream out(argv[2]);
+            schema.toFlatList(out);
+        } else {
+            schema.toFlatList(std::cout);
+        }
+    }
+    catch (std::exception &e) {
+        std::cerr << "Failed to parse or compile schema: " << e.what() << std::endl;
+        ret = 1;
+    }
+
+    return ret;
+}
diff --git a/lang/c++/test/testgentest.cc b/lang/c++/test/testgentest.cc
new file mode 100644
index 0000000..7305ccb
--- /dev/null
+++ b/lang/c++/test/testgentest.cc
@@ -0,0 +1,556 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <string.h>
+#include <stdlib.h>
+#include <fstream>
+#include <sstream>
+#include <boost/test/included/unit_test_framework.hpp>
+
+#include "testgen.hh" // < generated header
+#include "testgen2.hh" // < generated header
+
+#include "Serializer.hh"
+#include "Writer.hh"
+#include "Reader.hh"
+#include "Node.hh"
+#include "ValidSchema.hh"
+#include "Compiler.hh"
+#include "ResolvingReader.hh"
+#include "ResolverSchema.hh"
+#include "buffer/BufferPrint.hh"
+
+std::string gWriter ("jsonschemas/bigrecord");
+std::string gReader ("jsonschemas/bigrecord2");
+
+void printRecord(testgen::RootRecord &record)
+{
+    using namespace testgen;
+    std::cout << "mylong " << record.mylong << '\n';
+    std::cout << "inval1 " << record.nestedrecord.inval1 << '\n';
+    std::cout << "inval2 " << record.nestedrecord.inval2 << '\n';
+    std::cout << "inval3 " << record.nestedrecord.inval3 << '\n';
+
+    Map_of_int::MapType::const_iterator mapiter = record.mymap.value.begin();
+    while(mapiter != record.mymap.value.end()){
+        std::cout << "mymap " << mapiter->first << " " << mapiter->second << '\n';
+        ++mapiter;
+    }
+
+    Array_of_double::ArrayType::iterator arrayiter = record.myarray.value.begin();
+    while(arrayiter != record.myarray.value.end()) {
+        std::cout << "myarray " << *arrayiter << '\n';
+        ++arrayiter;
+    }
+
+    std::cout << "myeum = " << record.myenum.value << '\n';
+
+    if(record.myunion.choice == 1) {
+        const Map_of_int &theMap = record.myunion.getValue<Map_of_int>();
+        mapiter = theMap.value.begin();
+        while(mapiter != theMap.value.end()){
+            std::cout << "unionmap " << mapiter->first << " " << mapiter->second << '\n';
+            ++mapiter;
+        }
+    }
+
+    if(record.anotherunion.choice == 0) {
+        std::cout << "unionbytes ";
+        const std::vector<uint8_t> &val = record.anotherunion.getValue< std::vector<uint8_t> >();
+        for(size_t i = 0; i < val.size(); ++i) {
+            std::cout << i << ":" << static_cast<int>(val[i]) << " ";
+        }
+        std::cout  << '\n';
+    }
+    
+    std::cout << "mybool " << record.mybool << '\n';
+    std::cout << "inval1 " << record.anothernested.inval1 << '\n';
+    std::cout << "inval2 " << record.anothernested.inval2 << '\n';
+    std::cout << "inval3 " << record.anothernested.inval3 << '\n';
+
+    std::cout << "fixed ";
+    for(size_t i = 0; i < record.myfixed.fixedSize; ++i) {
+        std::cout << i << ":" << static_cast<int>(record.myfixed.value[i]) << " ";
+    }
+    std::cout  << '\n';
+
+    std::cout << "anotherint " << record.anotherint << '\n';
+
+    std::cout << "bytes ";
+    for(size_t i = 0; i < record.bytes.size(); ++i) {
+        std::cout << i << ":" << static_cast<int>(record.bytes[i]) << " ";
+    }
+    std::cout  << '\n';
+}
+
+void printRecord(testgen2::RootRecord &record)
+{
+    using namespace testgen2;
+    std::cout << "mylong " << record.mylong << '\n';
+    std::cout << "inval1 " << record.nestedrecord.inval1 << '\n';
+    std::cout << "inval2 " << record.nestedrecord.inval2 << '\n';
+    std::cout << "inval3 " << record.nestedrecord.inval3 << '\n';
+
+    Map_of_long::MapType::const_iterator mapiter = record.mymap.value.begin();
+    while(mapiter != record.mymap.value.end()){
+        std::cout << "mymap " << mapiter->first << " " << mapiter->second << '\n';
+        ++mapiter;
+    }
+
+    Array_of_double::ArrayType::iterator arrayiter = record.myarray.value.begin();
+    while(arrayiter != record.myarray.value.end()) {
+        std::cout << "myarray " << *arrayiter << '\n';
+        ++arrayiter;
+    }
+
+    std::cout << "myeum = " << record.myenum.value << '\n';
+
+    if(record.myunion.choice == 1) {
+        const Map_of_float &theMap = record.myunion.getValue<Map_of_float>();
+        Map_of_float::MapType::const_iterator mapiter = theMap.value.begin();
+        while(mapiter != theMap.value.end()){
+            std::cout << "unionmap " << mapiter->first << " " << mapiter->second << '\n';
+            ++mapiter;
+        }
+    }
+
+    std::cout << "unionbytes ";
+    const std::vector<uint8_t> &val = record.anotherunion;
+    for(size_t i = 0; i < val.size(); ++i) {
+        std::cout << i << ":" << static_cast<int>(val[i]) << " ";
+    }
+    std::cout  << '\n';
+    
+    std::cout << "inval1 " << record.anothernested.inval1 << '\n';
+    std::cout << "inval2 " << record.anothernested.inval2 << '\n';
+    std::cout << "inval3 " << record.anothernested.inval3 << '\n';
+
+    if(record.myfixed.choice == 1) {
+        const md5 &myfixed = record.myfixed.getValue<md5>();
+        std::cout << "fixed ";
+        for(size_t i = 0; i < myfixed.fixedSize; ++i) {
+            std::cout << i << ":" << static_cast<int>(myfixed.value[i]) << " ";
+        }
+        std::cout  << '\n';
+    }
+
+    std::cout << "anotherint " << record.anotherint << '\n';
+
+    std::cout << "bytes ";
+    for(size_t i = 0; i < record.bytes.size(); ++i) {
+        std::cout << i << ":" << static_cast<int>(record.bytes[i]) << " ";
+    }
+    std::cout  << '\n';
+    std::cout << "newbool " << record.newbool << '\n';
+}
+
+
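+// Populates the generated RootRecord with fixture values; the checkOk()
+// comparisons verify the same values after each serialize/parse round trip.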
+void setRecord(testgen::RootRecord &myRecord) 
+{
+    using namespace testgen;
+
+    uint8_t fixed[] =  {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15};
+
+    myRecord.mylong = 212;
+    myRecord.nestedrecord.inval1 = std::numeric_limits<double>::min();
+    myRecord.nestedrecord.inval2 = "hello world";
+    myRecord.nestedrecord.inval3 = std::numeric_limits<int32_t>::max();
+
+    Map_of_int::GenericSetter setter = myRecord.mymap.genericSetter;
+    Map_of_int::ValueType *val = setter(&myRecord.mymap, "one");
+    *val = 100;
+    val = setter(&myRecord.mymap, "two");
+    *val = 200;
+
+    myRecord.myarray.addValue(3434.9);
+    myRecord.myarray.addValue(7343.9);
+    myRecord.myarray.addValue(-63445.9);
+    myRecord.myenum.value = testgen::ExampleEnum::one;
+    testgen::Map_of_int map;
+    map.addValue("one", 1);
+    map.addValue("two", 2);
+    myRecord.myunion.set_Map_of_int(map);
+    std::vector<uint8_t> vec;
+    vec.push_back(1);
+    vec.push_back(2);
+    myRecord.anotherunion.set_bytes(vec);
+    myRecord.mybool = true;
+    myRecord.anothernested.inval1 = std::numeric_limits<double>::max();
+    myRecord.anothernested.inval2 = "goodbye world";
+    myRecord.anothernested.inval3 = std::numeric_limits<int32_t>::min();
+    memcpy(myRecord.myfixed.value, fixed, testgen::md5::fixedSize);
+    myRecord.anotherint = 4534;
+    myRecord.bytes.push_back(10);
+    myRecord.bytes.push_back(20);
+}
+
+struct TestCodeGenerator {
+
+    void serializeToScreen()
+    {
+        std::cout << "Serialize:\n";
+        avro::Writer writer;
+
+        avro::serialize(writer, myRecord_);
+        std::cout << writer.buffer();
+        std::cout << "end Serialize\n";
+    }
+
+    void serializeToScreenValid()
+    {
+        std::cout << "Validated Serialize:\n";
+        avro::ValidatingWriter writer(schema_);
+
+        avro::serialize(writer, myRecord_);
+        std::cout << writer.buffer();
+        std::cout << "end Validated Serialize\n";
+    }
+
+    void checkArray(const testgen::Array_of_double &a1, const testgen::Array_of_double &a2) 
+    {
+        BOOST_CHECK_EQUAL(a1.value.size(), 3U);
+        BOOST_CHECK_EQUAL(a1.value.size(), a2.value.size());
+        for(size_t i = 0; i < a1.value.size(); ++i) {
+            BOOST_CHECK_EQUAL(a1.value[i], a2.value[i]);
+        }
+    }
+
+    void checkMap(const testgen::Map_of_int &map1, const testgen::Map_of_int &map2) 
+    {
+        BOOST_CHECK_EQUAL(map1.value.size(), map2.value.size());
+        testgen::Map_of_int::MapType::const_iterator iter1 = map1.value.begin();
+        testgen::Map_of_int::MapType::const_iterator end   = map1.value.end();
+        testgen::Map_of_int::MapType::const_iterator iter2 = map2.value.begin();
+
+        while(iter1 != end) {
+            BOOST_CHECK_EQUAL(iter1->first, iter2->first);
+            BOOST_CHECK_EQUAL(iter1->second, iter2->second);
+            ++iter1;
+            ++iter2;
+        }
+    }
+
+    void checkBytes(const std::vector<uint8_t> &v1, const std::vector<uint8_t> &v2)
+    {
+        BOOST_CHECK_EQUAL(v1.size(), 2U);
+        BOOST_CHECK_EQUAL(v1.size(), v2.size());
+        for(size_t i = 0; i < v1.size(); ++i) {
+            BOOST_CHECK_EQUAL(v1[i], v2[i]);
+        }
+    }
+
+    void checkNested(const testgen::Nested &rec1, const testgen::Nested &rec2)
+    {
+        BOOST_CHECK_EQUAL(rec1.inval1, rec2.inval1);
+        BOOST_CHECK_EQUAL(rec1.inval2, rec2.inval2);
+        BOOST_CHECK_EQUAL(rec1.inval3, rec2.inval3);
+    }
+
+    void checkOk(const testgen::RootRecord &rec1, const testgen::RootRecord &rec2)
+    {
+        BOOST_CHECK_EQUAL(rec1.mylong, rec2.mylong);
+
+        checkNested(rec1.nestedrecord, rec2.nestedrecord);
+        checkMap(rec1.mymap, rec2.mymap);
+        checkArray(rec1.myarray, rec2.myarray);
+
+        BOOST_CHECK_EQUAL(rec1.myenum.value, rec2.myenum.value);
+
+        BOOST_CHECK_EQUAL(rec1.myunion.choice, rec2.myunion.choice);
+        // in this test the union choice is known to be 1
+        {
+            BOOST_CHECK_EQUAL(rec1.myunion.choice, 1);
+            checkMap(rec1.myunion.getValue<testgen::Map_of_int>(), rec2.myunion.getValue<testgen::Map_of_int>());
+        }
+
+        BOOST_CHECK_EQUAL(rec1.anotherunion.choice, rec2.anotherunion.choice);
+        // in this test the union choice is known to be 0
+        {
+            BOOST_CHECK_EQUAL(rec1.anotherunion.choice, 0);
+            typedef std::vector<uint8_t> mytype;
+            checkBytes(rec1.anotherunion.getValue<mytype>(), rec2.anotherunion.getValue<testgen::Union_of_bytes_null::T0>());
+        }
+
+        checkNested(rec1.anothernested, rec2.anothernested);
+
+        BOOST_CHECK_EQUAL(rec1.mybool, rec2.mybool);
+
+        for(int i = 0; i < static_cast<int>(testgen::md5::fixedSize); ++i) {
+            BOOST_CHECK_EQUAL(rec1.myfixed.value[i], rec2.myfixed.value[i]);
+        }
+        BOOST_CHECK_EQUAL(rec1.anotherint, rec2.anotherint);
+
+        checkBytes(rec1.bytes, rec2.bytes);
+    }
+
+    void testParser()
+    {
+        avro::Writer s;
+
+        avro::serialize(s, myRecord_); 
+
+        testgen::RootRecord inRecord;
+        avro::Reader p(s.buffer());
+        avro::parse(p, inRecord);
+
+        checkOk(myRecord_, inRecord);
+    }
+
+
+    void testParserValid()
+    {
+        avro::ValidatingWriter s (schema_);
+
+        avro::serialize(s, myRecord_);
+
+        testgen::RootRecord inRecord;
+        avro::ValidatingReader p(schema_, s.buffer());
+        avro::parse(p, inRecord);
+
+        checkOk(myRecord_, inRecord);
+    }
+
+    void testNameIndex()
+    {
+        const avro::NodePtr &node = schema_.root();
+        size_t index = 0;
+        bool found = node->nameIndex("anothernested", index);
+        BOOST_CHECK_EQUAL(found, true);
+        BOOST_CHECK_EQUAL(index, 8U);
+
+        found = node->nameIndex("myenum", index);
+        BOOST_CHECK_EQUAL(found, true);
+        BOOST_CHECK_EQUAL(index, 4U);
+
+        const avro::NodePtr &enumNode = node->leafAt(index);
+        found = enumNode->nameIndex("one", index); 
+        BOOST_CHECK_EQUAL(found, true);
+        BOOST_CHECK_EQUAL(index, 1U);
+    }
+
+    void test() 
+    {
+        std::cout << "Running code generation tests\n";
+
+        testNameIndex();
+
+        serializeToScreen();
+        serializeToScreenValid();
+
+        testParser();
+        testParserValid();
+
+        std::cout << "Finished code generation tests\n";
+    }
+
+    TestCodeGenerator() 
+    {
+        setRecord(myRecord_);
+        std::ifstream in(gWriter.c_str());
+        avro::compileJsonSchema(in, schema_);
+    }
+
+    testgen::RootRecord myRecord_;
+    avro::ValidSchema schema_;
+
+};
+
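+// Schema resolution: data written with the bigrecord schema is read through
+// the differing bigrecord2 schema via ResolverSchema/ResolvingReader. The
+// checks cover promoted map values (int read as long or float), a remapped
+// enum symbol, a reordered union, and a field (newbool) unknown to the
+// writer.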
+struct TestSchemaResolving {
+
+    void checkArray(const testgen::Array_of_double &a1, const testgen2::Array_of_double &a2) 
+    {
+        BOOST_CHECK_EQUAL(a1.value.size(), 3U);
+        BOOST_CHECK_EQUAL(a1.value.size(), a2.value.size());
+        for(size_t i = 0; i < a1.value.size(); ++i) {
+            BOOST_CHECK_EQUAL(a1.value[i], a2.value[i]);
+        }
+    }
+
+    void checkMap(const testgen::Map_of_int &map1, const testgen2::Map_of_long &map2) 
+    {
+        BOOST_CHECK_EQUAL(map1.value.size(), map2.value.size());
+        testgen::Map_of_int::MapType::const_iterator iter1 = map1.value.begin();
+        testgen::Map_of_int::MapType::const_iterator end   = map1.value.end();
+        testgen2::Map_of_long::MapType::const_iterator iter2 = map2.value.begin();
+
+        while(iter1 != end) {
+            BOOST_CHECK_EQUAL(iter1->first, iter2->first);
+            BOOST_CHECK_EQUAL(static_cast<float>(iter1->second), iter2->second);
+            ++iter1;
+            ++iter2;
+        }
+    }
+
+    void checkMap(const testgen::Map_of_int &map1, const testgen2::Map_of_float &map2) 
+    {
+        BOOST_CHECK_EQUAL(map1.value.size(), map2.value.size());
+        testgen::Map_of_int::MapType::const_iterator iter1 = map1.value.begin();
+        testgen::Map_of_int::MapType::const_iterator end   = map1.value.end();
+        testgen2::Map_of_float::MapType::const_iterator iter2 = map2.value.begin();
+
+        while(iter1 != end) {
+            BOOST_CHECK_EQUAL(iter1->first, iter2->first);
+            BOOST_CHECK_EQUAL(static_cast<int64_t>(iter1->second), iter2->second);
+            ++iter1;
+            ++iter2;
+        }
+    }
+
+    void checkBytes(const std::vector<uint8_t> &v1, const std::vector<uint8_t> &v2)
+    {
+        BOOST_CHECK_EQUAL(v1.size(), 2U);
+        BOOST_CHECK_EQUAL(v1.size(), v2.size());
+        for(size_t i = 0; i < v1.size(); ++i) {
+            BOOST_CHECK_EQUAL(v1[i], v2[i]);
+        }
+    }
+
+    void checkNested(const testgen::Nested &rec1, const testgen2::Nested &rec2)
+    {
+        BOOST_CHECK_EQUAL(rec1.inval1, rec2.inval1);
+        BOOST_CHECK_EQUAL(rec1.inval2, rec2.inval2);
+        BOOST_CHECK_EQUAL(rec1.inval3, rec2.inval3);
+    }
+
+    void checkOk(const testgen::RootRecord &rec1, const testgen2::RootRecord &rec2)
+    {
+        BOOST_CHECK_EQUAL(rec1.mylong, rec2.mylong);
+
+        checkNested(rec1.nestedrecord, rec2.nestedrecord);
+        checkMap(rec1.mymap, rec2.mymap);
+        checkArray(rec1.myarray, rec2.myarray);
+
+        // enum was remapped from 1 to 2
+        BOOST_CHECK_EQUAL(rec1.myenum.value, 1);
+        BOOST_CHECK_EQUAL(rec2.myenum.value, 2);
+
+        // in this test the writer's union choice is known to be 1
+        {
+            BOOST_CHECK_EQUAL(rec1.myunion.choice, 1);
+            BOOST_CHECK_EQUAL(rec2.myunion.choice, 2);
+            checkMap(rec1.myunion.getValue<testgen::Map_of_int>(), rec2.myunion.getValue<testgen2::Map_of_float>());
+        }
+
+        {
+            BOOST_CHECK_EQUAL(rec1.anotherunion.choice, 0);
+            typedef std::vector<uint8_t> mytype;
+            checkBytes(rec1.anotherunion.getValue<mytype>(), rec2.anotherunion);
+        }
+
+        checkNested(rec1.anothernested, rec2.anothernested);
+
+        BOOST_CHECK_EQUAL(rec2.newbool, false);
+
+        BOOST_CHECK_EQUAL(rec2.myfixed.choice, 1);
+        {
+            const testgen2::md5 &myfixed2 = rec2.myfixed.getValue<testgen2::md5>();
+            for(int i = 0; i < static_cast<int>(testgen::md5::fixedSize); ++i) {
+                BOOST_CHECK_EQUAL(rec1.myfixed.value[i], myfixed2.value[i]);
+            }
+        }
+    }
+
+    avro::InputBuffer serializeWriteRecordToBuffer()
+    {
+        avro::Writer s;
+        avro::serialize(s, writeRecord_);
+        return s.buffer();
+    }
+
+    void parseData(const avro::InputBuffer &buf, avro::ResolverSchema &xSchema)
+    {
+        avro::ResolvingReader r(xSchema, buf);
+
+        avro::parse(r, readRecord_);
+    }
+
+    void test()
+    {
+        std::cout << "Running schema resolution tests\n";
+        testgen2::RootRecord_Layout layout;
+
+        avro::ResolverSchema xSchema(writerSchema_, readerSchema_, layout);
+
+        printRecord(writeRecord_);
+
+        avro::InputBuffer buffer = serializeWriteRecordToBuffer();
+        parseData(buffer, xSchema);
+
+        printRecord(readRecord_);
+
+        checkOk(writeRecord_, readRecord_);
+        std::cout << "Finished schema resolution tests\n";
+    }
+
+    TestSchemaResolving()
+    {
+        setRecord(writeRecord_);
+        std::ifstream win(gWriter.c_str());
+        avro::compileJsonSchema(win, writerSchema_);
+
+        std::ifstream rin(gReader.c_str());
+        avro::compileJsonSchema(rin, readerSchema_);
+    }
+
+    testgen::RootRecord writeRecord_;
+    avro::ValidSchema writerSchema_;
+
+    testgen2::RootRecord readRecord_;
+    avro::ValidSchema readerSchema_;
+};
+
+template<typename T>
+void addTestCase(boost::unit_test::test_suite &test)
+{
+    boost::shared_ptr<T> newtest( new T );
+    test.add( BOOST_CLASS_TEST_CASE( &T::test, newtest ));
+}
+
+boost::unit_test::test_suite*
+init_unit_test_suite( int argc, char* argv[] ) 
+{
+    using namespace boost::unit_test;
+
+    const char *srcPath = getenv("top_srcdir");
+
+    if(srcPath) {
+        std::string srcPathStr(srcPath);
+        gWriter = srcPathStr + '/' + gWriter;
+        gReader = srcPathStr + '/' + gReader;
+    }
+    else {
+        if(argc > 1) {
+            gWriter = argv[1];
+        }
+
+        if(argc > 2) {
+            gReader = argv[2];
+        }
+    }
+    std::cout << "Using writer schema " << gWriter << std::endl;
+    std::cout << "Using reader schema " << gReader << std::endl;
+
+    test_suite* test= BOOST_TEST_SUITE( "Avro C++ unit test suite" );
+
+    addTestCase<TestCodeGenerator>(*test);
+    addTestCase<TestSchemaResolving>(*test);
+
+    return test;
+}
+
diff --git a/lang/c++/test/testparser.cc b/lang/c++/test/testparser.cc
new file mode 100644
index 0000000..1df384f
--- /dev/null
+++ b/lang/c++/test/testparser.cc
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "Compiler.hh"
+#include "ValidSchema.hh"
+
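+// Minimal round trip for the schema compiler: read a JSON schema from stdin,
+// compile it, and print it back as JSON on stdout.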
+int main()
+{
+    int ret = 0;
+    try {
+        avro::ValidSchema schema;
+        avro::compileJsonSchema(std::cin, schema);
+
+        schema.toJson(std::cout);
+    }
+    catch (std::exception &e) {
+        std::cerr << "Failed to parse or compile schema: " << e.what() << std::endl;
+        ret = 1;
+    }
+
+    return ret;
+}
diff --git a/lang/c++/test/unittest.cc b/lang/c++/test/unittest.cc
new file mode 100644
index 0000000..5aef9b7
--- /dev/null
+++ b/lang/c++/test/unittest.cc
@@ -0,0 +1,794 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <iostream>
+#include <fstream>
+#include <sstream>
+#include <boost/test/included/unit_test_framework.hpp>
+
+#include "Zigzag.hh"
+#include "Node.hh"
+#include "Schema.hh"
+#include "ValidSchema.hh"
+#include "Serializer.hh"
+#include "Parser.hh"
+#include "Compiler.hh"
+#include "SchemaResolution.hh"
+#include "buffer/BufferStream.hh"
+#include "buffer/BufferPrint.hh"
+
+#include "AvroSerialize.hh"
+
+using namespace avro;
+
+static const uint8_t fixeddata[16] = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15};
+
+#ifdef max
+#undef max
+#endif
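+
+// TestSchema builds the RootRecord schema programmatically with the Schema
+// classes, writes one datum with both the raw and the validating serializer,
+// and reads it back with both parsers, checking each value along the way.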
+struct TestSchema
+{
+    TestSchema() 
+    {}
+
+    void createExampleSchema()
+    {
+        // First construct our complex data type:
+        avro::RecordSchema myRecord("complex");
+   
+        // Now populate my record with fields (each field is another schema):
+        myRecord.addField("real", avro::DoubleSchema());
+        myRecord.addField("imaginary", avro::DoubleSchema());
+
+        // Using the same complex record as above, let's make a schema
+        // for an array of these records
+  
+        avro::ArraySchema complexArray(myRecord); 
+
+        avro::ValidSchema validComplexArray(complexArray);
+        validComplexArray.toJson(std::cout);
+    }
+
+    void buildSchema()
+    {
+        RecordSchema record("RootRecord");
+
+        record.addField("mylong", LongSchema());
+
+        IntSchema intSchema;
+        avro::MapSchema map = MapSchema(IntSchema());
+
+        record.addField("mymap", map);
+
+        ArraySchema array = ArraySchema(DoubleSchema());
+
+        const std::string s("myarray");
+        record.addField(s, array);
+
+        EnumSchema myenum("ExampleEnum");
+        myenum.addSymbol("zero");
+        myenum.addSymbol("one");
+        myenum.addSymbol("two");
+        myenum.addSymbol("three");
+
+        bool caught = false;
+        try {
+            myenum.addSymbol("three");
+        }
+        catch(Exception &e) {
+            std::cout << "(intentional) exception: " << e.what() << '\n';
+            caught = true;
+        }
+        BOOST_CHECK_EQUAL(caught, true);
+
+        record.addField("myenum", myenum); 
+
+        UnionSchema onion;
+        onion.addType(NullSchema());
+        onion.addType(map);
+        onion.addType(FloatSchema());
+       
+        record.addField("myunion", onion); 
+
+        RecordSchema nestedRecord("NestedRecord");
+        nestedRecord.addField("floatInNested", FloatSchema());
+
+        record.addField("nested", nestedRecord);
+
+        record.addField("mybool", BoolSchema());
+        FixedSchema fixed(16, "fixed16");
+        record.addField("myfixed", fixed);
+
+        caught = false;
+        try {
+            record.addField("mylong", LongSchema());
+        }
+        catch(Exception &e) {
+            std::cout << "(intentional) exception: " << e.what() << '\n';
+            caught = true;
+        }
+        BOOST_CHECK_EQUAL(caught, true);
+
+        record.addField("mylong2", LongSchema());
+
+        record.addField("anotherint", intSchema);
+
+        schema_.setSchema(record);
+    }
+
+    void checkNameLookup() {
+        NodePtr node = schema_.root();
+
+        size_t index = 0;
+        bool found = node->nameIndex("mylongxxx", index);
+        BOOST_CHECK_EQUAL(found, false);
+
+        found = node->nameIndex("mylong", index);
+        BOOST_CHECK_EQUAL(found, true);
+        BOOST_CHECK_EQUAL(index, 0U);
+
+        found = node->nameIndex("mylong2", index);
+        BOOST_CHECK_EQUAL(found, true);
+        BOOST_CHECK_EQUAL(index, 8U);
+
+        found = node->nameIndex("myenum", index);
+        BOOST_CHECK_EQUAL(found, true);
+        NodePtr enumNode = node->leafAt(index);
+
+        found = enumNode->nameIndex("one", index);
+        BOOST_CHECK_EQUAL(found, true);
+        BOOST_CHECK_EQUAL(index, 1U);
+
+        found = enumNode->nameIndex("three", index);
+        BOOST_CHECK_EQUAL(found, true);
+        BOOST_CHECK_EQUAL(index, 3U);
+
+        found = enumNode->nameIndex("four", index);
+        BOOST_CHECK_EQUAL(found, false);
+    }
+
+    template<typename Serializer>
+    void printUnion(Serializer &s, int path)
+    {
+        s.writeUnion(path);
+        if(path == 0) {
+            std::cout << "Null in union\n";
+            s.writeNull();
+        }
+        else if(path == 1) {
+            std::cout << "Map in union\n";
+            s.writeMapBlock(2);
+            s.writeString("Foo");
+            s.writeInt(16);
+            s.writeString("Bar");
+            s.writeInt(17);
+            s.writeMapBlock(1);
+            s.writeString("FooBar");
+            s.writeInt(18);
+            s.writeMapEnd();
+        }
+        else {
+            std::cout << "Float in union\n";
+            s.writeFloat(200.);
+        }
+    }
+
+    template<typename Serializer>
+    void writeEncoding(Serializer &s, int path)
+    {
+        std::cout << "Record\n";
+        s.writeRecord();
+        s.writeInt(1000);
+
+        std::cout << "Map\n";
+        s.writeMapBlock(2);
+        s.writeString(std::string("Foo"));
+        s.writeInt(16);
+        s.writeString(std::string("Bar"));
+        s.writeInt(17);
+        s.writeMapEnd();
+
+        std::cout << "Array\n";
+        s.writeArrayBlock(2);
+        s.writeDouble(100.0);
+        s.writeDouble(1000.0);
+        s.writeArrayEnd();
+
+        std::cout << "Enum\n";
+        s.writeEnum(3);
+
+        std::cout << "Union\n";
+        printUnion(s, path);
+
+        std::cout << "Record\n";
+        s.writeRecord();
+        s.writeFloat(-101.101f);
+        s.writeRecordEnd();
+
+        std::cout << "Bool\n";
+        s.writeBool(true);
+
+        std::cout << "Fixed16\n";
+        
+        s.writeFixed(fixeddata);
+
+        std::cout << "Long\n";
+        s.writeLong(7010728798977672067LL);
+
+        std::cout << "Int\n";
+        s.writeInt(-3456);
+        s.writeRecordEnd();
+    }
+
+    void printEncoding() {
+        std::cout << "Encoding\n";
+        Serializer<Writer> s;
+        writeEncoding(s, 0);
+        std::cout << s.buffer();
+    }
+
+    void printValidatingEncoding(int path)
+    {
+        std::cout << "Validating Encoding " << path << "\n";
+        Serializer<ValidatingWriter> s(schema_);
+        writeEncoding(s, path);
+        std::cout << s.buffer();
+    }
+
+    void saveValidatingEncoding(int path) 
+    {
+        std::ofstream out("test.avro");
+        Serializer<ValidatingWriter> s(schema_);
+        writeEncoding(s, path);
+        InputBuffer buf = s.buffer();
+        istream is(buf);
+        out << is.rdbuf();
+    }
+
+    void printNext(Parser<Reader> &p) {
+        // no-op printer
+    }
+
+    void printNext(Parser<ValidatingReader> &p)
+    {
+        std::cout << "Next: \"" << nextType(p);
+        std::string recordName;
+        std::string fieldName;
+        if( currentRecordName(p, recordName) ) {
+            std::cout << "\" record: \"" << recordName;
+        }
+        if( nextFieldName(p, fieldName) ) {
+            std::cout << "\" field: \"" << fieldName;
+        }
+        std::cout << "\"\n";
+    }
+
+    template <typename Parser>
+    void readMap(Parser &p)
+    {
+        int64_t size = 0;
+        do { 
+            printNext(p);
+            size = p.readMapBlockSize();
+            std::cout << "Size " << size << '\n';
+            for(int64_t i=0; i < size; ++i) {
+                std::string key;
+                printNext(p);
+                p.readString(key);
+                printNext(p);
+                int32_t intval = p.readInt();
+                std::cout << key << ":" << intval << '\n';
+            }
+        } while (size != 0);
+    }
+
+    template <typename Parser>
+    void readArray(Parser &p)
+    {
+        int64_t size = 0;
+        double d = 0.0;
+        do {
+            printNext(p);
+            size = p.readArrayBlockSize();
+            std::cout << "Size " << size << '\n';
+            for(int64_t i=0; i < size; ++i) {
+                printNext(p);
+                d = p.readDouble();
+                std::cout << i << ":" << d << '\n';
+            }
+        } while(size != 0);
+        BOOST_CHECK_EQUAL(d, 1000.0);
+    }
+
+    template <typename Parser>
+    void readNestedRecord(Parser &p)
+    {
+        printNext(p);
+        p.readRecord();
+        printNext(p);
+        float f = p.readFloat();
+        std::cout << f << '\n';
+        BOOST_CHECK_EQUAL(f, -101.101f);
+        p.readRecordEnd();
+    }
+
+    template <typename Parser>
+    void readFixed(Parser &p) {
+
+        boost::array<uint8_t, 16> input;
+        p.readFixed(input);
+        BOOST_CHECK_EQUAL(input.size(), 16U);
+
+        for(int i=0; i< 16; ++i) {
+            std::cout << static_cast<int>(input[i]) << ' ';
+        }
+        std::cout << '\n';
+    }
+
+    template <typename Parser>
+    void readData(Parser &p)
+    {
+        printNext(p);
+        p.readRecord();
+
+        printNext(p);
+        int64_t longval = p.readLong();
+        std::cout << longval << '\n';
+        BOOST_CHECK_EQUAL(longval, 1000);
+
+        readMap(p);
+        readArray(p);
+
+        printNext(p);
+        longval = p.readEnum();
+        std::cout << "Enum choice " << longval << '\n';
+
+        printNext(p);
+        longval = p.readUnion();
+        std::cout << "Union path " << longval << '\n';
+        readMap(p);
+
+        readNestedRecord(p);
+
+        printNext(p);
+        bool boolval = p.readBool();
+        std::cout << boolval << '\n';
+        BOOST_CHECK_EQUAL(boolval, true);
+
+        printNext(p);
+        readFixed(p);
+
+        printNext(p);
+        longval = p.readLong();
+        std::cout << longval << '\n';
+        BOOST_CHECK_EQUAL(longval, 7010728798977672067LL);
+
+        printNext(p);
+        int32_t intval = p.readInt();
+        std::cout << intval << '\n';
+        BOOST_CHECK_EQUAL(intval, -3456);
+        p.readRecordEnd();
+    }
+
+    void readRawData() {
+        std::ifstream in("test.avro");
+        ostream os;
+        os << in.rdbuf();
+        Parser<Reader> p(os.getBuffer());
+        readData(p);
+    }
+
+    void readValidatedData()
+    {
+        std::ifstream in("test.avro");
+        ostream os;
+        os << in.rdbuf();
+        Parser<ValidatingReader> p(schema_, os.getBuffer());
+        readData(p);
+    }
+
+    void test()
+    {
+        std::cout << "Before\n";
+        schema_.toJson(std::cout);
+        schema_.toFlatList(std::cout);
+        buildSchema();
+        std::cout << "After\n";
+        schema_.toJson(std::cout);
+        schema_.toFlatList(std::cout);
+
+        checkNameLookup();
+
+        printEncoding();
+        printValidatingEncoding(0);
+        printValidatingEncoding(1);
+        printValidatingEncoding(2);
+
+        saveValidatingEncoding(1);
+        readRawData();
+        readValidatedData();
+
+        createExampleSchema();
+    }
+
+    ValidSchema schema_;
+};
+
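+// Zigzag encoding maps signed integers to unsigned ones so that values of
+// small magnitude get small encodings: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...
+// For 32-bit values the mapping is (n << 1) ^ (n >> 31).  The tests below
+// round-trip values around zero and near both extremes.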
+struct TestEncoding {
+
+    void compare(int32_t val) {
+        uint32_t encoded = encodeZigzag32(val);
+        BOOST_CHECK_EQUAL(decodeZigzag32(encoded), val);
+    }
+
+    void compare(int64_t val) {
+        uint64_t encoded = encodeZigzag64(val);
+        BOOST_CHECK_EQUAL(decodeZigzag64(encoded), val);
+    }
+
+    template<typename IntType>
+    void testEncoding(IntType start, IntType stop)
+    {
+        std::cout << "testing from " << start << " to " << stop << " inclusive\n";
+        IntType val = start;
+        IntType diff = stop - start + 1;
+
+        for(IntType i = 0; i < diff; ++i) {
+            compare(val+i);
+        }
+    }
+
+    template<typename IntType>
+    void testEncoding()
+    {
+        testEncoding<IntType>(std::numeric_limits<IntType>::min(), std::numeric_limits<IntType>::min() + 1000);
+        testEncoding<IntType>(-1000, 1000);
+        testEncoding<IntType>(std::numeric_limits<IntType>::max()-1000, std::numeric_limits<IntType>::max());
+    }
+
+    void test() {
+        testEncoding<int32_t>();
+        testEncoding<int64_t>();
+    }
+
+};
+
+struct TestNested
+{
+    TestNested()
+    {}
+
+    void createSchema() 
+    {
+        std::cout << "TestNested\n";
+        RecordSchema rec("LongList");
+        rec.addField("value", LongSchema());
+        UnionSchema next;
+        next.addType(NullSchema());
+        next.addType(SymbolicSchema(Name("LongList"), rec.root()));
+        rec.addField("next", next);
+        rec.addField("end", BoolSchema());
+
+        schema_.setSchema(rec);
+        schema_.toJson(std::cout);
+        schema_.toFlatList(std::cout);
+    }
+
+    InputBuffer serializeNoRecurse()
+    {
+        std::cout << "No recurse\n";
+        Serializer<ValidatingWriter> s(schema_);
+        s.writeRecord();
+        s.writeLong(1);
+        s.writeUnion(0);
+        s.writeNull();
+        s.writeBool(true);
+        s.writeRecordEnd();
+
+        return s.buffer();
+    }
+
+    InputBuffer serializeRecurse()
+    {
+        std::cout << "Recurse\n";
+        Serializer<ValidatingWriter> s(schema_);
+        s.writeRecord();
+        s.writeLong(1);
+        s.writeUnion(1);
+        {
+            s.writeRecord();
+            s.writeLong(2);
+            s.writeUnion(1);
+            {
+                s.writeRecord();
+                s.writeLong(3);
+                s.writeUnion(0);
+                { 
+                    s.writeNull();
+                }
+                s.writeBool(false);
+                s.writeRecordEnd();
+            }
+            s.writeBool(false);
+            s.writeRecordEnd();
+
+        }
+        s.writeBool(true);
+        s.writeRecordEnd();
+
+        return s.buffer();
+    }
+
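+    // "next" is a union of [null, LongList] (see createSchema), so union
+    // branch 1 carries a nested LongList record and branch 0 (null) ends
+    // the list.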
+    void readRecord(Parser<ValidatingReader> &p) 
+    {
+        p.readRecord();
+        int64_t val = p.readLong();
+        std::cout << "longval = " << val << '\n';
+        int64_t path = p.readUnion();
+        if (path == 1) {
+            readRecord(p);
+        }
+        else {
+            p.readNull();
+        }
+        bool b = p.readBool();
+        std::cout << "bval = " << b << '\n';
+        p.readRecordEnd();
+    }
+
+    void validatingParser(InputBuffer &buf) 
+    {
+        Parser<ValidatingReader> p(schema_, buf);
+        readRecord(p);
+    }
+
+    void testToScreen() {
+        InputBuffer buf1 = serializeNoRecurse();
+        InputBuffer buf2 = serializeRecurse();
+        std::cout << buf1;
+        std::cout << buf2;
+    }
+
+    void testParseNoRecurse() {
+        std::cout << "ParseNoRecurse\n";
+        InputBuffer buf = serializeNoRecurse();
+    
+        validatingParser(buf);
+    }
+
+    void testParseRecurse() {
+        std::cout << "ParseRecurse\n";
+        InputBuffer buf = serializeRecurse();
+
+        validatingParser(buf);
+    }
+
+
+    void test() {
+        createSchema();
+        testToScreen();
+
+        testParseNoRecurse();
+        testParseRecurse();
+
+    }
+
+    ValidSchema schema_;
+};
+
+struct TestGenerated
+{
+    TestGenerated()
+    {}
+
+    void test() 
+    {
+        std::cout << "TestGenerated\n";
+
+        int32_t val = 100;
+        float   f   = 200.0;
+
+        Writer writer;
+
+        serialize(writer, val);
+        serialize(writer, Null());
+        serialize(writer, f);
+
+        std::cout << writer.buffer();
+    }
+};
+
+struct TestBadStuff
+{
+    void testBadFile() 
+    {
+        std::cout << "TestBadFile\n";
+
+        avro::ValidSchema schema;
+        std::ifstream in("agjoewejefkjs");
+        std::string error;
+        bool result = avro::compileJsonSchema(in, schema, error);
+        BOOST_CHECK_EQUAL(result, false);
+        std::cout << "(intentional) error: " << error << '\n';
+    }
+
+    void testBadSchema()
+    {
+        std::cout << "TestBadSchema\n";
+
+        std::string str ("{ \"type\" : \"wrong\" }");
+        std::istringstream in(str);
+
+        avro::ValidSchema schema;
+        std::string error;
+        bool result = avro::compileJsonSchema(in, schema, error);
+        BOOST_CHECK_EQUAL(result, false);
+        std::cout << "(intentional) error: " << error << '\n';
+    }
+
+    void test() 
+    {
+        std::cout << "TestBadStuff\n";
+        testBadFile();
+        testBadSchema();
+    }
+};
+
+struct TestResolution
+{
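+    // Avro schema resolution permits numeric promotion in one direction
+    // only: int -> long/float/double, long -> float/double, and
+    // float -> double.  test() exercises these rules, along with
+    // resolution of maps, arrays, enums, and unions.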
+    TestResolution() :
+        int_(IntSchema()), 
+        long_(LongSchema()),
+        bool_(BoolSchema()), 
+        float_(FloatSchema()), 
+        double_(DoubleSchema()),
+
+        mapOfInt_(MapSchema(IntSchema())),
+        mapOfDouble_(MapSchema(DoubleSchema())),
+
+        arrayOfLong_(ArraySchema(LongSchema())),
+        arrayOfFloat_(ArraySchema(FloatSchema()))
+    {
+        {
+            EnumSchema one("one");
+            one.addSymbol("X");
+            enumOne_.setSchema(one);
+
+            EnumSchema two("two");
+            two.addSymbol("Y");
+            enumTwo_.setSchema(two);
+        }
+    
+        {
+            UnionSchema one;
+            one.addType(IntSchema());
+            one.addType(FloatSchema());
+            unionOne_.setSchema(one);
+
+            UnionSchema two;
+            two.addType(IntSchema());
+            two.addType(DoubleSchema());
+            unionTwo_.setSchema(two);
+        }
+    }
+
+    SchemaResolution resolve(const ValidSchema &writer, const ValidSchema &reader)
+    {
+        return writer.root()->resolve(*reader.root());
+    }
+
+    void test() 
+    {
+        std::cout << "TestResolution\n";
+
+        BOOST_CHECK_EQUAL(resolve(long_, long_), RESOLVE_MATCH); 
+        BOOST_CHECK_EQUAL(resolve(long_, bool_), RESOLVE_NO_MATCH); 
+        BOOST_CHECK_EQUAL(resolve(bool_, long_), RESOLVE_NO_MATCH); 
+
+        BOOST_CHECK_EQUAL(resolve(int_, long_), RESOLVE_PROMOTABLE_TO_LONG); 
+        BOOST_CHECK_EQUAL(resolve(long_, int_), RESOLVE_NO_MATCH); 
+
+        BOOST_CHECK_EQUAL(resolve(int_, float_), RESOLVE_PROMOTABLE_TO_FLOAT); 
+        BOOST_CHECK_EQUAL(resolve(float_, int_), RESOLVE_NO_MATCH); 
+
+        BOOST_CHECK_EQUAL(resolve(int_, double_), RESOLVE_PROMOTABLE_TO_DOUBLE); 
+        BOOST_CHECK_EQUAL(resolve(double_, int_), RESOLVE_NO_MATCH); 
+
+        BOOST_CHECK_EQUAL(resolve(long_, float_), RESOLVE_PROMOTABLE_TO_FLOAT); 
+        BOOST_CHECK_EQUAL(resolve(float_, long_), RESOLVE_NO_MATCH); 
+
+        BOOST_CHECK_EQUAL(resolve(long_, double_), RESOLVE_PROMOTABLE_TO_DOUBLE); 
+        BOOST_CHECK_EQUAL(resolve(double_, long_), RESOLVE_NO_MATCH); 
+
+        BOOST_CHECK_EQUAL(resolve(float_, double_), RESOLVE_PROMOTABLE_TO_DOUBLE); 
+        BOOST_CHECK_EQUAL(resolve(double_, float_), RESOLVE_NO_MATCH); 
+
+        BOOST_CHECK_EQUAL(resolve(int_, mapOfInt_), RESOLVE_NO_MATCH);
+        BOOST_CHECK_EQUAL(resolve(mapOfInt_, int_), RESOLVE_NO_MATCH);
+
+        BOOST_CHECK_EQUAL(resolve(mapOfInt_, mapOfInt_), RESOLVE_MATCH);
+        BOOST_CHECK_EQUAL(resolve(mapOfDouble_, mapOfInt_), RESOLVE_NO_MATCH);
+        BOOST_CHECK_EQUAL(resolve(mapOfInt_, mapOfDouble_), RESOLVE_PROMOTABLE_TO_DOUBLE);
+
+        BOOST_CHECK_EQUAL(resolve(long_, arrayOfLong_), RESOLVE_NO_MATCH);
+        BOOST_CHECK_EQUAL(resolve(arrayOfLong_, long_), RESOLVE_NO_MATCH);
+
+        BOOST_CHECK_EQUAL(resolve(arrayOfLong_, arrayOfLong_), RESOLVE_MATCH);
+        BOOST_CHECK_EQUAL(resolve(arrayOfFloat_, arrayOfLong_), RESOLVE_NO_MATCH);
+        BOOST_CHECK_EQUAL(resolve(arrayOfLong_, arrayOfFloat_), RESOLVE_PROMOTABLE_TO_FLOAT);
+
+        BOOST_CHECK_EQUAL(resolve(enumOne_, enumOne_), RESOLVE_MATCH);
+        BOOST_CHECK_EQUAL(resolve(enumOne_, enumTwo_), RESOLVE_NO_MATCH);
+
+        BOOST_CHECK_EQUAL(resolve(float_, unionOne_), RESOLVE_MATCH);
+        BOOST_CHECK_EQUAL(resolve(double_, unionOne_), RESOLVE_NO_MATCH);
+        BOOST_CHECK_EQUAL(resolve(float_, unionTwo_), RESOLVE_PROMOTABLE_TO_DOUBLE);
+
+        BOOST_CHECK_EQUAL(resolve(unionOne_, float_), RESOLVE_MATCH);
+        BOOST_CHECK_EQUAL(resolve(unionOne_, double_), RESOLVE_PROMOTABLE_TO_DOUBLE);
+        BOOST_CHECK_EQUAL(resolve(unionTwo_, float_), RESOLVE_PROMOTABLE_TO_FLOAT);
+        BOOST_CHECK_EQUAL(resolve(unionOne_, unionTwo_), RESOLVE_MATCH);
+    }
+
+  private:
+
+    ValidSchema int_;
+    ValidSchema long_;
+    ValidSchema bool_;
+    ValidSchema float_;
+    ValidSchema double_;
+
+    ValidSchema mapOfInt_;
+    ValidSchema mapOfDouble_;
+
+    ValidSchema arrayOfLong_;
+    ValidSchema arrayOfFloat_;
+
+    ValidSchema enumOne_;
+    ValidSchema enumTwo_;
+
+    ValidSchema unionOne_;
+    ValidSchema unionTwo_;
+};
+
+
+template<typename T>
+void addTestCase(boost::unit_test::test_suite &test) 
+{
+    boost::shared_ptr<T> newtest( new T );
+    test.add( BOOST_CLASS_TEST_CASE( &T::test, newtest ));
+}
+
+boost::unit_test::test_suite*
+init_unit_test_suite( int argc, char* argv[] ) 
+{
+    using namespace boost::unit_test;
+
+    test_suite* test= BOOST_TEST_SUITE( "Avro C++ unit test suite" );
+
+    addTestCase<TestEncoding>(*test);
+    addTestCase<TestSchema>(*test);
+    addTestCase<TestNested>(*test);
+    addTestCase<TestGenerated>(*test);
+    addTestCase<TestBadStuff>(*test);
+    addTestCase<TestResolution>(*test);
+
+    return test;
+}
+
diff --git a/lang/c/.gitignore b/lang/c/.gitignore
new file mode 100644
index 0000000..827a3df
--- /dev/null
+++ b/lang/c/.gitignore
@@ -0,0 +1,3 @@
+build
+cscope.*
+VERSION.txt
diff --git a/lang/c/AUTHORS b/lang/c/AUTHORS
new file mode 100644
index 0000000..7b45e94
--- /dev/null
+++ b/lang/c/AUTHORS
@@ -0,0 +1,4 @@
+
+See http://hadoop.apache.org/avro/ for a list of authors
+
+
diff --git a/lang/c/CMakeLists.txt b/lang/c/CMakeLists.txt
new file mode 100644
index 0000000..a223f34
--- /dev/null
+++ b/lang/c/CMakeLists.txt
@@ -0,0 +1,178 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+cmake_minimum_required(VERSION 2.4)
+project(AvroC)
+enable_testing()
+
+set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_SOURCE_DIR})
+
+# Eliminates warning about linker paths when linking both zlib and
+# liblzma.
+cmake_policy(SET CMP0003 NEW)
+
+#-----------------------------------------------------------------------
+# Retrieve the current version number
+
+if (UNIX)
+    execute_process(COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/version.sh project
+        WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
+        RESULT_VARIABLE AVRO_VERSION_RESULT
+        OUTPUT_VARIABLE AVRO_VERSION
+        OUTPUT_STRIP_TRAILING_WHITESPACE)
+    if(AVRO_VERSION_RESULT)
+        message(FATAL_ERROR "Cannot determine Avro version number")
+    endif(AVRO_VERSION_RESULT)
+
+    execute_process(COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/version.sh libtool
+        WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
+        RESULT_VARIABLE LIBAVRO_VERSION_RESULT
+        OUTPUT_VARIABLE LIBAVRO_VERSION
+        OUTPUT_STRIP_TRAILING_WHITESPACE)
+    if(LIBAVRO_VERSION_RESULT)
+        message(FATAL_ERROR "Cannot determine libavro version number")
+    endif(LIBAVRO_VERSION_RESULT)
+else(UNIX)
+    # Hard code for win32 -- need to figure out how to port version.sh for
+    # Windows.
+    set(LIBAVRO_VERSION "22:0:0")
+endif(UNIX)
+
+
+#-----------------------------------------------------------------------
+# Extract major.minor.patch from version number
+
+if (UNIX)
+    string(REGEX REPLACE "([0-9]+)\\..*"
+        "\\1"
+        AVRO_MAJOR_VERSION
+        ${AVRO_VERSION}
+    )
+    string(REGEX REPLACE ".*\\.([0-9]+)\\..*"
+        "\\1"
+        AVRO_MINOR_VERSION
+        ${AVRO_VERSION}
+    )
+    string(REGEX REPLACE ".*\\..*\\.([0-9]+).*"
+        "\\1"
+        AVRO_PATCH_VERSION
+        ${AVRO_VERSION}
+    )
+    string(REGEX REPLACE ".*\\..*\\.[0-9]+(.*)"
+        "\\1"
+        AVRO_VERSION_EXTENSION
+        ${AVRO_VERSION}
+    )
+endif(UNIX)
+
+#-----------------------------------------------------------------------
+# Source package support
+
+include(CPackConfig.txt)
+include(CheckLibraryExists)
+
+
+if(APPLE)
+    if (NOT CMAKE_INSTALL_NAME_DIR)
+        set(CMAKE_INSTALL_NAME_DIR "${CMAKE_INSTALL_PREFIX}/lib")
+    endif (NOT CMAKE_INSTALL_NAME_DIR)
+endif(APPLE)
+
+if(CMAKE_COMPILER_IS_GNUCC)
+    add_definitions(-W -Wall)
+endif(CMAKE_COMPILER_IS_GNUCC)
+
+if (WIN32)
+   # Compile win32 in C++ to allow declarations after statements
+   add_definitions(/TP)
+endif(WIN32)
+
+# Uncomment to allow missing fields in the resolved-writer
+# add_definitions(-DAVRO_ALLOW_MISSING_FIELDS_IN_RESOLVED_WRITER)
+
+# Uncomment to allow non-atomic increment/decrement of reference count
+# add_definitions(-DAVRO_ALLOW_NON_ATOMIC_REFCOUNT)
+
+# Thread support (only for *nix with pthreads)
+set(THREADS_LIBRARIES)
+if(UNIX AND THREADSAFE AND CMAKE_COMPILER_IS_GNUCC)
+    set(CMAKE_THREAD_PREFER_PTHREAD)
+    find_package(Threads)
+
+    if(NOT CMAKE_USE_PTHREADS_INIT)
+        message(FATAL_ERROR "pthreads not found")
+    endif(NOT CMAKE_USE_PTHREADS_INIT)
+
+    add_definitions(-DTHREADSAFE -D_REENTRANT)
+    set(THREADS_LIBRARIES ${CMAKE_THREAD_LIBS_INIT})
+endif(UNIX AND THREADSAFE AND CMAKE_COMPILER_IS_GNUCC)
+
+include_directories(${AvroC_SOURCE_DIR}/src)
+include_directories(${AvroC_SOURCE_DIR}/jansson/src)
+
+
+# Enable codecs
+
+find_package(ZLIB)
+if (ZLIB_FOUND)
+    set(ZLIB_PKG zlib)
+    add_definitions(-DDEFLATE_CODEC)
+    include_directories(${ZLIB_INCLUDE_DIR})
+    message("Enabled deflate codec")
+else (ZLIB_FOUND)
+    set(ZLIB_PKG "")
+    set(ZLIB_LIBRARIES "")
+    message("Disabled deflate codec. zlib not found.")
+endif (ZLIB_FOUND)
+
+find_package(Snappy)
+if (SNAPPY_FOUND AND ZLIB_FOUND)  # Snappy borrows crc32 from zlib
+    set(SNAPPY_PKG libsnappy)
+    add_definitions(-DSNAPPY_CODEC)
+    message("Enabled snappy codec")
+else (SNAPPY_FOUND AND ZLIB_FOUND)
+    set(SNAPPY_PKG "")
+    set(SNAPPY_LIBRARIES "")
+    message("Disabled snappy codec. libsnappy not found or zlib not found.")
+endif (SNAPPY_FOUND AND ZLIB_FOUND)
+
+find_package(PkgConfig)
+pkg_check_modules(LZMA liblzma)
+if (LZMA_FOUND)
+    set(LZMA_PKG liblzma)
+    add_definitions(-DLZMA_CODEC)
+    include_directories(${LZMA_INCLUDE_DIRS})
+    link_directories(${LZMA_LIBRARY_DIRS})
+    message("Enabled lzma codec")
+else (LZMA_FOUND)
+    set(LZMA_PKG "")
+    set(LZMA_LIBRARIES "")
+    message("Disabled lzma codec. liblzma not found.")
+endif (LZMA_FOUND)
+
+set(CODEC_LIBRARIES ${ZLIB_LIBRARIES} ${LZMA_LIBRARIES} ${SNAPPY_LIBRARIES})
+set(CODEC_PKG "@ZLIB_PKG@ @LZMA_PKG@ @SNAPPY_PKG@")
+
+
+add_subdirectory(src)
+add_subdirectory(examples)
+add_subdirectory(tests)
+add_subdirectory(docs)
+
+add_custom_target(pretty
+    "${CMAKE_COMMAND}" -P "${CMAKE_CURRENT_SOURCE_DIR}/cmake_pretty.cmake")
diff --git a/lang/c/COPYING b/lang/c/COPYING
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/lang/c/COPYING
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/lang/c/CPackConfig.txt b/lang/c/CPackConfig.txt
new file mode 100644
index 0000000..aa2f553
--- /dev/null
+++ b/lang/c/CPackConfig.txt
@@ -0,0 +1,39 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+set(CPACK_PACKAGE_NAME "avro-c")
+set(CPACK_PACKAGE_DESCRIPTION "C bindings for Avro data serialization framework")
+set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "C bindings for Avro data serialization framework")
+set(CPACK_PACKAGE_VENDOR "Apache Software Foundation")
+set(CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/README")
+set(CPACK_PACKAGE_VERSION_MAJOR "${AVRO_MAJOR_VERSION}")
+set(CPACK_PACKAGE_VERSION_MINOR "${AVRO_MINOR_VERSION}")
+set(CPACK_PACKAGE_VERSION_PATCH "${AVRO_PATCH_VERSION}${AVRO_VERSION_EXTENSION}")
+set(CPACK_PACKAGE_VERSION "${AVRO_VERSION}")
+set(CPACK_PACKAGE_CONTACT "avro-dev at apache.org")
+set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/COPYING")
+set(CPACK_STRIP_FILES true)
+
+set(CPACK_SOURCE_PACKAGE_FILE_NAME "${CPACK_PACKAGE_NAME}-${CPACK_PACKAGE_VERSION}")
+set(CPACK_PACKAGE_INSTALL_DIRECTORY "/usr")
+set(CPACK_GENERATOR "TGZ")
+set(CPACK_SOURCE_GENERATOR "TGZ")
+
+set(CPACK_SOURCE_IGNORE_FILES "^${CMAKE_BINARY_DIR};/\\\\.gitignore;/\\\\.svn;\\\\.swp$;\\\\.#;/#;.*~")
+
+include(CPack)
diff --git a/lang/c/ChangeLog b/lang/c/ChangeLog
new file mode 100644
index 0000000..fd40910
--- /dev/null
+++ b/lang/c/ChangeLog
@@ -0,0 +1,4 @@
+
+
+
+
diff --git a/lang/c/FindSnappy.cmake b/lang/c/FindSnappy.cmake
new file mode 100644
index 0000000..e9053af
--- /dev/null
+++ b/lang/c/FindSnappy.cmake
@@ -0,0 +1,54 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# Tries to find Snappy headers and libraries.
+#
+# Usage of this module as follows:
+#
+#  find_package(Snappy)
+#
+# Variables used by this module, they can change the default behaviour and need
+# to be set before calling find_package:
+#
+#  SNAPPY_ROOT_DIR  Set this variable to the root installation of
+#                    Snappy if the module has problems finding
+#                    the proper installation path.
+#
+# Variables defined by this module:
+#
+#  SNAPPY_FOUND              System has Snappy libs/headers
+#  SNAPPY_LIBRARIES          The Snappy libraries
+#  SNAPPY_INCLUDE_DIR        The location of Snappy headers
+
+find_path(SNAPPY_INCLUDE_DIR
+    NAMES snappy.h
+    HINTS ${SNAPPY_ROOT_DIR}/include)
+
+find_library(SNAPPY_LIBRARIES
+    NAMES snappy
+    HINTS ${SNAPPY_ROOT_DIR}/lib)
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(Snappy DEFAULT_MSG
+    SNAPPY_LIBRARIES
+    SNAPPY_INCLUDE_DIR)
+
+mark_as_advanced(
+    SNAPPY_ROOT_DIR
+    SNAPPY_LIBRARIES
+    SNAPPY_INCLUDE_DIR)
diff --git a/lang/c/INSTALL b/lang/c/INSTALL
new file mode 100644
index 0000000..b6dc0b2
--- /dev/null
+++ b/lang/c/INSTALL
@@ -0,0 +1,63 @@
+Installation instructions
+=========================
+
+The Avro library is written in ANSI C.  It uses CMake [1] as its build
+manager.
+
+[1] http://www.cmake.org/
+
+
+Prerequisites
+-------------
+
+Avro uses CMake [1] as its build manager.  You must have this installed,
+along with a C compiler (such as gcc) to build the library.
+
+Avro doesn't depend on any other libraries, but you need asciidoc [2]
+and source-highlight [3] installed to build Avro's documentation.  The
+build scripts will automatically detect whether these tools are
+installed, and will skip the documentation if they're not.
+
+[1] http://www.cmake.org/
+[2] http://www.methods.co.nz/asciidoc
+[3] http://www.gnu.org/software/src-highlite/
+
+
+Building from source
+--------------------
+
+The Avro C library uses CMake as its build manager.  In most cases, you
+should be able to build the source code by running the following:
+
+    $ mkdir build
+    $ cd build
+    $ cmake .. \
+        -DCMAKE_INSTALL_PREFIX=$PREFIX \
+        -DCMAKE_BUILD_TYPE=RelWithDebInfo
+    $ make
+    $ make test
+    $ make install
+
+You might have to run the last command using sudo, if you need
+administrative privileges to write to the $PREFIX directory.
+
+The "RelWithDebInfo" build type will build an optimized copy of the
+library, including debugging symbols.  Use the "Release" build type if
+you don't want debugging symbols.  Use the "Debug" build type if you
+want a non-optimized library, with debugging symbols.
+
+On Mac OS X, you can build a universal binary by setting the
+CMAKE_OSX_ARCHITECTURES option when running cmake.  Just add something
+like the following:
+
+        -DCMAKE_OSX_ARCHITECTURES="i386;x86_64"
+
+to the cmake command given above.  The set of architectures that you can
+choose from differs depending on which version of OS X you're running;
+common possibilities include "i386", "x86_64", "ppc", and "ppc64".
+
+On Unix, you can request thread-safe versions of the Avro library's
+global functions by defining the THREADSAFE cmake variable.  Just add
+the following to your cmake invocation:
+
+        -DTHREADSAFE=true
diff --git a/lang/c/NEWS b/lang/c/NEWS
new file mode 100644
index 0000000..e70f439
--- /dev/null
+++ b/lang/c/NEWS
@@ -0,0 +1,5 @@
+
+For news, visit the Avro web site at
+http://hadoop.apache.org/avro/
+
+
diff --git a/lang/c/README b/lang/c/README
new file mode 100644
index 0000000..442415e
--- /dev/null
+++ b/lang/c/README
@@ -0,0 +1,10 @@
+    ___                      ______
+   /   |_   ___________     / ____/
+  / /| | | / / ___/ __ \   / /     
+ / ___ | |/ / /  / /_/ /  / /___   
+/_/  |_|___/_/   \____/   \____/   
+
+======================================================
+Please see the INSTALL file for installation instructions, and the
+documentation in the "docs" directory for more details about the
+library.
diff --git a/lang/c/README.maintaining_win32.txt b/lang/c/README.maintaining_win32.txt
new file mode 100644
index 0000000..62754db
--- /dev/null
+++ b/lang/c/README.maintaining_win32.txt
@@ -0,0 +1,166 @@
+Win32 C++ builds of Avro-C
+*****************************
+
+April 2, 2012
+
+These instructions describe some of the changes required to allow
+Avro-C to compile under the Microsoft Visual C++ 2008 compiler, as
+well as some limitations of the Windows build.
+
+Status of the Windows Build:
+****************************
+
+The Windows build of Avro-C compiles under Microsoft Visual C++
+2008. You can use CMake to create the solution file (AvroC.sln) for
+the build.  The solution file contains projects for the build as well
+as projects for the tests. All the tests run and pass when the
+project RUN_TESTS is built from within MS Visual C++ 2008.
+
+Limitations of Windows Build:
+******************************
+
+1. The Windows build of Avro-C is compiled using Microsoft's C++
+   compiler and not Microsoft's C compiler. This is done using the /TP
+   flag in the MSVC++ compiler. This flag is set automatically by
+   CMakeLists.txt.
+
+   The reason to compile Win32 under C++ instead of C is that there
+   are lots of places where variables are declared after statements,
+   and the Microsoft C compiler does not support declarations after
+   statements. It is possible that, if all the declarations after
+   statements were removed, Avro-C would also compile under
+   Microsoft's C compiler. I have not tried this.
+
+2. The shared library, i.e. DLL, for avro has not been built. There
+   are instructions on how to build DLLs using CMake at
+   http://www.cmake.org/Wiki/BuildingWinDLL
+
+3. Currently avropipe.c and avromod.c do not compile under Windows. In
+   order for them to compile we would have to either port getopt() to
+   Windows, or remove their dependency on getopt().
+
+4. Windows cannot run version.sh to generate the version
+   number. Currently, LIBAVRO_VERSION is hard coded to "22:0:0" for
+   the Windows build, in the top level CMakeLists.txt.
+
+5. Found two bugs related to improper return values under error
+   conditions in Avro. These bugs were marked with #warnings in the
+   code.
+
+
+Instructions for Maintenance
+*****************************
+
+1. Instructions to check name mangling in Visual C++:
+
+    In a DOS prompt go to "C:\Program Files(x86)\Microsoft Visual Studio 9.0\VC\"
+    Run the program vcvarsall.bat . This will set up environment variables.
+
+    Now go to the avro_c\build_win32\src\Debug\ directory.
+    Run the command
+
+    dumpbin /ALL avro.lib > tmp.txt
+
+    View tmp.txt in your favorite editor. This will allow you to see
+    which names are mangled and which names are not mangled.
+
+    Every header file should start with
+
+    #ifndef HEADER_FILE_NAME_H
+    #define HEADER_FILE_NAME_H
+    #ifdef __cplusplus
+    extern "C" {
+    #define CLOSE_EXTERN }
+    #else
+    #define CLOSE_EXTERN
+    #endif
+
+    and end with
+
+    CLOSE_EXTERN
+    #endif /* HEADER_FILE_NAME_H */
+
+    This will ensure that all exported (public) functions are mangled
+    using C name mangling instead of C++ name mangling.
+
+2. All file I/O operations should have "b" for binary in the fopen
+   statements. Otherwise Windows will replace LF with CRLF in binary
+   data.
+
+3. Windows does not allow writing to a file with exclusive access
+   using the mode "wbx". Use the non-exclusive mode "wb" instead.
+
+4. If the hashtable from st.c is used, the functions in the struct
+   st_hash_type should be cast to HASH_FUNCTION_CAST.
+
+5. Do not use "%zu" to print size_t. Use '"%" PRIsz' (without the
+   single quotes) instead.
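+
+   For example (with PRIsz coming from avro/platform.h; see item 14):
+
+       size_t  len = strlen(buf);
+       printf("len = %" PRIsz "\n", len);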
+
+6. MS Visual C++ 2008 does not properly expand variadic preprocessor
+   macros by default. It is possible to "trick" MSVC++ to properly
+   expand variadic preprocessor macros by using an extra (dummy)
+   preprocessor macro, whose only purpose is to properly expand its
+   input. This method is described here:
+
+   http://stackoverflow.com/questions/2575864/the-problem-about-different-treatment-to-va-args-when-using-vs-2008-and-gcc
+   See the solution described by monkeyman.
+
+   This method is used in the macro expand_args(...) in test_avro_values.c.
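+
+   A minimal hypothetical sketch of the trick (the names here are
+   illustrative; the real macro is expand_args() in test_avro_values.c):
+
+       #define expand(x) x
+       /* Without the extra expand() pass, MSVC hands real_macro()
+          all of __VA_ARGS__ as a single argument. */
+       #define wrapper(...) expand(real_macro(__VA_ARGS__))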
+
+7. Web site describing how to debug macro expansion in Visual C++:
+   http://fneep.fiffa.net/?p=66
+
+8. Sometimes it is necessary to declare a struct at the top of a file
+   and define it at the bottom of a file. An example is
+   AVRO_DATUM_VALUE_CLASS in src/datum_value.c. A C++ compiler will
+   complain that the struct is defined twice. To avoid this, declare
+   the struct with the modifier "extern" at the top of the file, and
+   then define it at the bottom of the file. Note that it cannot be
+   defined as "static" because Win32 does not like an extern struct
+   mapping to a static struct.
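+
+   A minimal sketch of the pattern (the type and names are
+   illustrative):
+
+       struct ops { int (*run)(void); };
+
+       /* top of file: declaration only -- "extern", not "static" */
+       extern const struct ops my_ops;
+
+       static int run_impl(void) { return 0; }
+
+       /* bottom of file: the actual definition */
+       const struct ops my_ops = { run_impl };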
+
+9. Use __FUNCTION__ instead of __func__ for generating the function
+   name.
+
+10. All variables need to be explicitly cast when calling functions
+    with differing function signatures.
+
+11. All pointers need to be explicitly cast when assigning to other
+    pointers of a different type.
+
+12. Do not perform pointer arithmetic on void * pointers. Cast the
+    pointers to char * before performing pointer arithmetic.
+
+13. The executable names of any examples and tests need to be set
+    explicitly to include the "Debug/" directory in the path, and the
+    ".exe" ending. See the CMakeLists.txt in the examples and the
+    tests directory to see how this is done.
+
+14. Do not include the headers inttypes.h or unistd.h or
+    stdint.h. Instead include avro/platform.h in your C files.
+
+15. Do not include dirent.h in your tests. When _WIN32 is defined,
+    include msdirent.h. See the example in test_avro_schema.c.
+
+16. If _WIN32 is defined, define snprintf() to _snprintf(), which MS
+    Visual C++ recognizes. See jansson_private.h for an example.
+
+17. MSVC++ does not recognize strtoll(). Define it to _strtoi64()
+    instead. See jansson/src/load.c for an example.
+
+18. Old-style C function declarations are not allowed in C++. See the
+    changes in st.c and st.h -- which were converted to new-style
+    function declarations.
+
+19. Structures cannot be initialized using the .element notation for
+    Win32. For example if we have a struct test_t:
+        typedef struct
+        {
+           int a;
+           int b;
+        } test_t;
+    Then we can initialize the struct using the syntax:
+        test_t t1 = { 0, 0 };
+    But we cannot use the syntax:
+        test_t t2 = { .a = 0, .b = 0 };
+    because Win32 does not support it.
diff --git a/lang/c/build.sh b/lang/c/build.sh
new file mode 100755
index 0000000..71e36b2
--- /dev/null
+++ b/lang/c/build.sh
@@ -0,0 +1,75 @@
+#!/bin/bash
+set -e						  # exit on error
+#set -x		
+
+root_dir=$(pwd)
+build_dir="../../build/c"
+dist_dir="../../dist/c"
+version=$(./version.sh project)
+tarball="avro-c-$version.tar.gz"
+doc_dir="../../build/avro-doc-$version/api/c"
+
+function prepare_build {
+  clean
+  mkdir -p $build_dir
+  (cd $build_dir && cmake $root_dir -DCMAKE_BUILD_TYPE=RelWithDebInfo)
+}
+
+function clean {
+  if [ -d $build_dir ]; then
+	find $build_dir | xargs chmod 755
+	rm -rf $build_dir
+  fi
+}
+
+case "$1" in
+
+    interop-data-generate)
+	prepare_build
+	make -C $build_dir
+	$build_dir/tests/generate_interop_data "../../share/test/schemas/interop.avsc"  "../../build/interop/data"
+	;;
+
+    interop-data-test)
+	prepare_build
+	make -C $build_dir
+	$build_dir/tests/test_interop_data "../../build/interop/data"
+	;;
+
+    test)
+	prepare_build
+	make -C $build_dir
+	make -C $build_dir test
+        clean
+	;;
+
+    dist)
+	prepare_build
+	cp ../../share/VERSION.txt $root_dir
+	make -C $build_dir docs
+        # This is a hack to force the built documentation to be included
+        # in the source package.
+	cp $build_dir/docs/*.html $root_dir/docs
+	make -C $build_dir package_source
+	rm $root_dir/docs/*.html
+	if [ ! -d $dist_dir ]; then 
+           mkdir -p $dist_dir 
+        fi
+	if [ ! -d $doc_dir ]; then
+           mkdir -p $doc_dir
+	fi
+	mv $build_dir/$tarball $dist_dir
+	cp $build_dir/docs/*.html $doc_dir
+        clean
+	;;
+
+    clean)
+        clean
+	;;
+
+    *)
+        echo "Usage: $0 {interop-data-generate|interop-data-test|test|dist|clean}"
+        exit 1
+esac
+
+exit 0
diff --git a/lang/c/cmake_avrolib.bat b/lang/c/cmake_avrolib.bat
new file mode 100644
index 0000000..28b134d
--- /dev/null
+++ b/lang/c/cmake_avrolib.bat
@@ -0,0 +1,48 @@
+REM  Licensed to the Apache Software Foundation (ASF) under one
+REM  or more contributor license agreements.  See the NOTICE file
+REM  distributed with this work for additional information
+REM  regarding copyright ownership.  The ASF licenses this file
+REM  to you under the Apache License, Version 2.0 (the
+REM  "License"); you may not use this file except in compliance
+REM  with the License.  You may obtain a copy of the License at
+REM 
+REM    http://www.apache.org/licenses/LICENSE-2.0
+REM 
+REM  Unless required by applicable law or agreed to in writing,
+REM  software distributed under the License is distributed on an
+REM  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM  KIND, either express or implied.  See the License for the
+REM  specific language governing permissions and limitations
+REM  under the License.
+
+echo off
+
+REM Set up the solution file in Windows. 
+
+set my_cmake_path="put_your_cmake_path_here"
+set cmake_path_win7="C:\Program Files (x86)\CMake 2.8\bin\cmake.exe"
+set cmake_path_xp="C:\Program Files\CMake 2.8\bin\cmake.exe"
+
+if exist %my_cmake_path% (
+   set cmake_path=%my_cmake_path%
+   goto RUN_CMAKE
+)
+
+if exist %cmake_path_win7% (
+   set cmake_path=%cmake_path_win7%
+   goto RUN_CMAKE
+)
+
+if exist %cmake_path_xp% (
+   set cmake_path=%cmake_path_xp%
+   goto RUN_CMAKE
+)
+
+echo "Set the proper cmake path in the variable 'my_cmake_path' in cmake_windows.bat, and re-run"
+goto EXIT_ERROR
+
+:RUN_CMAKE
+%cmake_path% -G"Visual Studio 9 2008" -H. -Bbuild_win32
+
+
+:EXIT_ERROR
diff --git a/lang/c/cmake_avrolib.sh b/lang/c/cmake_avrolib.sh
new file mode 100755
index 0000000..24d3683
--- /dev/null
+++ b/lang/c/cmake_avrolib.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+mkdir -p build
+cd build
+cmake .. -DCMAKE_INSTALL_PREFIX=avrolib -DCMAKE_BUILD_TYPE=Debug
+make
+make test
+make install
+mkdir -p avrolib/lib/static
+cp avrolib/lib/libavro.a avrolib/lib/static/libavro.a
diff --git a/lang/c/cmake_pretty.cmake b/lang/c/cmake_pretty.cmake
new file mode 100644
index 0000000..aee633e
--- /dev/null
+++ b/lang/c/cmake_pretty.cmake
@@ -0,0 +1,33 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Linux kernel source indent format options
+set(INDENT_OPTS -nbad -bap -nbc -bbo -hnl -br -brs -c33 -cd33 -ncdb -ce -ci4
+    -cli0 -d0 -di1 -nfc1 -i8 -ip0 -l80 -lp -npcs -nprs -npsl -sai
+    -saf -saw -ncs -nsc -sob -nfca -cp33 -ss -ts8)
+
+foreach(dir src tests examples)
+    exec_program(indent
+                 ARGS ${INDENT_OPTS} ${CMAKE_CURRENT_SOURCE_DIR}/${dir}/*.[c,h]
+                 OUTPUT_VARIABLE indent_output
+                 RETURN_VALUE ret)
+    message(STATUS ${indent_output})
+    # TODO: mv ${CMAKE_CURRENT_SOURCE_DIR}/${dir}/*~ /tmp; \
+endforeach()
+
diff --git a/lang/c/docs/CMakeLists.txt b/lang/c/docs/CMakeLists.txt
new file mode 100644
index 0000000..1f67065
--- /dev/null
+++ b/lang/c/docs/CMakeLists.txt
@@ -0,0 +1,53 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+set (AVRO_DOC_SRC
+    index.txt
+)
+
+# TODO(dln): Use FindAsciidoc script instead.
+message(STATUS "Searching for asciidoc...")
+find_program(ASCIIDOC_EXECUTABLE asciidoc)
+find_program(SOURCE_HIGHLIGHT_EXECUTABLE source-highlight)
+
+if (ASCIIDOC_EXECUTABLE AND SOURCE_HIGHLIGHT_EXECUTABLE)
+    foreach(_file ${AVRO_DOC_SRC})
+        get_filename_component(_file_we ${_file} NAME_WE)
+        set(_file_path "${CMAKE_CURRENT_SOURCE_DIR}/${_file}")
+        set(_html_out "${CMAKE_CURRENT_BINARY_DIR}/${_file_we}.html")
+        add_custom_command(
+            OUTPUT "${_html_out}"
+            COMMAND ${ASCIIDOC_EXECUTABLE}
+                -a avro_version=${AVRO_VERSION}
+                -a libavro_version=${LIBAVRO_VERSION}
+                -a toc
+                --unsafe -n -o "${_html_out}" "${_file_path}"
+            DEPENDS "${_file_path}"
+            COMMENT "asciidoc ${_file}"
+        )
+        install(FILES "${_html_out}" DESTINATION share/doc/avro-c)
+        add_custom_target("${_file_we}_html" ALL echo -n
+            DEPENDS "${_file}" "${_html_out}"
+        )
+        add_custom_target(docs DEPENDS "${_html_out}")
+    endforeach(_file)
+else(ASCIIDOC_EXECUTABLE AND SOURCE_HIGHLIGHT_EXECUTABLE)
+    message(WARNING "asciidoc and/or source-highlight not found. HTML documentation will *NOT* be built.")
+endif(ASCIIDOC_EXECUTABLE AND SOURCE_HIGHLIGHT_EXECUTABLE)
+
diff --git a/lang/c/docs/index.txt b/lang/c/docs/index.txt
new file mode 100644
index 0000000..f48cfea
--- /dev/null
+++ b/lang/c/docs/index.txt
@@ -0,0 +1,754 @@
+Avro C
+======
+
+The current version of Avro is +{avro_version}+.  The current version of +libavro+ is +{libavro_version}+.
+This document was created +{docdate}+.
+
+== Introduction to Avro
+
+Avro is a data serialization system.
+
+Avro provides:
+
+* Rich data structures.
+* A compact, fast, binary data format.
+* A container file, to store persistent data.
+* Remote procedure call (RPC).
+
+This document will focus on the C implementation of Avro.  To learn more about
+Avro in general, http://hadoop.apache.org/avro/[visit the Avro website].
+
+== Introduction to Avro C 
+
+....
+    ___                      ______
+   /   |_   ___________     / ____/
+  / /| | | / / ___/ __ \   / /     
+ / ___ | |/ / /  / /_/ /  / /___   
+/_/  |_|___/_/   \____/   \____/   
+                                   
+....
+
+[quote,Waldi Ravens,(walra%moacs11 @ nl.net) 94/03/18]
+____
+A C program is like a fast dance on a newly waxed dance floor by people carrying razors.
+____
+
+The C implementation has been tested on +MacOSX+ and +Linux+ but, over
+time, the number of supported OSes should grow.  Please let us know if
+you're using +Avro C+ on other systems. There are no dependencies on 
+external libraries.  We embedded http://www.digip.org/jansson/[Jansson] into
++Avro C+ for parsing JSON into schema structures.  
+
+The C implementation supports:
+
+* binary encoding/decoding of all primitive and complex data types
+* storage to an Avro Object Container File
+* schema resolution, promotion and projection
+* validating and non-validating mode for writing Avro data
+
+The C implementation is lacking:
+
+* RPC
+
+To learn about the API, take a look at the examples and reference files
+later in this document.
+
+We're always looking for contributions so, if you're a C hacker, please
+feel free to http://hadoop.apache.org/avro/[submit patches to the
+project].
+
+
+== Error reporting
+
+Most functions in the Avro C library return a single +int+ status code.
+Following the POSIX _errno.h_ convention, a status code of 0 indicates
+success.  Non-zero codes indicate an error condition.  Some functions
+return a pointer value instead of an +int+ status code; for these
+functions, a +NULL+ pointer indicates an error.
+
+You can retrieve
+a string description of the most recent error using the +avro_strerror+
+function:
+
+[source,c]
+----
+avro_schema_t  schema = avro_schema_string();
+if (schema == NULL) {
+    fprintf(stderr, "Error was %s\n", avro_strerror());
+}
+----
+
+
+== Avro values
+
+Starting with version 1.6.0, the Avro C library has a new API for
+handling Avro data.  To help distinguish between the two APIs, we refer
+to the old one as the _legacy_ or _datum_ API, and the new one as the
+_value_ API.  (These names come from the names of the C types used to
+represent Avro data in the corresponding API — +avro_datum_t+ and
++avro_value_t+.)  The legacy API is still present, but it's deprecated —
+you shouldn't use the +avro_datum_t+ type or the +avro_datum_*+
+functions in new code.
+
+One main benefit of the new value API is that you can treat any existing
+C type as an Avro value; you just have to provide a custom
+implementation of the value interface.  In addition, we provide a
+_generic_ value implementation; “generic”, in this sense, meaning that
+this single implementation works for instances of any Avro schema type.
+Finally, we also provide a wrapper implementation for the deprecated
++avro_datum_t+ type, which lets you gradually transition to the new
+value API.
+
+
+=== Avro value interface
+
+You interact with Avro values using the _value interface_, which defines
+methods for setting and retrieving the contents of an Avro value.  An
+individual value is represented by an instance of the +avro_value_t+
+type.
+
+This section provides an overview of the methods that you can call on an
++avro_value_t+ instance.  There are quite a few methods in the value
+interface, but not all of them make sense for all Avro schema types.
+For instance, you won't be able to call +avro_value_set_boolean+ on an
+Avro array value.  If you try to call an inappropriate method, we'll
+return an +EINVAL+ error code.
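+
+For example (a sketch; +avro_value_set_boolean+ is one of the setter
+methods described later in this document):
+
+[source,c]
+----
+avro_value_t  *array_value = /* an instance of an array schema */;
+int  rc = avro_value_set_boolean(array_value, 1);
+/* rc is EINVAL, since the method doesn't match the schema type */
+----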
+
+Note that the functions in this section apply to _all_ Avro values,
+regardless of which value implementation is used under the covers.  This
+section doesn't describe how to _create_ value instances, since those
+constructors will be specific to a particular value implementation.
+
+
+==== Common methods
+
+There are a handful of methods that can be used with any value,
+regardless of which Avro schema it's an instance of:
+
+[source,c]
+----
+#include <stdint.h>
+#include <avro.h>
+
+avro_type_t avro_value_get_type(const avro_value_t *value);
+avro_schema_t avro_value_get_schema(const avro_value_t *value);
+
+int avro_value_equal(const avro_value_t *v1, const avro_value_t *v2);
+int avro_value_equal_fast(const avro_value_t *v1, const avro_value_t *v2);
+
+int avro_value_copy(avro_value_t *dest, const avro_value_t *src);
+int avro_value_copy_fast(avro_value_t *dest, const avro_value_t *src);
+
+uint32_t avro_value_hash(avro_value_t *value);
+
+int avro_value_reset(avro_value_t *value);
+----
+
+The +get_type+ and +get_schema+ methods can be used to get information
+about what kind of Avro value a given +avro_value_t+ instance
+represents.  (For +get_schema+, you borrow the value's reference to the
+schema; if you need to save it and ensure that it outlives the value,
+you need to call +avro_schema_incref+ on it.)
+
+The +equal+ and +equal_fast+ methods compare two values for equality.
+The two values do _not_ have to have the same value implementations, but
+they _do_ have to be instances of the same schema.  (Not _equivalent_
+schemas; the _same_ schema.)  The +equal+ method checks that the schemas
+match; the +equal_fast+ method assumes that they do.
+
+The +copy+ and +copy_fast+ methods copy the contents of one Avro value
+into another.  (Where possible, this is done without copying the actual
+content of a +bytes+, +string+, or +fixed+ value, using the
++avro_wrapped_buffer_t+ functions described in the next section.)  Like
++equal+, the two values must have the same schema; +copy+ checks this,
+while +copy_fast+ assumes it.
+
+The +hash+ method returns a hash value for the given Avro value.  This
+can be used to construct hash tables that use Avro values as keys.  The
+function works correctly even with maps; it produces a hash that doesn't
+depend on the ordering of the elements of the map.  Hash values are only
+meaningful for comparing values of exactly the same schema.  Hash values
+are _not_ guaranteed to be consistent across different platforms, or
+different versions of the Avro library.  That means that it's really
+only safe to use these hash values internally within the context of a
+single execution of a single application.
+
+The +reset+ method “clears out” an +avro_value_t+ instance, making sure
+that it's ready to accept the contents of a new value.  For scalars,
+this is usually a no-op, since the new value will just overwrite the old
+one.  For arrays and maps, this removes any existing elements from the
+container, so that we can append the elements of the new value.  For
+records and unions, this just recursively resets the fields or current
+branch.
+
+
+==== Scalar values
+
+The simplest case is handling instances of the scalar Avro schema types.
+In Avro, the scalars are all of the primitive schema types, as well as
++enum+ and +fixed+ — i.e., anything that can't contain another Avro
+value.  Note that we use standard C99 types to represent the primitive
+contents of an Avro scalar.
+
+To retrieve the contents of an Avro scalar, you can use one of the
+_getter_ methods:
+
+[source,c]
+----
+#include <stdint.h>
+#include <stdlib.h>
+#include <avro.h>
+
+int avro_value_get_boolean(const avro_value_t *value, int *dest);
+int avro_value_get_bytes(const avro_value_t *value,
+                         const void **dest, size_t *size);
+int avro_value_get_double(const avro_value_t *value, double *dest);
+int avro_value_get_float(const avro_value_t *value, float *dest);
+int avro_value_get_int(const avro_value_t *value, int32_t *dest);
+int avro_value_get_long(const avro_value_t *value, int64_t *dest);
+int avro_value_get_null(const avro_value_t *value);
+int avro_value_get_string(const avro_value_t *value,
+                          const char **dest, size_t *size);
+int avro_value_get_enum(const avro_value_t *value, int *dest);
+int avro_value_get_fixed(const avro_value_t *value,
+                         const void **dest, size_t *size);
+----
+
+For the most part, these should be self-explanatory.  For +bytes+,
++string+, and +fixed+ values, the pointer to the underlying content is
++const+ — you aren't allowed to modify the contents directly.  We
+guarantee that the content of a +string+ will be NUL-terminated, so you
+can use it as a C string as you'd expect.  The +size+ returned for a
++string+ object will include the NUL terminator; it will be one more
+than you'd get from calling +strlen+ on the content.
+
+Also, for +bytes+, +string+, and +fixed+, the +dest+ and +size+
+parameters are optional; if you only want to determine the length of a
++bytes+ value, you can use:
+
+[source,c]
+----
+avro_value_t  *value = /* from somewhere */;
+size_t  size;
+avro_value_get_bytes(value, NULL, &size);
+----
+
+To set the contents of an Avro scalar, you can use one of the _setter_
+methods:
+
+[source,c]
+----
+#include <stdint.h>
+#include <stdlib.h>
+#include <avro.h>
+
+int avro_value_set_boolean(avro_value_t *value, int src);
+int avro_value_set_bytes(avro_value_t *value,
+                         void *buf, size_t size);
+int avro_value_set_double(avro_value_t *value, double src);
+int avro_value_set_float(avro_value_t *value, float src);
+int avro_value_set_int(avro_value_t *value, int32_t src);
+int avro_value_set_long(avro_value_t *value, int64_t src);
+int avro_value_set_null(avro_value_t *value);
+int avro_value_set_string(avro_value_t *value, const char *src);
+int avro_value_set_string_len(avro_value_t *value,
+                              const char *src, size_t size);
+int avro_value_set_enum(avro_value_t *value, int src);
+int avro_value_set_fixed(avro_value_t *value,
+                         void *buf, size_t size);
+----
+
+These are also straightforward.  For +bytes+, +string+, and +fixed+
+values, the +set+ methods will make a copy of the underlying data.  For
++string+ values, the content must be NUL-terminated.  You can use
++set_string_len+ if you already know the length of the string content;
+the length you pass in should include the NUL terminator.  If you call
++set_string+, then we'll use +strlen+ to calculate the length.
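+
+As a quick illustration (a sketch; +value+ is assumed to be a +string+
+value obtained elsewhere), note that the explicit length passed to
++set_string_len+ counts the NUL terminator:
+
+[source,c]
+----
+#include <string.h>
+
+avro_value_t  *value = /* from somewhere */;
+
+/* These two calls store the same content: */
+avro_value_set_string(value, "hello");
+avro_value_set_string_len(value, "hello", strlen("hello") + 1);
+----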
+
+For +fixed+ values, the +size+ must match what's expected by the value's
+underlying +fixed+ schema; if the sizes don't match, you'll get an error
+code.
+
+If you don't want to copy the contents of a +bytes+, +string+, or
++fixed+ value, you can use the _giver_ and _grabber_ functions:
+
+[source,c]
+----
+#include <stdint.h>
+#include <stdlib.h>
+#include <avro.h>
+
+typedef void
+(*avro_buf_free_t)(void *ptr, size_t sz, void *user_data);
+
+int avro_value_give_bytes(avro_value_t *value, avro_wrapped_buffer_t *src);
+int avro_value_give_string_len(avro_value_t *value, avro_wrapped_buffer_t *src);
+int avro_value_give_fixed(avro_value_t *value, avro_wrapped_buffer_t *src);
+
+int avro_value_grab_bytes(const avro_value_t *value, avro_wrapped_buffer_t *dest);
+int avro_value_grab_string(const avro_value_t *value, avro_wrapped_buffer_t *dest);
+int avro_value_grab_fixed(const avro_value_t *value, avro_wrapped_buffer_t *dest);
+
+typedef struct avro_wrapped_buffer {
+    const void  *buf;
+    size_t  size;
+    void (*free)(avro_wrapped_buffer_t *self);
+    int (*copy)(avro_wrapped_buffer_t *dest,
+                const avro_wrapped_buffer_t *src,
+                size_t offset, size_t length);
+    int (*slice)(avro_wrapped_buffer_t *self,
+                 size_t offset, size_t length);
+} avro_wrapped_buffer_t;
+
+void
+avro_wrapped_buffer_free(avro_wrapped_buffer_t *buf);
+
+int
+avro_wrapped_buffer_copy(avro_wrapped_buffer_t *dest,
+                         const avro_wrapped_buffer_t *src,
+                         size_t offset, size_t length);
+
+int
+avro_wrapped_buffer_slice(avro_wrapped_buffer_t *self,
+                          size_t offset, size_t length);
+----
+
+The +give+ functions give control of an existing buffer to the value.
+(You should *not* try to free the +src+ wrapped buffer after calling
+this method.)  The +grab+ function fills in a wrapped buffer with a
+pointer to the contents of an Avro value.  (You *should* free the +dest+
+wrapped buffer when you're done with it.)
+
+The +avro_wrapped_buffer_t+ struct encapsulates the location and size of
+the existing buffer.  It also includes several methods.  The +free+
+method will be called when the content of the buffer is no longer
+needed.  The +slice+ method will be called when the wrapped buffer needs
+to be updated to point at a subset of what it pointed at before.  (This
+doesn't create a new wrapped buffer; it updates an existing one.)  The
++copy+ method will be called if the content needs to be copied.  Note
+that if you're wrapping a buffer with nice reference counting features,
+you don't need to perform an actual copy; you just need to ensure that
+the +free+ function can be called on both the original and the copy, and
+not have things blow up.
+
+The “generic” value implementation takes advantage of this feature; if
+you pass in a wrapped buffer with a +give+ method, and then retrieve it
+later with a +grab+ method, then we'll use the wrapped buffer's +copy+
+method to fill in the +dest+ parameter.  If your wrapped buffer
+implements a +copy+ method that updates reference counts instead of
+actually copying, then you've got nice zero-copy access to the contents
+of an Avro value.
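+
+As a sketch (the helper names are ours, and we leave the optional
++copy+ and +slice+ methods unset for brevity), wrapping a heap buffer
+and giving it to a +bytes+ value might look like this:
+
+[source,c]
+----
+#include <stdlib.h>
+#include <avro.h>
+
+static void
+my_free(avro_wrapped_buffer_t *self)
+{
+    /* We own the underlying buffer, so release it here. */
+    free((void *) self->buf);
+}
+
+int
+give_payload(avro_value_t *value, void *data, size_t size)
+{
+    avro_wrapped_buffer_t  wrapped;
+    wrapped.buf = data;
+    wrapped.size = size;
+    wrapped.free = my_free;
+    wrapped.copy = NULL;
+    wrapped.slice = NULL;
+    /* The value takes control of the buffer; don't free `wrapped`. */
+    return avro_value_give_bytes(value, &wrapped);
+}
+----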
+
+
+==== Compound values
+
+The following sections describe the getter and setter methods for
+handling compound Avro values.  All of the compound values are
+responsible for the storage of their children; this means that there
+isn't a method, for instance, that lets you add an existing
++avro_value_t+ to an array.  Instead, there's a method that creates a
+new, empty +avro_value_t+ of the appropriate type, adds it to the array,
+and returns it for you to fill in as needed.
+
+You also shouldn't try to free the child elements that are created this
+way; the container value is responsible for their life cycle.  The child
+element is guaranteed to be valid for as long as the container value
+is.  You'll usually define an +avro_value_t+ on the stack, and let it
+fall out of scope when you're done with it:
+
+[source,c]
+----
+avro_value_t  *array = /* from somewhere else */;
+
+{
+    avro_value_t  child;
+    avro_value_get_by_index(array, 0, &child, NULL);
+    /* do something interesting with the array element */
+}
+----
+
+
+==== Arrays
+
+There are three methods that can be used with array values:
+
+[source,c]
+----
+#include <stdlib.h>
+#include <avro.h>
+
+int avro_value_get_size(const avro_value_t *array, size_t *size);
+int avro_value_get_by_index(const avro_value_t *array, size_t index,
+                            avro_value_t *element, const char **unused);
+int avro_value_append(avro_value_t *array, avro_value_t *element,
+                      size_t *new_index);
+----
+
+The +get_size+ method returns the number of elements currently in the
+array.  The +get_by_index+ method fills in +element+ to point at the
+array element with the given index.  (You should use +NULL+ for the
++unused+ parameter; it's ignored for array values.)
+
+The +append+ method creates a new value, appends it to the array, and
+returns it in +element+.  If +new_index+ is given, then it will be
+filled in with the index of the new element.
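+
+For example (a sketch; +array+ is assumed to be an array of longs
+obtained elsewhere):
+
+[source,c]
+----
+avro_value_t  *array = /* from somewhere */;
+
+avro_value_t  element;
+size_t  new_index;
+
+/* Create a new, empty element at the end of the array... */
+avro_value_append(array, &element, &new_index);
+/* ...and fill it in. */
+avro_value_set_long(&element, 42);
+----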
+
+
+==== Maps
+
+There are four methods that can be used with map values:
+
+[source,c]
+----
+#include <stdlib.h>
+#include <avro.h>
+
+int avro_value_get_size(const avro_value_t *map, size_t *size);
+int avro_value_get_by_name(const avro_value_t *map, const char *key,
+                           avro_value_t *element, size_t *index);
+int avro_value_get_by_index(const avro_value_t *map, size_t index,
+                            avro_value_t *element, const char **key);
+int avro_value_add(avro_value_t *map,
+                   const char *key, avro_value_t *element,
+                   size_t *index, int *is_new);
+----
+
+The +get_size+ method returns the number of elements currently in the
+map.  Map elements can be retrieved either by their key (+get_by_name+)
+or by their numeric index (+get_by_index+).  (Numeric indices in a map
+are based on the order that the elements were added to the map.)  In
+either case, the method takes an optional output parameter that lets
+you retrieve the index associated with a key, and vice versa.
+
+The +add+ method will add a new value to the map, if the given key isn't
+already present.  If the key is present, then the existing value will be
+returned.  The +index+ parameter, if given, will be filled in with the
+element's index.  The +is_new+ parameter, if given, can be used to
+determine whether the mapped value is new or not.
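+
+For example (a sketch; +map+ is assumed to be a map of ints obtained
+elsewhere):
+
+[source,c]
+----
+avro_value_t  *map = /* from somewhere */;
+
+avro_value_t  element;
+size_t  index;
+int  is_new;
+
+/* Insert (or look up) the entry for this key... */
+avro_value_add(map, "dante", &element, &index, &is_new);
+/* ...and only initialize it if it wasn't already present. */
+if (is_new) {
+    avro_value_set_int(&element, 0);
+}
+----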
+
+
+==== Records
+
+There are three methods that can be used with record values:
+
+[source,c]
+----
+#include <stdlib.h>
+#include <avro.h>
+
+int avro_value_get_size(const avro_value_t *record, size_t *size);
+int avro_value_get_by_index(const avro_value_t *record, size_t index,
+                            avro_value_t *element, const char **field_name);
+int avro_value_get_by_name(const avro_value_t *record, const char *field_name,
+                           avro_value_t *element, size_t *index);
+----
+
+The +get_size+ method returns the number of fields in the record.  (You
+can also get this by querying the value's schema, but for some
+implementations, this method can be faster.)
+
+The +get_by_index+ and +get_by_name+ functions can be used to retrieve
+one of the fields in the record, either by its ordinal position within
+the record, or by the name of the underlying field.  Like with maps, the
+methods take an additional parameter that lets you retrieve the index
+associated with a field name, and vice versa.
+
+When possible, it's recommended that you access record fields by their
+numeric index, rather than by their field name.  For most
+implementations, this will be more efficient.
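+
+For example, you might look up a field's index by name once, and then
+use the index-based accessor from then on (a sketch; the +"First"+
+field name is borrowed from the tutorial schema later in this
+document):
+
+[source,c]
+----
+avro_value_t  *record = /* from somewhere */;
+
+avro_value_t  field;
+size_t  index;
+
+/* Look up the field's index by name once... */
+avro_value_get_by_name(record, "First", &field, &index);
+/* ...then use the (usually faster) index-based accessor. */
+avro_value_get_by_index(record, index, &field, NULL);
+----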
+
+
+==== Unions
+
+There are three methods that can be used with union values:
+
+[source,c]
+----
+#include <avro.h>
+
+int avro_value_get_discriminant(const avro_value_t *union_val, int *disc);
+int avro_value_get_current_branch(const avro_value_t *union_val, avro_value_t *branch);
+int avro_value_set_branch(avro_value_t *union_val,
+                          int discriminant, avro_value_t *branch);
+----
+
+The +get_discriminant+ and +get_current_branch+ methods return the
+current state of the union value, without modifying which branch is
+currently selected.  The +set_branch+ method can be used to choose the
+active branch, filling in the +branch+ value to point at the branch's
+value instance.  (Most implementations will be smart enough to detect
+when the desired branch is already selected, so you should always call
+this method unless you can _guarantee_ that the right branch is already
+current.)
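+
+For example (a sketch; we assume branch 0 of the union is a +string+):
+
+[source,c]
+----
+avro_value_t  *union_val = /* from somewhere */;
+
+avro_value_t  branch;
+
+/* Select branch 0 and fill in its value. */
+avro_value_set_branch(union_val, 0, &branch);
+avro_value_set_string(&branch, "hello");
+----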
+
+
+=== Creating value instances
+
+Okay, so we've described how to interact with a value that you already
+have a pointer to, but how do you create one in the first place?  Each
+implementation of the value interface must provide its own functions for
+creating +avro_value_t+ instances for that class.  The 10,000-foot view
+is to:
+
+1. Get an _implementation struct_ for the value implementation that you
+   want to use.  (This is represented by an +avro_value_iface_t+
+   pointer.)
+
+2. Use the implementation's constructor function to allocate instances
+   of that value implementation.
+
+3. Do whatever you need to the value (using the +avro_value_t+ methods
+   described in the previous section).
+
+4. Free the value instance, if necessary, using the implementation's
+   destructor function.
+
+5. Free the implementation struct when you're done creating value
+   instances.
+
+These steps use the following functions:
+
+[source,c]
+----
+#include <avro.h>
+
+avro_value_iface_t *avro_value_iface_incref(avro_value_iface_t *iface);
+void avro_value_iface_decref(avro_value_iface_t *iface);
+----
+
+Note that for most value implementations, it's fine to reuse a single
++avro_value_t+ instance for multiple values, using the
++avro_value_reset+ function before filling in the instance for each
+value.  (This helps reduce the number of +malloc+ and +free+ calls that
+your application will make.)
+
+For most applications, you won't need to write your own value
+implementation; the Avro C library provides an efficient “generic”
+implementation, which supports the full range of Avro schema types.
+There's a good chance that you just want to use this implementation,
+rather than rolling your own.  (The primary reason for rolling your own
+would be if you want to access the elements of a compound value using C
+syntax — for instance, translating an Avro record into a C struct.) You
+can use the following functions to create and work with a generic value
+implementation for a particular schema:
+
+[source,c]
+----
+#include <avro.h>
+
+avro_value_iface_t *avro_generic_class_from_schema(avro_schema_t schema);
+int avro_generic_value_new(const avro_value_iface_t *iface, avro_value_t *dest);
+void avro_generic_value_free(avro_value_t *self);
+----
+
+Putting all of this together, you might have the following snippet of
+code:
+
+[source,c]
+----
+avro_schema_t  schema = avro_schema_long();
+avro_value_iface_t  *iface = avro_generic_class_from_schema(schema);
+
+avro_value_t  val;
+avro_generic_value_new(iface, &val);
+
+/* Generate Avro longs from 0-499 */
+int  i;
+for (i = 0; i < 500; i++) {
+    avro_value_reset(&val);
+    avro_value_set_long(&val, i);
+    /* do something with the value */
+}
+
+avro_generic_value_free(&val);
+avro_value_iface_decref(iface);
+avro_schema_decref(schema);
+----
+
+
+== Reference Counting
+
++Avro C+ does reference counting for all schema and data objects.
+When the number of references drops to zero, the memory is freed.
+
+For example, to create and free a string, you would use:
+----
+avro_datum_t string = avro_string("This is my string");
+
+...
+avro_datum_decref(string);
+----
+
+Things get a little more complicated when you consider more elaborate
+schema and data structures.
+
+For example, let's say that you create a record with a single
+string field:
+----
+avro_datum_t example = avro_record("Example");
+avro_datum_t solo_field = avro_string("Example field value");
+
+avro_record_set(example, "solo", solo_field);
+
+...
+avro_datum_decref(example);
+----
+
+In this example, the +solo_field+ datum would *not* be freed since it
+has two references: the original reference and a reference inside
+the +Example+ record.  The +avro_datum_decref(example)+ call drops
+the reference count to one.  If you are finished with the +solo_field+
+datum, then you need to call +avro_datum_decref(solo_field)+ to
+drop the final reference and free it.
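+
+Put differently, here's the leak-free version of the same example:
+----
+avro_datum_t example = avro_record("Example");
+avro_datum_t solo_field = avro_string("Example field value");
+
+avro_record_set(example, "solo", solo_field);
+avro_datum_decref(solo_field);  /* the record keeps its own reference */
+
+...
+avro_datum_decref(example);     /* frees the record and, with it, the field */
+----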
+
+== Wrap It and Give It
+
+You'll notice that some datatypes can be "wrapped" and "given".  This
+allows C programmers the freedom to decide who is responsible for
+the memory.  Let's take strings for example.
+
+To create a string datum, you have three different methods:
+----
+avro_datum_t avro_string(const char *str);
+avro_datum_t avro_wrapstring(const char *str);
+avro_datum_t avro_givestring(const char *str);
+----
+
+If you use +avro_string+, then +Avro C+ will make a copy of your
+string and free it when the datum is dereferenced.  In some cases,
+especially when dealing with large amounts of data, you want 
+to avoid this memory copy.  That's where +avro_wrapstring+ and
++avro_givestring+ can help.
+
+If you use +avro_wrapstring+, then +Avro C+ will do no memory
+management at all.  It will just save a pointer to your data and
+it's your responsibility to free the string.  
+
+WARNING: When using +avro_wrapstring+, do not free the string 
+before you dereference the string datum with +avro_datum_decref()+.
+
+Lastly, if you use +avro_givestring+ then +Avro C+ will free the
+string later when the datum is dereferenced.  In a sense, you
+are "giving" responsibility for freeing the string to +Avro C+.
+
+[WARNING] 
+===============================
+Don't "give" +Avro C+ a string that you haven't allocated from the heap with e.g. +malloc+ or +strdup+.
+
+For example, *don't* do this:
+----
+avro_datum_t bad_idea = avro_givestring("This isn't allocated on the heap");
+----
+===============================
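+
+By contrast, a safe pattern (a sketch; the +ok_idea+ name is ours) is
+to allocate the string on the heap first:
+----
+char *heap_str = strdup("This string is safe to give");
+avro_datum_t ok_idea = avro_givestring(heap_str);
+
+...
+avro_datum_decref(ok_idea);  /* Avro C frees heap_str for us */
+----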
+
+== Schema Validation
+
+If you want to write a datum, you would use the following function
+
+[source,c]
+----
+int avro_write_data(avro_writer_t writer,
+                    avro_schema_t writers_schema, avro_datum_t datum);
+----
+
+If you pass in a +writers_schema+, then your +datum+ will be validated *before*
+it is sent to the +writer+.  This check ensures that your data has the 
+correct format.  If you are certain your datum is correct, you can pass
+a +NULL+ value for +writers_schema+ and +Avro C+ will not validate before
+writing.
+
+NOTE: Data written to an Avro File Object Container is always validated.
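+
+For instance (a sketch; +writer+, +person_schema+, and +person+ are
+assumed to come from elsewhere, along the lines of the tutorial below):
+
+[source,c]
+----
+/* Validate `person` against its schema before writing... */
+if (avro_write_data(writer, person_schema, person)) {
+    fprintf(stderr, "Write failed: %s\n", avro_strerror());
+}
+
+/* ...or skip validation if you're sure the datum is well-formed. */
+avro_write_data(writer, NULL, person);
+----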
+
+== Examples
+
+[quote,Dante Hicks]
+____
+I'm not even supposed to be here today!
+____
+
+Imagine you're a freelance hacker in Leonardo, New Jersey and you've
+been approached by the owner of the local *Quick Stop Convenience* store.
+He wants you to create a contact database in case he needs to call employees
+to work on their day off.
+
+You might build a simple contact system using Avro C like the following...
+
+[source,c]
+----
+include::../examples/quickstop.c[]
+----
+
+When you compile and run this program, you should get the following output
+
+----
+Successfully added Hicks, Dante id=1
+Successfully added Graves, Randal id=2
+Successfully added Loughran, Veronica id=3
+Successfully added Bree, Caitlin id=4
+Successfully added Silent, Bob id=5
+Successfully added ???, Jay id=6
+
+Avro is compact. Here is the data for all 6 people.
+| 02 0A 44 61 6E 74 65 0A | 48 69 63 6B 73 1C 28 35 |	..Dante.Hicks.(5
+| 35 35 29 20 31 32 33 2D | 34 35 36 37 40 04 0C 52 |	55) 123-4567@..R
+| 61 6E 64 61 6C 0C 47 72 | 61 76 65 73 1C 28 35 35 |	andal.Graves.(55
+| 35 29 20 31 32 33 2D 35 | 36 37 38 3C 06 10 56 65 |	5) 123-5678<..Ve
+| 72 6F 6E 69 63 61 10 4C | 6F 75 67 68 72 61 6E 1C |	ronica.Loughran.
+| 28 35 35 35 29 20 31 32 | 33 2D 30 39 38 37 38 08 |	(555) 123-09878.
+| 0E 43 61 69 74 6C 69 6E | 08 42 72 65 65 1C 28 35 |	.Caitlin.Bree.(5
+| 35 35 29 20 31 32 33 2D | 32 33 32 33 36 0A 06 42 |	55) 123-23236..B
+| 6F 62 0C 53 69 6C 65 6E | 74 1C 28 35 35 35 29 20 |	ob.Silent.(555) 
+| 31 32 33 2D 36 34 32 32 | 3A 0C 06 4A 61 79 06 3F |	123-6422:..Jay.?
+| 3F 3F 1C 28 35 35 35 29 | 20 31 32 33 2D 39 31 38 |	??.(555) 123-918
+| 32 34 .. .. .. .. .. .. | .. .. .. .. .. .. .. .. |	24..............
+
+Now let's read all the records back out
+1 |           Dante |           Hicks |  (555) 123-4567 | 32
+2 |          Randal |          Graves |  (555) 123-5678 | 30
+3 |        Veronica |        Loughran |  (555) 123-0987 | 28
+4 |         Caitlin |            Bree |  (555) 123-2323 | 27
+5 |             Bob |          Silent |  (555) 123-6422 | 29
+6 |             Jay |             ??? |  (555) 123-9182 | 26
+
+
+Use projection to print only the First name and phone numbers
+          Dante |  (555) 123-4567 | 
+         Randal |  (555) 123-5678 | 
+       Veronica |  (555) 123-0987 | 
+        Caitlin |  (555) 123-2323 | 
+            Bob |  (555) 123-6422 | 
+            Jay |  (555) 123-9182 | 
+----
+
+The *Quick Stop* owner was so pleased that he asked you to create a
+movie database for his *RST Video* store.
+
+== Reference files
+
+=== avro.h
+
+The +avro.h+ header file contains the complete public API
+for +Avro C+.  The documentation is rather sparse right now
+but we'll be adding more information soon.
+
+[source,c]
+----
+include::../src/avro.h[]
+----
+
+=== test_avro_data.c 
+
+Another good way to learn how to encode/decode data in +Avro C+ is
+to look at the +test_avro_data.c+ unit test.  This simple unit test
+checks that all the Avro types can be encoded/decoded correctly.
+
+[source,c]
+----
+include::../tests/test_avro_data.c[]
+----
+
diff --git a/lang/c/examples/.gitignore b/lang/c/examples/.gitignore
new file mode 100644
index 0000000..8505a32
--- /dev/null
+++ b/lang/c/examples/.gitignore
@@ -0,0 +1,2 @@
+quickstop
+quickstop.db
diff --git a/lang/c/examples/CMakeLists.txt b/lang/c/examples/CMakeLists.txt
new file mode 100644
index 0000000..d00499d
--- /dev/null
+++ b/lang/c/examples/CMakeLists.txt
@@ -0,0 +1,32 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+add_executable(quickstop quickstop.c)
+target_link_libraries(quickstop avro-static)
+
+if (WIN32)
+    set(exec_name ${CMAKE_CURRENT_BINARY_DIR}/Debug/quickstop.exe)
+else (WIN32)
+    set(exec_name ${CMAKE_CURRENT_BINARY_DIR}/quickstop)
+endif (WIN32)
+
+add_test(quickstop
+    ${CMAKE_COMMAND} -E chdir ${AvroC_SOURCE_DIR}/examples
+    ${exec_name}
+)
diff --git a/lang/c/examples/quickstop.c b/lang/c/examples/quickstop.c
new file mode 100644
index 0000000..d0c396f
--- /dev/null
+++ b/lang/c/examples/quickstop.c
@@ -0,0 +1,222 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#ifdef DEFLATE_CODEC
+#define QUICKSTOP_CODEC  "deflate"
+#else
+#define QUICKSTOP_CODEC  "null"
+#endif
+
+avro_schema_t person_schema;
+int64_t id = 0;
+
+/* A simple schema for our tutorial */
+const char  PERSON_SCHEMA[] =
+"{\"type\":\"record\",\
+  \"name\":\"Person\",\
+  \"fields\":[\
+     {\"name\": \"ID\", \"type\": \"long\"},\
+     {\"name\": \"First\", \"type\": \"string\"},\
+     {\"name\": \"Last\", \"type\": \"string\"},\
+     {\"name\": \"Phone\", \"type\": \"string\"},\
+     {\"name\": \"Age\", \"type\": \"int\"}]}";
+
+/* Parse schema into a schema data structure */
+void init_schema(void)
+{
+	if (avro_schema_from_json_literal(PERSON_SCHEMA, &person_schema)) {
+		fprintf(stderr, "Unable to parse person schema\n");
+		exit(EXIT_FAILURE);
+	}
+}
+
+/* Create a datum to match the person schema and save it */
+void
+add_person(avro_file_writer_t db, const char *first, const char *last,
+	   const char *phone, int32_t age)
+{
+	avro_datum_t person = avro_record(person_schema);
+
+	avro_datum_t id_datum = avro_int64(++id);
+	avro_datum_t first_datum = avro_string(first);
+	avro_datum_t last_datum = avro_string(last);
+	avro_datum_t age_datum = avro_int32(age);
+	avro_datum_t phone_datum = avro_string(phone);
+
+	if (avro_record_set(person, "ID", id_datum)
+	    || avro_record_set(person, "First", first_datum)
+	    || avro_record_set(person, "Last", last_datum)
+	    || avro_record_set(person, "Age", age_datum)
+	    || avro_record_set(person, "Phone", phone_datum)) {
+		fprintf(stderr, "Unable to create Person datum structure\n");
+		exit(EXIT_FAILURE);
+	}
+
+	if (avro_file_writer_append(db, person)) {
+		fprintf(stderr,
+			"Unable to write Person datum to memory buffer\nMessage: %s\n", avro_strerror());
+		exit(EXIT_FAILURE);
+	}
+
+	/* Decrement all our references to prevent memory from leaking */
+	avro_datum_decref(id_datum);
+	avro_datum_decref(first_datum);
+	avro_datum_decref(last_datum);
+	avro_datum_decref(age_datum);
+	avro_datum_decref(phone_datum);
+	avro_datum_decref(person);
+
+	//fprintf(stdout, "Successfully added %s, %s id=%"PRId64"\n", last, first, id);
+}
+
+int print_person(avro_file_reader_t db, avro_schema_t reader_schema)
+{
+	int rval;
+	avro_datum_t person;
+
+	rval = avro_file_reader_read(db, reader_schema, &person);
+	if (rval == 0) {
+		int64_t i64;
+		int32_t i32;
+		char *p;
+		avro_datum_t id_datum, first_datum, last_datum, phone_datum,
+		    age_datum;
+
+		if (avro_record_get(person, "ID", &id_datum) == 0) {
+			avro_int64_get(id_datum, &i64);
+			fprintf(stdout, "%"PRId64" | ", i64);
+		}
+		if (avro_record_get(person, "First", &first_datum) == 0) {
+			avro_string_get(first_datum, &p);
+			fprintf(stdout, "%15s | ", p);
+		}
+		if (avro_record_get(person, "Last", &last_datum) == 0) {
+			avro_string_get(last_datum, &p);
+			fprintf(stdout, "%15s | ", p);
+		}
+		if (avro_record_get(person, "Phone", &phone_datum) == 0) {
+			avro_string_get(phone_datum, &p);
+			fprintf(stdout, "%15s | ", p);
+		}
+		if (avro_record_get(person, "Age", &age_datum) == 0) {
+			avro_int32_get(age_datum, &i32);
+			fprintf(stdout, "%d", i32);
+		}
+		fprintf(stdout, "\n");
+
+		/* We no longer need this memory */
+		avro_datum_decref(person);
+	}
+	return rval;
+}
+
+int main(void)
+{
+	int rval;
+	avro_file_reader_t dbreader;
+	avro_file_writer_t db;
+	avro_schema_t projection_schema, first_name_schema, phone_schema;
+	int64_t i;
+	const char *dbname = "quickstop.db";
+	char number[15] = {0};
+
+	/* Initialize the schema structure from JSON */
+	init_schema();
+
+	/* Delete the database if it exists */
+	remove(dbname);
+	/* Create a new database */
+	rval = avro_file_writer_create_with_codec
+	    (dbname, person_schema, &db, QUICKSTOP_CODEC, 0);
+	if (rval) {
+		fprintf(stderr, "There was an error creating %s\n", dbname);
+		fprintf(stderr, " error message: %s\n", avro_strerror());
+		exit(EXIT_FAILURE);
+	}
+
+	/* Add lots of people to the database */
+	for (i = 0; i < 1000; i++)
+	{
+		sprintf(number, "(%d)", (int)i);
+		add_person(db, "Dante", "Hicks", number, 32);
+		add_person(db, "Randal", "Graves", "(555) 123-5678", 30);
+		add_person(db, "Veronica", "Loughran", "(555) 123-0987", 28);
+		add_person(db, "Caitlin", "Bree", "(555) 123-2323", 27);
+		add_person(db, "Bob", "Silent", "(555) 123-6422", 29);
+		add_person(db, "Jay", "???", number, 26);
+	}
+
+	/* Close the block and open a new one */
+	avro_file_writer_flush(db);
+	add_person(db, "Super", "Man", "123456", 31);
+
+	avro_file_writer_close(db);
+
+	fprintf(stdout, "\nNow let's read all the records back out\n");
+
+	/* Read all the records and print them */
+	if (avro_file_reader(dbname, &dbreader)) {
+		fprintf(stderr, "Error opening file: %s\n", avro_strerror());
+		exit(EXIT_FAILURE);
+	}
+	for (i = 0; i < id; i++) {
+		if (print_person(dbreader, NULL)) {
+			fprintf(stderr, "Error printing person\nMessage: %s\n", avro_strerror());
+			exit(EXIT_FAILURE);
+		}
+	}
+	avro_file_reader_close(dbreader);
+
+	/* You can also use projection to decode only the data you are
+	   interested in.  This is particularly useful when you have
+	   huge data sets and you're only interested in particular fields,
+	   e.g. your contacts' first names and phone numbers */
+	projection_schema = avro_schema_record("Person", NULL);
+	first_name_schema = avro_schema_string();
+	phone_schema = avro_schema_string();
+	avro_schema_record_field_append(projection_schema, "First",
+					first_name_schema);
+	avro_schema_record_field_append(projection_schema, "Phone",
+					phone_schema);
+
+	/* Read only the record you're interested in */
+	fprintf(stdout,
+		"\n\nUse projection to print only the First name and phone numbers\n");
+	if (avro_file_reader(dbname, &dbreader)) {
+		fprintf(stderr, "Error opening file: %s\n", avro_strerror());
+		exit(EXIT_FAILURE);
+	}
+	for (i = 0; i < id; i++) {
+		if (print_person(dbreader, projection_schema)) {
+			fprintf(stderr, "Error printing person: %s\n",
+				avro_strerror());
+			exit(EXIT_FAILURE);
+		}
+	}
+	avro_file_reader_close(dbreader);
+	avro_schema_decref(first_name_schema);
+	avro_schema_decref(phone_schema);
+	avro_schema_decref(projection_schema);
+
+	/* We don't need this schema anymore */
+	avro_schema_decref(person_schema);
+	return 0;
+}
diff --git a/lang/c/jansson/.gitignore b/lang/c/jansson/.gitignore
new file mode 100644
index 0000000..2ab2196
--- /dev/null
+++ b/lang/c/jansson/.gitignore
@@ -0,0 +1 @@
+jansson.pc
diff --git a/lang/c/jansson/CHANGES b/lang/c/jansson/CHANGES
new file mode 100644
index 0000000..5e002d5
--- /dev/null
+++ b/lang/c/jansson/CHANGES
@@ -0,0 +1,330 @@
+Version 2.1 (in development)
+============================
+
+* New features:
+
+  - `json_loadb()`: Decode a string with a given size, useful if the
+    string is not null terminated.
+
+  - Add ``JSON_ENCODE_ANY`` encoding flag to allow encoding any JSON
+    value. By default, only arrays and objects can be encoded. (#19)
+
+  - Add ``JSON_REJECT_DUPLICATES`` decoding flag to issue a decoding
+    error if any JSON object in the input contains duplicate keys. (#3)
+
+  - Add ``JSON_DISABLE_EOF_CHECK`` decoding flag to stop decoding after a
+    valid JSON input. This allows other data after the JSON data.
+
+* Bug fixes:
+
+  - Fix an additional memory leak when memory allocation fails in
+    `json_object_set()` and friends.
+
+  - Clear errno before calling `strtod()` for better portability. (#27)
+
+* Building:
+
+  - Avoid set-but-not-used warning/error in a test. (#20)
+
+* Other:
+
+  - Minor clarifications to documentation.
+
+
+Version 2.0.1
+=============
+
+Released 2011-03-31
+
+* Bug fixes:
+
+  - Replace a few `malloc()` and `free()` calls with their
+    counterparts that support custom memory management.
+
+  - Fix object key hashing in json_unpack() strict checking mode.
+
+  - Fix the parentheses in ``JANSSON_VERSION_HEX`` macro.
+
+  - Fix `json_object_size()` return value.
+
+  - Fix a few compilation issues.
+
+* Portability:
+
+  - Enhance portability of `va_copy()`.
+
+  - Test framework portability enhancements.
+
+* Documentation:
+
+  - Distribute ``doc/upgrading.rst`` with the source tarball.
+
+  - Build documentation in strict mode in ``make distcheck``.
+
+
+Version 2.0
+===========
+
+Released 2011-02-28
+
+This release is backwards incompatible with the 1.x release series.
+See the chapter "Upgrading from older versions" in documentation for
+details.
+
+* Backwards incompatible changes:
+
+  - Unify unsigned integer usage in the API: All occurrences of
+    unsigned int and unsigned long have been replaced with size_t.
+
+  - Change JSON integer's underlying type to the widest signed integer
+    type available, i.e. long long if it's supported, otherwise long.
+    Add a typedef json_int_t that defines the type.
+
+  - Change the maximum indentation depth to 31 spaces in encoder. This
+    frees up bits from the flags parameter of encoding functions
+    `json_dumpf()`, `json_dumps()` and `json_dump_file()`.
+
+  - For future needs, add a flags parameter to all decoding functions
+    `json_loadf()`, `json_loads()` and `json_load_file()`.
+
+* New features
+
+  - `json_pack()`, `json_pack_ex()`, `json_vpack_ex()`: Create JSON
+    values based on a format string.
+
+  - `json_unpack()`, `json_unpack_ex()`, `json_vunpack_ex()`: Simple
+    value extraction and validation functionality based on a format
+    string.
+
+  - Add column, position and source fields to the ``json_error_t``
+    struct.
+
+  - Enhance error reporting in the decoder.
+
+  - ``JANSSON_VERSION`` et al.: Preprocessor constants that define the
+    library version.
+
+  - `json_set_alloc_funcs()`: Set custom memory allocation functions.
+
+* Fix many portability issues, especially on Windows.
+
+* Configuration
+
+  - Add file ``jansson_config.h`` that contains site specific
+    configuration. It's created automatically by the configure script,
+    or can be created by hand if the configure script cannot be used.
+    The file ``jansson_config.h.win32`` can be used without
+    modifications on Windows systems.
+
+  - Add a section to documentation describing how to build Jansson on
+    Windows.
+
+  - Documentation now requires Sphinx 1.0 or newer.
+
+
+Version 1.3
+===========
+
+Released 2010-06-13
+
+* New functions:
+
+  - `json_object_iter_set()`, `json_object_iter_set_new()`: Change
+    object contents while iterating over it.
+
+  - `json_object_iter_at()`: Return an iterator that points to a
+    specific object item.
+
+* New encoding flags:
+
+  - ``JSON_PRESERVE_ORDER``: Preserve the insertion order of object
+    keys.
+
+* Bug fixes:
+
+  - Fix an error that occurred when an array or object was first
+    encoded as empty, then populated with some data, and then
+    re-encoded
+
+  - Fix the situation like above, but when the first encoding resulted
+    in an error
+
+* Documentation:
+
+  - Clarify the documentation on reference stealing, providing an
+    example usage pattern
+
+
+Version 1.2.1
+=============
+
+Released 2010-04-03
+
+* Bug fixes:
+
+  - Fix reference counting on ``true``, ``false`` and ``null``
+  - Estimate real number underflows in decoder with 0.0 instead of
+    issuing an error
+
+* Portability:
+
+  - Make ``int32_t`` available on all systems
+  - Support compilers that don't have the ``inline`` keyword
+  - Require Autoconf 2.60 (for ``int32_t``)
+
+* Tests:
+
+  - Print test names correctly when ``VERBOSE=1``
+  - ``test/suites/api``: Fail when a test fails
+  - Enhance tests for iterators
+  - Enhance tests for decoding texts that contain null bytes
+
+* Documentation:
+
+  - Don't remove ``changes.rst`` in ``make clean``
+  - Add a chapter on RFC conformance
+
+
+Version 1.2
+===========
+
+Released 2010-01-21
+
+* New functions:
+
+  - `json_equal()`: Test whether two JSON values are equal
+  - `json_copy()` and `json_deep_copy()`: Make shallow and deep copies
+    of JSON values
+  - Add a version of all functions taking a string argument that
+    doesn't check for valid UTF-8: `json_string_nocheck()`,
+    `json_string_set_nocheck()`, `json_object_set_nocheck()`,
+    `json_object_set_new_nocheck()`
+
+* New encoding flags:
+
+  - ``JSON_SORT_KEYS``: Sort objects by key
+  - ``JSON_ENSURE_ASCII``: Escape all non-ASCII Unicode characters
+  - ``JSON_COMPACT``: Use a compact representation with all unneeded
+    whitespace stripped
+
+* Bug fixes:
+
+  - Revise and unify whitespace usage in encoder: Add spaces between
+    array and object items, never append newline to output.
+  - Remove const qualifier from the ``json_t`` parameter in
+    `json_string_set()`, `json_integer_set()` and `json_real_set()`.
+  - Use ``int32_t`` internally for representing Unicode code points
+    (int is not enough on all platforms)
+
+* Other changes:
+
+  - Convert ``CHANGES`` (this file) to reStructured text and add it to
+    HTML documentation
+  - The test system has been refactored. Python is no longer required
+    to run the tests.
+  - Documentation can now be built by invoking ``make html``
+  - Support for pkg-config
+
+
+Version 1.1.3
+=============
+
+Released 2009-12-18
+
+* Encode reals correctly, so that first encoding and then decoding a
+  real always produces the same value
+* Don't export private symbols in ``libjansson.so``
+
+
+Version 1.1.2
+=============
+
+Released 2009-11-08
+
+* Fix a bug where an error message was not produced if the input file
+  could not be opened in `json_load_file()`
+* Fix an assertion failure in decoder caused by a minus sign without a
+  digit after it
+* Remove an unneeded include of ``stdint.h`` in ``jansson.h``
+
+
+Version 1.1.1
+=============
+
+Released 2009-10-26
+
+* All documentation files were not distributed with v1.1; build
+  documentation in make distcheck to prevent this in the future
+* Fix v1.1 release date in ``CHANGES``
+
+
+Version 1.1
+===========
+
+Released 2009-10-20
+
+* API additions and improvements:
+
+  - Extend array and object APIs
+  - Add functions to modify integer, real and string values
+  - Improve argument validation
+  - Use unsigned int instead of ``uint32_t`` for encoding flags
+
+* Enhance documentation
+
+  - Add getting started guide and tutorial
+  - Fix some typos
+  - General clarifications and cleanup
+
+* Check for integer and real overflows and underflows in decoder
+* Make singleton values thread-safe (``true``, ``false`` and ``null``)
+* Enhance circular reference handling
+* Don't define ``-std=c99`` in ``AM_CFLAGS``
+* Add C++ guards to ``jansson.h``
+* Minor performance and portability improvements
+* Expand test coverage
+
+
+Version 1.0.4
+=============
+
+Released 2009-10-11
+
+* Relax Autoconf version requirement to 2.59
+* Make Jansson compile on platforms where plain ``char`` is unsigned
+* Fix API tests for object
+
+
+Version 1.0.3
+=============
+
+Released 2009-09-14
+
+* Check for integer and real overflows and underflows in decoder
+* Use the Python json module for tests, or simplejson if the json
+  module is not found
+* Distribute changelog (this file)
+
+
+Version 1.0.2
+=============
+
+Released 2009-09-08
+
+* Handle EOF correctly in decoder
+
+
+Version 1.0.1
+=============
+
+Released 2009-09-04
+
+* Fixed broken `json_is_boolean()`
+
+
+Version 1.0
+===========
+
+Released 2009-08-25
+
+* Initial release
diff --git a/lang/c/jansson/LICENSE b/lang/c/jansson/LICENSE
new file mode 100644
index 0000000..6d70a1b
--- /dev/null
+++ b/lang/c/jansson/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/lang/c/jansson/Makefile.am b/lang/c/jansson/Makefile.am
new file mode 100644
index 0000000..e101fa2
--- /dev/null
+++ b/lang/c/jansson/Makefile.am
@@ -0,0 +1,10 @@
+EXTRA_DIST = CHANGES LICENSE README.rst
+SUBDIRS = doc src test
+
+# "make distcheck" builds the dvi target, so use it to check that the
+# documentation is built correctly.
+dvi:
+	$(MAKE) SPHINXOPTS_EXTRA=-W html
+
+#pkgconfigdir = $(libdir)/pkgconfig
+#pkgconfig_DATA = jansson.pc
diff --git a/lang/c/jansson/Makefile.in b/lang/c/jansson/Makefile.in
new file mode 100644
index 0000000..9dcf437
--- /dev/null
+++ b/lang/c/jansson/Makefile.in
@@ -0,0 +1,640 @@
+# Makefile.in generated by automake 1.10 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006  Free Software Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+subdir = .
+DIST_COMMON = $(am__configure_deps) $(srcdir)/Makefile.am \
+	$(srcdir)/Makefile.in $(srcdir)/config.h.in \
+	$(srcdir)/jansson.pc.in $(top_srcdir)/configure config.guess \
+	config.sub depcomp install-sh ltmain.sh missing
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+	$(ACLOCAL_M4)
+am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \
+ configure.lineno config.status.lineno
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = config.h
+CONFIG_CLEAN_FILES = jansson.pc
+SOURCES =
+DIST_SOURCES =
+RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \
+	html-recursive info-recursive install-data-recursive \
+	install-dvi-recursive install-exec-recursive \
+	install-html-recursive install-info-recursive \
+	install-pdf-recursive install-ps-recursive install-recursive \
+	installcheck-recursive installdirs-recursive pdf-recursive \
+	ps-recursive uninstall-recursive
+RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive	\
+  distclean-recursive maintainer-clean-recursive
+ETAGS = etags
+CTAGS = ctags
+DIST_SUBDIRS = $(SUBDIRS)
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+distdir = $(PACKAGE)-$(VERSION)
+top_distdir = $(distdir)
+am__remove_distdir = \
+  { test ! -d $(distdir) \
+    || { find $(distdir) -type d ! -perm -200 -exec chmod u+w {} ';' \
+         && rm -fr $(distdir); }; }
+DIST_ARCHIVES = $(distdir).tar.gz
+GZIP_ENV = --best
+distuninstallcheck_listfiles = find . -type f -print
+distcleancheck_listfiles = find . -type f -print
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+LD = @LD@
+LDFLAGS = @LDFLAGS@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+MAKEINFO = @MAKEINFO@
+MKDIR_P = @MKDIR_P@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+RANLIB = @RANLIB@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+json_have_long_long = @json_have_long_long@
+json_inline = @json_inline@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+EXTRA_DIST = CHANGES LICENSE README.rst
+SUBDIRS = doc src test
+all: config.h
+	$(MAKE) $(AM_MAKEFLAGS) all-recursive
+
+.SUFFIXES:
+am--refresh:
+	@:
+$(srcdir)/Makefile.in:  $(srcdir)/Makefile.am  $(am__configure_deps)
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+	      echo ' cd $(srcdir) && $(AUTOMAKE) --foreign '; \
+	      cd $(srcdir) && $(AUTOMAKE) --foreign  \
+		&& exit 0; \
+	      exit 1;; \
+	  esac; \
+	done; \
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign  Makefile'; \
+	cd $(top_srcdir) && \
+	  $(AUTOMAKE) --foreign  Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+	@case '$?' in \
+	  *config.status*) \
+	    echo ' $(SHELL) ./config.status'; \
+	    $(SHELL) ./config.status;; \
+	  *) \
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \
+	esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+	$(SHELL) ./config.status --recheck
+
+$(top_srcdir)/configure:  $(am__configure_deps)
+	cd $(srcdir) && $(AUTOCONF)
+$(ACLOCAL_M4):  $(am__aclocal_m4_deps)
+	cd $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS)
+
+config.h: stamp-h1
+	@if test ! -f $@; then \
+	  rm -f stamp-h1; \
+	  $(MAKE) $(AM_MAKEFLAGS) stamp-h1; \
+	else :; fi
+
+stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status
+	@rm -f stamp-h1
+	cd $(top_builddir) && $(SHELL) ./config.status config.h
+$(srcdir)/config.h.in:  $(am__configure_deps) 
+	cd $(top_srcdir) && $(AUTOHEADER)
+	rm -f stamp-h1
+	touch $@
+
+distclean-hdr:
+	-rm -f config.h stamp-h1
+jansson.pc: $(top_builddir)/config.status $(srcdir)/jansson.pc.in
+	cd $(top_builddir) && $(SHELL) ./config.status $@
+
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-libtool:
+	-rm -rf .libs _libs
+
+distclean-libtool:
+	-rm -f libtool
+
+# This directory's subdirectories are mostly independent; you can cd
+# into them and run `make' without going through this Makefile.
+# To change the values of `make' variables: instead of editing Makefiles,
+# (1) if the variable is set in `config.status', edit `config.status'
+#     (which will cause the Makefiles to be regenerated when you run `make');
+# (2) otherwise, pass the desired values on the `make' command line.
+$(RECURSIVE_TARGETS):
+	@failcom='exit 1'; \
+	for f in x $$MAKEFLAGS; do \
+	  case $$f in \
+	    *=* | --[!k]*);; \
+	    *k*) failcom='fail=yes';; \
+	  esac; \
+	done; \
+	dot_seen=no; \
+	target=`echo $@ | sed s/-recursive//`; \
+	list='$(SUBDIRS)'; for subdir in $$list; do \
+	  echo "Making $$target in $$subdir"; \
+	  if test "$$subdir" = "."; then \
+	    dot_seen=yes; \
+	    local_target="$$target-am"; \
+	  else \
+	    local_target="$$target"; \
+	  fi; \
+	  (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
+	  || eval $$failcom; \
+	done; \
+	if test "$$dot_seen" = "no"; then \
+	  $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \
+	fi; test -z "$$fail"
+
+$(RECURSIVE_CLEAN_TARGETS):
+	@failcom='exit 1'; \
+	for f in x $$MAKEFLAGS; do \
+	  case $$f in \
+	    *=* | --[!k]*);; \
+	    *k*) failcom='fail=yes';; \
+	  esac; \
+	done; \
+	dot_seen=no; \
+	case "$@" in \
+	  distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \
+	  *) list='$(SUBDIRS)' ;; \
+	esac; \
+	rev=''; for subdir in $$list; do \
+	  if test "$$subdir" = "."; then :; else \
+	    rev="$$subdir $$rev"; \
+	  fi; \
+	done; \
+	rev="$$rev ."; \
+	target=`echo $@ | sed s/-recursive//`; \
+	for subdir in $$rev; do \
+	  echo "Making $$target in $$subdir"; \
+	  if test "$$subdir" = "."; then \
+	    local_target="$$target-am"; \
+	  else \
+	    local_target="$$target"; \
+	  fi; \
+	  (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
+	  || eval $$failcom; \
+	done && test -z "$$fail"
+tags-recursive:
+	list='$(SUBDIRS)'; for subdir in $$list; do \
+	  test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \
+	done
+ctags-recursive:
+	list='$(SUBDIRS)'; for subdir in $$list; do \
+	  test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \
+	done
+
+ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
+	list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	mkid -fID $$unique
+tags: TAGS
+
+TAGS: tags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \
+	  include_option=--etags-include; \
+	  empty_fix=.; \
+	else \
+	  include_option=--include; \
+	  empty_fix=; \
+	fi; \
+	list='$(SUBDIRS)'; for subdir in $$list; do \
+	  if test "$$subdir" = .; then :; else \
+	    test ! -f $$subdir/TAGS || \
+	      tags="$$tags $$include_option=$$here/$$subdir/TAGS"; \
+	  fi; \
+	done; \
+	list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \
+	  test -n "$$unique" || unique=$$empty_fix; \
+	  $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	    $$tags $$unique; \
+	fi
+ctags: CTAGS
+CTAGS: ctags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	test -z "$(CTAGS_ARGS)$$tags$$unique" \
+	  || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+	     $$tags $$unique
+
+GTAGS:
+	here=`$(am__cd) $(top_builddir) && pwd` \
+	  && cd $(top_srcdir) \
+	  && gtags -i $(GTAGS_ARGS) $$here
+
+distclean-tags:
+	-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(DISTFILES)
+	$(am__remove_distdir)
+	test -d $(distdir) || mkdir $(distdir)
+	@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	list='$(DISTFILES)'; \
+	  dist_files=`for file in $$list; do echo $$file; done | \
+	  sed -e "s|^$$srcdirstrip/||;t" \
+	      -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+	case $$dist_files in \
+	  */*) $(MKDIR_P) `echo "$$dist_files" | \
+			   sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+			   sort -u` ;; \
+	esac; \
+	for file in $$dist_files; do \
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+	  if test -d $$d/$$file; then \
+	    dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \
+	    fi; \
+	    cp -pR $$d/$$file $(distdir)$$dir || exit 1; \
+	  else \
+	    test -f $(distdir)/$$file \
+	    || cp -p $$d/$$file $(distdir)/$$file \
+	    || exit 1; \
+	  fi; \
+	done
+	list='$(DIST_SUBDIRS)'; for subdir in $$list; do \
+	  if test "$$subdir" = .; then :; else \
+	    test -d "$(distdir)/$$subdir" \
+	    || $(MKDIR_P) "$(distdir)/$$subdir" \
+	    || exit 1; \
+	    distdir=`$(am__cd) $(distdir) && pwd`; \
+	    top_distdir=`$(am__cd) $(top_distdir) && pwd`; \
+	    (cd $$subdir && \
+	      $(MAKE) $(AM_MAKEFLAGS) \
+	        top_distdir="$$top_distdir" \
+	        distdir="$$distdir/$$subdir" \
+		am__remove_distdir=: \
+		am__skip_length_check=: \
+	        distdir) \
+	      || exit 1; \
+	  fi; \
+	done
+	-find $(distdir) -type d ! -perm -755 -exec chmod u+rwx,go+rx {} \; -o \
+	  ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \
+	  ! -type d ! -perm -400 -exec chmod a+r {} \; -o \
+	  ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \
+	|| chmod -R a+r $(distdir)
+dist-gzip: distdir
+	tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
+	$(am__remove_distdir)
+
+dist-bzip2: distdir
+	tardir=$(distdir) && $(am__tar) | bzip2 -9 -c >$(distdir).tar.bz2
+	$(am__remove_distdir)
+
+dist-tarZ: distdir
+	tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z
+	$(am__remove_distdir)
+
+dist-shar: distdir
+	shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz
+	$(am__remove_distdir)
+
+dist-zip: distdir
+	-rm -f $(distdir).zip
+	zip -rq $(distdir).zip $(distdir)
+	$(am__remove_distdir)
+
+dist dist-all: distdir
+	tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
+	$(am__remove_distdir)
+
+# This target untars the dist file and tries a VPATH configuration.  Then
+# it guarantees that the distribution is self-contained by making another
+# tarfile.
+distcheck: dist
+	case '$(DIST_ARCHIVES)' in \
+	*.tar.gz*) \
+	  GZIP=$(GZIP_ENV) gunzip -c $(distdir).tar.gz | $(am__untar) ;;\
+	*.tar.bz2*) \
+	  bunzip2 -c $(distdir).tar.bz2 | $(am__untar) ;;\
+	*.tar.Z*) \
+	  uncompress -c $(distdir).tar.Z | $(am__untar) ;;\
+	*.shar.gz*) \
+	  GZIP=$(GZIP_ENV) gunzip -c $(distdir).shar.gz | unshar ;;\
+	*.zip*) \
+	  unzip $(distdir).zip ;;\
+	esac
+	chmod -R a-w $(distdir); chmod a+w $(distdir)
+	mkdir $(distdir)/_build
+	mkdir $(distdir)/_inst
+	chmod a-w $(distdir)
+	dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \
+	  && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \
+	  && cd $(distdir)/_build \
+	  && ../configure --srcdir=.. --prefix="$$dc_install_base" \
+	    $(DISTCHECK_CONFIGURE_FLAGS) \
+	  && $(MAKE) $(AM_MAKEFLAGS) \
+	  && $(MAKE) $(AM_MAKEFLAGS) dvi \
+	  && $(MAKE) $(AM_MAKEFLAGS) check \
+	  && $(MAKE) $(AM_MAKEFLAGS) install \
+	  && $(MAKE) $(AM_MAKEFLAGS) installcheck \
+	  && $(MAKE) $(AM_MAKEFLAGS) uninstall \
+	  && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \
+	        distuninstallcheck \
+	  && chmod -R a-w "$$dc_install_base" \
+	  && ({ \
+	       (cd ../.. && umask 077 && mkdir "$$dc_destdir") \
+	       && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \
+	       && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \
+	       && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \
+	            distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \
+	      } || { rm -rf "$$dc_destdir"; exit 1; }) \
+	  && rm -rf "$$dc_destdir" \
+	  && $(MAKE) $(AM_MAKEFLAGS) dist \
+	  && rm -rf $(DIST_ARCHIVES) \
+	  && $(MAKE) $(AM_MAKEFLAGS) distcleancheck
+	$(am__remove_distdir)
+	@(echo "$(distdir) archives ready for distribution: "; \
+	  list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \
+	  sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x'
+distuninstallcheck:
+	@cd $(distuninstallcheck_dir) \
+	&& test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \
+	   || { echo "ERROR: files left after uninstall:" ; \
+	        if test -n "$(DESTDIR)"; then \
+	          echo "  (check DESTDIR support)"; \
+	        fi ; \
+	        $(distuninstallcheck_listfiles) ; \
+	        exit 1; } >&2
+distcleancheck: distclean
+	@if test '$(srcdir)' = . ; then \
+	  echo "ERROR: distcleancheck can only run from a VPATH build" ; \
+	  exit 1 ; \
+	fi
+	@test `$(distcleancheck_listfiles) | wc -l` -eq 0 \
+	  || { echo "ERROR: files left in build directory after distclean:" ; \
+	       $(distcleancheck_listfiles) ; \
+	       exit 1; } >&2
+check-am: all-am
+check: check-recursive
+all-am: Makefile config.h
+installdirs: installdirs-recursive
+installdirs-am:
+install: install-recursive
+install-exec: install-exec-recursive
+install-data: install-data-recursive
+uninstall: uninstall-recursive
+
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-recursive
+install-strip:
+	$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	  install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	  `test -z '$(STRIP)' || \
+	    echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+
+maintainer-clean-generic:
+	@echo "This command is intended for maintainers to use;"
+	@echo "it deletes files that may require special tools to rebuild."
+clean: clean-recursive
+
+clean-am: clean-generic clean-libtool mostlyclean-am
+
+distclean: distclean-recursive
+	-rm -f $(am__CONFIG_DISTCLEAN_FILES)
+	-rm -f Makefile
+distclean-am: clean-am distclean-generic distclean-hdr \
+	distclean-libtool distclean-tags
+
+dvi-am:
+
+html: html-recursive
+
+info: info-recursive
+
+info-am:
+
+install-data-am:
+
+install-dvi: install-dvi-recursive
+
+install-exec-am:
+
+install-html: install-html-recursive
+
+install-info: install-info-recursive
+
+install-man:
+
+install-pdf: install-pdf-recursive
+
+install-ps: install-ps-recursive
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-recursive
+	-rm -f $(am__CONFIG_DISTCLEAN_FILES)
+	-rm -rf $(top_srcdir)/autom4te.cache
+	-rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-recursive
+
+mostlyclean-am: mostlyclean-generic mostlyclean-libtool
+
+pdf: pdf-recursive
+
+pdf-am:
+
+ps: ps-recursive
+
+ps-am:
+
+uninstall-am:
+
+.MAKE: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) install-am \
+	install-strip
+
+.PHONY: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) CTAGS GTAGS \
+	all all-am am--refresh check check-am clean clean-generic \
+	clean-libtool ctags ctags-recursive dist dist-all dist-bzip2 \
+	dist-gzip dist-shar dist-tarZ dist-zip distcheck distclean \
+	distclean-generic distclean-hdr distclean-libtool \
+	distclean-tags distcleancheck distdir distuninstallcheck dvi \
+	dvi-am html html-am info info-am install install-am \
+	install-data install-data-am install-dvi install-dvi-am \
+	install-exec install-exec-am install-html install-html-am \
+	install-info install-info-am install-man install-pdf \
+	install-pdf-am install-ps install-ps-am install-strip \
+	installcheck installcheck-am installdirs installdirs-am \
+	maintainer-clean maintainer-clean-generic mostlyclean \
+	mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
+	tags tags-recursive uninstall uninstall-am
+
+
+# "make distcheck" builds the dvi target, so use it to check that the
+# documentation is built correctly.
+dvi:
+	$(MAKE) SPHINXOPTS_EXTRA=-W html
+
+#pkgconfigdir = $(libdir)/pkgconfig
+#pkgconfig_DATA = jansson.pc
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/lang/c/jansson/README.rst b/lang/c/jansson/README.rst
new file mode 100644
index 0000000..cbb171f
--- /dev/null
+++ b/lang/c/jansson/README.rst
@@ -0,0 +1,59 @@
+Jansson README
+==============
+
+Jansson_ is a C library for encoding, decoding and manipulating JSON
+data. Its main features and design principles are:
+
+- Simple and intuitive API and data model
+
+- Comprehensive documentation
+
+- No dependencies on other libraries
+
+- Full Unicode support (UTF-8)
+
+- Extensive test suite
+
+Jansson is licensed under the `MIT license`_; see LICENSE in the
+source distribution for details.
+
+
+Compilation and Installation
+----------------------------
+
+If you obtained a source tarball, just use the standard autotools
+commands::
+
+   $ ./configure
+   $ make
+   $ make install
+
+To run the test suite, invoke::
+
+   $ make check
+
+If the source has been checked out from a Git repository, the
+./configure script has to be generated first. The easiest way is to use
+autoreconf::
+
+   $ autoreconf -i
+
+
+Documentation
+-------------
+
+Prebuilt HTML documentation is available at
+http://www.digip.org/jansson/doc/.
+
+The documentation source is in the ``doc/`` subdirectory. To generate
+HTML documentation, invoke::
+
+   $ make html
+
+Then, point your browser to ``doc/_build/html/index.html``. Sphinx_
+1.0 or newer is required to generate the documentation.
+
+
+.. _Jansson: http://www.digip.org/jansson/
+.. _`MIT license`: http://www.opensource.org/licenses/mit-license.php
+.. _Sphinx: http://sphinx.pocoo.org/
diff --git a/lang/c/jansson/aclocal.m4 b/lang/c/jansson/aclocal.m4
new file mode 100644
index 0000000..8bfbdca
--- /dev/null
+++ b/lang/c/jansson/aclocal.m4
@@ -0,0 +1,8895 @@
+# generated automatically by aclocal 1.10 -*- Autoconf -*-
+
+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
+# 2005, 2006  Free Software Foundation, Inc.
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+m4_if(m4_PACKAGE_VERSION, [2.68],,
+[m4_fatal([this file was generated for autoconf 2.68.
+You have another version of autoconf.  If you want to use that,
+you should regenerate the build system entirely.], [63])])
+
+# libtool.m4 - Configure libtool for the host system. -*-Autoconf-*-
+#
+#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+#                 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
+#                 Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+m4_define([_LT_COPYING], [dnl
+#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+#                 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
+#                 Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+#   This file is part of GNU Libtool.
+#
+# GNU Libtool is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; either version 2 of
+# the License, or (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
+# obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+])
+
+# serial 57 LT_INIT
+
+
+# LT_PREREQ(VERSION)
+# ------------------
+# Complain and exit if this libtool version is less than VERSION.
+m4_defun([LT_PREREQ],
+[m4_if(m4_version_compare(m4_defn([LT_PACKAGE_VERSION]), [$1]), -1,
+       [m4_default([$3],
+		   [m4_fatal([Libtool version $1 or higher is required],
+		             63)])],
+       [$2])])
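+
+# A minimal usage sketch (illustrative; the version number is an example):
+#
+#   LT_PREREQ([2.2.6])
+#   LT_INIT
+#
+# This aborts configuration when the installed libtool macros are older
+# than the stated version.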
+
+
+# _LT_CHECK_BUILDDIR
+# ------------------
+# Complain if the absolute build directory name contains unusual characters
+m4_defun([_LT_CHECK_BUILDDIR],
+[case `pwd` in
+  *\ * | *\	*)
+    AC_MSG_WARN([Libtool does not cope well with whitespace in `pwd`]) ;;
+esac
+])
+
+
+# LT_INIT([OPTIONS])
+# ------------------
+AC_DEFUN([LT_INIT],
+[AC_PREREQ([2.58])dnl We use AC_INCLUDES_DEFAULT
+AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl
+AC_BEFORE([$0], [LT_LANG])dnl
+AC_BEFORE([$0], [LT_OUTPUT])dnl
+AC_BEFORE([$0], [LTDL_INIT])dnl
+m4_require([_LT_CHECK_BUILDDIR])dnl
+
+dnl Autoconf doesn't catch unexpanded LT_ macros by default:
+m4_pattern_forbid([^_?LT_[A-Z_]+$])dnl
+m4_pattern_allow([^(_LT_EOF|LT_DLGLOBAL|LT_DLLAZY_OR_NOW|LT_MULTI_MODULE)$])dnl
+dnl aclocal doesn't pull ltoptions.m4, ltsugar.m4, or ltversion.m4
+dnl unless we require an AC_DEFUNed macro:
+AC_REQUIRE([LTOPTIONS_VERSION])dnl
+AC_REQUIRE([LTSUGAR_VERSION])dnl
+AC_REQUIRE([LTVERSION_VERSION])dnl
+AC_REQUIRE([LTOBSOLETE_VERSION])dnl
+m4_require([_LT_PROG_LTMAIN])dnl
+
+_LT_SHELL_INIT([SHELL=${CONFIG_SHELL-/bin/sh}])
+
+dnl Parse OPTIONS
+_LT_SET_OPTIONS([$0], [$1])
+
+# This can be used to rebuild libtool when needed
+LIBTOOL_DEPS="$ltmain"
+
+# Always use our own libtool.
+LIBTOOL='$(SHELL) $(top_builddir)/libtool'
+AC_SUBST(LIBTOOL)dnl
+
+_LT_SETUP
+
+# Only expand once:
+m4_define([LT_INIT])
+])# LT_INIT
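+
+# A minimal usage sketch (illustrative; the option shown is an example):
+#
+#   LT_INIT([disable-static])
+#
+# in configure.ac enables libtool support; the example option requests
+# shared libraries only.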
+
+# Old names:
+AU_ALIAS([AC_PROG_LIBTOOL], [LT_INIT])
+AU_ALIAS([AM_PROG_LIBTOOL], [LT_INIT])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_PROG_LIBTOOL], [])
+dnl AC_DEFUN([AM_PROG_LIBTOOL], [])
+
+
+# _LT_CC_BASENAME(CC)
+# -------------------
+# Calculate cc_basename.  Skip known compiler wrappers and cross-prefix.
+m4_defun([_LT_CC_BASENAME],
+[for cc_temp in $1""; do
+  case $cc_temp in
+    compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;;
+    distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;;
+    \-*) ;;
+    *) break;;
+  esac
+done
+cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
+])
+
+
+# _LT_FILEUTILS_DEFAULTS
+# ----------------------
+# It is okay to use these file commands and assume they have been set
+# sensibly after `m4_require([_LT_FILEUTILS_DEFAULTS])'.
+m4_defun([_LT_FILEUTILS_DEFAULTS],
+[: ${CP="cp -f"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+])# _LT_FILEUTILS_DEFAULTS
+
+
+# _LT_SETUP
+# ---------
+m4_defun([_LT_SETUP],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_REQUIRE([_LT_PREPARE_SED_QUOTE_VARS])dnl
+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl
+
+_LT_DECL([], [host_alias], [0], [The host system])dnl
+_LT_DECL([], [host], [0])dnl
+_LT_DECL([], [host_os], [0])dnl
+dnl
+_LT_DECL([], [build_alias], [0], [The build system])dnl
+_LT_DECL([], [build], [0])dnl
+_LT_DECL([], [build_os], [0])dnl
+dnl
+AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([LT_PATH_LD])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+dnl
+AC_REQUIRE([AC_PROG_LN_S])dnl
+test -z "$LN_S" && LN_S="ln -s"
+_LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])dnl
+dnl
+AC_REQUIRE([LT_CMD_MAX_LEN])dnl
+_LT_DECL([objext], [ac_objext], [0], [Object file suffix (normally "o")])dnl
+_LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl
+dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_CHECK_SHELL_FEATURES])dnl
+m4_require([_LT_CMD_RELOAD])dnl
+m4_require([_LT_CHECK_MAGIC_METHOD])dnl
+m4_require([_LT_CMD_OLD_ARCHIVE])dnl
+m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+
+_LT_CONFIG_LIBTOOL_INIT([
+# See if we are running on zsh, and set the options which allow our
+# commands through without removal of \ escapes.
+if test -n "\${ZSH_VERSION+set}" ; then
+   setopt NO_GLOB_SUBST
+fi
+])
+if test -n "${ZSH_VERSION+set}" ; then
+   setopt NO_GLOB_SUBST
+fi
+
+_LT_CHECK_OBJDIR
+
+m4_require([_LT_TAG_COMPILER])dnl
+
+case $host_os in
+aix3*)
+  # AIX sometimes has problems with the GCC collect2 program.  For some
+  # reason, if we set the COLLECT_NAMES environment variable, the problems
+  # vanish in a puff of smoke.
+  if test "X${COLLECT_NAMES+set}" != Xset; then
+    COLLECT_NAMES=
+    export COLLECT_NAMES
+  fi
+  ;;
+esac
+
+# Global variables:
+ofile=libtool
+can_build_shared=yes
+
+# All known linkers require a `.a' archive for static linking (except MSVC,
+# which needs '.lib').
+libext=a
+
+with_gnu_ld="$lt_cv_prog_gnu_ld"
+
+old_CC="$CC"
+old_CFLAGS="$CFLAGS"
+
+# Set sane defaults for various variables
+test -z "$CC" && CC=cc
+test -z "$LTCC" && LTCC=$CC
+test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS
+test -z "$LD" && LD=ld
+test -z "$ac_objext" && ac_objext=o
+
+_LT_CC_BASENAME([$compiler])
+
+# Only perform the check for file, if the check method requires it
+test -z "$MAGIC_CMD" && MAGIC_CMD=file
+case $deplibs_check_method in
+file_magic*)
+  if test "$file_magic_cmd" = '$MAGIC_CMD'; then
+    _LT_PATH_MAGIC
+  fi
+  ;;
+esac
+
+# Use C for the default configuration in the libtool script
+LT_SUPPORTED_TAG([CC])
+_LT_LANG_C_CONFIG
+_LT_LANG_DEFAULT_CONFIG
+_LT_CONFIG_COMMANDS
+])# _LT_SETUP
+
+
+# _LT_PREPARE_SED_QUOTE_VARS
+# --------------------------
+# Define a few sed substitution that help us do robust quoting.
+m4_defun([_LT_PREPARE_SED_QUOTE_VARS],
+[# Backslashify metacharacters that are still active within
+# double-quoted strings.
+sed_quote_subst='s/\([["`$\\]]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\([["`\\]]\)/\\\1/g'
+
+# Sed substitution to delay expansion of an escaped shell variable in a
+# double_quote_subst'ed string.
+delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
+
+# Sed substitution to delay expansion of an escaped single quote.
+delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g'
+
+# Sed substitution to avoid accidental globbing in evaled expressions
+no_glob_subst='s/\*/\\\*/g'
+])
+
+# _LT_PROG_LTMAIN
+# ---------------
+# Note that this code is called both from `configure' and `config.status'
+# now that we use AC_CONFIG_COMMANDS to generate libtool.  Notably,
+# `config.status' has no value for ac_aux_dir unless we are using Automake,
+# so we pass a copy along to make sure it has a sensible value anyway.
+m4_defun([_LT_PROG_LTMAIN],
+[m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl
+_LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir'])
+ltmain="$ac_aux_dir/ltmain.sh"
+])# _LT_PROG_LTMAIN
+
+
+
+# So that we can recreate a full libtool script including additional
+# tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS
+# in macros and then make a single call at the end using the `libtool'
+# label.
+
+
+# _LT_CONFIG_LIBTOOL_INIT([INIT-COMMANDS])
+# ----------------------------------------
+# Register INIT-COMMANDS to be passed to AC_CONFIG_COMMANDS later.
+m4_define([_LT_CONFIG_LIBTOOL_INIT],
+[m4_ifval([$1],
+          [m4_append([_LT_OUTPUT_LIBTOOL_INIT],
+                     [$1
+])])])
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_INIT])
+
+
+# _LT_CONFIG_LIBTOOL([COMMANDS])
+# ------------------------------
+# Register COMMANDS to be passed to AC_CONFIG_COMMANDS later.
+m4_define([_LT_CONFIG_LIBTOOL],
+[m4_ifval([$1],
+          [m4_append([_LT_OUTPUT_LIBTOOL_COMMANDS],
+                     [$1
+])])])
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS])
+
+
+# _LT_CONFIG_SAVE_COMMANDS([COMMANDS], [INIT_COMMANDS])
+# -----------------------------------------------------
+m4_defun([_LT_CONFIG_SAVE_COMMANDS],
+[_LT_CONFIG_LIBTOOL([$1])
+_LT_CONFIG_LIBTOOL_INIT([$2])
+])
+
+
+# _LT_FORMAT_COMMENT([COMMENT])
+# -----------------------------
+# Add leading comment marks to the start of each line, and a trailing
+# full-stop to the whole comment if one is not present already.
+m4_define([_LT_FORMAT_COMMENT],
+[m4_ifval([$1], [
+m4_bpatsubst([m4_bpatsubst([$1], [^ *], [# ])],
+              [['`$\]], [\\\&])]m4_bmatch([$1], [[!?.]$], [], [.])
+)])
+
+
+
+
+
+# _LT_DECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION], [IS-TAGGED?])
+# -------------------------------------------------------------------
+# CONFIGNAME is the name given to the value in the libtool script.
+# VARNAME is the (base) name used in the configure script.
+# VALUE may be 0, 1 or 2 for a computed quote escaped value based on
+# VARNAME.  Any other value will be used directly.
+m4_define([_LT_DECL],
+[lt_if_append_uniq([lt_decl_varnames], [$2], [, ],
+    [lt_dict_add_subkey([lt_decl_dict], [$2], [libtool_name],
+	[m4_ifval([$1], [$1], [$2])])
+    lt_dict_add_subkey([lt_decl_dict], [$2], [value], [$3])
+    m4_ifval([$4],
+	[lt_dict_add_subkey([lt_decl_dict], [$2], [description], [$4])])
+    lt_dict_add_subkey([lt_decl_dict], [$2],
+	[tagged?], [m4_ifval([$5], [yes], [no])])])
+])
+
+
+# _LT_TAGDECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION])
+# --------------------------------------------------------
+m4_define([_LT_TAGDECL], [_LT_DECL([$1], [$2], [$3], [$4], [yes])])
+
+
+# lt_decl_tag_varnames([SEPARATOR], [VARNAME1...])
+# ------------------------------------------------
+m4_define([lt_decl_tag_varnames],
+[_lt_decl_filter([tagged?], [yes], $@)])
+
+
+# _lt_decl_filter(SUBKEY, VALUE, [SEPARATOR], [VARNAME1..])
+# ---------------------------------------------------------
+m4_define([_lt_decl_filter],
+[m4_case([$#],
+  [0], [m4_fatal([$0: too few arguments: $#])],
+  [1], [m4_fatal([$0: too few arguments: $#: $1])],
+  [2], [lt_dict_filter([lt_decl_dict], [$1], [$2], [], lt_decl_varnames)],
+  [3], [lt_dict_filter([lt_decl_dict], [$1], [$2], [$3], lt_decl_varnames)],
+  [lt_dict_filter([lt_decl_dict], $@)])[]dnl
+])
+
+
+# lt_decl_quote_varnames([SEPARATOR], [VARNAME1...])
+# --------------------------------------------------
+m4_define([lt_decl_quote_varnames],
+[_lt_decl_filter([value], [1], $@)])
+
+
+# lt_decl_dquote_varnames([SEPARATOR], [VARNAME1...])
+# ---------------------------------------------------
+m4_define([lt_decl_dquote_varnames],
+[_lt_decl_filter([value], [2], $@)])
+
+
+# lt_decl_varnames_tagged([SEPARATOR], [VARNAME1...])
+# ---------------------------------------------------
+m4_define([lt_decl_varnames_tagged],
+[m4_assert([$# <= 2])dnl
+_$0(m4_quote(m4_default([$1], [[, ]])),
+    m4_ifval([$2], [[$2]], [m4_dquote(lt_decl_tag_varnames)]),
+    m4_split(m4_normalize(m4_quote(_LT_TAGS)), [ ]))])
+m4_define([_lt_decl_varnames_tagged],
+[m4_ifval([$3], [lt_combine([$1], [$2], [_], $3)])])
+
+
+# lt_decl_all_varnames([SEPARATOR], [VARNAME1...])
+# ------------------------------------------------
+m4_define([lt_decl_all_varnames],
+[_$0(m4_quote(m4_default([$1], [[, ]])),
+     m4_if([$2], [],
+	   m4_quote(lt_decl_varnames),
+	m4_quote(m4_shift($@))))[]dnl
+])
+m4_define([_lt_decl_all_varnames],
+[lt_join($@, lt_decl_varnames_tagged([$1],
+			lt_decl_tag_varnames([[, ]], m4_shift($@))))dnl
+])
+
+
+# _LT_CONFIG_STATUS_DECLARE([VARNAME])
+# ------------------------------------
+# Quote a variable value, and forward it to `config.status' so that its
+# declaration there will have the same value as in `configure'.  VARNAME
+# must have a single quote delimited value for this to work.
+m4_define([_LT_CONFIG_STATUS_DECLARE],
+[$1='`$ECHO "$][$1" | $SED "$delay_single_quote_subst"`'])
+
+
+# _LT_CONFIG_STATUS_DECLARATIONS
+# ------------------------------
+# We delimit libtool config variables with single quotes, so when
+# we write them to config.status, we have to be sure to quote all
+# embedded single quotes properly.  In configure, this macro expands
+# each variable declared with _LT_DECL (and _LT_TAGDECL) into:
+#
+#    <var>='`$ECHO "$<var>" | $SED "$delay_single_quote_subst"`'
+m4_defun([_LT_CONFIG_STATUS_DECLARATIONS],
+[m4_foreach([_lt_var], m4_quote(lt_decl_all_varnames),
+    [m4_n([_LT_CONFIG_STATUS_DECLARE(_lt_var)])])])
+
+
+# _LT_LIBTOOL_TAGS
+# ----------------
+# Output comment and list of tags supported by the script
+m4_defun([_LT_LIBTOOL_TAGS],
+[_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl
+available_tags="_LT_TAGS"dnl
+])
+
+
+# _LT_LIBTOOL_DECLARE(VARNAME, [TAG])
+# -----------------------------------
+# Extract the dictionary values for VARNAME (optionally with TAG) and
+# expand to a commented shell variable setting:
+#
+#    # Some comment about what VAR is for.
+#    visible_name=$lt_internal_name
+m4_define([_LT_LIBTOOL_DECLARE],
+[_LT_FORMAT_COMMENT(m4_quote(lt_dict_fetch([lt_decl_dict], [$1],
+					   [description])))[]dnl
+m4_pushdef([_libtool_name],
+    m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [libtool_name])))[]dnl
+m4_case(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [value])),
+    [0], [_libtool_name=[$]$1],
+    [1], [_libtool_name=$lt_[]$1],
+    [2], [_libtool_name=$lt_[]$1],
+    [_libtool_name=lt_dict_fetch([lt_decl_dict], [$1], [value])])[]dnl
+m4_ifval([$2], [_$2])[]m4_popdef([_libtool_name])[]dnl
+])
+
+
+# _LT_LIBTOOL_CONFIG_VARS
+# -----------------------
+# Produce commented declarations of non-tagged libtool config variables
+# suitable for insertion in the LIBTOOL CONFIG section of the `libtool'
+# script.  Tagged libtool config variables (even for the LIBTOOL CONFIG
+# section) are produced by _LT_LIBTOOL_TAG_VARS.
+m4_defun([_LT_LIBTOOL_CONFIG_VARS],
+[m4_foreach([_lt_var],
+    m4_quote(_lt_decl_filter([tagged?], [no], [], lt_decl_varnames)),
+    [m4_n([_LT_LIBTOOL_DECLARE(_lt_var)])])])
+
+
+# _LT_LIBTOOL_TAG_VARS(TAG)
+# -------------------------
+m4_define([_LT_LIBTOOL_TAG_VARS],
+[m4_foreach([_lt_var], m4_quote(lt_decl_tag_varnames),
+    [m4_n([_LT_LIBTOOL_DECLARE(_lt_var, [$1])])])])
+
+
+# _LT_TAGVAR(VARNAME, [TAGNAME])
+# ------------------------------
+m4_define([_LT_TAGVAR], [m4_ifval([$2], [$1_$2], [$1])])
+
+
+# _LT_CONFIG_COMMANDS
+# -------------------
+# Send accumulated output to $CONFIG_STATUS.  Thanks to the lists of
+# variables for single and double quote escaping we saved from calls
+# to _LT_DECL, we can put quote escaped variables declarations
+# into `config.status', and then the shell code to quote escape them in
+# for loops in `config.status'.  Finally, any additional code accumulated
+# from calls to _LT_CONFIG_LIBTOOL_INIT is expanded.
+m4_defun([_LT_CONFIG_COMMANDS],
+[AC_PROVIDE_IFELSE([LT_OUTPUT],
+	dnl If the libtool generation code has been placed in $CONFIG_LT,
+	dnl instead of duplicating it all over again into config.status,
+	dnl then we will have config.status run $CONFIG_LT later, so it
+	dnl needs to know what name is stored there:
+        [AC_CONFIG_COMMANDS([libtool],
+            [$SHELL $CONFIG_LT || AS_EXIT(1)], [CONFIG_LT='$CONFIG_LT'])],
+    dnl If the libtool generation code is destined for config.status,
+    dnl expand the accumulated commands and init code now:
+    [AC_CONFIG_COMMANDS([libtool],
+        [_LT_OUTPUT_LIBTOOL_COMMANDS], [_LT_OUTPUT_LIBTOOL_COMMANDS_INIT])])
+])#_LT_CONFIG_COMMANDS
+
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS_INIT],
+[
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+sed_quote_subst='$sed_quote_subst'
+double_quote_subst='$double_quote_subst'
+delay_variable_subst='$delay_variable_subst'
+_LT_CONFIG_STATUS_DECLARATIONS
+LTCC='$LTCC'
+LTCFLAGS='$LTCFLAGS'
+compiler='$compiler_DEFAULT'
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+\$[]1
+_LTECHO_EOF'
+}
+
+# Quote evaled strings.
+for var in lt_decl_all_varnames([[ \
+]], lt_decl_quote_varnames); do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[[\\\\\\\`\\"\\\$]]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\""
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+# Double-quote double-evaled strings.
+for var in lt_decl_all_varnames([[ \
+]], lt_decl_dquote_varnames); do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[[\\\\\\\`\\"\\\$]]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+_LT_OUTPUT_LIBTOOL_INIT
+])
+
+# _LT_GENERATED_FILE_INIT(FILE, [COMMENT])
+# ----------------------------------------
+# Generate a child script FILE with all initialization necessary to
+# reuse the environment learned by the parent script, and make the
+# file executable.  If COMMENT is supplied, it is inserted after the
+# `#!' sequence but before initialization text begins.  After this
+# macro, additional text can be appended to FILE to form the body of
+# the child script.  The macro ends with non-zero status if the
+# file could not be fully written (such as if the disk is full).
+m4_ifdef([AS_INIT_GENERATED],
+[m4_defun([_LT_GENERATED_FILE_INIT],[AS_INIT_GENERATED($@)])],
+[m4_defun([_LT_GENERATED_FILE_INIT],
+[m4_require([AS_PREPARE])]dnl
+[m4_pushdef([AS_MESSAGE_LOG_FD])]dnl
+[lt_write_fail=0
+cat >$1 <<_ASEOF || lt_write_fail=1
+#! $SHELL
+# Generated by $as_me.
+$2
+SHELL=\${CONFIG_SHELL-$SHELL}
+export SHELL
+_ASEOF
+cat >>$1 <<\_ASEOF || lt_write_fail=1
+AS_SHELL_SANITIZE
+_AS_PREPARE
+exec AS_MESSAGE_FD>&1
+_ASEOF
+test $lt_write_fail = 0 && chmod +x $1[]dnl
+m4_popdef([AS_MESSAGE_LOG_FD])])])# _LT_GENERATED_FILE_INIT
+
+# LT_OUTPUT
+# ---------
+# This macro allows early generation of the libtool script (before
+# AC_OUTPUT is called), in case it is used in configure for compilation
+# tests.
+AC_DEFUN([LT_OUTPUT],
+[: ${CONFIG_LT=./config.lt}
+AC_MSG_NOTICE([creating $CONFIG_LT])
+_LT_GENERATED_FILE_INIT(["$CONFIG_LT"],
+[# Run this file to recreate a libtool stub with the current configuration.])
+
+cat >>"$CONFIG_LT" <<\_LTEOF
+lt_cl_silent=false
+exec AS_MESSAGE_LOG_FD>>config.log
+{
+  echo
+  AS_BOX([Running $as_me.])
+} >&AS_MESSAGE_LOG_FD
+
+lt_cl_help="\
+\`$as_me' creates a local libtool stub from the current configuration,
+for use in further configure-time tests before the real libtool is
+generated.
+
+Usage: $[0] [[OPTIONS]]
+
+  -h, --help      print this help, then exit
+  -V, --version   print version number, then exit
+  -q, --quiet     do not print progress messages
+  -d, --debug     don't remove temporary files
+
+Report bugs to <bug-libtool at gnu.org>."
+
+lt_cl_version="\
+m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl
+m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION])
+configured by $[0], generated by m4_PACKAGE_STRING.
+
+Copyright (C) 2010 Free Software Foundation, Inc.
+This config.lt script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it."
+
+while test $[#] != 0
+do
+  case $[1] in
+    --version | --v* | -V )
+      echo "$lt_cl_version"; exit 0 ;;
+    --help | --h* | -h )
+      echo "$lt_cl_help"; exit 0 ;;
+    --debug | --d* | -d )
+      debug=: ;;
+    --quiet | --q* | --silent | --s* | -q )
+      lt_cl_silent=: ;;
+
+    -*) AC_MSG_ERROR([unrecognized option: $[1]
+Try \`$[0] --help' for more information.]) ;;
+
+    *) AC_MSG_ERROR([unrecognized argument: $[1]
+Try \`$[0] --help' for more information.]) ;;
+  esac
+  shift
+done
+
+if $lt_cl_silent; then
+  exec AS_MESSAGE_FD>/dev/null
+fi
+_LTEOF
+
+cat >>"$CONFIG_LT" <<_LTEOF
+_LT_OUTPUT_LIBTOOL_COMMANDS_INIT
+_LTEOF
+
+cat >>"$CONFIG_LT" <<\_LTEOF
+AC_MSG_NOTICE([creating $ofile])
+_LT_OUTPUT_LIBTOOL_COMMANDS
+AS_EXIT(0)
+_LTEOF
+chmod +x "$CONFIG_LT"
+
+# configure is writing to config.log, but config.lt does its own redirection,
+# appending to config.log, which fails on DOS, as config.log is still kept
+# open by configure.  Here we exec the FD to /dev/null, effectively closing
+# config.log, so it can be properly (re)opened and appended to by config.lt.
+lt_cl_success=:
+test "$silent" = yes &&
+  lt_config_lt_args="$lt_config_lt_args --quiet"
+exec AS_MESSAGE_LOG_FD>/dev/null
+$SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false
+exec AS_MESSAGE_LOG_FD>>config.log
+$lt_cl_success || AS_EXIT(1)
+])# LT_OUTPUT
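+
+# A usage sketch (illustrative): invoke LT_OUTPUT right after LT_INIT in
+# configure.ac when configure itself must run ./libtool for compile or
+# link tests before AC_OUTPUT, e.g.:
+#
+#   LT_INIT
+#   LT_OUTPUT
+#   # ...tests that execute ./libtool...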
+
+
+# _LT_CONFIG(TAG)
+# ---------------
+# If TAG is the built-in tag, create an initial libtool script with a
+# default configuration from the untagged config vars.  Otherwise add code
+# to config.status for appending the configuration named by TAG from the
+# matching tagged config vars.
+m4_defun([_LT_CONFIG],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+_LT_CONFIG_SAVE_COMMANDS([
+  m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl
+  m4_if(_LT_TAG, [C], [
+    # See if we are running on zsh, and set the options which allow our
+    # commands through without removal of \ escapes.
+    if test -n "${ZSH_VERSION+set}" ; then
+      setopt NO_GLOB_SUBST
+    fi
+
+    cfgfile="${ofile}T"
+    trap "$RM -f \"$cfgfile\"; exit 1" 1 2 15
+    $RM -f "$cfgfile"
+
+    cat <<_LT_EOF >> "$cfgfile"
+#! $SHELL
+
+# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services.
+# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION
+# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`:
+# NOTE: Changes made to this file will be lost: look at ltmain.sh.
+#
+_LT_COPYING
+_LT_LIBTOOL_TAGS
+
+# ### BEGIN LIBTOOL CONFIG
+_LT_LIBTOOL_CONFIG_VARS
+_LT_LIBTOOL_TAG_VARS
+# ### END LIBTOOL CONFIG
+
+_LT_EOF
+
+  case $host_os in
+  aix3*)
+    cat <<\_LT_EOF >> "$cfgfile"
+# AIX sometimes has problems with the GCC collect2 program.  For some
+# reason, if we set the COLLECT_NAMES environment variable, the problems
+# vanish in a puff of smoke.
+if test "X${COLLECT_NAMES+set}" != Xset; then
+  COLLECT_NAMES=
+  export COLLECT_NAMES
+fi
+_LT_EOF
+    ;;
+  esac
+
+  _LT_PROG_LTMAIN
+
+  # We use sed instead of cat because bash on DJGPP gets confused if
+  # it finds mixed CR/LF and LF-only lines.  Since sed operates in
+  # text mode, it properly converts lines to CR/LF.  This bash problem
+  # is reportedly fixed, but why not run on old versions too?
+  sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+    || (rm -f "$cfgfile"; exit 1)
+
+  _LT_PROG_XSI_SHELLFNS
+
+  sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+    || (rm -f "$cfgfile"; exit 1)
+
+  mv -f "$cfgfile" "$ofile" ||
+    (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+  chmod +x "$ofile"
+],
+[cat <<_LT_EOF >> "$ofile"
+
+dnl Unfortunately we have to use $1 here, since _LT_TAG is not expanded
+dnl in a comment (i.e., after a #).
+# ### BEGIN LIBTOOL TAG CONFIG: $1
+_LT_LIBTOOL_TAG_VARS(_LT_TAG)
+# ### END LIBTOOL TAG CONFIG: $1
+_LT_EOF
+])dnl /m4_if
+],
+[m4_if([$1], [], [
+    PACKAGE='$PACKAGE'
+    VERSION='$VERSION'
+    TIMESTAMP='$TIMESTAMP'
+    RM='$RM'
+    ofile='$ofile'], [])
+])dnl /_LT_CONFIG_SAVE_COMMANDS
+])# _LT_CONFIG
+
+
+# LT_SUPPORTED_TAG(TAG)
+# ---------------------
+# Trace this macro to discover what tags are supported by the libtool
+# --tag option, using:
+#    autoconf --trace 'LT_SUPPORTED_TAG:$1'
+AC_DEFUN([LT_SUPPORTED_TAG], [])
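+
+# For example (illustrative), running the documented trace from the
+# directory containing configure.ac lists the supported tags:
+#
+#   autoconf --trace 'LT_SUPPORTED_TAG:$1'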
+
+
+# C support is built-in for now
+m4_define([_LT_LANG_C_enabled], [])
+m4_define([_LT_TAGS], [])
+
+
+# LT_LANG(LANG)
+# -------------
+# Enable libtool support for the given language if not already enabled.
+AC_DEFUN([LT_LANG],
+[AC_BEFORE([$0], [LT_OUTPUT])dnl
+m4_case([$1],
+  [C],			[_LT_LANG(C)],
+  [C++],		[_LT_LANG(CXX)],
+  [Java],		[_LT_LANG(GCJ)],
+  [Fortran 77],		[_LT_LANG(F77)],
+  [Fortran],		[_LT_LANG(FC)],
+  [Windows Resource],	[_LT_LANG(RC)],
+  [m4_ifdef([_LT_LANG_]$1[_CONFIG],
+    [_LT_LANG($1)],
+    [m4_fatal([$0: unsupported language: "$1"])])])dnl
+])# LT_LANG
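+
+# A usage sketch (illustrative; the languages are examples): after
+# LT_INIT, configure.ac may enable additional tagged configurations with
+#
+#   LT_LANG([C++])
+#   LT_LANG([Fortran 77])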
+
+
+# _LT_LANG(LANGNAME)
+# ------------------
+m4_defun([_LT_LANG],
+[m4_ifdef([_LT_LANG_]$1[_enabled], [],
+  [LT_SUPPORTED_TAG([$1])dnl
+  m4_append([_LT_TAGS], [$1 ])dnl
+  m4_define([_LT_LANG_]$1[_enabled], [])dnl
+  _LT_LANG_$1_CONFIG($1)])dnl
+])# _LT_LANG
+
+
+# _LT_LANG_DEFAULT_CONFIG
+# -----------------------
+m4_defun([_LT_LANG_DEFAULT_CONFIG],
+[AC_PROVIDE_IFELSE([AC_PROG_CXX],
+  [LT_LANG(CXX)],
+  [m4_define([AC_PROG_CXX], defn([AC_PROG_CXX])[LT_LANG(CXX)])])
+
+AC_PROVIDE_IFELSE([AC_PROG_F77],
+  [LT_LANG(F77)],
+  [m4_define([AC_PROG_F77], defn([AC_PROG_F77])[LT_LANG(F77)])])
+
+AC_PROVIDE_IFELSE([AC_PROG_FC],
+  [LT_LANG(FC)],
+  [m4_define([AC_PROG_FC], defn([AC_PROG_FC])[LT_LANG(FC)])])
+
+dnl The call to [A][M_PROG_GCJ] is quoted like that to stop aclocal
+dnl pulling things in needlessly.
+AC_PROVIDE_IFELSE([AC_PROG_GCJ],
+  [LT_LANG(GCJ)],
+  [AC_PROVIDE_IFELSE([A][M_PROG_GCJ],
+    [LT_LANG(GCJ)],
+    [AC_PROVIDE_IFELSE([LT_PROG_GCJ],
+      [LT_LANG(GCJ)],
+      [m4_ifdef([AC_PROG_GCJ],
+	[m4_define([AC_PROG_GCJ], defn([AC_PROG_GCJ])[LT_LANG(GCJ)])])
+       m4_ifdef([A][M_PROG_GCJ],
+	[m4_define([A][M_PROG_GCJ], defn([A][M_PROG_GCJ])[LT_LANG(GCJ)])])
+       m4_ifdef([LT_PROG_GCJ],
+	[m4_define([LT_PROG_GCJ], defn([LT_PROG_GCJ])[LT_LANG(GCJ)])])])])])
+
+AC_PROVIDE_IFELSE([LT_PROG_RC],
+  [LT_LANG(RC)],
+  [m4_define([LT_PROG_RC], defn([LT_PROG_RC])[LT_LANG(RC)])])
+])# _LT_LANG_DEFAULT_CONFIG
+
+# Obsolete macros:
+AU_DEFUN([AC_LIBTOOL_CXX], [LT_LANG(C++)])
+AU_DEFUN([AC_LIBTOOL_F77], [LT_LANG(Fortran 77)])
+AU_DEFUN([AC_LIBTOOL_FC], [LT_LANG(Fortran)])
+AU_DEFUN([AC_LIBTOOL_GCJ], [LT_LANG(Java)])
+AU_DEFUN([AC_LIBTOOL_RC], [LT_LANG(Windows Resource)])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_CXX], [])
+dnl AC_DEFUN([AC_LIBTOOL_F77], [])
+dnl AC_DEFUN([AC_LIBTOOL_FC], [])
+dnl AC_DEFUN([AC_LIBTOOL_GCJ], [])
+dnl AC_DEFUN([AC_LIBTOOL_RC], [])
+
+
+# _LT_TAG_COMPILER
+# ----------------
+m4_defun([_LT_TAG_COMPILER],
+[AC_REQUIRE([AC_PROG_CC])dnl
+
+_LT_DECL([LTCC], [CC], [1], [A C compiler])dnl
+_LT_DECL([LTCFLAGS], [CFLAGS], [1], [LTCC compiler flags])dnl
+_LT_TAGDECL([CC], [compiler], [1], [A language specific compiler])dnl
+_LT_TAGDECL([with_gcc], [GCC], [0], [Is the compiler the GNU compiler?])dnl
+
+# If no C compiler was specified, use CC.
+LTCC=${LTCC-"$CC"}
+
+# If no C compiler flags were specified, use CFLAGS.
+LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
+
+# Allow CC to be a program name with arguments.
+compiler=$CC
+])# _LT_TAG_COMPILER
+
+
+# _LT_COMPILER_BOILERPLATE
+# ------------------------
+# Check for compiler boilerplate output or warnings with
+# the simple compiler test code.
+m4_defun([_LT_COMPILER_BOILERPLATE],
+[m4_require([_LT_DECL_SED])dnl
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_compile_test_code" >conftest.$ac_ext
+eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_compiler_boilerplate=`cat conftest.err`
+$RM -r conftest*
+])# _LT_COMPILER_BOILERPLATE
+
+
+# _LT_LINKER_BOILERPLATE
+# ----------------------
+# Check for linker boilerplate output or warnings with
+# the simple link test code.
+m4_defun([_LT_LINKER_BOILERPLATE],
+[m4_require([_LT_DECL_SED])dnl
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_link_test_code" >conftest.$ac_ext
+eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_linker_boilerplate=`cat conftest.err`
+$RM -r conftest*
+])# _LT_LINKER_BOILERPLATE
+
+# _LT_REQUIRED_DARWIN_CHECKS
+# --------------------------
+m4_defun_once([_LT_REQUIRED_DARWIN_CHECKS],[
+  case $host_os in
+    rhapsody* | darwin*)
+    AC_CHECK_TOOL([DSYMUTIL], [dsymutil], [:])
+    AC_CHECK_TOOL([NMEDIT], [nmedit], [:])
+    AC_CHECK_TOOL([LIPO], [lipo], [:])
+    AC_CHECK_TOOL([OTOOL], [otool], [:])
+    AC_CHECK_TOOL([OTOOL64], [otool64], [:])
+    _LT_DECL([], [DSYMUTIL], [1],
+      [Tool to manipulate archived DWARF debug symbol files on Mac OS X])
+    _LT_DECL([], [NMEDIT], [1],
+      [Tool to change global to local symbols on Mac OS X])
+    _LT_DECL([], [LIPO], [1],
+      [Tool to manipulate fat objects and archives on Mac OS X])
+    _LT_DECL([], [OTOOL], [1],
+      [ldd/readelf like tool for Mach-O binaries on Mac OS X])
+    _LT_DECL([], [OTOOL64], [1],
+      [ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4])
+
+    AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod],
+      [lt_cv_apple_cc_single_mod=no
+      if test -z "${LT_MULTI_MODULE}"; then
+	# By default we will add the -single_module flag. You can override
+	# by either setting the environment variable LT_MULTI_MODULE
+	# non-empty at configure time, or by adding -multi_module to the
+	# link flags.
+	rm -rf libconftest.dylib*
+	echo "int foo(void){return 1;}" > conftest.c
+	echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+-dynamiclib -Wl,-single_module conftest.c" >&AS_MESSAGE_LOG_FD
+	$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+	  -dynamiclib -Wl,-single_module conftest.c 2>conftest.err
+        _lt_result=$?
+	if test -f libconftest.dylib && test ! -s conftest.err && test $_lt_result = 0; then
+	  lt_cv_apple_cc_single_mod=yes
+	else
+	  cat conftest.err >&AS_MESSAGE_LOG_FD
+	fi
+	rm -rf libconftest.dylib*
+	rm -f conftest.*
+      fi])
+    AC_CACHE_CHECK([for -exported_symbols_list linker flag],
+      [lt_cv_ld_exported_symbols_list],
+      [lt_cv_ld_exported_symbols_list=no
+      save_LDFLAGS=$LDFLAGS
+      echo "_main" > conftest.sym
+      LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym"
+      AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
+	[lt_cv_ld_exported_symbols_list=yes],
+	[lt_cv_ld_exported_symbols_list=no])
+	LDFLAGS="$save_LDFLAGS"
+    ])
+    AC_CACHE_CHECK([for -force_load linker flag],[lt_cv_ld_force_load],
+      [lt_cv_ld_force_load=no
+      cat > conftest.c << _LT_EOF
+int forced_loaded() { return 2;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&AS_MESSAGE_LOG_FD
+      $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&AS_MESSAGE_LOG_FD
+      echo "$AR cru libconftest.a conftest.o" >&AS_MESSAGE_LOG_FD
+      $AR cru libconftest.a conftest.o 2>&AS_MESSAGE_LOG_FD
+      echo "$RANLIB libconftest.a" >&AS_MESSAGE_LOG_FD
+      $RANLIB libconftest.a 2>&AS_MESSAGE_LOG_FD
+      cat > conftest.c << _LT_EOF
+int main() { return 0;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&AS_MESSAGE_LOG_FD
+      $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err
+      _lt_result=$?
+      if test -f conftest && test ! -s conftest.err && test $_lt_result = 0 && $GREP forced_load conftest 2>&1 >/dev/null; then
+	lt_cv_ld_force_load=yes
+      else
+	cat conftest.err >&AS_MESSAGE_LOG_FD
+      fi
+        rm -f conftest.err libconftest.a conftest conftest.c
+        rm -rf conftest.dSYM
+    ])
+    case $host_os in
+    rhapsody* | darwin1.[[012]])
+      _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;;
+    darwin1.*)
+      _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+    darwin*) # darwin 5.x on
+      # if running on 10.5 or later, the deployment target defaults
+      # to the OS version if on x86; if on 10.4, the deployment
+      # target defaults to 10.4. Don't you love it?
+      case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
+	10.0,*86*-darwin8*|10.0,*-darwin[[91]]*)
+	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+	10.[[012]]*)
+	  _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+	10.*)
+	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+      esac
+    ;;
+  esac
+    if test "$lt_cv_apple_cc_single_mod" = "yes"; then
+      _lt_dar_single_mod='$single_module'
+    fi
+    if test "$lt_cv_ld_exported_symbols_list" = "yes"; then
+      _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym'
+    else
+      _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}'
+    fi
+    if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then
+      _lt_dsymutil='~$DSYMUTIL $lib || :'
+    else
+      _lt_dsymutil=
+    fi
+    ;;
+  esac
+])
+
+
+# _LT_DARWIN_LINKER_FEATURES
+# --------------------------
+# Checks for linker and compiler features on darwin
+m4_defun([_LT_DARWIN_LINKER_FEATURES],
+[
+  m4_require([_LT_REQUIRED_DARWIN_CHECKS])
+  _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+  _LT_TAGVAR(hardcode_direct, $1)=no
+  _LT_TAGVAR(hardcode_automatic, $1)=yes
+  _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+  if test "$lt_cv_ld_force_load" = "yes"; then
+    _LT_TAGVAR(whole_archive_flag_spec, $1)='`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`'
+  else
+    _LT_TAGVAR(whole_archive_flag_spec, $1)=''
+  fi
+  _LT_TAGVAR(link_all_deplibs, $1)=yes
+  _LT_TAGVAR(allow_undefined_flag, $1)="$_lt_dar_allow_undefined"
+  case $cc_basename in
+     ifort*) _lt_dar_can_shared=yes ;;
+     *) _lt_dar_can_shared=$GCC ;;
+  esac
+  if test "$_lt_dar_can_shared" = "yes"; then
+    output_verbose_link_cmd=func_echo_all
+    _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
+    _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
+    _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
+    _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
+    m4_if([$1], [CXX],
+[   if test "$lt_cv_apple_cc_single_mod" != "yes"; then
+      _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}"
+      _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}"
+    fi
+],[])
+  else
+  _LT_TAGVAR(ld_shlibs, $1)=no
+  fi
+])
+
+# _LT_SYS_MODULE_PATH_AIX
+# -----------------------
+# Links a minimal program and checks the executable
+# for the system default hardcoded library path. In most cases,
+# this is /usr/lib:/lib, but when the MPI compilers are used
+# the locations of the communication and MPI libs are included too.
+# If we don't find anything, use the default library path according
+# to the AIX ld manual.
+m4_defun([_LT_SYS_MODULE_PATH_AIX],
+[m4_require([_LT_DECL_SED])dnl
+AC_LINK_IFELSE(AC_LANG_PROGRAM,[
+lt_aix_libpath_sed='
+    /Import File Strings/,/^$/ {
+	/^0/ {
+	    s/^0  *\(.*\)$/\1/
+	    p
+	}
+    }'
+aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+# Check for a 64-bit object if we didn't find anything.
+if test -z "$aix_libpath"; then
+  aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+fi],[])
+if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+])# _LT_SYS_MODULE_PATH_AIX
+
+
+# _LT_SHELL_INIT(ARG)
+# -------------------
+m4_define([_LT_SHELL_INIT],
+[m4_divert_text([M4SH-INIT], [$1
+])])# _LT_SHELL_INIT
+
+
+
+# _LT_PROG_ECHO_BACKSLASH
+# -----------------------
+# Find how we can fake an echo command that does not interpret backslash.
+# In particular, with Autoconf 2.60 or later we add some code to the start
+# of the generated configure script which will find a shell with a builtin
+# printf (which we can use as an echo command).
+m4_defun([_LT_PROG_ECHO_BACKSLASH],
+[ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+
+AC_MSG_CHECKING([how to print strings])
+# Test print first, because it will be a builtin if present.
+if test "X`print -r -- -n 2>/dev/null`" = X-n && \
+   test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='print -r --'
+elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='printf %s\n'
+else
+  # Use this function as a fallback that always works.
+  func_fallback_echo ()
+  {
+    eval 'cat <<_LTECHO_EOF
+$[]1
+_LTECHO_EOF'
+  }
+  ECHO='func_fallback_echo'
+fi
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*"
+}
+
+case "$ECHO" in
+  printf*) AC_MSG_RESULT([printf]) ;;
+  print*) AC_MSG_RESULT([print -r]) ;;
+  *) AC_MSG_RESULT([cat]) ;;
+esac
+
+m4_ifdef([_AS_DETECT_SUGGESTED],
+[_AS_DETECT_SUGGESTED([
+  test -n "${ZSH_VERSION+set}${BASH_VERSION+set}" || (
+    ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+    ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+    ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+    PATH=/empty FPATH=/empty; export PATH FPATH
+    test "X`printf %s $ECHO`" = "X$ECHO" \
+      || test "X`print -r -- $ECHO`" = "X$ECHO" )])])
+
+_LT_DECL([], [SHELL], [1], [Shell to use when invoking shell scripts])
+_LT_DECL([], [ECHO], [1], [An echo program that protects backslashes])
+])# _LT_PROG_ECHO_BACKSLASH
+
+
+# _LT_ENABLE_LOCK
+# ---------------
+m4_defun([_LT_ENABLE_LOCK],
+[AC_ARG_ENABLE([libtool-lock],
+  [AS_HELP_STRING([--disable-libtool-lock],
+    [avoid locking (might break parallel builds)])])
+test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes
+
+# Some flags need to be propagated to the compiler or linker for good
+# libtool support.
+case $host in
+ia64-*-hpux*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.$ac_objext` in
+      *ELF-32*)
+	HPUX_IA64_MODE="32"
+	;;
+      *ELF-64*)
+	HPUX_IA64_MODE="64"
+	;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+*-*-irix6*)
+  # Find out which ABI we are using.
+  echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    if test "$lt_cv_prog_gnu_ld" = yes; then
+      case `/usr/bin/file conftest.$ac_objext` in
+	*32-bit*)
+	  LD="${LD-ld} -melf32bsmip"
+	  ;;
+	*N32*)
+	  LD="${LD-ld} -melf32bmipn32"
+	  ;;
+	*64-bit*)
+	  LD="${LD-ld} -melf64bmip"
+	;;
+      esac
+    else
+      case `/usr/bin/file conftest.$ac_objext` in
+	*32-bit*)
+	  LD="${LD-ld} -32"
+	  ;;
+	*N32*)
+	  LD="${LD-ld} -n32"
+	  ;;
+	*64-bit*)
+	  LD="${LD-ld} -64"
+	  ;;
+      esac
+    fi
+  fi
+  rm -rf conftest*
+  ;;
+
+x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \
+s390*-*linux*|s390*-*tpf*|sparc*-*linux*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.o` in
+      *32-bit*)
+	case $host in
+	  x86_64-*kfreebsd*-gnu)
+	    LD="${LD-ld} -m elf_i386_fbsd"
+	    ;;
+	  x86_64-*linux*)
+	    LD="${LD-ld} -m elf_i386"
+	    ;;
+	  ppc64-*linux*|powerpc64-*linux*)
+	    LD="${LD-ld} -m elf32ppclinux"
+	    ;;
+	  s390x-*linux*)
+	    LD="${LD-ld} -m elf_s390"
+	    ;;
+	  sparc64-*linux*)
+	    LD="${LD-ld} -m elf32_sparc"
+	    ;;
+	esac
+	;;
+      *64-bit*)
+	case $host in
+	  x86_64-*kfreebsd*-gnu)
+	    LD="${LD-ld} -m elf_x86_64_fbsd"
+	    ;;
+	  x86_64-*linux*)
+	    LD="${LD-ld} -m elf_x86_64"
+	    ;;
+	  ppc*-*linux*|powerpc*-*linux*)
+	    LD="${LD-ld} -m elf64ppc"
+	    ;;
+	  s390*-*linux*|s390*-*tpf*)
+	    LD="${LD-ld} -m elf64_s390"
+	    ;;
+	  sparc*-*linux*)
+	    LD="${LD-ld} -m elf64_sparc"
+	    ;;
+	esac
+	;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+
+*-*-sco3.2v5*)
+  # On SCO OpenServer 5, we need -belf to get full-featured binaries.
+  SAVE_CFLAGS="$CFLAGS"
+  CFLAGS="$CFLAGS -belf"
+  AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf,
+    [AC_LANG_PUSH(C)
+     AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no])
+     AC_LANG_POP])
+  if test x"$lt_cv_cc_needs_belf" != x"yes"; then
+    # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
+    CFLAGS="$SAVE_CFLAGS"
+  fi
+  ;;
+sparc*-*solaris*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.o` in
+    *64-bit*)
+      case $lt_cv_prog_gnu_ld in
+      yes*) LD="${LD-ld} -m elf64_sparc" ;;
+      *)
+	if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
+	  LD="${LD-ld} -64"
+	fi
+	;;
+      esac
+      ;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+esac
+
+need_locks="$enable_libtool_lock"
+])# _LT_ENABLE_LOCK
+
+
+# _LT_CMD_OLD_ARCHIVE
+# -------------------
+m4_defun([_LT_CMD_OLD_ARCHIVE],
+[AC_CHECK_TOOL(AR, ar, false)
+test -z "$AR" && AR=ar
+test -z "$AR_FLAGS" && AR_FLAGS=cru
+_LT_DECL([], [AR], [1], [The archiver])
+_LT_DECL([], [AR_FLAGS], [1])
+
+AC_CHECK_TOOL(STRIP, strip, :)
+test -z "$STRIP" && STRIP=:
+_LT_DECL([], [STRIP], [1], [A symbol stripping program])
+
+AC_CHECK_TOOL(RANLIB, ranlib, :)
+test -z "$RANLIB" && RANLIB=:
+_LT_DECL([], [RANLIB], [1],
+    [Commands used to install an old-style archive])
+
+# Determine commands to create old-style static archives.
+old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs'
+old_postinstall_cmds='chmod 644 $oldlib'
+old_postuninstall_cmds=
+
+if test -n "$RANLIB"; then
+  case $host_os in
+  openbsd*)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$oldlib"
+    ;;
+  *)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$oldlib"
+    ;;
+  esac
+  old_archive_cmds="$old_archive_cmds~\$RANLIB \$oldlib"
+fi
+
+case $host_os in
+  darwin*)
+    lock_old_archive_extraction=yes ;;
+  *)
+    lock_old_archive_extraction=no ;;
+esac
+_LT_DECL([], [old_postinstall_cmds], [2])
+_LT_DECL([], [old_postuninstall_cmds], [2])
+_LT_TAGDECL([], [old_archive_cmds], [2],
+    [Commands used to build an old-style archive])
+_LT_DECL([], [lock_old_archive_extraction], [0],
+    [Whether to use a lock for old archive extraction])
+])# _LT_CMD_OLD_ARCHIVE
+
+
+# _LT_COMPILER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
+#		[OUTPUT-FILE], [ACTION-SUCCESS], [ACTION-FAILURE])
+# ----------------------------------------------------------------
+# Check whether the given compiler option works
+AC_DEFUN([_LT_COMPILER_OPTION],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_SED])dnl
+AC_CACHE_CHECK([$1], [$2],
+  [$2=no
+   m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4])
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+   lt_compiler_flag="$3"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   # The option is referenced via a variable to avoid confusing sed.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
+   (eval "$lt_compile" 2>conftest.err)
+   ac_status=$?
+   cat conftest.err >&AS_MESSAGE_LOG_FD
+   echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+   if (exit $ac_status) && test -s "$ac_outfile"; then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings other than the usual output.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
+     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
+       $2=yes
+     fi
+   fi
+   $RM -r conftest*
+])
+
+if test x"[$]$2" = xyes; then
+    m4_if([$5], , :, [$5])
+else
+    m4_if([$6], , :, [$6])
+fi
+])# _LT_COMPILER_OPTION
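+
+# A usage sketch (illustrative; the flag and cache variable are
+# hypothetical examples):
+#
+#   _LT_COMPILER_OPTION([if $compiler supports -fno-rtti],
+#     [lt_cv_prog_compiler_rtti], [-fno-rtti], [],
+#     [CFLAGS="$CFLAGS -fno-rtti"])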
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_COMPILER_OPTION], [_LT_COMPILER_OPTION])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_COMPILER_OPTION], [])
+
+
+# _LT_LINKER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
+#                  [ACTION-SUCCESS], [ACTION-FAILURE])
+# ----------------------------------------------------
+# Check whether the given linker option works
+AC_DEFUN([_LT_LINKER_OPTION],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_SED])dnl
+AC_CACHE_CHECK([$1], [$2],
+  [$2=no
+   save_LDFLAGS="$LDFLAGS"
+   LDFLAGS="$LDFLAGS $3"
+   echo "$lt_simple_link_test_code" > conftest.$ac_ext
+   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
+     # The linker can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     if test -s conftest.err; then
+       # Append any errors to the config.log.
+       cat conftest.err 1>&AS_MESSAGE_LOG_FD
+       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
+       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+       if diff conftest.exp conftest.er2 >/dev/null; then
+         $2=yes
+       fi
+     else
+       $2=yes
+     fi
+   fi
+   $RM -r conftest*
+   LDFLAGS="$save_LDFLAGS"
+])
+
+if test x"[$]$2" = xyes; then
+    m4_if([$4], , :, [$4])
+else
+    m4_if([$5], , :, [$5])
+fi
+])# _LT_LINKER_OPTION
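+
+# A usage sketch (illustrative; the flag and cache variable are
+# hypothetical examples):
+#
+#   _LT_LINKER_OPTION([whether the linker accepts -z relro],
+#     [lt_cv_ld_z_relro], [-Wl,-z,relro],
+#     [LDFLAGS="$LDFLAGS -Wl,-z,relro"])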
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_LINKER_OPTION], [_LT_LINKER_OPTION])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_LINKER_OPTION], [])
+
+
+# LT_CMD_MAX_LEN
+# --------------
+AC_DEFUN([LT_CMD_MAX_LEN],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+# find the maximum length of command line arguments
+AC_MSG_CHECKING([the maximum length of command line arguments])
+AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl
+  i=0
+  teststring="ABCD"
+
+  case $build_os in
+  msdosdjgpp*)
+    # On DJGPP, this test can blow up pretty badly due to problems in libc
+    # (any single argument exceeding 2000 bytes causes a buffer overrun
+    # during glob expansion).  Even if it were fixed, the result of this
+    # check would be larger than it should be.
+    lt_cv_sys_max_cmd_len=12288;    # 12K is about right
+    ;;
+
+  gnu*)
+    # Under GNU Hurd, this test is not required because there is
+    # no limit to the length of command line arguments.
+    # Libtool will interpret -1 as no limit whatsoever
+    lt_cv_sys_max_cmd_len=-1;
+    ;;
+
+  cygwin* | mingw* | cegcc*)
+    # On Win9x/ME, this test blows up -- it succeeds, but takes
+    # about 5 minutes as the teststring grows exponentially.
+    # Worse, since 9x/ME are not pre-emptively multitasking,
+    # you end up with a "frozen" computer, even though with patience
+    # the test eventually succeeds (with a max line length of 256k).
+    # Instead, let's just punt: use the minimum linelength reported by
+    # all of the supported platforms: 8192 (on NT/2K/XP).
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  mint*)
+    # On MiNT this can take a long time and run out of memory.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  amigaos*)
+    # On AmigaOS with pdksh, this test takes hours, literally.
+    # So we just punt and use a minimum line length of 8192.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  netbsd* | freebsd* | openbsd* | darwin* | dragonfly*)
+    # This has been around since 386BSD, at least.  Likely further.
+    if test -x /sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax`
+    elif test -x /usr/sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax`
+    else
+      lt_cv_sys_max_cmd_len=65536	# usable default for all BSDs
+    fi
+    # And add a safety zone
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    ;;
+
+  interix*)
+    # We know the value 262144 and hardcode it with a safety zone (like BSD)
+    lt_cv_sys_max_cmd_len=196608
+    ;;
+
+  osf*)
+    # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure
+    # due to this test when exec_disable_arg_limit is 1 on Tru64.  It is not
+    # nice to cause kernel panics, so let's avoid the loop below.
+    # First set a reasonable default.
+    lt_cv_sys_max_cmd_len=16384
+    #
+    if test -x /sbin/sysconfig; then
+      case `/sbin/sysconfig -q proc exec_disable_arg_limit` in
+        *1*) lt_cv_sys_max_cmd_len=-1 ;;
+      esac
+    fi
+    ;;
+  sco3.2v5*)
+    lt_cv_sys_max_cmd_len=102400
+    ;;
+  sysv5* | sco5v6* | sysv4.2uw2*)
+    kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null`
+    if test -n "$kargmax"; then
+      lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[[	 ]]//'`
+    else
+      lt_cv_sys_max_cmd_len=32768
+    fi
+    ;;
+  *)
+    lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null`
+    if test -n "$lt_cv_sys_max_cmd_len"; then
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    else
+      # Make teststring a little bigger before we do anything with it.
+      # A 1K string should be a reasonable start.
+      for i in 1 2 3 4 5 6 7 8 ; do
+        teststring=$teststring$teststring
+      done
+      SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}}
+      # If test is not a shell built-in, we'll probably end up computing a
+      # maximum length that is only half of the actual maximum length, but
+      # we can't tell.
+      while { test "X"`func_fallback_echo "$teststring$teststring" 2>/dev/null` \
+	         = "X$teststring$teststring"; } >/dev/null 2>&1 &&
+	      test $i != 17 # 1/2 MB should be enough
+      do
+        i=`expr $i + 1`
+        teststring=$teststring$teststring
+      done
+      # Only check the string length outside the loop.
+      lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1`
+      teststring=
+      # Add a significant safety factor because C++ compilers can tack on
+      # massive amounts of additional arguments before passing them to the
+      # linker.  It appears as though 1/2 is a usable value.
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2`
+    fi
+    ;;
+  esac
+])
+if test -n "$lt_cv_sys_max_cmd_len"; then
+  AC_MSG_RESULT($lt_cv_sys_max_cmd_len)
+else
+  AC_MSG_RESULT(none)
+fi
+max_cmd_len=$lt_cv_sys_max_cmd_len
+_LT_DECL([], [max_cmd_len], [0],
+    [What is the maximum length of a command?])
+])# LT_CMD_MAX_LEN
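+
+# Illustrative shell arithmetic for the safety margin applied above,
+# assuming getconf reports an ARG_MAX of 2097152 bytes:
+#   lt_cv_sys_max_cmd_len=`getconf ARG_MAX`      # 2097152
+#   expr 2097152 \/ 4                            # 524288
+#   expr 524288 \* 3                             # 1572864 (3/4 of ARG_MAX)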
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_SYS_MAX_CMD_LEN], [LT_CMD_MAX_LEN])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_SYS_MAX_CMD_LEN], [])
+
+
+# _LT_HEADER_DLFCN
+# ----------------
+m4_defun([_LT_HEADER_DLFCN],
+[AC_CHECK_HEADERS([dlfcn.h], [], [], [AC_INCLUDES_DEFAULT])dnl
+])# _LT_HEADER_DLFCN
+
+
+# _LT_TRY_DLOPEN_SELF (ACTION-IF-TRUE, ACTION-IF-TRUE-W-USCORE,
+#                      ACTION-IF-FALSE, ACTION-IF-CROSS-COMPILING)
+# ----------------------------------------------------------------
+m4_defun([_LT_TRY_DLOPEN_SELF],
+[m4_require([_LT_HEADER_DLFCN])dnl
+if test "$cross_compiling" = yes; then :
+  [$4]
+else
+  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+  lt_status=$lt_dlunknown
+  cat > conftest.$ac_ext <<_LT_EOF
+[#line $LINENO "configure"
+#include "confdefs.h"
+
+#if HAVE_DLFCN_H
+#include <dlfcn.h>
+#endif
+
+#include <stdio.h>
+
+#ifdef RTLD_GLOBAL
+#  define LT_DLGLOBAL		RTLD_GLOBAL
+#else
+#  ifdef DL_GLOBAL
+#    define LT_DLGLOBAL		DL_GLOBAL
+#  else
+#    define LT_DLGLOBAL		0
+#  endif
+#endif
+
+/* We may have to define LT_DLLAZY_OR_NOW on the command line if we
+   find out it does not work on some platform. */
+#ifndef LT_DLLAZY_OR_NOW
+#  ifdef RTLD_LAZY
+#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
+#  else
+#    ifdef DL_LAZY
+#      define LT_DLLAZY_OR_NOW		DL_LAZY
+#    else
+#      ifdef RTLD_NOW
+#        define LT_DLLAZY_OR_NOW	RTLD_NOW
+#      else
+#        ifdef DL_NOW
+#          define LT_DLLAZY_OR_NOW	DL_NOW
+#        else
+#          define LT_DLLAZY_OR_NOW	0
+#        endif
+#      endif
+#    endif
+#  endif
+#endif
+
+/* When -fvisibility=hidden is used, assume the code has been annotated
+   correspondingly for the symbols needed.  */
+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+void fnord () __attribute__((visibility("default")));
+#endif
+
+void fnord () { int i=42; }
+int main ()
+{
+  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+  int status = $lt_dlunknown;
+
+  if (self)
+    {
+      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
+      else
+        {
+	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
+          else puts (dlerror ());
+	}
+      /* dlclose (self); */
+    }
+  else
+    puts (dlerror ());
+
+  return status;
+}]
+_LT_EOF
+  if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext} 2>/dev/null; then
+    (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null
+    lt_status=$?
+    case x$lt_status in
+      x$lt_dlno_uscore) $1 ;;
+      x$lt_dlneed_uscore) $2 ;;
+      x$lt_dlunknown|x*) $3 ;;
+    esac
+  else :
+    # compilation failed
+    $3
+  fi
+fi
+rm -fr conftest*
+])# _LT_TRY_DLOPEN_SELF
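+
+# The embedded conftest program above exits with $lt_dlno_uscore (1) when
+# dlsym finds "fnord" directly, with $lt_dlneed_uscore (2) when only the
+# underscore-prefixed "_fnord" resolves, and with $lt_dlunknown (0) when
+# neither lookup succeeds.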
+
+
+# LT_SYS_DLOPEN_SELF
+# ------------------
+AC_DEFUN([LT_SYS_DLOPEN_SELF],
+[m4_require([_LT_HEADER_DLFCN])dnl
+if test "x$enable_dlopen" != xyes; then
+  enable_dlopen=unknown
+  enable_dlopen_self=unknown
+  enable_dlopen_self_static=unknown
+else
+  lt_cv_dlopen=no
+  lt_cv_dlopen_libs=
+
+  case $host_os in
+  beos*)
+    lt_cv_dlopen="load_add_on"
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+    ;;
+
+  mingw* | pw32* | cegcc*)
+    lt_cv_dlopen="LoadLibrary"
+    lt_cv_dlopen_libs=
+    ;;
+
+  cygwin*)
+    lt_cv_dlopen="dlopen"
+    lt_cv_dlopen_libs=
+    ;;
+
+  darwin*)
+  # if libdl is installed we need to link against it
+    AC_CHECK_LIB([dl], [dlopen],
+		[lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],[
+    lt_cv_dlopen="dyld"
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+    ])
+    ;;
+
+  *)
+    AC_CHECK_FUNC([shl_load],
+	  [lt_cv_dlopen="shl_load"],
+      [AC_CHECK_LIB([dld], [shl_load],
+	    [lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"],
+	[AC_CHECK_FUNC([dlopen],
+	      [lt_cv_dlopen="dlopen"],
+	  [AC_CHECK_LIB([dl], [dlopen],
+		[lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],
+	    [AC_CHECK_LIB([svld], [dlopen],
+		  [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"],
+	      [AC_CHECK_LIB([dld], [dld_link],
+		    [lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"])
+	      ])
+	    ])
+	  ])
+	])
+      ])
+    ;;
+  esac
+
+  if test "x$lt_cv_dlopen" != xno; then
+    enable_dlopen=yes
+  else
+    enable_dlopen=no
+  fi
+
+  case $lt_cv_dlopen in
+  dlopen)
+    save_CPPFLAGS="$CPPFLAGS"
+    test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H"
+
+    save_LDFLAGS="$LDFLAGS"
+    wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\"
+
+    save_LIBS="$LIBS"
+    LIBS="$lt_cv_dlopen_libs $LIBS"
+
+    AC_CACHE_CHECK([whether a program can dlopen itself],
+	  lt_cv_dlopen_self, [dnl
+	  _LT_TRY_DLOPEN_SELF(
+	    lt_cv_dlopen_self=yes, lt_cv_dlopen_self=yes,
+	    lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross)
+    ])
+
+    if test "x$lt_cv_dlopen_self" = xyes; then
+      wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\"
+      AC_CACHE_CHECK([whether a statically linked program can dlopen itself],
+	  lt_cv_dlopen_self_static, [dnl
+	  _LT_TRY_DLOPEN_SELF(
+	    lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=yes,
+	    lt_cv_dlopen_self_static=no,  lt_cv_dlopen_self_static=cross)
+      ])
+    fi
+
+    CPPFLAGS="$save_CPPFLAGS"
+    LDFLAGS="$save_LDFLAGS"
+    LIBS="$save_LIBS"
+    ;;
+  esac
+
+  case $lt_cv_dlopen_self in
+  yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;;
+  *) enable_dlopen_self=unknown ;;
+  esac
+
+  case $lt_cv_dlopen_self_static in
+  yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;;
+  *) enable_dlopen_self_static=unknown ;;
+  esac
+fi
+_LT_DECL([dlopen_support], [enable_dlopen], [0],
+	 [Whether dlopen is supported])
+_LT_DECL([dlopen_self], [enable_dlopen_self], [0],
+	 [Whether dlopen of programs is supported])
+_LT_DECL([dlopen_self_static], [enable_dlopen_self_static], [0],
+	 [Whether dlopen of statically linked programs is supported])
+])# LT_SYS_DLOPEN_SELF
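+
+# In a configure.ac these checks are normally reached via LT_INIT with the
+# dlopen option, e.g. (illustrative):
+#   LT_INIT([dlopen])
+# which sets enable_dlopen=yes before LT_SYS_DLOPEN_SELF runs.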
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_DLOPEN_SELF], [LT_SYS_DLOPEN_SELF])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_DLOPEN_SELF], [])
+
+
+# _LT_COMPILER_C_O([TAGNAME])
+# ---------------------------
+# Check to see if options -c and -o are simultaneously supported by compiler.
+# This macro does not hard code the compiler like AC_PROG_CC_C_O.
+m4_defun([_LT_COMPILER_C_O],
+[m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+AC_CACHE_CHECK([if $compiler supports -c -o file.$ac_objext],
+  [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)],
+  [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=no
+   $RM -r conftest 2>/dev/null
+   mkdir conftest
+   cd conftest
+   mkdir out
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+   lt_compiler_flag="-o out/conftest2.$ac_objext"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
+   (eval "$lt_compile" 2>out/conftest.err)
+   ac_status=$?
+   cat out/conftest.err >&AS_MESSAGE_LOG_FD
+   echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+   if (exit $ac_status) && test -s out/conftest2.$ac_objext
+   then
+     # The compiler can only warn about and ignore the option if it is not
+     # recognized, so say no if there are any warnings.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
+     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
+     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
+       _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
+     fi
+   fi
+   chmod u+w . 2>&AS_MESSAGE_LOG_FD
+   $RM -r conftest*
+   # SGI C++ compiler will create directory out/ii_files/ for
+   # template instantiation
+   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
+   $RM out/* && rmdir out
+   cd ..
+   $RM -r conftest
+   $RM -r conftest*
+])
+_LT_TAGDECL([compiler_c_o], [lt_cv_prog_compiler_c_o], [1],
+	[Does compiler simultaneously support -c and -o options?])
+])# _LT_COMPILER_C_O
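+
+# In effect the probe above runs something like (illustrative command):
+#   $CC $CFLAGS -c conftest.c -o out/conftest2.o
+# and answers "no" if the compiler emits any diagnostic beyond its usual
+# boilerplate, since some compilers only warn about an unsupported -o.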
+
+
+# _LT_COMPILER_FILE_LOCKS([TAGNAME])
+# ----------------------------------
+# Check to see if we can do hard links to lock some files if needed
+m4_defun([_LT_COMPILER_FILE_LOCKS],
+[m4_require([_LT_ENABLE_LOCK])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+_LT_COMPILER_C_O([$1])
+
+hard_links="nottested"
+if test "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" = no && test "$need_locks" != no; then
+  # do not overwrite the value of need_locks provided by the user
+  AC_MSG_CHECKING([if we can lock with hard links])
+  hard_links=yes
+  $RM -r conftest*
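+  # The first ln must fail (conftest.a does not exist yet); after touch,
+  # the second must succeed and the third must fail because the link
+  # target already exists -- otherwise hard-link locking is unreliable.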
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  touch conftest.a
+  ln conftest.a conftest.b 2>&5 || hard_links=no
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  AC_MSG_RESULT([$hard_links])
+  if test "$hard_links" = no; then
+    AC_MSG_WARN([`$CC' does not support `-c -o', so `make -j' may be unsafe])
+    need_locks=warn
+  fi
+else
+  need_locks=no
+fi
+_LT_DECL([], [need_locks], [1], [Must we lock files when doing compilation?])
+])# _LT_COMPILER_FILE_LOCKS
+
+
+# _LT_CHECK_OBJDIR
+# ----------------
+m4_defun([_LT_CHECK_OBJDIR],
+[AC_CACHE_CHECK([for objdir], [lt_cv_objdir],
+[rm -f .libs 2>/dev/null
+mkdir .libs 2>/dev/null
+if test -d .libs; then
+  lt_cv_objdir=.libs
+else
+  # MS-DOS does not allow filenames that begin with a dot.
+  lt_cv_objdir=_libs
+fi
+rmdir .libs 2>/dev/null])
+objdir=$lt_cv_objdir
+_LT_DECL([], [objdir], [0],
+         [The name of the directory that contains temporary libtool files])dnl
+m4_pattern_allow([LT_OBJDIR])dnl
+AC_DEFINE_UNQUOTED(LT_OBJDIR, "$lt_cv_objdir/",
+  [Define to the sub-directory in which libtool stores uninstalled libraries.])
+])# _LT_CHECK_OBJDIR
+
+
+# _LT_LINKER_HARDCODE_LIBPATH([TAGNAME])
+# --------------------------------------
+# Check hardcoding attributes.
+m4_defun([_LT_LINKER_HARDCODE_LIBPATH],
+[AC_MSG_CHECKING([how to hardcode library paths into programs])
+_LT_TAGVAR(hardcode_action, $1)=
+if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" ||
+   test -n "$_LT_TAGVAR(runpath_var, $1)" ||
+   test "X$_LT_TAGVAR(hardcode_automatic, $1)" = "Xyes" ; then
+
+  # We can hardcode non-existent directories.
+  if test "$_LT_TAGVAR(hardcode_direct, $1)" != no &&
+     # If the only mechanism to avoid hardcoding is shlibpath_var, we
+     # have to relink; otherwise we might link with an installed library
+     # when we should be linking with a yet-to-be-installed one.
+     ## test "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" != no &&
+     test "$_LT_TAGVAR(hardcode_minus_L, $1)" != no; then
+    # Linking always hardcodes the temporary library directory.
+    _LT_TAGVAR(hardcode_action, $1)=relink
+  else
+    # We can link without hardcoding, and we can hardcode non-existent dirs.
+    _LT_TAGVAR(hardcode_action, $1)=immediate
+  fi
+else
+  # We cannot hardcode anything, or else we can only hardcode existing
+  # directories.
+  _LT_TAGVAR(hardcode_action, $1)=unsupported
+fi
+AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)])
+
+if test "$_LT_TAGVAR(hardcode_action, $1)" = relink ||
+   test "$_LT_TAGVAR(inherit_rpath, $1)" = yes; then
+  # Fast installation is not supported
+  enable_fast_install=no
+elif test "$shlibpath_overrides_runpath" = yes ||
+     test "$enable_shared" = no; then
+  # Fast installation is not necessary
+  enable_fast_install=needless
+fi
+_LT_TAGDECL([], [hardcode_action], [0],
+    [How to hardcode a shared library path into an executable])
+])# _LT_LINKER_HARDCODE_LIBPATH
+
+
+# _LT_CMD_STRIPLIB
+# ----------------
+m4_defun([_LT_CMD_STRIPLIB],
+[m4_require([_LT_DECL_EGREP])
+striplib=
+old_striplib=
+AC_MSG_CHECKING([whether stripping libraries is possible])
+if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then
+  test -z "$old_striplib" && old_striplib="$STRIP --strip-debug"
+  test -z "$striplib" && striplib="$STRIP --strip-unneeded"
+  AC_MSG_RESULT([yes])
+else
+# FIXME - insert some real tests, host_os isn't really good enough
+  case $host_os in
+  darwin*)
+    if test -n "$STRIP" ; then
+      striplib="$STRIP -x"
+      old_striplib="$STRIP -S"
+      AC_MSG_RESULT([yes])
+    else
+      AC_MSG_RESULT([no])
+    fi
+    ;;
+  *)
+    AC_MSG_RESULT([no])
+    ;;
+  esac
+fi
+_LT_DECL([], [old_striplib], [1], [Commands to strip libraries])
+_LT_DECL([], [striplib], [1])
+])# _LT_CMD_STRIPLIB
+
+
+# _LT_SYS_DYNAMIC_LINKER([TAG])
+# -----------------------------
+# PORTME Fill in your ld.so characteristics
+m4_defun([_LT_SYS_DYNAMIC_LINKER],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_OBJDUMP])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_CHECK_SHELL_FEATURES])dnl
+AC_MSG_CHECKING([dynamic linker characteristics])
+m4_if([$1],
+	[], [
+if test "$GCC" = yes; then
+  case $host_os in
+    darwin*) lt_awk_arg="/^libraries:/,/LR/" ;;
+    *) lt_awk_arg="/^libraries:/" ;;
+  esac
+  case $host_os in
+    mingw* | cegcc*) lt_sed_strip_eq="s,=\([[A-Za-z]]:\),\1,g" ;;
+    *) lt_sed_strip_eq="s,=/,/,g" ;;
+  esac
+  lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq`
+  case $lt_search_path_spec in
+  *\;*)
+    # if the path contains ";" then we assume it to be the separator
+    # otherwise default to the standard path separator (i.e. ":") - it is
+    # assumed that no part of a normal pathname contains ";" but that should
+    # okay in the real world where ";" in dirpaths is itself problematic.
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'`
+    ;;
+  *)
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"`
+    ;;
+  esac
+  # OK, now that we have the path separated by spaces, we can step through
+  # it and add the multilib dir if necessary.
+  lt_tmp_lt_search_path_spec=
+  lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null`
+  for lt_sys_path in $lt_search_path_spec; do
+    if test -d "$lt_sys_path/$lt_multi_os_dir"; then
+      lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir"
+    else
+      test -d "$lt_sys_path" && \
+	lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path"
+    fi
+  done
+  lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk '
+BEGIN {RS=" "; FS="/|\n";} {
+  lt_foo="";
+  lt_count=0;
+  for (lt_i = NF; lt_i > 0; lt_i--) {
+    if ($lt_i != "" && $lt_i != ".") {
+      if ($lt_i == "..") {
+        lt_count++;
+      } else {
+        if (lt_count == 0) {
+          lt_foo="/" $lt_i lt_foo;
+        } else {
+          lt_count--;
+        }
+      }
+    }
+  }
+  if (lt_foo != "") { lt_freq[[lt_foo]]++; }
+  if (lt_freq[[lt_foo]] == 1) { print lt_foo; }
+}'`
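+  # e.g. the awk program above rewrites "/usr/lib/gcc/../../lib" to
+  # "/usr/lib" and prints each normalized directory only once.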
+  # AWK program above erroneously prepends '/' to C:/dos/paths
+  # for these hosts.
+  case $host_os in
+    mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\
+      $SED 's,/\([[A-Za-z]]:\),\1,g'` ;;
+  esac
+  sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP`
+else
+  sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
+fi])
+library_names_spec=
+libname_spec='lib$name'
+soname_spec=
+shrext_cmds=".so"
+postinstall_cmds=
+postuninstall_cmds=
+finish_cmds=
+finish_eval=
+shlibpath_var=
+shlibpath_overrides_runpath=unknown
+version_type=none
+dynamic_linker="$host_os ld.so"
+sys_lib_dlsearch_path_spec="/lib /usr/lib"
+need_lib_prefix=unknown
+hardcode_into_libs=no
+
+# When you set need_version to no, make sure it does not cause -set_version
+# flags to be left without arguments.
+need_version=unknown
+
+case $host_os in
+aix3*)
+  version_type=linux
+  library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
+  shlibpath_var=LIBPATH
+
+  # AIX 3 has no versioning support, so we append a major version to the name.
+  soname_spec='${libname}${release}${shared_ext}$major'
+  ;;
+
+aix[[4-9]]*)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  hardcode_into_libs=yes
+  if test "$host_cpu" = ia64; then
+    # AIX 5 supports IA64
+    library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
+    shlibpath_var=LD_LIBRARY_PATH
+  else
+    # With GCC up to 2.95.x, collect2 would create an import file
+    # for dependence libraries.  The import file would start with
+    # the line `#! .'.  This would cause the generated library to
+    # depend on `.', always an invalid library.  This was fixed in
+    # development snapshots of GCC prior to 3.0.
+    case $host_os in
+      aix4 | aix4.[[01]] | aix4.[[01]].*)
+      if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
+	   echo ' yes '
+	   echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
+	:
+      else
+	can_build_shared=no
+      fi
+      ;;
+    esac
+    # AIX (on Power*) has no versioning support, so currently we cannot
+    # hardcode the correct soname into the executable.  We could probably add
+    # versioning support to collect2, so additional links could be useful
+    # in the future.
+    if test "$aix_use_runtimelinking" = yes; then
+      # If using run time linking (on AIX 4.2 or later) use lib<name>.so
+      # instead of lib<name>.a to let people know that these are not
+      # typical AIX shared libraries.
+      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    else
+      # We keep .a as the extension for shared libraries on AIX 4.2
+      # and later when we are not doing run time linking.
+      library_names_spec='${libname}${release}.a $libname.a'
+      soname_spec='${libname}${release}${shared_ext}$major'
+    fi
+    shlibpath_var=LIBPATH
+  fi
+  ;;
+
+amigaos*)
+  case $host_cpu in
+  powerpc)
+    # Since July 2007 AmigaOS4 officially supports .so libraries.
+    # When compiling the executable, add -use-dynld -Lsobjs: to the compile line.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    ;;
+  m68k)
+    library_names_spec='$libname.ixlibrary $libname.a'
+    # Create ${libname}_ixlibrary.a entries in /sys/libs.
+    finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
+    ;;
+  esac
+  ;;
+
+beos*)
+  library_names_spec='${libname}${shared_ext}'
+  dynamic_linker="$host_os ld.so"
+  shlibpath_var=LIBRARY_PATH
+  ;;
+
+bsdi[[45]]*)
+  version_type=linux
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
+  sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
+  # the default ld.so.conf also contains /usr/contrib/lib and
+  # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
+  # libtool to hard-code these into programs
+  ;;
+
+cygwin* | mingw* | pw32* | cegcc*)
+  version_type=windows
+  shrext_cmds=".dll"
+  need_version=no
+  need_lib_prefix=no
+
+  case $GCC,$host_os in
+  yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
+    library_names_spec='$libname.dll.a'
+    # DLL is installed to $(libdir)/../bin by postinstall_cmds
+    postinstall_cmds='base_file=`basename \${file}`~
+      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
+      dldir=$destdir/`dirname \$dlpath`~
+      test -d \$dldir || mkdir -p \$dldir~
+      $install_prog $dir/$dlname \$dldir/$dlname~
+      chmod a+x \$dldir/$dlname~
+      if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
+        eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
+      fi'
+    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+      dlpath=$dir/\$dldll~
+       $RM \$dlpath'
+    shlibpath_overrides_runpath=yes
+
+    case $host_os in
+    cygwin*)
+      # Cygwin DLLs use 'cyg' prefix rather than 'lib'
+      soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+m4_if([$1], [],[
+      sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"])
+      ;;
+    mingw* | cegcc*)
+      # MinGW DLLs use traditional 'lib' prefix
+      soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+      ;;
+    pw32*)
+      # pw32 DLLs use 'pw' prefix rather than 'lib'
+      library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}'
+      ;;
+    esac
+    ;;
+
+  *)
+    library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib'
+    ;;
+  esac
+  dynamic_linker='Win32 ld.exe'
+  # FIXME: first we should search . and the directory the executable is in
+  shlibpath_var=PATH
+  ;;
+
+darwin* | rhapsody*)
+  dynamic_linker="$host_os dyld"
+  version_type=darwin
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext ${libname}${release}${versuffix}$shared_ext'
+  soname_spec='${libname}${release}${major}$shared_ext'
+  shlibpath_overrides_runpath=yes
+  shlibpath_var=DYLD_LIBRARY_PATH
+  shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
+m4_if([$1], [],[
+  sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"])
+  sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
+  ;;
+
+dgux*)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+freebsd1*)
+  dynamic_linker=no
+  ;;
+
+freebsd* | dragonfly*)
+  # DragonFly does not have aout.  When/if they implement a new
+  # versioning mechanism, adjust this.
+  if test -x /usr/bin/objformat; then
+    objformat=`/usr/bin/objformat`
+  else
+    case $host_os in
+    freebsd[[123]]*) objformat=aout ;;
+    *) objformat=elf ;;
+    esac
+  fi
+  version_type=freebsd-$objformat
+  case $version_type in
+    freebsd-elf*)
+      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+      need_version=no
+      need_lib_prefix=no
+      ;;
+    freebsd-*)
+      library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+      need_version=yes
+      ;;
+  esac
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_os in
+  freebsd2*)
+    shlibpath_overrides_runpath=yes
+    ;;
+  freebsd3.[[01]]* | freebsdelf3.[[01]]*)
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  freebsd3.[[2-9]]* | freebsdelf3.[[2-9]]* | \
+  freebsd4.[[0-5]] | freebsdelf4.[[0-5]] | freebsd4.1.1 | freebsdelf4.1.1)
+    shlibpath_overrides_runpath=no
+    hardcode_into_libs=yes
+    ;;
+  *) # from 4.6 on, and DragonFly
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  esac
+  ;;
+
+gnu*)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  hardcode_into_libs=yes
+  ;;
+
+haiku*)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  dynamic_linker="$host_os runtime_loader"
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
+  hardcode_into_libs=yes
+  ;;
+
+hpux9* | hpux10* | hpux11*)
+  # Give a soname corresponding to the major version so that dld.sl refuses to
+  # link against other versions.
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  case $host_cpu in
+  ia64*)
+    shrext_cmds='.so'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.so"
+    shlibpath_var=LD_LIBRARY_PATH
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    if test "X$HPUX_IA64_MODE" = X32; then
+      sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
+    else
+      sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
+    fi
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  hppa*64*)
+    shrext_cmds='.sl'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH?
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  *)
+    shrext_cmds='.sl'
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=SHLIB_PATH
+    shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    ;;
+  esac
+  # HP-UX runs *really* slowly unless shared libraries are mode 555, ...
+  postinstall_cmds='chmod 555 $lib'
+  # or fails outright, so override atomically:
+  install_override_mode=555
+  ;;
+
+interix[[3-9]]*)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $host_os in
+    nonstopux*) version_type=nonstopux ;;
+    *)
+	if test "$lt_cv_prog_gnu_ld" = yes; then
+		version_type=linux
+	else
+		version_type=irix
+	fi ;;
+  esac
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='${libname}${release}${shared_ext}$major'
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
+  case $host_os in
+  irix5* | nonstopux*)
+    libsuff= shlibsuff=
+    ;;
+  *)
+    case $LD in # libtool.m4 will add one of these switches to LD
+    *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
+      libsuff= shlibsuff= libmagic=32-bit;;
+    *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
+      libsuff=32 shlibsuff=N32 libmagic=N32;;
+    *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
+      libsuff=64 shlibsuff=64 libmagic=64-bit;;
+    *) libsuff= shlibsuff= libmagic=never-match;;
+    esac
+    ;;
+  esac
+  shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
+  shlibpath_overrides_runpath=no
+  sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
+  sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
+  hardcode_into_libs=yes
+  ;;
+
+# No shared lib support for Linux oldld, aout, or coff.
+linux*oldld* | linux*aout* | linux*coff*)
+  dynamic_linker=no
+  ;;
+
+# This must be Linux ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
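+  # e.g. for a hypothetical libfoo with interface version 1.2.3 this yields
+  # libfoo.so.1.2.3, libfoo.so.1 and libfoo.so, with soname libfoo.so.1.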
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+
+  # Some binutils ld versions are patched to set DT_RUNPATH.
+  AC_CACHE_VAL([lt_cv_shlibpath_overrides_runpath],
+    [lt_cv_shlibpath_overrides_runpath=no
+    save_LDFLAGS=$LDFLAGS
+    save_libdir=$libdir
+    eval "libdir=/foo; wl=\"$_LT_TAGVAR(lt_prog_compiler_wl, $1)\"; \
+	 LDFLAGS=\"\$LDFLAGS $_LT_TAGVAR(hardcode_libdir_flag_spec, $1)\""
+    AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
+      [AS_IF([ ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null],
+	 [lt_cv_shlibpath_overrides_runpath=yes])])
+    LDFLAGS=$save_LDFLAGS
+    libdir=$save_libdir
+    ])
+  shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath
+
+  # This implies no fast_install, which is unacceptable.
+  # Some rework will be needed to allow for fast_install
+  # before this can be enabled.
+  hardcode_into_libs=yes
+
+  # Append ld.so.conf contents to the search path
+  if test -f /etc/ld.so.conf; then
+    lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[	 ]*hwcap[	 ]/d;s/[:,	]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
+    sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
+  fi
+
+  # We used to test for /lib/ld.so.1 and disable shared libraries on
+  # powerpc, because MkLinux only supported shared libraries with the
+  # GNU dynamic linker.  That test was broken with cross compilers; since
+  # most powerpc-linux boxes support dynamic linking these days and people
+  # can always use --disable-shared, the test was removed, and we now
+  # assume the GNU/Linux dynamic linker is in use.
+  dynamic_linker='GNU/Linux ld.so'
+  ;;
+
+netbsd*)
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+    finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+    dynamic_linker='NetBSD (a.out) ld.so'
+  else
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    dynamic_linker='NetBSD ld.elf_so'
+  fi
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  ;;
+
+newsos6)
+  version_type=linux
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  ;;
+
+*nto* | *qnx*)
+  version_type=qnx
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  dynamic_linker='ldqnx.so'
+  ;;
+
+openbsd*)
+  version_type=sunos
+  sys_lib_dlsearch_path_spec="/usr/lib"
+  need_lib_prefix=no
+  # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
+  case $host_os in
+    openbsd3.3 | openbsd3.3.*)	need_version=yes ;;
+    *)				need_version=no  ;;
+  esac
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+    case $host_os in
+      openbsd2.[[89]] | openbsd2.[[89]].*)
+	shlibpath_overrides_runpath=no
+	;;
+      *)
+	shlibpath_overrides_runpath=yes
+	;;
+      esac
+  else
+    shlibpath_overrides_runpath=yes
+  fi
+  ;;
+
+os2*)
+  libname_spec='$name'
+  shrext_cmds=".dll"
+  need_lib_prefix=no
+  library_names_spec='$libname${shared_ext} $libname.a'
+  dynamic_linker='OS/2 ld.exe'
+  shlibpath_var=LIBPATH
+  ;;
+
+osf3* | osf4* | osf5*)
+  version_type=osf
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='${libname}${release}${shared_ext}$major'
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
+  sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
+  ;;
+
+rdos*)
+  dynamic_linker=no
+  ;;
+
+solaris*)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  # ldd complains unless libraries are executable
+  postinstall_cmds='chmod +x $lib'
+  ;;
+
+sunos4*)
+  version_type=sunos
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+  finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  if test "$with_gnu_ld" = yes; then
+    need_lib_prefix=no
+  fi
+  need_version=yes
+  ;;
+
+sysv4 | sysv4.3*)
+  version_type=linux
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_vendor in
+    sni)
+      shlibpath_overrides_runpath=no
+      need_lib_prefix=no
+      runpath_var=LD_RUN_PATH
+      ;;
+    siemens)
+      need_lib_prefix=no
+      ;;
+    motorola)
+      need_lib_prefix=no
+      need_version=no
+      shlibpath_overrides_runpath=no
+      sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
+      ;;
+  esac
+  ;;
+
+sysv4*MP*)
+  if test -d /usr/nec ;then
+    version_type=linux
+    library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
+    soname_spec='$libname${shared_ext}.$major'
+    shlibpath_var=LD_LIBRARY_PATH
+  fi
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  version_type=freebsd-elf
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  if test "$with_gnu_ld" = yes; then
+    sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
+  else
+    sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
+    case $host_os in
+      sco3.2v5*)
+        sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
+	;;
+    esac
+  fi
+  sys_lib_dlsearch_path_spec='/usr/lib'
+  ;;
+
+tpf*)
+  # TPF is a cross-target only.  Preferred cross-host = GNU/Linux.
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+uts4*)
+  version_type=linux
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+*)
+  dynamic_linker=no
+  ;;
+esac
+AC_MSG_RESULT([$dynamic_linker])
+test "$dynamic_linker" = no && can_build_shared=no
+
+variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
+if test "$GCC" = yes; then
+  variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
+fi
+
+if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
+  sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
+fi
+if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
+  sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
+fi
+
+_LT_DECL([], [variables_saved_for_relink], [1],
+    [Variables whose values should be saved in libtool wrapper scripts and
+    restored at link time])
+_LT_DECL([], [need_lib_prefix], [0],
+    [Do we need the "lib" prefix for modules?])
+_LT_DECL([], [need_version], [0], [Do we need a version for libraries?])
+_LT_DECL([], [version_type], [0], [Library versioning type])
+_LT_DECL([], [runpath_var], [0],  [Shared library runtime path variable])
+_LT_DECL([], [shlibpath_var], [0],[Shared library path variable])
+_LT_DECL([], [shlibpath_overrides_runpath], [0],
+    [Is shlibpath searched before the hard-coded library search path?])
+_LT_DECL([], [libname_spec], [1], [Format of library name prefix])
+_LT_DECL([], [library_names_spec], [1],
+    [[List of archive names.  First name is the real one, the rest are links.
+    The last name is the one that the linker finds with -lNAME]])
+_LT_DECL([], [soname_spec], [1],
+    [[The coded name of the library, if different from the real name]])
+_LT_DECL([], [install_override_mode], [1],
+    [Permission mode override for installation of shared libraries])
+_LT_DECL([], [postinstall_cmds], [2],
+    [Command to use after installation of a shared archive])
+_LT_DECL([], [postuninstall_cmds], [2],
+    [Command to use after uninstallation of a shared archive])
+_LT_DECL([], [finish_cmds], [2],
+    [Commands used to finish a libtool library installation in a directory])
+_LT_DECL([], [finish_eval], [1],
+    [[As "finish_cmds", except a single script fragment to be evaled but
+    not shown]])
+_LT_DECL([], [hardcode_into_libs], [0],
+    [Whether we should hardcode library paths into libraries])
+_LT_DECL([], [sys_lib_search_path_spec], [2],
+    [Compile-time system search path for libraries])
+_LT_DECL([], [sys_lib_dlsearch_path_spec], [2],
+    [Run-time system search path for libraries])
+])# _LT_SYS_DYNAMIC_LINKER
+
+
+# _LT_PATH_TOOL_PREFIX(TOOL)
+# --------------------------
+# find a file program which can recognize a shared library
+AC_DEFUN([_LT_PATH_TOOL_PREFIX],
+[m4_require([_LT_DECL_EGREP])dnl
+AC_MSG_CHECKING([for $1])
+AC_CACHE_VAL(lt_cv_path_MAGIC_CMD,
+[case $MAGIC_CMD in
+[[\\/*] |  ?:[\\/]*])
+  lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
+  ;;
+*)
+  lt_save_MAGIC_CMD="$MAGIC_CMD"
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+dnl $ac_dummy forces splitting on constant user-supplied paths.
+dnl POSIX.2 word splitting is done only on the output of word expansions,
+dnl not every word.  This closes a longstanding sh security hole.
+  ac_dummy="m4_if([$2], , $PATH, [$2])"
+  for ac_dir in $ac_dummy; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f $ac_dir/$1; then
+      lt_cv_path_MAGIC_CMD="$ac_dir/$1"
+      if test -n "$file_magic_test_file"; then
+	case $deplibs_check_method in
+	"file_magic "*)
+	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
+	  MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
+	    $EGREP "$file_magic_regex" > /dev/null; then
+	    :
+	  else
+	    cat <<_LT_EOF 1>&2
+
+*** Warning: the command libtool uses to detect shared libraries,
+*** $file_magic_cmd, produces output that libtool cannot recognize.
+*** The result is that libtool may fail to recognize shared libraries
+*** as such.  This will affect the creation of libtool libraries that
+*** depend on shared libraries, but programs linked with such libtool
+*** libraries will work regardless of this problem.  Nevertheless, you
+*** may want to report the problem to your system manager and/or to
+*** bug-libtool at gnu.org
+
+_LT_EOF
+	  fi ;;
+	esac
+      fi
+      break
+    fi
+  done
+  IFS="$lt_save_ifs"
+  MAGIC_CMD="$lt_save_MAGIC_CMD"
+  ;;
+esac])
+MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+if test -n "$MAGIC_CMD"; then
+  AC_MSG_RESULT($MAGIC_CMD)
+else
+  AC_MSG_RESULT(no)
+fi
+_LT_DECL([], [MAGIC_CMD], [0],
+	 [Used to examine libraries when file_magic_cmd begins with "file"])dnl
+])# _LT_PATH_TOOL_PREFIX
+
+# Old name:
+AU_ALIAS([AC_PATH_TOOL_PREFIX], [_LT_PATH_TOOL_PREFIX])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_PATH_TOOL_PREFIX], [])
+
+
+# _LT_PATH_MAGIC
+# --------------
+# find a file program which can recognize a shared library
+m4_defun([_LT_PATH_MAGIC],
+[_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH)
+if test -z "$lt_cv_path_MAGIC_CMD"; then
+  if test -n "$ac_tool_prefix"; then
+    _LT_PATH_TOOL_PREFIX(file, /usr/bin$PATH_SEPARATOR$PATH)
+  else
+    MAGIC_CMD=:
+  fi
+fi
+])# _LT_PATH_MAGIC
+
+
+# LT_PATH_LD
+# ----------
+# find the pathname to the GNU or non-GNU linker
+AC_DEFUN([LT_PATH_LD],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_PROG_ECHO_BACKSLASH])dnl
+
+AC_ARG_WITH([gnu-ld],
+    [AS_HELP_STRING([--with-gnu-ld],
+	[assume the C compiler uses GNU ld @<:@default=no@:>@])],
+    [test "$withval" = no || with_gnu_ld=yes],
+    [with_gnu_ld=no])dnl
+
+ac_prog=ld
+if test "$GCC" = yes; then
+  # Check if gcc -print-prog-name=ld gives a path.
+  AC_MSG_CHECKING([for ld used by $CC])
+  case $host in
+  *-*-mingw*)
+    # gcc leaves a trailing carriage return which upsets mingw
+    ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
+  *)
+    ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
+  esac
+  case $ac_prog in
+    # Accept absolute paths.
+    [[\\/]]* | ?:[[\\/]]*)
+      re_direlt='/[[^/]][[^/]]*/\.\./'
+      # Canonicalize the pathname of ld
+      ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
+      while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
+	ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
+      done
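+      # Canonicalization example: one pass of the $SED substitution above
+      # maps "/usr/bin/../bin/ld" to "/usr/bin/ld".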
+      test -z "$LD" && LD="$ac_prog"
+      ;;
+  "")
+    # If it fails, then pretend we aren't using GCC.
+    ac_prog=ld
+    ;;
+  *)
+    # If it is relative, then search for the first ld in PATH.
+    with_gnu_ld=unknown
+    ;;
+  esac
+elif test "$with_gnu_ld" = yes; then
+  AC_MSG_CHECKING([for GNU ld])
+else
+  AC_MSG_CHECKING([for non-GNU ld])
+fi
+AC_CACHE_VAL(lt_cv_path_LD,
+[if test -z "$LD"; then
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+  for ac_dir in $PATH; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
+      lt_cv_path_LD="$ac_dir/$ac_prog"
+      # Check to see if the program is GNU ld.  I'd rather use --version,
+      # but apparently some variants of GNU ld only accept -v.
+      # Break only if it was the GNU/non-GNU ld that we prefer.
+      case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in
+      *GNU* | *'with BFD'*)
+	test "$with_gnu_ld" != no && break
+	;;
+      *)
+	test "$with_gnu_ld" != yes && break
+	;;
+      esac
+    fi
+  done
+  IFS="$lt_save_ifs"
+else
+  lt_cv_path_LD="$LD" # Let the user override the test with a path.
+fi])
+LD="$lt_cv_path_LD"
+if test -n "$LD"; then
+  AC_MSG_RESULT($LD)
+else
+  AC_MSG_RESULT(no)
+fi
+test -z "$LD" && AC_MSG_ERROR([no acceptable ld found in \$PATH])
+_LT_PATH_LD_GNU
+AC_SUBST([LD])
+
+_LT_TAGDECL([], [LD], [1], [The linker used to build libraries])
+])# LT_PATH_LD
+
+# Old names:
+AU_ALIAS([AM_PROG_LD], [LT_PATH_LD])
+AU_ALIAS([AC_PROG_LD], [LT_PATH_LD])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_PROG_LD], [])
+dnl AC_DEFUN([AC_PROG_LD], [])
+
+
+# _LT_PATH_LD_GNU
+# ---------------
+m4_defun([_LT_PATH_LD_GNU],
+[AC_CACHE_CHECK([if the linker ($LD) is GNU ld], lt_cv_prog_gnu_ld,
+[# I'd rather use --version here, but apparently some GNU lds only accept -v.
+case `$LD -v 2>&1 </dev/null` in
+*GNU* | *'with BFD'*)
+  lt_cv_prog_gnu_ld=yes
+  ;;
+*)
+  lt_cv_prog_gnu_ld=no
+  ;;
+esac])
+with_gnu_ld=$lt_cv_prog_gnu_ld
+])# _LT_PATH_LD_GNU
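+
+# A typical first line of `$LD -v` output that the case above classifies
+# as GNU ld (version string is illustrative):
+#   GNU ld (GNU Binutils) 2.24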
+
+
+# _LT_CMD_RELOAD
+# --------------
+# find reload flag for linker
+#   -- PORTME Some linkers may need a different reload flag.
+m4_defun([_LT_CMD_RELOAD],
+[AC_CACHE_CHECK([for $LD option to reload object files],
+  lt_cv_ld_reload_flag,
+  [lt_cv_ld_reload_flag='-r'])
+reload_flag=$lt_cv_ld_reload_flag
+case $reload_flag in
+"" | " "*) ;;
+*) reload_flag=" $reload_flag" ;;
+esac
+reload_cmds='$LD$reload_flag -o $output$reload_objs'
+case $host_os in
+  darwin*)
+    if test "$GCC" = yes; then
+      reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+    else
+      reload_cmds='$LD$reload_flag -o $output$reload_objs'
+    fi
+    ;;
+esac
+_LT_TAGDECL([], [reload_flag], [1], [How to create reloadable object files])dnl
+_LT_TAGDECL([], [reload_cmds], [2])dnl
+])# _LT_CMD_RELOAD
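+
+# The default reload_cmds amount to a partial link, e.g. (hypothetical
+# object names):
+#   ld -r -o output.o first.o second.o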
+
+
+# _LT_CHECK_MAGIC_METHOD
+# ----------------------
+# how to check for library dependencies
+#  -- PORTME fill in with the dynamic library characteristics
+m4_defun([_LT_CHECK_MAGIC_METHOD],
+[m4_require([_LT_DECL_EGREP])
+m4_require([_LT_DECL_OBJDUMP])
+AC_CACHE_CHECK([how to recognize dependent libraries],
+lt_cv_deplibs_check_method,
+[lt_cv_file_magic_cmd='$MAGIC_CMD'
+lt_cv_file_magic_test_file=
+lt_cv_deplibs_check_method='unknown'
+# Need to set the preceding variable on all platforms that support
+# interlibrary dependencies.
+# 'none' -- dependencies not supported.
+# `unknown' -- same as none, but documents that we really don't know.
+# 'pass_all' -- all dependencies passed with no checks.
+# 'test_compile' -- check by making test program.
+# 'file_magic [[regex]]' -- check by looking for files in library path
+# which responds to the $file_magic_cmd with a given extended regex.
+# If you have `file' or equivalent on your system and you're not sure
+# whether `pass_all' will *always* work, you probably want this one.
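+#
+# For example, on a system using the bsdi-style file_magic entry below,
+# `/usr/bin/file -L /shlib/libc.so` would print something like
+# "ELF 32-bit LSB shared object ...", which the extended regex matches.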
+
+case $host_os in
+aix[[4-9]]*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+beos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+bsdi[[45]]*)
+  lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib)'
+  lt_cv_file_magic_cmd='/usr/bin/file -L'
+  lt_cv_file_magic_test_file=/shlib/libc.so
+  ;;
+
+cygwin*)
+  # func_win32_libid is a shell function defined in ltmain.sh
+  lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+  lt_cv_file_magic_cmd='func_win32_libid'
+  ;;
+
+mingw* | pw32*)
+  # Base MSYS/MinGW do not provide the 'file' command needed by
+  # func_win32_libid shell function, so use a weaker test based on 'objdump',
+  # unless we find 'file', for example because we are cross-compiling.
+  # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin.
+  if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then
+    lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+    lt_cv_file_magic_cmd='func_win32_libid'
+  else
+    # Keep this pattern in sync with the one in func_win32_libid.
+    lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+    lt_cv_file_magic_cmd='$OBJDUMP -f'
+  fi
+  ;;
+
+cegcc*)
+  # use the weaker test based on 'objdump'. See mingw*.
+  lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?'
+  lt_cv_file_magic_cmd='$OBJDUMP -f'
+  ;;
+
+darwin* | rhapsody*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+freebsd* | dragonfly*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    case $host_cpu in
+    i*86 )
+      # Not sure whether the presence of OpenBSD here was a mistake.
+      # Let's accept both of them until this is cleared up.
+      lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[[3-9]]86 (compact )?demand paged shared library'
+      lt_cv_file_magic_cmd=/usr/bin/file
+      lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*`
+      ;;
+    esac
+  else
+    lt_cv_deplibs_check_method=pass_all
+  fi
+  ;;
+
+gnu*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+haiku*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+hpux10.20* | hpux11*)
+  lt_cv_file_magic_cmd=/usr/bin/file
+  case $host_cpu in
+  ia64*)
+    lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|ELF-[[0-9]][[0-9]]) shared object file - IA64'
+    lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so
+    ;;
+  hppa*64*)
+    [lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]']
+    lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl
+    ;;
+  *)
+    lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|PA-RISC[[0-9]]\.[[0-9]]) shared library'
+    lt_cv_file_magic_test_file=/usr/lib/libc.sl
+    ;;
+  esac
+  ;;
+
+interix[[3-9]]*)
+  # PIC code is broken on Interix 3.x; that's why it is |\.a, not |_pic\.a, here
+  lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|\.a)$'
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $LD in
+  *-32|*"-32 ") libmagic=32-bit;;
+  *-n32|*"-n32 ") libmagic=N32;;
+  *-64|*"-64 ") libmagic=64-bit;;
+  *) libmagic=never-match;;
+  esac
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+# This must be Linux ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+netbsd*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|_pic\.a)$'
+  fi
+  ;;
+
+newos6*)
+  lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (executable|dynamic lib)'
+  lt_cv_file_magic_cmd=/usr/bin/file
+  lt_cv_file_magic_test_file=/usr/lib/libnls.so
+  ;;
+
+*nto* | *qnx*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+openbsd*)
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
+  fi
+  ;;
+
+osf3* | osf4* | osf5*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+rdos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+solaris*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv4 | sysv4.3*)
+  case $host_vendor in
+  motorola)
+    lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib) M[[0-9]][[0-9]]* Version [[0-9]]'
+    lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*`
+    ;;
+  ncr)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  sequent)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB (shared object|dynamic lib )'
+    ;;
+  sni)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method="file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB dynamic lib"
+    lt_cv_file_magic_test_file=/lib/libc.so
+    ;;
+  siemens)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  pc)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  esac
+  ;;
+
+tpf*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+esac
+])
+file_magic_cmd=$lt_cv_file_magic_cmd
+deplibs_check_method=$lt_cv_deplibs_check_method
+test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+_LT_DECL([], [deplibs_check_method], [1],
+    [Method to check whether dependent libraries are shared objects])
+_LT_DECL([], [file_magic_cmd], [1],
+    [Command to use when deplibs_check_method == "file_magic"])
+])# _LT_CHECK_MAGIC_METHOD
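+
+# Illustrative sketch (simplified; $potlib is a hypothetical candidate
+# dependency path) of how the settings computed above are typically consumed:
+#   case $deplibs_check_method in
+#   file_magic*)
+#     regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
+#     eval "$file_magic_cmd \"\$potlib\"" 2>/dev/null | $EGREP "$regex" >/dev/null
+#     ;;
+#   match_pattern*)
+#     regex=`expr "$deplibs_check_method" : "match_pattern \(.*\)"`
+#     echo "$potlib" | $EGREP "$regex" >/dev/null
+#     ;;
+#   pass_all) : ;;  # accept every candidate without inspection
+#   esac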
+
+
+# LT_PATH_NM
+# ----------
+# find the pathname to a BSD- or MS-compatible name lister
+AC_DEFUN([LT_PATH_NM],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM,
+[if test -n "$NM"; then
+  # Let the user override the test.
+  lt_cv_path_NM="$NM"
+else
+  lt_nm_to_check="${ac_tool_prefix}nm"
+  if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
+    lt_nm_to_check="$lt_nm_to_check nm"
+  fi
+  for lt_tmp_nm in $lt_nm_to_check; do
+    lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+    for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
+      IFS="$lt_save_ifs"
+      test -z "$ac_dir" && ac_dir=.
+      tmp_nm="$ac_dir/$lt_tmp_nm"
+      if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then
+	# Check to see if this nm accepts a BSD-compatible flag.
+	# Adding the `sed 1q' prevents false positives on HP-UX, which says:
+	#   nm: unknown option "B" ignored
+	# Tru64's nm complains that /dev/null is an invalid object file
+	case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in
+	*/dev/null* | *'Invalid file or object type'*)
+	  lt_cv_path_NM="$tmp_nm -B"
+	  break
+	  ;;
+	*)
+	  case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
+	  */dev/null*)
+	    lt_cv_path_NM="$tmp_nm -p"
+	    break
+	    ;;
+	  *)
+	    lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
+	    continue # so that we can try to find one that supports BSD flags
+	    ;;
+	  esac
+	  ;;
+	esac
+      fi
+    done
+    IFS="$lt_save_ifs"
+  done
+  : ${lt_cv_path_NM=no}
+fi])
+if test "$lt_cv_path_NM" != "no"; then
+  NM="$lt_cv_path_NM"
+else
+  # Didn't find any BSD-compatible name lister; look for dumpbin.
+  if test -n "$DUMPBIN"; then :
+    # Let the user override the test.
+  else
+    AC_CHECK_TOOLS(DUMPBIN, [dumpbin "link -dump"], :)
+    case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in
+    *COFF*)
+      DUMPBIN="$DUMPBIN -symbols"
+      ;;
+    *)
+      DUMPBIN=:
+      ;;
+    esac
+  fi
+  AC_SUBST([DUMPBIN])
+  if test "$DUMPBIN" != ":"; then
+    NM="$DUMPBIN"
+  fi
+fi
+test -z "$NM" && NM=nm
+AC_SUBST([NM])
+_LT_DECL([], [NM], [1], [A BSD- or MS-compatible name lister])dnl
+
+AC_CACHE_CHECK([the name lister ($NM) interface], [lt_cv_nm_interface],
+  [lt_cv_nm_interface="BSD nm"
+  echo "int some_variable = 0;" > conftest.$ac_ext
+  (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&AS_MESSAGE_LOG_FD)
+  (eval "$ac_compile" 2>conftest.err)
+  cat conftest.err >&AS_MESSAGE_LOG_FD
+  (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&AS_MESSAGE_LOG_FD)
+  (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
+  cat conftest.err >&AS_MESSAGE_LOG_FD
+  (eval echo "\"\$as_me:$LINENO: output\"" >&AS_MESSAGE_LOG_FD)
+  cat conftest.out >&AS_MESSAGE_LOG_FD
+  if $GREP 'External.*some_variable' conftest.out > /dev/null; then
+    lt_cv_nm_interface="MS dumpbin"
+  fi
+  rm -r -f conftest*])
+])# LT_PATH_NM
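+
+# Illustrative output formats (assumed): a BSD-compatible nm prints lines like
+#   0000000000000004 D some_variable
+# while 'dumpbin -symbols' prints lines like
+#   008 00000000 SECT3  notype  External     | some_variable
+# which is why the cache check above greps for 'External.*some_variable'.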
+
+# Old names:
+AU_ALIAS([AM_PROG_NM], [LT_PATH_NM])
+AU_ALIAS([AC_PROG_NM], [LT_PATH_NM])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_PROG_NM], [])
+dnl AC_DEFUN([AC_PROG_NM], [])
+
+
+# LT_LIB_M
+# --------
+# check for math library
+AC_DEFUN([LT_LIB_M],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+LIBM=
+case $host in
+*-*-beos* | *-*-cegcc* | *-*-cygwin* | *-*-haiku* | *-*-pw32* | *-*-darwin*)
+  # These systems don't have libm, or don't need it
+  ;;
+*-ncr-sysv4.3*)
+  AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM="-lmw")
+  AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm")
+  ;;
+*)
+  AC_CHECK_LIB(m, cos, LIBM="-lm")
+  ;;
+esac
+AC_SUBST([LIBM])
+])# LT_LIB_M
+
+# Old name:
+AU_ALIAS([AC_CHECK_LIBM], [LT_LIB_M])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_CHECK_LIBM], [])
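+
+# Illustrative usage (assumed configure.ac / Makefile.am; myprog is a
+# hypothetical target): after invoking LT_LIB_M, link math-using targets
+# against the substituted variable, e.g.
+#   myprog_LDADD = $(LIBM)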
+
+
+# _LT_COMPILER_NO_RTTI([TAGNAME])
+# -------------------------------
+m4_defun([_LT_COMPILER_NO_RTTI],
+[m4_require([_LT_TAG_COMPILER])dnl
+
+_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
+
+if test "$GCC" = yes; then
+  case $cc_basename in
+  nvcc*)
+    _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -Xcompiler -fno-builtin' ;;
+  *)
+    _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' ;;
+  esac
+
+  _LT_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions],
+    lt_cv_prog_compiler_rtti_exceptions,
+    [-fno-rtti -fno-exceptions], [],
+    [_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)="$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) -fno-rtti -fno-exceptions"])
+fi
+_LT_TAGDECL([no_builtin_flag], [lt_prog_compiler_no_builtin_flag], [1],
+	[Compiler flag to turn off builtin functions])
+])# _LT_COMPILER_NO_RTTI
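+
+# Illustrative effect (assumed GCC-style driver): with the flag recorded
+# above, the option probe compiles a trivial source along the lines of
+#   $CC -c -fno-builtin -fno-rtti -fno-exceptions conftest.c
+# and keeps the flags only if the compiler emits no complaints.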
+
+
+# _LT_CMD_GLOBAL_SYMBOLS
+# ----------------------
+m4_defun([_LT_CMD_GLOBAL_SYMBOLS],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([AC_PROG_AWK])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+AC_REQUIRE([LT_PATH_LD])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+
+# Check for command to grab the raw symbol name followed by C symbol from nm.
+AC_MSG_CHECKING([command to parse $NM output from $compiler object])
+AC_CACHE_VAL([lt_cv_sys_global_symbol_pipe],
+[
+# These are sane defaults that work on at least a few old systems.
+# [They come from Ultrix.  What could be older than Ultrix?!! ;)]
+
+# Character class describing NM global symbol codes.
+symcode='[[BCDEGRST]]'
+
+# Regexp to match symbols that can be accessed directly from C.
+sympat='\([[_A-Za-z]][[_A-Za-z0-9]]*\)'
+
+# Define system-specific variables.
+case $host_os in
+aix*)
+  symcode='[[BCDT]]'
+  ;;
+cygwin* | mingw* | pw32* | cegcc*)
+  symcode='[[ABCDGISTW]]'
+  ;;
+hpux*)
+  if test "$host_cpu" = ia64; then
+    symcode='[[ABCDEGRST]]'
+  fi
+  ;;
+irix* | nonstopux*)
+  symcode='[[BCDEGRST]]'
+  ;;
+osf*)
+  symcode='[[BCDEGQRST]]'
+  ;;
+solaris*)
+  symcode='[[BDRT]]'
+  ;;
+sco3.2v5*)
+  symcode='[[DT]]'
+  ;;
+sysv4.2uw2*)
+  symcode='[[DT]]'
+  ;;
+sysv5* | sco5v6* | unixware* | OpenUNIX*)
+  symcode='[[ABDT]]'
+  ;;
+sysv4)
+  symcode='[[DFNSTU]]'
+  ;;
+esac
+
+# If we're using GNU nm, then use its standard symbol codes.
+case `$NM -V 2>&1` in
+*GNU* | *'with BFD'*)
+  symcode='[[ABCDGIRSTW]]' ;;
+esac
+
+# Transform an extracted symbol line into a proper C declaration.
+# Some systems (esp. on ia64) link data and code symbols differently,
+# so use this general approach.
+lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+# Transform an extracted symbol line into symbol name and symbol address
+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\) $/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/  {\"\2\", (void *) \&\2},/p'"
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\) $/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/  {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/  {\"lib\2\", (void *) \&\2},/p'"
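+
+# Worked example (illustrative): a BSD-nm line such as
+#   0000000000000004 D some_variable
+# is rewritten by the symbol pipe built below to "D some_variable some_variable";
+# global_symbol_to_cdecl then emits
+#   extern char some_variable;
+# and global_symbol_to_c_name_address emits
+#   {"some_variable", (void *) &some_variable},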
+
+# Handle CRLF in the mingw toolchain
+opt_cr=
+case $build_os in
+mingw*)
+  opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp
+  ;;
+esac
+
+# Try without a prefix underscore, then with it.
+for ac_symprfx in "" "_"; do
+
+  # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol.
+  symxfrm="\\1 $ac_symprfx\\2 \\2"
+
+  # Write the raw and C identifiers.
+  if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+    # Fake it for dumpbin and say T for any non-static function
+    # and D for any global variable.
+    # Also find C++ and __fastcall symbols from MSVC++,
+    # which start with @ or ?.
+    lt_cv_sys_global_symbol_pipe="$AWK ['"\
+"     {last_section=section; section=\$ 3};"\
+"     /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\
+"     \$ 0!~/External *\|/{next};"\
+"     / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\
+"     {if(hide[section]) next};"\
+"     {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\
+"     {split(\$ 0, a, /\||\r/); split(a[2], s)};"\
+"     s[1]~/^[@?]/{print s[1], s[1]; next};"\
+"     s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\
+"     ' prfx=^$ac_symprfx]"
+  else
+    lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[	 ]]\($symcode$symcode*\)[[	 ]][[	 ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+  fi
+
+  # Check to see that the pipe works correctly.
+  pipe_works=no
+
+  rm -r -f conftest*
+  cat > conftest.$ac_ext <<_LT_EOF
+#ifdef __cplusplus
+extern "C" {
+#endif
+char nm_test_var;
+void nm_test_func(void);
+void nm_test_func(void){}
+#ifdef __cplusplus
+}
+#endif
+int main(){nm_test_var='a';nm_test_func();return(0);}
+_LT_EOF
+
+  if AC_TRY_EVAL(ac_compile); then
+    # Now try to grab the symbols.
+    nlist=conftest.nm
+    if AC_TRY_EVAL(NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) && test -s "$nlist"; then
+      # Try sorting and uniquifying the output.
+      if sort "$nlist" | uniq > "$nlist"T; then
+	mv -f "$nlist"T "$nlist"
+      else
+	rm -f "$nlist"T
+      fi
+
+      # Make sure that we snagged all the symbols we need.
+      if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+	if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+	  cat <<_LT_EOF > conftest.$ac_ext
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+_LT_EOF
+	  # Now generate the symbol file.
+	  eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext'
+
+	  cat <<_LT_EOF >> conftest.$ac_ext
+
+/* The mapping between symbol names and symbols.  */
+const struct {
+  const char *name;
+  void       *address;
+}
+lt__PROGRAM__LTX_preloaded_symbols[[]] =
+{
+  { "@PROGRAM@", (void *) 0 },
+_LT_EOF
+	  $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/  {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext
+	  cat <<\_LT_EOF >> conftest.$ac_ext
+  {0, (void *) 0}
+};
+
+/* This works around a problem in FreeBSD linker */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+  return lt__PROGRAM__LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+_LT_EOF
+	  # Now try linking the two files.
+	  mv conftest.$ac_objext conftstm.$ac_objext
+	  lt_save_LIBS="$LIBS"
+	  lt_save_CFLAGS="$CFLAGS"
+	  LIBS="conftstm.$ac_objext"
+	  CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)"
+	  if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then
+	    pipe_works=yes
+	  fi
+	  LIBS="$lt_save_LIBS"
+	  CFLAGS="$lt_save_CFLAGS"
+	else
+	  echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD
+	fi
+      else
+	echo "cannot find nm_test_var in $nlist" >&AS_MESSAGE_LOG_FD
+      fi
+    else
+      echo "cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD
+    fi
+  else
+    echo "$progname: failed program was:" >&AS_MESSAGE_LOG_FD
+    cat conftest.$ac_ext >&5
+  fi
+  rm -rf conftest* conftst*
+
+  # Do not use the global_symbol_pipe unless it works.
+  if test "$pipe_works" = yes; then
+    break
+  else
+    lt_cv_sys_global_symbol_pipe=
+  fi
+done
+])
+if test -z "$lt_cv_sys_global_symbol_pipe"; then
+  lt_cv_sys_global_symbol_to_cdecl=
+fi
+if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then
+  AC_MSG_RESULT(failed)
+else
+  AC_MSG_RESULT(ok)
+fi
+
+_LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1],
+    [Take the output of nm and produce a listing of raw symbols and C names])
+_LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1],
+    [Transform the output of nm into a proper C declaration])
+_LT_DECL([global_symbol_to_c_name_address],
+    [lt_cv_sys_global_symbol_to_c_name_address], [1],
+    [Transform the output of nm into a C name/address pair])
+_LT_DECL([global_symbol_to_c_name_address_lib_prefix],
+    [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1],
+    [Transform the output of nm into a C name/address pair when a lib prefix is needed])
+]) # _LT_CMD_GLOBAL_SYMBOLS
+
+
+# _LT_COMPILER_PIC([TAGNAME])
+# ---------------------------
+m4_defun([_LT_COMPILER_PIC],
+[m4_require([_LT_TAG_COMPILER])dnl
+_LT_TAGVAR(lt_prog_compiler_wl, $1)=
+_LT_TAGVAR(lt_prog_compiler_pic, $1)=
+_LT_TAGVAR(lt_prog_compiler_static, $1)=
+
+AC_MSG_CHECKING([for $compiler option to produce PIC])
+m4_if([$1], [CXX], [
+  # C++ specific cases for pic, static, wl, etc.
+  if test "$GXX" = yes; then
+    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+
+    case $host_os in
+    aix*)
+      # All AIX code is PIC.
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+        ;;
+      m68k)
+            # FIXME: we need at least 68020 code to build shared libraries, but
+            # adding the `-m68020' flag to GCC prevents building anything better,
+            # like `-m68040'.
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
+        ;;
+      esac
+      ;;
+
+    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+      # PIC is the default for these OSes.
+      ;;
+    mingw* | cygwin* | os2* | pw32* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a DLL (and should, for example, export symbols).
+      # Although the cygwin gcc ignores -fPIC, we still need this for old-style
+      # (--disable-auto-import) libraries
+      m4_if([$1], [GCJ], [],
+	[_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      ;;
+    darwin* | rhapsody*)
+      # PIC is the default on this platform
+      # Common symbols not allowed in MH_DYLIB files
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+      ;;
+    *djgpp*)
+      # DJGPP does not support shared libraries at all
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+      ;;
+    haiku*)
+      # PIC is the default for Haiku.
+      # The "-static" flag exists, but is broken.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)=
+      ;;
+    interix[[3-9]]*)
+      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+      # Instead, we relocate shared libraries at runtime.
+      ;;
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
+      fi
+      ;;
+    hpux*)
+      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
+      # sets the default TLS model and affects inlining.
+      case $host_cpu in
+      hppa*64*)
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	;;
+      esac
+      ;;
+    *qnx* | *nto*)
+      # QNX uses GNU C++, but we need to define the -shared option too;
+      # otherwise it will core dump.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+    *)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+      ;;
+    esac
+  else
+    case $host_os in
+      aix[[4-9]]*)
+	# All AIX code is PIC.
+	if test "$host_cpu" = ia64; then
+	  # AIX 5 now supports IA64 processor
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	else
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
+	fi
+	;;
+      chorus*)
+	case $cc_basename in
+	cxch68*)
+	  # Green Hills C++ Compiler
+	  # _LT_TAGVAR(lt_prog_compiler_static, $1)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a"
+	  ;;
+	esac
+	;;
+      dgux*)
+	case $cc_basename in
+	  ec++*)
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    ;;
+	  ghcx*)
+	    # Green Hills C++ Compiler
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      freebsd* | dragonfly*)
+	# FreeBSD uses GNU C++
+	;;
+      hpux9* | hpux10* | hpux11*)
+	case $cc_basename in
+	  CC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+	    if test "$host_cpu" != ia64; then
+	      _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+	    fi
+	    ;;
+	  aCC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+	    case $host_cpu in
+	    hppa*64*|ia64*)
+	      # +Z the default
+	      ;;
+	    *)
+	      _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+	      ;;
+	    esac
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      interix*)
+	# This is c89, which is MS Visual C++ (no shared libs)
+	# Does anyone want to do a port?
+	;;
+      irix5* | irix6* | nonstopux*)
+	case $cc_basename in
+	  CC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+	    # CC pic flag -KPIC is the default.
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      linux* | k*bsd*-gnu | kopensolaris*-gnu)
+	case $cc_basename in
+	  KCC*)
+	    # KAI C++ Compiler
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	    ;;
+	  ecpc* )
+	    # old Intel C++ for x86_64 which still supported -KPIC.
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+	    ;;
+	  icpc* )
+	    # Intel C++, used to be incompatible with GCC.
+	    # ICC 10 doesn't accept -KPIC any more.
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+	    ;;
+	  pgCC* | pgcpp*)
+	    # Portland Group C++ compiler
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    ;;
+	  cxx*)
+	    # Compaq C++
+	    # Make sure the PIC flag is empty.  It appears that all Alpha
+	    # Linux and Compaq Tru64 Unix objects are PIC.
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+	    ;;
+	  xlc* | xlC* | bgxl[[cC]]* | mpixl[[cC]]*)
+	    # IBM XL 8.0, 9.0 on PPC and BlueGene
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
+	    ;;
+	  *)
+	    case `$CC -V 2>&1 | sed 5q` in
+	    *Sun\ C*)
+	      # Sun C++ 5.9
+	      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+	      ;;
+	    esac
+	    ;;
+	esac
+	;;
+      lynxos*)
+	;;
+      m88k*)
+	;;
+      mvs*)
+	case $cc_basename in
+	  cxx*)
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-W c,exportall'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      netbsd*)
+	;;
+      *qnx* | *nto*)
+        # QNX uses GNU C++, but we need to define the -shared option too;
+        # otherwise it will core dump.
+        _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+        ;;
+      osf3* | osf4* | osf5*)
+	case $cc_basename in
+	  KCC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
+	    ;;
+	  RCC*)
+	    # Rational C++ 2.4.1
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    ;;
+	  cxx*)
+	    # Digital/Compaq C++
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    # Make sure the PIC flag is empty.  It appears that all Alpha
+	    # Linux and Compaq Tru64 Unix objects are PIC.
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      psos*)
+	;;
+      solaris*)
+	case $cc_basename in
+	  CC* | sunCC*)
+	    # Sun C++ 4.2, 5.x and Centerline C++
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+	    ;;
+	  gcx*)
+	    # Green Hills C++ Compiler
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      sunos4*)
+	case $cc_basename in
+	  CC*)
+	    # Sun C++ 4.x
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    ;;
+	  lcc*)
+	    # Lucid
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+	case $cc_basename in
+	  CC*)
+	    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	    ;;
+	esac
+	;;
+      tandem*)
+	case $cc_basename in
+	  NCC*)
+	    # NonStop-UX NCC 3.20
+	    _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	    ;;
+	  *)
+	    ;;
+	esac
+	;;
+      vxworks*)
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+	;;
+    esac
+  fi
+],
+[
+  if test "$GCC" = yes; then
+    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+
+    case $host_os in
+      aix*)
+      # All AIX code is PIC.
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+        ;;
+      m68k)
+            # FIXME: we need at least 68020 code to build shared libraries, but
+            # adding the `-m68020' flag to GCC prevents building anything better,
+            # like `-m68040'.
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
+        ;;
+      esac
+      ;;
+
+    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+      # PIC is the default for these OSes.
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a DLL (and should, for example, export symbols).
+      # Although the cygwin gcc ignores -fPIC, we still need this for old-style
+      # (--disable-auto-import) libraries
+      m4_if([$1], [GCJ], [],
+	[_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      ;;
+
+    darwin* | rhapsody*)
+      # PIC is the default on this platform
+      # Common symbols not allowed in MH_DYLIB files
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+      ;;
+
+    haiku*)
+      # PIC is the default for Haiku.
+      # The "-static" flag exists, but is broken.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)=
+      ;;
+
+    hpux*)
+      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
+      # sets the default TLS model and affects inlining.
+      case $host_cpu in
+      hppa*64*)
+	# +Z the default
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	;;
+      esac
+      ;;
+
+    interix[[3-9]]*)
+      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+      # Instead, we relocate shared libraries at runtime.
+      ;;
+
+    msdosdjgpp*)
+      # Just because we use GCC doesn't mean we suddenly get shared libraries
+      # on systems that don't support them.
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      enable_shared=no
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but we need to define the -shared option too;
+      # otherwise it will core dump.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
+      fi
+      ;;
+
+    *)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+      ;;
+    esac
+
+    case $cc_basename in
+    nvcc*) # Cuda Compiler Driver 2.2
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Xlinker '
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-Xcompiler -fPIC'
+      ;;
+    esac
+  else
+    # PORTME Check for flag to pass linker flags through the system compiler.
+    case $host_os in
+    aix*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      else
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
+      fi
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a DLL (and should, for example, export symbols).
+      m4_if([$1], [GCJ], [],
+	[_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      ;;
+
+    hpux9* | hpux10* | hpux11*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
+      # not for PA HP-UX.
+      case $host_cpu in
+      hppa*64*|ia64*)
+	# +Z the default
+	;;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+	;;
+      esac
+      # Is there a better lt_prog_compiler_static that works with the bundled CC?
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive'
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # PIC (with -KPIC) is the default.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    linux* | k*bsd*-gnu | kopensolaris*-gnu)
+      case $cc_basename in
+      # old Intel for x86_64 which still supported -KPIC.
+      ecc*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+        ;;
+      # icc used to be incompatible with GCC.
+      # ICC 10 doesn't accept -KPIC any more.
+      icc* | ifort*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+        ;;
+      # Lahey Fortran 8.1.
+      lf95*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='--static'
+	;;
+      pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+        # Portland Group compilers (*not* the Pentium gcc compiler,
+	# which looks to be a dead project)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+        ;;
+      ccc*)
+        _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+        # All Alpha code is PIC.
+        _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+        ;;
+      xl* | bgxl* | bgf* | mpixl*)
+	# IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
+	;;
+      *)
+	case `$CC -V 2>&1 | sed 5q` in
+	*Sun\ F* | *Sun*Fortran*)
+	  # Sun Fortran 8.3 passes all unrecognized flags to the linker
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)=''
+	  ;;
+	*Sun\ C*)
+	  # Sun C 5.9
+	  _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+	  _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+	  _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+	  ;;
+	esac
+	;;
+      esac
+      ;;
+
+    newsos6)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but we need to define the -shared option too;
+      # otherwise it will core dump.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+
+    osf3* | osf4* | osf5*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # All OSF/1 code is PIC.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    rdos*)
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    solaris*)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      case $cc_basename in
+      f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';;
+      *)
+	_LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';;
+      esac
+      ;;
+
+    sunos4*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    sysv4 | sysv4.2uw2* | sysv4.3*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec ;then
+	_LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic'
+	_LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      ;;
+
+    sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    unicos*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      ;;
+
+    uts4*)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    *)
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      ;;
+    esac
+  fi
+])
+case $host_os in
+  # For platforms which do not support PIC, -DPIC is meaningless:
+  *djgpp*)
+    _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+    ;;
+  *)
+    _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t at m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])"
+    ;;
+esac
+AC_MSG_RESULT([$_LT_TAGVAR(lt_prog_compiler_pic, $1)])
+_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1],
+	[How to pass a linker flag through the compiler])
+
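+# Illustrative expansion (assumed GCC-style toolchain; libfoo.so.1 is a
+# hypothetical soname): with lt_prog_compiler_wl='-Wl,' a command template
+# such as
+#   ${wl}-soname ${wl}$soname
+# expands to
+#   -Wl,-soname -Wl,libfoo.so.1
+# so the compiler driver forwards both tokens to the linker.
+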
+#
+# Check to make sure the PIC flag actually works.
+#
+if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then
+  _LT_COMPILER_OPTION([if $compiler PIC flag $_LT_TAGVAR(lt_prog_compiler_pic, $1) works],
+    [_LT_TAGVAR(lt_cv_prog_compiler_pic_works, $1)],
+    [$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])], [],
+    [case $_LT_TAGVAR(lt_prog_compiler_pic, $1) in
+     "" | " "*) ;;
+     *) _LT_TAGVAR(lt_prog_compiler_pic, $1)=" $_LT_TAGVAR(lt_prog_compiler_pic, $1)" ;;
+     esac],
+    [_LT_TAGVAR(lt_prog_compiler_pic, $1)=
+     _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no])
+fi
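+# Illustrative probe (assumed GCC, simplified): _LT_COMPILER_OPTION compiles
+# a trivial source with the candidate flag added, e.g.
+#   $CC -c -fPIC -DPIC conftest.c
+# and records success only if no new diagnostics appear on stderr.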
+_LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1],
+	[Additional compiler flags for building library objects])
+
+#
+# Check to make sure the static flag actually works.
+#
+wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) eval lt_tmp_static_flag=\"$_LT_TAGVAR(lt_prog_compiler_static, $1)\"
+_LT_LINKER_OPTION([if $compiler static flag $lt_tmp_static_flag works],
+  _LT_TAGVAR(lt_cv_prog_compiler_static_works, $1),
+  $lt_tmp_static_flag,
+  [],
+  [_LT_TAGVAR(lt_prog_compiler_static, $1)=])
+_LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1],
+	[Compiler flag to prevent dynamic linking])
+])# _LT_COMPILER_PIC
+
+
+# _LT_LINKER_SHLIBS([TAGNAME])
+# ----------------------------
+# See if the linker supports building shared libraries.
+m4_defun([_LT_LINKER_SHLIBS],
+[AC_REQUIRE([LT_PATH_LD])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+m4_if([$1], [CXX], [
+  _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+  case $host_os in
+  aix[[4-9]]*)
+    # If we're using GNU nm, then we don't want the "-C" option.
+    # To AIX nm, -C means demangle; to GNU nm, it means don't demangle.
+    # Also, AIX nm treats weak defined symbols like other global defined
+    # symbols, whereas GNU nm marks them as "W".
+    if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+    else
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+    fi
+    ;;
+  pw32*)
+    _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds"
+  ;;
+  cygwin* | mingw* | cegcc*)
+    _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;/^.*[[ ]]__nm__/s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
+  ;;
+  *)
+    _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+  ;;
+  esac
+  _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+], [
+  runpath_var=
+  _LT_TAGVAR(allow_undefined_flag, $1)=
+  _LT_TAGVAR(always_export_symbols, $1)=no
+  _LT_TAGVAR(archive_cmds, $1)=
+  _LT_TAGVAR(archive_expsym_cmds, $1)=
+  _LT_TAGVAR(compiler_needs_object, $1)=no
+  _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+  _LT_TAGVAR(export_dynamic_flag_spec, $1)=
+  _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+  _LT_TAGVAR(hardcode_automatic, $1)=no
+  _LT_TAGVAR(hardcode_direct, $1)=no
+  _LT_TAGVAR(hardcode_direct_absolute, $1)=no
+  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+  _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)=
+  _LT_TAGVAR(hardcode_libdir_separator, $1)=
+  _LT_TAGVAR(hardcode_minus_L, $1)=no
+  _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+  _LT_TAGVAR(inherit_rpath, $1)=no
+  _LT_TAGVAR(link_all_deplibs, $1)=unknown
+  _LT_TAGVAR(module_cmds, $1)=
+  _LT_TAGVAR(module_expsym_cmds, $1)=
+  _LT_TAGVAR(old_archive_from_new_cmds, $1)=
+  _LT_TAGVAR(old_archive_from_expsyms_cmds, $1)=
+  _LT_TAGVAR(thread_safe_flag_spec, $1)=
+  _LT_TAGVAR(whole_archive_flag_spec, $1)=
+  # include_expsyms should be a list of space-separated symbols to be *always*
+  # included in the symbol list
+  _LT_TAGVAR(include_expsyms, $1)=
+  # exclude_expsyms can be an extended regexp of symbols to exclude
+  # it will be wrapped by ` (' and `)$', so one must not match beginning or
+  # end of line.  Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc',
+  # as well as any symbol that contains `d'.
+  _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+  # Although _GLOBAL_OFFSET_TABLE_ is a valid C symbol name, most a.out
+  # platforms (ab)use it in PIC code, but their linkers get confused if
+  # the symbol is explicitly referenced.  Since portable code cannot
+  # rely on this symbol name, it's probably fine to never include it in
+  # preloaded symbol tables.
+  # Exclude shared library initialization/finalization symbols.
+dnl Note also adjust exclude_expsyms for C++ above.
+  extract_expsyms_cmds=
+
+  case $host_os in
+  cygwin* | mingw* | pw32* | cegcc*)
+    # FIXME: the MSVC++ port hasn't been tested in a loooong time
+    # When not using gcc, we currently assume that we are using
+    # Microsoft Visual C++.
+    if test "$GCC" != yes; then
+      with_gnu_ld=no
+    fi
+    ;;
+  interix*)
+    # we just hope/assume this is gcc and not c89 (= MSVC++)
+    with_gnu_ld=yes
+    ;;
+  openbsd*)
+    with_gnu_ld=no
+    ;;
+  esac
+
+  _LT_TAGVAR(ld_shlibs, $1)=yes
+
+  # On some targets, GNU ld is compatible enough with the native linker
+  # that we're better off using the native interface for both.
+  lt_use_gnu_ld_interface=no
+  if test "$with_gnu_ld" = yes; then
+    case $host_os in
+      aix*)
+	# The AIX port of GNU ld has always aspired to compatibility
+	# with the native linker.  However, as the warning in the GNU ld
+	# block says, versions before 2.19.5* couldn't really create working
+	# shared libraries, regardless of the interface used.
+	case `$LD -v 2>&1` in
+	  *\ \(GNU\ Binutils\)\ 2.19.5*) ;;
+	  *\ \(GNU\ Binutils\)\ 2.[[2-9]]*) ;;
+	  *\ \(GNU\ Binutils\)\ [[3-9]]*) ;;
+	  *)
+	    lt_use_gnu_ld_interface=yes
+	    ;;
+	esac
+	;;
+      *)
+	lt_use_gnu_ld_interface=yes
+	;;
+    esac
+  fi
+
+  if test "$lt_use_gnu_ld_interface" = yes; then
+    # If archive_cmds runs LD, not CC, wlarc should be empty
+    wlarc='${wl}'
+
+    # Set some defaults for GNU ld with shared library support. These
+    # are reset later if shared libraries are not supported. Putting them
+    # here allows them to be overridden if necessary.
+    runpath_var=LD_RUN_PATH
+    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+    # Ancient GNU ld didn't support --whole-archive et al.
+    if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
+      _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+    else
+      _LT_TAGVAR(whole_archive_flag_spec, $1)=
+    fi
+    supports_anon_versioning=no
+    case `$LD -v 2>&1` in
+      *GNU\ gold*) supports_anon_versioning=yes ;;
+      *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11
+      *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
+      *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
+      *\ 2.11.*) ;; # other 2.11 versions
+      *) supports_anon_versioning=yes ;;
+    esac
+
+    # See if GNU ld supports shared libraries.
+    case $host_os in
+    aix[[3-9]]*)
+      # On AIX/PPC, the GNU linker is very broken
+      if test "$host_cpu" != ia64; then
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: the GNU linker, at least up to release 2.19, is reported
+*** to be unable to reliably create shared libraries on AIX.
+*** Therefore, libtool is disabling shared libraries support.  If you
+*** really care for shared libraries, you may want to install binutils
+*** 2.20 or above, or modify your PATH so that a non-GNU linker is found.
+*** You will then need to restart the configuration process.
+
+_LT_EOF
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+            _LT_TAGVAR(archive_expsym_cmds, $1)=''
+        ;;
+      m68k)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes
+        ;;
+      esac
+      ;;
+
+    beos*)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	_LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	# Joseph Beckenbach <jrb3@best.com> says some releases of gcc
+	# support --undefined.  This deserves some investigation.  FIXME
+	_LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+      # as there is no search path for DLLs.
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols'
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      _LT_TAGVAR(always_export_symbols, $1)=no
+      _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols'
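+      # Worked example (illustrative): a symbol-pipe line such as
+      #   B some_bss_var some_bss_var
+      # ends up in $export_symbols as "some_bss_var DATA", while
+      #   T some_func some_func
+      # ends up as plain "some_func".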
+
+      if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+	# If the export-symbols file already is a .def file (1st line
+	# is EXPORTS), use it as is; otherwise, prepend...
+	_LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	  cp $export_symbols $output_objdir/$soname.def;
+	else
+	  echo EXPORTS > $output_objdir/$soname.def;
+	  cat $export_symbols >> $output_objdir/$soname.def;
+	fi~
+	$CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    haiku*)
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    interix[[3-9]]*)
+      _LT_TAGVAR(hardcode_direct, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+      # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+      # Instead, shared libraries are loaded at an image base (0x10000000 by
+      # default) and relocated if they conflict, which is a slow, very
+      # memory-consuming and fragmenting process.  To avoid this, we pick a random,
+      # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+      # time.  Moving up from 0x10000000 also allows more sbrk(2) space.
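+      # Worked arithmetic for the image-base expression below:
+      # ${RANDOM-$$} % 4096 / 2 yields 0..2047; multiplied by 262144
+      # (0x40000, i.e. 256 KiB) that gives offsets 0..0x1FFC0000; adding
+      # 1342177280 (0x50000000) lands in 0x50000000..0x6FFC0000, aligned.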
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      ;;
+
+    gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
+      tmp_diet=no
+      if test "$host_os" = linux-dietlibc; then
+	case $cc_basename in
+	  diet\ *) tmp_diet=yes;;	# linux-dietlibc with static linking (!diet-dyn)
+	esac
+      fi
+      if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
+	 && test "$tmp_diet" = no
+      then
+	tmp_addflag=
+	tmp_sharedflag='-shared'
+	case $cc_basename,$host_cpu in
+        pgcc*)				# Portland Group C compiler
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  tmp_addflag=' $pic_flag'
+	  ;;
+	pgf77* | pgf90* | pgf95* | pgfortran*)
+					# Portland Group f77 and f90 compilers
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  tmp_addflag=' $pic_flag -Mnomain' ;;
+	ecc*,ia64* | icc*,ia64*)	# Intel C compiler on ia64
+	  tmp_addflag=' -i_dynamic' ;;
+	efc*,ia64* | ifort*,ia64*)	# Intel Fortran compiler on ia64
+	  tmp_addflag=' -i_dynamic -nofor_main' ;;
+	ifc* | ifort*)			# Intel Fortran compiler
+	  tmp_addflag=' -nofor_main' ;;
+	lf95*)				# Lahey Fortran 8.1
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)=
+	  tmp_sharedflag='--shared' ;;
+	xl[[cC]]* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below)
+	  tmp_sharedflag='-qmkshrobj'
+	  tmp_addflag= ;;
+	nvcc*)	# Cuda Compiler Driver 2.2
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  _LT_TAGVAR(compiler_needs_object, $1)=yes
+	  ;;
+	esac
+	case `$CC -V 2>&1 | sed 5q` in
+	*Sun\ C*)			# Sun C 5.9
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  _LT_TAGVAR(compiler_needs_object, $1)=yes
+	  tmp_sharedflag='-G' ;;
+	*Sun\ F*)			# Sun Fortran 8.3
+	  tmp_sharedflag='-G' ;;
+	esac
+	_LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+
+        if test "x$supports_anon_versioning" = xyes; then
+          _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+	    cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+	    echo "local: *; };" >> $output_objdir/$libname.ver~
+	    $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
+        fi
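+        # Illustrative result (hypothetical exports foo and bar): the
+        # generated $output_objdir/$libname.ver then contains
+        #   { global:
+        #   foo;
+        #   bar;
+        #   local: *; };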
+
+	case $cc_basename in
+	xlf* | bgf* | bgxlf* | mpixlf*)
+	  # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive'
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+	  _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='-rpath $libdir'
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+	  if test "x$supports_anon_versioning" = xyes; then
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+	      cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+	      echo "local: *; };" >> $output_objdir/$libname.ver~
+	      $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+	  fi
+	  ;;
+	esac
+      else
+        _LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    netbsd*)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+	wlarc=
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      fi
+      ;;
+
+    solaris*)
+      if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: The releases 2.8.* of the GNU linker cannot reliably
+*** create shared libraries on Solaris systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.9.1 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+      elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
+      case `$LD -v 2>&1` in
+        *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.1[[0-5]].*)
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot
+*** reliably create shared libraries on SCO systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.16.91.0.3 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+	;;
+	*)
+	  # For security reasons, it is highly recommended that you always
+	  # use absolute paths for naming shared libraries, and exclude the
+	  # DT_RUNPATH tag from executables and libraries.  But doing so
+	  # requires that you compile everything twice, which is a pain.
+	  if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+	  else
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	  fi
+	;;
+      esac
+      ;;
+
+    sunos4*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      wlarc=
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+    esac
+
+    if test "$_LT_TAGVAR(ld_shlibs, $1)" = no; then
+      runpath_var=
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)=
+      _LT_TAGVAR(whole_archive_flag_spec, $1)=
+    fi
+  else
+    # PORTME fill in a description of your system's linker (not GNU ld)
+    case $host_os in
+    aix3*)
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      _LT_TAGVAR(always_export_symbols, $1)=yes
+      _LT_TAGVAR(archive_expsym_cmds, $1)='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
+      # Note: this linker hardcodes the directories in LIBPATH if there
+      # are no directories specified by -L.
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then
+	# Neither direct hardcoding nor static linking is supported with a
+	# broken collect2.
+	_LT_TAGVAR(hardcode_direct, $1)=unsupported
+      fi
+      ;;
+
+    aix[[4-9]]*)
+      if test "$host_cpu" = ia64; then
+	# On IA64, the linker does run time linking by default, so we don't
+	# have to do anything special.
+	aix_use_runtimelinking=no
+	exp_sym_flag='-Bexport'
+	no_entry_flag=""
+      else
+	# If we're using GNU nm, then we don't want the "-C" option.
+	# To AIX nm, -C means demangle; to GNU nm, it means don't demangle.
+	# Also, AIX nm treats weak defined symbols like other global
+	# defined symbols, whereas GNU nm marks them as "W".
+	if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+	  _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+	else
+	  _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+	fi
+	aix_use_runtimelinking=no
+
+	# Test if we are trying to use run time linking or normal
+	# AIX style linking. If -brtl is somewhere in LDFLAGS, we
+	# need to do runtime linking.
+	case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
+	  for ld_flag in $LDFLAGS; do
+	  if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
+	    aix_use_runtimelinking=yes
+	    break
+	  fi
+	  done
+	  ;;
+	esac
+
+	exp_sym_flag='-bexport'
+	no_entry_flag='-bnoentry'
+      fi
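+      # Illustrative trigger (assumed usage): configuring with
+      #   LDFLAGS=-Wl,-brtl
+      # makes the loop above choose aix_use_runtimelinking=yes.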
+
+      # When large executables or shared objects are built, AIX ld can
+      # have problems creating the table of contents.  If linking a library
+      # or program results in "error TOC overflow" add -mminimal-toc to
+      # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
+      # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+      _LT_TAGVAR(archive_cmds, $1)=''
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      _LT_TAGVAR(file_list_spec, $1)='${wl}-f,'
+
+      if test "$GCC" = yes; then
+	case $host_os in aix4.[[012]]|aix4.[[012]].*)
+	# We only want to do this on AIX 4.2 and lower; the check
+	# below for broken collect2 doesn't work under 4.3+
+	  collect2name=`${CC} -print-prog-name=collect2`
+	  if test -f "$collect2name" &&
+	   strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+	  then
+	  # We have reworked collect2
+	  :
+	  else
+	  # We have old collect2
+	  _LT_TAGVAR(hardcode_direct, $1)=unsupported
+	  # It fails to find uninstalled libraries when the uninstalled
+	  # path is not listed in the libpath.  Setting hardcode_minus_L
+	  # to unsupported forces relinking
+	  _LT_TAGVAR(hardcode_minus_L, $1)=yes
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+	  _LT_TAGVAR(hardcode_libdir_separator, $1)=
+	  fi
+	  ;;
+	esac
+	shared_flag='-shared'
+	if test "$aix_use_runtimelinking" = yes; then
+	  shared_flag="$shared_flag "'${wl}-G'
+	fi
+      else
+	# not using gcc
+	if test "$host_cpu" = ia64; then
+	# VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+	# chokes on -Wl,-G. The following line is correct:
+	  shared_flag='-G'
+	else
+	  if test "$aix_use_runtimelinking" = yes; then
+	    shared_flag='${wl}-G'
+	  else
+	    shared_flag='${wl}-bM:SRE'
+	  fi
+	fi
+      fi
+
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall'
+      # It seems that -bexpall does not export symbols beginning with
+      # underscore (_), so it is better to generate a list of symbols to export.
+      _LT_TAGVAR(always_export_symbols, $1)=yes
+      if test "$aix_use_runtimelinking" = yes; then
+	# Warning - without using the other runtime loading flags (-brtl),
+	# -berok will link without error, but may produce a broken library.
+	_LT_TAGVAR(allow_undefined_flag, $1)='-berok'
+        # Determine the default libpath from the value encoded in an
+        # empty executable.
+        _LT_SYS_MODULE_PATH_AIX
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+        _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+      else
+	if test "$host_cpu" = ia64; then
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib'
+	  _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
+	  _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
+	else
+	 # Determine the default libpath from the value encoded in an
+	 # empty executable.
+	 _LT_SYS_MODULE_PATH_AIX
+	 _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+	  # Warning - without using the other run time loading flags,
+	  # -berok will link without error, but may produce a broken library.
+	  _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok'
+	  _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok'
+	  if test "$with_gnu_ld" = yes; then
+	    # We only use this code for GNU lds that support --whole-archive.
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	  else
+	    # Exported symbols can be pulled into shared objects from archives
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
+	  fi
+	  _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+	  # This is similar to how AIX traditionally builds its shared libraries.
+	  _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
+	fi
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+            _LT_TAGVAR(archive_expsym_cmds, $1)=''
+        ;;
+      m68k)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes
+        ;;
+      esac
+      ;;
+
+    bsdi[[45]]*)
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)=-rdynamic
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # When not using gcc, we currently assume that we are using
+      # Microsoft Visual C++.
+      # hardcode_libdir_flag_spec is actually meaningless, as there is
+      # no search path for DLLs.
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      # Tell ltmain to make .lib files, not .a files.
+      libext=lib
+      # Tell ltmain to make .dll files, not .so files.
+      shrext_cmds=".dll"
+      # FIXME: Setting linknames here is a bad hack.
+      _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+      # The linker will automatically build a .lib file if we build a DLL.
+      _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+      # FIXME: Should let the user specify the lib program.
+      _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs'
+      _LT_TAGVAR(fix_srcfile_path, $1)='`cygpath -w "$srcfile"`'
+      _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
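+      # Reading the archive_cmds template above (explanatory note): the
+      # embedded sed call, s/ -lc$//, strips a trailing -lc from $deplibs
+      # (meaningless to the MSVC linker), "-link -dll" forwards the DLL
+      # request to link.exe, and the trailing "~linknames=" is the hack
+      # the FIXME refers to.  Because link.exe emits the import .lib
+      # itself, old_archive_from_new_cmds above can be a no-op ('true').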
+      ;;
+
+    darwin* | rhapsody*)
+      _LT_DARWIN_LINKER_FEATURES($1)
+      ;;
+
+    dgux*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    freebsd1*)
+      _LT_TAGVAR(ld_shlibs, $1)=no
+      ;;
+
+    # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
+    # support.  Future versions do this automatically, but an explicit c++rt0.o
+    # does not break anything, and helps significantly (at the cost of a little
+    # extra space).
+    freebsd2.2*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    # Unfortunately, older versions of FreeBSD 2 do not have this feature.
+    freebsd2*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+    freebsd* | dragonfly*)
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    hpux9*)
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+
+      # hardcode_minus_L: Not really in the search PATH,
+      # but as the default location of the library.
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+      ;;
+
+    hpux10*)
+      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      if test "$with_gnu_ld" = no; then
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+	_LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='+b $libdir'
+	_LT_TAGVAR(hardcode_libdir_separator, $1)=:
+	_LT_TAGVAR(hardcode_direct, $1)=yes
+	_LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	# hardcode_minus_L: Not really in the search PATH,
+	# but as the default location of the library.
+	_LT_TAGVAR(hardcode_minus_L, $1)=yes
+      fi
+      ;;
+
+    hpux11*)
+      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+	case $host_cpu in
+	hppa*64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	ia64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	esac
+      else
+	case $host_cpu in
+	hppa*64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	ia64*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	m4_if($1, [], [
+	  # Older versions of the 11.00 compiler do not understand -b yet
+	  # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does)
+	  _LT_LINKER_OPTION([if $CC understands -b],
+	    _LT_TAGVAR(lt_cv_prog_compiler__b, $1), [-b],
+	    [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'],
+	    [_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'])],
+	  [_LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'])
+	  ;;
+	esac
+      fi
+      if test "$with_gnu_ld" = no; then
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+	_LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	case $host_cpu in
+	hppa*64*|ia64*)
+	  _LT_TAGVAR(hardcode_direct, $1)=no
+	  _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	  ;;
+	*)
+	  _LT_TAGVAR(hardcode_direct, $1)=yes
+	  _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	  _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+
+	  # hardcode_minus_L: Not really in the search PATH,
+	  # but as the default location of the library.
+	  _LT_TAGVAR(hardcode_minus_L, $1)=yes
+	  ;;
+	esac
+      fi
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	# Try to use the -exported_symbol ld option; if it does not
+	# work, assume that -exports_file does not work either and
+	# implicitly export all symbols.
+        save_LDFLAGS="$LDFLAGS"
+        LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+        AC_LINK_IFELSE(int foo(void) {},
+          _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+        )
+        LDFLAGS="$save_LDFLAGS"
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(inherit_rpath, $1)=yes
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    netbsd*)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	_LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'  # a.out
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$LD -shared -o $lib $libobjs $deplibs $linker_flags'      # ELF
+      fi
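+      # How the probe above works (explanatory sketch, assuming typical
+      # toolchains): on an ELF system the preprocessor predefines __ELF__,
+      # so "echo __ELF__ | $CC -E -" emits "1", the grep fails, and the
+      # "$LD -shared" ELF branch is used; on an a.out system the token
+      # survives preprocessing unchanged and "$LD -Bshareable" is selected.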
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    newsos6)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *nto* | *qnx*)
+      ;;
+
+    openbsd*)
+      if test -f /usr/libexec/ld.so; then
+	_LT_TAGVAR(hardcode_direct, $1)=yes
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	_LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols'
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	  _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	else
+	  case $host_os in
+	   openbsd[[01]].* | openbsd2.[[0-7]] | openbsd2.[[0-7]].*)
+	     _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+	     _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	     ;;
+	   *)
+	     _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+	     _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	     ;;
+	  esac
+	fi
+      else
+	_LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    os2*)
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def'
+      _LT_TAGVAR(old_archive_from_new_cmds, $1)='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def'
+      ;;
+
+    osf3*)
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+      else
+	_LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      ;;
+
+    osf4* | osf5*)	# as osf3* with the addition of -msym flag
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+      else
+	_LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
+	$CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp'
+
+	# Both the C and C++ compilers support -rpath directly
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      ;;
+
+    solaris*)
+      _LT_TAGVAR(no_undefined_flag, $1)=' -z defs'
+      if test "$GCC" = yes; then
+	wlarc='${wl}'
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+      else
+	case `$CC -V 2>&1` in
+	*"Compilers 5.0"*)
+	  wlarc=''
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
+	  ;;
+	*)
+	  wlarc='${wl}'
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+	  ;;
+	esac
+      fi
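+      # Illustration (not from the upstream sources): for an export list
+      # containing foo and bar, the commands above first write a Solaris
+      # linker map file $lib.exp of the form
+      #   { global:
+      #   foo;
+      #   bar;
+      #   local: *; };
+      # and pass it with -M, so only the listed symbols remain global.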
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      case $host_os in
+      solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+      *)
+	# The compiler driver will combine and reorder linker options,
+	# but understands `-z linker_flag'.  GCC discards it without `$wl',
+	# but is careful enough not to reorder.
+	# Supported since Solaris 2.6 (maybe 2.5.1?)
+	if test "$GCC" = yes; then
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
+	else
+	  _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
+	fi
+	;;
+      esac
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    sunos4*)
+      if test "x$host_vendor" = xsequent; then
+	# Use $CC to link under sequent, because it throws in some extra .o
+	# files that make .init and .fini sections work.
+	_LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    sysv4)
+      case $host_vendor in
+	sni)
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(hardcode_direct, $1)=yes # is this really true???
+	;;
+	siemens)
+	  ## LD is ld; it makes a PLAMLIB
+	  ## CC just makes a GrossModule.
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(reload_cmds, $1)='$CC -r -o $output$reload_objs'
+	  _LT_TAGVAR(hardcode_direct, $1)=no
+        ;;
+	motorola)
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  _LT_TAGVAR(hardcode_direct, $1)=no # Motorola manual says yes, but my tests say they lie
+	;;
+      esac
+      runpath_var='LD_RUN_PATH'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    sysv4.3*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='-Bexport'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	_LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	runpath_var=LD_RUN_PATH
+	hardcode_runpath_var=yes
+	_LT_TAGVAR(ld_shlibs, $1)=yes
+      fi
+      ;;
+
+    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
+      _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      runpath_var='LD_RUN_PATH'
+
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6*)
+      # Note: We can NOT use -z defs as we might desire, because we do not
+      # link with -lc, and that would cause any symbols used from libc to
+      # always be unresolved, which means just about no library would
+      # ever link correctly.  If we're not using GNU ld we use -z text
+      # though, which does catch some bad symbols but isn't as heavy-handed
+      # as -z defs.
+      _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+      _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport'
+      runpath_var='LD_RUN_PATH'
+
+      if test "$GCC" = yes; then
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	_LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    uts4*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *)
+      _LT_TAGVAR(ld_shlibs, $1)=no
+      ;;
+    esac
+
+    if test x$host_vendor = xsni; then
+      case $host in
+      sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Blargedynsym'
+	;;
+      esac
+    fi
+  fi
+])
+AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
+test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no
+
+_LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld
+
+_LT_DECL([], [libext], [0], [Old archive suffix (normally "a")])dnl
+_LT_DECL([], [shrext_cmds], [1], [Shared library suffix (normally ".so")])dnl
+_LT_DECL([], [extract_expsyms_cmds], [2],
+    [The commands to extract the exported symbol list from a shared archive])
+
+#
+# Do we need to explicitly link libc?
+#
+case "x$_LT_TAGVAR(archive_cmds_need_lc, $1)" in
+x|xyes)
+  # Assume -lc should be added
+  _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+
+  if test "$enable_shared" = yes && test "$GCC" = yes; then
+    case $_LT_TAGVAR(archive_cmds, $1) in
+    *'~'*)
+      # FIXME: we may have to deal with multi-command sequences.
+      ;;
+    '$CC '*)
+      # Test whether the compiler implicitly links with -lc since on some
+      # systems, -lgcc has to come before -lc. If gcc already passes -lc
+      # to ld, don't add -lc before -lgcc.
+      AC_CACHE_CHECK([whether -lc should be explicitly linked in],
+	[lt_cv_]_LT_TAGVAR(archive_cmds_need_lc, $1),
+	[$RM -r conftest*
+	echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+	if AC_TRY_EVAL(ac_compile) 2>conftest.err; then
+	  soname=conftest
+	  lib=conftest
+	  libobjs=conftest.$ac_objext
+	  deplibs=
+	  wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1)
+	  pic_flag=$_LT_TAGVAR(lt_prog_compiler_pic, $1)
+	  compiler_flags=-v
+	  linker_flags=-v
+	  verstring=
+	  output_objdir=.
+	  libname=conftest
+	  lt_save_allow_undefined_flag=$_LT_TAGVAR(allow_undefined_flag, $1)
+	  _LT_TAGVAR(allow_undefined_flag, $1)=
+	  if AC_TRY_EVAL(_LT_TAGVAR(archive_cmds, $1) 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1)
+	  then
+	    lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+	  else
+	    lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+	  fi
+	  _LT_TAGVAR(allow_undefined_flag, $1)=$lt_save_allow_undefined_flag
+	else
+	  cat conftest.err 1>&5
+	fi
+	$RM -r conftest*
+	])
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=$lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)
+      ;;
+    esac
+  fi
+  ;;
+esac
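+# Sketch of the probe above (assuming typical GNU/Linux behavior; not
+# upstream output): conftest.c is compiled, then archive_cmds is re-run
+# with compiler_flags=-v so the driver prints its full link line.  If
+# that line already contains " -lc " -- as "gcc -shared -v" normally
+# does -- the cached answer is archive_cmds_need_lc=no and libtool does
+# not add -lc itself.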
+
+_LT_TAGDECL([build_libtool_need_lc], [archive_cmds_need_lc], [0],
+    [Whether or not to add -lc for building shared libraries])
+_LT_TAGDECL([allow_libtool_libs_with_static_runtimes],
+    [enable_shared_with_static_runtimes], [0],
+    [Whether or not to disallow shared libs when runtime libs are static])
+_LT_TAGDECL([], [export_dynamic_flag_spec], [1],
+    [Compiler flag to allow reflexive dlopens])
+_LT_TAGDECL([], [whole_archive_flag_spec], [1],
+    [Compiler flag to generate shared objects directly from archives])
+_LT_TAGDECL([], [compiler_needs_object], [1],
+    [Whether the compiler copes with passing no objects directly])
+_LT_TAGDECL([], [old_archive_from_new_cmds], [2],
+    [Create an old-style archive from a shared archive])
+_LT_TAGDECL([], [old_archive_from_expsyms_cmds], [2],
+    [Create a temporary old-style archive to link instead of a shared archive])
+_LT_TAGDECL([], [archive_cmds], [2], [Commands used to build a shared archive])
+_LT_TAGDECL([], [archive_expsym_cmds], [2])
+_LT_TAGDECL([], [module_cmds], [2],
+    [Commands used to build a loadable module if different from building
+    a shared archive.])
+_LT_TAGDECL([], [module_expsym_cmds], [2])
+_LT_TAGDECL([], [with_gnu_ld], [1],
+    [Whether we are building with GNU ld or not])
+_LT_TAGDECL([], [allow_undefined_flag], [1],
+    [Flag that allows shared libraries with undefined symbols to be built])
+_LT_TAGDECL([], [no_undefined_flag], [1],
+    [Flag that enforces no undefined symbols])
+_LT_TAGDECL([], [hardcode_libdir_flag_spec], [1],
+    [Flag to hardcode $libdir into a binary during linking.
+    This must work even if $libdir does not exist])
+_LT_TAGDECL([], [hardcode_libdir_flag_spec_ld], [1],
+    [[If ld is used when linking, flag to hardcode $libdir into a binary
+    during linking.  This must work even if $libdir does not exist]])
+_LT_TAGDECL([], [hardcode_libdir_separator], [1],
+    [Whether we need a single "-rpath" flag with a separated argument])
+_LT_TAGDECL([], [hardcode_direct], [0],
+    [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+    DIR into the resulting binary])
+_LT_TAGDECL([], [hardcode_direct_absolute], [0],
+    [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes
+    DIR into the resulting binary and the resulting library dependency is
+    "absolute", i.e. impossible to change by setting ${shlibpath_var} if the
+    library is relocated])
+_LT_TAGDECL([], [hardcode_minus_L], [0],
+    [Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+    into the resulting binary])
+_LT_TAGDECL([], [hardcode_shlibpath_var], [0],
+    [Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+    into the resulting binary])
+_LT_TAGDECL([], [hardcode_automatic], [0],
+    [Set to "yes" if building a shared library automatically hardcodes DIR
+    into the library and all subsequent libraries and executables linked
+    against it])
+_LT_TAGDECL([], [inherit_rpath], [0],
+    [Set to yes if linker adds runtime paths of dependent libraries
+    to runtime path list])
+_LT_TAGDECL([], [link_all_deplibs], [0],
+    [Whether libtool must link a program against all its dependency libraries])
+_LT_TAGDECL([], [fix_srcfile_path], [1],
+    [Fix the shell variable $srcfile for the compiler])
+_LT_TAGDECL([], [always_export_symbols], [0],
+    [Set to "yes" if exported symbols are required])
+_LT_TAGDECL([], [export_symbols_cmds], [2],
+    [The commands to list exported symbols])
+_LT_TAGDECL([], [exclude_expsyms], [1],
+    [Symbols that should not be listed in the preloaded symbols])
+_LT_TAGDECL([], [include_expsyms], [1],
+    [Symbols that must always be exported])
+_LT_TAGDECL([], [prelink_cmds], [2],
+    [Commands necessary for linking programs (against libraries) with templates])
+_LT_TAGDECL([], [file_list_spec], [1],
+    [Specify filename containing input files])
+dnl FIXME: Not yet implemented
+dnl _LT_TAGDECL([], [thread_safe_flag_spec], [1],
+dnl    [Compiler flag to generate thread safe objects])
+])# _LT_LINKER_SHLIBS
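+# Orientation note (explanatory, not upstream text): each _LT_TAGDECL
+# above becomes a per-tag variable in the generated `libtool' script,
+# along the lines of
+#   archive_cmds="\$CC -shared \$libobjs \$deplibs \$compiler_flags ..."
+#   hardcode_libdir_flag_spec="\${wl}-rpath \${wl}\$libdir"
+# which ltmain.sh then evals to replay the platform link recipe.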
+
+
+# _LT_LANG_C_CONFIG([TAG])
+# ------------------------
+# Ensure that the configuration variables for a C compiler are suitably
+# defined.  These variables are subsequently used by _LT_CONFIG to write
+# the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_C_CONFIG],
+[m4_require([_LT_DECL_EGREP])dnl
+lt_save_CC="$CC"
+AC_LANG_PUSH(C)
+
+# Source file extension for C test sources.
+ac_ext=c
+
+# Object file extension for compiled C test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="int some_variable = 0;"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='int main(){return(0);}'
+
+_LT_TAG_COMPILER
+# Save the default compiler, since it gets overwritten when the other
+# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
+compiler_DEFAULT=$CC
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+if test -n "$compiler"; then
+  _LT_COMPILER_NO_RTTI($1)
+  _LT_COMPILER_PIC($1)
+  _LT_COMPILER_C_O($1)
+  _LT_COMPILER_FILE_LOCKS($1)
+  _LT_LINKER_SHLIBS($1)
+  _LT_SYS_DYNAMIC_LINKER($1)
+  _LT_LINKER_HARDCODE_LIBPATH($1)
+  LT_SYS_DLOPEN_SELF
+  _LT_CMD_STRIPLIB
+
+  # Report which library types will actually be built
+  AC_MSG_CHECKING([if libtool supports shared libraries])
+  AC_MSG_RESULT([$can_build_shared])
+
+  AC_MSG_CHECKING([whether to build shared libraries])
+  test "$can_build_shared" = "no" && enable_shared=no
+
+  # On AIX, shared libraries and static libraries use the same namespace, and
+  # are all built from PIC.
+  case $host_os in
+  aix3*)
+    test "$enable_shared" = yes && enable_static=no
+    if test -n "$RANLIB"; then
+      archive_cmds="$archive_cmds~\$RANLIB \$lib"
+      postinstall_cmds='$RANLIB $lib'
+    fi
+    ;;
+
+  aix[[4-9]]*)
+    if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+      test "$enable_shared" = yes && enable_static=no
+    fi
+    ;;
+  esac
+  AC_MSG_RESULT([$enable_shared])
+
+  AC_MSG_CHECKING([whether to build static libraries])
+  # Make sure either enable_shared or enable_static is yes.
+  test "$enable_shared" = yes || enable_static=yes
+  AC_MSG_RESULT([$enable_static])
+
+  _LT_CONFIG($1)
+fi
+AC_LANG_POP
+CC="$lt_save_CC"
+])# _LT_LANG_C_CONFIG
+
+
+# _LT_LANG_CXX_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for a C++ compiler are suitably
+# defined.  These variables are subsequently used by _LT_CONFIG to write
+# the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_CXX_CONFIG],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_EGREP])dnl
+if test -n "$CXX" && ( test "X$CXX" != "Xno" &&
+    ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) ||
+    (test "X$CXX" != "Xg++"))) ; then
+  AC_PROG_CXXCPP
+else
+  _lt_caught_CXX_error=yes
+fi
+
+AC_LANG_PUSH(C++)
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(compiler_needs_object, $1)=no
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for C++ test sources.
+ac_ext=cpp
+
+# Object file extension for compiled C++ test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the CXX compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_caught_CXX_error" != yes; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="int some_variable = 0;"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code='int main(int, char *[[]]) { return(0); }'
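+  # Quoting note (added for clarity): [[]] is doubly m4-quoted, so after
+  # autoconf strips one level of brackets the code that reaches configure
+  # reads
+  #   int main(int, char *[]) { return(0); }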
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC=$CC
+  lt_save_LD=$LD
+  lt_save_GCC=$GCC
+  GCC=$GXX
+  lt_save_with_gnu_ld=$with_gnu_ld
+  lt_save_path_LD=$lt_cv_path_LD
+  if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then
+    lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx
+  else
+    $as_unset lt_cv_prog_gnu_ld
+  fi
+  if test -n "${lt_cv_path_LDCXX+set}"; then
+    lt_cv_path_LD=$lt_cv_path_LDCXX
+  else
+    $as_unset lt_cv_path_LD
+  fi
+  test -z "${LDCXX+set}" || LD=$LDCXX
+  CC=${CXX-"c++"}
+  compiler=$CC
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+
+  if test -n "$compiler"; then
+    # We don't want -fno-exceptions when compiling C++ code, so set the
+    # no_builtin_flag separately
+    if test "$GXX" = yes; then
+      _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin'
+    else
+      _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
+    fi
+
+    if test "$GXX" = yes; then
+      # Set up default GNU C++ configuration
+
+      LT_PATH_LD
+
+      # Check if GNU C++ uses GNU ld as the underlying linker, since the
+      # archiving commands below assume that GNU ld is being used.
+      if test "$with_gnu_ld" = yes; then
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+        _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+
+        # If archive_cmds runs LD, not CC, wlarc should be empty
+        # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to
+        #     investigate it a little bit more. (MM)
+        wlarc='${wl}'
+
+        # ancient GNU ld didn't support --whole-archive et al.
+        if eval "`$CC -print-prog-name=ld` --help 2>&1" |
+	  $GREP 'no-whole-archive' > /dev/null; then
+          _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+        else
+          _LT_TAGVAR(whole_archive_flag_spec, $1)=
+        fi
+      else
+        with_gnu_ld=no
+        wlarc=
+
+        # A generic and very simple default shared library creation
+        # command for GNU C++ for the case where it uses the native
+        # linker, instead of GNU ld.  If possible, this setting should
+        # be overridden to take advantage of the native linker features on
+        # the platform it is being used on.
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
+      fi
+
+      # Commands to make compiler produce verbose output that lists
+      # what "hidden" libraries, object files and flags are used when
+      # linking a shared library.
+      output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+
+    else
+      GXX=no
+      with_gnu_ld=no
+      wlarc=
+    fi
+
+    # PORTME: fill in a description of your system's C++ link characteristics
+    AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+    _LT_TAGVAR(ld_shlibs, $1)=yes
+    case $host_os in
+      aix3*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+      aix[[4-9]]*)
+        if test "$host_cpu" = ia64; then
+          # On IA64, the linker does run time linking by default, so we don't
+          # have to do anything special.
+          aix_use_runtimelinking=no
+          exp_sym_flag='-Bexport'
+          no_entry_flag=""
+        else
+          aix_use_runtimelinking=no
+
+          # Test if we are trying to use run time linking or normal
+          # AIX style linking. If -brtl is somewhere in LDFLAGS, we
+          # need to do runtime linking.
+          case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
+	    for ld_flag in $LDFLAGS; do
+	      case $ld_flag in
+	      *-brtl*)
+	        aix_use_runtimelinking=yes
+	        break
+	        ;;
+	      esac
+	    done
+	    ;;
+          esac
+
+          exp_sym_flag='-bexport'
+          no_entry_flag='-bnoentry'
+        fi
+
+        # When large executables or shared objects are built, AIX ld can
+        # have problems creating the table of contents.  If linking a library
+        # or program results in "error TOC overflow" add -mminimal-toc to
+        # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
+        # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+        _LT_TAGVAR(archive_cmds, $1)=''
+        _LT_TAGVAR(hardcode_direct, $1)=yes
+        _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+        _LT_TAGVAR(link_all_deplibs, $1)=yes
+        _LT_TAGVAR(file_list_spec, $1)='${wl}-f,'
+
+        if test "$GXX" = yes; then
+          case $host_os in aix4.[[012]]|aix4.[[012]].*)
+          # We only want to do this on AIX 4.2 and lower; the check
+          # below for broken collect2 doesn't work under 4.3+
+	  collect2name=`${CC} -print-prog-name=collect2`
+	  if test -f "$collect2name" &&
+	     strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+	  then
+	    # We have reworked collect2
+	    :
+	  else
+	    # We have old collect2
+	    _LT_TAGVAR(hardcode_direct, $1)=unsupported
+	    # It fails to find uninstalled libraries when the uninstalled
+	    # path is not listed in the libpath.  Setting hardcode_minus_L
+	    # to unsupported forces relinking
+	    _LT_TAGVAR(hardcode_minus_L, $1)=yes
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=
+	  fi
+          esac
+          shared_flag='-shared'
+	  if test "$aix_use_runtimelinking" = yes; then
+	    shared_flag="$shared_flag "'${wl}-G'
+	  fi
+        else
+          # not using gcc
+          if test "$host_cpu" = ia64; then
+	  # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+	  # chokes on -Wl,-G. The following line is correct:
+	  shared_flag='-G'
+          else
+	    if test "$aix_use_runtimelinking" = yes; then
+	      shared_flag='${wl}-G'
+	    else
+	      shared_flag='${wl}-bM:SRE'
+	    fi
+          fi
+        fi
+
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall'
+        # It seems that -bexpall does not export symbols beginning with
+        # underscore (_), so it is better to generate a list of symbols to
+	# export.
+        _LT_TAGVAR(always_export_symbols, $1)=yes
+        if test "$aix_use_runtimelinking" = yes; then
+          # Warning - without using the other runtime loading flags (-brtl),
+          # -berok will link without error, but may produce a broken library.
+          _LT_TAGVAR(allow_undefined_flag, $1)='-berok'
+          # Determine the default libpath from the value encoded in an empty
+          # executable.
+          _LT_SYS_MODULE_PATH_AIX
+          _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+
+          _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+        else
+          if test "$host_cpu" = ia64; then
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib'
+	    _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
+	    _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
+          else
+	    # Determine the default libpath from the value encoded in an
+	    # empty executable.
+	    _LT_SYS_MODULE_PATH_AIX
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath"
+	    # Warning - without using the other run time loading flags,
+	    # -berok will link without error, but may produce a broken library.
+	    _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok'
+	    _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok'
+	    if test "$with_gnu_ld" = yes; then
+	      # We only use this code for GNU lds that support --whole-archive.
+	      _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	    else
+	      # Exported symbols can be pulled into shared objects from archives
+	      _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
+	    fi
+	    _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+	    # This is similar to how AIX traditionally builds its shared
+	    # libraries.
+	    _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
+          fi
+        fi
+        ;;
+
+      beos*)
+	if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	  _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+	  # Joseph Beckenbach <jrb3 at best.com> says some releases of gcc
+	  # support --undefined.  This deserves some investigation.  FIXME
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	else
+	  _LT_TAGVAR(ld_shlibs, $1)=no
+	fi
+	;;
+
+      chorus*)
+        case $cc_basename in
+          *)
+	  # FIXME: insert proper C++ library support
+	  _LT_TAGVAR(ld_shlibs, $1)=no
+	  ;;
+        esac
+        ;;
+
+      cygwin* | mingw* | pw32* | cegcc*)
+        # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+        # as there is no search path for DLLs.
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols'
+        _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+        _LT_TAGVAR(always_export_symbols, $1)=no
+        _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+
+        if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+          _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+          # If the export-symbols file already is a .def file (1st line
+          # is EXPORTS), use it as is; otherwise, prepend...
+          _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	    cp $export_symbols $output_objdir/$soname.def;
+          else
+	    echo EXPORTS > $output_objdir/$soname.def;
+	    cat $export_symbols >> $output_objdir/$soname.def;
+          fi~
+          $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+        else
+          _LT_TAGVAR(ld_shlibs, $1)=no
+        fi
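+        # Illustration (not from the upstream sources): an export list
+        # containing
+        #   foo
+        #   bar
+        # becomes $output_objdir/$soname.def reading
+        #   EXPORTS
+        #   foo
+        #   bar
+        # while a list whose first line is already EXPORTS is copied to
+        # the .def file unchanged.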
+        ;;
+      darwin* | rhapsody*)
+        _LT_DARWIN_LINKER_FEATURES($1)
+	;;
+
+      dgux*)
+        case $cc_basename in
+          ec++*)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          ghcx*)
+	    # Green Hills C++ Compiler
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+        esac
+        ;;
+
+      freebsd[[12]]*)
+        # C++ shared libraries were reported to be fairly broken before
+        # the switch to ELF
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      freebsd-elf*)
+        _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+        ;;
+
+      freebsd* | dragonfly*)
+        # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF
+        # conventions
+        _LT_TAGVAR(ld_shlibs, $1)=yes
+        ;;
+
+      gnu*)
+        ;;
+
+      haiku*)
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+        _LT_TAGVAR(link_all_deplibs, $1)=yes
+        ;;
+
+      hpux9*)
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+        _LT_TAGVAR(hardcode_direct, $1)=yes
+        _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
+				             # but as the default
+				             # location of the library.
+
+        case $cc_basename in
+          CC*)
+            # FIXME: insert proper C++ library support
+            _LT_TAGVAR(ld_shlibs, $1)=no
+            ;;
+          aCC*)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+            # Commands to make compiler produce verbose output that lists
+            # what "hidden" libraries, object files and flags are used when
+            # linking a shared library.
+            #
+            # There doesn't appear to be a way to prevent this compiler from
+            # explicitly linking system object files so we need to strip them
+            # from the output so that they don't get included in the library
+            # dependencies.
+            output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+            ;;
+          *)
+            if test "$GXX" = yes; then
+              _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+            else
+              # FIXME: insert proper C++ library support
+              _LT_TAGVAR(ld_shlibs, $1)=no
+            fi
+            ;;
+        esac
+        ;;
+
+      hpux10*|hpux11*)
+        if test $with_gnu_ld = no; then
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir'
+	  _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+          case $host_cpu in
+            hppa*64*|ia64*)
+              ;;
+            *)
+	      _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+              ;;
+          esac
+        fi
+        case $host_cpu in
+          hppa*64*|ia64*)
+            _LT_TAGVAR(hardcode_direct, $1)=no
+            _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+            ;;
+          *)
+            _LT_TAGVAR(hardcode_direct, $1)=yes
+            _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
+					         # but as the default
+					         # location of the library.
+            ;;
+        esac
+
+        case $cc_basename in
+          CC*)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          aCC*)
+	    case $host_cpu in
+	      hppa*64*)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	        ;;
+	      ia64*)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	        ;;
+	      *)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	        ;;
+	    esac
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+	    ;;
+          *)
+	    if test "$GXX" = yes; then
+	      if test $with_gnu_ld = no; then
+	        case $host_cpu in
+	          hppa*64*)
+	            _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	            ;;
+	          ia64*)
+	            _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	            ;;
+	          *)
+	            _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	            ;;
+	        esac
+	      fi
+	    else
+	      # FIXME: insert proper C++ library support
+	      _LT_TAGVAR(ld_shlibs, $1)=no
+	    fi
+	    ;;
+        esac
+        ;;
+
+      interix[[3-9]]*)
+	_LT_TAGVAR(hardcode_direct, $1)=no
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	# Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+	# Instead, shared libraries are loaded at an image base (0x10000000 by
+	# default) and relocated if they conflict, which is a slow, very
+	# memory-consuming and fragmenting process.  To avoid this, we pick a random,
+	# 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+	# time.  Moving up from 0x10000000 also allows more sbrk(2) space.
+	_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+	_LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
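+	# Worked example (added for clarity): with ${RANDOM-$$} = 1234 the
+	# expr above computes 1234 % 4096 = 1234, / 2 = 617, * 262144 =
+	# 161742848, + 1342177280 = 1503920128 = 0x59A40000 -- a 256
+	# KiB-aligned base inside the 0x50000000..0x6FFC0000 window.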
+	;;
+      irix5* | irix6*)
+        case $cc_basename in
+          CC*)
+	    # SGI C++
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+
+	    # Archives containing C++ object files must be created using
+	    # "CC -ar", where "CC" is the IRIX C++ compiler.  This is
+	    # necessary to make sure instantiated templates are included
+	    # in the archive.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs'
+	    ;;
+          *)
+	    if test "$GXX" = yes; then
+	      if test "$with_gnu_ld" = no; then
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	      else
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib'
+	      fi
+	    fi
+	    _LT_TAGVAR(link_all_deplibs, $1)=yes
+	    ;;
+        esac
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+        _LT_TAGVAR(inherit_rpath, $1)=yes
+        ;;
+
+      linux* | k*bsd*-gnu | kopensolaris*-gnu)
+        case $cc_basename in
+          KCC*)
+	    # Kuck and Associates, Inc. (KAI) C++ Compiler
+
+	    # KCC will only create a shared library if the output file
+	    # ends with ".so" (or ".sl" for HP-UX), so rename the library
+	    # to its proper name (with version) after linking.
+	    _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib'
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+
+	    # Archives containing C++ object files must be created using
+	    # "CC -Bstatic", where "CC" is the KAI C++ compiler.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs'
+	    ;;
+	  icpc* | ecpc* )
+	    # Intel C++
+	    with_gnu_ld=yes
+	    # Version 8.0 and above of icpc choke on multiply-defined symbols
+	    # if we add $predep_objects and $postdep_objects; version 7.1 and
+	    # earlier, however, do not add the objects themselves.
+	    case `$CC -V 2>&1` in
+	      *"Version 7."*)
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+		_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+		;;
+	      *)  # Version 8.0 or newer
+	        tmp_idyn=
+	        case $host_cpu in
+		  ia64*) tmp_idyn=' -i_dynamic';;
+		esac
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+		_LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+		;;
+	    esac
+	    _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	    ;;
+          pgCC* | pgcpp*)
+            # Portland Group C++ compiler
+	    case `$CC -V` in
+	    *pgCC\ [[1-5]].* | *pgcpp\ [[1-5]].*)
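+	      # Descriptive note: these versions instantiate templates into
+	      # $tpldir via --prelink_objects; the .o files generated there
+	      # are then folded into the link and archive commands below.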
+	      _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~
+		compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"'
+	      _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~
+		$AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~
+		$RANLIB $oldlib'
+	      _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
+		$CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~
+		rm -rf $tpldir~
+		$CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~
+		$CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
+	      ;;
+	    *) # Version 6 and above use weak symbols
+	      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib'
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib'
+	      ;;
+	    esac
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath ${wl}$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+            ;;
+	  cxx*)
+	    # Compaq C++
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname  -o $lib ${wl}-retain-symbols-file $wl$export_symbols'
+
+	    runpath_var=LD_RUN_PATH
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed'
+	    ;;
+	  xl* | mpixl* | bgxl*)
+	    # IBM XL 8.0 on PPC, with GNU ld
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic'
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    if test "x$supports_anon_versioning" = xyes; then
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+		cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+		echo "local: *; };" >> $output_objdir/$libname.ver~
+		$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
+	    fi
+	    ;;
+	  *)
+	    case `$CC -V 2>&1 | sed 5q` in
+	    *Sun\ C*)
+	      # Sun C++ 5.9
+	      _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+	      _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	      _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols'
+	      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	      _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	      _LT_TAGVAR(compiler_needs_object, $1)=yes
+
+	      # Not sure whether something based on
+	      # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1
+	      # would be better.
+	      output_verbose_link_cmd='func_echo_all'
+
+	      # Archives containing C++ object files must be created using
+	      # "CC -xar", where "CC" is the Sun C++ compiler.  This is
+	      # necessary to make sure instantiated templates are included
+	      # in the archive.
+	      _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
+	      ;;
+	    esac
+	    ;;
+	esac
+	;;
+
+      lynxos*)
+        # FIXME: insert proper C++ library support
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	;;
+
+      m88k*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+	;;
+
+      mvs*)
+        case $cc_basename in
+          cxx*)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+	  *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+	esac
+	;;
+
+      netbsd*)
+        if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	  _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable  -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags'
+	  wlarc=
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	  _LT_TAGVAR(hardcode_direct, $1)=yes
+	  _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	fi
+	# Work around some broken pre-1.5 toolchains
+	output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"'
+	;;
+
+      *nto* | *qnx*)
+        _LT_TAGVAR(ld_shlibs, $1)=yes
+	;;
+
+      openbsd2*)
+        # C++ shared libraries are fairly broken
+	_LT_TAGVAR(ld_shlibs, $1)=no
+	;;
+
+      openbsd*)
+	if test -f /usr/libexec/ld.so; then
+	  _LT_TAGVAR(hardcode_direct, $1)=yes
+	  _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	  _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
+	  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	  if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib'
+	    _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E'
+	    _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+	  fi
+	  output_verbose_link_cmd=func_echo_all
+	else
+	  _LT_TAGVAR(ld_shlibs, $1)=no
+	fi
+	;;
+
+      osf3* | osf4* | osf5*)
+        case $cc_basename in
+          KCC*)
+	    # Kuck and Associates, Inc. (KAI) C++ Compiler
+
+	    # KCC will only create a shared library if the output file
+	    # ends with ".so" (or ".sl" for HP-UX), so rename the library
+	    # to its proper name (with version) after linking.
+	    _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir'
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	    # Archives containing C++ object files must be created using
+	    # the KAI C++ compiler.
+	    case $host in
+	      osf3*) _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;;
+	      *) _LT_TAGVAR(old_archive_cmds, $1)='$CC -o $oldlib $oldobjs' ;;
+	    esac
+	    ;;
+          RCC*)
+	    # Rational C++ 2.4.1
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          cxx*)
+	    case $host in
+	      osf3*)
+	        _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && func_echo_all "${wl}-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+		;;
+	      *)
+	        _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	        _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~
+	          echo "-hidden">> $lib.exp~
+	          $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname ${wl}-input ${wl}$lib.exp  `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~
+	          $RM $lib.exp'
+	        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+		;;
+	    esac
+
+	    _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	    # Commands to make compiler produce verbose output that lists
+	    # what "hidden" libraries, object files and flags are used when
+	    # linking a shared library.
+	    #
+	    # There doesn't appear to be a way to prevent this compiler from
+	    # explicitly linking system object files so we need to strip them
+	    # from the output so that they don't get included in the library
+	    # dependencies.
+	    output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+	    ;;
+	  *)
+	    if test "$GXX" = yes && test "$with_gnu_ld" = no; then
+	      _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*'
+	      case $host in
+	        osf3*)
+	          _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+		  ;;
+	        *)
+	          _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+		  ;;
+	      esac
+
+	      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir'
+	      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+	      # Commands to make compiler produce verbose output that lists
+	      # what "hidden" libraries, object files and flags are used when
+	      # linking a shared library.
+	      output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+
+	    else
+	      # FIXME: insert proper C++ library support
+	      _LT_TAGVAR(ld_shlibs, $1)=no
+	    fi
+	    ;;
+        esac
+        ;;
+
+      psos*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      sunos4*)
+        case $cc_basename in
+          CC*)
+	    # Sun C++ 4.x
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          lcc*)
+	    # Lucid
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+        esac
+        ;;
+
+      solaris*)
+        case $cc_basename in
+          CC* | sunCC*)
+	    # Sun C++ 4.2, 5.x and Centerline C++
+            _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+	    _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag}  -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	      $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+	    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+	    _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	    case $host_os in
+	      solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+	      *)
+		# The compiler driver will combine and reorder linker options,
+		# but understands `-z linker_flag'.
+	        # Supported since Solaris 2.6 (maybe 2.5.1?)
+		_LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
+	        ;;
+	    esac
+	    _LT_TAGVAR(link_all_deplibs, $1)=yes
+
+	    output_verbose_link_cmd='func_echo_all'
+
+	    # Archives containing C++ object files must be created using
+	    # "CC -xar", where "CC" is the Sun C++ compiler.  This is
+	    # necessary to make sure instantiated templates are included
+	    # in the archive.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
+	    ;;
+          gcx*)
+	    # Green Hills C++ Compiler
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+
+	    # The C++ compiler must be used to create the archive.
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs'
+	    ;;
+          *)
+	    # GNU C++ compiler with Solaris linker
+	    if test "$GXX" = yes && test "$with_gnu_ld" = no; then
+	      _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs'
+	      if $CC --version | $GREP -v '^2\.7' > /dev/null; then
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+	        _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+		  $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+	        # Commands to make compiler produce verbose output that lists
+	        # what "hidden" libraries, object files and flags are used when
+	        # linking a shared library.
+	        output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+	      else
+	        # g++ 2.7 appears to require `-G' NOT `-shared' on this
+	        # platform.
+	        _LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib'
+	        _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+		  $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+	        # Commands to make compiler produce verbose output that lists
+	        # what "hidden" libraries, object files and flags are used when
+	        # linking a shared library.
+	        output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"'
+	      fi
+
+	      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $wl$libdir'
+	      case $host_os in
+		solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+		*)
+		  _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
+		  ;;
+	      esac
+	    fi
+	    ;;
+        esac
+        ;;
+
+    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
+      _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      runpath_var='LD_RUN_PATH'
+
+      case $cc_basename in
+        CC*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	  _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+      esac
+      ;;
+
+      sysv5* | sco3.2v5* | sco5v6*)
+	# Note: We can NOT use -z defs as we might desire, because we do not
+	# link with -lc, and that would cause any symbols used from libc to
+	# always be unresolved, which means just about no library would
+	# ever link correctly.  If we're not using GNU ld we use -z text
+	# though, which does catch some bad symbols but isn't as heavy-handed
+	# as -z defs.
+	_LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text'
+	_LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs'
+	_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+	_LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+	_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir'
+	_LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+	_LT_TAGVAR(link_all_deplibs, $1)=yes
+	_LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport'
+	runpath_var='LD_RUN_PATH'
+
+	case $cc_basename in
+          CC*)
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    _LT_TAGVAR(old_archive_cmds, $1)='$CC -Tprelink_objects $oldobjs~
+	      '"$_LT_TAGVAR(old_archive_cmds, $1)"
+	    _LT_TAGVAR(reload_cmds, $1)='$CC -Tprelink_objects $reload_objs~
+	      '"$_LT_TAGVAR(reload_cmds, $1)"
+	    ;;
+	  *)
+	    _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	    ;;
+	esac
+      ;;
+
+      tandem*)
+        case $cc_basename in
+          NCC*)
+	    # NonStop-UX NCC 3.20
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+          *)
+	    # FIXME: insert proper C++ library support
+	    _LT_TAGVAR(ld_shlibs, $1)=no
+	    ;;
+        esac
+        ;;
+
+      vxworks*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      *)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+    esac
+
+    AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
+    test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no
+
+    _LT_TAGVAR(GCC, $1)="$GXX"
+    _LT_TAGVAR(LD, $1)="$LD"
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_SYS_HIDDEN_LIBDEPS($1)
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  CC=$lt_save_CC
+  LDCXX=$LD
+  LD=$lt_save_LD
+  GCC=$lt_save_GCC
+  with_gnu_ld=$lt_save_with_gnu_ld
+  lt_cv_path_LDCXX=$lt_cv_path_LD
+  lt_cv_path_LD=$lt_save_path_LD
+  lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld
+  lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld
+fi # test "$_lt_caught_CXX_error" != yes
+
+AC_LANG_POP
+])# _LT_LANG_CXX_CONFIG
+
+
+# _LT_SYS_HIDDEN_LIBDEPS([TAGNAME])
+# ---------------------------------
+# Figure out "hidden" library dependencies from verbose
+# compiler output when linking a shared library.
+# Parse the compiler output and extract the necessary
+# objects, libraries and library flags.
+m4_defun([_LT_SYS_HIDDEN_LIBDEPS],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+# Dependencies to place before and after the object being linked:
+_LT_TAGVAR(predep_objects, $1)=
+_LT_TAGVAR(postdep_objects, $1)=
+_LT_TAGVAR(predeps, $1)=
+_LT_TAGVAR(postdeps, $1)=
+_LT_TAGVAR(compiler_lib_search_path, $1)=
+
+dnl we can't use the lt_simple_compile_test_code here,
+dnl because it contains code intended for an executable,
+dnl not a library.  It's possible we should let each
+dnl tag define a new lt_????_link_test_code variable,
+dnl but it's only used here...
+m4_if([$1], [], [cat > conftest.$ac_ext <<_LT_EOF
+int a;
+void foo (void) { a = 0; }
+_LT_EOF
+], [$1], [CXX], [cat > conftest.$ac_ext <<_LT_EOF
+class Foo
+{
+public:
+  Foo (void) { a = 0; }
+private:
+  int a;
+};
+_LT_EOF
+], [$1], [F77], [cat > conftest.$ac_ext <<_LT_EOF
+      subroutine foo
+      implicit none
+      integer*4 a
+      a=0
+      return
+      end
+_LT_EOF
+], [$1], [FC], [cat > conftest.$ac_ext <<_LT_EOF
+      subroutine foo
+      implicit none
+      integer a
+      a=0
+      return
+      end
+_LT_EOF
+], [$1], [GCJ], [cat > conftest.$ac_ext <<_LT_EOF
+public class foo {
+  private int a;
+  public void bar (void) {
+    a = 0;
+  }
+};
+_LT_EOF
+])
+dnl Parse the compiler output and extract the necessary
+dnl objects, libraries and library flags.
+if AC_TRY_EVAL(ac_compile); then
+  # Parse the compiler output and extract the necessary
+  # objects, libraries and library flags.
+
+  # Sentinel used to keep track of whether or not we are before
+  # the conftest object file.
+  pre_test_object_deps_done=no
+
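+  # Summary of the loop below: tokens seen before conftest.$objext are
+  # classified as predep_objects / compiler_lib_search_path, and tokens
+  # seen after it as postdep_objects / postdeps.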
+  for p in `eval "$output_verbose_link_cmd"`; do
+    case $p in
+
+    -L* | -R* | -l*)
+       # Some compilers place a space between "-{L,R}" and the path.
+       # Remove the space.
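+       # (e.g. a hypothetical "-L /usr/ccs/lib" is re-joined below as
+       # "-L/usr/ccs/lib" before being classified.)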
+       if test $p = "-L" ||
+          test $p = "-R"; then
+	 prev=$p
+	 continue
+       else
+	 prev=
+       fi
+
+       if test "$pre_test_object_deps_done" = no; then
+	 case $p in
+	 -L* | -R*)
+	   # Internal compiler library paths should come after those
+	   # provided by the user.  The postdeps already come after the
+	   # user-supplied libs, so there is no need to process them.
+	   if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then
+	     _LT_TAGVAR(compiler_lib_search_path, $1)="${prev}${p}"
+	   else
+	     _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} ${prev}${p}"
+	   fi
+	   ;;
+	 # The "-l" case would never come before the object being
+	 # linked, so don't bother handling this case.
+	 esac
+       else
+	 if test -z "$_LT_TAGVAR(postdeps, $1)"; then
+	   _LT_TAGVAR(postdeps, $1)="${prev}${p}"
+	 else
+	   _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}"
+	 fi
+       fi
+       ;;
+
+    *.$objext)
+       # This assumes that the test object file only shows up
+       # once in the compiler output.
+       if test "$p" = "conftest.$objext"; then
+	 pre_test_object_deps_done=yes
+	 continue
+       fi
+
+       if test "$pre_test_object_deps_done" = no; then
+	 if test -z "$_LT_TAGVAR(predep_objects, $1)"; then
+	   _LT_TAGVAR(predep_objects, $1)="$p"
+	 else
+	   _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p"
+	 fi
+       else
+	 if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then
+	   _LT_TAGVAR(postdep_objects, $1)="$p"
+	 else
+	   _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p"
+	 fi
+       fi
+       ;;
+
+    *) ;; # Ignore the rest.
+
+    esac
+  done
+
+  # Clean up.
+  rm -f a.out a.exe
+else
+  echo "libtool.m4: error: problem compiling $1 test program"
+fi
+
+$RM -f conftest.$objext
+
+# PORTME: override above test on systems where it is broken
+m4_if([$1], [CXX],
+[case $host_os in
+interix[[3-9]]*)
+  # Interix 3.5 installs completely hosed .la files for C++, so rather than
+  # hack all around it, let's just trust "g++" to DTRT.
+  _LT_TAGVAR(predep_objects,$1)=
+  _LT_TAGVAR(postdep_objects,$1)=
+  _LT_TAGVAR(postdeps,$1)=
+  ;;
+
+linux*)
+  case `$CC -V 2>&1 | sed 5q` in
+  *Sun\ C*)
+    # Sun C++ 5.9
+
+    # The more standards-conforming stlport4 library is
+    # incompatible with the Cstd library. Avoid specifying
+    # it if it's in CXXFLAGS. Ignore libCrun as
+    # -library=stlport4 depends on it.
+    case " $CXX $CXXFLAGS " in
+    *" -library=stlport4 "*)
+      solaris_use_stlport4=yes
+      ;;
+    esac
+
+    if test "$solaris_use_stlport4" != yes; then
+      _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun'
+    fi
+    ;;
+  esac
+  ;;
+
+solaris*)
+  case $cc_basename in
+  CC* | sunCC*)
+    # The more standards-conforming stlport4 library is
+    # incompatible with the Cstd library. Avoid specifying
+    # it if it's in CXXFLAGS. Ignore libCrun as
+    # -library=stlport4 depends on it.
+    case " $CXX $CXXFLAGS " in
+    *" -library=stlport4 "*)
+      solaris_use_stlport4=yes
+      ;;
+    esac
+
+    # Adding this requires a known-good setup of shared libraries for
+    # Sun compiler versions before 5.6, else PIC objects from an old
+    # archive will be linked into the output, leading to subtle bugs.
+    if test "$solaris_use_stlport4" != yes; then
+      _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun'
+    fi
+    ;;
+  esac
+  ;;
+esac
+])
+
+case " $_LT_TAGVAR(postdeps, $1) " in
+*" -lc "*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;;
+esac
+ _LT_TAGVAR(compiler_lib_search_dirs, $1)=
+if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then
+ _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | ${SED} -e 's! -L! !g' -e 's!^ !!'`
+fi
+_LT_TAGDECL([], [compiler_lib_search_dirs], [1],
+    [The directories searched by this compiler when creating a shared library])
+_LT_TAGDECL([], [predep_objects], [1],
+    [Dependencies to place before and after the objects being linked to
+    create a shared library])
+_LT_TAGDECL([], [postdep_objects], [1])
+_LT_TAGDECL([], [predeps], [1])
+_LT_TAGDECL([], [postdeps], [1])
+_LT_TAGDECL([], [compiler_lib_search_path], [1],
+    [The library search path used internally by the compiler when linking
+    a shared library])
+])# _LT_SYS_HIDDEN_LIBDEPS
+
+
+# _LT_LANG_F77_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for a Fortran 77 compiler are
+# suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_F77_CONFIG],
+[AC_LANG_PUSH(Fortran 77)
+if test -z "$F77" || test "X$F77" = "Xno"; then
+  _lt_disable_F77=yes
+fi
+
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for f77 test sources.
+ac_ext=f
+
+# Object file extension for compiled f77 test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the F77 compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_disable_F77" != yes; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="\
+      subroutine t
+      return
+      end
+"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code="\
+      program t
+      end
+"
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC="$CC"
+  lt_save_GCC=$GCC
+  CC=${F77-"f77"}
+  compiler=$CC
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+  GCC=$G77
+  if test -n "$compiler"; then
+    AC_MSG_CHECKING([if libtool supports shared libraries])
+    AC_MSG_RESULT([$can_build_shared])
+
+    AC_MSG_CHECKING([whether to build shared libraries])
+    test "$can_build_shared" = "no" && enable_shared=no
+
+    # On AIX, shared libraries and static libraries use the same namespace, and
+    # are all built from PIC.
+    case $host_os in
+      aix3*)
+        test "$enable_shared" = yes && enable_static=no
+        if test -n "$RANLIB"; then
+          archive_cmds="$archive_cmds~\$RANLIB \$lib"
+          postinstall_cmds='$RANLIB $lib'
+        fi
+        ;;
+      aix[[4-9]]*)
+	if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+	  test "$enable_shared" = yes && enable_static=no
+	fi
+        ;;
+    esac
+    AC_MSG_RESULT([$enable_shared])
+
+    AC_MSG_CHECKING([whether to build static libraries])
+    # Make sure either enable_shared or enable_static is yes.
+    test "$enable_shared" = yes || enable_static=yes
+    AC_MSG_RESULT([$enable_static])
+
+    _LT_TAGVAR(GCC, $1)="$G77"
+    _LT_TAGVAR(LD, $1)="$LD"
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  GCC=$lt_save_GCC
+  CC="$lt_save_CC"
+fi # test "$_lt_disable_F77" != yes
+
+AC_LANG_POP
+])# _LT_LANG_F77_CONFIG
+
+
+# _LT_LANG_FC_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for a Fortran compiler are
+# suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_FC_CONFIG],
+[AC_LANG_PUSH(Fortran)
+
+if test -z "$FC" || test "X$FC" = "Xno"; then
+  _lt_disable_FC=yes
+fi
+
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for fc test sources.
+ac_ext=${ac_fc_srcext-f}
+
+# Object file extension for compiled fc test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the FC compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test "$_lt_disable_FC" != yes; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="\
+      subroutine t
+      return
+      end
+"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code="\
+      program t
+      end
+"
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC="$CC"
+  lt_save_GCC=$GCC
+  CC=${FC-"f95"}
+  compiler=$CC
+  GCC=$ac_cv_fc_compiler_gnu
+
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+
+  if test -n "$compiler"; then
+    AC_MSG_CHECKING([if libtool supports shared libraries])
+    AC_MSG_RESULT([$can_build_shared])
+
+    AC_MSG_CHECKING([whether to build shared libraries])
+    test "$can_build_shared" = "no" && enable_shared=no
+
+    # On AIX, shared libraries and static libraries use the same namespace, and
+    # are all built from PIC.
+    case $host_os in
+      aix3*)
+        test "$enable_shared" = yes && enable_static=no
+        if test -n "$RANLIB"; then
+          archive_cmds="$archive_cmds~\$RANLIB \$lib"
+          postinstall_cmds='$RANLIB $lib'
+        fi
+        ;;
+      aix[[4-9]]*)
+	if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+	  test "$enable_shared" = yes && enable_static=no
+	fi
+        ;;
+    esac
+    AC_MSG_RESULT([$enable_shared])
+
+    AC_MSG_CHECKING([whether to build static libraries])
+    # Make sure either enable_shared or enable_static is yes.
+    test "$enable_shared" = yes || enable_static=yes
+    AC_MSG_RESULT([$enable_static])
+
+    _LT_TAGVAR(GCC, $1)="$ac_cv_fc_compiler_gnu"
+    _LT_TAGVAR(LD, $1)="$LD"
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_SYS_HIDDEN_LIBDEPS($1)
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  GCC=$lt_save_GCC
+  CC="$lt_save_CC"
+fi # test "$_lt_disable_FC" != yes
+
+AC_LANG_POP
+])# _LT_LANG_FC_CONFIG
+
+
+# _LT_LANG_GCJ_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for the GNU Java Compiler
+# are suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_GCJ_CONFIG],
+[AC_REQUIRE([LT_PROG_GCJ])dnl
+AC_LANG_SAVE
+
+# Source file extension for Java test sources.
+ac_ext=java
+
+# Object file extension for compiled Java test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="class foo {}"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='public class conftest { public static void main(String[[]] argv) {}; }'
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC="$CC"
+lt_save_GCC=$GCC
+GCC=yes
+CC=${GCJ-"gcj"}
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_TAGVAR(LD, $1)="$LD"
+_LT_CC_BASENAME([$compiler])
+
+# GCJ did not exist in the days when GCC did not implicitly link libc in,
+# so the link never needs an explicit -lc.
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+
+if test -n "$compiler"; then
+  _LT_COMPILER_NO_RTTI($1)
+  _LT_COMPILER_PIC($1)
+  _LT_COMPILER_C_O($1)
+  _LT_COMPILER_FILE_LOCKS($1)
+  _LT_LINKER_SHLIBS($1)
+  _LT_LINKER_HARDCODE_LIBPATH($1)
+
+  _LT_CONFIG($1)
+fi
+
+AC_LANG_RESTORE
+
+GCC=$lt_save_GCC
+CC="$lt_save_CC"
+])# _LT_LANG_GCJ_CONFIG
+
+
+# _LT_LANG_RC_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for the Windows resource compiler
+# are suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to `libtool'.
+m4_defun([_LT_LANG_RC_CONFIG],
+[AC_REQUIRE([LT_PROG_RC])dnl
+AC_LANG_SAVE
+
+# Source file extension for RC test sources.
+ac_ext=rc
+
+# Object file extension for compiled RC test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }'
+
+# Code to be used in simple link tests
+lt_simple_link_test_code="$lt_simple_compile_test_code"
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC="$CC"
+lt_save_GCC=$GCC
+GCC=
+CC=${RC-"windres"}
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_CC_BASENAME([$compiler])
+_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
+
+if test -n "$compiler"; then
+  :
+  _LT_CONFIG($1)
+fi
+
+GCC=$lt_save_GCC
+AC_LANG_RESTORE
+CC="$lt_save_CC"
+])# _LT_LANG_RC_CONFIG
+
+
+# LT_PROG_GCJ
+# -----------
+AC_DEFUN([LT_PROG_GCJ],
+[m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ],
+  [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ],
+    [AC_CHECK_TOOL(GCJ, gcj,)
+      test "x${GCJFLAGS+set}" = xset || GCJFLAGS="-g -O2"
+      AC_SUBST(GCJFLAGS)])])[]dnl
+])
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_GCJ], [LT_PROG_GCJ])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_GCJ], [])
+
+
+# LT_PROG_RC
+# ----------
+AC_DEFUN([LT_PROG_RC],
+[AC_CHECK_TOOL(RC, windres,)
+])
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_RC], [LT_PROG_RC])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_RC], [])
+
+
+# _LT_DECL_EGREP
+# --------------
+# If we don't have a new enough Autoconf to choose the best grep
+# available, choose the first one found in the user's PATH.
+m4_defun([_LT_DECL_EGREP],
+[AC_REQUIRE([AC_PROG_EGREP])dnl
+AC_REQUIRE([AC_PROG_FGREP])dnl
+test -z "$GREP" && GREP=grep
+_LT_DECL([], [GREP], [1], [A grep program that handles long lines])
+_LT_DECL([], [EGREP], [1], [An ERE matcher])
+_LT_DECL([], [FGREP], [1], [A literal string matcher])
+dnl Non-bleeding-edge autoconf doesn't subst GREP, so do it here too
+AC_SUBST([GREP])
+])
+
+
+# _LT_DECL_OBJDUMP
+# ----------------
+# If we don't have a new enough Autoconf to choose the best objdump
+# available, choose the first one found in the user's PATH.
+m4_defun([_LT_DECL_OBJDUMP],
+[AC_CHECK_TOOL(OBJDUMP, objdump, false)
+test -z "$OBJDUMP" && OBJDUMP=objdump
+_LT_DECL([], [OBJDUMP], [1], [An object symbol dumper])
+AC_SUBST([OBJDUMP])
+])
+
+
+# _LT_DECL_SED
+# ------------
+# Check for a fully functional sed program that truncates
+# as few characters as possible.  Prefer GNU sed if found.
+m4_defun([_LT_DECL_SED],
+[AC_PROG_SED
+test -z "$SED" && SED=sed
+Xsed="$SED -e 1s/^X//"
+_LT_DECL([], [SED], [1], [A sed program that does not truncate output])
+_LT_DECL([], [Xsed], ["\$SED -e 1s/^X//"],
+    [Sed that helps us avoid accidentally triggering echo(1) options like -n])
+])# _LT_DECL_SED
+
+m4_ifndef([AC_PROG_SED], [
+# NOTE: This macro has been submitted for inclusion into   #
+#  GNU Autoconf as AC_PROG_SED.  When it is available in   #
+#  a released version of Autoconf we should remove this    #
+#  macro and use it instead.                               #
+
+m4_defun([AC_PROG_SED],
+[AC_MSG_CHECKING([for a sed that does not truncate output])
+AC_CACHE_VAL(lt_cv_path_SED,
+[# Loop through the user's path and test for sed and gsed.
+# Then test each sed in that list for truncation.
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  for lt_ac_prog in sed gsed; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      if $as_executable_p "$as_dir/$lt_ac_prog$ac_exec_ext"; then
+        lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext"
+      fi
+    done
+  done
+done
+IFS=$as_save_IFS
+lt_ac_max=0
+lt_ac_count=0
+# Add /usr/xpg4/bin/sed as it is typically found on Solaris
+# along with /bin/sed that truncates output.
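+# Each pass of the loop below doubles conftest.in, so the single input
+# line grows as 10*2^N characters; a sed that truncates long lines fails
+# the cmp test and stops scoring.  The cap of roughly 10 doublings keeps
+# the input near the 10000 characters mentioned below.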
+for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do
+  test ! -f $lt_ac_sed && continue
+  cat /dev/null > conftest.in
+  lt_ac_count=0
+  echo $ECHO_N "0123456789$ECHO_C" >conftest.in
+  # Check for GNU sed and select it if it is found.
+  if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then
+    lt_cv_path_SED=$lt_ac_sed
+    break
+  fi
+  while true; do
+    cat conftest.in conftest.in >conftest.tmp
+    mv conftest.tmp conftest.in
+    cp conftest.in conftest.nl
+    echo >>conftest.nl
+    $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break
+    cmp -s conftest.out conftest.nl || break
+    # 10000 chars as input seems more than enough
+    test $lt_ac_count -gt 10 && break
+    lt_ac_count=`expr $lt_ac_count + 1`
+    if test $lt_ac_count -gt $lt_ac_max; then
+      lt_ac_max=$lt_ac_count
+      lt_cv_path_SED=$lt_ac_sed
+    fi
+  done
+done
+])
+SED=$lt_cv_path_SED
+AC_SUBST([SED])
+AC_MSG_RESULT([$SED])
+])#AC_PROG_SED
+])#m4_ifndef
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_SED], [AC_PROG_SED])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_SED], [])
+
+
+# _LT_CHECK_SHELL_FEATURES
+# ------------------------
+# Find out whether the shell is Bourne or XSI compatible,
+# or has some other useful features.
+m4_defun([_LT_CHECK_SHELL_FEATURES],
+[AC_MSG_CHECKING([whether the shell understands some XSI constructs])
+# Try some XSI features
+xsi_shell=no
+( _lt_dummy="a/b/c"
+  test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+      = c,a/b,, \
+    && eval 'test $(( 1 + 1 )) -eq 2 \
+    && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+  && xsi_shell=yes
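+# With _lt_dummy=a/b/c: ${_lt_dummy##*/} yields "c", ${_lt_dummy%/*}
+# yields "a/b", and ${_lt_dummy%"$_lt_dummy"} yields the empty string,
+# matching the expected "c,a/b,," above; the eval then exercises $(( ))
+# arithmetic and the ${#var} length operator ("a/b/c" is 5 characters).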
+AC_MSG_RESULT([$xsi_shell])
+_LT_CONFIG_LIBTOOL_INIT([xsi_shell='$xsi_shell'])
+
+AC_MSG_CHECKING([whether the shell understands "+="])
+lt_shell_append=no
+( foo=bar; set foo baz; eval "$[1]+=\$[2]" && test "$foo" = barbaz ) \
+    >/dev/null 2>&1 \
+  && lt_shell_append=yes
+AC_MSG_RESULT([$lt_shell_append])
+_LT_CONFIG_LIBTOOL_INIT([lt_shell_append='$lt_shell_append'])
+
+if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
+  lt_unset=unset
+else
+  lt_unset=false
+fi
+_LT_DECL([], [lt_unset], [0], [whether the shell understands "unset"])dnl
+
+# test EBCDIC or ASCII
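+# `echo X|tr X '\101'` prints "A" on an ASCII system, since octal 101
+# is "A" in ASCII; on an EBCDIC system that octet is not "A", so the
+# default branch below is taken.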
+case `echo X|tr X '\101'` in
+ A) # ASCII based system
+    # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
+  lt_SP2NL='tr \040 \012'
+  lt_NL2SP='tr \015\012 \040\040'
+  ;;
+ *) # EBCDIC based system
+  lt_SP2NL='tr \100 \n'
+  lt_NL2SP='tr \r\n \100\100'
+  ;;
+esac
+_LT_DECL([SP2NL], [lt_SP2NL], [1], [turn spaces into newlines])dnl
+_LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl
+])# _LT_CHECK_SHELL_FEATURES
+
+
+# _LT_PROG_XSI_SHELLFNS
+# ---------------------
+# Bourne and XSI compatible variants of some useful shell functions.
+m4_defun([_LT_PROG_XSI_SHELLFNS],
+[case $xsi_shell in
+  yes)
+    cat << \_LT_EOF >> "$cfgfile"
+
+# func_dirname file append nondir_replacement
+# Compute the dirname of FILE.  If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+func_dirname ()
+{
+  case ${1} in
+    */*) func_dirname_result="${1%/*}${2}" ;;
+    *  ) func_dirname_result="${3}" ;;
+  esac
+}
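+# Example (illustrative): func_dirname "src/foo.c" "" "." sets
+# func_dirname_result=src, while func_dirname "foo.c" "" "." sets it
+# to "." (the NONDIR_REPLACEMENT).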
+
+# func_basename file
+func_basename ()
+{
+  func_basename_result="${1##*/}"
+}
+
+# func_dirname_and_basename file append nondir_replacement
+# perform func_basename and func_dirname in a single function
+# call:
+#   dirname:  Compute the dirname of FILE.  If nonempty,
+#             add APPEND to the result, otherwise set result
+#             to NONDIR_REPLACEMENT.
+#             value returned in "$func_dirname_result"
+#   basename: Compute filename of FILE.
+#             value returned in "$func_basename_result"
+# Implementation must be kept synchronized with func_dirname
+# and func_basename. For efficiency, we do not delegate to
+# those functions but instead duplicate the functionality here.
+func_dirname_and_basename ()
+{
+  case ${1} in
+    */*) func_dirname_result="${1%/*}${2}" ;;
+    *  ) func_dirname_result="${3}" ;;
+  esac
+  func_basename_result="${1##*/}"
+}
+
+# func_stripname prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+func_stripname ()
+{
+  # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+  # positional parameters, so assign one to an ordinary parameter first.
+  func_stripname_result=${3}
+  func_stripname_result=${func_stripname_result#"${1}"}
+  func_stripname_result=${func_stripname_result%"${2}"}
+}
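+# Example (illustrative): func_stripname "lib" ".la" "libfoo.la" sets
+# func_stripname_result=foo.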
+
+# func_opt_split
+func_opt_split ()
+{
+  func_opt_split_opt=${1%%=*}
+  func_opt_split_arg=${1#*=}
+}
+
+# func_lo2o object
+func_lo2o ()
+{
+  case ${1} in
+    *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+    *)    func_lo2o_result=${1} ;;
+  esac
+}
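+# Example (illustrative): with objext=o, func_lo2o "foo.lo" sets
+# func_lo2o_result=foo.o; names without a .lo suffix pass through
+# unchanged.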
+
+# func_xform libobj-or-source
+func_xform ()
+{
+  func_xform_result=${1%.*}.lo
+}
+
+# func_arith arithmetic-term...
+func_arith ()
+{
+  func_arith_result=$(( $[*] ))
+}
+
+# func_len string
+# STRING may not start with a hyphen.
+func_len ()
+{
+  func_len_result=${#1}
+}
+
+_LT_EOF
+    ;;
+  *) # Bourne compatible functions.
+    cat << \_LT_EOF >> "$cfgfile"
+
+# func_dirname file append nondir_replacement
+# Compute the dirname of FILE.  If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+func_dirname ()
+{
+  # Extract subdirectory from the argument.
+  func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+  if test "X$func_dirname_result" = "X${1}"; then
+    func_dirname_result="${3}"
+  else
+    func_dirname_result="$func_dirname_result${2}"
+  fi
+}
+
+# func_basename file
+func_basename ()
+{
+  func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+}
+
+dnl func_dirname_and_basename
+dnl A portable version of this function is already defined in general.m4sh
+dnl so there is no need for it here.
+
+# func_stripname prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+func_stripname ()
+{
+  case ${2} in
+    .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+    *)  func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+  esac
+}
+
+# sed scripts:
+my_sed_long_opt='1s/^\(-[[^=]]*\)=.*/\1/;q'
+my_sed_long_arg='1s/^-[[^=]]*=//'
+
+# func_opt_split
+func_opt_split ()
+{
+  func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+  func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+}
+
+# func_lo2o object
+func_lo2o ()
+{
+  func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+}
+
+# func_xform libobj-or-source
+func_xform ()
+{
+  func_xform_result=`$ECHO "${1}" | $SED 's/\.[[^.]]*$/.lo/'`
+}
+
+# func_arith arithmetic-term...
+func_arith ()
+{
+  func_arith_result=`expr "$[@]"`
+}
+
+# func_len string
+# STRING may not start with a hyphen.
+func_len ()
+{
+  func_len_result=`expr "$[1]" : ".*" 2>/dev/null || echo $max_cmd_len`
+}
+
+_LT_EOF
+esac
+
+case $lt_shell_append in
+  yes)
+    cat << \_LT_EOF >> "$cfgfile"
+
+# func_append var value
+# Append VALUE to the end of shell variable VAR.
+func_append ()
+{
+  eval "$[1]+=\$[2]"
+}
+_LT_EOF
+    ;;
+  *)
+    cat << \_LT_EOF >> "$cfgfile"
+
+# func_append var value
+# Append VALUE to the end of shell variable VAR.
+func_append ()
+{
+  eval "$[1]=\$$[1]\$[2]"
+}
+
+_LT_EOF
+    ;;
+  esac
+])
+
+# Helper functions for option handling.                    -*- Autoconf -*-
+#
+#   Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation,
+#   Inc.
+#   Written by Gary V. Vaughan, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 7 ltoptions.m4
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])])
+
+
+# _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME)
+# ------------------------------------------
+m4_define([_LT_MANGLE_OPTION],
+[[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])])
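+dnl For example, _LT_MANGLE_OPTION([LT_INIT], [dlopen]) expands to
+dnl _LT_OPTION_LT_INIT__dlopen.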
+
+
+# _LT_SET_OPTION(MACRO-NAME, OPTION-NAME)
+# ---------------------------------------
+# Set option OPTION-NAME for macro MACRO-NAME, and if there is a
+# matching handler defined, dispatch to it.  Other OPTION-NAMEs are
+# saved as flags.
+m4_define([_LT_SET_OPTION],
+[m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl
+m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]),
+        _LT_MANGLE_DEFUN([$1], [$2]),
+    [m4_warning([Unknown $1 option `$2'])])[]dnl
+])
+
+
+# _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET])
+# ------------------------------------------------------------
+# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
+m4_define([_LT_IF_OPTION],
+[m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])])
+
+
+# _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET)
+# -------------------------------------------------------
+# Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME
+# are set.
+m4_define([_LT_UNLESS_OPTIONS],
+[m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
+	    [m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option),
+		      [m4_define([$0_found])])])[]dnl
+m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3
+])[]dnl
+])
+
+
+# _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST)
+# ----------------------------------------
+# OPTION-LIST is a space-separated list of Libtool options associated
+# with MACRO-NAME.  If any OPTION has a matching handler declared with
+# LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about
+# the unknown option and exit.
+m4_defun([_LT_SET_OPTIONS],
+[# Set options
+m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
+    [_LT_SET_OPTION([$1], _LT_Option)])
+
+m4_if([$1],[LT_INIT],[
+  dnl
+  dnl Simply set some default values (i.e. off) if boolean options were not
+  dnl specified:
+  _LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no
+  ])
+  _LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no
+  ])
+  dnl
+  dnl If no reference was made to various pairs of opposing options, then
+  dnl we run the default mode handler for the pair.  For example, if neither
+  dnl `shared' nor `disable-shared' was passed, we enable building of shared
+  dnl archives by default:
+  _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED])
+  _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC])
+  _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC])
+  _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install],
+  		   [_LT_ENABLE_FAST_INSTALL])
+  ])
+])# _LT_SET_OPTIONS
+
+
+
+# _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME)
+# -----------------------------------------
+m4_define([_LT_MANGLE_DEFUN],
+[[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])])
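+dnl For example, _LT_MANGLE_DEFUN([LT_INIT], [dlopen]) expands to
+dnl _LT_OPTION_DEFUN_LT_INIT__DLOPEN.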
+
+
+# LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE)
+# -----------------------------------------------
+m4_define([LT_OPTION_DEFINE],
+[m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl
+])# LT_OPTION_DEFINE
+
+
+# dlopen
+# ------
+LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes
+])
+
+AU_DEFUN([AC_LIBTOOL_DLOPEN],
+[_LT_SET_OPTION([LT_INIT], [dlopen])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `dlopen' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], [])
+
+
+# win32-dll
+# ---------
+# Declare package support for building win32 dll's.
+LT_OPTION_DEFINE([LT_INIT], [win32-dll],
+[enable_win32_dll=yes
+
+case $host in
+*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-cegcc*)
+  AC_CHECK_TOOL(AS, as, false)
+  AC_CHECK_TOOL(DLLTOOL, dlltool, false)
+  AC_CHECK_TOOL(OBJDUMP, objdump, false)
+  ;;
+esac
+
+test -z "$AS" && AS=as
+_LT_DECL([], [AS],      [1], [Assembler program])dnl
+
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+_LT_DECL([], [DLLTOOL], [1], [DLL creation program])dnl
+
+test -z "$OBJDUMP" && OBJDUMP=objdump
+_LT_DECL([], [OBJDUMP], [1], [Object dumper program])dnl
+])# win32-dll
+
+AU_DEFUN([AC_LIBTOOL_WIN32_DLL],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+_LT_SET_OPTION([LT_INIT], [win32-dll])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `win32-dll' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], [])
+
+
+# _LT_ENABLE_SHARED([DEFAULT])
+# ----------------------------
+# implement the --enable-shared flag, and support the `shared' and
+# `disable-shared' LT_INIT options.
+# DEFAULT is either `yes' or `no'.  If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_SHARED],
+[m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([shared],
+    [AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@],
+	[build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_shared=yes ;;
+    no) enable_shared=no ;;
+    *)
+      enable_shared=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_shared=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [enable_shared=]_LT_ENABLE_SHARED_DEFAULT)
+
+    _LT_DECL([build_libtool_libs], [enable_shared], [0],
+	[Whether or not to build shared libraries])
+])# _LT_ENABLE_SHARED
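+
+# For instance (an illustrative invocation, not part of this file):
+#   ./configure --enable-shared=foo,bar
+# enables shared libraries only when $PACKAGE is `foo' or `bar'.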
+
+LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])])
+
+# Old names:
+AC_DEFUN([AC_ENABLE_SHARED],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared])
+])
+
+AC_DEFUN([AC_DISABLE_SHARED],
+[_LT_SET_OPTION([LT_INIT], [disable-shared])
+])
+
+AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)])
+AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_ENABLE_SHARED], [])
+dnl AC_DEFUN([AM_DISABLE_SHARED], [])
+
+
+
+# _LT_ENABLE_STATIC([DEFAULT])
+# ----------------------------
+# implement the --enable-static flag, and support the `static' and
+# `disable-static' LT_INIT options.
+# DEFAULT is either `yes' or `no'.  If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_STATIC],
+[m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([static],
+    [AS_HELP_STRING([--enable-static@<:@=PKGS@:>@],
+	[build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_static=yes ;;
+    no) enable_static=no ;;
+    *)
+      enable_static=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_static=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [enable_static=]_LT_ENABLE_STATIC_DEFAULT)
+
+    _LT_DECL([build_old_libs], [enable_static], [0],
+	[Whether or not to build static libraries])
+])# _LT_ENABLE_STATIC
+
+LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])])
+
+# Old names:
+AC_DEFUN([AC_ENABLE_STATIC],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static])
+])
+
+AC_DEFUN([AC_DISABLE_STATIC],
+[_LT_SET_OPTION([LT_INIT], [disable-static])
+])
+
+AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)])
+AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_ENABLE_STATIC], [])
+dnl AC_DEFUN([AM_DISABLE_STATIC], [])
+
+
+
+# _LT_ENABLE_FAST_INSTALL([DEFAULT])
+# ----------------------------------
+# implement the --enable-fast-install flag, and support the `fast-install'
+# and `disable-fast-install' LT_INIT options.
+# DEFAULT is either `yes' or `no'.  If omitted, it defaults to `yes'.
+m4_define([_LT_ENABLE_FAST_INSTALL],
+[m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([fast-install],
+    [AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@],
+    [optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_fast_install=yes ;;
+    no) enable_fast_install=no ;;
+    *)
+      enable_fast_install=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_fast_install=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac],
+    [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT)
+
+_LT_DECL([fast_install], [enable_fast_install], [0],
+	 [Whether or not to optimize for fast installation])dnl
+])# _LT_ENABLE_FAST_INSTALL
+
+LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])])
+
+# Old names:
+AU_DEFUN([AC_ENABLE_FAST_INSTALL],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you put
+the `fast-install' option into LT_INIT's first parameter.])
+])
+
+AU_DEFUN([AC_DISABLE_FAST_INSTALL],
+[_LT_SET_OPTION([LT_INIT], [disable-fast-install])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you put
+the `disable-fast-install' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], [])
+dnl AC_DEFUN([AC_DISABLE_FAST_INSTALL], [])
+
+
+# _LT_WITH_PIC([MODE])
+# --------------------
+# implement the --with-pic flag, and support the `pic-only' and `no-pic'
+# LT_INIT options.
+# MODE is either `yes' or `no'.  If omitted, pic_mode is left at
+# `default', which builds both PIC and non-PIC objects.
+m4_define([_LT_WITH_PIC],
+[AC_ARG_WITH([pic],
+    [AS_HELP_STRING([--with-pic],
+	[try to use only PIC/non-PIC objects @<:@default=use both@:>@])],
+    [pic_mode="$withval"],
+    [pic_mode=default])
+
+test -z "$pic_mode" && pic_mode=m4_default([$1], [default])
+
+_LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl
+])# _LT_WITH_PIC
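+
+# For instance (an illustrative invocation, not part of this file):
+#   ./configure --with-pic
+# sets pic_mode=yes, so only PIC objects are built; omitting the flag
+# leaves pic_mode=default, which builds both kinds of objects.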
+
+LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])])
+LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])])
+
+# Old name:
+AU_DEFUN([AC_LIBTOOL_PICMODE],
+[_LT_SET_OPTION([LT_INIT], [pic-only])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the `pic-only' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_PICMODE], [])
+
+
+m4_define([_LTDL_MODE], [])
+LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive],
+		 [m4_define([_LTDL_MODE], [nonrecursive])])
+LT_OPTION_DEFINE([LTDL_INIT], [recursive],
+		 [m4_define([_LTDL_MODE], [recursive])])
+LT_OPTION_DEFINE([LTDL_INIT], [subproject],
+		 [m4_define([_LTDL_MODE], [subproject])])
+
+m4_define([_LTDL_TYPE], [])
+LT_OPTION_DEFINE([LTDL_INIT], [installable],
+		 [m4_define([_LTDL_TYPE], [installable])])
+LT_OPTION_DEFINE([LTDL_INIT], [convenience],
+		 [m4_define([_LTDL_TYPE], [convenience])])
+
+# ltsugar.m4 -- libtool m4 base layer.                         -*-Autoconf-*-
+#
+# Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
+# Written by Gary V. Vaughan, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 6 ltsugar.m4
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTSUGAR_VERSION], [m4_if([0.1])])
+
+
+# lt_join(SEP, ARG1, [ARG2...])
+# -----------------------------
+# Produce ARG1SEPARG2...SEPARGn, omitting [] arguments and their
+# associated separator.
+# Needed until we can rely on m4_join from Autoconf 2.62, since all earlier
+# versions in m4sugar had bugs.
+m4_define([lt_join],
+[m4_if([$#], [1], [],
+       [$#], [2], [[$2]],
+       [m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift(m4_shift($@)))])])
+m4_define([_lt_join],
+[m4_if([$#$2], [2], [],
+       [m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift(m4_shift($@)))])])
+
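+# For example (illustrative only):
+#   lt_join([,], [a], [], [b])
+# expands to `a,b'; the empty argument and its separator are omitted.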
+
+# lt_car(LIST)
+# lt_cdr(LIST)
+# ------------
+# Manipulate m4 lists.
+# These macros are necessary as long as we still need to support
+# Autoconf 2.59, which quotes differently.
+m4_define([lt_car], [[$1]])
+m4_define([lt_cdr],
+[m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])],
+       [$#], 1, [],
+       [m4_dquote(m4_shift($@))])])
+m4_define([lt_unquote], $1)
+
+
+# lt_append(MACRO-NAME, STRING, [SEPARATOR])
+# ------------------------------------------
+# Redefine MACRO-NAME to hold its former content plus `SEPARATOR'`STRING'.
+# Note that neither SEPARATOR nor STRING are expanded; they are appended
+# to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked).
+# No SEPARATOR is output if MACRO-NAME was previously undefined (as
+# opposed to defined but empty).
+#
+# This macro is needed until we can rely on Autoconf 2.62, since earlier
+# versions of m4sugar mistakenly expanded SEPARATOR but not STRING.
+m4_define([lt_append],
+[m4_define([$1],
+	   m4_ifdef([$1], [m4_defn([$1])[$3]])[$2])])
+
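+# For example (illustrative only; `my_list' is a hypothetical macro):
+#   lt_append([my_list], [foo])
+#   lt_append([my_list], [bar], [, ])
+# leaves my_list defined as `foo, bar'.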
+
+
+# lt_combine(SEP, PREFIX-LIST, INFIX, SUFFIX1, [SUFFIX2...])
+# ----------------------------------------------------------
+# Produce a SEP delimited list of all paired combinations of elements of
+# PREFIX-LIST with SUFFIX1 through SUFFIXn.  Each element of the list
+# has the form PREFIXmINFIXSUFFIXn.
+# Needed until we can rely on m4_combine added in Autoconf 2.62.
+m4_define([lt_combine],
+[m4_if(m4_eval([$# > 3]), [1],
+       [m4_pushdef([_Lt_sep], [m4_define([_Lt_sep], m4_defn([lt_car]))])]]dnl
+[[m4_foreach([_Lt_prefix], [$2],
+	     [m4_foreach([_Lt_suffix],
+		]m4_dquote(m4_dquote(m4_shift(m4_shift(m4_shift($@)))))[,
+	[_Lt_sep([$1])[]m4_defn([_Lt_prefix])[$3]m4_defn([_Lt_suffix])])])])])
+
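+# For example (illustrative only):
+#   lt_combine([, ], [[a], [b]], [-], [1], [2])
+# expands to `a-1, a-2, b-1, b-2'.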
+
+# lt_if_append_uniq(MACRO-NAME, VARNAME, [SEPARATOR], [UNIQ], [NOT-UNIQ])
+# -----------------------------------------------------------------------
+# Iff MACRO-NAME does not yet contain VARNAME, then append it (delimited
+# by SEPARATOR if supplied) and expand UNIQ, else NOT-UNIQ.
+m4_define([lt_if_append_uniq],
+[m4_ifdef([$1],
+	  [m4_if(m4_index([$3]m4_defn([$1])[$3], [$3$2$3]), [-1],
+		 [lt_append([$1], [$2], [$3])$4],
+		 [$5])],
+	  [lt_append([$1], [$2], [$3])$4])])
+
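+# For example (illustrative only; `my_flags' is a hypothetical macro):
+#   lt_if_append_uniq([my_flags], [-lm], [ ], [added], [skipped])
+# appends `-lm' and expands to `added' on the first call, then expands
+# to `skipped' on an identical second call.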
+
+# lt_dict_add(DICT, KEY, VALUE)
+# -----------------------------
+m4_define([lt_dict_add],
+[m4_define([$1($2)], [$3])])
+
+
+# lt_dict_add_subkey(DICT, KEY, SUBKEY, VALUE)
+# --------------------------------------------
+m4_define([lt_dict_add_subkey],
+[m4_define([$1($2:$3)], [$4])])
+
+
+# lt_dict_fetch(DICT, KEY, [SUBKEY])
+# ----------------------------------
+m4_define([lt_dict_fetch],
+[m4_ifval([$3],
+	m4_ifdef([$1($2:$3)], [m4_defn([$1($2:$3)])]),
+    m4_ifdef([$1($2)], [m4_defn([$1($2)])]))])
+
+
+# lt_if_dict_fetch(DICT, KEY, [SUBKEY], VALUE, IF-TRUE, [IF-FALSE])
+# -----------------------------------------------------------------
+m4_define([lt_if_dict_fetch],
+[m4_if(lt_dict_fetch([$1], [$2], [$3]), [$4],
+	[$5],
+    [$6])])
+
+
+# lt_dict_filter(DICT, [SUBKEY], VALUE, [SEPARATOR], KEY, [...])
+# --------------------------------------------------------------
+m4_define([lt_dict_filter],
+[m4_if([$5], [], [],
+  [lt_join(m4_quote(m4_default([$4], [[, ]])),
+           lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_key, lt_car([m4_shiftn(4, $@)]),
+		      [lt_if_dict_fetch([$1], _Lt_key, [$2], [$3], [_Lt_key ])])))))])[]dnl
+])
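+
+# For example (illustrative only; `my_dict' is a hypothetical dict):
+#   lt_dict_add_subkey([my_dict], [gcc], [flag], [-O2])
+#   lt_dict_fetch([my_dict], [gcc], [flag])
+# where the second call expands to `-O2'.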
+
+# ltversion.m4 -- version numbers			-*- Autoconf -*-
+#
+#   Copyright (C) 2004 Free Software Foundation, Inc.
+#   Written by Scott James Remnant, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# Generated from ltversion.in.
+
+# serial 3175 ltversion.m4
+# This file is part of GNU Libtool
+
+m4_define([LT_PACKAGE_VERSION], [2.2.10])
+m4_define([LT_PACKAGE_REVISION], [1.3175])
+
+AC_DEFUN([LTVERSION_VERSION],
+[macro_version='2.2.10'
+macro_revision='1.3175'
+_LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?])
+_LT_DECL(, macro_revision, 0)
+])
+
+# lt~obsolete.m4 -- aclocal satisfying obsolete definitions.    -*-Autoconf-*-
+#
+#   Copyright (C) 2004, 2005, 2007, 2009 Free Software Foundation, Inc.
+#   Written by Scott James Remnant, 2004.
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 5 lt~obsolete.m4
+
+# These exist entirely to fool aclocal when bootstrapping libtool.
+#
+# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN)
+# which have later been changed to m4_define as they aren't part of the
+# exported API, or moved to Autoconf or Automake where they belong.
+#
+# The trouble is, aclocal is a bit thick.  It'll see the old AC_DEFUN
+# in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us
+# using a macro with the same name in our local m4/libtool.m4 it'll
+# pull the old libtool.m4 in (it doesn't see our shiny new m4_define
+# and doesn't know about Autoconf macros at all.)
+#
+# So we provide this file, which has a silly filename so it's always
+# included after everything else.  This provides aclocal with the
+# AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything
+# because those macros already exist, or will be overwritten later.
+# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6. 
+#
+# Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here.
+# Yes, that means every name once taken will need to remain here until
+# we give up compatibility with versions before 1.7, at which point
+# we need to keep only those names which we still refer to.
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])])
+
+m4_ifndef([AC_LIBTOOL_LINKER_OPTION],	[AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])])
+m4_ifndef([AC_PROG_EGREP],		[AC_DEFUN([AC_PROG_EGREP])])
+m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH],	[AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])])
+m4_ifndef([_LT_AC_SHELL_INIT],		[AC_DEFUN([_LT_AC_SHELL_INIT])])
+m4_ifndef([_LT_AC_SYS_LIBPATH_AIX],	[AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])])
+m4_ifndef([_LT_PROG_LTMAIN],		[AC_DEFUN([_LT_PROG_LTMAIN])])
+m4_ifndef([_LT_AC_TAGVAR],		[AC_DEFUN([_LT_AC_TAGVAR])])
+m4_ifndef([AC_LTDL_ENABLE_INSTALL],	[AC_DEFUN([AC_LTDL_ENABLE_INSTALL])])
+m4_ifndef([AC_LTDL_PREOPEN],		[AC_DEFUN([AC_LTDL_PREOPEN])])
+m4_ifndef([_LT_AC_SYS_COMPILER],	[AC_DEFUN([_LT_AC_SYS_COMPILER])])
+m4_ifndef([_LT_AC_LOCK],		[AC_DEFUN([_LT_AC_LOCK])])
+m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE],	[AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])])
+m4_ifndef([_LT_AC_TRY_DLOPEN_SELF],	[AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])])
+m4_ifndef([AC_LIBTOOL_PROG_CC_C_O],	[AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])])
+m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])])
+m4_ifndef([AC_LIBTOOL_OBJDIR],		[AC_DEFUN([AC_LIBTOOL_OBJDIR])])
+m4_ifndef([AC_LTDL_OBJDIR],		[AC_DEFUN([AC_LTDL_OBJDIR])])
+m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])])
+m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP],	[AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])])
+m4_ifndef([AC_PATH_MAGIC],		[AC_DEFUN([AC_PATH_MAGIC])])
+m4_ifndef([AC_PROG_LD_GNU],		[AC_DEFUN([AC_PROG_LD_GNU])])
+m4_ifndef([AC_PROG_LD_RELOAD_FLAG],	[AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])])
+m4_ifndef([AC_DEPLIBS_CHECK_METHOD],	[AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])])
+m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])])
+m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])])
+m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])])
+m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS],	[AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])])
+m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP],	[AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])])
+m4_ifndef([LT_AC_PROG_EGREP],		[AC_DEFUN([LT_AC_PROG_EGREP])])
+m4_ifndef([LT_AC_PROG_SED],		[AC_DEFUN([LT_AC_PROG_SED])])
+m4_ifndef([_LT_CC_BASENAME],		[AC_DEFUN([_LT_CC_BASENAME])])
+m4_ifndef([_LT_COMPILER_BOILERPLATE],	[AC_DEFUN([_LT_COMPILER_BOILERPLATE])])
+m4_ifndef([_LT_LINKER_BOILERPLATE],	[AC_DEFUN([_LT_LINKER_BOILERPLATE])])
+m4_ifndef([_AC_PROG_LIBTOOL],		[AC_DEFUN([_AC_PROG_LIBTOOL])])
+m4_ifndef([AC_LIBTOOL_SETUP],		[AC_DEFUN([AC_LIBTOOL_SETUP])])
+m4_ifndef([_LT_AC_CHECK_DLFCN],		[AC_DEFUN([_LT_AC_CHECK_DLFCN])])
+m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER],	[AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])])
+m4_ifndef([_LT_AC_TAGCONFIG],		[AC_DEFUN([_LT_AC_TAGCONFIG])])
+m4_ifndef([AC_DISABLE_FAST_INSTALL],	[AC_DEFUN([AC_DISABLE_FAST_INSTALL])])
+m4_ifndef([_LT_AC_LANG_CXX],		[AC_DEFUN([_LT_AC_LANG_CXX])])
+m4_ifndef([_LT_AC_LANG_F77],		[AC_DEFUN([_LT_AC_LANG_F77])])
+m4_ifndef([_LT_AC_LANG_GCJ],		[AC_DEFUN([_LT_AC_LANG_GCJ])])
+m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])])
+m4_ifndef([_LT_AC_LANG_C_CONFIG],	[AC_DEFUN([_LT_AC_LANG_C_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])])
+m4_ifndef([_LT_AC_LANG_CXX_CONFIG],	[AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])])
+m4_ifndef([_LT_AC_LANG_F77_CONFIG],	[AC_DEFUN([_LT_AC_LANG_F77_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])])
+m4_ifndef([_LT_AC_LANG_GCJ_CONFIG],	[AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG],	[AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])])
+m4_ifndef([_LT_AC_LANG_RC_CONFIG],	[AC_DEFUN([_LT_AC_LANG_RC_CONFIG])])
+m4_ifndef([AC_LIBTOOL_CONFIG],		[AC_DEFUN([AC_LIBTOOL_CONFIG])])
+m4_ifndef([_LT_AC_FILE_LTDLL_C],	[AC_DEFUN([_LT_AC_FILE_LTDLL_C])])
+m4_ifndef([_LT_REQUIRED_DARWIN_CHECKS],	[AC_DEFUN([_LT_REQUIRED_DARWIN_CHECKS])])
+m4_ifndef([_LT_AC_PROG_CXXCPP],		[AC_DEFUN([_LT_AC_PROG_CXXCPP])])
+m4_ifndef([_LT_PREPARE_SED_QUOTE_VARS],	[AC_DEFUN([_LT_PREPARE_SED_QUOTE_VARS])])
+m4_ifndef([_LT_PROG_ECHO_BACKSLASH],	[AC_DEFUN([_LT_PROG_ECHO_BACKSLASH])])
+m4_ifndef([_LT_PROG_F77],		[AC_DEFUN([_LT_PROG_F77])])
+m4_ifndef([_LT_PROG_FC],		[AC_DEFUN([_LT_PROG_FC])])
+m4_ifndef([_LT_PROG_CXX],		[AC_DEFUN([_LT_PROG_CXX])])
+
+# Copyright (C) 2002, 2003, 2005, 2006  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_AUTOMAKE_VERSION(VERSION)
+# ----------------------------
+# Automake X.Y traces this macro to ensure aclocal.m4 has been
+# generated from the m4 files accompanying Automake X.Y.
+# (This private macro should not be called outside this file.)
+AC_DEFUN([AM_AUTOMAKE_VERSION],
+[am__api_version='1.10'
+dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to
+dnl require some minimum version.  Point them to the right macro.
+m4_if([$1], [1.10], [],
+      [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl
+])
+
+# _AM_AUTOCONF_VERSION(VERSION)
+# -----------------------------
+# aclocal traces this macro to find the Autoconf version.
+# This is a private macro too.  Using m4_define simplifies
+# the logic in aclocal, which can simply ignore this definition.
+m4_define([_AM_AUTOCONF_VERSION], [])
+
+# AM_SET_CURRENT_AUTOMAKE_VERSION
+# -------------------------------
+# Call AM_AUTOMAKE_VERSION and _AM_AUTOCONF_VERSION so they can be traced.
+# This function is AC_REQUIREd by AM_INIT_AUTOMAKE.
+AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION],
+[AM_AUTOMAKE_VERSION([1.10])dnl
+_AM_AUTOCONF_VERSION(m4_PACKAGE_VERSION)])
+
+# AM_AUX_DIR_EXPAND                                         -*- Autoconf -*-
+
+# Copyright (C) 2001, 2003, 2005  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets
+# $ac_aux_dir to `$srcdir/foo'.  In other projects, it is set to
+# `$srcdir', `$srcdir/..', or `$srcdir/../..'.
+#
+# Of course, Automake must honor this variable whenever it calls a
+# tool from the auxiliary directory.  The problem is that $srcdir (and
+# therefore $ac_aux_dir as well) can be either absolute or relative,
+# depending on how configure is run.  This is pretty annoying, since
+# it makes $ac_aux_dir quite unusable in subdirectories: in the top
+# source directory, any form will work fine, but in subdirectories a
+# relative path needs to be adjusted first.
+#
+# $ac_aux_dir/missing
+#    fails when called from a subdirectory if $ac_aux_dir is relative
+# $top_srcdir/$ac_aux_dir/missing
+#    fails if $ac_aux_dir is absolute,
+#    fails when called from a subdirectory in a VPATH build with
+#          a relative $ac_aux_dir
+#
+# The reason for the latter failure is that $top_srcdir and $ac_aux_dir
+# are both prefixed by $srcdir.  In an in-source build this is usually
+# harmless because $srcdir is `.', but things will break when you
+# start a VPATH build or use an absolute $srcdir.
+#
+# So we could use something similar to $top_srcdir/$ac_aux_dir/missing,
+# iff we strip the leading $srcdir from $ac_aux_dir.  That would be:
+#   am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"`
+# and then we would define $MISSING as
+#   MISSING="\${SHELL} $am_aux_dir/missing"
+# This will work as long as MISSING is not called from configure, because
+# unfortunately $(top_srcdir) has no meaning in configure.
+# However there are other variables, like CC, which are often used in
+# configure, and could therefore not use this "fixed" $ac_aux_dir.
+#
+# Another solution, used here, is to always expand $ac_aux_dir to an
+# absolute PATH.  The drawback is that using absolute paths prevents a
+# configured tree from being moved without reconfiguration.
+
+AC_DEFUN([AM_AUX_DIR_EXPAND],
+[dnl Rely on autoconf to set up CDPATH properly.
+AC_PREREQ([2.50])dnl
+# expand $ac_aux_dir to an absolute path
+am_aux_dir=`cd $ac_aux_dir && pwd`
+])
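+
+# For instance (an illustrative sketch with hypothetical paths), after
+# AC_CONFIG_AUX_DIR([build-aux]) with srcdir `/home/user/pkg', this
+# yields:
+#   am_aux_dir=/home/user/pkg/build-aux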
+
+# AM_CONDITIONAL                                            -*- Autoconf -*-
+
+# Copyright (C) 1997, 2000, 2001, 2003, 2004, 2005, 2006
+# Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 8
+
+# AM_CONDITIONAL(NAME, SHELL-CONDITION)
+# -------------------------------------
+# Define a conditional.
+AC_DEFUN([AM_CONDITIONAL],
+[AC_PREREQ(2.52)dnl
+ ifelse([$1], [TRUE],  [AC_FATAL([$0: invalid condition: $1])],
+	[$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl
+AC_SUBST([$1_TRUE])dnl
+AC_SUBST([$1_FALSE])dnl
+_AM_SUBST_NOTMAKE([$1_TRUE])dnl
+_AM_SUBST_NOTMAKE([$1_FALSE])dnl
+if $2; then
+  $1_TRUE=
+  $1_FALSE='#'
+else
+  $1_TRUE='#'
+  $1_FALSE=
+fi
+AC_CONFIG_COMMANDS_PRE(
+[if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then
+  AC_MSG_ERROR([[conditional "$1" was never defined.
+Usually this means the macro was only invoked conditionally.]])
+fi])])
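+
+# A typical use (an illustrative sketch; the names are hypothetical):
+#   configure.ac:  AM_CONDITIONAL([DEBUG], [test "x$enable_debug" = xyes])
+#   Makefile.am:   if DEBUG
+#                  AM_CFLAGS = -g
+#                  endif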
+
+# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
+# Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 9
+
+# There are a few dirty hacks below to avoid letting `AC_PROG_CC' be
+# written in clear, in which case automake, when reading aclocal.m4,
+# will think it sees a *use*, and therefore will trigger all its
+# C support machinery.  Also note that it means that autoscan, seeing
+# CC etc. in the Makefile, will ask for an AC_PROG_CC use...
+
+
+# _AM_DEPENDENCIES(NAME)
+# ----------------------
+# See how the compiler implements dependency checking.
+# NAME is "CC", "CXX", "GCJ", or "OBJC".
+# We try a few techniques and use that to set a single cache variable.
+#
+# We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was
+# modified to invoke _AM_DEPENDENCIES(CC); we would have a circular
+# dependency, and given that the user is not expected to run this macro,
+# just rely on AC_PROG_CC.
+AC_DEFUN([_AM_DEPENDENCIES],
+[AC_REQUIRE([AM_SET_DEPDIR])dnl
+AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl
+AC_REQUIRE([AM_MAKE_INCLUDE])dnl
+AC_REQUIRE([AM_DEP_TRACK])dnl
+
+ifelse([$1], CC,   [depcc="$CC"   am_compiler_list=],
+       [$1], CXX,  [depcc="$CXX"  am_compiler_list=],
+       [$1], OBJC, [depcc="$OBJC" am_compiler_list='gcc3 gcc'],
+       [$1], UPC,  [depcc="$UPC"  am_compiler_list=],
+       [$1], GCJ,  [depcc="$GCJ"  am_compiler_list='gcc3 gcc'],
+                   [depcc="$$1"   am_compiler_list=])
+
+AC_CACHE_CHECK([dependency style of $depcc],
+               [am_cv_$1_dependencies_compiler_type],
+[if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
+  # We make a subdir and do the tests there.  Otherwise we can end up
+  # making bogus files that we don't know about and never remove.  For
+  # instance it was reported that on HP-UX the gcc test will end up
+  # making a dummy file named `D' -- because `-MD' means `put the output
+  # in D'.
+  mkdir conftest.dir
+  # Copy depcomp to subdir because otherwise we won't find it if we're
+  # using a relative directory.
+  cp "$am_depcomp" conftest.dir
+  cd conftest.dir
+  # We will build objects and dependencies in a subdirectory because
+  # it helps to detect inapplicable dependency modes.  For instance
+  # both Tru64's cc and ICC support -MD to output dependencies as a
+  # side effect of compilation, but ICC will put the dependencies in
+  # the current directory while Tru64 will put them in the object
+  # directory.
+  mkdir sub
+
+  am_cv_$1_dependencies_compiler_type=none
+  if test "$am_compiler_list" = ""; then
+     am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp`
+  fi
+  for depmode in $am_compiler_list; do
+    # Set up a source with many dependencies, because some compilers
+    # like to wrap large dependency lists on column 80 (with \), and
+    # we should not choose a depcomp mode which is confused by this.
+    #
+    # We need to recreate these files for each test, as the compiler may
+    # overwrite some of them when testing with obscure command lines.
+    # This happens at least with the AIX C compiler.
+    : > sub/conftest.c
+    for i in 1 2 3 4 5 6; do
+      echo '#include "conftst'$i'.h"' >> sub/conftest.c
+      # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with
+      # Solaris 8's {/usr,}/bin/sh.
+      touch sub/conftst$i.h
+    done
+    echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
+
+    case $depmode in
+    nosideeffect)
+      # after this tag, mechanisms are not by side-effect, so they'll
+      # only be used when explicitly requested
+      if test "x$enable_dependency_tracking" = xyes; then
+	continue
+      else
+	break
+      fi
+      ;;
+    none) break ;;
+    esac
+    # We check with `-c' and `-o' for the sake of the "dashmstdout"
+    # mode.  It turns out that the SunPro C++ compiler does not properly
+    # handle `-M -o', and we need to detect this.
+    if depmode=$depmode \
+       source=sub/conftest.c object=sub/conftest.${OBJEXT-o} \
+       depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
+       $SHELL ./depcomp $depcc -c -o sub/conftest.${OBJEXT-o} sub/conftest.c \
+         >/dev/null 2>conftest.err &&
+       grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep sub/conftest.${OBJEXT-o} sub/conftest.Po > /dev/null 2>&1 &&
+       ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
+      # icc doesn't choke on unknown options; it will just issue warnings
+      # or remarks (even with -Werror).  So we grep stderr for any message
+      # that says an option was ignored or not supported.
+      # When given -MP, icc 7.0 and 7.1 complain thusly:
+      #   icc: Command line warning: ignoring option '-M'; no argument required
+      # The diagnosis changed in icc 8.0:
+      #   icc: Command line remark: option '-MP' not supported
+      if (grep 'ignoring option' conftest.err ||
+          grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
+        am_cv_$1_dependencies_compiler_type=$depmode
+        break
+      fi
+    fi
+  done
+
+  cd ..
+  rm -rf conftest.dir
+else
+  am_cv_$1_dependencies_compiler_type=none
+fi
+])
+AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type])
+AM_CONDITIONAL([am__fastdep$1], [
+  test "x$enable_dependency_tracking" != xno \
+  && test "$am_cv_$1_dependencies_compiler_type" = gcc3])
+])
+
+
+# AM_SET_DEPDIR
+# -------------
+# Choose a directory name for dependency files.
+# This macro is AC_REQUIREd in _AM_DEPENDENCIES
+AC_DEFUN([AM_SET_DEPDIR],
+[AC_REQUIRE([AM_SET_LEADING_DOT])dnl
+AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl
+])
+
+
+# AM_DEP_TRACK
+# ------------
+AC_DEFUN([AM_DEP_TRACK],
+[AC_ARG_ENABLE(dependency-tracking,
+[  --disable-dependency-tracking  speeds up one-time build
+  --enable-dependency-tracking   do not reject slow dependency extractors])
+if test "x$enable_dependency_tracking" != xno; then
+  am_depcomp="$ac_aux_dir/depcomp"
+  AMDEPBACKSLASH='\'
+fi
+AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno])
+AC_SUBST([AMDEPBACKSLASH])dnl
+_AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl
+])
+
+# Generate code to set up dependency tracking.              -*- Autoconf -*-
+
+# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005
+# Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 3
+
+# _AM_OUTPUT_DEPENDENCY_COMMANDS
+# ------------------------------
+AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS],
+[for mf in $CONFIG_FILES; do
+  # Strip MF so we end up with the name of the file.
+  mf=`echo "$mf" | sed -e 's/:.*$//'`
+  # Check whether this is an Automake generated Makefile or not.
+  # We used to match only the files named `Makefile.in', but
+  # some people rename them; so instead we look at the file content.
+  # Grep'ing the first line is not enough: some people post-process
+  # each Makefile.in and add a new line on top of each file to say so.
+  # Grep'ing the whole file is not good either: AIX grep has a line
+  # limit of 2048, but all seds we know of understand at least 4000.
+  if sed 10q "$mf" | grep '^#.*generated by automake' > /dev/null 2>&1; then
+    dirpart=`AS_DIRNAME("$mf")`
+  else
+    continue
+  fi
+  # Extract the definition of DEPDIR, am__include, and am__quote
+  # from the Makefile without running `make'.
+  DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
+  test -z "$DEPDIR" && continue
+  am__include=`sed -n 's/^am__include = //p' < "$mf"`
+  test -z "am__include" && continue
+  am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
+  # When using ansi2knr, U may be empty or an underscore; expand it
+  U=`sed -n 's/^U = //p' < "$mf"`
+  # Find all dependency output files, they are included files with
+  # $(DEPDIR) in their names.  We invoke sed twice because it is the
+  # simplest approach to changing $(DEPDIR) to its actual value in the
+  # expansion.
+  for file in `sed -n "
+    s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
+       sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do
+    # Make sure the directory exists.
+    test -f "$dirpart/$file" && continue
+    fdir=`AS_DIRNAME(["$file"])`
+    AS_MKDIR_P([$dirpart/$fdir])
+    # echo "creating $dirpart/$file"
+    echo '# dummy' > "$dirpart/$file"
+  done
+done
+])# _AM_OUTPUT_DEPENDENCY_COMMANDS
+
+
+# AM_OUTPUT_DEPENDENCY_COMMANDS
+# -----------------------------
+# This macro should only be invoked once -- use via AC_REQUIRE.
+#
+# This code is only required when automatic dependency tracking
+# is enabled.  FIXME.  This creates each `.P' file that we will
+# need in order to bootstrap the dependency handling code.
+AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS],
+[AC_CONFIG_COMMANDS([depfiles],
+     [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS],
+     [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"])
+])
+
+# Do all the work for Automake.                             -*- Autoconf -*-
+
+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
+# 2005, 2006 Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 12
+
+# This macro actually does too much.  Some checks are only needed if
+# your package does certain things.  But this isn't really a big deal.
+
+# AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE])
+# AM_INIT_AUTOMAKE([OPTIONS])
+# -----------------------------------------------
+# The call with PACKAGE and VERSION arguments is the old style
+# call (pre autoconf-2.50), which is being phased out.  PACKAGE
+# and VERSION should now be passed to AC_INIT and removed from
+# the call to AM_INIT_AUTOMAKE.
+# We support both call styles for the transition.  After
+# the next Automake release, Autoconf can make the AC_INIT
+# arguments mandatory, and then we can depend on a new Autoconf
+# release and drop the old call support.
+AC_DEFUN([AM_INIT_AUTOMAKE],
+[AC_PREREQ([2.60])dnl
+dnl Autoconf wants to disallow AM_ names.  We explicitly allow
+dnl the ones we care about.
+m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl
+AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl
+AC_REQUIRE([AC_PROG_INSTALL])dnl
+if test "`cd $srcdir && pwd`" != "`pwd`"; then
+  # Use -I$(srcdir) only when $(srcdir) != ., so that make's output
+  # is not polluted with repeated "-I."
+  AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl
+  # test to see if srcdir already configured
+  if test -f $srcdir/config.status; then
+    AC_MSG_ERROR([source directory already configured; run "make distclean" there first])
+  fi
+fi
+
+# test whether we have cygpath
+if test -z "$CYGPATH_W"; then
+  if (cygpath --version) >/dev/null 2>/dev/null; then
+    CYGPATH_W='cygpath -w'
+  else
+    CYGPATH_W=echo
+  fi
+fi
+AC_SUBST([CYGPATH_W])
+
+# Define the identity of the package.
+dnl Distinguish between old-style and new-style calls.
+m4_ifval([$2],
+[m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl
+ AC_SUBST([PACKAGE], [$1])dnl
+ AC_SUBST([VERSION], [$2])],
+[_AM_SET_OPTIONS([$1])dnl
+dnl Diagnose old-style AC_INIT with new-style AM_INIT_AUTOMAKE.
+m4_if(m4_ifdef([AC_PACKAGE_NAME], 1)m4_ifdef([AC_PACKAGE_VERSION], 1), 11,,
+  [m4_fatal([AC_INIT should be called with package and version arguments])])dnl
+ AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl
+ AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl
+
+_AM_IF_OPTION([no-define],,
+[AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Name of package])
+ AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Version number of package])])dnl
+
+# Some tools Automake needs.
+AC_REQUIRE([AM_SANITY_CHECK])dnl
+AC_REQUIRE([AC_ARG_PROGRAM])dnl
+AM_MISSING_PROG(ACLOCAL, aclocal-${am__api_version})
+AM_MISSING_PROG(AUTOCONF, autoconf)
+AM_MISSING_PROG(AUTOMAKE, automake-${am__api_version})
+AM_MISSING_PROG(AUTOHEADER, autoheader)
+AM_MISSING_PROG(MAKEINFO, makeinfo)
+AM_PROG_INSTALL_SH
+AM_PROG_INSTALL_STRIP
+AC_REQUIRE([AM_PROG_MKDIR_P])dnl
+# We need awk for the "check" target.  The system "awk" is bad on
+# some platforms.
+AC_REQUIRE([AC_PROG_AWK])dnl
+AC_REQUIRE([AC_PROG_MAKE_SET])dnl
+AC_REQUIRE([AM_SET_LEADING_DOT])dnl
+_AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])],
+              [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])],
+	      		     [_AM_PROG_TAR([v7])])])
+_AM_IF_OPTION([no-dependencies],,
+[AC_PROVIDE_IFELSE([AC_PROG_CC],
+                  [_AM_DEPENDENCIES(CC)],
+                  [define([AC_PROG_CC],
+                          defn([AC_PROG_CC])[_AM_DEPENDENCIES(CC)])])dnl
+AC_PROVIDE_IFELSE([AC_PROG_CXX],
+                  [_AM_DEPENDENCIES(CXX)],
+                  [define([AC_PROG_CXX],
+                          defn([AC_PROG_CXX])[_AM_DEPENDENCIES(CXX)])])dnl
+AC_PROVIDE_IFELSE([AC_PROG_OBJC],
+                  [_AM_DEPENDENCIES(OBJC)],
+                  [define([AC_PROG_OBJC],
+                          defn([AC_PROG_OBJC])[_AM_DEPENDENCIES(OBJC)])])dnl
+])
+])
+
+
+# When config.status generates a header, we must update the stamp-h file.
+# This file resides in the same directory as the config header
+# that is generated.  The stamp files are numbered to have different names.
+
+# Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the
+# loop where config.status creates the headers, so we can generate
+# our stamp files there.
+AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK],
+[# Compute $1's index in $config_headers.
+_am_stamp_count=1
+for _am_header in $config_headers :; do
+  case $_am_header in
+    $1 | $1:* )
+      break ;;
+    * )
+      _am_stamp_count=`expr $_am_stamp_count + 1` ;;
+  esac
+done
+echo "timestamp for $1" >`AS_DIRNAME([$1])`/stamp-h[]$_am_stamp_count])
+
+# Copyright (C) 2001, 2003, 2005  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PROG_INSTALL_SH
+# ------------------
+# Define $install_sh.
+AC_DEFUN([AM_PROG_INSTALL_SH],
+[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
+install_sh=${install_sh-"\$(SHELL) $am_aux_dir/install-sh"}
+AC_SUBST(install_sh)])
+
+# Copyright (C) 2003, 2005  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 2
+
+# Check whether the underlying file-system supports filenames
+# with a leading dot.  For instance MS-DOS doesn't.
+AC_DEFUN([AM_SET_LEADING_DOT],
+[rm -rf .tst 2>/dev/null
+mkdir .tst 2>/dev/null
+if test -d .tst; then
+  am__leading_dot=.
+else
+  am__leading_dot=_
+fi
+rmdir .tst 2>/dev/null
+AC_SUBST([am__leading_dot])])
+
+# Check to see how 'make' treats includes.	            -*- Autoconf -*-
+
+# Copyright (C) 2001, 2002, 2003, 2005  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 3
+
+# AM_MAKE_INCLUDE()
+# -----------------
+# Check to see how make treats includes.
+AC_DEFUN([AM_MAKE_INCLUDE],
+[am_make=${MAKE-make}
+cat > confinc << 'END'
+am__doit:
+	@echo done
+.PHONY: am__doit
+END
+# If we don't find an include directive, just comment out the code.
+AC_MSG_CHECKING([for style of include used by $am_make])
+am__include="#"
+am__quote=
+_am_result=none
+# First try GNU make style include.
+echo "include confinc" > confmf
+# We grep out `Entering directory' and `Leaving directory'
+# messages which can occur if `w' ends up in MAKEFLAGS.
+# In particular we don't look at `^make:' because GNU make might
+# be invoked under some other name (usually "gmake"), in which
+# case it prints its new name instead of `make'.
+if test "`$am_make -s -f confmf 2> /dev/null | grep -v 'ing directory'`" = "done"; then
+   am__include=include
+   am__quote=
+   _am_result=GNU
+fi
+# Now try BSD make style include.
+if test "$am__include" = "#"; then
+   echo '.include "confinc"' > confmf
+   if test "`$am_make -s -f confmf 2> /dev/null`" = "done"; then
+      am__include=.include
+      am__quote="\""
+      _am_result=BSD
+   fi
+fi
+AC_SUBST([am__include])
+AC_SUBST([am__quote])
+AC_MSG_RESULT([$_am_result])
+rm -f confinc confmf
+])
+
+# Fake the existence of programs that GNU maintainers use.  -*- Autoconf -*-
+
+# Copyright (C) 1997, 1999, 2000, 2001, 2003, 2004, 2005
+# Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 5
+
+# AM_MISSING_PROG(NAME, PROGRAM)
+# ------------------------------
+AC_DEFUN([AM_MISSING_PROG],
+[AC_REQUIRE([AM_MISSING_HAS_RUN])
+$1=${$1-"${am_missing_run}$2"}
+AC_SUBST($1)])
+
+
+# AM_MISSING_HAS_RUN
+# ------------------
+# Define MISSING if not defined so far and test if it supports --run.
+# If it does, set am_missing_run to use it, otherwise, to nothing.
+AC_DEFUN([AM_MISSING_HAS_RUN],
+[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
+AC_REQUIRE_AUX_FILE([missing])dnl
+test x"${MISSING+set}" = xset || MISSING="\${SHELL} $am_aux_dir/missing"
+# Use eval to expand $SHELL
+if eval "$MISSING --run true"; then
+  am_missing_run="$MISSING --run "
+else
+  am_missing_run=
+  AC_MSG_WARN([`missing' script is too old or missing])
+fi
+])
+
+# Copyright (C) 2003, 2004, 2005, 2006  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PROG_MKDIR_P
+# ---------------
+# Check for `mkdir -p'.
+AC_DEFUN([AM_PROG_MKDIR_P],
+[AC_PREREQ([2.60])dnl
+AC_REQUIRE([AC_PROG_MKDIR_P])dnl
+dnl Automake 1.8 to 1.9.6 used to define mkdir_p.  We now use MKDIR_P,
+dnl while keeping a definition of mkdir_p for backward compatibility.
+dnl @MKDIR_P@ is magic: AC_OUTPUT adjusts its value for each Makefile.
+dnl However we cannot define mkdir_p as $(MKDIR_P) for the sake of
+dnl Makefile.ins that do not define MKDIR_P, so we do our own
+dnl adjustment using top_builddir (which is defined more often than
+dnl MKDIR_P).
+AC_SUBST([mkdir_p], ["$MKDIR_P"])dnl
+case $mkdir_p in
+  [[\\/$]]* | ?:[[\\/]]*) ;;
+  */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;;
+esac
+])
+
+# Helper functions for option handling.                     -*- Autoconf -*-
+
+# Copyright (C) 2001, 2002, 2003, 2005  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 3
+
+# _AM_MANGLE_OPTION(NAME)
+# -----------------------
+AC_DEFUN([_AM_MANGLE_OPTION],
+[[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])])
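+
+# For example (illustrative only):
+#   _AM_MANGLE_OPTION([no-define])
+# expands to the flag name `_AM_OPTION_no_define'.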
+
+# _AM_SET_OPTION(NAME)
+# --------------------
+# Set option NAME.  Presently that only means defining a flag for this option.
+AC_DEFUN([_AM_SET_OPTION],
+[m4_define(_AM_MANGLE_OPTION([$1]), 1)])
+
+# _AM_SET_OPTIONS(OPTIONS)
+# ------------------------
+# OPTIONS is a space-separated list of Automake options.
+AC_DEFUN([_AM_SET_OPTIONS],
+[AC_FOREACH([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])])
+
+# _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET])
+# -------------------------------------------
+# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
+AC_DEFUN([_AM_IF_OPTION],
+[m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])])
+
+# Check to make sure that the build environment is sane.    -*- Autoconf -*-
+
+# Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005
+# Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 4
+
+# AM_SANITY_CHECK
+# ---------------
+AC_DEFUN([AM_SANITY_CHECK],
+[AC_MSG_CHECKING([whether build environment is sane])
+# Just in case
+sleep 1
+echo timestamp > conftest.file
+# Do `set' in a subshell so we don't clobber the current shell's
+# arguments.  Must try -L first in case configure is actually a
+# symlink; some systems play weird games with the mod time of symlinks
+# (e.g. FreeBSD returns the mod time of the symlink's containing
+# directory).
+if (
+   set X `ls -Lt $srcdir/configure conftest.file 2> /dev/null`
+   if test "$[*]" = "X"; then
+      # -L didn't work.
+      set X `ls -t $srcdir/configure conftest.file`
+   fi
+   rm -f conftest.file
+   if test "$[*]" != "X $srcdir/configure conftest.file" \
+      && test "$[*]" != "X conftest.file $srcdir/configure"; then
+
+      # If neither matched, then we have a broken ls.  This can happen
+      # if, for instance, CONFIG_SHELL is bash and it inherits a
+      # broken ls alias from the environment.  This has actually
+      # happened.  Such a system could not be considered "sane".
+      AC_MSG_ERROR([ls -t appears to fail.  Make sure there is not a broken
+alias in your environment])
+   fi
+
+   test "$[2]" = conftest.file
+   )
+then
+   # Ok.
+   :
+else
+   AC_MSG_ERROR([newly created file is older than distributed files!
+Check your system clock])
+fi
+AC_MSG_RESULT(yes)])
+
+# Copyright (C) 2001, 2003, 2005  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# AM_PROG_INSTALL_STRIP
+# ---------------------
+# One issue with vendor `install' (even GNU) is that you can't
+# specify the program used to strip binaries.  This is especially
+# annoying in cross-compiling environments, where the build's strip
+# is unlikely to handle the host's binaries.
+# Fortunately install-sh will honor a STRIPPROG variable, so we
+# always use install-sh in `make install-strip', and initialize
+# STRIPPROG with the value of the STRIP variable (set by the user).
+AC_DEFUN([AM_PROG_INSTALL_STRIP],
+[AC_REQUIRE([AM_PROG_INSTALL_SH])dnl
+# Installed binaries are usually stripped using `strip' when the user
+# runs `make install-strip'.  However `strip' might not be the right
+# tool to use in cross-compilation environments, so Automake
+# will honor the `STRIP' environment variable to override this program.
+dnl Don't test for $cross_compiling = yes, because it might be `maybe'.
+if test "$cross_compiling" != no; then
+  AC_CHECK_TOOL([STRIP], [strip], :)
+fi
+INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
+AC_SUBST([INSTALL_STRIP_PROGRAM])])
+
+# Copyright (C) 2006  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# _AM_SUBST_NOTMAKE(VARIABLE)
+# ---------------------------
+# Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in.
+# This macro is traced by Automake.
+AC_DEFUN([_AM_SUBST_NOTMAKE])
+
+# Check how to create a tarball.                            -*- Autoconf -*-
+
+# Copyright (C) 2004, 2005  Free Software Foundation, Inc.
+#
+# This file is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# serial 2
+
+# _AM_PROG_TAR(FORMAT)
+# --------------------
+# Check how to create a tarball in format FORMAT.
+# FORMAT should be one of `v7', `ustar', or `pax'.
+#
+# Substitute a variable $(am__tar) that is a command
+# writing to stdout a FORMAT-tarball containing the directory
+# $tardir.
+#     tardir=directory && $(am__tar) > result.tar
+#
+# Substitute a variable $(am__untar) that extracts such
+# a tarball read from stdin.
+#     $(am__untar) < result.tar
+AC_DEFUN([_AM_PROG_TAR],
+[# Always define AMTAR for backward compatibility.
+AM_MISSING_PROG([AMTAR], [tar])
+m4_if([$1], [v7],
+     [am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -'],
+     [m4_case([$1], [ustar],, [pax],,
+              [m4_fatal([Unknown tar format])])
+AC_MSG_CHECKING([how to create a $1 tar archive])
+# Loop over all known methods to create a tar archive until one works.
+_am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none'
+_am_tools=${am_cv_prog_tar_$1-$_am_tools}
+# Do not fold the above two lines into one, because Tru64 sh and
+# Solaris sh will not grok spaces in the rhs of `-'.
+for _am_tool in $_am_tools
+do
+  case $_am_tool in
+  gnutar)
+    for _am_tar in tar gnutar gtar;
+    do
+      AM_RUN_LOG([$_am_tar --version]) && break
+    done
+    am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"'
+    am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"'
+    am__untar="$_am_tar -xf -"
+    ;;
+  plaintar)
+    # Must skip GNU tar: if it does not support --format= it doesn't create
+    # a ustar tarball either.
+    (tar --version) >/dev/null 2>&1 && continue
+    am__tar='tar chf - "$$tardir"'
+    am__tar_='tar chf - "$tardir"'
+    am__untar='tar xf -'
+    ;;
+  pax)
+    am__tar='pax -L -x $1 -w "$$tardir"'
+    am__tar_='pax -L -x $1 -w "$tardir"'
+    am__untar='pax -r'
+    ;;
+  cpio)
+    am__tar='find "$$tardir" -print | cpio -o -H $1 -L'
+    am__tar_='find "$tardir" -print | cpio -o -H $1 -L'
+    am__untar='cpio -i -H $1 -d'
+    ;;
+  none)
+    am__tar=false
+    am__tar_=false
+    am__untar=false
+    ;;
+  esac
+
+  # If the value was cached, stop now.  We just wanted to have am__tar
+  # and am__untar set.
+  test -n "${am_cv_prog_tar_$1}" && break
+
+  # tar/untar a dummy directory, and stop if the command works
+  rm -rf conftest.dir
+  mkdir conftest.dir
+  echo GrepMe > conftest.dir/file
+  AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar])
+  rm -rf conftest.dir
+  if test -s conftest.tar; then
+    AM_RUN_LOG([$am__untar <conftest.tar])
+    grep GrepMe conftest.dir/file >/dev/null 2>&1 && break
+  fi
+done
+rm -rf conftest.dir
+
+AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool])
+AC_MSG_RESULT([$am_cv_prog_tar_$1])])
+AC_SUBST([am__tar])
+AC_SUBST([am__untar])
+]) # _AM_PROG_TAR
+
diff --git a/lang/c/jansson/config.guess b/lang/c/jansson/config.guess
new file mode 100755
index 0000000..396482d
--- /dev/null
+++ b/lang/c/jansson/config.guess
@@ -0,0 +1,1500 @@
+#! /bin/sh
+# Attempt to guess a canonical system name.
+#   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
+#   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
+#   Inc.
+
+timestamp='2006-07-02'
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA
+# 02110-1301, USA.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+
+# Originally written by Per Bothner <per at bothner.com>.
+# Please send patches to <config-patches at gnu.org>.  Submit a context
+# diff and a properly formatted ChangeLog entry.
+#
+# This script attempts to guess a canonical system name similar to
+# config.sub.  If it succeeds, it prints the system name on stdout, and
+# exits with 0.  Otherwise, it exits with 1.
+#
+# The plan is that this can be called by configure scripts if you
+# don't specify an explicit build system type.
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION]
+
+Output the configuration name of the system \`$me' is run on.
+
+Operation modes:
+  -h, --help         print this help, then exit
+  -t, --time-stamp   print date of last modification, then exit
+  -v, --version      print version number, then exit
+
+Report bugs and patches to <config-patches at gnu.org>."
+
+version="\
+GNU config.guess ($timestamp)
+
+Originally written by Per Bothner.
+Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
+Free Software Foundation, Inc.
+
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+  case $1 in
+    --time-stamp | --time* | -t )
+       echo "$timestamp" ; exit ;;
+    --version | -v )
+       echo "$version" ; exit ;;
+    --help | --h* | -h )
+       echo "$usage"; exit ;;
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       echo "$me: invalid option $1$help" >&2
+       exit 1 ;;
+    * )
+       break ;;
+  esac
+done
+
+if test $# != 0; then
+  echo "$me: too many arguments$help" >&2
+  exit 1
+fi
+
+trap 'exit 1' 1 2 15
+
+# CC_FOR_BUILD -- compiler used by this script. Note that the use of a
+# compiler to aid in system detection is discouraged as it requires
+# temporary files to be created and, as you can see below, it is a
+# headache to deal with in a portable fashion.
+
+# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still
+# use `HOST_CC' if defined, but it is deprecated.
+
+# Portable tmp directory creation inspired by the Autoconf team.
+
+set_cc_for_build='
+trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ;
+trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ;
+: ${TMPDIR=/tmp} ;
+ { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } ||
+ { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } ||
+ { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } ||
+ { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ;
+dummy=$tmp/dummy ;
+tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ;
+case $CC_FOR_BUILD,$HOST_CC,$CC in
+ ,,)    echo "int x;" > $dummy.c ;
+	for c in cc gcc c89 c99 ; do
+	  if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then
+	     CC_FOR_BUILD="$c"; break ;
+	  fi ;
+	done ;
+	if test x"$CC_FOR_BUILD" = x ; then
+	  CC_FOR_BUILD=no_compiler_found ;
+	fi
+	;;
+ ,,*)   CC_FOR_BUILD=$CC ;;
+ ,*,*)  CC_FOR_BUILD=$HOST_CC ;;
+esac ; set_cc_for_build= ;'
+
+# This is needed to find uname on a Pyramid OSx when run in the BSD universe.
+# (ghazi at noc.rutgers.edu 1994-08-24)
+if (test -f /.attbin/uname) >/dev/null 2>&1 ; then
+	PATH=$PATH:/.attbin ; export PATH
+fi
+
+UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown
+UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown
+UNAME_SYSTEM=`(uname -s) 2>/dev/null`  || UNAME_SYSTEM=unknown
+UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown
+
+# Note: order is significant - the case branches are not exclusive.
+
+case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in
+    *:NetBSD:*:*)
+	# NetBSD (nbsd) targets should (where applicable) match one or
+	# more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*,
+	# *-*-netbsdecoff* and *-*-netbsd*.  For targets that recently
+	# switched to ELF, *-*-netbsd* would select the old
+	# object file format.  This provides both forward
+	# compatibility and a consistent mechanism for selecting the
+	# object file format.
+	#
+	# Note: NetBSD doesn't particularly care about the vendor
+	# portion of the name.  We always set it to "unknown".
+	sysctl="sysctl -n hw.machine_arch"
+	UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \
+	    /usr/sbin/$sysctl 2>/dev/null || echo unknown)`
+	case "${UNAME_MACHINE_ARCH}" in
+	    armeb) machine=armeb-unknown ;;
+	    arm*) machine=arm-unknown ;;
+	    sh3el) machine=shl-unknown ;;
+	    sh3eb) machine=sh-unknown ;;
+	    *) machine=${UNAME_MACHINE_ARCH}-unknown ;;
+	esac
+	# The operating system, including the object format, for targets
+	# that have switched to ELF recently or will in the future.
+	case "${UNAME_MACHINE_ARCH}" in
+	    arm*|i386|m68k|ns32k|sh3*|sparc|vax)
+		eval $set_cc_for_build
+		if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
+			| grep __ELF__ >/dev/null
+		then
+		    # Once all utilities can handle ECOFF (netbsdecoff) or
+		    # a.out (netbsdaout), return netbsd for either.  FIX?
+		    os=netbsd
+		else
+		    os=netbsdelf
+		fi
+		;;
+	    *)
+	        os=netbsd
+		;;
+	esac
+	# The OS release
+	# Debian GNU/NetBSD machines have a different userland, and
+	# thus, need a distinct triplet. However, they do not need
+	# kernel version information, so it can be replaced with a
+	# suitable tag, in the style of linux-gnu.
+	case "${UNAME_VERSION}" in
+	    Debian*)
+		release='-gnu'
+		;;
+	    *)
+		release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'`
+		;;
+	esac
+	# Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
+	# contains redundant information, the shorter form:
+	# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
+	echo "${machine}-${os}${release}"
+	exit ;;
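+	# (Illustrative: a hypothetical i386 NetBSD 1.6.2 ELF system would
+	# print i386-unknown-netbsdelf1.6.2 from the branch above.)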
+    *:OpenBSD:*:*)
+	UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'`
+	echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE}
+	exit ;;
+    *:ekkoBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE}
+	exit ;;
+    *:SolidBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE}
+	exit ;;
+    macppc:MirBSD:*:*)
+	echo powerpc-unknown-mirbsd${UNAME_RELEASE}
+	exit ;;
+    *:MirBSD:*:*)
+	echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE}
+	exit ;;
+    alpha:OSF1:*:*)
+	case $UNAME_RELEASE in
+	*4.0)
+		UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'`
+		;;
+	*5.*)
+	        UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'`
+		;;
+	esac
+	# According to Compaq, /usr/sbin/psrinfo has been available on
+	# OSF/1 and Tru64 systems produced since 1995.  I hope that
+	# covers most systems running today.  This code pipes the CPU
+	# types through head -n 1, so we only detect the type of CPU 0.
+	ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^  The alpha \(.*\) processor.*$/\1/p' | head -n 1`
+	case "$ALPHA_CPU_TYPE" in
+	    "EV4 (21064)")
+		UNAME_MACHINE="alpha" ;;
+	    "EV4.5 (21064)")
+		UNAME_MACHINE="alpha" ;;
+	    "LCA4 (21066/21068)")
+		UNAME_MACHINE="alpha" ;;
+	    "EV5 (21164)")
+		UNAME_MACHINE="alphaev5" ;;
+	    "EV5.6 (21164A)")
+		UNAME_MACHINE="alphaev56" ;;
+	    "EV5.6 (21164PC)")
+		UNAME_MACHINE="alphapca56" ;;
+	    "EV5.7 (21164PC)")
+		UNAME_MACHINE="alphapca57" ;;
+	    "EV6 (21264)")
+		UNAME_MACHINE="alphaev6" ;;
+	    "EV6.7 (21264A)")
+		UNAME_MACHINE="alphaev67" ;;
+	    "EV6.8CB (21264C)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.8AL (21264B)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.8CX (21264D)")
+		UNAME_MACHINE="alphaev68" ;;
+	    "EV6.9A (21264/EV69A)")
+		UNAME_MACHINE="alphaev69" ;;
+	    "EV7 (21364)")
+		UNAME_MACHINE="alphaev7" ;;
+	    "EV7.9 (21364A)")
+		UNAME_MACHINE="alphaev79" ;;
+	esac
+	# A Pn.n version is a patched version.
+	# A Vn.n version is a released version.
+	# A Tn.n version is a released field test version.
+	# A Xn.n version is an unreleased experimental baselevel.
+	# 1.2 uses "1.2" for uname -r.
+	echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
+	exit ;;
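+	# (Illustrative: an EV5.6 (21164A) machine reporting release V4.0F
+	# would print alphaev56-dec-osf4.0f from the branch above.)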
+    Alpha\ *:Windows_NT*:*)
+	# How do we know it's Interix rather than the generic POSIX subsystem?
+	# Should we change UNAME_MACHINE based on the output of uname instead
+	# of the specific Alpha model?
+	echo alpha-pc-interix
+	exit ;;
+    21064:Windows_NT:50:3)
+	echo alpha-dec-winnt3.5
+	exit ;;
+    Amiga*:UNIX_System_V:4.0:*)
+	echo m68k-unknown-sysv4
+	exit ;;
+    *:[Aa]miga[Oo][Ss]:*:*)
+	echo ${UNAME_MACHINE}-unknown-amigaos
+	exit ;;
+    *:[Mm]orph[Oo][Ss]:*:*)
+	echo ${UNAME_MACHINE}-unknown-morphos
+	exit ;;
+    *:OS/390:*:*)
+	echo i370-ibm-openedition
+	exit ;;
+    *:z/VM:*:*)
+	echo s390-ibm-zvmoe
+	exit ;;
+    *:OS400:*:*)
+        echo powerpc-ibm-os400
+	exit ;;
+    arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
+	echo arm-acorn-riscix${UNAME_RELEASE}
+	exit ;;
+    arm:riscos:*:*|arm:RISCOS:*:*)
+	echo arm-unknown-riscos
+	exit ;;
+    SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*)
+	echo hppa1.1-hitachi-hiuxmpp
+	exit ;;
+    Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*)
+	# akee at wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE.
+	if test "`(/bin/universe) 2>/dev/null`" = att ; then
+		echo pyramid-pyramid-sysv3
+	else
+		echo pyramid-pyramid-bsd
+	fi
+	exit ;;
+    NILE*:*:*:dcosx)
+	echo pyramid-pyramid-svr4
+	exit ;;
+    DRS?6000:unix:4.0:6*)
+	echo sparc-icl-nx6
+	exit ;;
+    DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*)
+	case `/usr/bin/uname -p` in
+	    sparc) echo sparc-icl-nx7; exit ;;
+	esac ;;
+    sun4H:SunOS:5.*:*)
+	echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*)
+	echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    i86pc:SunOS:5.*:*)
+	echo i386-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4*:SunOS:6*:*)
+	# According to config.sub, this is the proper way to canonicalize
+	# SunOS6.  Hard to guess exactly what SunOS6 will be like, but
+	# it's likely to be more like Solaris than SunOS4.
+	echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    sun4*:SunOS:*:*)
+	case "`/usr/bin/arch -k`" in
+	    Series*|S4*)
+		UNAME_RELEASE=`uname -v`
+		;;
+	esac
+	# Japanese Language versions have a version number like `4.1.3-JL'.
+	echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'`
+	exit ;;
+    sun3*:SunOS:*:*)
+	echo m68k-sun-sunos${UNAME_RELEASE}
+	exit ;;
+    sun*:*:4.2BSD:*)
+	UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null`
+	test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3
+	case "`/bin/arch`" in
+	    sun3)
+		echo m68k-sun-sunos${UNAME_RELEASE}
+		;;
+	    sun4)
+		echo sparc-sun-sunos${UNAME_RELEASE}
+		;;
+	esac
+	exit ;;
+    aushp:SunOS:*:*)
+	echo sparc-auspex-sunos${UNAME_RELEASE}
+	exit ;;
+    # The situation for MiNT is a little confusing.  The machine name
+    # can be virtually anything (at the very least, everything which is
+    # not "atarist" or "atariste" should have a processor > m68000).
+    # The system name ranges from "MiNT" through "FreeMiNT" to the
+    # lowercase versions "mint" and "freemint".  Finally, the system
+    # name "TOS" denotes a system which is actually not MiNT.  But MiNT
+    # is backward compatible with TOS, so this should be no problem.
+    atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*)
+        echo m68k-atari-mint${UNAME_RELEASE}
+	exit ;;
+    atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*)
+	echo m68k-atari-mint${UNAME_RELEASE}
+        exit ;;
+    *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*)
+        echo m68k-atari-mint${UNAME_RELEASE}
+	exit ;;
+    milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*)
+        echo m68k-milan-mint${UNAME_RELEASE}
+        exit ;;
+    hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*)
+        echo m68k-hades-mint${UNAME_RELEASE}
+        exit ;;
+    *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
+        echo m68k-unknown-mint${UNAME_RELEASE}
+        exit ;;
+    m68k:machten:*:*)
+	echo m68k-apple-machten${UNAME_RELEASE}
+	exit ;;
+    powerpc:machten:*:*)
+	echo powerpc-apple-machten${UNAME_RELEASE}
+	exit ;;
+    RISC*:Mach:*:*)
+	echo mips-dec-mach_bsd4.3
+	exit ;;
+    RISC*:ULTRIX:*:*)
+	echo mips-dec-ultrix${UNAME_RELEASE}
+	exit ;;
+    VAX*:ULTRIX*:*:*)
+	echo vax-dec-ultrix${UNAME_RELEASE}
+	exit ;;
+    2020:CLIX:*:* | 2430:CLIX:*:*)
+	echo clipper-intergraph-clix${UNAME_RELEASE}
+	exit ;;
+    mips:*:*:UMIPS | mips:*:*:RISCos)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+#ifdef __cplusplus
+#include <stdio.h>  /* for printf() prototype */
+	int main (int argc, char *argv[]) {
+#else
+	int main (argc, argv) int argc; char *argv[]; {
+#endif
+	#if defined (host_mips) && defined (MIPSEB)
+	#if defined (SYSTYPE_SYSV)
+	  printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0);
+	#endif
+	#if defined (SYSTYPE_SVR4)
+	  printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0);
+	#endif
+	#if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD)
+	  printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0);
+	#endif
+	#endif
+	  exit (-1);
+	}
+EOF
+	$CC_FOR_BUILD -o $dummy $dummy.c &&
+	  dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` &&
+	  SYSTEM_NAME=`$dummy $dummyarg` &&
+	    { echo "$SYSTEM_NAME"; exit; }
+	echo mips-mips-riscos${UNAME_RELEASE}
+	exit ;;
+    Motorola:PowerMAX_OS:*:*)
+	echo powerpc-motorola-powermax
+	exit ;;
+    Motorola:*:4.3:PL8-*)
+	echo powerpc-harris-powermax
+	exit ;;
+    Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*)
+	echo powerpc-harris-powermax
+	exit ;;
+    Night_Hawk:Power_UNIX:*:*)
+	echo powerpc-harris-powerunix
+	exit ;;
+    m88k:CX/UX:7*:*)
+	echo m88k-harris-cxux7
+	exit ;;
+    m88k:*:4*:R4*)
+	echo m88k-motorola-sysv4
+	exit ;;
+    m88k:*:3*:R3*)
+	echo m88k-motorola-sysv3
+	exit ;;
+    AViiON:dgux:*:*)
+        # DG/UX returns AViiON for all architectures
+        UNAME_PROCESSOR=`/usr/bin/uname -p`
+	if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ]
+	then
+	    if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \
+	       [ ${TARGET_BINARY_INTERFACE}x = x ]
+	    then
+		echo m88k-dg-dgux${UNAME_RELEASE}
+	    else
+		echo m88k-dg-dguxbcs${UNAME_RELEASE}
+	    fi
+	else
+	    echo i586-dg-dgux${UNAME_RELEASE}
+	fi
+ 	exit ;;
+    M88*:DolphinOS:*:*)	# DolphinOS (SVR3)
+	echo m88k-dolphin-sysv3
+	exit ;;
+    M88*:*:R3*:*)
+	# Delta 88k system running SVR3
+	echo m88k-motorola-sysv3
+	exit ;;
+    XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3)
+	echo m88k-tektronix-sysv3
+	exit ;;
+    Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD)
+	echo m68k-tektronix-bsd
+	exit ;;
+    *:IRIX*:*:*)
+	echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'`
+	exit ;;
+    ????????:AIX?:[12].1:2)   # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX.
+	echo romp-ibm-aix     # uname -m gives an 8 hex-code CPU id
+	exit ;;               # Note that: echo "'`uname -s`'" gives 'AIX '
+    i*86:AIX:*:*)
+	echo i386-ibm-aix
+	exit ;;
+    ia64:AIX:*:*)
+	if [ -x /usr/bin/oslevel ] ; then
+		IBM_REV=`/usr/bin/oslevel`
+	else
+		IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
+	fi
+	echo ${UNAME_MACHINE}-ibm-aix${IBM_REV}
+	exit ;;
+    *:AIX:2:3)
+	if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then
+		eval $set_cc_for_build
+		sed 's/^		//' << EOF >$dummy.c
+		#include <sys/systemcfg.h>
+
+		main()
+			{
+			if (!__power_pc())
+				exit(1);
+			puts("powerpc-ibm-aix3.2.5");
+			exit(0);
+			}
+EOF
+		if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy`
+		then
+			echo "$SYSTEM_NAME"
+		else
+			echo rs6000-ibm-aix3.2.5
+		fi
+	elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then
+		echo rs6000-ibm-aix3.2.4
+	else
+		echo rs6000-ibm-aix3.2
+	fi
+	exit ;;
+    *:AIX:*:[45])
+	IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'`
+	if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then
+		IBM_ARCH=rs6000
+	else
+		IBM_ARCH=powerpc
+	fi
+	if [ -x /usr/bin/oslevel ] ; then
+		IBM_REV=`/usr/bin/oslevel`
+	else
+		IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
+	fi
+	echo ${IBM_ARCH}-ibm-aix${IBM_REV}
+	exit ;;
+    *:AIX:*:*)
+	echo rs6000-ibm-aix
+	exit ;;
+    ibmrt:4.4BSD:*|romp-ibm:BSD:*)
+	echo romp-ibm-bsd4.4
+	exit ;;
+    ibmrt:*BSD:*|romp-ibm:BSD:*)            # covers RT/PC BSD and
+	echo romp-ibm-bsd${UNAME_RELEASE}   # 4.3 with uname added to
+	exit ;;                             # report: romp-ibm BSD 4.3
+    *:BOSX:*:*)
+	echo rs6000-bull-bosx
+	exit ;;
+    DPX/2?00:B.O.S.:*:*)
+	echo m68k-bull-sysv3
+	exit ;;
+    9000/[34]??:4.3bsd:1.*:*)
+	echo m68k-hp-bsd
+	exit ;;
+    hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*)
+	echo m68k-hp-bsd4.4
+	exit ;;
+    9000/[34678]??:HP-UX:*:*)
+	HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
+	case "${UNAME_MACHINE}" in
+	    9000/31? )            HP_ARCH=m68000 ;;
+	    9000/[34]?? )         HP_ARCH=m68k ;;
+	    9000/[678][0-9][0-9])
+		if [ -x /usr/bin/getconf ]; then
+		    sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null`
+                    sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null`
+                    case "${sc_cpu_version}" in
+                      523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0
+                      528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1
+                      532)                      # CPU_PA_RISC2_0
+                        case "${sc_kernel_bits}" in
+                          32) HP_ARCH="hppa2.0n" ;;
+                          64) HP_ARCH="hppa2.0w" ;;
+			  '') HP_ARCH="hppa2.0" ;;   # HP-UX 10.20
+                        esac ;;
+                    esac
+		fi
+		if [ "${HP_ARCH}" = "" ]; then
+		    eval $set_cc_for_build
+		    sed 's/^              //' << EOF >$dummy.c
+
+              #define _HPUX_SOURCE
+              #include <stdlib.h>
+              #include <unistd.h>
+
+              int main ()
+              {
+              #if defined(_SC_KERNEL_BITS)
+                  long bits = sysconf(_SC_KERNEL_BITS);
+              #endif
+                  long cpu  = sysconf (_SC_CPU_VERSION);
+
+                  switch (cpu)
+              	{
+              	case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
+              	case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
+              	case CPU_PA_RISC2_0:
+              #if defined(_SC_KERNEL_BITS)
+              	    switch (bits)
+              		{
+              		case 64: puts ("hppa2.0w"); break;
+              		case 32: puts ("hppa2.0n"); break;
+              		default: puts ("hppa2.0"); break;
+              		} break;
+              #else  /* !defined(_SC_KERNEL_BITS) */
+              	    puts ("hppa2.0"); break;
+              #endif
+              	default: puts ("hppa1.0"); break;
+              	}
+                  exit (0);
+              }
+EOF
+		    (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy`
+		    test -z "$HP_ARCH" && HP_ARCH=hppa
+		fi ;;
+	esac
+	if [ ${HP_ARCH} = "hppa2.0w" ]
+	then
+	    eval $set_cc_for_build
+
+	    # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating
+	    # 32-bit code.  hppa64-hp-hpux* has the same kernel and a compiler
+	    # generating 64-bit code.  GNU and HP use different nomenclature:
+	    #
+	    # $ CC_FOR_BUILD=cc ./config.guess
+	    # => hppa2.0w-hp-hpux11.23
+	    # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess
+	    # => hppa64-hp-hpux11.23
+
+	    if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) |
+		grep __LP64__ >/dev/null
+	    then
+		HP_ARCH="hppa2.0w"
+	    else
+		HP_ARCH="hppa64"
+	    fi
+	fi
+	echo ${HP_ARCH}-hp-hpux${HPUX_REV}
+	exit ;;
+    ia64:HP-UX:*:*)
+	HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
+	echo ia64-hp-hpux${HPUX_REV}
+	exit ;;
+    3050*:HI-UX:*:*)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#include <unistd.h>
+	int
+	main ()
+	{
+	  long cpu = sysconf (_SC_CPU_VERSION);
+	  /* The order matters, because CPU_IS_HP_MC68K erroneously returns
+	     true for CPU_PA_RISC1_0.  CPU_IS_PA_RISC returns correct
+	     results, however.  */
+	  if (CPU_IS_PA_RISC (cpu))
+	    {
+	      switch (cpu)
+		{
+		  case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break;
+		  case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break;
+		  case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break;
+		  default: puts ("hppa-hitachi-hiuxwe2"); break;
+		}
+	    }
+	  else if (CPU_IS_HP_MC68K (cpu))
+	    puts ("m68k-hitachi-hiuxwe2");
+	  else puts ("unknown-hitachi-hiuxwe2");
+	  exit (0);
+	}
+EOF
+	$CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` &&
+		{ echo "$SYSTEM_NAME"; exit; }
+	echo unknown-hitachi-hiuxwe2
+	exit ;;
+    9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* )
+	echo hppa1.1-hp-bsd
+	exit ;;
+    9000/8??:4.3bsd:*:*)
+	echo hppa1.0-hp-bsd
+	exit ;;
+    *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*)
+	echo hppa1.0-hp-mpeix
+	exit ;;
+    hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* )
+	echo hppa1.1-hp-osf
+	exit ;;
+    hp8??:OSF1:*:*)
+	echo hppa1.0-hp-osf
+	exit ;;
+    i*86:OSF1:*:*)
+	if [ -x /usr/sbin/sysversion ] ; then
+	    echo ${UNAME_MACHINE}-unknown-osf1mk
+	else
+	    echo ${UNAME_MACHINE}-unknown-osf1
+	fi
+	exit ;;
+    parisc*:Lites*:*:*)
+	echo hppa1.1-hp-lites
+	exit ;;
+    C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*)
+	echo c1-convex-bsd
+        exit ;;
+    C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*)
+	if getsysinfo -f scalar_acc
+	then echo c32-convex-bsd
+	else echo c2-convex-bsd
+	fi
+        exit ;;
+    C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*)
+	echo c34-convex-bsd
+        exit ;;
+    C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*)
+	echo c38-convex-bsd
+        exit ;;
+    C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*)
+	echo c4-convex-bsd
+        exit ;;
+    CRAY*Y-MP:*:*:*)
+	echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*[A-Z]90:*:*:*)
+	echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \
+	| sed -e 's/CRAY.*\([A-Z]90\)/\1/' \
+	      -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \
+	      -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*TS:*:*:*)
+	echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*T3E:*:*:*)
+	echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*SV1:*:*:*)
+	echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    *:UNICOS/mp:*:*)
+	echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+	exit ;;
+    F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*)
+	FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
+        FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
+        FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'`
+        echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+        exit ;;
+    5000:UNIX_System_V:4.*:*)
+        FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
+        FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'`
+        echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+	exit ;;
+    i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
+	echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE}
+	exit ;;
+    sparc*:BSD/OS:*:*)
+	echo sparc-unknown-bsdi${UNAME_RELEASE}
+	exit ;;
+    *:BSD/OS:*:*)
+	echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE}
+	exit ;;
+    *:FreeBSD:*:*)
+	case ${UNAME_MACHINE} in
+	    pc98)
+		echo i386-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
+	    amd64)
+		echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
+	    *)
+		echo ${UNAME_MACHINE}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
+	esac
+	exit ;;
+    i*:CYGWIN*:*)
+	echo ${UNAME_MACHINE}-pc-cygwin
+	exit ;;
+    i*:MINGW*:*)
+	echo ${UNAME_MACHINE}-pc-mingw32
+	exit ;;
+    i*:windows32*:*)
+    	# uname -m includes "-pc" on this system.
+    	echo ${UNAME_MACHINE}-mingw32
+	exit ;;
+    i*:PW*:*)
+	echo ${UNAME_MACHINE}-pc-pw32
+	exit ;;
+    x86:Interix*:[3456]*)
+	echo i586-pc-interix${UNAME_RELEASE}
+	exit ;;
+    EM64T:Interix*:[3456]*)
+	echo x86_64-unknown-interix${UNAME_RELEASE}
+	exit ;;
+    [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*)
+	echo i${UNAME_MACHINE}-pc-mks
+	exit ;;
+    i*:Windows_NT*:* | Pentium*:Windows_NT*:*)
+	# How do we know it's Interix rather than the generic POSIX subsystem?
+	# It also conflicts with pre-2.0 versions of AT&T UWIN. Should we set
+	# UNAME_MACHINE based on the output of uname instead of i386?
+	echo i586-pc-interix
+	exit ;;
+    i*:UWIN*:*)
+	echo ${UNAME_MACHINE}-pc-uwin
+	exit ;;
+    amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*)
+	echo x86_64-unknown-cygwin
+	exit ;;
+    p*:CYGWIN*:*)
+	echo powerpcle-unknown-cygwin
+	exit ;;
+    prep*:SunOS:5.*:*)
+	echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+	exit ;;
+    *:GNU:*:*)
+	# the GNU system
+	echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'`
+	exit ;;
+    *:GNU/*:*:*)
+	# other systems with GNU libc and userland
+	echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu
+	exit ;;
+    i*86:Minix:*:*)
+	echo ${UNAME_MACHINE}-pc-minix
+	exit ;;
+    arm*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    avr32*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    cris:Linux:*:*)
+	echo cris-axis-linux-gnu
+	exit ;;
+    crisv32:Linux:*:*)
+	echo crisv32-axis-linux-gnu
+	exit ;;
+    frv:Linux:*:*)
+    	echo frv-unknown-linux-gnu
+	exit ;;
+    ia64:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    m32r*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    m68*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    mips:Linux:*:*)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#undef CPU
+	#undef mips
+	#undef mipsel
+	#if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
+	CPU=mipsel
+	#else
+	#if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
+	CPU=mips
+	#else
+	CPU=
+	#endif
+	#endif
+EOF
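+	# The C compiler's preprocessor collapses the #if ladder above into a
+	# single CPU=... line; the sed below extracts it for the shell to eval.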
+	eval "`$CC_FOR_BUILD -E $dummy.c 2>/dev/null | sed -n '
+	    /^CPU/{
+		s: ::g
+		p
+	    }'`"
+	test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; }
+	;;
+    mips64:Linux:*:*)
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#undef CPU
+	#undef mips64
+	#undef mips64el
+	#if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
+	CPU=mips64el
+	#else
+	#if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
+	CPU=mips64
+	#else
+	CPU=
+	#endif
+	#endif
+EOF
+	eval "`$CC_FOR_BUILD -E $dummy.c 2>/dev/null | sed -n '
+	    /^CPU/{
+		s: ::g
+		p
+	    }'`"
+	test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; }
+	;;
+    or32:Linux:*:*)
+	echo or32-unknown-linux-gnu
+	exit ;;
+    ppc:Linux:*:*)
+	echo powerpc-unknown-linux-gnu
+	exit ;;
+    ppc64:Linux:*:*)
+	echo powerpc64-unknown-linux-gnu
+	exit ;;
+    alpha:Linux:*:*)
+	case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in
+	  EV5)   UNAME_MACHINE=alphaev5 ;;
+	  EV56)  UNAME_MACHINE=alphaev56 ;;
+	  PCA56) UNAME_MACHINE=alphapca56 ;;
+	  PCA57) UNAME_MACHINE=alphapca56 ;;
+	  EV6)   UNAME_MACHINE=alphaev6 ;;
+	  EV67)  UNAME_MACHINE=alphaev67 ;;
+	  EV68*) UNAME_MACHINE=alphaev68 ;;
+        esac
+	objdump --private-headers /bin/sh | grep ld.so.1 >/dev/null
+	if test "$?" = 0 ; then LIBC="libc1" ; else LIBC="" ; fi
+	echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC}
+	exit ;;
+    parisc:Linux:*:* | hppa:Linux:*:*)
+	# Look for CPU level
+	case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in
+	  PA7*) echo hppa1.1-unknown-linux-gnu ;;
+	  PA8*) echo hppa2.0-unknown-linux-gnu ;;
+	  *)    echo hppa-unknown-linux-gnu ;;
+	esac
+	exit ;;
+    parisc64:Linux:*:* | hppa64:Linux:*:*)
+	echo hppa64-unknown-linux-gnu
+	exit ;;
+    s390:Linux:*:* | s390x:Linux:*:*)
+	echo ${UNAME_MACHINE}-ibm-linux
+	exit ;;
+    sh64*:Linux:*:*)
+    	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    sh*:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    sparc:Linux:*:* | sparc64:Linux:*:*)
+	echo ${UNAME_MACHINE}-unknown-linux-gnu
+	exit ;;
+    vax:Linux:*:*)
+	echo ${UNAME_MACHINE}-dec-linux-gnu
+	exit ;;
+    x86_64:Linux:*:*)
+	echo x86_64-unknown-linux-gnu
+	exit ;;
+    i*86:Linux:*:*)
+	# The BFD linker knows what the default object file format is, so
+	# first see if it will tell us. cd to the root directory to prevent
+	# problems with other programs or directories called `ld' in the path.
+	# Set LC_ALL=C to ensure ld outputs messages in English.
+	ld_supported_targets=`cd /; LC_ALL=C ld --help 2>&1 \
+			 | sed -ne '/supported targets:/!d
+				    s/[ 	][ 	]*/ /g
+				    s/.*supported targets: *//
+				    s/ .*//
+				    p'`
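+	# (Illustrative: a help line such as "ld: supported targets: elf32-i386
+	# ..." reduces to "elf32-i386" here, selecting the tentative
+	# ${UNAME_MACHINE}-pc-linux-gnu triplet below.)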
+        case "$ld_supported_targets" in
+	  elf32-i386)
+		TENTATIVE="${UNAME_MACHINE}-pc-linux-gnu"
+		;;
+	  a.out-i386-linux)
+		echo "${UNAME_MACHINE}-pc-linux-gnuaout"
+		exit ;;
+	  coff-i386)
+		echo "${UNAME_MACHINE}-pc-linux-gnucoff"
+		exit ;;
+	  "")
+		# Either a pre-BFD a.out linker (linux-gnuoldld) or
+		# one that does not give us useful --help.
+		echo "${UNAME_MACHINE}-pc-linux-gnuoldld"
+		exit ;;
+	esac
+	# Determine whether the default compiler is a.out or elf
+	eval $set_cc_for_build
+	sed 's/^	//' << EOF >$dummy.c
+	#include <features.h>
+	#ifdef __ELF__
+	# ifdef __GLIBC__
+	#  if __GLIBC__ >= 2
+	LIBC=gnu
+	#  else
+	LIBC=gnulibc1
+	#  endif
+	# else
+	LIBC=gnulibc1
+	# endif
+	#else
+	#if defined(__INTEL_COMPILER) || defined(__PGI) || defined(__SUNPRO_C) || defined(__SUNPRO_CC)
+	LIBC=gnu
+	#else
+	LIBC=gnuaout
+	#endif
+	#endif
+	#ifdef __dietlibc__
+	LIBC=dietlibc
+	#endif
+EOF
+	eval "`$CC_FOR_BUILD -E $dummy.c 2>/dev/null | sed -n '
+	    /^LIBC/{
+		s: ::g
+		p
+	    }'`"
+	test x"${LIBC}" != x && {
+		echo "${UNAME_MACHINE}-pc-linux-${LIBC}"
+		exit
+	}
+	test x"${TENTATIVE}" != x && { echo "${TENTATIVE}"; exit; }
+	;;
+    i*86:DYNIX/ptx:4*:*)
+	# ptx 4.0 does uname -s correctly, with DYNIX/ptx in there.
+	# earlier versions are messed up and put the nodename in both
+	# sysname and nodename.
+	echo i386-sequent-sysv4
+	exit ;;
+    i*86:UNIX_SV:4.2MP:2.*)
+        # Unixware is an offshoot of SVR4, but it has its own version
+        # number series starting with 2...
+        # I am not positive that other SVR4 systems won't match this;
+	# I just have to hope.  -- rms.
+        # Use sysv4.2uw... so that sysv4* matches it.
+	echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION}
+	exit ;;
+    i*86:OS/2:*:*)
+	# If we were able to find `uname', then EMX Unix compatibility
+	# is probably installed.
+	echo ${UNAME_MACHINE}-pc-os2-emx
+	exit ;;
+    i*86:XTS-300:*:STOP)
+	echo ${UNAME_MACHINE}-unknown-stop
+	exit ;;
+    i*86:atheos:*:*)
+	echo ${UNAME_MACHINE}-unknown-atheos
+	exit ;;
+    i*86:syllable:*:*)
+	echo ${UNAME_MACHINE}-pc-syllable
+	exit ;;
+    i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.0*:*)
+	echo i386-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    i*86:*DOS:*:*)
+	echo ${UNAME_MACHINE}-pc-msdosdjgpp
+	exit ;;
+    i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*)
+	UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'`
+	if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then
+		echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL}
+	else
+		echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL}
+	fi
+	exit ;;
+    i*86:*:5:[678]*)
+    	# UnixWare 7.x, OpenUNIX and OpenServer 6.
+	case `/bin/uname -X | grep "^Machine"` in
+	    *486*)	     UNAME_MACHINE=i486 ;;
+	    *Pentium)	     UNAME_MACHINE=i586 ;;
+	    *Pent*|*Celeron) UNAME_MACHINE=i686 ;;
+	esac
+	echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}
+	exit ;;
+    i*86:*:3.2:*)
+	if test -f /usr/options/cb.name; then
+		UNAME_REL=`sed -n 's/.*Version //p' </usr/options/cb.name`
+		echo ${UNAME_MACHINE}-pc-isc$UNAME_REL
+	elif /bin/uname -X 2>/dev/null >/dev/null ; then
+		UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')`
+		(/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486
+		(/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \
+			&& UNAME_MACHINE=i586
+		(/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \
+			&& UNAME_MACHINE=i686
+		(/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \
+			&& UNAME_MACHINE=i686
+		echo ${UNAME_MACHINE}-pc-sco$UNAME_REL
+	else
+		echo ${UNAME_MACHINE}-pc-sysv32
+	fi
+	exit ;;
+    pc:*:*:*)
+	# Left here for compatibility:
+        # uname -m always prints 'pc' for DJGPP, but it prints nothing about
+        # the processor, so we play it safe by assuming i386.
+	echo i386-pc-msdosdjgpp
+        exit ;;
+    Intel:Mach:3*:*)
+	echo i386-pc-mach3
+	exit ;;
+    paragon:*:*:*)
+	echo i860-intel-osf1
+	exit ;;
+    i860:*:4.*:*) # i860-SVR4
+	if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then
+	  echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4
+	else # Add other i860-SVR4 vendors below as they are discovered.
+	  echo i860-unknown-sysv${UNAME_RELEASE}  # Unknown i860-SVR4
+	fi
+	exit ;;
+    mini*:CTIX:SYS*5:*)
+	# "miniframe"
+	echo m68010-convergent-sysv
+	exit ;;
+    mc68k:UNIX:SYSTEM5:3.51m)
+	echo m68k-convergent-sysv
+	exit ;;
+    M680?0:D-NIX:5.3:*)
+	echo m68k-diab-dnix
+	exit ;;
+    M68*:*:R3V[5678]*:*)
+	test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;;
+    3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0)
+	OS_REL=''
+	test -r /etc/.relid \
+	&& OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+	  && { echo i486-ncr-sysv4.3${OS_REL}; exit; }
+	/bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+	  && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
+    3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*)
+        /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+          && { echo i486-ncr-sysv4; exit; } ;;
+    m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*)
+	echo m68k-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    mc68030:UNIX_System_V:4.*:*)
+	echo m68k-atari-sysv4
+	exit ;;
+    TSUNAMI:LynxOS:2.*:*)
+	echo sparc-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    rs6000:LynxOS:2.*:*)
+	echo rs6000-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.0*:*)
+	echo powerpc-unknown-lynxos${UNAME_RELEASE}
+	exit ;;
+    SM[BE]S:UNIX_SV:*:*)
+	echo mips-dde-sysv${UNAME_RELEASE}
+	exit ;;
+    RM*:ReliantUNIX-*:*:*)
+	echo mips-sni-sysv4
+	exit ;;
+    RM*:SINIX-*:*:*)
+	echo mips-sni-sysv4
+	exit ;;
+    *:SINIX-*:*:*)
+	if uname -p 2>/dev/null >/dev/null ; then
+		UNAME_MACHINE=`(uname -p) 2>/dev/null`
+		echo ${UNAME_MACHINE}-sni-sysv4
+	else
+		echo ns32k-sni-sysv
+	fi
+	exit ;;
+    PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort
+                      # says <Richard.M.Bartel at ccMail.Census.GOV>
+        echo i586-unisys-sysv4
+        exit ;;
+    *:UNIX_System_V:4*:FTX*)
+	# From Gerald Hewes <hewes at openmarket.com>.
+	# How about differentiating between stratus architectures? -djm
+	echo hppa1.1-stratus-sysv4
+	exit ;;
+    *:*:*:FTX*)
+	# From seanf at swdc.stratus.com.
+	echo i860-stratus-sysv4
+	exit ;;
+    i*86:VOS:*:*)
+	# From Paul.Green at stratus.com.
+	echo ${UNAME_MACHINE}-stratus-vos
+	exit ;;
+    *:VOS:*:*)
+	# From Paul.Green at stratus.com.
+	echo hppa1.1-stratus-vos
+	exit ;;
+    mc68*:A/UX:*:*)
+	echo m68k-apple-aux${UNAME_RELEASE}
+	exit ;;
+    news*:NEWS-OS:6*:*)
+	echo mips-sony-newsos6
+	exit ;;
+    R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
+	if [ -d /usr/nec ]; then
+	        echo mips-nec-sysv${UNAME_RELEASE}
+	else
+	        echo mips-unknown-sysv${UNAME_RELEASE}
+	fi
+        exit ;;
+    BeBox:BeOS:*:*)	# BeOS running on hardware made by Be, PPC only.
+	echo powerpc-be-beos
+	exit ;;
+    BeMac:BeOS:*:*)	# BeOS running on Mac or Mac clone, PPC only.
+	echo powerpc-apple-beos
+	exit ;;
+    BePC:BeOS:*:*)	# BeOS running on Intel PC compatible.
+	echo i586-pc-beos
+	exit ;;
+    SX-4:SUPER-UX:*:*)
+	echo sx4-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-5:SUPER-UX:*:*)
+	echo sx5-nec-superux${UNAME_RELEASE}
+	exit ;;
+    SX-6:SUPER-UX:*:*)
+	echo sx6-nec-superux${UNAME_RELEASE}
+	exit ;;
+    Power*:Rhapsody:*:*)
+	echo powerpc-apple-rhapsody${UNAME_RELEASE}
+	exit ;;
+    *:Rhapsody:*:*)
+	echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE}
+	exit ;;
+    *:Darwin:*:*)
+	UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown
+	case $UNAME_PROCESSOR in
+	    unknown) UNAME_PROCESSOR=powerpc ;;
+	esac
+	echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE}
+	exit ;;
+    *:procnto*:*:* | *:QNX:[0123456789]*:*)
+	UNAME_PROCESSOR=`uname -p`
+	if test "$UNAME_PROCESSOR" = "x86"; then
+		UNAME_PROCESSOR=i386
+		UNAME_MACHINE=pc
+	fi
+	echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE}
+	exit ;;
+    *:QNX:*:4*)
+	echo i386-pc-qnx
+	exit ;;
+    NSE-?:NONSTOP_KERNEL:*:*)
+	echo nse-tandem-nsk${UNAME_RELEASE}
+	exit ;;
+    NSR-?:NONSTOP_KERNEL:*:*)
+	echo nsr-tandem-nsk${UNAME_RELEASE}
+	exit ;;
+    *:NonStop-UX:*:*)
+	echo mips-compaq-nonstopux
+	exit ;;
+    BS2000:POSIX*:*:*)
+	echo bs2000-siemens-sysv
+	exit ;;
+    DS/*:UNIX_System_V:*:*)
+	echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE}
+	exit ;;
+    *:Plan9:*:*)
+	# "uname -m" is not consistent, so use $cputype instead. 386
+	# is converted to i386 for consistency with other x86
+	# operating systems.
+	if test "$cputype" = "386"; then
+	    UNAME_MACHINE=i386
+	else
+	    UNAME_MACHINE="$cputype"
+	fi
+	echo ${UNAME_MACHINE}-unknown-plan9
+	exit ;;
+    *:TOPS-10:*:*)
+	echo pdp10-unknown-tops10
+	exit ;;
+    *:TENEX:*:*)
+	echo pdp10-unknown-tenex
+	exit ;;
+    KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*)
+	echo pdp10-dec-tops20
+	exit ;;
+    XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*)
+	echo pdp10-xkl-tops20
+	exit ;;
+    *:TOPS-20:*:*)
+	echo pdp10-unknown-tops20
+	exit ;;
+    *:ITS:*:*)
+	echo pdp10-unknown-its
+	exit ;;
+    SEI:*:*:SEIUX)
+        echo mips-sei-seiux${UNAME_RELEASE}
+	exit ;;
+    *:DragonFly:*:*)
+	echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`
+	exit ;;
+    *:*VMS:*:*)
+    	UNAME_MACHINE=`(uname -p) 2>/dev/null`
+	case "${UNAME_MACHINE}" in
+	    A*) echo alpha-dec-vms ; exit ;;
+	    I*) echo ia64-dec-vms ; exit ;;
+	    V*) echo vax-dec-vms ; exit ;;
+	esac ;;
+    *:XENIX:*:SysV)
+	echo i386-pc-xenix
+	exit ;;
+    i*86:skyos:*:*)
+	echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//'
+	exit ;;
+    i*86:rdos:*:*)
+	echo ${UNAME_MACHINE}-pc-rdos
+	exit ;;
+esac
+
+#echo '(No uname command or uname output not recognized.)' 1>&2
+#echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2
+
+eval $set_cc_for_build
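+# Last resort: compile a small probe program whose preprocessor conditionals
+# recognize a handful of very old systems that the case table above misses.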
+cat >$dummy.c <<EOF
+#ifdef _SEQUENT_
+# include <sys/types.h>
+# include <sys/utsname.h>
+#endif
+main ()
+{
+#if defined (sony)
+#if defined (MIPSEB)
+  /* BFD wants "bsd" instead of "newsos".  Perhaps BFD should be changed,
+     I don't know....  */
+  printf ("mips-sony-bsd\n"); exit (0);
+#else
+#include <sys/param.h>
+  printf ("m68k-sony-newsos%s\n",
+#ifdef NEWSOS4
+          "4"
+#else
+	  ""
+#endif
+         ); exit (0);
+#endif
+#endif
+
+#if defined (__arm) && defined (__acorn) && defined (__unix)
+  printf ("arm-acorn-riscix\n"); exit (0);
+#endif
+
+#if defined (hp300) && !defined (hpux)
+  printf ("m68k-hp-bsd\n"); exit (0);
+#endif
+
+#if defined (NeXT)
+#if !defined (__ARCHITECTURE__)
+#define __ARCHITECTURE__ "m68k"
+#endif
+  int version;
+  version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`;
+  if (version < 4)
+    printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version);
+  else
+    printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version);
+  exit (0);
+#endif
+
+#if defined (MULTIMAX) || defined (n16)
+#if defined (UMAXV)
+  printf ("ns32k-encore-sysv\n"); exit (0);
+#else
+#if defined (CMU)
+  printf ("ns32k-encore-mach\n"); exit (0);
+#else
+  printf ("ns32k-encore-bsd\n"); exit (0);
+#endif
+#endif
+#endif
+
+#if defined (__386BSD__)
+  printf ("i386-pc-bsd\n"); exit (0);
+#endif
+
+#if defined (sequent)
+#if defined (i386)
+  printf ("i386-sequent-dynix\n"); exit (0);
+#endif
+#if defined (ns32000)
+  printf ("ns32k-sequent-dynix\n"); exit (0);
+#endif
+#endif
+
+#if defined (_SEQUENT_)
+    struct utsname un;
+
+    uname(&un);
+
+    if (strncmp(un.version, "V2", 2) == 0) {
+	printf ("i386-sequent-ptx2\n"); exit (0);
+    }
+    if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */
+	printf ("i386-sequent-ptx1\n"); exit (0);
+    }
+    printf ("i386-sequent-ptx\n"); exit (0);
+
+#endif
+
+#if defined (vax)
+# if !defined (ultrix)
+#  include <sys/param.h>
+#  if defined (BSD)
+#   if BSD == 43
+      printf ("vax-dec-bsd4.3\n"); exit (0);
+#   else
+#    if BSD == 199006
+      printf ("vax-dec-bsd4.3reno\n"); exit (0);
+#    else
+      printf ("vax-dec-bsd\n"); exit (0);
+#    endif
+#   endif
+#  else
+    printf ("vax-dec-bsd\n"); exit (0);
+#  endif
+# else
+    printf ("vax-dec-ultrix\n"); exit (0);
+# endif
+#endif
+
+#if defined (alliant) && defined (i860)
+  printf ("i860-alliant-bsd\n"); exit (0);
+#endif
+
+  exit (1);
+}
+EOF
+
+$CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` &&
+	{ echo "$SYSTEM_NAME"; exit; }
+
+# Apollos put the system type in the environment.
+
+test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; }
+
+# Convex versions that predate uname can use getsysinfo(1)
+
+if [ -x /usr/convex/getsysinfo ]
+then
+    case `getsysinfo -f cpu_type` in
+    c1*)
+	echo c1-convex-bsd
+	exit ;;
+    c2*)
+	if getsysinfo -f scalar_acc
+	then echo c32-convex-bsd
+	else echo c2-convex-bsd
+	fi
+	exit ;;
+    c34*)
+	echo c34-convex-bsd
+	exit ;;
+    c38*)
+	echo c38-convex-bsd
+	exit ;;
+    c4*)
+	echo c4-convex-bsd
+	exit ;;
+    esac
+fi
+
+cat >&2 <<EOF
+$0: unable to guess system type
+
+This script, last modified $timestamp, has failed to recognize
+the operating system you are using. You are advised to download
+the most up-to-date version of the config scripts from
+
+  http://savannah.gnu.org/cgi-bin/viewcvs/*checkout*/config/config/config.guess
+and
+  http://savannah.gnu.org/cgi-bin/viewcvs/*checkout*/config/config/config.sub
+
+If the version you run ($0) is already up to date, please
+send the following data and any information you think might be
+pertinent to <config-patches at gnu.org> so that we have the
+information needed to handle your system.
+
+config.guess timestamp = $timestamp
+
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null`
+/bin/uname -X     = `(/bin/uname -X) 2>/dev/null`
+
+hostinfo               = `(hostinfo) 2>/dev/null`
+/bin/universe          = `(/bin/universe) 2>/dev/null`
+/usr/bin/arch -k       = `(/usr/bin/arch -k) 2>/dev/null`
+/bin/arch              = `(/bin/arch) 2>/dev/null`
+/usr/bin/oslevel       = `(/usr/bin/oslevel) 2>/dev/null`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null`
+
+UNAME_MACHINE = ${UNAME_MACHINE}
+UNAME_RELEASE = ${UNAME_RELEASE}
+UNAME_SYSTEM  = ${UNAME_SYSTEM}
+UNAME_VERSION = ${UNAME_VERSION}
+EOF
+
+exit 1
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/lang/c/jansson/config.h.in b/lang/c/jansson/config.h.in
new file mode 100644
index 0000000..b5c82aa
--- /dev/null
+++ b/lang/c/jansson/config.h.in
@@ -0,0 +1,75 @@
+/* config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#undef HAVE_DLFCN_H
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#undef HAVE_INTTYPES_H
+
+/* Define to 1 if the system has the type `long long int'. */
+#undef HAVE_LONG_LONG_INT
+
+/* Define to 1 if you have the <memory.h> header file. */
+#undef HAVE_MEMORY_H
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#undef HAVE_STDINT_H
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#undef HAVE_STDLIB_H
+
+/* Define to 1 if you have the <strings.h> header file. */
+#undef HAVE_STRINGS_H
+
+/* Define to 1 if you have the <string.h> header file. */
+#undef HAVE_STRING_H
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#undef HAVE_SYS_STAT_H
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#undef HAVE_SYS_TYPES_H
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#undef HAVE_UNISTD_H
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#undef LT_OBJDIR
+
+/* Name of package */
+#undef PACKAGE
+
+/* Define to the address where bug reports for this package should be sent. */
+#undef PACKAGE_BUGREPORT
+
+/* Define to the full name of this package. */
+#undef PACKAGE_NAME
+
+/* Define to the full name and version of this package. */
+#undef PACKAGE_STRING
+
+/* Define to the one symbol short name of this package. */
+#undef PACKAGE_TARNAME
+
+/* Define to the home page for this package. */
+#undef PACKAGE_URL
+
+/* Define to the version of this package. */
+#undef PACKAGE_VERSION
+
+/* Define to 1 if you have the ANSI C header files. */
+#undef STDC_HEADERS
+
+/* Version number of package */
+#undef VERSION
+
+/* Define to `__inline__' or `__inline' if that's what the C compiler
+   calls it, or to nothing if 'inline' is not supported under any name.  */
+#ifndef __cplusplus
+#undef inline
+#endif
+
+/* Define to the type of a signed integer type of width exactly 32 bits if
+   such a type exists and the standard includes do not define it. */
+#undef int32_t
diff --git a/lang/c/jansson/config.sub b/lang/c/jansson/config.sub
new file mode 100755
index 0000000..fab0aa3
--- /dev/null
+++ b/lang/c/jansson/config.sub
@@ -0,0 +1,1616 @@
+#! /bin/sh
+# Configuration validation subroutine script.
+#   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
+#   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
+#   Inc.
+
+timestamp='2006-09-20'
+
+# This file is (in principle) common to ALL GNU software.
+# The presence of a machine in this file suggests that SOME GNU software
+# can handle that machine.  It does not imply ALL GNU software can.
+#
+# This file is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA
+# 02110-1301, USA.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+
+# Please send patches to <config-patches at gnu.org>.  Submit a context
+# diff and a properly formatted ChangeLog entry.
+#
+# Configuration subroutine to validate and canonicalize a configuration type.
+# Supply the specified configuration type as an argument.
+# If it is invalid, we print an error message on stderr and exit with code 1.
+# Otherwise, we print the canonical config type on stdout and succeed.
+
+# This file is supposed to be the same for all GNU packages
+# and recognize all the CPU types, system types and aliases
+# that are meaningful with *any* GNU software.
+# Each package is responsible for reporting which valid configurations
+# it does not support.  The user should be able to distinguish
+# a failure to support a valid configuration from a meaningless
+# configuration.
+
+# The goal of this file is to map all the various variations of a given
+# machine specification into a single specification in the form:
+#	CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
+# or in some cases, the newer four-part form:
+#	CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
+# It is wrong to echo any other type of specification.
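+#
+# For example (illustrative): `config.sub i386-linux-gnu' canonicalizes to
+# i386-pc-linux-gnu.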
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION] CPU-MFR-OPSYS
+       $0 [OPTION] ALIAS
+
+Canonicalize a configuration name.
+
+Operation modes:
+  -h, --help         print this help, then exit
+  -t, --time-stamp   print date of last modification, then exit
+  -v, --version      print version number, then exit
+
+Report bugs and patches to <config-patches at gnu.org>."
+
+version="\
+GNU config.sub ($timestamp)
+
+Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
+Free Software Foundation, Inc.
+
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+  case $1 in
+    --time-stamp | --time* | -t )
+       echo "$timestamp" ; exit ;;
+    --version | -v )
+       echo "$version" ; exit ;;
+    --help | --h* | -h )
+       echo "$usage"; exit ;;
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       echo "$me: invalid option $1$help" >&2
+       exit 1 ;;
+
+    *local*)
+       # First pass through any local machine types.
+       echo $1
+       exit ;;
+
+    * )
+       break ;;
+  esac
+done
+
+case $# in
+ 0) echo "$me: missing argument$help" >&2
+    exit 1;;
+ 1) ;;
+ *) echo "$me: too many arguments$help" >&2
+    exit 1;;
+esac
+
+# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any).
+# Here we must recognize all the valid KERNEL-OS combinations.
+maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'`
+case $maybe_os in
+  nto-qnx* | linux-gnu* | linux-dietlibc | linux-newlib* | linux-uclibc* | \
+  uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* | \
+  storm-chaos* | os2-emx* | rtmk-nova*)
+    os=-$maybe_os
+    basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`
+    ;;
+  *)
+    basic_machine=`echo $1 | sed 's/-[^-]*$//'`
+    if [ $basic_machine != $1 ]
+    then os=`echo $1 | sed 's/.*-/-/'`
+    else os=; fi
+    ;;
+esac
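+# (Illustrative: for i686-pc-linux-gnu, maybe_os is "linux-gnu", so os becomes
+# "-linux-gnu" and basic_machine "i686-pc"; a plain "sun4" has no dash, so
+# basic_machine stays "sun4" and os stays empty.)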
+
+### Let's recognize common machines as not being operating systems so
+### that things like config.sub decstation-3100 work.  We also
+### recognize some manufacturers as not being operating systems, so we
+### can provide default operating systems below.
+case $os in
+	-sun*os*)
+		# Prevent the following clause from handling this invalid input.
+		;;
+	-dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \
+	-att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \
+	-unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \
+	-convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\
+	-c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \
+	-harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \
+	-apple | -axis | -knuth | -cray)
+		os=
+		basic_machine=$1
+		;;
+	-sim | -cisco | -oki | -wec | -winbond)
+		os=
+		basic_machine=$1
+		;;
+	-scout)
+		;;
+	-wrs)
+		os=-vxworks
+		basic_machine=$1
+		;;
+	-chorusos*)
+		os=-chorusos
+		basic_machine=$1
+		;;
+ 	-chorusrdb)
+ 		os=-chorusrdb
+		basic_machine=$1
+ 		;;
+	-hiux*)
+		os=-hiuxwe2
+		;;
+	-sco6)
+		os=-sco5v6
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco5)
+		os=-sco3.2v5
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco4)
+		os=-sco3.2v4
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco3.2.[4-9]*)
+		os=`echo $os | sed -e 's/sco3.2./sco3.2v/'`
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco3.2v[4-9]*)
+		# Don't forget version if it is 3.2v4 or newer.
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco5v6*)
+		# Don't forget version if it is 3.2v4 or newer.
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-sco*)
+		os=-sco3.2v2
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-udk*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-isc)
+		os=-isc2.2
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-clix*)
+		basic_machine=clipper-intergraph
+		;;
+	-isc*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
+		;;
+	-lynx*)
+		os=-lynxos
+		;;
+	-ptx*)
+		basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'`
+		;;
+	-windowsnt*)
+		os=`echo $os | sed -e 's/windowsnt/winnt/'`
+		;;
+	-psos*)
+		os=-psos
+		;;
+	-mint | -mint[0-9]*)
+		basic_machine=m68k-atari
+		os=-mint
+		;;
+esac
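+# (Illustrative: an input of i586-sco5 leaves this case with os=-sco3.2v5 and
+# basic_machine=i586-pc.)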
+
+# Decode aliases for certain CPU-COMPANY combinations.
+case $basic_machine in
+	# Recognize the basic CPU types without company name.
+	# Some are omitted here because they have special meanings below.
+	1750a | 580 \
+	| a29k \
+	| alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \
+	| alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \
+	| am33_2.0 \
+	| arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \
+	| bfin \
+	| c4x | clipper \
+	| d10v | d30v | dlx | dsp16xx \
+	| fr30 | frv \
+	| h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \
+	| i370 | i860 | i960 | ia64 \
+	| ip2k | iq2000 \
+	| m32c | m32r | m32rle | m68000 | m68k | m88k \
+	| maxq | mb | microblaze | mcore \
+	| mips | mipsbe | mipseb | mipsel | mipsle \
+	| mips16 \
+	| mips64 | mips64el \
+	| mips64vr | mips64vrel \
+	| mips64orion | mips64orionel \
+	| mips64vr4100 | mips64vr4100el \
+	| mips64vr4300 | mips64vr4300el \
+	| mips64vr5000 | mips64vr5000el \
+	| mips64vr5900 | mips64vr5900el \
+	| mipsisa32 | mipsisa32el \
+	| mipsisa32r2 | mipsisa32r2el \
+	| mipsisa64 | mipsisa64el \
+	| mipsisa64r2 | mipsisa64r2el \
+	| mipsisa64sb1 | mipsisa64sb1el \
+	| mipsisa64sr71k | mipsisa64sr71kel \
+	| mipstx39 | mipstx39el \
+	| mn10200 | mn10300 \
+	| mt \
+	| msp430 \
+	| nios | nios2 \
+	| ns16k | ns32k \
+	| or32 \
+	| pdp10 | pdp11 | pj | pjl \
+	| powerpc | powerpc64 | powerpc64le | powerpcle | ppcbe \
+	| pyramid \
+	| score \
+	| sh | sh[1234] | sh[24]a | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \
+	| sh64 | sh64le \
+	| sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \
+	| sparcv8 | sparcv9 | sparcv9b | sparcv9v \
+	| spu | strongarm \
+	| tahoe | thumb | tic4x | tic80 | tron \
+	| v850 | v850e \
+	| we32k \
+	| x86 | xc16x | xscale | xscalee[bl] | xstormy16 | xtensa \
+	| z8k)
+		basic_machine=$basic_machine-unknown
+		;;
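+	# (Illustrative: a bare "sparc" or "mips64el" becomes sparc-unknown
+	# or mips64el-unknown here.)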
+	m6811 | m68hc11 | m6812 | m68hc12)
+		# Motorola 68HC11/12.
+		basic_machine=$basic_machine-unknown
+		os=-none
+		;;
+	m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k)
+		;;
+	ms1)
+		basic_machine=mt-unknown
+		;;
+
+	# We use `pc' rather than `unknown'
+	# because (1) that's what they normally are, and
+	# (2) the word "unknown" tends to confuse beginning users.
+	i*86 | x86_64)
+	  basic_machine=$basic_machine-pc
+	  ;;
+	# Object if there is more than one company-name word.
+	*-*-*)
+		echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
+		exit 1
+		;;
+	# Recognize the basic CPU types with company name.
+	580-* \
+	| a29k-* \
+	| alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \
+	| alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \
+	| alphapca5[67]-* | alpha64pca5[67]-* | arc-* \
+	| arm-*  | armbe-* | armle-* | armeb-* | armv*-* \
+	| avr-* | avr32-* \
+	| bfin-* | bs2000-* \
+	| c[123]* | c30-* | [cjt]90-* | c4x-* | c54x-* | c55x-* | c6x-* \
+	| clipper-* | craynv-* | cydra-* \
+	| d10v-* | d30v-* | dlx-* \
+	| elxsi-* \
+	| f30[01]-* | f700-* | fr30-* | frv-* | fx80-* \
+	| h8300-* | h8500-* \
+	| hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \
+	| i*86-* | i860-* | i960-* | ia64-* \
+	| ip2k-* | iq2000-* \
+	| m32c-* | m32r-* | m32rle-* \
+	| m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \
+	| m88110-* | m88k-* | maxq-* | mcore-* \
+	| mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \
+	| mips16-* \
+	| mips64-* | mips64el-* \
+	| mips64vr-* | mips64vrel-* \
+	| mips64orion-* | mips64orionel-* \
+	| mips64vr4100-* | mips64vr4100el-* \
+	| mips64vr4300-* | mips64vr4300el-* \
+	| mips64vr5000-* | mips64vr5000el-* \
+	| mips64vr5900-* | mips64vr5900el-* \
+	| mipsisa32-* | mipsisa32el-* \
+	| mipsisa32r2-* | mipsisa32r2el-* \
+	| mipsisa64-* | mipsisa64el-* \
+	| mipsisa64r2-* | mipsisa64r2el-* \
+	| mipsisa64sb1-* | mipsisa64sb1el-* \
+	| mipsisa64sr71k-* | mipsisa64sr71kel-* \
+	| mipstx39-* | mipstx39el-* \
+	| mmix-* \
+	| mt-* \
+	| msp430-* \
+	| nios-* | nios2-* \
+	| none-* | np1-* | ns16k-* | ns32k-* \
+	| orion-* \
+	| pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \
+	| powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* | ppcbe-* \
+	| pyramid-* \
+	| romp-* | rs6000-* \
+	| sh-* | sh[1234]-* | sh[24]a-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \
+	| shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \
+	| sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \
+	| sparclite-* \
+	| sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | strongarm-* | sv1-* | sx?-* \
+	| tahoe-* | thumb-* \
+	| tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \
+	| tron-* \
+	| v850-* | v850e-* | vax-* \
+	| we32k-* \
+	| x86-* | x86_64-* | xc16x-* | xps100-* | xscale-* | xscalee[bl]-* \
+	| xstormy16-* | xtensa-* \
+	| ymp-* \
+	| z8k-*)
+		;;
+	# Recognize the various machine names and aliases which stand
+	# for a CPU type and a company and sometimes even an OS.
+	386bsd)
+		basic_machine=i386-unknown
+		os=-bsd
+		;;
+	3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc)
+		basic_machine=m68000-att
+		;;
+	3b*)
+		basic_machine=we32k-att
+		;;
+	a29khif)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+    	abacus)
+		basic_machine=abacus-unknown
+		;;
+	adobe68k)
+		basic_machine=m68010-adobe
+		os=-scout
+		;;
+	alliant | fx80)
+		basic_machine=fx80-alliant
+		;;
+	altos | altos3068)
+		basic_machine=m68k-altos
+		;;
+	am29k)
+		basic_machine=a29k-none
+		os=-bsd
+		;;
+	amd64)
+		basic_machine=x86_64-pc
+		;;
+	amd64-*)
+		basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	amdahl)
+		basic_machine=580-amdahl
+		os=-sysv
+		;;
+	amiga | amiga-*)
+		basic_machine=m68k-unknown
+		;;
+	amigaos | amigados)
+		basic_machine=m68k-unknown
+		os=-amigaos
+		;;
+	amigaunix | amix)
+		basic_machine=m68k-unknown
+		os=-sysv4
+		;;
+	apollo68)
+		basic_machine=m68k-apollo
+		os=-sysv
+		;;
+	apollo68bsd)
+		basic_machine=m68k-apollo
+		os=-bsd
+		;;
+	aux)
+		basic_machine=m68k-apple
+		os=-aux
+		;;
+	balance)
+		basic_machine=ns32k-sequent
+		os=-dynix
+		;;
+	c90)
+		basic_machine=c90-cray
+		os=-unicos
+		;;
+	convex-c1)
+		basic_machine=c1-convex
+		os=-bsd
+		;;
+	convex-c2)
+		basic_machine=c2-convex
+		os=-bsd
+		;;
+	convex-c32)
+		basic_machine=c32-convex
+		os=-bsd
+		;;
+	convex-c34)
+		basic_machine=c34-convex
+		os=-bsd
+		;;
+	convex-c38)
+		basic_machine=c38-convex
+		os=-bsd
+		;;
+	cray | j90)
+		basic_machine=j90-cray
+		os=-unicos
+		;;
+	craynv)
+		basic_machine=craynv-cray
+		os=-unicosmp
+		;;
+	cr16c)
+		basic_machine=cr16c-unknown
+		os=-elf
+		;;
+	crds | unos)
+		basic_machine=m68k-crds
+		;;
+	crisv32 | crisv32-* | etraxfs*)
+		basic_machine=crisv32-axis
+		;;
+	cris | cris-* | etrax*)
+		basic_machine=cris-axis
+		;;
+	crx)
+		basic_machine=crx-unknown
+		os=-elf
+		;;
+	da30 | da30-*)
+		basic_machine=m68k-da30
+		;;
+	decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn)
+		basic_machine=mips-dec
+		;;
+	decsystem10* | dec10*)
+		basic_machine=pdp10-dec
+		os=-tops10
+		;;
+	decsystem20* | dec20*)
+		basic_machine=pdp10-dec
+		os=-tops20
+		;;
+	delta | 3300 | motorola-3300 | motorola-delta \
+	      | 3300-motorola | delta-motorola)
+		basic_machine=m68k-motorola
+		;;
+	delta88)
+		basic_machine=m88k-motorola
+		os=-sysv3
+		;;
+	djgpp)
+		basic_machine=i586-pc
+		os=-msdosdjgpp
+		;;
+	dpx20 | dpx20-*)
+		basic_machine=rs6000-bull
+		os=-bosx
+		;;
+	dpx2* | dpx2*-bull)
+		basic_machine=m68k-bull
+		os=-sysv3
+		;;
+	ebmon29k)
+		basic_machine=a29k-amd
+		os=-ebmon
+		;;
+	elxsi)
+		basic_machine=elxsi-elxsi
+		os=-bsd
+		;;
+	encore | umax | mmax)
+		basic_machine=ns32k-encore
+		;;
+	es1800 | OSE68k | ose68k | ose | OSE)
+		basic_machine=m68k-ericsson
+		os=-ose
+		;;
+	fx2800)
+		basic_machine=i860-alliant
+		;;
+	genix)
+		basic_machine=ns32k-ns
+		;;
+	gmicro)
+		basic_machine=tron-gmicro
+		os=-sysv
+		;;
+	go32)
+		basic_machine=i386-pc
+		os=-go32
+		;;
+	h3050r* | hiux*)
+		basic_machine=hppa1.1-hitachi
+		os=-hiuxwe2
+		;;
+	h8300hms)
+		basic_machine=h8300-hitachi
+		os=-hms
+		;;
+	h8300xray)
+		basic_machine=h8300-hitachi
+		os=-xray
+		;;
+	h8500hms)
+		basic_machine=h8500-hitachi
+		os=-hms
+		;;
+	harris)
+		basic_machine=m88k-harris
+		os=-sysv3
+		;;
+	hp300-*)
+		basic_machine=m68k-hp
+		;;
+	hp300bsd)
+		basic_machine=m68k-hp
+		os=-bsd
+		;;
+	hp300hpux)
+		basic_machine=m68k-hp
+		os=-hpux
+		;;
+	hp3k9[0-9][0-9] | hp9[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hp9k2[0-9][0-9] | hp9k31[0-9])
+		basic_machine=m68000-hp
+		;;
+	hp9k3[2-9][0-9])
+		basic_machine=m68k-hp
+		;;
+	hp9k6[0-9][0-9] | hp6[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hp9k7[0-79][0-9] | hp7[0-79][0-9])
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k78[0-9] | hp78[0-9])
+		# FIXME: really hppa2.0-hp
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893)
+		# FIXME: really hppa2.0-hp
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[0-9][13679] | hp8[0-9][13679])
+		basic_machine=hppa1.1-hp
+		;;
+	hp9k8[0-9][0-9] | hp8[0-9][0-9])
+		basic_machine=hppa1.0-hp
+		;;
+	hppa-next)
+		os=-nextstep3
+		;;
+	hppaosf)
+		basic_machine=hppa1.1-hp
+		os=-osf
+		;;
+	hppro)
+		basic_machine=hppa1.1-hp
+		os=-proelf
+		;;
+	i370-ibm* | ibm*)
+		basic_machine=i370-ibm
+		;;
+# I'm not sure what "Sysv32" means.  Should this be sysv3.2?
+	i*86v32)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv32
+		;;
+	i*86v4*)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv4
+		;;
+	i*86v)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-sysv
+		;;
+	i*86sol2)
+		basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
+		os=-solaris2
+		;;
+	i386mach)
+		basic_machine=i386-mach
+		os=-mach
+		;;
+	i386-vsta | vsta)
+		basic_machine=i386-unknown
+		os=-vsta
+		;;
+	iris | iris4d)
+		basic_machine=mips-sgi
+		case $os in
+		    -irix*)
+			;;
+		    *)
+			os=-irix4
+			;;
+		esac
+		;;
+	isi68 | isi)
+		basic_machine=m68k-isi
+		os=-sysv
+		;;
+	m88k-omron*)
+		basic_machine=m88k-omron
+		;;
+	magnum | m3230)
+		basic_machine=mips-mips
+		os=-sysv
+		;;
+	merlin)
+		basic_machine=ns32k-utek
+		os=-sysv
+		;;
+	mingw32)
+		basic_machine=i386-pc
+		os=-mingw32
+		;;
+	miniframe)
+		basic_machine=m68000-convergent
+		;;
+	*mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*)
+		basic_machine=m68k-atari
+		os=-mint
+		;;
+	mips3*-*)
+		basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`
+		;;
+	mips3*)
+		basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown
+		;;
+	monitor)
+		basic_machine=m68k-rom68k
+		os=-coff
+		;;
+	morphos)
+		basic_machine=powerpc-unknown
+		os=-morphos
+		;;
+	msdos)
+		basic_machine=i386-pc
+		os=-msdos
+		;;
+	ms1-*)
+		basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'`
+		;;
+	mvs)
+		basic_machine=i370-ibm
+		os=-mvs
+		;;
+	ncr3000)
+		basic_machine=i486-ncr
+		os=-sysv4
+		;;
+	netbsd386)
+		basic_machine=i386-unknown
+		os=-netbsd
+		;;
+	netwinder)
+		basic_machine=armv4l-rebel
+		os=-linux
+		;;
+	news | news700 | news800 | news900)
+		basic_machine=m68k-sony
+		os=-newsos
+		;;
+	news1000)
+		basic_machine=m68030-sony
+		os=-newsos
+		;;
+	news-3600 | risc-news)
+		basic_machine=mips-sony
+		os=-newsos
+		;;
+	necv70)
+		basic_machine=v70-nec
+		os=-sysv
+		;;
+	next | m*-next )
+		basic_machine=m68k-next
+		case $os in
+		    -nextstep* )
+			;;
+		    -ns2*)
+		      os=-nextstep2
+			;;
+		    *)
+		      os=-nextstep3
+			;;
+		esac
+		;;
+	nh3000)
+		basic_machine=m68k-harris
+		os=-cxux
+		;;
+	nh[45]000)
+		basic_machine=m88k-harris
+		os=-cxux
+		;;
+	nindy960)
+		basic_machine=i960-intel
+		os=-nindy
+		;;
+	mon960)
+		basic_machine=i960-intel
+		os=-mon960
+		;;
+	nonstopux)
+		basic_machine=mips-compaq
+		os=-nonstopux
+		;;
+	np1)
+		basic_machine=np1-gould
+		;;
+	nsr-tandem)
+		basic_machine=nsr-tandem
+		;;
+	op50n-* | op60c-*)
+		basic_machine=hppa1.1-oki
+		os=-proelf
+		;;
+	openrisc | openrisc-*)
+		basic_machine=or32-unknown
+		;;
+	os400)
+		basic_machine=powerpc-ibm
+		os=-os400
+		;;
+	OSE68000 | ose68000)
+		basic_machine=m68000-ericsson
+		os=-ose
+		;;
+	os68k)
+		basic_machine=m68k-none
+		os=-os68k
+		;;
+	pa-hitachi)
+		basic_machine=hppa1.1-hitachi
+		os=-hiuxwe2
+		;;
+	paragon)
+		basic_machine=i860-intel
+		os=-osf
+		;;
+	pbd)
+		basic_machine=sparc-tti
+		;;
+	pbb)
+		basic_machine=m68k-tti
+		;;
+	pc532 | pc532-*)
+		basic_machine=ns32k-pc532
+		;;
+	pc98)
+		basic_machine=i386-pc
+		;;
+	pc98-*)
+		basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentium | p5 | k5 | k6 | nexgen | viac3)
+		basic_machine=i586-pc
+		;;
+	pentiumpro | p6 | 6x86 | athlon | athlon_*)
+		basic_machine=i686-pc
+		;;
+	pentiumii | pentium2 | pentiumiii | pentium3)
+		basic_machine=i686-pc
+		;;
+	pentium4)
+		basic_machine=i786-pc
+		;;
+	pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*)
+		basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentiumpro-* | p6-* | 6x86-* | athlon-*)
+		basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*)
+		basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pentium4-*)
+		basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	pn)
+		basic_machine=pn-gould
+		;;
+	power)	basic_machine=power-ibm
+		;;
+	ppc)	basic_machine=powerpc-unknown
+		;;
+	ppc-*)	basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppcle | powerpclittle | ppc-le | powerpc-little)
+		basic_machine=powerpcle-unknown
+		;;
+	ppcle-* | powerpclittle-*)
+		basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppc64)	basic_machine=powerpc64-unknown
+		;;
+	ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ppc64le | powerpc64little | ppc64-le | powerpc64-little)
+		basic_machine=powerpc64le-unknown
+		;;
+	ppc64le-* | powerpc64little-*)
+		basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'`
+		;;
+	ps2)
+		basic_machine=i386-ibm
+		;;
+	pw32)
+		basic_machine=i586-unknown
+		os=-pw32
+		;;
+	rdos)
+		basic_machine=i386-pc
+		os=-rdos
+		;;
+	rom68k)
+		basic_machine=m68k-rom68k
+		os=-coff
+		;;
+	rm[46]00)
+		basic_machine=mips-siemens
+		;;
+	rtpc | rtpc-*)
+		basic_machine=romp-ibm
+		;;
+	s390 | s390-*)
+		basic_machine=s390-ibm
+		;;
+	s390x | s390x-*)
+		basic_machine=s390x-ibm
+		;;
+	sa29200)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+	sb1)
+		basic_machine=mipsisa64sb1-unknown
+		;;
+	sb1el)
+		basic_machine=mipsisa64sb1el-unknown
+		;;
+	sde)
+		basic_machine=mipsisa32-sde
+		os=-elf
+		;;
+	sei)
+		basic_machine=mips-sei
+		os=-seiux
+		;;
+	sequent)
+		basic_machine=i386-sequent
+		;;
+	sh)
+		basic_machine=sh-hitachi
+		os=-hms
+		;;
+	sh64)
+		basic_machine=sh64-unknown
+		;;
+	sparclite-wrs | simso-wrs)
+		basic_machine=sparclite-wrs
+		os=-vxworks
+		;;
+	sps7)
+		basic_machine=m68k-bull
+		os=-sysv2
+		;;
+	spur)
+		basic_machine=spur-unknown
+		;;
+	st2000)
+		basic_machine=m68k-tandem
+		;;
+	stratus)
+		basic_machine=i860-stratus
+		os=-sysv4
+		;;
+	sun2)
+		basic_machine=m68000-sun
+		;;
+	sun2os3)
+		basic_machine=m68000-sun
+		os=-sunos3
+		;;
+	sun2os4)
+		basic_machine=m68000-sun
+		os=-sunos4
+		;;
+	sun3os3)
+		basic_machine=m68k-sun
+		os=-sunos3
+		;;
+	sun3os4)
+		basic_machine=m68k-sun
+		os=-sunos4
+		;;
+	sun4os3)
+		basic_machine=sparc-sun
+		os=-sunos3
+		;;
+	sun4os4)
+		basic_machine=sparc-sun
+		os=-sunos4
+		;;
+	sun4sol2)
+		basic_machine=sparc-sun
+		os=-solaris2
+		;;
+	sun3 | sun3-*)
+		basic_machine=m68k-sun
+		;;
+	sun4)
+		basic_machine=sparc-sun
+		;;
+	sun386 | sun386i | roadrunner)
+		basic_machine=i386-sun
+		;;
+	sv1)
+		basic_machine=sv1-cray
+		os=-unicos
+		;;
+	symmetry)
+		basic_machine=i386-sequent
+		os=-dynix
+		;;
+	t3e)
+		basic_machine=alphaev5-cray
+		os=-unicos
+		;;
+	t90)
+		basic_machine=t90-cray
+		os=-unicos
+		;;
+	tic54x | c54x*)
+		basic_machine=tic54x-unknown
+		os=-coff
+		;;
+	tic55x | c55x*)
+		basic_machine=tic55x-unknown
+		os=-coff
+		;;
+	tic6x | c6x*)
+		basic_machine=tic6x-unknown
+		os=-coff
+		;;
+	tx39)
+		basic_machine=mipstx39-unknown
+		;;
+	tx39el)
+		basic_machine=mipstx39el-unknown
+		;;
+	toad1)
+		basic_machine=pdp10-xkl
+		os=-tops20
+		;;
+	tower | tower-32)
+		basic_machine=m68k-ncr
+		;;
+	tpf)
+		basic_machine=s390x-ibm
+		os=-tpf
+		;;
+	udi29k)
+		basic_machine=a29k-amd
+		os=-udi
+		;;
+	ultra3)
+		basic_machine=a29k-nyu
+		os=-sym1
+		;;
+	v810 | necv810)
+		basic_machine=v810-nec
+		os=-none
+		;;
+	vaxv)
+		basic_machine=vax-dec
+		os=-sysv
+		;;
+	vms)
+		basic_machine=vax-dec
+		os=-vms
+		;;
+	vpp*|vx|vx-*)
+		basic_machine=f301-fujitsu
+		;;
+	vxworks960)
+		basic_machine=i960-wrs
+		os=-vxworks
+		;;
+	vxworks68)
+		basic_machine=m68k-wrs
+		os=-vxworks
+		;;
+	vxworks29k)
+		basic_machine=a29k-wrs
+		os=-vxworks
+		;;
+	w65*)
+		basic_machine=w65-wdc
+		os=-none
+		;;
+	w89k-*)
+		basic_machine=hppa1.1-winbond
+		os=-proelf
+		;;
+	xbox)
+		basic_machine=i686-pc
+		os=-mingw32
+		;;
+	xps | xps100)
+		basic_machine=xps100-honeywell
+		;;
+	ymp)
+		basic_machine=ymp-cray
+		os=-unicos
+		;;
+	z8k-*-coff)
+		basic_machine=z8k-unknown
+		os=-sim
+		;;
+	none)
+		basic_machine=none-none
+		os=-none
+		;;
+
+# Here we handle the default manufacturer of certain CPU types.  In some
+# cases it is the only manufacturer; in others, it is the most popular.
+	w89k)
+		basic_machine=hppa1.1-winbond
+		;;
+	op50n)
+		basic_machine=hppa1.1-oki
+		;;
+	op60c)
+		basic_machine=hppa1.1-oki
+		;;
+	romp)
+		basic_machine=romp-ibm
+		;;
+	mmix)
+		basic_machine=mmix-knuth
+		;;
+	rs6000)
+		basic_machine=rs6000-ibm
+		;;
+	vax)
+		basic_machine=vax-dec
+		;;
+	pdp10)
+		# there are many clones, so DEC is not a safe bet
+		basic_machine=pdp10-unknown
+		;;
+	pdp11)
+		basic_machine=pdp11-dec
+		;;
+	we32k)
+		basic_machine=we32k-att
+		;;
+	sh[1234] | sh[24]a | sh[34]eb | sh[1234]le | sh[23]ele)
+		basic_machine=sh-unknown
+		;;
+	sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v)
+		basic_machine=sparc-sun
+		;;
+	cydra)
+		basic_machine=cydra-cydrome
+		;;
+	orion)
+		basic_machine=orion-highlevel
+		;;
+	orion105)
+		basic_machine=clipper-highlevel
+		;;
+	mac | mpw | mac-mpw)
+		basic_machine=m68k-apple
+		;;
+	pmac | pmac-mpw)
+		basic_machine=powerpc-apple
+		;;
+	*-unknown)
+		# Make sure to match an already-canonicalized machine name.
+		;;
+	*)
+		echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
+		exit 1
+		;;
+esac
+
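+# Illustrative results of the table above (assuming no other fields are
+# given): "amd64" canonicalizes to "x86_64-pc", "sun4" to "sparc-sun",
+# and "vax" to "vax-dec".
+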
+# Here we canonicalize certain aliases for manufacturers.
+case $basic_machine in
+	*-digital*)
+		basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'`
+		;;
+	*-commodore*)
+		basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'`
+		;;
+	*)
+		;;
+esac
+
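+# For example, the substitutions above rewrite "vax-digital" to "vax-dec"
+# and "m68k-commodore" to "m68k-cbm".
+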
+# Decode manufacturer-specific aliases for certain operating systems.
+
+if [ x"$os" != x"" ]
+then
+case $os in
+	# First match some system type aliases
+	# that might get confused with valid system types.
+	# -solaris* is a basic system type, with this one exception.
+	-solaris1 | -solaris1.*)
+		os=`echo $os | sed -e 's|solaris1|sunos4|'`
+		;;
+	-solaris)
+		os=-solaris2
+		;;
+	-svr4*)
+		os=-sysv4
+		;;
+	-unixware*)
+		os=-sysv4.2uw
+		;;
+	-gnu/linux*)
+		os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'`
+		;;
+	# First accept the basic system types.
+	# The portable systems come first.
+	# Each alternative MUST END IN A *, to match a version number.
+	# -sysv* is not here because it comes later, after sysvr4.
+	-gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \
+	      | -*vms* | -sco* | -esix* | -isc* | -aix* | -sunos | -sunos[34]*\
+	      | -hpux* | -unos* | -osf* | -luna* | -dgux* | -solaris* | -sym* \
+	      | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \
+	      | -aos* \
+	      | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \
+	      | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \
+	      | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \
+	      | -openbsd* | -solidbsd* \
+	      | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \
+	      | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \
+	      | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \
+	      | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \
+	      | -chorusos* | -chorusrdb* \
+	      | -cygwin* | -pe* | -psos* | -moss* | -proelf* | -rtems* \
+	      | -mingw32* | -linux-gnu* | -linux-newlib* | -linux-uclibc* \
+	      | -uxpv* | -beos* | -mpeix* | -udk* \
+	      | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \
+	      | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \
+	      | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \
+	      | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \
+	      | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \
+	      | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \
+	      | -skyos* | -haiku* | -rdos* | -toppers*)
+	# Remember, each alternative MUST END IN *, to match a version number.
+		;;
+	-qnx*)
+		case $basic_machine in
+		    x86-* | i*86-*)
+			;;
+		    *)
+			os=-nto$os
+			;;
+		esac
+		;;
+	-nto-qnx*)
+		;;
+	-nto*)
+		os=`echo $os | sed -e 's|nto|nto-qnx|'`
+		;;
+	-sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \
+	      | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \
+	      | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*)
+		;;
+	-mac*)
+		os=`echo $os | sed -e 's|mac|macos|'`
+		;;
+	-linux-dietlibc)
+		os=-linux-dietlibc
+		;;
+	-linux*)
+		os=`echo $os | sed -e 's|linux|linux-gnu|'`
+		;;
+	-sunos5*)
+		os=`echo $os | sed -e 's|sunos5|solaris2|'`
+		;;
+	-sunos6*)
+		os=`echo $os | sed -e 's|sunos6|solaris3|'`
+		;;
+	-opened*)
+		os=-openedition
+		;;
+	-os400*)
+		os=-os400
+		;;
+	-wince*)
+		os=-wince
+		;;
+	-osfrose*)
+		os=-osfrose
+		;;
+	-osf*)
+		os=-osf
+		;;
+	-utek*)
+		os=-bsd
+		;;
+	-dynix*)
+		os=-bsd
+		;;
+	-acis*)
+		os=-aos
+		;;
+	-atheos*)
+		os=-atheos
+		;;
+	-syllable*)
+		os=-syllable
+		;;
+	-386bsd)
+		os=-bsd
+		;;
+	-ctix* | -uts*)
+		os=-sysv
+		;;
+	-nova*)
+		os=-rtmk-nova
+		;;
+	-ns2 )
+		os=-nextstep2
+		;;
+	-nsk*)
+		os=-nsk
+		;;
+	# Preserve the version number of sinix5.
+	-sinix5.*)
+		os=`echo $os | sed -e 's|sinix|sysv|'`
+		;;
+	-sinix*)
+		os=-sysv4
+		;;
+	-tpf*)
+		os=-tpf
+		;;
+	-triton*)
+		os=-sysv3
+		;;
+	-oss*)
+		os=-sysv3
+		;;
+	-svr4)
+		os=-sysv4
+		;;
+	-svr3)
+		os=-sysv3
+		;;
+	-sysvr4)
+		os=-sysv4
+		;;
+	# This must come after -sysvr4.
+	-sysv*)
+		;;
+	-ose*)
+		os=-ose
+		;;
+	-es1800*)
+		os=-ose
+		;;
+	-xenix)
+		os=-xenix
+		;;
+	-*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
+		os=-mint
+		;;
+	-aros*)
+		os=-aros
+		;;
+	-kaos*)
+		os=-kaos
+		;;
+	-zvmoe)
+		os=-zvmoe
+		;;
+	-none)
+		;;
+	*)
+		# Get rid of the `-' at the beginning of $os.
+		os=`echo $os | sed 's/[^-]*-//'`
+		echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2
+		exit 1
+		;;
+esac
+else
+
+# Here we handle the default operating systems that come with various machines.
+# The value should be what the vendor currently ships out the door with its
+# machine, or, put another way, the most popular OS provided with the machine.
+
+# Note that if you're going to try to match "-MANUFACTURER" here (say,
+# "-sun"), then you have to tell the case statement up towards the top
+# that MANUFACTURER isn't an operating system.  Otherwise, code above
+# will signal an error saying that MANUFACTURER isn't an operating
+# system, and we'll never get to this point.
+
+case $basic_machine in
+	score-*)
+		os=-elf
+		;;
+	spu-*)
+		os=-elf
+		;;
+	*-acorn)
+		os=-riscix1.2
+		;;
+	arm*-rebel)
+		os=-linux
+		;;
+	arm*-semi)
+		os=-aout
+		;;
+	c4x-* | tic4x-*)
+		os=-coff
+		;;
+	# This must come before the *-dec entry.
+	pdp10-*)
+		os=-tops20
+		;;
+	pdp11-*)
+		os=-none
+		;;
+	*-dec | vax-*)
+		os=-ultrix4.2
+		;;
+	m68*-apollo)
+		os=-domain
+		;;
+	i386-sun)
+		os=-sunos4.0.2
+		;;
+	m68000-sun)
+		os=-sunos3
+		# This also exists in the configure program, but was not the
+		# default.
+		# os=-sunos4
+		;;
+	m68*-cisco)
+		os=-aout
+		;;
+	mips*-cisco)
+		os=-elf
+		;;
+	mips*-*)
+		os=-elf
+		;;
+	or32-*)
+		os=-coff
+		;;
+	*-tti)	# must be before sparc entry or we get the wrong os.
+		os=-sysv3
+		;;
+	sparc-* | *-sun)
+		os=-sunos4.1.1
+		;;
+	*-be)
+		os=-beos
+		;;
+	*-haiku)
+		os=-haiku
+		;;
+	*-ibm)
+		os=-aix
+		;;
+	*-knuth)
+		os=-mmixware
+		;;
+	*-wec)
+		os=-proelf
+		;;
+	*-winbond)
+		os=-proelf
+		;;
+	*-oki)
+		os=-proelf
+		;;
+	*-hp)
+		os=-hpux
+		;;
+	*-hitachi)
+		os=-hiux
+		;;
+	i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent)
+		os=-sysv
+		;;
+	*-cbm)
+		os=-amigaos
+		;;
+	*-dg)
+		os=-dgux
+		;;
+	*-dolphin)
+		os=-sysv3
+		;;
+	m68k-ccur)
+		os=-rtu
+		;;
+	m88k-omron*)
+		os=-luna
+		;;
+	*-next )
+		os=-nextstep
+		;;
+	*-sequent)
+		os=-ptx
+		;;
+	*-crds)
+		os=-unos
+		;;
+	*-ns)
+		os=-genix
+		;;
+	i370-*)
+		os=-mvs
+		;;
+	*-next)
+		os=-nextstep3
+		;;
+	*-gould)
+		os=-sysv
+		;;
+	*-highlevel)
+		os=-bsd
+		;;
+	*-encore)
+		os=-bsd
+		;;
+	*-sgi)
+		os=-irix
+		;;
+	*-siemens)
+		os=-sysv4
+		;;
+	*-masscomp)
+		os=-rtu
+		;;
+	f30[01]-fujitsu | f700-fujitsu)
+		os=-uxpv
+		;;
+	*-rom68k)
+		os=-coff
+		;;
+	*-*bug)
+		os=-coff
+		;;
+	*-apple)
+		os=-macos
+		;;
+	*-atari*)
+		os=-mint
+		;;
+	*)
+		os=-none
+		;;
+esac
+fi
+
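+# For example, with no OS given, "sparc-sun" defaults to os=-sunos4.1.1
+# and "pdp10-xkl" defaults to os=-tops20 per the table above.
+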
+# Here we handle the case where we know the os, and the CPU type, but not the
+# manufacturer.  We pick the logical manufacturer.
+vendor=unknown
+case $basic_machine in
+	*-unknown)
+		case $os in
+			-riscix*)
+				vendor=acorn
+				;;
+			-sunos*)
+				vendor=sun
+				;;
+			-aix*)
+				vendor=ibm
+				;;
+			-beos*)
+				vendor=be
+				;;
+			-hpux*)
+				vendor=hp
+				;;
+			-mpeix*)
+				vendor=hp
+				;;
+			-hiux*)
+				vendor=hitachi
+				;;
+			-unos*)
+				vendor=crds
+				;;
+			-dgux*)
+				vendor=dg
+				;;
+			-luna*)
+				vendor=omron
+				;;
+			-genix*)
+				vendor=ns
+				;;
+			-mvs* | -opened*)
+				vendor=ibm
+				;;
+			-os400*)
+				vendor=ibm
+				;;
+			-ptx*)
+				vendor=sequent
+				;;
+			-tpf*)
+				vendor=ibm
+				;;
+			-vxsim* | -vxworks* | -windiss*)
+				vendor=wrs
+				;;
+			-aux*)
+				vendor=apple
+				;;
+			-hms*)
+				vendor=hitachi
+				;;
+			-mpw* | -macos*)
+				vendor=apple
+				;;
+			-*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
+				vendor=atari
+				;;
+			-vos*)
+				vendor=stratus
+				;;
+		esac
+		basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"`
+		;;
+esac
+
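+# For example, "sparc-unknown" with os=-sunos4 becomes "sparc-sun", and
+# "m68k-unknown" with os=-aux becomes "m68k-apple".
+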
+echo $basic_machine$os
+exit
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/lang/c/jansson/configure b/lang/c/jansson/configure
new file mode 100755
index 0000000..218c032
--- /dev/null
+++ b/lang/c/jansson/configure
@@ -0,0 +1,13010 @@
+#! /bin/sh
+# Guess values for system-dependent variables and create Makefiles.
+# Generated by GNU Autoconf 2.68 for jansson 2.1.
+#
+# Report bugs to <petri at digip.org>.
+#
+#
+# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
+# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
+# Foundation, Inc.
+#
+#
+# This configure script is free software; the Free Software Foundation
+# gives unlimited permission to copy, distribute and modify it.
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$1;
+      case $arg in #(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to an empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves; most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+  exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+if test "x$CONFIG_SHELL" = x; then
+  as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+"
+  as_required="as_fn_return () { (exit \$1); }
+as_fn_success () { as_fn_return 0; }
+as_fn_failure () { as_fn_return 1; }
+as_fn_ret_success () { return 0; }
+as_fn_ret_failure () { return 1; }
+
+exitcode=0
+as_fn_success || { exitcode=1; echo as_fn_success failed.; }
+as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
+as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
+as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
+if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
+
+else
+  exitcode=1; echo positional parameters were not saved.
+fi
+test x\$exitcode = x0 || exit 1"
+  as_suggested="  as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO
+  as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO
+  eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" &&
+  test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1
+
+  test -n \"\${ZSH_VERSION+set}\${BASH_VERSION+set}\" || (
+    ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+    ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO
+    ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO
+    PATH=/empty FPATH=/empty; export PATH FPATH
+    test \"X\`printf %s \$ECHO\`\" = \"X\$ECHO\" \\
+      || test \"X\`print -r -- \$ECHO\`\" = \"X\$ECHO\" ) || exit 1
+test \$(( 1 + 1 )) = 2 || exit 1"
+  if (eval "$as_required") 2>/dev/null; then :
+  as_have_required=yes
+else
+  as_have_required=no
+fi
+  if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
+
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+as_found=false
+for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  as_found=:
+  case $as_dir in #(
+	 /*)
+	   for as_base in sh bash ksh sh5; do
+	     # Try only shells that exist, to save several forks.
+	     as_shell=$as_dir/$as_base
+	     if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
+		    { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
+  CONFIG_SHELL=$as_shell as_have_required=yes
+		   if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then :
+  break 2
+fi
+fi
+	   done;;
+       esac
+  as_found=false
+done
+$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
+	      { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
+  CONFIG_SHELL=$SHELL as_have_required=yes
+fi; }
+IFS=$as_save_IFS
+
+
+      if test "x$CONFIG_SHELL" != x; then :
+  # We cannot yet assume a decent shell, so we have to provide a
+	# neutralization value for shells without unset; and this also
+	# works around shells that cannot unset nonexistent variables.
+	# Preserve -v and -x to the replacement shell.
+	BASH_ENV=/dev/null
+	ENV=/dev/null
+	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
+	export CONFIG_SHELL
+	case $- in # ((((
+	  *v*x* | *x*v* ) as_opts=-vx ;;
+	  *v* ) as_opts=-v ;;
+	  *x* ) as_opts=-x ;;
+	  * ) as_opts= ;;
+	esac
+	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
+fi
+
+    if test x$as_have_required = xno; then :
+  $as_echo "$0: This script requires a shell more modern than all"
+  $as_echo "$0: the shells that I found on your system."
+  if test x${ZSH_VERSION+set} = xset ; then
+    $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
+    $as_echo "$0: be upgraded to zsh 4.3.4 or later."
+  else
+    $as_echo "$0: Please tell bug-autoconf at gnu.org and petri at digip.org
+$0: about your system, including any error possibly output
+$0: before this message. Then install a modern shell, or
+$0: manually run the script under such a shell if you do
+$0: have one."
+  fi
+  exit 1
+fi
+fi
+fi
+SHELL=${CONFIG_SHELL-/bin/sh}
+export SHELL
+# Unset more variables known to interfere with behavior of common tools.
+CLICOLOR_FORCE= GREP_OPTIONS=
+unset CLICOLOR_FORCE GREP_OPTIONS
+
+## --------------------- ##
+## M4sh Shell Functions. ##
+## --------------------- ##
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+  { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+
+# as_fn_set_status STATUS
+# -----------------------
+# Set $? to STATUS, without forking.
+as_fn_set_status ()
+{
+  return $1
+} # as_fn_set_status
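+# Typical use (illustrative): `as_fn_set_status 2' leaves $? set to 2
+# without spawning a subshell.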
+
+# as_fn_exit STATUS
+# -----------------
+# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
+as_fn_exit ()
+{
+  set +e
+  as_fn_set_status $1
+  exit $1
+} # as_fn_exit
+
+# as_fn_mkdir_p
+# -------------
+# Create "$as_dir" as a directory, including parents if necessary.
+as_fn_mkdir_p ()
+{
+
+  case $as_dir in #(
+  -*) as_dir=./$as_dir;;
+  esac
+  test -d "$as_dir" || eval $as_mkdir_p || {
+    as_dirs=
+    while :; do
+      case $as_dir in #(
+      *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
+      *) as_qdir=$as_dir;;
+      esac
+      as_dirs="'$as_qdir' $as_dirs"
+      as_dir=`$as_dirname -- "$as_dir" ||
+$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$as_dir" : 'X\(//\)[^/]' \| \
+	 X"$as_dir" : 'X\(//\)$' \| \
+	 X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$as_dir" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+      test -d "$as_dir" && break
+    done
+    test -z "$as_dirs" || eval "mkdir $as_dirs"
+  } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
+
+
+} # as_fn_mkdir_p
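+# Typical use (illustrative): set $as_dir, then call the helper:
+#   as_dir=sub/dir; as_fn_mkdir_p
+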
+# as_fn_append VAR VALUE
+# ----------------------
+# Append the text in VALUE to the end of the definition contained in VAR. Take
+# advantage of any shell optimizations that allow amortized linear growth over
+# repeated appends, instead of the typical quadratic growth present in naive
+# implementations.
+if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
+  eval 'as_fn_append ()
+  {
+    eval $1+=\$2
+  }'
+else
+  as_fn_append ()
+  {
+    eval $1=\$$1\$2
+  }
+fi # as_fn_append
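+# Typical use (illustrative; variable name is an example only):
+#   as_fn_append ac_configure_args " --enable-static"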
+
+# as_fn_arith ARG...
+# ------------------
+# Perform arithmetic evaluation on the ARGs, and store the result in the
+# global $as_val. Take advantage of shells that can avoid forks. The arguments
+# must be portable across $(()) and expr.
+if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
+  eval 'as_fn_arith ()
+  {
+    as_val=$(( $* ))
+  }'
+else
+  as_fn_arith ()
+  {
+    as_val=`expr "$@" || test $? -eq 1`
+  }
+fi # as_fn_arith
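+# Typical use (illustrative): as_fn_arith 2 + 3 && echo $as_val   # prints 5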
+
+
+# as_fn_error STATUS ERROR [LINENO LOG_FD]
+# ----------------------------------------
+# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
+# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
+# script with STATUS, using 1 if that was 0.
+as_fn_error ()
+{
+  as_status=$1; test $as_status -eq 0 && as_status=1
+  if test "$4"; then
+    as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+    $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
+  fi
+  $as_echo "$as_me: error: $2" >&2
+  as_fn_exit $as_status
+} # as_fn_error
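+# Typical use (as in as_fn_mkdir_p above):
+#   as_fn_error $? "cannot create directory $as_dir"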
+
+if expr a : '\(a\)' >/dev/null 2>&1 &&
+   test "X`expr 00001 : '.*\(...\)'`" = X001; then
+  as_expr=expr
+else
+  as_expr=false
+fi
+
+if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
+  as_basename=basename
+else
+  as_basename=false
+fi
+
+if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
+  as_dirname=dirname
+else
+  as_dirname=false
+fi
+
+as_me=`$as_basename -- "$0" ||
+$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
+	 X"$0" : 'X\(//\)$' \| \
+	 X"$0" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X/"$0" |
+    sed '/^.*\/\([^/][^/]*\)\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+
+# Avoid depending upon Character Ranges.
+as_cr_letters='abcdefghijklmnopqrstuvwxyz'
+as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+as_cr_Letters=$as_cr_letters$as_cr_LETTERS
+as_cr_digits='0123456789'
+as_cr_alnum=$as_cr_Letters$as_cr_digits
+
+
+  as_lineno_1=$LINENO as_lineno_1a=$LINENO
+  as_lineno_2=$LINENO as_lineno_2a=$LINENO
+  eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" &&
+  test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || {
+  # Blame Lee E. McMahon (1931-1989) for sed's syntax.  :-)
+  sed -n '
+    p
+    /[$]LINENO/=
+  ' <$as_myself |
+    sed '
+      s/[$]LINENO.*/&-/
+      t lineno
+      b
+      :lineno
+      N
+      :loop
+      s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/
+      t loop
+      s/-\n.*//
+    ' >$as_me.lineno &&
+  chmod +x "$as_me.lineno" ||
+    { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; }
+
+  # Don't try to exec as it changes $[0], causing all sorts of problems
+  # (the dirname of $[0] is not the place where we might find the
+  # original and so on.  Autoconf is especially sensitive to this).
+  . "./$as_me.lineno"
+  # Exit status is that of the last command.
+  exit
+}
+
+ECHO_C= ECHO_N= ECHO_T=
+case `echo -n x` in #(((((
+-n*)
+  case `echo 'xy\c'` in
+  *c*) ECHO_T='	';;	# ECHO_T is single tab character.
+  xy)  ECHO_C='\c';;
+  *)   echo `echo ksh88 bug on AIX 6.1` > /dev/null
+       ECHO_T='	';;
+  esac;;
+*)
+  ECHO_N='-n';;
+esac
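+# Typical use (illustrative): print without a trailing newline portably,
+#   echo $ECHO_N "checking for working sed...$ECHO_C"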
+
+rm -f conf$$ conf$$.exe conf$$.file
+if test -d conf$$.dir; then
+  rm -f conf$$.dir/conf$$.file
+else
+  rm -f conf$$.dir
+  mkdir conf$$.dir 2>/dev/null
+fi
+if (echo >conf$$.file) 2>/dev/null; then
+  if ln -s conf$$.file conf$$ 2>/dev/null; then
+    as_ln_s='ln -s'
+    # ... but there are two gotchas:
+    # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
+    # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
+    # In both cases, we have to default to `cp -p'.
+    ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
+      as_ln_s='cp -p'
+  elif ln conf$$.file conf$$ 2>/dev/null; then
+    as_ln_s=ln
+  else
+    as_ln_s='cp -p'
+  fi
+else
+  as_ln_s='cp -p'
+fi
+rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
+rmdir conf$$.dir 2>/dev/null
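+# Later code can use, e.g., `$as_ln_s source dest' and transparently fall
+# back to `cp -p' where symlinks do not work (illustrative).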
+
+if mkdir -p . 2>/dev/null; then
+  as_mkdir_p='mkdir -p "$as_dir"'
+else
+  test -d ./-p && rmdir ./-p
+  as_mkdir_p=false
+fi
+
+if test -x / >/dev/null 2>&1; then
+  as_test_x='test -x'
+else
+  if ls -dL / >/dev/null 2>&1; then
+    as_ls_L_option=L
+  else
+    as_ls_L_option=
+  fi
+  as_test_x='
+    eval sh -c '\''
+      if test -d "$1"; then
+	test -d "$1/.";
+      else
+	case $1 in #(
+	-*)set "./$1";;
+	esac;
+	case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #((
+	???[sx]*):;;*)false;;esac;fi
+    '\'' sh
+  '
+fi
+as_executable_p=$as_test_x
+
+# Sed expression to map a string onto a valid CPP name.
+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
+
+# Sed expression to map a string onto a valid variable name.
+as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
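+# For example (illustrative), `$as_echo "sys/types.h" | $as_tr_cpp' yields
+# SYS_TYPES_H, suitable for use in a HAVE_... macro name.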
+
+SHELL=${CONFIG_SHELL-/bin/sh}
+
+
+test -n "$DJDIR" || exec 7<&0 </dev/null
+exec 6>&1
+
+# Name of the host.
+# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status,
+# so uname gets run too.
+ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q`
+
+#
+# Initializations.
+#
+ac_default_prefix=/usr/local
+ac_clean_files=
+ac_config_libobj_dir=.
+LIBOBJS=
+cross_compiling=no
+subdirs=
+MFLAGS=
+MAKEFLAGS=
+
+# Identity of this package.
+PACKAGE_NAME='jansson'
+PACKAGE_TARNAME='jansson'
+PACKAGE_VERSION='2.1'
+PACKAGE_STRING='jansson 2.1'
+PACKAGE_BUGREPORT='petri at digip.org'
+PACKAGE_URL=''
+
+ac_unique_file="src/value.c"
+# Factoring default headers for most tests.
+ac_includes_default="\
+#include <stdio.h>
+#ifdef HAVE_SYS_TYPES_H
+# include <sys/types.h>
+#endif
+#ifdef HAVE_SYS_STAT_H
+# include <sys/stat.h>
+#endif
+#ifdef STDC_HEADERS
+# include <stdlib.h>
+# include <stddef.h>
+#else
+# ifdef HAVE_STDLIB_H
+#  include <stdlib.h>
+# endif
+#endif
+#ifdef HAVE_STRING_H
+# if !defined STDC_HEADERS && defined HAVE_MEMORY_H
+#  include <memory.h>
+# endif
+# include <string.h>
+#endif
+#ifdef HAVE_STRINGS_H
+# include <strings.h>
+#endif
+#ifdef HAVE_INTTYPES_H
+# include <inttypes.h>
+#endif
+#ifdef HAVE_STDINT_H
+# include <stdint.h>
+#endif
+#ifdef HAVE_UNISTD_H
+# include <unistd.h>
+#endif"
+
+ac_subst_vars='LTLIBOBJS
+LIBOBJS
+json_inline
+json_have_long_long
+GCC_FALSE
+GCC_TRUE
+CPP
+OTOOL64
+OTOOL
+LIPO
+NMEDIT
+DSYMUTIL
+RANLIB
+AR
+OBJDUMP
+LN_S
+NM
+ac_ct_DUMPBIN
+DUMPBIN
+LD
+FGREP
+EGREP
+GREP
+SED
+host_os
+host_vendor
+host_cpu
+host
+build_os
+build_vendor
+build_cpu
+build
+LIBTOOL
+am__fastdepCC_FALSE
+am__fastdepCC_TRUE
+CCDEPMODE
+AMDEPBACKSLASH
+AMDEP_FALSE
+AMDEP_TRUE
+am__quote
+am__include
+DEPDIR
+OBJEXT
+EXEEXT
+ac_ct_CC
+CPPFLAGS
+LDFLAGS
+CFLAGS
+CC
+am__untar
+am__tar
+AMTAR
+am__leading_dot
+SET_MAKE
+AWK
+mkdir_p
+MKDIR_P
+INSTALL_STRIP_PROGRAM
+STRIP
+install_sh
+MAKEINFO
+AUTOHEADER
+AUTOMAKE
+AUTOCONF
+ACLOCAL
+VERSION
+PACKAGE
+CYGPATH_W
+am__isrc
+INSTALL_DATA
+INSTALL_SCRIPT
+INSTALL_PROGRAM
+target_alias
+host_alias
+build_alias
+LIBS
+ECHO_T
+ECHO_N
+ECHO_C
+DEFS
+mandir
+localedir
+libdir
+psdir
+pdfdir
+dvidir
+htmldir
+infodir
+docdir
+oldincludedir
+includedir
+localstatedir
+sharedstatedir
+sysconfdir
+datadir
+datarootdir
+libexecdir
+sbindir
+bindir
+program_transform_name
+prefix
+exec_prefix
+PACKAGE_URL
+PACKAGE_BUGREPORT
+PACKAGE_STRING
+PACKAGE_VERSION
+PACKAGE_TARNAME
+PACKAGE_NAME
+PATH_SEPARATOR
+SHELL'
+ac_subst_files=''
+ac_user_opts='
+enable_option_checking
+enable_dependency_tracking
+enable_shared
+enable_static
+with_pic
+enable_fast_install
+with_gnu_ld
+enable_libtool_lock
+'
+      ac_precious_vars='build_alias
+host_alias
+target_alias
+CC
+CFLAGS
+LDFLAGS
+LIBS
+CPPFLAGS
+CPP'
+
+
+# Initialize some variables set by options.
+ac_init_help=
+ac_init_version=false
+ac_unrecognized_opts=
+ac_unrecognized_sep=
+# The variables have the same names as the options, with
+# dashes changed to underlines.
+cache_file=/dev/null
+exec_prefix=NONE
+no_create=
+no_recursion=
+prefix=NONE
+program_prefix=NONE
+program_suffix=NONE
+program_transform_name=s,x,x,
+silent=
+site=
+srcdir=
+verbose=
+x_includes=NONE
+x_libraries=NONE
+
+# Installation directory options.
+# These are left unexpanded so users can "make install exec_prefix=/foo"
+# and all the variables that are supposed to be based on exec_prefix
+# by default will actually change.
+# Use braces instead of parens because sh, perl, etc. also accept them.
+# (The list follows the same order as the GNU Coding Standards.)
+bindir='${exec_prefix}/bin'
+sbindir='${exec_prefix}/sbin'
+libexecdir='${exec_prefix}/libexec'
+datarootdir='${prefix}/share'
+datadir='${datarootdir}'
+sysconfdir='${prefix}/etc'
+sharedstatedir='${prefix}/com'
+localstatedir='${prefix}/var'
+includedir='${prefix}/include'
+oldincludedir='/usr/include'
+docdir='${datarootdir}/doc/${PACKAGE_TARNAME}'
+infodir='${datarootdir}/info'
+htmldir='${docdir}'
+dvidir='${docdir}'
+pdfdir='${docdir}'
+psdir='${docdir}'
+libdir='${exec_prefix}/lib'
+localedir='${datarootdir}/locale'
+mandir='${datarootdir}/man'
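+# Because the values above stay single-quoted, expansion is deferred; e.g.
+# `make install exec_prefix=/opt/jansson' installs executables under
+# /opt/jansson/bin (path is illustrative).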
+
+ac_prev=
+ac_dashdash=
+for ac_option
+do
+  # If the previous option needs an argument, assign it.
+  if test -n "$ac_prev"; then
+    eval $ac_prev=\$ac_option
+    ac_prev=
+    continue
+  fi
+
+  case $ac_option in
+  *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;;
+  *=)   ac_optarg= ;;
+  *)    ac_optarg=yes ;;
+  esac
+
+  # Accept the important Cygnus configure options, so we can diagnose typos.
+
+  case $ac_dashdash$ac_option in
+  --)
+    ac_dashdash=yes ;;
+
+  -bindir | --bindir | --bindi | --bind | --bin | --bi)
+    ac_prev=bindir ;;
+  -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*)
+    bindir=$ac_optarg ;;
+
+  -build | --build | --buil | --bui | --bu)
+    ac_prev=build_alias ;;
+  -build=* | --build=* | --buil=* | --bui=* | --bu=*)
+    build_alias=$ac_optarg ;;
+
+  -cache-file | --cache-file | --cache-fil | --cache-fi \
+  | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c)
+    ac_prev=cache_file ;;
+  -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \
+  | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*)
+    cache_file=$ac_optarg ;;
+
+  --config-cache | -C)
+    cache_file=config.cache ;;
+
+  -datadir | --datadir | --datadi | --datad)
+    ac_prev=datadir ;;
+  -datadir=* | --datadir=* | --datadi=* | --datad=*)
+    datadir=$ac_optarg ;;
+
+  -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \
+  | --dataroo | --dataro | --datar)
+    ac_prev=datarootdir ;;
+  -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \
+  | --dataroot=* | --dataroo=* | --dataro=* | --datar=*)
+    datarootdir=$ac_optarg ;;
+
+  -disable-* | --disable-*)
+    ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'`
+    # Reject names that are not valid shell variable names.
+    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+      as_fn_error $? "invalid feature name: $ac_useropt"
+    ac_useropt_orig=$ac_useropt
+    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+    case $ac_user_opts in
+      *"
+"enable_$ac_useropt"
+"*) ;;
+      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig"
+	 ac_unrecognized_sep=', ';;
+    esac
+    eval enable_$ac_useropt=no ;;
+
+  -docdir | --docdir | --docdi | --doc | --do)
+    ac_prev=docdir ;;
+  -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*)
+    docdir=$ac_optarg ;;
+
+  -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv)
+    ac_prev=dvidir ;;
+  -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*)
+    dvidir=$ac_optarg ;;
+
+  -enable-* | --enable-*)
+    ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'`
+    # Reject names that are not valid shell variable names.
+    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+      as_fn_error $? "invalid feature name: $ac_useropt"
+    ac_useropt_orig=$ac_useropt
+    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+    case $ac_user_opts in
+      *"
+"enable_$ac_useropt"
+"*) ;;
+      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig"
+	 ac_unrecognized_sep=', ';;
+    esac
+    eval enable_$ac_useropt=\$ac_optarg ;;
+
+  -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \
+  | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \
+  | --exec | --exe | --ex)
+    ac_prev=exec_prefix ;;
+  -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \
+  | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \
+  | --exec=* | --exe=* | --ex=*)
+    exec_prefix=$ac_optarg ;;
+
+  -gas | --gas | --ga | --g)
+    # Obsolete; use --with-gas.
+    with_gas=yes ;;
+
+  -help | --help | --hel | --he | -h)
+    ac_init_help=long ;;
+  -help=r* | --help=r* | --hel=r* | --he=r* | -hr*)
+    ac_init_help=recursive ;;
+  -help=s* | --help=s* | --hel=s* | --he=s* | -hs*)
+    ac_init_help=short ;;
+
+  -host | --host | --hos | --ho)
+    ac_prev=host_alias ;;
+  -host=* | --host=* | --hos=* | --ho=*)
+    host_alias=$ac_optarg ;;
+
+  -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht)
+    ac_prev=htmldir ;;
+  -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \
+  | --ht=*)
+    htmldir=$ac_optarg ;;
+
+  -includedir | --includedir | --includedi | --included | --include \
+  | --includ | --inclu | --incl | --inc)
+    ac_prev=includedir ;;
+  -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \
+  | --includ=* | --inclu=* | --incl=* | --inc=*)
+    includedir=$ac_optarg ;;
+
+  -infodir | --infodir | --infodi | --infod | --info | --inf)
+    ac_prev=infodir ;;
+  -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*)
+    infodir=$ac_optarg ;;
+
+  -libdir | --libdir | --libdi | --libd)
+    ac_prev=libdir ;;
+  -libdir=* | --libdir=* | --libdi=* | --libd=*)
+    libdir=$ac_optarg ;;
+
+  -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \
+  | --libexe | --libex | --libe)
+    ac_prev=libexecdir ;;
+  -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \
+  | --libexe=* | --libex=* | --libe=*)
+    libexecdir=$ac_optarg ;;
+
+  -localedir | --localedir | --localedi | --localed | --locale)
+    ac_prev=localedir ;;
+  -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*)
+    localedir=$ac_optarg ;;
+
+  -localstatedir | --localstatedir | --localstatedi | --localstated \
+  | --localstate | --localstat | --localsta | --localst | --locals)
+    ac_prev=localstatedir ;;
+  -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \
+  | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*)
+    localstatedir=$ac_optarg ;;
+
+  -mandir | --mandir | --mandi | --mand | --man | --ma | --m)
+    ac_prev=mandir ;;
+  -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*)
+    mandir=$ac_optarg ;;
+
+  -nfp | --nfp | --nf)
+    # Obsolete; use --without-fp.
+    with_fp=no ;;
+
+  -no-create | --no-create | --no-creat | --no-crea | --no-cre \
+  | --no-cr | --no-c | -n)
+    no_create=yes ;;
+
+  -no-recursion | --no-recursion | --no-recursio | --no-recursi \
+  | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r)
+    no_recursion=yes ;;
+
+  -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \
+  | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \
+  | --oldin | --oldi | --old | --ol | --o)
+    ac_prev=oldincludedir ;;
+  -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \
+  | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \
+  | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*)
+    oldincludedir=$ac_optarg ;;
+
+  -prefix | --prefix | --prefi | --pref | --pre | --pr | --p)
+    ac_prev=prefix ;;
+  -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*)
+    prefix=$ac_optarg ;;
+
+  -program-prefix | --program-prefix | --program-prefi | --program-pref \
+  | --program-pre | --program-pr | --program-p)
+    ac_prev=program_prefix ;;
+  -program-prefix=* | --program-prefix=* | --program-prefi=* \
+  | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*)
+    program_prefix=$ac_optarg ;;
+
+  -program-suffix | --program-suffix | --program-suffi | --program-suff \
+  | --program-suf | --program-su | --program-s)
+    ac_prev=program_suffix ;;
+  -program-suffix=* | --program-suffix=* | --program-suffi=* \
+  | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*)
+    program_suffix=$ac_optarg ;;
+
+  -program-transform-name | --program-transform-name \
+  | --program-transform-nam | --program-transform-na \
+  | --program-transform-n | --program-transform- \
+  | --program-transform | --program-transfor \
+  | --program-transfo | --program-transf \
+  | --program-trans | --program-tran \
+  | --progr-tra | --program-tr | --program-t)
+    ac_prev=program_transform_name ;;
+  -program-transform-name=* | --program-transform-name=* \
+  | --program-transform-nam=* | --program-transform-na=* \
+  | --program-transform-n=* | --program-transform-=* \
+  | --program-transform=* | --program-transfor=* \
+  | --program-transfo=* | --program-transf=* \
+  | --program-trans=* | --program-tran=* \
+  | --progr-tra=* | --program-tr=* | --program-t=*)
+    program_transform_name=$ac_optarg ;;
+
+  -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd)
+    ac_prev=pdfdir ;;
+  -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*)
+    pdfdir=$ac_optarg ;;
+
+  -psdir | --psdir | --psdi | --psd | --ps)
+    ac_prev=psdir ;;
+  -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*)
+    psdir=$ac_optarg ;;
+
+  -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+  | -silent | --silent | --silen | --sile | --sil)
+    silent=yes ;;
+
+  -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb)
+    ac_prev=sbindir ;;
+  -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \
+  | --sbi=* | --sb=*)
+    sbindir=$ac_optarg ;;
+
+  -sharedstatedir | --sharedstatedir | --sharedstatedi \
+  | --sharedstated | --sharedstate | --sharedstat | --sharedsta \
+  | --sharedst | --shareds | --shared | --share | --shar \
+  | --sha | --sh)
+    ac_prev=sharedstatedir ;;
+  -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \
+  | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \
+  | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \
+  | --sha=* | --sh=*)
+    sharedstatedir=$ac_optarg ;;
+
+  -site | --site | --sit)
+    ac_prev=site ;;
+  -site=* | --site=* | --sit=*)
+    site=$ac_optarg ;;
+
+  -srcdir | --srcdir | --srcdi | --srcd | --src | --sr)
+    ac_prev=srcdir ;;
+  -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*)
+    srcdir=$ac_optarg ;;
+
+  -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \
+  | --syscon | --sysco | --sysc | --sys | --sy)
+    ac_prev=sysconfdir ;;
+  -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \
+  | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*)
+    sysconfdir=$ac_optarg ;;
+
+  -target | --target | --targe | --targ | --tar | --ta | --t)
+    ac_prev=target_alias ;;
+  -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*)
+    target_alias=$ac_optarg ;;
+
+  -v | -verbose | --verbose | --verbos | --verbo | --verb)
+    verbose=yes ;;
+
+  -version | --version | --versio | --versi | --vers | -V)
+    ac_init_version=: ;;
+
+  -with-* | --with-*)
+    ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'`
+    # Reject names that are not valid shell variable names.
+    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+      as_fn_error $? "invalid package name: $ac_useropt"
+    ac_useropt_orig=$ac_useropt
+    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+    case $ac_user_opts in
+      *"
+"with_$ac_useropt"
+"*) ;;
+      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig"
+	 ac_unrecognized_sep=', ';;
+    esac
+    eval with_$ac_useropt=\$ac_optarg ;;
+
+  -without-* | --without-*)
+    ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'`
+    # Reject names that are not valid shell variable names.
+    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
+      as_fn_error $? "invalid package name: $ac_useropt"
+    ac_useropt_orig=$ac_useropt
+    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
+    case $ac_user_opts in
+      *"
+"with_$ac_useropt"
+"*) ;;
+      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig"
+	 ac_unrecognized_sep=', ';;
+    esac
+    eval with_$ac_useropt=no ;;
+
+  --x)
+    # Obsolete; use --with-x.
+    with_x=yes ;;
+
+  -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \
+  | --x-incl | --x-inc | --x-in | --x-i)
+    ac_prev=x_includes ;;
+  -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \
+  | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*)
+    x_includes=$ac_optarg ;;
+
+  -x-libraries | --x-libraries | --x-librarie | --x-librari \
+  | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l)
+    ac_prev=x_libraries ;;
+  -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \
+  | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*)
+    x_libraries=$ac_optarg ;;
+
+  -*) as_fn_error $? "unrecognized option: \`$ac_option'
+Try \`$0 --help' for more information"
+    ;;
+
+  *=*)
+    ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='`
+    # Reject names that are not valid shell variable names.
+    case $ac_envvar in #(
+      '' | [0-9]* | *[!_$as_cr_alnum]* )
+      as_fn_error $? "invalid variable name: \`$ac_envvar'" ;;
+    esac
+    eval $ac_envvar=\$ac_optarg
+    export $ac_envvar ;;
+
+  *)
+    # FIXME: should be removed in autoconf 3.0.
+    $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2
+    expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null &&
+      $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2
+    : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}"
+    ;;
+
+  esac
+done
+
+if test -n "$ac_prev"; then
+  ac_option=--`echo $ac_prev | sed 's/_/-/g'`
+  as_fn_error $? "missing argument to $ac_option"
+fi
+
+if test -n "$ac_unrecognized_opts"; then
+  case $enable_option_checking in
+    no) ;;
+    fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;;
+    *)     $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;;
+  esac
+fi
+
+# Check all directory arguments for consistency.
+for ac_var in	exec_prefix prefix bindir sbindir libexecdir datarootdir \
+		datadir sysconfdir sharedstatedir localstatedir includedir \
+		oldincludedir docdir infodir htmldir dvidir pdfdir psdir \
+		libdir localedir mandir
+do
+  eval ac_val=\$$ac_var
+  # Remove trailing slashes.
+  case $ac_val in
+    */ )
+      ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'`
+      eval $ac_var=\$ac_val;;
+  esac
+  # Be sure to have absolute directory names.
+  case $ac_val in
+    [\\/$]* | ?:[\\/]* )  continue;;
+    NONE | '' ) case $ac_var in *prefix ) continue;; esac;;
+  esac
+  as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val"
+done
+
+# There might be people who depend on the old broken behavior: `$host'
+# used to hold the argument of --host etc.
+# FIXME: To remove some day.
+build=$build_alias
+host=$host_alias
+target=$target_alias
+
+# FIXME: To remove some day.
+if test "x$host_alias" != x; then
+  if test "x$build_alias" = x; then
+    cross_compiling=maybe
+    $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host.
+    If a cross compiler is detected then cross compile mode will be used" >&2
+  elif test "x$build_alias" != "x$host_alias"; then
+    cross_compiling=yes
+  fi
+fi
+
+ac_tool_prefix=
+test -n "$host_alias" && ac_tool_prefix=$host_alias-
+
+test "$silent" = yes && exec 6>/dev/null
+
+
+ac_pwd=`pwd` && test -n "$ac_pwd" &&
+ac_ls_di=`ls -di .` &&
+ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` ||
+  as_fn_error $? "working directory cannot be determined"
+test "X$ac_ls_di" = "X$ac_pwd_ls_di" ||
+  as_fn_error $? "pwd does not report name of working directory"
+
+
+# Find the source files, if location was not specified.
+if test -z "$srcdir"; then
+  ac_srcdir_defaulted=yes
+  # Try the directory containing this script, then the parent directory.
+  ac_confdir=`$as_dirname -- "$as_myself" ||
+$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$as_myself" : 'X\(//\)[^/]' \| \
+	 X"$as_myself" : 'X\(//\)$' \| \
+	 X"$as_myself" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$as_myself" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+  srcdir=$ac_confdir
+  if test ! -r "$srcdir/$ac_unique_file"; then
+    srcdir=..
+  fi
+else
+  ac_srcdir_defaulted=no
+fi
+if test ! -r "$srcdir/$ac_unique_file"; then
+  test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .."
+  as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir"
+fi
+ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work"
+ac_abs_confdir=`(
+	cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg"
+	pwd)`
+# When building in place, set srcdir=.
+if test "$ac_abs_confdir" = "$ac_pwd"; then
+  srcdir=.
+fi
+# Remove unnecessary trailing slashes from srcdir.
+# Double slashes in file names in object file debugging info
+# mess up M-x gdb in Emacs.
+case $srcdir in
+*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;;
+esac
+for ac_var in $ac_precious_vars; do
+  eval ac_env_${ac_var}_set=\${${ac_var}+set}
+  eval ac_env_${ac_var}_value=\$${ac_var}
+  eval ac_cv_env_${ac_var}_set=\${${ac_var}+set}
+  eval ac_cv_env_${ac_var}_value=\$${ac_var}
+done
+
+#
+# Report the --help message.
+#
+if test "$ac_init_help" = "long"; then
+  # Omit some internal or obsolete options to make the list less imposing.
+  # This message is too long to be a string in the A/UX 3.1 sh.
+  cat <<_ACEOF
+\`configure' configures jansson 2.1 to adapt to many kinds of systems.
+
+Usage: $0 [OPTION]... [VAR=VALUE]...
+
+To assign environment variables (e.g., CC, CFLAGS...), specify them as
+VAR=VALUE.  See below for descriptions of some of the useful variables.
+
+Defaults for the options are specified in brackets.
+
+Configuration:
+  -h, --help              display this help and exit
+      --help=short        display options specific to this package
+      --help=recursive    display the short help of all the included packages
+  -V, --version           display version information and exit
+  -q, --quiet, --silent   do not print \`checking ...' messages
+      --cache-file=FILE   cache test results in FILE [disabled]
+  -C, --config-cache      alias for \`--cache-file=config.cache'
+  -n, --no-create         do not create output files
+      --srcdir=DIR        find the sources in DIR [configure dir or \`..']
+
+Installation directories:
+  --prefix=PREFIX         install architecture-independent files in PREFIX
+                          [$ac_default_prefix]
+  --exec-prefix=EPREFIX   install architecture-dependent files in EPREFIX
+                          [PREFIX]
+
+By default, \`make install' will install all the files in
+\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc.  You can specify
+an installation prefix other than \`$ac_default_prefix' using \`--prefix',
+for instance \`--prefix=\$HOME'.
+
+For better control, use the options below.
+
+Fine tuning of the installation directories:
+  --bindir=DIR            user executables [EPREFIX/bin]
+  --sbindir=DIR           system admin executables [EPREFIX/sbin]
+  --libexecdir=DIR        program executables [EPREFIX/libexec]
+  --sysconfdir=DIR        read-only single-machine data [PREFIX/etc]
+  --sharedstatedir=DIR    modifiable architecture-independent data [PREFIX/com]
+  --localstatedir=DIR     modifiable single-machine data [PREFIX/var]
+  --libdir=DIR            object code libraries [EPREFIX/lib]
+  --includedir=DIR        C header files [PREFIX/include]
+  --oldincludedir=DIR     C header files for non-gcc [/usr/include]
+  --datarootdir=DIR       read-only arch.-independent data root [PREFIX/share]
+  --datadir=DIR           read-only architecture-independent data [DATAROOTDIR]
+  --infodir=DIR           info documentation [DATAROOTDIR/info]
+  --localedir=DIR         locale-dependent data [DATAROOTDIR/locale]
+  --mandir=DIR            man documentation [DATAROOTDIR/man]
+  --docdir=DIR            documentation root [DATAROOTDIR/doc/jansson]
+  --htmldir=DIR           html documentation [DOCDIR]
+  --dvidir=DIR            dvi documentation [DOCDIR]
+  --pdfdir=DIR            pdf documentation [DOCDIR]
+  --psdir=DIR             ps documentation [DOCDIR]
+_ACEOF
+
+  cat <<\_ACEOF
+
+Program names:
+  --program-prefix=PREFIX            prepend PREFIX to installed program names
+  --program-suffix=SUFFIX            append SUFFIX to installed program names
+  --program-transform-name=PROGRAM   run sed PROGRAM on installed program names
+
+System types:
+  --build=BUILD     configure for building on BUILD [guessed]
+  --host=HOST       cross-compile to build programs to run on HOST [BUILD]
+_ACEOF
+fi
+
+if test -n "$ac_init_help"; then
+  case $ac_init_help in
+     short | recursive ) echo "Configuration of jansson 2.1:";;
+   esac
+  cat <<\_ACEOF
+
+Optional Features:
+  --disable-option-checking  ignore unrecognized --enable/--with options
+  --disable-FEATURE       do not include FEATURE (same as --enable-FEATURE=no)
+  --enable-FEATURE[=ARG]  include FEATURE [ARG=yes]
+  --disable-dependency-tracking  speeds up one-time build
+  --enable-dependency-tracking   do not reject slow dependency extractors
+  --enable-shared[=PKGS]  build shared libraries [default=yes]
+  --enable-static[=PKGS]  build static libraries [default=yes]
+  --enable-fast-install[=PKGS]
+                          optimize for fast installation [default=yes]
+  --disable-libtool-lock  avoid locking (might break parallel builds)
+
+Optional Packages:
+  --with-PACKAGE[=ARG]    use PACKAGE [ARG=yes]
+  --without-PACKAGE       do not use PACKAGE (same as --with-PACKAGE=no)
+  --with-pic              try to use only PIC/non-PIC objects [default=use
+                          both]
+  --with-gnu-ld           assume the C compiler uses GNU ld [default=no]
+
+Some influential environment variables:
+  CC          C compiler command
+  CFLAGS      C compiler flags
+  LDFLAGS     linker flags, e.g. -L<lib dir> if you have libraries in a
+              nonstandard directory <lib dir>
+  LIBS        libraries to pass to the linker, e.g. -l<library>
+  CPPFLAGS    (Objective) C/C++ preprocessor flags, e.g. -I<include dir> if
+              you have headers in a nonstandard directory <include dir>
+  CPP         C preprocessor
+
+Use these variables to override the choices made by `configure' or to help
+it to find libraries and programs with nonstandard names/locations.
+
+Report bugs to <petri at digip.org>.
+_ACEOF
+ac_status=$?
+fi
+
+if test "$ac_init_help" = "recursive"; then
+  # If there are subdirs, report their specific --help.
+  for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue
+    test -d "$ac_dir" ||
+      { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } ||
+      continue
+    ac_builddir=.
+
+case "$ac_dir" in
+.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
+*)
+  ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
+  # A ".." for each directory in $ac_dir_suffix.
+  ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
+  case $ac_top_builddir_sub in
+  "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
+  *)  ac_top_build_prefix=$ac_top_builddir_sub/ ;;
+  esac ;;
+esac
+ac_abs_top_builddir=$ac_pwd
+ac_abs_builddir=$ac_pwd$ac_dir_suffix
+# for backward compatibility:
+ac_top_builddir=$ac_top_build_prefix
+
+case $srcdir in
+  .)  # We are building in place.
+    ac_srcdir=.
+    ac_top_srcdir=$ac_top_builddir_sub
+    ac_abs_top_srcdir=$ac_pwd ;;
+  [\\/]* | ?:[\\/]* )  # Absolute name.
+    ac_srcdir=$srcdir$ac_dir_suffix;
+    ac_top_srcdir=$srcdir
+    ac_abs_top_srcdir=$srcdir ;;
+  *) # Relative name.
+    ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
+    ac_top_srcdir=$ac_top_build_prefix$srcdir
+    ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
+esac
+ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
+
+    cd "$ac_dir" || { ac_status=$?; continue; }
+    # Check for a guest configure script.
+    if test -f "$ac_srcdir/configure.gnu"; then
+      echo &&
+      $SHELL "$ac_srcdir/configure.gnu" --help=recursive
+    elif test -f "$ac_srcdir/configure"; then
+      echo &&
+      $SHELL "$ac_srcdir/configure" --help=recursive
+    else
+      $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2
+    fi || ac_status=$?
+    cd "$ac_pwd" || { ac_status=$?; break; }
+  done
+fi
+
+test -n "$ac_init_help" && exit $ac_status
+if $ac_init_version; then
+  cat <<\_ACEOF
+jansson configure 2.1
+generated by GNU Autoconf 2.68
+
+Copyright (C) 2010 Free Software Foundation, Inc.
+This configure script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it.
+_ACEOF
+  exit
+fi
+
+## ------------------------ ##
+## Autoconf initialization. ##
+## ------------------------ ##
+
+# ac_fn_c_try_compile LINENO
+# --------------------------
+# Try to compile conftest.$ac_ext, and return whether this succeeded.
+ac_fn_c_try_compile ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  rm -f conftest.$ac_objext
+  if { { ac_try="$ac_compile"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_compile") 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    grep -v '^ *+' conftest.err >conftest.er1
+    cat conftest.er1 >&5
+    mv -f conftest.er1 conftest.err
+  fi
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && {
+	 test -z "$ac_c_werror_flag" ||
+	 test ! -s conftest.err
+       } && test -s conftest.$ac_objext; then :
+  ac_retval=0
+else
+  $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+	ac_retval=1
+fi
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_compile
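+
+# Illustration only, never invoked by configure: typical use of the
+# helper above.  A hedged sketch; it assumes $ac_ext and $ac_compile
+# have been set up, as this script does later.
+ac_fn_c_try_compile_example ()
+{
+  cat >conftest.$ac_ext <<'EOF'
+int main () { return 0; }
+EOF
+  if ac_fn_c_try_compile "$LINENO"; then
+    echo "conftest compiles"
+  else
+    echo "conftest does not compile"
+  fi
+} # ac_fn_c_try_compile_example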
+
+# ac_fn_c_try_link LINENO
+# -----------------------
+# Try to link conftest.$ac_ext, and return whether this succeeded.
+ac_fn_c_try_link ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  rm -f conftest.$ac_objext conftest$ac_exeext
+  if { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    grep -v '^ *+' conftest.err >conftest.er1
+    cat conftest.er1 >&5
+    mv -f conftest.er1 conftest.err
+  fi
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && {
+	 test -z "$ac_c_werror_flag" ||
+	 test ! -s conftest.err
+       } && test -s conftest$ac_exeext && {
+	 test "$cross_compiling" = yes ||
+	 $as_test_x conftest$ac_exeext
+       }; then :
+  ac_retval=0
+else
+  $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+	ac_retval=1
+fi
+  # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
+  # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
+  # interfere with the next link command; also delete a directory that is
+  # left behind by Apple's compiler.  We do this before executing the actions.
+  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_link
+
+# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES
+# -------------------------------------------------------
+# Tests whether HEADER exists and can be compiled using the include files in
+# INCLUDES, setting the cache variable VAR accordingly.
+ac_fn_c_check_header_compile ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
+$as_echo_n "checking for $2... " >&6; }
+if eval \${$3+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+$4
+#include <$2>
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  eval "$3=yes"
+else
+  eval "$3=no"
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+eval ac_res=\$$3
+	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+
+} # ac_fn_c_check_header_compile
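+
+# Illustration only, never invoked by configure: the calling convention
+# of the helper above, probing a header.  The header name and cache
+# variable here are hypothetical for this script.
+ac_fn_c_check_header_compile_example ()
+{
+  ac_fn_c_check_header_compile "$LINENO" "stdint.h" "ac_cv_header_stdint_h" \
+    "$ac_includes_default"
+  eval ac_res=\$ac_cv_header_stdint_h
+  echo "stdint.h usable: $ac_res"
+} # ac_fn_c_check_header_compile_example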
+
+# ac_fn_c_try_cpp LINENO
+# ----------------------
+# Try to preprocess conftest.$ac_ext, and return whether this succeeded.
+ac_fn_c_try_cpp ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  if { { ac_try="$ac_cpp conftest.$ac_ext"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    grep -v '^ *+' conftest.err >conftest.er1
+    cat conftest.er1 >&5
+    mv -f conftest.er1 conftest.err
+  fi
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } > conftest.i && {
+	 test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" ||
+	 test ! -s conftest.err
+       }; then :
+  ac_retval=0
+else
+  $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+    ac_retval=1
+fi
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_cpp
+
+# ac_fn_c_try_run LINENO
+# ----------------------
+# Try to link conftest.$ac_ext and run the result, returning whether this
+# succeeded.  Assumes that executables *can* be run.
+ac_fn_c_try_run ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  if { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && { ac_try='./conftest$ac_exeext'
+  { { case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_try") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; }; then :
+  ac_retval=0
+else
+  $as_echo "$as_me: program exited with status $ac_status" >&5
+       $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+       ac_retval=$ac_status
+fi
+  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+  as_fn_set_status $ac_retval
+
+} # ac_fn_c_try_run
+
+# ac_fn_c_check_func LINENO FUNC VAR
+# ----------------------------------
+# Tests whether FUNC exists, setting the cache variable VAR accordingly
+ac_fn_c_check_func ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
+$as_echo_n "checking for $2... " >&6; }
+if eval \${$3+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+/* Define $2 to an innocuous variant, in case <limits.h> declares $2.
+   For example, HP-UX 11i <limits.h> declares gettimeofday.  */
+#define $2 innocuous_$2
+
+/* System header to define __stub macros and hopefully few prototypes,
+    which can conflict with char $2 (); below.
+    Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+    <limits.h> exists even on freestanding compilers.  */
+
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
+
+#undef $2
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char $2 ();
+/* The GNU C library defines this for functions which it implements
+    to always fail with ENOSYS.  Some functions are actually named
+    something starting with __ and the normal name is an alias.  */
+#if defined __stub_$2 || defined __stub___$2
+choke me
+#endif
+
+int
+main ()
+{
+return $2 ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  eval "$3=yes"
+else
+  eval "$3=no"
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+fi
+eval ac_res=\$$3
+	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+
+} # ac_fn_c_check_func
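+
+# Illustration only, never invoked by configure: a hedged sketch of the
+# probe that ac_fn_c_check_func generates for a hypothetical function
+# "strlcpy".  The dummy "char strlcpy ();" declaration lets the link
+# step succeed or fail purely on whether the symbol exists in libc.
+ac_fn_c_check_func_example ()
+{
+  cat >conftest_example.c <<'EOF'
+#define strlcpy innocuous_strlcpy
+#include <limits.h>
+#undef strlcpy
+char strlcpy ();
+int
+main ()
+{
+return strlcpy ();
+}
+EOF
+  if ${CC-cc} -o conftest_example conftest_example.c 2>/dev/null; then
+    echo "strlcpy: yes"
+  else
+    echo "strlcpy: no"
+  fi
+  rm -f conftest_example conftest_example.c
+} # ac_fn_c_check_func_example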
+
+# ac_fn_c_find_intX_t LINENO BITS VAR
+# -----------------------------------
+# Finds a signed integer type with width BITS, setting cache variable VAR
+# accordingly.
+ac_fn_c_find_intX_t ()
+{
+  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for int$2_t" >&5
+$as_echo_n "checking for int$2_t... " >&6; }
+if eval \${$3+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  eval "$3=no"
+     # Order is important - never check a type that is potentially smaller
+     # than half of the expected target width.
+     for ac_type in int$2_t 'int' 'long int' \
+	 'long long int' 'short int' 'signed char'; do
+       cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+$ac_includes_default
+	     enum { N = $2 / 2 - 1 };
+int
+main ()
+{
+static int test_array [1 - 2 * !(0 < ($ac_type) ((((($ac_type) 1 << N) << N) - 1) * 2 + 1))];
+test_array [0] = 0
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+$ac_includes_default
+	        enum { N = $2 / 2 - 1 };
+int
+main ()
+{
+static int test_array [1 - 2 * !(($ac_type) ((((($ac_type) 1 << N) << N) - 1) * 2 + 1)
+		 < ($ac_type) ((((($ac_type) 1 << N) << N) - 1) * 2 + 2))];
+test_array [0] = 0
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+
+else
+  case $ac_type in #(
+  int$2_t) :
+    eval "$3=yes" ;; #(
+  *) :
+    eval "$3=\$ac_type" ;;
+esac
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+       if eval test \"x\$"$3"\" = x"no"; then :
+
+else
+  break
+fi
+     done
+fi
+eval ac_res=\$$3
+	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
+$as_echo "$ac_res" >&6; }
+  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
+
+} # ac_fn_c_find_intX_t
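+
+# Illustration only, never invoked by configure: the width probe above
+# relies on a portable compile-time assertion.  The array size is 1 when
+# the predicate holds and negative (a compile error) when it does not.
+# A hedged, standalone sketch checking that "int" holds 16-bit values:
+ac_fn_c_find_intX_t_example ()
+{
+  cat >conftest_width.c <<'EOF'
+enum { N = 16 / 2 - 1 };
+static int test_array [1 - 2 * !(0 < (int) (((((int) 1 << N) << N) - 1) * 2 + 1))];
+int main () { return test_array[0]; }
+EOF
+  if ${CC-cc} -c conftest_width.c 2>/dev/null; then
+    echo "int is at least 16 bits wide"
+  else
+    echo "int is narrower than 16 bits"
+  fi
+  rm -f conftest_width.c conftest_width.o
+} # ac_fn_c_find_intX_t_example
+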
+cat >config.log <<_ACEOF
+This file contains any messages produced by compilers while
+running configure, to aid debugging if configure makes a mistake.
+
+It was created by jansson $as_me 2.1, which was
+generated by GNU Autoconf 2.68.  Invocation command line was
+
+  $ $0 $@
+
+_ACEOF
+exec 5>>config.log
+{
+cat <<_ASUNAME
+## --------- ##
+## Platform. ##
+## --------- ##
+
+hostname = `(hostname || uname -n) 2>/dev/null | sed 1q`
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown`
+/bin/uname -X     = `(/bin/uname -X) 2>/dev/null     || echo unknown`
+
+/bin/arch              = `(/bin/arch) 2>/dev/null              || echo unknown`
+/usr/bin/arch -k       = `(/usr/bin/arch -k) 2>/dev/null       || echo unknown`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown`
+/usr/bin/hostinfo      = `(/usr/bin/hostinfo) 2>/dev/null      || echo unknown`
+/bin/machine           = `(/bin/machine) 2>/dev/null           || echo unknown`
+/usr/bin/oslevel       = `(/usr/bin/oslevel) 2>/dev/null       || echo unknown`
+/bin/universe          = `(/bin/universe) 2>/dev/null          || echo unknown`
+
+_ASUNAME
+
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    $as_echo "PATH: $as_dir"
+  done
+IFS=$as_save_IFS
+
+} >&5
+
+cat >&5 <<_ACEOF
+
+
+## ----------- ##
+## Core tests. ##
+## ----------- ##
+
+_ACEOF
+
+
+# Keep a trace of the command line.
+# Strip out --no-create and --no-recursion so they do not pile up.
+# Strip out --silent because we don't want to record it for future runs.
+# Also quote any args containing shell meta-characters.
+# Make two passes to allow for proper duplicate-argument suppression.
+ac_configure_args=
+ac_configure_args0=
+ac_configure_args1=
+ac_must_keep_next=false
+for ac_pass in 1 2
+do
+  for ac_arg
+  do
+    case $ac_arg in
+    -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;;
+    -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+    | -silent | --silent | --silen | --sile | --sil)
+      continue ;;
+    *\'*)
+      ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;;
+    esac
+    case $ac_pass in
+    1) as_fn_append ac_configure_args0 " '$ac_arg'" ;;
+    2)
+      as_fn_append ac_configure_args1 " '$ac_arg'"
+      if test $ac_must_keep_next = true; then
+	ac_must_keep_next=false # Got value, back to normal.
+      else
+	case $ac_arg in
+	  *=* | --config-cache | -C | -disable-* | --disable-* \
+	  | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \
+	  | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \
+	  | -with-* | --with-* | -without-* | --without-* | --x)
+	    case "$ac_configure_args0 " in
+	      "$ac_configure_args1"*" '$ac_arg' "* ) continue ;;
+	    esac
+	    ;;
+	  -* ) ac_must_keep_next=true ;;
+	esac
+      fi
+      as_fn_append ac_configure_args " '$ac_arg'"
+      ;;
+    esac
+  done
+done
+{ ac_configure_args0=; unset ac_configure_args0;}
+{ ac_configure_args1=; unset ac_configure_args1;}
+
+# When interrupted or exited, clean up temporary files and complete
+# config.log.  We remove comments because the quotes in them would
+# cause problems or look ugly.
+# WARNING: Use '\'' to represent an apostrophe within the trap.
+# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug.
+trap 'exit_status=$?
+  # Save into config.log some information that might help in debugging.
+  {
+    echo
+
+    $as_echo "## ---------------- ##
+## Cache variables. ##
+## ---------------- ##"
+    echo
+    # The following way of writing the cache mishandles newlines in values.
+(
+  for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do
+    eval ac_val=\$$ac_var
+    case $ac_val in #(
+    *${as_nl}*)
+      case $ac_var in #(
+      *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
+$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
+      esac
+      case $ac_var in #(
+      _ | IFS | as_nl) ;; #(
+      BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
+      *) { eval $ac_var=; unset $ac_var;} ;;
+      esac ;;
+    esac
+  done
+  (set) 2>&1 |
+    case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #(
+    *${as_nl}ac_space=\ *)
+      sed -n \
+	"s/'\''/'\''\\\\'\'''\''/g;
+	  s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p"
+      ;; #(
+    *)
+      sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
+      ;;
+    esac |
+    sort
+)
+    echo
+
+    $as_echo "## ----------------- ##
+## Output variables. ##
+## ----------------- ##"
+    echo
+    for ac_var in $ac_subst_vars
+    do
+      eval ac_val=\$$ac_var
+      case $ac_val in
+      *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
+      esac
+      $as_echo "$ac_var='\''$ac_val'\''"
+    done | sort
+    echo
+
+    if test -n "$ac_subst_files"; then
+      $as_echo "## ------------------- ##
+## File substitutions. ##
+## ------------------- ##"
+      echo
+      for ac_var in $ac_subst_files
+      do
+	eval ac_val=\$$ac_var
+	case $ac_val in
+	*\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
+	esac
+	$as_echo "$ac_var='\''$ac_val'\''"
+      done | sort
+      echo
+    fi
+
+    if test -s confdefs.h; then
+      $as_echo "## ----------- ##
+## confdefs.h. ##
+## ----------- ##"
+      echo
+      cat confdefs.h
+      echo
+    fi
+    test "$ac_signal" != 0 &&
+      $as_echo "$as_me: caught signal $ac_signal"
+    $as_echo "$as_me: exit $exit_status"
+  } >&5
+  rm -f core *.core core.conftest.* &&
+    rm -f -r conftest* confdefs* conf$$* $ac_clean_files &&
+    exit $exit_status
+' 0
+for ac_signal in 1 2 13 15; do
+  trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal
+done
+ac_signal=0
+
+# confdefs.h avoids OS command line length limits that DEFS can exceed.
+rm -f -r conftest* confdefs.h
+
+$as_echo "/* confdefs.h */" > confdefs.h
+
+# Predefined preprocessor variables.
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_NAME "$PACKAGE_NAME"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_TARNAME "$PACKAGE_TARNAME"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_VERSION "$PACKAGE_VERSION"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_STRING "$PACKAGE_STRING"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT"
+_ACEOF
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE_URL "$PACKAGE_URL"
+_ACEOF
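+
+# Illustration only: at this point confdefs.h carries one #define per
+# package datum; for this package the values expand along the lines of
+# (bug-report and URL values elided):
+#
+#   #define PACKAGE_NAME "jansson"
+#   #define PACKAGE_VERSION "2.1"
+#   #define PACKAGE_STRING "jansson 2.1"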
+
+
+# Let the site file select an alternate cache file if it wants to.
+# Prefer an explicitly selected file to automatically selected ones.
+ac_site_file1=NONE
+ac_site_file2=NONE
+if test -n "$CONFIG_SITE"; then
+  # We do not want a PATH search for config.site.
+  case $CONFIG_SITE in #((
+    -*)  ac_site_file1=./$CONFIG_SITE;;
+    */*) ac_site_file1=$CONFIG_SITE;;
+    *)   ac_site_file1=./$CONFIG_SITE;;
+  esac
+elif test "x$prefix" != xNONE; then
+  ac_site_file1=$prefix/share/config.site
+  ac_site_file2=$prefix/etc/config.site
+else
+  ac_site_file1=$ac_default_prefix/share/config.site
+  ac_site_file2=$ac_default_prefix/etc/config.site
+fi
+for ac_site_file in "$ac_site_file1" "$ac_site_file2"
+do
+  test "x$ac_site_file" = xNONE && continue
+  if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5
+$as_echo "$as_me: loading site script $ac_site_file" >&6;}
+    sed 's/^/| /' "$ac_site_file" >&5
+    . "$ac_site_file" \
+      || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "failed to load site script $ac_site_file
+See \`config.log' for more details" "$LINENO" 5; }
+  fi
+done
+
+if test -r "$cache_file"; then
+  # Some versions of bash will fail to source /dev/null (and special
+  # files generally), so we avoid doing that.  DJGPP emulates it as a
+  # regular file.
+  if test /dev/null != "$cache_file" && test -f "$cache_file"; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5
+$as_echo "$as_me: loading cache $cache_file" >&6;}
+    case $cache_file in
+      [\\/]* | ?:[\\/]* ) . "$cache_file";;
+      *)                      . "./$cache_file";;
+    esac
+  fi
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5
+$as_echo "$as_me: creating cache $cache_file" >&6;}
+  >$cache_file
+fi
+
+# Check that the precious variables saved in the cache have kept the same
+# value.
+ac_cache_corrupted=false
+for ac_var in $ac_precious_vars; do
+  eval ac_old_set=\$ac_cv_env_${ac_var}_set
+  eval ac_new_set=\$ac_env_${ac_var}_set
+  eval ac_old_val=\$ac_cv_env_${ac_var}_value
+  eval ac_new_val=\$ac_env_${ac_var}_value
+  case $ac_old_set,$ac_new_set in
+    set,)
+      { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5
+$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;}
+      ac_cache_corrupted=: ;;
+    ,set)
+      { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5
+$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;}
+      ac_cache_corrupted=: ;;
+    ,);;
+    *)
+      if test "x$ac_old_val" != "x$ac_new_val"; then
+	# Differences in whitespace do not lead to failure.
+	ac_old_val_w=`echo x $ac_old_val`
+	ac_new_val_w=`echo x $ac_new_val`
+	if test "$ac_old_val_w" != "$ac_new_val_w"; then
+	  { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5
+$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;}
+	  ac_cache_corrupted=:
+	else
+	  { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5
+$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;}
+	  eval $ac_var=\$ac_old_val
+	fi
+	{ $as_echo "$as_me:${as_lineno-$LINENO}:   former value:  \`$ac_old_val'" >&5
+$as_echo "$as_me:   former value:  \`$ac_old_val'" >&2;}
+	{ $as_echo "$as_me:${as_lineno-$LINENO}:   current value: \`$ac_new_val'" >&5
+$as_echo "$as_me:   current value: \`$ac_new_val'" >&2;}
+      fi;;
+  esac
+  # Pass precious variables to config.status.
+  if test "$ac_new_set" = set; then
+    case $ac_new_val in
+    *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;;
+    *) ac_arg=$ac_var=$ac_new_val ;;
+    esac
+    case " $ac_configure_args " in
+      *" '$ac_arg' "*) ;; # Avoid dups.  Use of quotes ensures accuracy.
+      *) as_fn_append ac_configure_args " '$ac_arg'" ;;
+    esac
+  fi
+done
+if $ac_cache_corrupted; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+  { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5
+$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;}
+  as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5
+fi
+## -------------------- ##
+## Main body of script. ##
+## -------------------- ##
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+
+am__api_version='1.10'
+
+ac_aux_dir=
+for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do
+  if test -f "$ac_dir/install-sh"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/install-sh -c"
+    break
+  elif test -f "$ac_dir/install.sh"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/install.sh -c"
+    break
+  elif test -f "$ac_dir/shtool"; then
+    ac_aux_dir=$ac_dir
+    ac_install_sh="$ac_aux_dir/shtool install -c"
+    break
+  fi
+done
+if test -z "$ac_aux_dir"; then
+  as_fn_error $? "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5
+fi
+
+# These three variables are undocumented and unsupported,
+# and are intended to be withdrawn in a future Autoconf release.
+# They can cause serious problems if a builder's source tree is in a directory
+# whose full name contains unusual characters.
+ac_config_guess="$SHELL $ac_aux_dir/config.guess"  # Please don't use this var.
+ac_config_sub="$SHELL $ac_aux_dir/config.sub"  # Please don't use this var.
+ac_configure="$SHELL $ac_aux_dir/configure"  # Please don't use this var.
+
+
+# Find a good install program.  We prefer a C program (faster),
+# so one script is as good as another.  But avoid the broken or
+# incompatible versions:
+# SysV /etc/install, /usr/sbin/install
+# SunOS /usr/etc/install
+# IRIX /sbin/install
+# AIX /bin/install
+# AmigaOS /C/install, which installs bootblocks on floppy discs
+# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag
+# AFS /usr/afsws/bin/install, which mishandles nonexistent args
+# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff"
+# OS/2's system install, which has completely different semantics
+# ./install, which can be erroneously created by make from ./install.sh.
+# Reject install programs that cannot install multiple files.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5
+$as_echo_n "checking for a BSD-compatible install... " >&6; }
+if test -z "$INSTALL"; then
+if ${ac_cv_path_install+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    # Account for people who put trailing slashes in PATH elements.
+case $as_dir/ in #((
+  ./ | .// | /[cC]/* | \
+  /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \
+  ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \
+  /usr/ucb/* ) ;;
+  *)
+    # OSF1 and SCO ODT 3.0 have their own names for install.
+    # Don't use installbsd from OSF since it installs stuff as root
+    # by default.
+    for ac_prog in ginstall scoinst install; do
+      for ac_exec_ext in '' $ac_executable_extensions; do
+	if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then
+	  if test $ac_prog = install &&
+	    grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
+	    # AIX install.  It has an incompatible calling convention.
+	    :
+	  elif test $ac_prog = install &&
+	    grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
+	    # program-specific install script used by HP pwplus--don't use.
+	    :
+	  else
+	    rm -rf conftest.one conftest.two conftest.dir
+	    echo one > conftest.one
+	    echo two > conftest.two
+	    mkdir conftest.dir
+	    if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" &&
+	      test -s conftest.one && test -s conftest.two &&
+	      test -s conftest.dir/conftest.one &&
+	      test -s conftest.dir/conftest.two
+	    then
+	      ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c"
+	      break 3
+	    fi
+	  fi
+	fi
+      done
+    done
+    ;;
+esac
+
+  done
+IFS=$as_save_IFS
+
+rm -rf conftest.one conftest.two conftest.dir
+
+fi
+  if test "${ac_cv_path_install+set}" = set; then
+    INSTALL=$ac_cv_path_install
+  else
+    # As a last resort, use the slow shell script.  Don't cache a
+    # value for INSTALL within a source directory, because that will
+    # break other packages using the cache if that directory is
+    # removed, or if the value is a relative name.
+    INSTALL=$ac_install_sh
+  fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5
+$as_echo "$INSTALL" >&6; }
+
+# Use test -z because SunOS4 sh mishandles braces in ${var-val}.
+# It thinks the first close brace ends the variable substitution.
+test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}'
+
+test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}'
+
+test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644'
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5
+$as_echo_n "checking whether build environment is sane... " >&6; }
+# Just in case
+sleep 1
+echo timestamp > conftest.file
+# Do `set' in a subshell so we don't clobber the current shell's
+# arguments.  Must try -L first in case configure is actually a
+# symlink; some systems play weird games with the mod time of symlinks
+# (e.g., FreeBSD returns the mod time of the symlink's containing
+# directory).
+if (
+   set X `ls -Lt $srcdir/configure conftest.file 2> /dev/null`
+   if test "$*" = "X"; then
+      # -L didn't work.
+      set X `ls -t $srcdir/configure conftest.file`
+   fi
+   rm -f conftest.file
+   if test "$*" != "X $srcdir/configure conftest.file" \
+      && test "$*" != "X conftest.file $srcdir/configure"; then
+
+      # If neither matched, then we have a broken ls.  This can happen
+      # if, for instance, CONFIG_SHELL is bash and it inherits a
+      # broken ls alias from the environment.  This has actually
+      # happened.  Such a system could not be considered "sane".
+      as_fn_error $? "ls -t appears to fail.  Make sure there is not a broken
+alias in your environment" "$LINENO" 5
+   fi
+
+   test "$2" = conftest.file
+   )
+then
+   # Ok.
+   :
+else
+   as_fn_error $? "newly created file is older than distributed files!
+Check your system clock" "$LINENO" 5
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+test "$program_prefix" != NONE &&
+  program_transform_name="s&^&$program_prefix&;$program_transform_name"
+# Use a double $ so make ignores it.
+test "$program_suffix" != NONE &&
+  program_transform_name="s&\$&$program_suffix&;$program_transform_name"
+# Double any \ or $.
+# By default was `s,x,x', remove it if useless.
+ac_script='s/[\\$]/&&/g;s/;s,x,x,$//'
+program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"`
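+
+# Illustration only, never invoked by configure: with hypothetical
+# arguments --program-prefix=g and --program-suffix=-2.1, the transform
+# reduces to "s&^&g&;s&$&-2.1&" once make has collapsed the doubled $.
+ac_program_transform_example ()
+{
+  echo jansson-dump | sed 's&^&g&;s&$&-2.1&'   # prints gjansson-dump-2.1
+} # ac_program_transform_example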
+
+# expand $ac_aux_dir to an absolute path
+am_aux_dir=`cd $ac_aux_dir && pwd`
+
+test x"${MISSING+set}" = xset || MISSING="\${SHELL} $am_aux_dir/missing"
+# Use eval to expand $SHELL
+if eval "$MISSING --run true"; then
+  am_missing_run="$MISSING --run "
+else
+  am_missing_run=
+  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`missing' script is too old or missing" >&5
+$as_echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;}
+fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5
+$as_echo_n "checking for a thread-safe mkdir -p... " >&6; }
+if test -z "$MKDIR_P"; then
+  if ${ac_cv_path_mkdir+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in mkdir gmkdir; do
+	 for ac_exec_ext in '' $ac_executable_extensions; do
+	   { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue
+	   case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #(
+	     'mkdir (GNU coreutils) '* | \
+	     'mkdir (coreutils) '* | \
+	     'mkdir (fileutils) '4.1*)
+	       ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext
+	       break 3;;
+	   esac
+	 done
+       done
+  done
+IFS=$as_save_IFS
+
+fi
+
+  test -d ./--version && rmdir ./--version
+  if test "${ac_cv_path_mkdir+set}" = set; then
+    MKDIR_P="$ac_cv_path_mkdir -p"
+  else
+    # As a last resort, use the slow shell script.  Don't cache a
+    # value for MKDIR_P within a source directory, because that will
+    # break other packages using the cache if that directory is
+    # removed, or if the value is a relative name.
+    MKDIR_P="$ac_install_sh -d"
+  fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5
+$as_echo "$MKDIR_P" >&6; }
+
+mkdir_p="$MKDIR_P"
+case $mkdir_p in
+  [\\/$]* | ?:[\\/]*) ;;
+  */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;;
+esac
+
+for ac_prog in gawk mawk nawk awk
+do
+  # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_AWK+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$AWK"; then
+  ac_cv_prog_AWK="$AWK" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_AWK="$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+AWK=$ac_cv_prog_AWK
+if test -n "$AWK"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5
+$as_echo "$AWK" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  test -n "$AWK" && break
+done
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5
+$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; }
+set x ${MAKE-make}
+ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'`
+if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat >conftest.make <<\_ACEOF
+SHELL = /bin/sh
+all:
+	@echo '@@@%%%=$(MAKE)=@@@%%%'
+_ACEOF
+# GNU make sometimes prints "make[1]: Entering ...", which would confuse us.
+case `${MAKE-make} -f conftest.make 2>/dev/null` in
+  *@@@%%%=?*=@@@%%%*)
+    eval ac_cv_prog_make_${ac_make}_set=yes;;
+  *)
+    eval ac_cv_prog_make_${ac_make}_set=no;;
+esac
+rm -f conftest.make
+fi
+if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+  SET_MAKE=
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+  SET_MAKE="MAKE=${MAKE-make}"
+fi
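+
+# Illustration only, never invoked by configure: the probe above writes
+# a one-rule makefile that echoes $(MAKE) between unlikely markers; a
+# make that defines $(MAKE) keeps the marker intact, so SET_MAKE stays
+# empty.  Reproducing the probe by hand:
+ac_set_make_probe_example ()
+{
+  printf 'all:\n\t@echo "$(MAKE)"\n' > conftest_probe.mk
+  ${MAKE-make} -f conftest_probe.mk   # typically prints the make name
+  rm -f conftest_probe.mk
+} # ac_set_make_probe_example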
+
+rm -rf .tst 2>/dev/null
+mkdir .tst 2>/dev/null
+if test -d .tst; then
+  am__leading_dot=.
+else
+  am__leading_dot=_
+fi
+rmdir .tst 2>/dev/null
+
+if test "`cd $srcdir && pwd`" != "`pwd`"; then
+  # Use -I$(srcdir) only when $(srcdir) != ., so that make's output
+  # is not polluted with repeated "-I."
+  am__isrc=' -I$(srcdir)'
+  # test to see if srcdir already configured
+  if test -f $srcdir/config.status; then
+    as_fn_error $? "source directory already configured; run \"make distclean\" there first" "$LINENO" 5
+  fi
+fi
+
+# test whether we have cygpath
+if test -z "$CYGPATH_W"; then
+  if (cygpath --version) >/dev/null 2>/dev/null; then
+    CYGPATH_W='cygpath -w'
+  else
+    CYGPATH_W=echo
+  fi
+fi
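+
+# Illustration only, never invoked by configure: how CYGPATH_W is meant
+# to be used.  On Cygwin it converts a POSIX path to native Windows
+# form; elsewhere it degrades to a plain echo of its argument.
+ac_cygpath_w_example ()
+{
+  ${CYGPATH_W-echo} "$HOME"   # e.g. "C:\cygwin\home\user" under Cygwin
+} # ac_cygpath_w_example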
+
+
+# Define the identity of the package.
+ PACKAGE='jansson'
+ VERSION='2.1'
+
+
+cat >>confdefs.h <<_ACEOF
+#define PACKAGE "$PACKAGE"
+_ACEOF
+
+
+cat >>confdefs.h <<_ACEOF
+#define VERSION "$VERSION"
+_ACEOF
+
+# Some tools Automake needs.
+
+ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"}
+
+
+AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"}
+
+
+AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"}
+
+
+AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"}
+
+
+MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"}
+
+install_sh=${install_sh-"\$(SHELL) $am_aux_dir/install-sh"}
+
+# Installed binaries are usually stripped using `strip' when the user
+# runs `make install-strip'.  However, `strip' might not be the right
+# tool to use in cross-compilation environments, so Automake will
+# honor the `STRIP' environment variable to override this program.
+if test "$cross_compiling" != no; then
+  if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+set dummy ${ac_tool_prefix}strip; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_STRIP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$STRIP"; then
+  ac_cv_prog_STRIP="$STRIP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_STRIP="${ac_tool_prefix}strip"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+STRIP=$ac_cv_prog_STRIP
+if test -n "$STRIP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5
+$as_echo "$STRIP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_STRIP"; then
+  ac_ct_STRIP=$STRIP
+  # Extract the first word of "strip", so it can be a program name with args.
+set dummy strip; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_STRIP"; then
+  ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_STRIP="strip"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP
+if test -n "$ac_ct_STRIP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5
+$as_echo "$ac_ct_STRIP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_STRIP" = x; then
+    STRIP=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    STRIP=$ac_ct_STRIP
+  fi
+else
+  STRIP="$ac_cv_prog_STRIP"
+fi
+
+fi
+INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
+
+# We need awk for the "check" target.  The system "awk" is bad on
+# some platforms.
+# Always define AMTAR for backward compatibility.
+
+AMTAR=${AMTAR-"${am_missing_run}tar"}
+
+am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -'
+
+
+
+
+
+
+
+ac_config_headers="$ac_config_headers config.h"
+
+
+# Checks for programs.
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args.
+set dummy ${ac_tool_prefix}gcc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_CC="${ac_tool_prefix}gcc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_CC"; then
+  ac_ct_CC=$CC
+  # Extract the first word of "gcc", so it can be a program name with args.
+set dummy gcc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_CC"; then
+  ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_CC="gcc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_CC=$ac_cv_prog_ac_ct_CC
+if test -n "$ac_ct_CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
+$as_echo "$ac_ct_CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_CC" = x; then
+    CC=""
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    CC=$ac_ct_CC
+  fi
+else
+  CC="$ac_cv_prog_CC"
+fi
+
+if test -z "$CC"; then
+          if test -n "$ac_tool_prefix"; then
+    # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args.
+set dummy ${ac_tool_prefix}cc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_CC="${ac_tool_prefix}cc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  fi
+fi
+if test -z "$CC"; then
+  # Extract the first word of "cc", so it can be a program name with args.
+set dummy cc; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+  ac_prog_rejected=no
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then
+       ac_prog_rejected=yes
+       continue
+     fi
+    ac_cv_prog_CC="cc"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+if test $ac_prog_rejected = yes; then
+  # We found a bogon in the path, so make sure we never use it.
+  set dummy $ac_cv_prog_CC
+  shift
+  if test $# != 0; then
+    # We chose a different compiler from the bogus one.
+    # However, it has the same basename, so the bogon will be chosen
+    # first if we set CC to just the basename; use the full file name.
+    shift
+    ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@"
+  fi
+fi
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$CC"; then
+  if test -n "$ac_tool_prefix"; then
+  for ac_prog in cl.exe
+  do
+    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$CC"; then
+  ac_cv_prog_CC="$CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_CC="$ac_tool_prefix$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+CC=$ac_cv_prog_CC
+if test -n "$CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
+$as_echo "$CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+    test -n "$CC" && break
+  done
+fi
+if test -z "$CC"; then
+  ac_ct_CC=$CC
+  for ac_prog in cl.exe
+do
+  # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_CC+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_CC"; then
+  ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_CC="$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_CC=$ac_cv_prog_ac_ct_CC
+if test -n "$ac_ct_CC"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
+$as_echo "$ac_ct_CC" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  test -n "$ac_ct_CC" && break
+done
+
+  if test "x$ac_ct_CC" = x; then
+    CC=""
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    CC=$ac_ct_CC
+  fi
+fi
+
+fi
+
+
+test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "no acceptable C compiler found in \$PATH
+See \`config.log' for more details" "$LINENO" 5; }
+
+# Provide some information about the compiler.
+$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5
+set X $ac_compile
+ac_compiler=$2
+for ac_option in --version -v -V -qversion; do
+  { { ac_try="$ac_compiler $ac_option >&5"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_compiler $ac_option >&5") 2>conftest.err
+  ac_status=$?
+  if test -s conftest.err; then
+    sed '10a\
+... rest of stderr output deleted ...
+         10q' conftest.err >conftest.er1
+    cat conftest.er1 >&5
+  fi
+  rm -f conftest.er1 conftest.err
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+done
+
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+ac_clean_files_save=$ac_clean_files
+ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out"
+# Try to create an executable without -o first, and disregard a.out.
+# This helps us diagnose broken compilers and gives us an idea of the
+# executable suffix (exeext).
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5
+$as_echo_n "checking whether the C compiler works... " >&6; }
+ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'`
+
+# The possible output files:
+ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*"
+
+ac_rmfiles=
+for ac_file in $ac_files
+do
+  case $ac_file in
+    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
+    * ) ac_rmfiles="$ac_rmfiles $ac_file";;
+  esac
+done
+rm -f $ac_rmfiles
+
+if { { ac_try="$ac_link_default"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link_default") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then :
+  # Autoconf-2.13 could set the ac_cv_exeext variable to `no'.
+# So ignore a value of `no', otherwise this would lead to `EXEEXT = no'
+# in a Makefile.  We should not override ac_cv_exeext if it was cached,
+# so that the user can short-circuit this test for compilers unknown to
+# Autoconf.
+for ac_file in $ac_files ''
+do
+  test -f "$ac_file" || continue
+  case $ac_file in
+    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj )
+	;;
+    [ab].out )
+	# We found the default executable, but exeext='' is most
+	# certainly right.
+	break;;
+    *.* )
+	if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no;
+	then :; else
+	   ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
+	fi
+	# We set ac_cv_exeext here because the later test for it is not
+	# safe: cross compilers may not add the suffix if given an `-o'
+	# argument, so we may need to know it at that point already.
+	# Even if this section looks crufty: it has the advantage of
+	# actually working.
+	break;;
+    * )
+	break;;
+  esac
+done
+test "$ac_cv_exeext" = no && ac_cv_exeext=
+
+else
+  ac_file=''
+fi
+if test -z "$ac_file"; then :
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+$as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error 77 "C compiler cannot create executables
+See \`config.log' for more details" "$LINENO" 5; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5
+$as_echo_n "checking for C compiler default output file name... " >&6; }
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5
+$as_echo "$ac_file" >&6; }
+ac_exeext=$ac_cv_exeext
+
+rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out
+ac_clean_files=$ac_clean_files_save
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5
+$as_echo_n "checking for suffix of executables... " >&6; }
+if { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then :
+  # If both `conftest.exe' and `conftest' are `present' (well, observable)
+# catch `conftest.exe'.  For instance with Cygwin, `ls conftest' will
+# work properly (i.e., refer to `conftest.exe'), while it won't with
+# `rm'.
+for ac_file in conftest.exe conftest conftest.*; do
+  test -f "$ac_file" || continue
+  case $ac_file in
+    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
+    *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
+	  break;;
+    * ) break;;
+  esac
+done
+else
+  { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "cannot compute suffix of executables: cannot compile and link
+See \`config.log' for more details" "$LINENO" 5; }
+fi
+rm -f conftest conftest$ac_cv_exeext
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5
+$as_echo "$ac_cv_exeext" >&6; }
+
+rm -f conftest.$ac_ext
+EXEEXT=$ac_cv_exeext
+ac_exeext=$EXEEXT
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdio.h>
+int
+main ()
+{
+FILE *f = fopen ("conftest.out", "w");
+ return ferror (f) || fclose (f) != 0;
+
+  ;
+  return 0;
+}
+_ACEOF
+ac_clean_files="$ac_clean_files conftest.out"
+# Check that the compiler produces executables we can run.  If not, either
+# the compiler is broken, or we cross compile.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5
+$as_echo_n "checking whether we are cross compiling... " >&6; }
+if test "$cross_compiling" != yes; then
+  { { ac_try="$ac_link"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_link") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+  if { ac_try='./conftest$ac_cv_exeext'
+  { { case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_try") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; }; then
+    cross_compiling=no
+  else
+    if test "$cross_compiling" = maybe; then
+	cross_compiling=yes
+    else
+	{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "cannot run C compiled programs.
+If you meant to cross compile, use \`--host'.
+See \`config.log' for more details" "$LINENO" 5; }
+    fi
+  fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5
+$as_echo "$cross_compiling" >&6; }
+
+rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out
+ac_clean_files=$ac_clean_files_save
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5
+$as_echo_n "checking for suffix of object files... " >&6; }
+if ${ac_cv_objext+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+rm -f conftest.o conftest.obj
+if { { ac_try="$ac_compile"
+case "(($ac_try" in
+  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
+  *) ac_try_echo=$ac_try;;
+esac
+eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
+$as_echo "$ac_try_echo"; } >&5
+  (eval "$ac_compile") 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then :
+  for ac_file in conftest.o conftest.obj conftest.*; do
+  test -f "$ac_file" || continue;
+  case $ac_file in
+    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;;
+    *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'`
+       break;;
+  esac
+done
+else
+  $as_echo "$as_me: failed program was:" >&5
+sed 's/^/| /' conftest.$ac_ext >&5
+
+{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "cannot compute suffix of object files: cannot compile
+See \`config.log' for more details" "$LINENO" 5; }
+fi
+rm -f conftest.$ac_cv_objext conftest.$ac_ext
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5
+$as_echo "$ac_cv_objext" >&6; }
+OBJEXT=$ac_cv_objext
+ac_objext=$OBJEXT
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5
+$as_echo_n "checking whether we are using the GNU C compiler... " >&6; }
+if ${ac_cv_c_compiler_gnu+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+#ifndef __GNUC__
+       choke me
+#endif
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_compiler_gnu=yes
+else
+  ac_compiler_gnu=no
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+ac_cv_c_compiler_gnu=$ac_compiler_gnu
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5
+$as_echo "$ac_cv_c_compiler_gnu" >&6; }
+if test $ac_compiler_gnu = yes; then
+  GCC=yes
+else
+  GCC=
+fi
+ac_test_CFLAGS=${CFLAGS+set}
+ac_save_CFLAGS=$CFLAGS
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5
+$as_echo_n "checking whether $CC accepts -g... " >&6; }
+if ${ac_cv_prog_cc_g+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_save_c_werror_flag=$ac_c_werror_flag
+   ac_c_werror_flag=yes
+   ac_cv_prog_cc_g=no
+   CFLAGS="-g"
+   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_prog_cc_g=yes
+else
+  CFLAGS=""
+      cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+
+else
+  ac_c_werror_flag=$ac_save_c_werror_flag
+	 CFLAGS="-g"
+	 cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_prog_cc_g=yes
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+   ac_c_werror_flag=$ac_save_c_werror_flag
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5
+$as_echo "$ac_cv_prog_cc_g" >&6; }
+if test "$ac_test_CFLAGS" = set; then
+  CFLAGS=$ac_save_CFLAGS
+elif test $ac_cv_prog_cc_g = yes; then
+  if test "$GCC" = yes; then
+    CFLAGS="-g -O2"
+  else
+    CFLAGS="-g"
+  fi
+else
+  if test "$GCC" = yes; then
+    CFLAGS="-O2"
+  else
+    CFLAGS=
+  fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5
+$as_echo_n "checking for $CC option to accept ISO C89... " >&6; }
+if ${ac_cv_prog_cc_c89+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_cv_prog_cc_c89=no
+ac_save_CC=$CC
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdarg.h>
+#include <stdio.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+/* Most of the following tests are stolen from RCS 5.7's src/conf.sh.  */
+struct buf { int x; };
+FILE * (*rcsopen) (struct buf *, struct stat *, int);
+static char *e (p, i)
+     char **p;
+     int i;
+{
+  return p[i];
+}
+static char *f (char * (*g) (char **, int), char **p, ...)
+{
+  char *s;
+  va_list v;
+  va_start (v,p);
+  s = g (p, va_arg (v,int));
+  va_end (v);
+  return s;
+}
+
+/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default.  It has
+   function prototypes and stuff, but not '\xHH' hex character constants.
+   Unfortunately these don't provoke an error; instead they are silently
+   treated as 'x'.  The following induces an error, until -std is added to get
+   proper ANSI mode.  Curiously '\x00'!='x' always comes out true, for an
+   array size at least.  It's necessary to write '\x00'==0 to get something
+   that's true only with -std.  */
+int osf4_cc_array ['\x00' == 0 ? 1 : -1];
+
+/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters
+   inside strings and character constants.  */
+#define FOO(x) 'x'
+int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1];
+
+int test (int i, double x);
+struct s1 {int (*f) (int a);};
+struct s2 {int (*f) (double a);};
+int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int);
+int argc;
+char **argv;
+int
+main ()
+{
+return f (e, argv, 0) != argv[0]  ||  f (e, argv, 1) != argv[1];
+  ;
+  return 0;
+}
+_ACEOF
+for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \
+	-Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__"
+do
+  CC="$ac_save_CC $ac_arg"
+  if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_prog_cc_c89=$ac_arg
+fi
+rm -f core conftest.err conftest.$ac_objext
+  test "x$ac_cv_prog_cc_c89" != "xno" && break
+done
+rm -f conftest.$ac_ext
+CC=$ac_save_CC
+
+fi
+# AC_CACHE_VAL
+case "x$ac_cv_prog_cc_c89" in
+  x)
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5
+$as_echo "none needed" >&6; } ;;
+  xno)
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5
+$as_echo "unsupported" >&6; } ;;
+  *)
+    CC="$CC $ac_cv_prog_cc_c89"
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5
+$as_echo "$ac_cv_prog_cc_c89" >&6; } ;;
+esac
+if test "x$ac_cv_prog_cc_c89" != xno; then :
+
+fi
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
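+
+# Automake dependency tracking: per-object dependency files live in
+# $DEPDIR and are produced via the depcomp wrapper configured below.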
+DEPDIR="${am__leading_dot}deps"
+
+ac_config_commands="$ac_config_commands depfiles"
+
+
+am_make=${MAKE-make}
+cat > confinc << 'END'
+am__doit:
+	@echo done
+.PHONY: am__doit
+END
+# If we don't find an include directive, just comment out the code.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5
+$as_echo_n "checking for style of include used by $am_make... " >&6; }
+am__include="#"
+am__quote=
+_am_result=none
+# First try GNU make style include.
+echo "include confinc" > confmf
+# We grep out `Entering directory' and `Leaving directory'
+# messages which can occur if `w' ends up in MAKEFLAGS.
+# In particular we don't look at `^make:' because GNU make might
+# be invoked under some other name (usually "gmake"), in which
+# case it prints its new name instead of `make'.
+if test "`$am_make -s -f confmf 2> /dev/null | grep -v 'ing directory'`" = "done"; then
+   am__include=include
+   am__quote=
+   _am_result=GNU
+fi
+# Now try BSD make style include.
+if test "$am__include" = "#"; then
+   echo '.include "confinc"' > confmf
+   if test "`$am_make -s -f confmf 2> /dev/null`" = "done"; then
+      am__include=.include
+      am__quote="\""
+      _am_result=BSD
+   fi
+fi
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5
+$as_echo "$_am_result" >&6; }
+rm -f confinc confmf
+
+# Check whether --enable-dependency-tracking was given.
+if test "${enable_dependency_tracking+set}" = set; then :
+  enableval=$enable_dependency_tracking;
+fi
+
+if test "x$enable_dependency_tracking" != xno; then
+  am_depcomp="$ac_aux_dir/depcomp"
+  AMDEPBACKSLASH='\'
+fi
+ if test "x$enable_dependency_tracking" != xno; then
+  AMDEP_TRUE=
+  AMDEP_FALSE='#'
+else
+  AMDEP_TRUE='#'
+  AMDEP_FALSE=
+fi
+
+
+
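+# Probe which dependency-extraction style works for this C compiler
+# (e.g. gcc3 or dashmstdout); the winning mode is recorded in CCDEPMODE.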
+depcc="$CC"   am_compiler_list=
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5
+$as_echo_n "checking dependency style of $depcc... " >&6; }
+if ${am_cv_CC_dependencies_compiler_type+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
+  # We make a subdir and do the tests there.  Otherwise we can end up
+  # making bogus files that we don't know about and never remove.  For
+  # instance it was reported that on HP-UX the gcc test will end up
+  # making a dummy file named `D' -- because `-MD' means `put the output
+  # in D'.
+  mkdir conftest.dir
+  # Copy depcomp to subdir because otherwise we won't find it if we're
+  # using a relative directory.
+  cp "$am_depcomp" conftest.dir
+  cd conftest.dir
+  # We will build objects and dependencies in a subdirectory because
+  # it helps to detect inapplicable dependency modes.  For instance
+  # both Tru64's cc and ICC support -MD to output dependencies as a
+  # side effect of compilation, but ICC will put the dependencies in
+  # the current directory while Tru64 will put them in the object
+  # directory.
+  mkdir sub
+
+  am_cv_CC_dependencies_compiler_type=none
+  if test "$am_compiler_list" = ""; then
+     am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp`
+  fi
+  for depmode in $am_compiler_list; do
+    # Set up a source with many dependencies, because some compilers
+    # like to wrap large dependency lists on column 80 (with \), and
+    # we should not choose a depcomp mode which is confused by this.
+    #
+    # We need to recreate these files for each test, as the compiler may
+    # overwrite some of them when testing with obscure command lines.
+    # This happens at least with the AIX C compiler.
+    : > sub/conftest.c
+    for i in 1 2 3 4 5 6; do
+      echo '#include "conftst'$i'.h"' >> sub/conftest.c
+      # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with
+      # Solaris 8's {/usr,}/bin/sh.
+      touch sub/conftst$i.h
+    done
+    echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
+
+    case $depmode in
+    nosideeffect)
+      # after this tag, mechanisms are not by side-effect, so they'll
+      # only be used when explicitly requested
+      if test "x$enable_dependency_tracking" = xyes; then
+	continue
+      else
+	break
+      fi
+      ;;
+    none) break ;;
+    esac
+    # We check with `-c' and `-o' for the sake of the "dashmstdout"
+    # mode.  It turns out that the SunPro C++ compiler does not properly
+    # handle `-M -o', and we need to detect this.
+    if depmode=$depmode \
+       source=sub/conftest.c object=sub/conftest.${OBJEXT-o} \
+       depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
+       $SHELL ./depcomp $depcc -c -o sub/conftest.${OBJEXT-o} sub/conftest.c \
+         >/dev/null 2>conftest.err &&
+       grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
+       grep sub/conftest.${OBJEXT-o} sub/conftest.Po > /dev/null 2>&1 &&
+       ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
+      # icc doesn't choke on unknown options; it will just issue warnings
+      # or remarks (even with -Werror).  So we grep stderr for any message
+      # that says an option was ignored or not supported.
+      # When given -MP, icc 7.0 and 7.1 complain thusly:
+      #   icc: Command line warning: ignoring option '-M'; no argument required
+      # The diagnosis changed in icc 8.0:
+      #   icc: Command line remark: option '-MP' not supported
+      if (grep 'ignoring option' conftest.err ||
+          grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
+        am_cv_CC_dependencies_compiler_type=$depmode
+        break
+      fi
+    fi
+  done
+
+  cd ..
+  rm -rf conftest.dir
+else
+  am_cv_CC_dependencies_compiler_type=none
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5
+$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; }
+CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type
+
+ if
+  test "x$enable_dependency_tracking" != xno \
+  && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then
+  am__fastdepCC_TRUE=
+  am__fastdepCC_FALSE='#'
+else
+  am__fastdepCC_TRUE='#'
+  am__fastdepCC_FALSE=
+fi
+
+
+case `pwd` in
+  *\ * | *\	*)
+    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5
+$as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;;
+esac
+
+
+
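+# Version bookkeeping for the libtool macros that generated this block.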
+macro_version='2.2.10'
+macro_revision='1.3175'
+
+
+
+
+
+
+
+
+
+
+
+
+
+ltmain="$ac_aux_dir/ltmain.sh"
+
+# Make sure we can run config.sub.
+$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 ||
+  as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5
+$as_echo_n "checking build system type... " >&6; }
+if ${ac_cv_build+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_build_alias=$build_alias
+test "x$ac_build_alias" = x &&
+  ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"`
+test "x$ac_build_alias" = x &&
+  as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5
+ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` ||
+  as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5
+$as_echo "$ac_cv_build" >&6; }
+case $ac_cv_build in
+*-*-*) ;;
+*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;;
+esac
+build=$ac_cv_build
+ac_save_IFS=$IFS; IFS='-'
+set x $ac_cv_build
+shift
+build_cpu=$1
+build_vendor=$2
+shift; shift
+# Remember, the first character of IFS is used to create $*,
+# except with old shells:
+build_os=$*
+IFS=$ac_save_IFS
+case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5
+$as_echo_n "checking host system type... " >&6; }
+if ${ac_cv_host+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test "x$host_alias" = x; then
+  ac_cv_host=$ac_cv_build
+else
+  ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` ||
+    as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5
+$as_echo "$ac_cv_host" >&6; }
+case $ac_cv_host in
+*-*-*) ;;
+*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;;
+esac
+host=$ac_cv_host
+ac_save_IFS=$IFS; IFS='-'
+set x $ac_cv_host
+shift
+host_cpu=$1
+host_vendor=$2
+shift; shift
+# Remember, the first character of IFS is used to create $*,
+# except with old shells:
+host_os=$*
+IFS=$ac_save_IFS
+case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac
+
+
+# Backslashify metacharacters that are still active within
+# double-quoted strings.
+sed_quote_subst='s/\(["`$\\]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
+# Sed substitution to delay expansion of an escaped shell variable in a
+# double_quote_subst'ed string.
+delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
+
+# Sed substitution to delay expansion of an escaped single quote.
+delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g'
+
+# Sed substitution to avoid accidental globbing in evaled expressions
+no_glob_subst='s/\*/\\\*/g'
+
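+# Build a very long string of backslashes; it is used below to check
+# which echo-like command can print it back without mangling it.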
+ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
+$as_echo_n "checking how to print strings... " >&6; }
+# Test print first, because it will be a builtin if present.
+if test "X`print -r -- -n 2>/dev/null`" = X-n && \
+   test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='print -r --'
+elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='printf %s\n'
+else
+  # Use this function as a fallback that always works.
+  func_fallback_echo ()
+  {
+    eval 'cat <<_LTECHO_EOF
+$1
+_LTECHO_EOF'
+  }
+  ECHO='func_fallback_echo'
+fi
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO ""
+}
+
+case "$ECHO" in
+  printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5
+$as_echo "printf" >&6; } ;;
+  print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5
+$as_echo "print -r" >&6; } ;;
+  *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: cat" >&5
+$as_echo "cat" >&6; } ;;
+esac
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5
+$as_echo_n "checking for a sed that does not truncate output... " >&6; }
+if ${ac_cv_path_SED+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+            ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/
+     for ac_i in 1 2 3 4 5 6 7; do
+       ac_script="$ac_script$as_nl$ac_script"
+     done
+     echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed
+     { ac_script=; unset ac_script;}
+     if test -z "$SED"; then
+  ac_path_SED_found=false
+  # Loop through the user's path and test for each of PROGNAME-LIST
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in sed gsed; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      ac_path_SED="$as_dir/$ac_prog$ac_exec_ext"
+      { test -f "$ac_path_SED" && $as_test_x "$ac_path_SED"; } || continue
+# Check for GNU ac_path_SED and select it if it is found.
+case `"$ac_path_SED" --version 2>&1` in
+*GNU*)
+  ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;;
+*)
+  ac_count=0
+  $as_echo_n 0123456789 >"conftest.in"
+  while :
+  do
+    cat "conftest.in" "conftest.in" >"conftest.tmp"
+    mv "conftest.tmp" "conftest.in"
+    cp "conftest.in" "conftest.nl"
+    $as_echo '' >> "conftest.nl"
+    "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break
+    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+    as_fn_arith $ac_count + 1 && ac_count=$as_val
+    if test $ac_count -gt ${ac_path_SED_max-0}; then
+      # Best one so far; save it, but keep looking for a better one
+      ac_cv_path_SED="$ac_path_SED"
+      ac_path_SED_max=$ac_count
+    fi
+    # 10*(2^10) chars as input seems more than enough
+    test $ac_count -gt 10 && break
+  done
+  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+
+      $ac_path_SED_found && break 3
+    done
+  done
+  done
+IFS=$as_save_IFS
+  if test -z "$ac_cv_path_SED"; then
+    as_fn_error $? "no acceptable sed could be found in \$PATH" "$LINENO" 5
+  fi
+else
+  ac_cv_path_SED=$SED
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5
+$as_echo "$ac_cv_path_SED" >&6; }
+ SED="$ac_cv_path_SED"
+  rm -f conftest.sed
+
+test -z "$SED" && SED=sed
+Xsed="$SED -e 1s/^X//"
+
+
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5
+$as_echo_n "checking for grep that handles long lines and -e... " >&6; }
+if ${ac_cv_path_GREP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -z "$GREP"; then
+  ac_path_GREP_found=false
+  # Loop through the user's path and test for each of PROGNAME-LIST
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in grep ggrep; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext"
+      { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue
+# Check for GNU ac_path_GREP and select it if it is found.
+case `"$ac_path_GREP" --version 2>&1` in
+*GNU*)
+  ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;;
+*)
+  ac_count=0
+  $as_echo_n 0123456789 >"conftest.in"
+  while :
+  do
+    cat "conftest.in" "conftest.in" >"conftest.tmp"
+    mv "conftest.tmp" "conftest.in"
+    cp "conftest.in" "conftest.nl"
+    $as_echo 'GREP' >> "conftest.nl"
+    "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break
+    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+    as_fn_arith $ac_count + 1 && ac_count=$as_val
+    if test $ac_count -gt ${ac_path_GREP_max-0}; then
+      # Best one so far; save it, but keep looking for a better one
+      ac_cv_path_GREP="$ac_path_GREP"
+      ac_path_GREP_max=$ac_count
+    fi
+    # 10*(2^10) chars as input seems more than enough
+    test $ac_count -gt 10 && break
+  done
+  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+
+      $ac_path_GREP_found && break 3
+    done
+  done
+  done
+IFS=$as_save_IFS
+  if test -z "$ac_cv_path_GREP"; then
+    as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+  fi
+else
+  ac_cv_path_GREP=$GREP
+fi
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5
+$as_echo "$ac_cv_path_GREP" >&6; }
+ GREP="$ac_cv_path_GREP"
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5
+$as_echo_n "checking for egrep... " >&6; }
+if ${ac_cv_path_EGREP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if echo a | $GREP -E '(a|b)' >/dev/null 2>&1
+   then ac_cv_path_EGREP="$GREP -E"
+   else
+     if test -z "$EGREP"; then
+  ac_path_EGREP_found=false
+  # Loop through the user's path and test for each of PROGNAME-LIST
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in egrep; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext"
+      { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue
+# Check for GNU ac_path_EGREP and select it if it is found.
+case `"$ac_path_EGREP" --version 2>&1` in
+*GNU*)
+  ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;;
+*)
+  ac_count=0
+  $as_echo_n 0123456789 >"conftest.in"
+  while :
+  do
+    cat "conftest.in" "conftest.in" >"conftest.tmp"
+    mv "conftest.tmp" "conftest.in"
+    cp "conftest.in" "conftest.nl"
+    $as_echo 'EGREP' >> "conftest.nl"
+    "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break
+    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+    as_fn_arith $ac_count + 1 && ac_count=$as_val
+    if test $ac_count -gt ${ac_path_EGREP_max-0}; then
+      # Best one so far; save it, but keep looking for a better one
+      ac_cv_path_EGREP="$ac_path_EGREP"
+      ac_path_EGREP_max=$ac_count
+    fi
+    # 10*(2^10) chars as input seems more than enough
+    test $ac_count -gt 10 && break
+  done
+  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+
+      $ac_path_EGREP_found && break 3
+    done
+  done
+  done
+IFS=$as_save_IFS
+  if test -z "$ac_cv_path_EGREP"; then
+    as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+  fi
+else
+  ac_cv_path_EGREP=$EGREP
+fi
+
+   fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5
+$as_echo "$ac_cv_path_EGREP" >&6; }
+ EGREP="$ac_cv_path_EGREP"
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5
+$as_echo_n "checking for fgrep... " >&6; }
+if ${ac_cv_path_FGREP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1
+   then ac_cv_path_FGREP="$GREP -F"
+   else
+     if test -z "$FGREP"; then
+  ac_path_FGREP_found=false
+  # Loop through the user's path and test for each of PROGNAME-LIST
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_prog in fgrep; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext"
+      { test -f "$ac_path_FGREP" && $as_test_x "$ac_path_FGREP"; } || continue
+# Check for GNU ac_path_FGREP and select it if it is found.
+case `"$ac_path_FGREP" --version 2>&1` in
+*GNU*)
+  ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;;
+*)
+  ac_count=0
+  $as_echo_n 0123456789 >"conftest.in"
+  while :
+  do
+    cat "conftest.in" "conftest.in" >"conftest.tmp"
+    mv "conftest.tmp" "conftest.in"
+    cp "conftest.in" "conftest.nl"
+    $as_echo 'FGREP' >> "conftest.nl"
+    "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break
+    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
+    as_fn_arith $ac_count + 1 && ac_count=$as_val
+    if test $ac_count -gt ${ac_path_FGREP_max-0}; then
+      # Best one so far; save it, but keep looking for a better one
+      ac_cv_path_FGREP="$ac_path_FGREP"
+      ac_path_FGREP_max=$ac_count
+    fi
+    # 10*(2^10) chars as input seems more than enough
+    test $ac_count -gt 10 && break
+  done
+  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
+esac
+
+      $ac_path_FGREP_found && break 3
+    done
+  done
+  done
+IFS=$as_save_IFS
+  if test -z "$ac_cv_path_FGREP"; then
+    as_fn_error $? "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+  fi
+else
+  ac_cv_path_FGREP=$FGREP
+fi
+
+   fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5
+$as_echo "$ac_cv_path_FGREP" >&6; }
+ FGREP="$ac_cv_path_FGREP"
+
+
+test -z "$GREP" && GREP=grep
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+# Check whether --with-gnu-ld was given.
+if test "${with_gnu_ld+set}" = set; then :
+  withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes
+else
+  with_gnu_ld=no
+fi
+
+ac_prog=ld
+if test "$GCC" = yes; then
+  # Check if gcc -print-prog-name=ld gives a path.
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5
+$as_echo_n "checking for ld used by $CC... " >&6; }
+  case $host in
+  *-*-mingw*)
+    # gcc leaves a trailing carriage return which upsets mingw
+    ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
+  *)
+    ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
+  esac
+  case $ac_prog in
+    # Accept absolute paths.
+    [\\/]* | ?:[\\/]*)
+      re_direlt='/[^/][^/]*/\.\./'
+      # Canonicalize the pathname of ld
+      ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
+      while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
+	ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
+      done
+      test -z "$LD" && LD="$ac_prog"
+      ;;
+  "")
+    # If it fails, then pretend we aren't using GCC.
+    ac_prog=ld
+    ;;
+  *)
+    # If it is relative, then search for the first ld in PATH.
+    with_gnu_ld=unknown
+    ;;
+  esac
+elif test "$with_gnu_ld" = yes; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5
+$as_echo_n "checking for GNU ld... " >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5
+$as_echo_n "checking for non-GNU ld... " >&6; }
+fi
+if ${lt_cv_path_LD+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -z "$LD"; then
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+  for ac_dir in $PATH; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
+      lt_cv_path_LD="$ac_dir/$ac_prog"
+      # Check to see if the program is GNU ld.  I'd rather use --version,
+      # but apparently some variants of GNU ld only accept -v.
+      # Break only if it was the GNU/non-GNU ld that we prefer.
+      case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in
+      *GNU* | *'with BFD'*)
+	test "$with_gnu_ld" != no && break
+	;;
+      *)
+	test "$with_gnu_ld" != yes && break
+	;;
+      esac
+    fi
+  done
+  IFS="$lt_save_ifs"
+else
+  lt_cv_path_LD="$LD" # Let the user override the test with a path.
+fi
+fi
+
+LD="$lt_cv_path_LD"
+if test -n "$LD"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5
+$as_echo "$LD" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5
+$as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; }
+if ${lt_cv_prog_gnu_ld+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  # I'd rather use --version here, but apparently some GNU lds only accept -v.
+case `$LD -v 2>&1 </dev/null` in
+*GNU* | *'with BFD'*)
+  lt_cv_prog_gnu_ld=yes
+  ;;
+*)
+  lt_cv_prog_gnu_ld=no
+  ;;
+esac
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_gnu_ld" >&5
+$as_echo "$lt_cv_prog_gnu_ld" >&6; }
+with_gnu_ld=$lt_cv_prog_gnu_ld
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5
+$as_echo_n "checking for BSD- or MS-compatible name lister (nm)... " >&6; }
+if ${lt_cv_path_NM+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$NM"; then
+  # Let the user override the test.
+  lt_cv_path_NM="$NM"
+else
+  lt_nm_to_check="${ac_tool_prefix}nm"
+  if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
+    lt_nm_to_check="$lt_nm_to_check nm"
+  fi
+  for lt_tmp_nm in $lt_nm_to_check; do
+    lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+    for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
+      IFS="$lt_save_ifs"
+      test -z "$ac_dir" && ac_dir=.
+      tmp_nm="$ac_dir/$lt_tmp_nm"
+      if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then
+	# Check to see if the nm accepts a BSD-compat flag.
+	# Adding the `sed 1q' prevents false positives on HP-UX, which says:
+	#   nm: unknown option "B" ignored
+	# Tru64's nm complains that /dev/null is an invalid object file
+	case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in
+	*/dev/null* | *'Invalid file or object type'*)
+	  lt_cv_path_NM="$tmp_nm -B"
+	  break
+	  ;;
+	*)
+	  case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
+	  */dev/null*)
+	    lt_cv_path_NM="$tmp_nm -p"
+	    break
+	    ;;
+	  *)
+	    lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
+	    continue # so that we can try to find one that supports BSD flags
+	    ;;
+	  esac
+	  ;;
+	esac
+      fi
+    done
+    IFS="$lt_save_ifs"
+  done
+  : ${lt_cv_path_NM=no}
+fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5
+$as_echo "$lt_cv_path_NM" >&6; }
+if test "$lt_cv_path_NM" != "no"; then
+  NM="$lt_cv_path_NM"
+else
+  # Didn't find any BSD compatible name lister, look for dumpbin.
+  if test -n "$DUMPBIN"; then :
+    # Let the user override the test.
+  else
+    if test -n "$ac_tool_prefix"; then
+  for ac_prog in dumpbin "link -dump"
+  do
+    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_DUMPBIN+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$DUMPBIN"; then
+  ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+DUMPBIN=$ac_cv_prog_DUMPBIN
+if test -n "$DUMPBIN"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5
+$as_echo "$DUMPBIN" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+    test -n "$DUMPBIN" && break
+  done
+fi
+if test -z "$DUMPBIN"; then
+  ac_ct_DUMPBIN=$DUMPBIN
+  for ac_prog in dumpbin "link -dump"
+do
+  # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_DUMPBIN+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_DUMPBIN"; then
+  ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_DUMPBIN="$ac_prog"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN
+if test -n "$ac_ct_DUMPBIN"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5
+$as_echo "$ac_ct_DUMPBIN" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  test -n "$ac_ct_DUMPBIN" && break
+done
+
+  if test "x$ac_ct_DUMPBIN" = x; then
+    DUMPBIN=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    DUMPBIN=$ac_ct_DUMPBIN
+  fi
+fi
+
+    case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in
+    *COFF*)
+      DUMPBIN="$DUMPBIN -symbols"
+      ;;
+    *)
+      DUMPBIN=:
+      ;;
+    esac
+  fi
+
+  if test "$DUMPBIN" != ":"; then
+    NM="$DUMPBIN"
+  fi
+fi
+test -z "$NM" && NM=nm
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5
+$as_echo_n "checking the name lister ($NM) interface... " >&6; }
+if ${lt_cv_nm_interface+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_nm_interface="BSD nm"
+  echo "int some_variable = 0;" > conftest.$ac_ext
+  (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&5)
+  (eval "$ac_compile" 2>conftest.err)
+  cat conftest.err >&5
+  (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&5)
+  (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
+  cat conftest.err >&5
+  (eval echo "\"\$as_me:$LINENO: output\"" >&5)
+  cat conftest.out >&5
+  if $GREP 'External.*some_variable' conftest.out > /dev/null; then
+    lt_cv_nm_interface="MS dumpbin"
+  fi
+  rm -r -f conftest*
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5
+$as_echo "$lt_cv_nm_interface" >&6; }
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5
+$as_echo_n "checking whether ln -s works... " >&6; }
+LN_S=$as_ln_s
+if test "$LN_S" = "ln -s"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5
+$as_echo "no, using $LN_S" >&6; }
+fi
+
+# find the maximum length of command line arguments
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5
+$as_echo_n "checking the maximum length of command line arguments... " >&6; }
+if ${lt_cv_sys_max_cmd_len+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+    i=0
+  teststring="ABCD"
+
+  case $build_os in
+  msdosdjgpp*)
+    # On DJGPP, this test can blow up pretty badly due to problems in libc
+    # (any single argument exceeding 2000 bytes causes a buffer overrun
+    # during glob expansion).  Even if it were fixed, the result of this
+    # check would be larger than it should be.
+    lt_cv_sys_max_cmd_len=12288;    # 12K is about right
+    ;;
+
+  gnu*)
+    # Under GNU Hurd, this test is not required because there is
+    # no limit to the length of command line arguments.
+    # Libtool will interpret -1 as no limit whatsoever
+    lt_cv_sys_max_cmd_len=-1;
+    ;;
+
+  cygwin* | mingw* | cegcc*)
+    # On Win9x/ME, this test blows up -- it succeeds, but takes
+    # about 5 minutes as the teststring grows exponentially.
+    # Worse, since 9x/ME are not pre-emptively multitasking,
+    # you end up with a "frozen" computer, even though with patience
+    # the test eventually succeeds (with a max line length of 256k).
+    # Instead, let's just punt: use the minimum line length reported by
+    # all of the supported platforms: 8192 (on NT/2K/XP).
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  mint*)
+    # On MiNT this can take a long time and run out of memory.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  amigaos*)
+    # On AmigaOS with pdksh, this test takes hours, literally.
+    # So we just punt and use a minimum line length of 8192.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  netbsd* | freebsd* | openbsd* | darwin* | dragonfly*)
+    # This has been around since 386BSD, at least; likely even earlier.
+    if test -x /sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax`
+    elif test -x /usr/sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax`
+    else
+      lt_cv_sys_max_cmd_len=65536	# usable default for all BSDs
+    fi
+    # And add a safety zone
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    ;;
+
+  interix*)
+    # We know the value 262144 and hardcode it with a safety zone (like BSD)
+    lt_cv_sys_max_cmd_len=196608
+    ;;
+
+  osf*)
+    # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure
+    # due to this test when exec_disable_arg_limit is 1 on Tru64.  It is not
+    # nice to cause kernel panics, so let's avoid the loop below.
+    # First set a reasonable default.
+    lt_cv_sys_max_cmd_len=16384
+    #
+    if test -x /sbin/sysconfig; then
+      case `/sbin/sysconfig -q proc exec_disable_arg_limit` in
+        *1*) lt_cv_sys_max_cmd_len=-1 ;;
+      esac
+    fi
+    ;;
+  sco3.2v5*)
+    lt_cv_sys_max_cmd_len=102400
+    ;;
+  sysv5* | sco5v6* | sysv4.2uw2*)
+    kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null`
+    if test -n "$kargmax"; then
+      lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[	 ]//'`
+    else
+      lt_cv_sys_max_cmd_len=32768
+    fi
+    ;;
+  *)
+    lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null`
+    if test -n "$lt_cv_sys_max_cmd_len"; then
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    else
+      # Make teststring a little bigger before we do anything with it.
+      # A 1K string should be a reasonable start.
+      for i in 1 2 3 4 5 6 7 8 ; do
+        teststring=$teststring$teststring
+      done
+      SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}}
+      # If test is not a shell built-in, we'll probably end up computing a
+      # maximum length that is only half of the actual maximum length, but
+      # we can't tell.
+      while { test "X"`func_fallback_echo "$teststring$teststring" 2>/dev/null` \
+	         = "X$teststring$teststring"; } >/dev/null 2>&1 &&
+	      test $i != 17 # 1/2 MB should be enough
+      do
+        i=`expr $i + 1`
+        teststring=$teststring$teststring
+      done
+      # Only check the string length outside the loop.
+      lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1`
+      teststring=
+      # Add a significant safety factor because C++ compilers can tack on
+      # massive amounts of additional arguments before passing them to the
+      # linker.  It appears as though 1/2 is a usable value.
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2`
+    fi
+    ;;
+  esac
+
+fi
+
+if test -n "$lt_cv_sys_max_cmd_len"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5
+$as_echo "$lt_cv_sys_max_cmd_len" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5
+$as_echo "none" >&6; }
+fi
+max_cmd_len=$lt_cv_sys_max_cmd_len
+
+
+
+
+
+
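+# Default file utilities; the ': ${VAR=value}' idiom assigns only if VAR
+# is unset, so environment overrides are respected.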
+: ${CP="cp -f"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5
+$as_echo_n "checking whether the shell understands some XSI constructs... " >&6; }
+# Try some XSI features
+xsi_shell=no
+( _lt_dummy="a/b/c"
+  test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \
+      = c,a/b,, \
+    && eval 'test $(( 1 + 1 )) -eq 2 \
+    && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
+  && xsi_shell=yes
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5
+$as_echo "$xsi_shell" >&6; }
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5
+$as_echo_n "checking whether the shell understands \"+=\"... " >&6; }
+lt_shell_append=no
+( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \
+    >/dev/null 2>&1 \
+  && lt_shell_append=yes
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5
+$as_echo "$lt_shell_append" >&6; }
+
+
+if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
+  lt_unset=unset
+else
+  lt_unset=false
+fi
+
+
+
+
+
+# Test whether this is an EBCDIC- or ASCII-based system.
+case `echo X|tr X '\101'` in
+ A) # ASCII based system
+    # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
+  lt_SP2NL='tr \040 \012'
+  lt_NL2SP='tr \015\012 \040\040'
+  ;;
+ *) # EBCDIC based system
+  lt_SP2NL='tr \100 \n'
+  lt_NL2SP='tr \r\n \100\100'
+  ;;
+esac
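+# lt_SP2NL and lt_NL2SP translate between space- and newline-separated
+# lists, using the byte values selected above.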
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
+$as_echo_n "checking for $LD option to reload object files... " >&6; }
+if ${lt_cv_ld_reload_flag+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_ld_reload_flag='-r'
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5
+$as_echo "$lt_cv_ld_reload_flag" >&6; }
+reload_flag=$lt_cv_ld_reload_flag
+case $reload_flag in
+"" | " "*) ;;
+*) reload_flag=" $reload_flag" ;;
+esac
+reload_cmds='$LD$reload_flag -o $output$reload_objs'
+case $host_os in
+  darwin*)
+    if test "$GCC" = yes; then
+      reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
+    else
+      reload_cmds='$LD$reload_flag -o $output$reload_objs'
+    fi
+    ;;
+esac
+
+
+
+
+
+
+
+
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args.
+set dummy ${ac_tool_prefix}objdump; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_OBJDUMP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$OBJDUMP"; then
+  ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+OBJDUMP=$ac_cv_prog_OBJDUMP
+if test -n "$OBJDUMP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5
+$as_echo "$OBJDUMP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_OBJDUMP"; then
+  ac_ct_OBJDUMP=$OBJDUMP
+  # Extract the first word of "objdump", so it can be a program name with args.
+set dummy objdump; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_OBJDUMP"; then
+  ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_OBJDUMP="objdump"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP
+if test -n "$ac_ct_OBJDUMP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5
+$as_echo "$ac_ct_OBJDUMP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_OBJDUMP" = x; then
+    OBJDUMP="false"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    OBJDUMP=$ac_ct_OBJDUMP
+  fi
+else
+  OBJDUMP="$ac_cv_prog_OBJDUMP"
+fi
+
+test -z "$OBJDUMP" && OBJDUMP=objdump
+
+
+
+
+
+
+
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5
+$as_echo_n "checking how to recognize dependent libraries... " >&6; }
+if ${lt_cv_deplibs_check_method+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_file_magic_cmd='$MAGIC_CMD'
+lt_cv_file_magic_test_file=
+lt_cv_deplibs_check_method='unknown'
+# Need to set the preceding variable on all platforms that support
+# interlibrary dependencies.
+# 'none' -- dependencies not supported.
+# 'unknown' -- same as 'none', but documents that we really don't know.
+# 'pass_all' -- all dependencies passed with no checks.
+# 'test_compile' -- check by making a test program.
+# 'file_magic [regex]' -- check by looking for files in the library path
+# that respond to the $file_magic_cmd with the given extended regex.
+# If you have 'file' or equivalent on your system and you're not sure
+# whether 'pass_all' will *always* work, you probably want this one.
+
+case $host_os in
+aix[4-9]*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+beos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+bsdi[45]*)
+  lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)'
+  lt_cv_file_magic_cmd='/usr/bin/file -L'
+  lt_cv_file_magic_test_file=/shlib/libc.so
+  ;;
+
+cygwin*)
+  # func_win32_libid is a shell function defined in ltmain.sh
+  lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+  lt_cv_file_magic_cmd='func_win32_libid'
+  ;;
+
+mingw* | pw32*)
+  # Base MSYS/MinGW do not provide the 'file' command needed by
+  # func_win32_libid shell function, so use a weaker test based on 'objdump',
+  # unless we find 'file', for example because we are cross-compiling.
+  # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin.
+  if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then
+    lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+    lt_cv_file_magic_cmd='func_win32_libid'
+  else
+    # Keep this pattern in sync with the one in func_win32_libid.
+    lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+    lt_cv_file_magic_cmd='$OBJDUMP -f'
+  fi
+  ;;
+
+cegcc*)
+  # use the weaker test based on 'objdump'. See mingw*.
+  lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?'
+  lt_cv_file_magic_cmd='$OBJDUMP -f'
+  ;;
+
+darwin* | rhapsody*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+freebsd* | dragonfly*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    case $host_cpu in
+    i*86 )
+      # Not sure whether the presence of OpenBSD here was a mistake.
+      # Let's accept both of them until this is cleared up.
+      lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library'
+      lt_cv_file_magic_cmd=/usr/bin/file
+      lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*`
+      ;;
+    esac
+  else
+    lt_cv_deplibs_check_method=pass_all
+  fi
+  ;;
+
+gnu*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+haiku*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+hpux10.20* | hpux11*)
+  lt_cv_file_magic_cmd=/usr/bin/file
+  case $host_cpu in
+  ia64*)
+    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64'
+    lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so
+    ;;
+  hppa*64*)
+    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]'
+    lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl
+    ;;
+  *)
+    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9]\.[0-9]) shared library'
+    lt_cv_file_magic_test_file=/usr/lib/libc.sl
+    ;;
+  esac
+  ;;
+
+interix[3-9]*)
+  # PIC code is broken on Interix 3.x; that's why we match |\.a, not |_pic\.a, here.
+  lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$'
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $LD in
+  *-32|*"-32 ") libmagic=32-bit;;
+  *-n32|*"-n32 ") libmagic=N32;;
+  *-64|*"-64 ") libmagic=64-bit;;
+  *) libmagic=never-match;;
+  esac
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+# This must be Linux ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+netbsd*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$'
+  fi
+  ;;
+
+newos6*)
+  lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)'
+  lt_cv_file_magic_cmd=/usr/bin/file
+  lt_cv_file_magic_test_file=/usr/lib/libnls.so
+  ;;
+
+*nto* | *qnx*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+openbsd*)
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$'
+  fi
+  ;;
+
+osf3* | osf4* | osf5*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+rdos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+solaris*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv4 | sysv4.3*)
+  case $host_vendor in
+  motorola)
+    lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]'
+    lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*`
+    ;;
+  ncr)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  sequent)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )'
+    ;;
+  sni)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib"
+    lt_cv_file_magic_test_file=/lib/libc.so
+    ;;
+  siemens)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  pc)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  esac
+  ;;
+
+tpf*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+esac
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
+$as_echo "$lt_cv_deplibs_check_method" >&6; }
+file_magic_cmd=$lt_cv_file_magic_cmd
+deplibs_check_method=$lt_cv_deplibs_check_method
+test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+
+
+
+
+
+
+
+
+
+
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args.
+set dummy ${ac_tool_prefix}ar; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_AR+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$AR"; then
+  ac_cv_prog_AR="$AR" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_AR="${ac_tool_prefix}ar"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+AR=$ac_cv_prog_AR
+if test -n "$AR"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5
+$as_echo "$AR" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_AR"; then
+  ac_ct_AR=$AR
+  # Extract the first word of "ar", so it can be a program name with args.
+set dummy ar; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_AR+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_AR"; then
+  ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_AR="ar"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_AR=$ac_cv_prog_ac_ct_AR
+if test -n "$ac_ct_AR"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5
+$as_echo "$ac_ct_AR" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_AR" = x; then
+    AR="false"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    AR=$ac_ct_AR
+  fi
+else
+  AR="$ac_cv_prog_AR"
+fi
+
+test -z "$AR" && AR=ar
+test -z "$AR_FLAGS" && AR_FLAGS=cru
+
+
+
+
+
+
+
+
+
+
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
+set dummy ${ac_tool_prefix}strip; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_STRIP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$STRIP"; then
+  ac_cv_prog_STRIP="$STRIP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_STRIP="${ac_tool_prefix}strip"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+STRIP=$ac_cv_prog_STRIP
+if test -n "$STRIP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5
+$as_echo "$STRIP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_STRIP"; then
+  ac_ct_STRIP=$STRIP
+  # Extract the first word of "strip", so it can be a program name with args.
+set dummy strip; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_STRIP"; then
+  ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_STRIP="strip"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP
+if test -n "$ac_ct_STRIP"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5
+$as_echo "$ac_ct_STRIP" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_STRIP" = x; then
+    STRIP=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    STRIP=$ac_ct_STRIP
+  fi
+else
+  STRIP="$ac_cv_prog_STRIP"
+fi
+
+test -z "$STRIP" && STRIP=:
+
+
+
+
+
+
+if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args.
+set dummy ${ac_tool_prefix}ranlib; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_RANLIB+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$RANLIB"; then
+  ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+RANLIB=$ac_cv_prog_RANLIB
+if test -n "$RANLIB"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5
+$as_echo "$RANLIB" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_RANLIB"; then
+  ac_ct_RANLIB=$RANLIB
+  # Extract the first word of "ranlib", so it can be a program name with args.
+set dummy ranlib; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_RANLIB+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_RANLIB"; then
+  ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_RANLIB="ranlib"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB
+if test -n "$ac_ct_RANLIB"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5
+$as_echo "$ac_ct_RANLIB" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_RANLIB" = x; then
+    RANLIB=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    RANLIB=$ac_ct_RANLIB
+  fi
+else
+  RANLIB="$ac_cv_prog_RANLIB"
+fi
+
+test -z "$RANLIB" && RANLIB=:
+
+
+
+
+
+
+# Determine commands to create old-style static archives.
+old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs'
+old_postinstall_cmds='chmod 644 $oldlib'
+old_postuninstall_cmds=
+
+if test -n "$RANLIB"; then
+  case $host_os in
+  openbsd*)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$oldlib"
+    ;;
+  *)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$oldlib"
+    ;;
+  esac
+  old_archive_cmds="$old_archive_cmds~\$RANLIB \$oldlib"
+fi
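+
+# In libtool command variables like old_archive_cmds, "~" separates
+# commands run in sequence, so the value here is typically
+#   $AR $AR_FLAGS $oldlib$oldobjs~$RANLIB $oldlib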
+
+case $host_os in
+  darwin*)
+    lock_old_archive_extraction=yes ;;
+  *)
+    lock_old_archive_extraction=no ;;
+esac
+
+# If no C compiler was specified, use CC.
+LTCC=${LTCC-"$CC"}
+
+# If no C compiler flags were specified, use CFLAGS.
+LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
+
+# Allow CC to be a program name with arguments.
+compiler=$CC
+
+
+# Check for command to grab the raw symbol name followed by C symbol from nm.
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5
+$as_echo_n "checking command to parse $NM output from $compiler object... " >&6; }
+if ${lt_cv_sys_global_symbol_pipe+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+
+# These are sane defaults that work on at least a few old systems.
+# [They come from Ultrix.  What could be older than Ultrix?!! ;)]
+
+# Character class describing NM global symbol codes.
+symcode='[BCDEGRST]'
+
+# Regexp to match symbols that can be accessed directly from C.
+sympat='\([_A-Za-z][_A-Za-z0-9]*\)'
+
+# Define system-specific variables.
+case $host_os in
+aix*)
+  symcode='[BCDT]'
+  ;;
+cygwin* | mingw* | pw32* | cegcc*)
+  symcode='[ABCDGISTW]'
+  ;;
+hpux*)
+  if test "$host_cpu" = ia64; then
+    symcode='[ABCDEGRST]'
+  fi
+  ;;
+irix* | nonstopux*)
+  symcode='[BCDEGRST]'
+  ;;
+osf*)
+  symcode='[BCDEGQRST]'
+  ;;
+solaris*)
+  symcode='[BDRT]'
+  ;;
+sco3.2v5*)
+  symcode='[DT]'
+  ;;
+sysv4.2uw2*)
+  symcode='[DT]'
+  ;;
+sysv5* | sco5v6* | unixware* | OpenUNIX*)
+  symcode='[ABDT]'
+  ;;
+sysv4)
+  symcode='[DFNSTU]'
+  ;;
+esac
+
+# If we're using GNU nm, then use its standard symbol codes.
+case `$NM -V 2>&1` in
+*GNU* | *'with BFD'*)
+  symcode='[ABCDGIRSTW]' ;;
+esac
+
+# Transform an extracted symbol line into a proper C declaration.
+# Some systems (esp. on ia64) link data and code symbols differently,
+# so use this general approach.
+lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
+
+# Transform an extracted symbol line into symbol name and symbol address
+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/  {\"\2\", (void *) \&\2},/p'"
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/  {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/  {\"lib\2\", (void *) \&\2},/p'"
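+
+# For instance, a symbol-pipe output line such as
+#   T nm_test_func nm_test_func
+# is rewritten by global_symbol_to_cdecl into
+#   extern int nm_test_func();
+# while a data line like "B nm_test_var nm_test_var" becomes
+#   extern char nm_test_var;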
+
+# Handle CRLF in mingw tool chain
+opt_cr=
+case $build_os in
+mingw*)
+  opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp
+  ;;
+esac
+
+# Try without a prefix underscore, then with it.
+for ac_symprfx in "" "_"; do
+
+  # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol.
+  symxfrm="\\1 $ac_symprfx\\2 \\2"
+
+  # Write the raw and C identifiers.
+  if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+    # Fake it for dumpbin and say T for any non-static function
+    # and D for any global variable.
+    # Also find C++ and __fastcall symbols from MSVC++,
+    # which start with @ or ?.
+    lt_cv_sys_global_symbol_pipe="$AWK '"\
+"     {last_section=section; section=\$ 3};"\
+"     /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\
+"     \$ 0!~/External *\|/{next};"\
+"     / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\
+"     {if(hide[section]) next};"\
+"     {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\
+"     {split(\$ 0, a, /\||\r/); split(a[2], s)};"\
+"     s[1]~/^[@?]/{print s[1], s[1]; next};"\
+"     s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\
+"     ' prfx=^$ac_symprfx"
+  else
+    lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[	 ]\($symcode$symcode*\)[	 ][	 ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+  fi
+
+  # Check to see that the pipe works correctly.
+  pipe_works=no
+
+  rm -r -f conftest*
+  cat > conftest.$ac_ext <<_LT_EOF
+#ifdef __cplusplus
+extern "C" {
+#endif
+char nm_test_var;
+void nm_test_func(void);
+void nm_test_func(void){}
+#ifdef __cplusplus
+}
+#endif
+int main(){nm_test_var='a';nm_test_func();return(0);}
+_LT_EOF
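+
+# On a typical ELF system, $NM on the object built from this program
+# should list nm_test_func as a text ("T") symbol and nm_test_var as
+# an uninitialized-data ("B" or "C") symbol; the greps below check
+# that both survive the symbol pipe.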
+
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    # Now try to grab the symbols.
+    nlist=conftest.nm
+    if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist\""; } >&5
+  (eval $NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && test -s "$nlist"; then
+      # Try sorting and uniquifying the output.
+      if sort "$nlist" | uniq > "$nlist"T; then
+	mv -f "$nlist"T "$nlist"
+      else
+	rm -f "$nlist"T
+      fi
+
+      # Make sure that we snagged all the symbols we need.
+      if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+	if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+	  cat <<_LT_EOF > conftest.$ac_ext
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+_LT_EOF
+	  # Now generate the symbol file.
+	  eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext'
+
+	  cat <<_LT_EOF >> conftest.$ac_ext
+
+/* The mapping between symbol names and symbols.  */
+const struct {
+  const char *name;
+  void       *address;
+}
+lt__PROGRAM__LTX_preloaded_symbols[] =
+{
+  { "@PROGRAM@", (void *) 0 },
+_LT_EOF
+	  $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/  {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext
+	  cat <<\_LT_EOF >> conftest.$ac_ext
+  {0, (void *) 0}
+};
+
+/* This works around a problem in the FreeBSD linker */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+  return lt__PROGRAM__LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+_LT_EOF
+	  # Now try linking the two files.
+	  mv conftest.$ac_objext conftstm.$ac_objext
+	  lt_save_LIBS="$LIBS"
+	  lt_save_CFLAGS="$CFLAGS"
+	  LIBS="conftstm.$ac_objext"
+	  CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
+	  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+  (eval $ac_link) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && test -s conftest${ac_exeext}; then
+	    pipe_works=yes
+	  fi
+	  LIBS="$lt_save_LIBS"
+	  CFLAGS="$lt_save_CFLAGS"
+	else
+	  echo "cannot find nm_test_func in $nlist" >&5
+	fi
+      else
+	echo "cannot find nm_test_var in $nlist" >&5
+      fi
+    else
+      echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5
+    fi
+  else
+    echo "$progname: failed program was:" >&5
+    cat conftest.$ac_ext >&5
+  fi
+  rm -rf conftest* conftst*
+
+  # Do not use the global_symbol_pipe unless it works.
+  if test "$pipe_works" = yes; then
+    break
+  else
+    lt_cv_sys_global_symbol_pipe=
+  fi
+done
+
+fi
+
+if test -z "$lt_cv_sys_global_symbol_pipe"; then
+  lt_cv_sys_global_symbol_to_cdecl=
+fi
+if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5
+$as_echo "failed" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5
+$as_echo "ok" >&6; }
+fi
+
+# Check whether --enable-libtool-lock was given.
+if test "${enable_libtool_lock+set}" = set; then :
+  enableval=$enable_libtool_lock;
+fi
+
+test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes
+
+# Some flags need to be propagated to the compiler or linker for good
+# libtool support.
+case $host in
+ia64-*-hpux*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    case `/usr/bin/file conftest.$ac_objext` in
+      *ELF-32*)
+	HPUX_IA64_MODE="32"
+	;;
+      *ELF-64*)
+	HPUX_IA64_MODE="64"
+	;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+*-*-irix6*)
+  # Find out which ABI we are using.
+  echo '#line '$LINENO' "configure"' > conftest.$ac_ext
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    if test "$lt_cv_prog_gnu_ld" = yes; then
+      case `/usr/bin/file conftest.$ac_objext` in
+	*32-bit*)
+	  LD="${LD-ld} -melf32bsmip"
+	  ;;
+	*N32*)
+	  LD="${LD-ld} -melf32bmipn32"
+	  ;;
+	*64-bit*)
+	  LD="${LD-ld} -melf64bmip"
+	;;
+      esac
+    else
+      case `/usr/bin/file conftest.$ac_objext` in
+	*32-bit*)
+	  LD="${LD-ld} -32"
+	  ;;
+	*N32*)
+	  LD="${LD-ld} -n32"
+	  ;;
+	*64-bit*)
+	  LD="${LD-ld} -64"
+	  ;;
+      esac
+    fi
+  fi
+  rm -rf conftest*
+  ;;
+
+x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \
+s390*-*linux*|s390*-*tpf*|sparc*-*linux*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    case `/usr/bin/file conftest.o` in
+      *32-bit*)
+	case $host in
+	  x86_64-*kfreebsd*-gnu)
+	    LD="${LD-ld} -m elf_i386_fbsd"
+	    ;;
+	  x86_64-*linux*)
+	    LD="${LD-ld} -m elf_i386"
+	    ;;
+	  ppc64-*linux*|powerpc64-*linux*)
+	    LD="${LD-ld} -m elf32ppclinux"
+	    ;;
+	  s390x-*linux*)
+	    LD="${LD-ld} -m elf_s390"
+	    ;;
+	  sparc64-*linux*)
+	    LD="${LD-ld} -m elf32_sparc"
+	    ;;
+	esac
+	;;
+      *64-bit*)
+	case $host in
+	  x86_64-*kfreebsd*-gnu)
+	    LD="${LD-ld} -m elf_x86_64_fbsd"
+	    ;;
+	  x86_64-*linux*)
+	    LD="${LD-ld} -m elf_x86_64"
+	    ;;
+	  ppc*-*linux*|powerpc*-*linux*)
+	    LD="${LD-ld} -m elf64ppc"
+	    ;;
+	  s390*-*linux*|s390*-*tpf*)
+	    LD="${LD-ld} -m elf64_s390"
+	    ;;
+	  sparc*-*linux*)
+	    LD="${LD-ld} -m elf64_sparc"
+	    ;;
+	esac
+	;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+
+*-*-sco3.2v5*)
+  # On SCO OpenServer 5, we need -belf to get full-featured binaries.
+  SAVE_CFLAGS="$CFLAGS"
+  CFLAGS="$CFLAGS -belf"
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5
+$as_echo_n "checking whether the C compiler needs -belf... " >&6; }
+if ${lt_cv_cc_needs_belf+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+     cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  lt_cv_cc_needs_belf=yes
+else
+  lt_cv_cc_needs_belf=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+     ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5
+$as_echo "$lt_cv_cc_needs_belf" >&6; }
+  if test x"$lt_cv_cc_needs_belf" != x"yes"; then
+    # This is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf.
+    CFLAGS="$SAVE_CFLAGS"
+  fi
+  ;;
+sparc*-*solaris*)
+  # Find out which ABI we are using.
+  echo 'int i;' > conftest.$ac_ext
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }; then
+    case `/usr/bin/file conftest.o` in
+    *64-bit*)
+      case $lt_cv_prog_gnu_ld in
+      yes*) LD="${LD-ld} -m elf64_sparc" ;;
+      *)
+	if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
+	  LD="${LD-ld} -64"
+	fi
+	;;
+      esac
+      ;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+esac
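+
+# The ABI probes above all use the same technique: compile a trivial
+# source file, classify the object with /usr/bin/file, and choose a
+# matching linker emulation; e.g. a 32-bit object on x86_64 GNU/Linux
+# ("ELF 32-bit LSB relocatable") selects LD="ld -m elf_i386".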
+
+need_locks="$enable_libtool_lock"
+
+
+  case $host_os in
+    rhapsody* | darwin*)
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args.
+set dummy ${ac_tool_prefix}dsymutil; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_DSYMUTIL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$DSYMUTIL"; then
+  ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+DSYMUTIL=$ac_cv_prog_DSYMUTIL
+if test -n "$DSYMUTIL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5
+$as_echo "$DSYMUTIL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_DSYMUTIL"; then
+  ac_ct_DSYMUTIL=$DSYMUTIL
+  # Extract the first word of "dsymutil", so it can be a program name with args.
+set dummy dsymutil; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_DSYMUTIL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_DSYMUTIL"; then
+  ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_DSYMUTIL="dsymutil"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL
+if test -n "$ac_ct_DSYMUTIL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5
+$as_echo "$ac_ct_DSYMUTIL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_DSYMUTIL" = x; then
+    DSYMUTIL=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    DSYMUTIL=$ac_ct_DSYMUTIL
+  fi
+else
+  DSYMUTIL="$ac_cv_prog_DSYMUTIL"
+fi
+
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args.
+set dummy ${ac_tool_prefix}nmedit; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_NMEDIT+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$NMEDIT"; then
+  ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+NMEDIT=$ac_cv_prog_NMEDIT
+if test -n "$NMEDIT"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5
+$as_echo "$NMEDIT" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_NMEDIT"; then
+  ac_ct_NMEDIT=$NMEDIT
+  # Extract the first word of "nmedit", so it can be a program name with args.
+set dummy nmedit; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_NMEDIT+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_NMEDIT"; then
+  ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_NMEDIT="nmedit"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT
+if test -n "$ac_ct_NMEDIT"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5
+$as_echo "$ac_ct_NMEDIT" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_NMEDIT" = x; then
+    NMEDIT=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    NMEDIT=$ac_ct_NMEDIT
+  fi
+else
+  NMEDIT="$ac_cv_prog_NMEDIT"
+fi
+
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args.
+set dummy ${ac_tool_prefix}lipo; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_LIPO+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$LIPO"; then
+  ac_cv_prog_LIPO="$LIPO" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_LIPO="${ac_tool_prefix}lipo"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+LIPO=$ac_cv_prog_LIPO
+if test -n "$LIPO"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5
+$as_echo "$LIPO" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_LIPO"; then
+  ac_ct_LIPO=$LIPO
+  # Extract the first word of "lipo", so it can be a program name with args.
+set dummy lipo; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_LIPO+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_LIPO"; then
+  ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_LIPO="lipo"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO
+if test -n "$ac_ct_LIPO"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5
+$as_echo "$ac_ct_LIPO" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_LIPO" = x; then
+    LIPO=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    LIPO=$ac_ct_LIPO
+  fi
+else
+  LIPO="$ac_cv_prog_LIPO"
+fi
+
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args.
+set dummy ${ac_tool_prefix}otool; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_OTOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$OTOOL"; then
+  ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_OTOOL="${ac_tool_prefix}otool"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+OTOOL=$ac_cv_prog_OTOOL
+if test -n "$OTOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5
+$as_echo "$OTOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_OTOOL"; then
+  ac_ct_OTOOL=$OTOOL
+  # Extract the first word of "otool", so it can be a program name with args.
+set dummy otool; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_OTOOL+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_OTOOL"; then
+  ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_OTOOL="otool"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL
+if test -n "$ac_ct_OTOOL"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5
+$as_echo "$ac_ct_OTOOL" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_OTOOL" = x; then
+    OTOOL=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    OTOOL=$ac_ct_OTOOL
+  fi
+else
+  OTOOL="$ac_cv_prog_OTOOL"
+fi
+
+    if test -n "$ac_tool_prefix"; then
+  # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args.
+set dummy ${ac_tool_prefix}otool64; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_OTOOL64+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$OTOOL64"; then
+  ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+OTOOL64=$ac_cv_prog_OTOOL64
+if test -n "$OTOOL64"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5
+$as_echo "$OTOOL64" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+fi
+if test -z "$ac_cv_prog_OTOOL64"; then
+  ac_ct_OTOOL64=$OTOOL64
+  # Extract the first word of "otool64", so it can be a program name with args.
+set dummy otool64; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_OTOOL64+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$ac_ct_OTOOL64"; then
+  ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_ac_ct_OTOOL64="otool64"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64
+if test -n "$ac_ct_OTOOL64"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5
+$as_echo "$ac_ct_OTOOL64" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+  if test "x$ac_ct_OTOOL64" = x; then
+    OTOOL64=":"
+  else
+    case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+    OTOOL64=$ac_ct_OTOOL64
+  fi
+else
+  OTOOL64="$ac_cv_prog_OTOOL64"
+fi
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5
+$as_echo_n "checking for -single_module linker flag... " >&6; }
+if ${lt_cv_apple_cc_single_mod+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_apple_cc_single_mod=no
+      if test -z "${LT_MULTI_MODULE}"; then
+	# By default we will add the -single_module flag. You can override
+	# by either setting the environment variable LT_MULTI_MODULE
+	# non-empty at configure time, or by adding -multi_module to the
+	# link flags.
+	rm -rf libconftest.dylib*
+	echo "int foo(void){return 1;}" > conftest.c
+	echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+-dynamiclib -Wl,-single_module conftest.c" >&5
+	$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+	  -dynamiclib -Wl,-single_module conftest.c 2>conftest.err
+        _lt_result=$?
+	if test -f libconftest.dylib && test ! -s conftest.err && test $_lt_result = 0; then
+	  lt_cv_apple_cc_single_mod=yes
+	else
+	  cat conftest.err >&5
+	fi
+	rm -rf libconftest.dylib*
+	rm -f conftest.*
+      fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5
+$as_echo "$lt_cv_apple_cc_single_mod" >&6; }
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5
+$as_echo_n "checking for -exported_symbols_list linker flag... " >&6; }
+if ${lt_cv_ld_exported_symbols_list+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_ld_exported_symbols_list=no
+      save_LDFLAGS=$LDFLAGS
+      echo "_main" > conftest.sym
+      LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym"
+      cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  lt_cv_ld_exported_symbols_list=yes
+else
+  lt_cv_ld_exported_symbols_list=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+	LDFLAGS="$save_LDFLAGS"
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5
+$as_echo "$lt_cv_ld_exported_symbols_list" >&6; }
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -force_load linker flag" >&5
+$as_echo_n "checking for -force_load linker flag... " >&6; }
+if ${lt_cv_ld_force_load+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_ld_force_load=no
+      cat > conftest.c << _LT_EOF
+int forced_loaded() { return 2;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&5
+      $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
+      echo "$AR cru libconftest.a conftest.o" >&5
+      $AR cru libconftest.a conftest.o 2>&5
+      echo "$RANLIB libconftest.a" >&5
+      $RANLIB libconftest.a 2>&5
+      cat > conftest.c << _LT_EOF
+int main() { return 0;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&5
+      $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err
+      _lt_result=$?
+      if test -f conftest && test ! -s conftest.err && test $_lt_result = 0 && $GREP forced_load conftest 2>&1 >/dev/null; then
+	lt_cv_ld_force_load=yes
+      else
+	cat conftest.err >&5
+      fi
+        rm -f conftest.err libconftest.a conftest conftest.c
+        rm -rf conftest.dSYM
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_force_load" >&5
+$as_echo "$lt_cv_ld_force_load" >&6; }
+    case $host_os in
+    rhapsody* | darwin1.[012])
+      _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;;
+    darwin1.*)
+      _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+    darwin*) # darwin 5.x and later
+      # On 10.5 or later, the deployment target defaults to the
+      # running OS version; on x86 systems running 10.4 it defaults
+      # to 10.4.  Don't you love it?
+      case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
+	10.0,*86*-darwin8*|10.0,*-darwin[91]*)
+	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+	10.[012]*)
+	  _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
+	10.*)
+	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
+      esac
+    ;;
+  esac
+    if test "$lt_cv_apple_cc_single_mod" = "yes"; then
+      _lt_dar_single_mod='$single_module'
+    fi
+    if test "$lt_cv_ld_exported_symbols_list" = "yes"; then
+      _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym'
+    else
+      _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}'
+    fi
+    if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then
+      _lt_dsymutil='~$DSYMUTIL $lib || :'
+    else
+      _lt_dsymutil=
+    fi
+    ;;
+  esac
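+
+# The _lt_dar_* fragments assembled above are later spliced into the
+# Darwin archive/link commands: _lt_dar_allow_undefined relaxes
+# undefined-symbol handling, _lt_dar_export_syms applies the exported
+# symbol list (via -exported_symbols_list or nmedit -s), and
+# _lt_dsymutil optionally runs dsymutil to emit debug symbols.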
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5
+$as_echo_n "checking how to run the C preprocessor... " >&6; }
+# On Suns, sometimes $CPP names a directory.
+if test -n "$CPP" && test -d "$CPP"; then
+  CPP=
+fi
+if test -z "$CPP"; then
+  if ${ac_cv_prog_CPP+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+      # Double quotes because CPP needs to be expanded
+    for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp"
+    do
+      ac_preproc_ok=false
+for ac_c_preproc_warn_flag in '' yes
+do
+  # Use a header file that comes with gcc, so configuring glibc
+  # with a fresh cross-compiler works.
+  # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+  # <limits.h> exists even on freestanding compilers.
+  # On the NeXT, cc -E runs the code through the compiler's parser,
+  # not just through cpp. "Syntax error" is here to catch this case.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
+		     Syntax error
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+
+else
+  # Broken: fails on valid input.
+continue
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+
+  # OK, works on sane cases.  Now check whether nonexistent headers
+  # can be detected and how.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <ac_nonexistent.h>
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+  # Broken: success on invalid input.
+continue
+else
+  # Passes both tests.
+ac_preproc_ok=:
+break
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+
+done
+# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
+rm -f conftest.i conftest.err conftest.$ac_ext
+if $ac_preproc_ok; then :
+  break
+fi
+
+    done
+    ac_cv_prog_CPP=$CPP
+
+fi
+  CPP=$ac_cv_prog_CPP
+else
+  ac_cv_prog_CPP=$CPP
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5
+$as_echo "$CPP" >&6; }
+ac_preproc_ok=false
+for ac_c_preproc_warn_flag in '' yes
+do
+  # Use a header file that comes with gcc, so configuring glibc
+  # with a fresh cross-compiler works.
+  # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
+  # <limits.h> exists even on freestanding compilers.
+  # On the NeXT, cc -E runs the code through the compiler's parser,
+  # not just through cpp. "Syntax error" is here to catch this case.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#ifdef __STDC__
+# include <limits.h>
+#else
+# include <assert.h>
+#endif
+		     Syntax error
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+
+else
+  # Broken: fails on valid input.
+continue
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+
+  # OK, works on sane cases.  Now check whether nonexistent headers
+  # can be detected and how.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <ac_nonexistent.h>
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"; then :
+  # Broken: success on invalid input.
+continue
+else
+  # Passes both tests.
+ac_preproc_ok=:
+break
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+
+done
+# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
+rm -f conftest.i conftest.err conftest.$ac_ext
+if $ac_preproc_ok; then :
+
+else
+  { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+as_fn_error $? "C preprocessor \"$CPP\" fails sanity check
+See \`config.log' for more details" "$LINENO" 5; }
+fi
+
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5
+$as_echo_n "checking for ANSI C header files... " >&6; }
+if ${ac_cv_header_stdc+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdlib.h>
+#include <stdarg.h>
+#include <string.h>
+#include <float.h>
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_header_stdc=yes
+else
+  ac_cv_header_stdc=no
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+
+if test $ac_cv_header_stdc = yes; then
+  # SunOS 4.x string.h does not declare mem*, contrary to ANSI.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <string.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+  $EGREP "memchr" >/dev/null 2>&1; then :
+
+else
+  ac_cv_header_stdc=no
+fi
+rm -f conftest*
+
+fi
+
+if test $ac_cv_header_stdc = yes; then
+  # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI.
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <stdlib.h>
+
+_ACEOF
+if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
+  $EGREP "free" >/dev/null 2>&1; then :
+
+else
+  ac_cv_header_stdc=no
+fi
+rm -f conftest*
+
+fi
+
+if test $ac_cv_header_stdc = yes; then
+  # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi.
+  if test "$cross_compiling" = yes; then :
+  :
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <ctype.h>
+#include <stdlib.h>
+#if ((' ' & 0x0FF) == 0x020)
+# define ISLOWER(c) ('a' <= (c) && (c) <= 'z')
+# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c))
+#else
+# define ISLOWER(c) \
+		   (('a' <= (c) && (c) <= 'i') \
+		     || ('j' <= (c) && (c) <= 'r') \
+		     || ('s' <= (c) && (c) <= 'z'))
+# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c))
+#endif
+
+#define XOR(e, f) (((e) && !(f)) || (!(e) && (f)))
+int
+main ()
+{
+  int i;
+  for (i = 0; i < 256; i++)
+    if (XOR (islower (i), ISLOWER (i))
+	|| toupper (i) != TOUPPER (i))
+      return 2;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_run "$LINENO"; then :
+
+else
+  ac_cv_header_stdc=no
+fi
+rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
+  conftest.$ac_objext conftest.beam conftest.$ac_ext
+fi
+
+fi
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5
+$as_echo "$ac_cv_header_stdc" >&6; }
+if test $ac_cv_header_stdc = yes; then
+
+$as_echo "#define STDC_HEADERS 1" >>confdefs.h
+
+fi
+
+# On IRIX 5.3, sys/types.h and inttypes.h conflict.
+for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \
+		  inttypes.h stdint.h unistd.h
+do :
+  as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
+ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default
+"
+if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
+  cat >>confdefs.h <<_ACEOF
+#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
+_ACEOF
+
+fi
+
+done
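+
+# Each header found above appends a #define to confdefs.h, e.g.
+#   #define HAVE_STDINT_H 1
+# which later tests and the generated config headers can key off.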
+
+
+for ac_header in dlfcn.h
+do :
+  ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default
+"
+if test "x$ac_cv_header_dlfcn_h" = xyes; then :
+  cat >>confdefs.h <<_ACEOF
+#define HAVE_DLFCN_H 1
+_ACEOF
+
+fi
+
+done
+
+
+
+
+
+# Set options
+
+
+
+        enable_dlopen=no
+
+
+  enable_win32_dll=no
+
+
+            # Check whether --enable-shared was given.
+if test "${enable_shared+set}" = set; then :
+  enableval=$enable_shared; p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_shared=yes ;;
+    no) enable_shared=no ;;
+    *)
+      enable_shared=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_shared=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac
+else
+  enable_shared=yes
+fi
+
+
+
+
+
+
+
+
+
+  # Check whether --enable-static was given.
+if test "${enable_static+set}" = set; then :
+  enableval=$enable_static; p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_static=yes ;;
+    no) enable_static=no ;;
+    *)
+     enable_static=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_static=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac
+else
+  enable_static=yes
+fi
+
+
+
+
+
+
+
+
+
+
+# Check whether --with-pic was given.
+if test "${with_pic+set}" = set; then :
+  withval=$with_pic; pic_mode="$withval"
+else
+  pic_mode=default
+fi
+
+
+test -z "$pic_mode" && pic_mode=default
+
+
+
+
+
+
+
+  # Check whether --enable-fast-install was given.
+if test "${enable_fast_install+set}" = set; then :
+  enableval=$enable_fast_install; p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_fast_install=yes ;;
+    no) enable_fast_install=no ;;
+    *)
+      enable_fast_install=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
+      for pkg in $enableval; do
+	IFS="$lt_save_ifs"
+	if test "X$pkg" = "X$p"; then
+	  enable_fast_install=yes
+	fi
+      done
+      IFS="$lt_save_ifs"
+      ;;
+    esac
+else
+  enable_fast_install=yes
+fi
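+
+# Besides yes/no, --enable-shared, --enable-static and
+# --enable-fast-install each accept a list of package names
+# (comma or space separated); the feature is then enabled only
+# when a listed name matches ${PACKAGE}, e.g.
+#   ./configure --enable-shared=mypkg   (hypothetical package name)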
+
+
+
+
+
+
+
+
+
+
+
+# This can be used to rebuild libtool when needed
+LIBTOOL_DEPS="$ltmain"
+
+# Always use our own libtool.
+LIBTOOL='$(SHELL) $(top_builddir)/libtool'
+
+test -z "$LN_S" && LN_S="ln -s"
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+if test -n "${ZSH_VERSION+set}" ; then
+   setopt NO_GLOB_SUBST
+fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5
+$as_echo_n "checking for objdir... " >&6; }
+if ${lt_cv_objdir+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  rm -f .libs 2>/dev/null
+mkdir .libs 2>/dev/null
+if test -d .libs; then
+  lt_cv_objdir=.libs
+else
+  # MS-DOS does not allow filenames that begin with a dot.
+  lt_cv_objdir=_libs
+fi
+rmdir .libs 2>/dev/null
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5
+$as_echo "$lt_cv_objdir" >&6; }
+objdir=$lt_cv_objdir
+
+
+
+
+
+cat >>confdefs.h <<_ACEOF
+#define LT_OBJDIR "$lt_cv_objdir/"
+_ACEOF
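+
+# On most systems this appends
+#   #define LT_OBJDIR ".libs/"
+# to confdefs.h, naming the subdirectory where libtool keeps
+# its objects and libraries.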
+
+
+
+
+case $host_os in
+aix3*)
+  # AIX sometimes has problems with the GCC collect2 program.  For some
+  # reason, if we set the COLLECT_NAMES environment variable, the problems
+  # vanish in a puff of smoke.
+  if test "X${COLLECT_NAMES+set}" != Xset; then
+    COLLECT_NAMES=
+    export COLLECT_NAMES
+  fi
+  ;;
+esac
+
+# Global variables:
+ofile=libtool
+can_build_shared=yes
+
+# All known linkers require a `.a' archive for static linking (except MSVC,
+# which needs '.lib').
+libext=a
+
+with_gnu_ld="$lt_cv_prog_gnu_ld"
+
+old_CC="$CC"
+old_CFLAGS="$CFLAGS"
+
+# Set sane defaults for various variables
+test -z "$CC" && CC=cc
+test -z "$LTCC" && LTCC=$CC
+test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS
+test -z "$LD" && LD=ld
+test -z "$ac_objext" && ac_objext=o
+
+for cc_temp in $compiler""; do
+  case $cc_temp in
+    compile | *[\\/]compile | ccache | *[\\/]ccache ) ;;
+    distcc | *[\\/]distcc | purify | *[\\/]purify ) ;;
+    \-*) ;;
+    *) break;;
+  esac
+done
+cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
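+
+# For example, with compiler="ccache sparc-sun-solaris2-gcc -O2"
+# (and host_alias=sparc-sun-solaris2), the loop above skips the
+# ccache wrapper and cc_basename ends up as plain "gcc" once the
+# path and "$host_alias-" prefix are stripped.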
+
+
+# Only perform the check for "file" if the check method requires it
+test -z "$MAGIC_CMD" && MAGIC_CMD=file
+case $deplibs_check_method in
+file_magic*)
+  if test "$file_magic_cmd" = '$MAGIC_CMD'; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5
+$as_echo_n "checking for ${ac_tool_prefix}file... " >&6; }
+if ${lt_cv_path_MAGIC_CMD+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  case $MAGIC_CMD in
+[\\/*] |  ?:[\\/]*)
+  lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
+  ;;
+*)
+  lt_save_MAGIC_CMD="$MAGIC_CMD"
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+  ac_dummy="/usr/bin$PATH_SEPARATOR$PATH"
+  for ac_dir in $ac_dummy; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f $ac_dir/${ac_tool_prefix}file; then
+      lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file"
+      if test -n "$file_magic_test_file"; then
+	case $deplibs_check_method in
+	"file_magic "*)
+	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
+	  MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
+	    $EGREP "$file_magic_regex" > /dev/null; then
+	    :
+	  else
+	    cat <<_LT_EOF 1>&2
+
+*** Warning: the command libtool uses to detect shared libraries,
+*** $file_magic_cmd, produces output that libtool cannot recognize.
+*** The result is that libtool may fail to recognize shared libraries
+*** as such.  This will affect the creation of libtool libraries that
+*** depend on shared libraries, but programs linked with such libtool
+*** libraries will work regardless of this problem.  Nevertheless, you
+*** may want to report the problem to your system manager and/or to
+*** bug-libtool at gnu.org
+
+_LT_EOF
+	  fi ;;
+	esac
+      fi
+      break
+    fi
+  done
+  IFS="$lt_save_ifs"
+  MAGIC_CMD="$lt_save_MAGIC_CMD"
+  ;;
+esac
+fi
+
+MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+if test -n "$MAGIC_CMD"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5
+$as_echo "$MAGIC_CMD" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+
+
+
+if test -z "$lt_cv_path_MAGIC_CMD"; then
+  if test -n "$ac_tool_prefix"; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5
+$as_echo_n "checking for file... " >&6; }
+if ${lt_cv_path_MAGIC_CMD+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  case $MAGIC_CMD in
+[\\/*] |  ?:[\\/]*)
+  lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
+  ;;
+*)
+  lt_save_MAGIC_CMD="$MAGIC_CMD"
+  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
+  ac_dummy="/usr/bin$PATH_SEPARATOR$PATH"
+  for ac_dir in $ac_dummy; do
+    IFS="$lt_save_ifs"
+    test -z "$ac_dir" && ac_dir=.
+    if test -f $ac_dir/file; then
+      lt_cv_path_MAGIC_CMD="$ac_dir/file"
+      if test -n "$file_magic_test_file"; then
+	case $deplibs_check_method in
+	"file_magic "*)
+	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
+	  MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
+	    $EGREP "$file_magic_regex" > /dev/null; then
+	    :
+	  else
+	    cat <<_LT_EOF 1>&2
+
+*** Warning: the command libtool uses to detect shared libraries,
+*** $file_magic_cmd, produces output that libtool cannot recognize.
+*** The result is that libtool may fail to recognize shared libraries
+*** as such.  This will affect the creation of libtool libraries that
+*** depend on shared libraries, but programs linked with such libtool
+*** libraries will work regardless of this problem.  Nevertheless, you
+*** may want to report the problem to your system manager and/or to
+*** bug-libtool at gnu.org
+
+_LT_EOF
+	  fi ;;
+	esac
+      fi
+      break
+    fi
+  done
+  IFS="$lt_save_ifs"
+  MAGIC_CMD="$lt_save_MAGIC_CMD"
+  ;;
+esac
+fi
+
+MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
+if test -n "$MAGIC_CMD"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5
+$as_echo "$MAGIC_CMD" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+  else
+    MAGIC_CMD=:
+  fi
+fi
+
+  fi
+  ;;
+esac
+
+# Use C for the default configuration in the libtool script
+
+lt_save_CC="$CC"
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+
+# Source file extension for C test sources.
+ac_ext=c
+
+# Object file extension for compiled C test sources.
+objext=o
+objext=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="int some_variable = 0;"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='int main(){return(0);}'
+
+
+
+
+
+
+
+# If no C compiler was specified, use CC.
+LTCC=${LTCC-"$CC"}
+
+# If no C compiler flags were specified, use CFLAGS.
+LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
+
+# Allow CC to be a program name with arguments.
+compiler=$CC
+
+# Save the default compiler, since it gets overwritten when the other
+# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
+compiler_DEFAULT=$CC
+
+# save warnings/boilerplate of simple test code
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_compile_test_code" >conftest.$ac_ext
+eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_compiler_boilerplate=`cat conftest.err`
+$RM -r conftest*
+
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_link_test_code" >conftest.$ac_ext
+eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_linker_boilerplate=`cat conftest.err`
+$RM -r conftest*
+
+
+if test -n "$compiler"; then
+
+lt_prog_compiler_no_builtin_flag=
+
+if test "$GCC" = yes; then
+  case $cc_basename in
+  nvcc*)
+    lt_prog_compiler_no_builtin_flag=' -Xcompiler -fno-builtin' ;;
+  *)
+    lt_prog_compiler_no_builtin_flag=' -fno-builtin' ;;
+  esac
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5
+$as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; }
+if ${lt_cv_prog_compiler_rtti_exceptions+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_rtti_exceptions=no
+   ac_outfile=conftest.$ac_objext
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+   lt_compiler_flag="-fno-rtti -fno-exceptions"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   # The option is referenced via a variable to avoid confusing sed.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
+   (eval "$lt_compile" 2>conftest.err)
+   ac_status=$?
+   cat conftest.err >&5
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   if (exit $ac_status) && test -s "$ac_outfile"; then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings other than the usual output.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
+     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
+       lt_cv_prog_compiler_rtti_exceptions=yes
+     fi
+   fi
+   $RM -r conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5
+$as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; }
+
+if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then
+    lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions"
+else
+    :
+fi
+
+fi
+
+
+
+
+
+
+  lt_prog_compiler_wl=
+lt_prog_compiler_pic=
+lt_prog_compiler_static=
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
+$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
+
+  if test "$GCC" = yes; then
+    lt_prog_compiler_wl='-Wl,'
+    lt_prog_compiler_static='-static'
+
+    case $host_os in
+      aix*)
+      # All AIX code is PIC.
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	lt_prog_compiler_static='-Bstatic'
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            lt_prog_compiler_pic='-fPIC'
+        ;;
+      m68k)
+            # FIXME: we need at least 68020 code to build shared libraries, but
+            # adding the `-m68020' flag to GCC prevents building anything better,
+            # like `-m68040'.
+            lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4'
+        ;;
+      esac
+      ;;
+
+    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+      # PIC is the default for these OSes.
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      # Although the cygwin gcc ignores -fPIC, we still need this for old-style
+      # (--disable-auto-import) libraries
+      lt_prog_compiler_pic='-DDLL_EXPORT'
+      ;;
+
+    darwin* | rhapsody*)
+      # PIC is the default on this platform
+      # Common symbols not allowed in MH_DYLIB files
+      lt_prog_compiler_pic='-fno-common'
+      ;;
+
+    haiku*)
+      # PIC is the default for Haiku.
+      # The "-static" flag exists, but is broken.
+      lt_prog_compiler_static=
+      ;;
+
+    hpux*)
+      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
+      # sets the default TLS model and affects inlining.
+      case $host_cpu in
+      hppa*64*)
+	# +Z the default
+	;;
+      *)
+	lt_prog_compiler_pic='-fPIC'
+	;;
+      esac
+      ;;
+
+    interix[3-9]*)
+      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+      # Instead, we relocate shared libraries at runtime.
+      ;;
+
+    msdosdjgpp*)
+      # Just because we use GCC doesn't mean we suddenly get shared libraries
+      # on systems that don't support them.
+      lt_prog_compiler_can_build_shared=no
+      enable_shared=no
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but we need to define the -shared option too,
+      # otherwise it will core dump.
+      lt_prog_compiler_pic='-fPIC -shared'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	lt_prog_compiler_pic=-Kconform_pic
+      fi
+      ;;
+
+    *)
+      lt_prog_compiler_pic='-fPIC'
+      ;;
+    esac
+
+    case $cc_basename in
+    nvcc*) # Cuda Compiler Driver 2.2
+      lt_prog_compiler_wl='-Xlinker '
+      lt_prog_compiler_pic='-Xcompiler -fPIC'
+      ;;
+    esac
+  else
+    # PORTME Check for flag to pass linker flags through the system compiler.
+    case $host_os in
+    aix*)
+      lt_prog_compiler_wl='-Wl,'
+      if test "$host_cpu" = ia64; then
+	# AIX 5 now supports IA64 processor
+	lt_prog_compiler_static='-Bstatic'
+      else
+	lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp'
+      fi
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      lt_prog_compiler_pic='-DDLL_EXPORT'
+      ;;
+
+    hpux9* | hpux10* | hpux11*)
+      lt_prog_compiler_wl='-Wl,'
+      # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
+      # not for PA HP-UX.
+      case $host_cpu in
+      hppa*64*|ia64*)
+	# +Z the default
+	;;
+      *)
+	lt_prog_compiler_pic='+Z'
+	;;
+      esac
+      # Is there a better lt_prog_compiler_static that works with the bundled CC?
+      lt_prog_compiler_static='${wl}-a ${wl}archive'
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      lt_prog_compiler_wl='-Wl,'
+      # PIC (with -KPIC) is the default.
+      lt_prog_compiler_static='-non_shared'
+      ;;
+
+    linux* | k*bsd*-gnu | kopensolaris*-gnu)
+      case $cc_basename in
+      # old Intel for x86_64 which still supported -KPIC.
+      ecc*)
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='-KPIC'
+	lt_prog_compiler_static='-static'
+        ;;
+      # icc used to be incompatible with GCC.
+      # ICC 10 doesn't accept -KPIC any more.
+      icc* | ifort*)
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='-fPIC'
+	lt_prog_compiler_static='-static'
+        ;;
+      # Lahey Fortran 8.1.
+      lf95*)
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='--shared'
+	lt_prog_compiler_static='--static'
+	;;
+      pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+        # Portland Group compilers (*not* the Pentium gcc compiler,
+	# which looks to be a dead project)
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='-fpic'
+	lt_prog_compiler_static='-Bstatic'
+        ;;
+      ccc*)
+        lt_prog_compiler_wl='-Wl,'
+        # All Alpha code is PIC.
+        lt_prog_compiler_static='-non_shared'
+        ;;
+      xl* | bgxl* | bgf* | mpixl*)
+	# IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene
+	lt_prog_compiler_wl='-Wl,'
+	lt_prog_compiler_pic='-qpic'
+	lt_prog_compiler_static='-qstaticlink'
+	;;
+      *)
+	case `$CC -V 2>&1 | sed 5q` in
+	*Sun\ F* | *Sun*Fortran*)
+	  # Sun Fortran 8.3 passes all unrecognized flags to the linker
+	  lt_prog_compiler_pic='-KPIC'
+	  lt_prog_compiler_static='-Bstatic'
+	  lt_prog_compiler_wl=''
+	  ;;
+	*Sun\ C*)
+	  # Sun C 5.9
+	  lt_prog_compiler_pic='-KPIC'
+	  lt_prog_compiler_static='-Bstatic'
+	  lt_prog_compiler_wl='-Wl,'
+	  ;;
+	esac
+	;;
+      esac
+      ;;
+
+    newsos6)
+      lt_prog_compiler_pic='-KPIC'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but we need to define the -shared option too,
+      # otherwise it will core dump.
+      lt_prog_compiler_pic='-fPIC -shared'
+      ;;
+
+    osf3* | osf4* | osf5*)
+      lt_prog_compiler_wl='-Wl,'
+      # All OSF/1 code is PIC.
+      lt_prog_compiler_static='-non_shared'
+      ;;
+
+    rdos*)
+      lt_prog_compiler_static='-non_shared'
+      ;;
+
+    solaris*)
+      lt_prog_compiler_pic='-KPIC'
+      lt_prog_compiler_static='-Bstatic'
+      case $cc_basename in
+      f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+	lt_prog_compiler_wl='-Qoption ld ';;
+      *)
+	lt_prog_compiler_wl='-Wl,';;
+      esac
+      ;;
+
+    sunos4*)
+      lt_prog_compiler_wl='-Qoption ld '
+      lt_prog_compiler_pic='-PIC'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    sysv4 | sysv4.2uw2* | sysv4.3*)
+      lt_prog_compiler_wl='-Wl,'
+      lt_prog_compiler_pic='-KPIC'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	lt_prog_compiler_pic='-Kconform_pic'
+	lt_prog_compiler_static='-Bstatic'
+      fi
+      ;;
+
+    sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+      lt_prog_compiler_wl='-Wl,'
+      lt_prog_compiler_pic='-KPIC'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    unicos*)
+      lt_prog_compiler_wl='-Wl,'
+      lt_prog_compiler_can_build_shared=no
+      ;;
+
+    uts4*)
+      lt_prog_compiler_pic='-pic'
+      lt_prog_compiler_static='-Bstatic'
+      ;;
+
+    *)
+      lt_prog_compiler_can_build_shared=no
+      ;;
+    esac
+  fi
+
+case $host_os in
+  # For platforms which do not support PIC, -DPIC is meaningless:
+  *djgpp*)
+    lt_prog_compiler_pic=
+    ;;
+  *)
+    lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
+    ;;
+esac
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5
+$as_echo "$lt_prog_compiler_pic" >&6; }
+
+
+
+
+
+
+#
+# Check to make sure the PIC flag actually works.
+#
+if test -n "$lt_prog_compiler_pic"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5
+$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... " >&6; }
+if ${lt_cv_prog_compiler_pic_works+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_pic_works=no
+   ac_outfile=conftest.$ac_objext
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+   lt_compiler_flag="$lt_prog_compiler_pic -DPIC"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   # The option is referenced via a variable to avoid confusing sed.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
+   (eval "$lt_compile" 2>conftest.err)
+   ac_status=$?
+   cat conftest.err >&5
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   if (exit $ac_status) && test -s "$ac_outfile"; then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings other than the usual output.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
+     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
+       lt_cv_prog_compiler_pic_works=yes
+     fi
+   fi
+   $RM -r conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5
+$as_echo "$lt_cv_prog_compiler_pic_works" >&6; }
+
+if test x"$lt_cv_prog_compiler_pic_works" = xyes; then
+    case $lt_prog_compiler_pic in
+     "" | " "*) ;;
+     *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;;
+     esac
+else
+    lt_prog_compiler_pic=
+     lt_prog_compiler_can_build_shared=no
+fi
+
+fi
+
+
+
+
+
+
+#
+# Check to make sure the static flag actually works.
+#
+wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\"
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5
+$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; }
+if ${lt_cv_prog_compiler_static_works+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_static_works=no
+   save_LDFLAGS="$LDFLAGS"
+   LDFLAGS="$LDFLAGS $lt_tmp_static_flag"
+   echo "$lt_simple_link_test_code" > conftest.$ac_ext
+   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
+     # The linker can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     if test -s conftest.err; then
+       # Append any errors to the config.log.
+       cat conftest.err 1>&5
+       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
+       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+       if diff conftest.exp conftest.er2 >/dev/null; then
+         lt_cv_prog_compiler_static_works=yes
+       fi
+     else
+       lt_cv_prog_compiler_static_works=yes
+     fi
+   fi
+   $RM -r conftest*
+   LDFLAGS="$save_LDFLAGS"
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5
+$as_echo "$lt_cv_prog_compiler_static_works" >&6; }
+
+if test x"$lt_cv_prog_compiler_static_works" = xyes; then
+    :
+else
+    lt_prog_compiler_static=
+fi
+
+
+
+
+
+
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
+$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
+if ${lt_cv_prog_compiler_c_o+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_c_o=no
+   $RM -r conftest 2>/dev/null
+   mkdir conftest
+   cd conftest
+   mkdir out
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+   lt_compiler_flag="-o out/conftest2.$ac_objext"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
+   (eval "$lt_compile" 2>out/conftest.err)
+   ac_status=$?
+   cat out/conftest.err >&5
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   if (exit $ac_status) && test -s out/conftest2.$ac_objext
+   then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
+     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
+     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
+       lt_cv_prog_compiler_c_o=yes
+     fi
+   fi
+   chmod u+w . 2>&5
+   $RM -r conftest*
+   # SGI C++ compiler will create directory out/ii_files/ for
+   # template instantiation
+   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
+   $RM out/* && rmdir out
+   cd ..
+   $RM -r conftest
+   $RM -r conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5
+$as_echo "$lt_cv_prog_compiler_c_o" >&6; }
+
+
+
+
+
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
+$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
+if ${lt_cv_prog_compiler_c_o+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler_c_o=no
+   $RM -r conftest 2>/dev/null
+   mkdir conftest
+   cd conftest
+   mkdir out
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+   lt_compiler_flag="-o out/conftest2.$ac_objext"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
+   (eval "$lt_compile" 2>out/conftest.err)
+   ac_status=$?
+   cat out/conftest.err >&5
+   echo "$as_me:$LINENO: \$? = $ac_status" >&5
+   if (exit $ac_status) && test -s out/conftest2.$ac_objext
+   then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
+     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
+     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
+       lt_cv_prog_compiler_c_o=yes
+     fi
+   fi
+   chmod u+w . 2>&5
+   $RM -r conftest*
+   # SGI C++ compiler will create directory out/ii_files/ for
+   # template instantiation
+   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
+   $RM out/* && rmdir out
+   cd ..
+   $RM -r conftest
+   $RM -r conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5
+$as_echo "$lt_cv_prog_compiler_c_o" >&6; }
+
+
+
+
+hard_links="nottested"
+if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then
+  # do not overwrite the value of need_locks provided by the user
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5
+$as_echo_n "checking if we can lock with hard links... " >&6; }
+  hard_links=yes
+  $RM -r conftest*
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  touch conftest.a
+  ln conftest.a conftest.b 2>&5 || hard_links=no
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5
+$as_echo "$hard_links" >&6; }
+  if test "$hard_links" = no; then
+    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5
+$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;}
+    need_locks=warn
+  fi
+else
+  need_locks=no
+fi
+
+
+
+
+
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5
+$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; }
+
+  runpath_var=
+  allow_undefined_flag=
+  always_export_symbols=no
+  archive_cmds=
+  archive_expsym_cmds=
+  compiler_needs_object=no
+  enable_shared_with_static_runtimes=no
+  export_dynamic_flag_spec=
+  export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+  hardcode_automatic=no
+  hardcode_direct=no
+  hardcode_direct_absolute=no
+  hardcode_libdir_flag_spec=
+  hardcode_libdir_flag_spec_ld=
+  hardcode_libdir_separator=
+  hardcode_minus_L=no
+  hardcode_shlibpath_var=unsupported
+  inherit_rpath=no
+  link_all_deplibs=unknown
+  module_cmds=
+  module_expsym_cmds=
+  old_archive_from_new_cmds=
+  old_archive_from_expsyms_cmds=
+  thread_safe_flag_spec=
+  whole_archive_flag_spec=
+  # include_expsyms should be a list of space-separated symbols to be *always*
+  # included in the symbol list
+  include_expsyms=
+  # exclude_expsyms can be an extended regexp of symbols to exclude
+  # it will be wrapped by ` (' and `)$', so one must not match beginning or
+  # end of line.  Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc',
+  # as well as any symbol that contains `d'.
+  exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
+  # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out
+  # platforms (ab)use it in PIC code, but their linkers get confused if
+  # the symbol is explicitly referenced.  Since portable code cannot
+  # rely on this symbol name, it's probably fine to never include it in
+  # preloaded symbol tables.
+  # Exclude shared library initialization/finalization symbols.
+  extract_expsyms_cmds=
+
+  case $host_os in
+  cygwin* | mingw* | pw32* | cegcc*)
+    # FIXME: the MSVC++ port hasn't been tested in a loooong time
+    # When not using gcc, we currently assume that we are using
+    # Microsoft Visual C++.
+    if test "$GCC" != yes; then
+      with_gnu_ld=no
+    fi
+    ;;
+  interix*)
+    # we just hope/assume this is gcc and not c89 (= MSVC++)
+    with_gnu_ld=yes
+    ;;
+  openbsd*)
+    with_gnu_ld=no
+    ;;
+  esac
+
+  ld_shlibs=yes
+
+  # On some targets, GNU ld is compatible enough with the native linker
+  # that we're better off using the native interface for both.
+  lt_use_gnu_ld_interface=no
+  if test "$with_gnu_ld" = yes; then
+    case $host_os in
+      aix*)
+	# The AIX port of GNU ld has always aspired to compatibility
+	# with the native linker.  However, as the warning in the GNU ld
+	# block says, versions before 2.19.5* couldn't really create working
+	# shared libraries, regardless of the interface used.
+	case `$LD -v 2>&1` in
+	  *\ \(GNU\ Binutils\)\ 2.19.5*) ;;
+	  *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;;
+	  *\ \(GNU\ Binutils\)\ [3-9]*) ;;
+	  *)
+	    lt_use_gnu_ld_interface=yes
+	    ;;
+	esac
+	;;
+      *)
+	lt_use_gnu_ld_interface=yes
+	;;
+    esac
+  fi
+
+  if test "$lt_use_gnu_ld_interface" = yes; then
+    # If archive_cmds runs LD, not CC, wlarc should be empty
+    wlarc='${wl}'
+
+    # Set some defaults for GNU ld with shared library support. These
+    # are reset later if shared libraries are not supported. Putting them
+    # here allows them to be overridden if necessary.
+    runpath_var=LD_RUN_PATH
+    hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+    export_dynamic_flag_spec='${wl}--export-dynamic'
+    # ancient GNU ld didn't support --whole-archive et al.
+    if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
+      whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
+    else
+      whole_archive_flag_spec=
+    fi
+    supports_anon_versioning=no
+    case `$LD -v 2>&1` in
+      *GNU\ gold*) supports_anon_versioning=yes ;;
+      *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11
+      *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
+      *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
+      *\ 2.11.*) ;; # other 2.11 versions
+      *) supports_anon_versioning=yes ;;
+    esac
+
+    # See if GNU ld supports shared libraries.
+    case $host_os in
+    aix[3-9]*)
+      # On AIX/PPC, the GNU linker is very broken
+      if test "$host_cpu" != ia64; then
+	ld_shlibs=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: the GNU linker, at least up to release 2.19, is reported
+*** to be unable to reliably create shared libraries on AIX.
+*** Therefore, libtool is disabling shared libraries support.  If you
+*** really care for shared libraries, you may want to install binutils
+*** 2.20 or above, or modify your PATH so that a non-GNU linker is found.
+*** You will then need to restart the configuration process.
+
+_LT_EOF
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+            archive_expsym_cmds=''
+        ;;
+      m68k)
+            archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            hardcode_libdir_flag_spec='-L$libdir'
+            hardcode_minus_L=yes
+        ;;
+      esac
+      ;;
+
+    beos*)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	allow_undefined_flag=unsupported
+	# Joseph Beckenbach <jrb3@best.com> says some releases of gcc
+	# support --undefined.  This deserves some investigation.  FIXME
+	archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+      else
+	ld_shlibs=no
+      fi
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless,
+      # as there is no search path for DLLs.
+      hardcode_libdir_flag_spec='-L$libdir'
+      export_dynamic_flag_spec='${wl}--export-all-symbols'
+      allow_undefined_flag=unsupported
+      always_export_symbols=no
+      enable_shared_with_static_runtimes=yes
+      export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
+
+      if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+        archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+	# If the export-symbols file already is a .def file (1st line
+	# is EXPORTS), use it as is; otherwise, prepend...
+	archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
+	  cp $export_symbols $output_objdir/$soname.def;
+	else
+	  echo EXPORTS > $output_objdir/$soname.def;
+	  cat $export_symbols >> $output_objdir/$soname.def;
+	fi~
+	$CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+      else
+	ld_shlibs=no
+      fi
+      ;;
+
+    haiku*)
+      archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+      link_all_deplibs=yes
+      ;;
+
+    interix[3-9]*)
+      hardcode_direct=no
+      hardcode_shlibpath_var=no
+      hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
+      export_dynamic_flag_spec='${wl}-E'
+      # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+      # Instead, shared libraries are loaded at an image base (0x10000000 by
+      # default) and relocated if they conflict, which is a slow, very
+      # memory-consuming and fragmenting process.  To avoid this, we pick a random,
+      # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+      # time.  Moving up from 0x10000000 also allows more sbrk(2) space.
+      archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      ;;
+
+    gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
+      tmp_diet=no
+      if test "$host_os" = linux-dietlibc; then
+	case $cc_basename in
+	  diet\ *) tmp_diet=yes;;	# linux-dietlibc with static linking (!diet-dyn)
+	esac
+      fi
+      if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
+	 && test "$tmp_diet" = no
+      then
+	tmp_addflag=
+	tmp_sharedflag='-shared'
+	case $cc_basename,$host_cpu in
+        pgcc*)				# Portland Group C compiler
+	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  tmp_addflag=' $pic_flag'
+	  ;;
+	pgf77* | pgf90* | pgf95* | pgfortran*)
+					# Portland Group f77 and f90 compilers
+	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  tmp_addflag=' $pic_flag -Mnomain' ;;
+	ecc*,ia64* | icc*,ia64*)	# Intel C compiler on ia64
+	  tmp_addflag=' -i_dynamic' ;;
+	efc*,ia64* | ifort*,ia64*)	# Intel Fortran compiler on ia64
+	  tmp_addflag=' -i_dynamic -nofor_main' ;;
+	ifc* | ifort*)			# Intel Fortran compiler
+	  tmp_addflag=' -nofor_main' ;;
+	lf95*)				# Lahey Fortran 8.1
+	  whole_archive_flag_spec=
+	  tmp_sharedflag='--shared' ;;
+	xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below)
+	  tmp_sharedflag='-qmkshrobj'
+	  tmp_addflag= ;;
+	nvcc*)	# Cuda Compiler Driver 2.2
+	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  compiler_needs_object=yes
+	  ;;
+	esac
+	case `$CC -V 2>&1 | sed 5q` in
+	*Sun\ C*)			# Sun C 5.9
+	  whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
+	  compiler_needs_object=yes
+	  tmp_sharedflag='-G' ;;
+	*Sun\ F*)			# Sun Fortran 8.3
+	  tmp_sharedflag='-G' ;;
+	esac
+	archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+
+        if test "x$supports_anon_versioning" = xyes; then
+          archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+	    cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+	    echo "local: *; };" >> $output_objdir/$libname.ver~
+	    $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
+        fi
+
+	case $cc_basename in
+	xlf* | bgf* | bgxlf* | mpixlf*)
+	  # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
+	  whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
+	  hardcode_libdir_flag_spec=
+	  hardcode_libdir_flag_spec_ld='-rpath $libdir'
+	  archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+	  if test "x$supports_anon_versioning" = xyes; then
+	    archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
+	      cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+	      echo "local: *; };" >> $output_objdir/$libname.ver~
+	      $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+	  fi
+	  ;;
+	esac
+      else
+        ld_shlibs=no
+      fi
+      ;;
+
+    netbsd*)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+	wlarc=
+      else
+	archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      fi
+      ;;
+
+    solaris*)
+      if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
+	ld_shlibs=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: The releases 2.8.* of the GNU linker cannot reliably
+*** create shared libraries on Solaris systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.9.1 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+      elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+	ld_shlibs=no
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
+      case `$LD -v 2>&1` in
+        *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*)
+	ld_shlibs=no
+	cat <<_LT_EOF 1>&2
+
+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot
+*** reliably create shared libraries on SCO systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.16.91.0.3 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+	;;
+	*)
+	  # For security reasons, it is highly recommended that you always
+	  # use absolute paths for naming shared libraries, and exclude the
+	  # DT_RUNPATH tag from executables and libraries.  But doing so
+	  # requires that you compile everything twice, which is a pain.
+	  if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	    hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+	    archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	    archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+	  else
+	    ld_shlibs=no
+	  fi
+	;;
+      esac
+      ;;
+
+    sunos4*)
+      archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      wlarc=
+      hardcode_direct=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    *)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+	archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+	archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+	ld_shlibs=no
+      fi
+      ;;
+    esac
+
+    if test "$ld_shlibs" = no; then
+      runpath_var=
+      hardcode_libdir_flag_spec=
+      export_dynamic_flag_spec=
+      whole_archive_flag_spec=
+    fi
+  else
+    # PORTME fill in a description of your system's linker (not GNU ld)
+    case $host_os in
+    aix3*)
+      allow_undefined_flag=unsupported
+      always_export_symbols=yes
+      archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
+      # Note: this linker hardcodes the directories in LIBPATH if there
+      # are no directories specified by -L.
+      hardcode_minus_L=yes
+      if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then
+	# Neither direct hardcoding nor static linking is supported with a
+	# broken collect2.
+	hardcode_direct=unsupported
+      fi
+      ;;
+
+    aix[4-9]*)
+      if test "$host_cpu" = ia64; then
+	# On IA64, the linker does run time linking by default, so we don't
+	# have to do anything special.
+	aix_use_runtimelinking=no
+	exp_sym_flag='-Bexport'
+	no_entry_flag=""
+      else
+	# If we're using GNU nm, then we don't want the "-C" option.
+	# -C means demangle to AIX nm, but means don't demangle with GNU nm
+	# Also, AIX nm treats weak defined symbols like other global
+	# defined symbols, whereas GNU nm marks them as "W".
+	if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+	  export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+	else
+	  export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
+	fi
+	aix_use_runtimelinking=no
+
+	# Test if we are trying to use run time linking or normal
+	# AIX style linking. If -brtl is somewhere in LDFLAGS, we
+	# need to do runtime linking.
+	case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*)
+	  for ld_flag in $LDFLAGS; do
+	  if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
+	    aix_use_runtimelinking=yes
+	    break
+	  fi
+	  done
+	  ;;
+	esac
+
+	exp_sym_flag='-bexport'
+	no_entry_flag='-bnoentry'
+      fi
+
+      # When large executables or shared objects are built, AIX ld can
+      # have problems creating the table of contents.  If linking a library
+      # or program results in "error TOC overflow" add -mminimal-toc to
+      # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
+      # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+      archive_cmds=''
+      hardcode_direct=yes
+      hardcode_direct_absolute=yes
+      hardcode_libdir_separator=':'
+      link_all_deplibs=yes
+      file_list_spec='${wl}-f,'
+
+      if test "$GCC" = yes; then
+	case $host_os in aix4.[012]|aix4.[012].*)
+	# We only want to do this on AIX 4.2 and lower; the check
+	# below for broken collect2 doesn't work under 4.3+
+	  collect2name=`${CC} -print-prog-name=collect2`
+	  if test -f "$collect2name" &&
+	   strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+	  then
+	  # We have reworked collect2
+	  :
+	  else
+	  # We have old collect2
+	  hardcode_direct=unsupported
+	  # It fails to find uninstalled libraries when the uninstalled
+	  # path is not listed in the libpath.  Setting hardcode_minus_L
+	  # to unsupported forces relinking
+	  hardcode_minus_L=yes
+	  hardcode_libdir_flag_spec='-L$libdir'
+	  hardcode_libdir_separator=
+	  fi
+	  ;;
+	esac
+	shared_flag='-shared'
+	if test "$aix_use_runtimelinking" = yes; then
+	  shared_flag="$shared_flag "'${wl}-G'
+	fi
+      else
+	# not using gcc
+	if test "$host_cpu" = ia64; then
+	# VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+	# chokes on -Wl,-G. The following line is correct:
+	  shared_flag='-G'
+	else
+	  if test "$aix_use_runtimelinking" = yes; then
+	    shared_flag='${wl}-G'
+	  else
+	    shared_flag='${wl}-bM:SRE'
+	  fi
+	fi
+      fi
+
+      export_dynamic_flag_spec='${wl}-bexpall'
+      # It seems that -bexpall does not export symbols beginning with
+      # underscore (_), so it is better to generate a list of symbols to export.
+      always_export_symbols=yes
+      if test "$aix_use_runtimelinking" = yes; then
+	# Warning - without using the other runtime loading flags (-brtl),
+	# -berok will link without error, but may produce a broken library.
+	allow_undefined_flag='-berok'
+        # Determine the default libpath from the value encoded in an
+        # empty executable.
+        cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+
+lt_aix_libpath_sed='
+    /Import File Strings/,/^$/ {
+	/^0/ {
+	    s/^0  *\(.*\)$/\1/
+	    p
+	}
+    }'
+aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+# Check for a 64-bit object if we didn't find anything.
+if test -z "$aix_libpath"; then
+  aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+fi
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+        hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+        archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
+      else
+	if test "$host_cpu" = ia64; then
+	  hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib'
+	  allow_undefined_flag="-z nodefs"
+	  archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
+	else
+	 # Determine the default libpath from the value encoded in an
+	 # empty executable.
+	 cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+
+lt_aix_libpath_sed='
+    /Import File Strings/,/^$/ {
+	/^0/ {
+	    s/^0  *\(.*\)$/\1/
+	    p
+	}
+    }'
+aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+# Check for a 64-bit object if we didn't find anything.
+if test -z "$aix_libpath"; then
+  aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+fi
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+	 hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+	  # Warning - without using the other run time loading flags,
+	  # -berok will link without error, but may produce a broken library.
+	  no_undefined_flag=' ${wl}-bernotok'
+	  allow_undefined_flag=' ${wl}-berok'
+	  if test "$with_gnu_ld" = yes; then
+	    # We only use this code for GNU lds that support --whole-archive.
+	    whole_archive_flag_spec='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
+	  else
+	    # Exported symbols can be pulled into shared objects from archives
+	    whole_archive_flag_spec='$convenience'
+	  fi
+	  archive_cmds_need_lc=yes
+	  # This is similar to how AIX traditionally builds its shared libraries.
+	  archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
+	fi
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
+            archive_expsym_cmds=''
+        ;;
+      m68k)
+            archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            hardcode_libdir_flag_spec='-L$libdir'
+            hardcode_minus_L=yes
+        ;;
+      esac
+      ;;
+
+    bsdi[45]*)
+      export_dynamic_flag_spec=-rdynamic
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # When not using gcc, we currently assume that we are using
+      # Microsoft Visual C++.
+      # hardcode_libdir_flag_spec is actually meaningless, as there is
+      # no search path for DLLs.
+      hardcode_libdir_flag_spec=' '
+      allow_undefined_flag=unsupported
+      # Tell ltmain to make .lib files, not .a files.
+      libext=lib
+      # Tell ltmain to make .dll files, not .so files.
+      shrext_cmds=".dll"
+      # FIXME: Setting linknames here is a bad hack.
+      archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+      # The linker will automatically build a .lib file if we build a DLL.
+      old_archive_from_new_cmds='true'
+      # FIXME: Should let the user specify the lib program.
+      old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
+      fix_srcfile_path='`cygpath -w "$srcfile"`'
+      enable_shared_with_static_runtimes=yes
+      ;;
+
+    darwin* | rhapsody*)
+
+
+  archive_cmds_need_lc=no
+  hardcode_direct=no
+  hardcode_automatic=yes
+  hardcode_shlibpath_var=unsupported
+  if test "$lt_cv_ld_force_load" = "yes"; then
+    whole_archive_flag_spec='`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`'
+  else
+    whole_archive_flag_spec=''
+  fi
+  link_all_deplibs=yes
+  allow_undefined_flag="$_lt_dar_allow_undefined"
+  case $cc_basename in
+     ifort*) _lt_dar_can_shared=yes ;;
+     *) _lt_dar_can_shared=$GCC ;;
+  esac
+  if test "$_lt_dar_can_shared" = "yes"; then
+    output_verbose_link_cmd=func_echo_all
+    archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
+    module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
+    archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
+    module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
+
+  else
+  ld_shlibs=no
+  fi
+
+      ;;
+
+    dgux*)
+      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_libdir_flag_spec='-L$libdir'
+      hardcode_shlibpath_var=no
+      ;;
+
+    freebsd1*)
+      ld_shlibs=no
+      ;;
+
+    # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
+    # support.  Future versions do this automatically, but an explicit c++rt0.o
+    # does not break anything, and helps significantly (at the cost of a little
+    # extra space).
+    freebsd2.2*)
+      archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
+      hardcode_libdir_flag_spec='-R$libdir'
+      hardcode_direct=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    # Unfortunately, older versions of FreeBSD 2 do not have this feature.
+    freebsd2*)
+      archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_direct=yes
+      hardcode_minus_L=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+    freebsd* | dragonfly*)
+      archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags'
+      hardcode_libdir_flag_spec='-R$libdir'
+      hardcode_direct=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    hpux9*)
+      if test "$GCC" = yes; then
+	archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+      else
+	archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
+      fi
+      hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
+      hardcode_libdir_separator=:
+      hardcode_direct=yes
+
+      # hardcode_minus_L: Not really in the search PATH,
+      # but as the default location of the library.
+      hardcode_minus_L=yes
+      export_dynamic_flag_spec='${wl}-E'
+      ;;
+
+    hpux10*)
+      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+	archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      if test "$with_gnu_ld" = no; then
+	hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
+	hardcode_libdir_flag_spec_ld='+b $libdir'
+	hardcode_libdir_separator=:
+	hardcode_direct=yes
+	hardcode_direct_absolute=yes
+	export_dynamic_flag_spec='${wl}-E'
+	# hardcode_minus_L: Not really in the search PATH,
+	# but as the default location of the library.
+	hardcode_minus_L=yes
+      fi
+      ;;
+
+    hpux11*)
+      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
+	case $host_cpu in
+	hppa*64*)
+	  archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	ia64*)
+	  archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+	  archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	esac
+      else
+	case $host_cpu in
+	hppa*64*)
+	  archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	ia64*)
+	  archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+	  ;;
+	*)
+
+	  # Older versions of the 11.00 compiler do not understand -b yet
+	  # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does)
+	  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $CC understands -b" >&5
+$as_echo_n "checking if $CC understands -b... " >&6; }
+if ${lt_cv_prog_compiler__b+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_prog_compiler__b=no
+   save_LDFLAGS="$LDFLAGS"
+   LDFLAGS="$LDFLAGS -b"
+   echo "$lt_simple_link_test_code" > conftest.$ac_ext
+   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
+     # The linker can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     if test -s conftest.err; then
+       # Append any errors to the config.log.
+       cat conftest.err 1>&5
+       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
+       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+       if diff conftest.exp conftest.er2 >/dev/null; then
+         lt_cv_prog_compiler__b=yes
+       fi
+     else
+       lt_cv_prog_compiler__b=yes
+     fi
+   fi
+   $RM -r conftest*
+   LDFLAGS="$save_LDFLAGS"
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5
+$as_echo "$lt_cv_prog_compiler__b" >&6; }
+
+if test x"$lt_cv_prog_compiler__b" = xyes; then
+    archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+else
+    archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+fi
+
+	  ;;
+	esac
+      fi
+      if test "$with_gnu_ld" = no; then
+	hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
+	hardcode_libdir_separator=:
+
+	case $host_cpu in
+	hppa*64*|ia64*)
+	  hardcode_direct=no
+	  hardcode_shlibpath_var=no
+	  ;;
+	*)
+	  hardcode_direct=yes
+	  hardcode_direct_absolute=yes
+	  export_dynamic_flag_spec='${wl}-E'
+
+	  # hardcode_minus_L: Not really in the search PATH,
+	  # but as the default location of the library.
+	  hardcode_minus_L=yes
+	  ;;
+	esac
+      fi
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      if test "$GCC" = yes; then
+	archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	# Try to use the -exported_symbol ld option, if it does not
+	# work, assume that -exports_file does not work either and
+	# implicitly export all symbols.
+        save_LDFLAGS="$LDFLAGS"
+        LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
+        cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+int foo(void) {}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
+
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+        LDFLAGS="$save_LDFLAGS"
+      else
+	archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
+      fi
+      archive_cmds_need_lc='no'
+      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+      hardcode_libdir_separator=:
+      inherit_rpath=yes
+      link_all_deplibs=yes
+      ;;
+
+    netbsd*)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+	archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'  # a.out
+      else
+	archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags'      # ELF
+      fi
+      hardcode_libdir_flag_spec='-R$libdir'
+      hardcode_direct=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    newsos6)
+      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_direct=yes
+      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+      hardcode_libdir_separator=:
+      hardcode_shlibpath_var=no
+      ;;
+
+    *nto* | *qnx*)
+      ;;
+
+    openbsd*)
+      if test -f /usr/libexec/ld.so; then
+	hardcode_direct=yes
+	hardcode_shlibpath_var=no
+	hardcode_direct_absolute=yes
+	if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+	  archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+	  archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols'
+	  hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
+	  export_dynamic_flag_spec='${wl}-E'
+	else
+	  case $host_os in
+	   openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*)
+	     archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+	     hardcode_libdir_flag_spec='-R$libdir'
+	     ;;
+	   *)
+	     archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+	     hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
+	     ;;
+	  esac
+	fi
+      else
+	ld_shlibs=no
+      fi
+      ;;
+
+    os2*)
+      hardcode_libdir_flag_spec='-L$libdir'
+      hardcode_minus_L=yes
+      allow_undefined_flag=unsupported
+      archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def'
+      old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def'
+      ;;
+
+    osf3*)
+      if test "$GCC" = yes; then
+	allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+      else
+	allow_undefined_flag=' -expect_unresolved \*'
+	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+      fi
+      archive_cmds_need_lc='no'
+      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+      hardcode_libdir_separator=:
+      ;;
+
+    osf4* | osf5*)	# as osf3* with the addition of -msym flag
+      if test "$GCC" = yes; then
+	allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
+	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
+	hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
+      else
+	allow_undefined_flag=' -expect_unresolved \*'
+	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
+	archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
+	$CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp'
+
+	# Both the C and C++ compilers support -rpath directly.
+	hardcode_libdir_flag_spec='-rpath $libdir'
+      fi
+      archive_cmds_need_lc='no'
+      hardcode_libdir_separator=:
+      ;;
+
+    solaris*)
+      no_undefined_flag=' -z defs'
+      if test "$GCC" = yes; then
+	wlarc='${wl}'
+	archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+      else
+	case `$CC -V 2>&1` in
+	*"Compilers 5.0"*)
+	  wlarc=''
+	  archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
+	  ;;
+	*)
+	  wlarc='${wl}'
+	  archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags'
+	  archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+	  $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+	  ;;
+	esac
+      fi
+      hardcode_libdir_flag_spec='-R$libdir'
+      hardcode_shlibpath_var=no
+      case $host_os in
+      solaris2.[0-5] | solaris2.[0-5].*) ;;
+      *)
+	# The compiler driver will combine and reorder linker options, but
+	# it understands `-z linker_flag'.  GCC discards the flag unless it
+	# is passed via `$wl', yet is careful enough not to reorder options.
+	# Supported since Solaris 2.6 (maybe 2.5.1?)
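+	# As a sketch (hypothetical convenience archive), the GCC form
+	# expands to roughly
+	#   gcc -shared ... -Wl,-z -Wl,allextract .libs/libconv.a \
+	#       -Wl,-z -Wl,defaultextract ...
+	# while the Sun compiler receives the -z options bare.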
+	if test "$GCC" = yes; then
+	  whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
+	else
+	  whole_archive_flag_spec='-z allextract$convenience -z defaultextract'
+	fi
+	;;
+      esac
+      link_all_deplibs=yes
+      ;;
+
+    sunos4*)
+      if test "x$host_vendor" = xsequent; then
+	# Use $CC to link under Sequent, because it throws in some extra .o
+	# files that make .init and .fini sections work.
+	archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      hardcode_libdir_flag_spec='-L$libdir'
+      hardcode_direct=yes
+      hardcode_minus_L=yes
+      hardcode_shlibpath_var=no
+      ;;
+
+    sysv4)
+      case $host_vendor in
+	sni)
+	  archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  hardcode_direct=yes # is this really true???
+	;;
+	siemens)
+	  ## LD is ld; it makes a PLAMLIB.
+	  ## CC just makes a GrossModule.
+	  archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags'
+	  reload_cmds='$CC -r -o $output$reload_objs'
+	  hardcode_direct=no
+        ;;
+	motorola)
+	  archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	  hardcode_direct=no  # Motorola manual says yes, but my tests say they lie
+	;;
+      esac
+      runpath_var='LD_RUN_PATH'
+      hardcode_shlibpath_var=no
+      ;;
+
+    sysv4.3*)
+      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_shlibpath_var=no
+      export_dynamic_flag_spec='-Bexport'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+	archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+	hardcode_shlibpath_var=no
+	runpath_var=LD_RUN_PATH
+	hardcode_runpath_var=yes
+	ld_shlibs=yes
+      fi
+      ;;
+
+    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*)
+      no_undefined_flag='${wl}-z,text'
+      archive_cmds_need_lc=no
+      hardcode_shlibpath_var=no
+      runpath_var='LD_RUN_PATH'
+
+      if test "$GCC" = yes; then
+	archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6*)
+      # Note: We can NOT use -z defs as we might desire, because we do not
+      # link with -lc, and that would cause any symbols used from libc to
+      # always be unresolved, which means just about no library would
+      # ever link correctly.  If we're not using GNU ld we use -z text
+      # though, which does catch some bad symbols but isn't as heavy-handed
+      # as -z defs.
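+      # For example, `-z text' still rejects non-PIC relocations in the
+      # resulting shared object, while `-z nodefs' tolerates the libc
+      # symbols we deliberately leave unresolved.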
+      no_undefined_flag='${wl}-z,text'
+      allow_undefined_flag='${wl}-z,nodefs'
+      archive_cmds_need_lc=no
+      hardcode_shlibpath_var=no
+      hardcode_libdir_flag_spec='${wl}-R,$libdir'
+      hardcode_libdir_separator=':'
+      link_all_deplibs=yes
+      export_dynamic_flag_spec='${wl}-Bexport'
+      runpath_var='LD_RUN_PATH'
+
+      if test "$GCC" = yes; then
+	archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+	archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+	archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    uts4*)
+      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      hardcode_libdir_flag_spec='-L$libdir'
+      hardcode_shlibpath_var=no
+      ;;
+
+    *)
+      ld_shlibs=no
+      ;;
+    esac
+
+    if test x$host_vendor = xsni; then
+      case $host in
+      sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
+	export_dynamic_flag_spec='${wl}-Blargedynsym'
+	;;
+      esac
+    fi
+  fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5
+$as_echo "$ld_shlibs" >&6; }
+test "$ld_shlibs" = no && can_build_shared=no
+
+with_gnu_ld=$with_gnu_ld
+
+#
+# Do we need to explicitly link libc?
+#
+case "x$archive_cmds_need_lc" in
+x|xyes)
+  # Assume -lc should be added
+  archive_cmds_need_lc=yes
+
+  if test "$enable_shared" = yes && test "$GCC" = yes; then
+    case $archive_cmds in
+    *'~'*)
+      # FIXME: we may have to deal with multi-command sequences.
+      ;;
+    '$CC '*)
+      # Test whether the compiler implicitly links with -lc since on some
+      # systems, -lgcc has to come before -lc. If gcc already passes -lc
+      # to ld, don't add -lc before -lgcc.
+      { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5
+$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; }
+if ${lt_cv_archive_cmds_need_lc+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  $RM -r conftest*
+	echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+	if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
+  (eval $ac_compile) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } 2>conftest.err; then
+	  soname=conftest
+	  lib=conftest
+	  libobjs=conftest.$ac_objext
+	  deplibs=
+	  wl=$lt_prog_compiler_wl
+	  pic_flag=$lt_prog_compiler_pic
+	  compiler_flags=-v
+	  linker_flags=-v
+	  verstring=
+	  output_objdir=.
+	  libname=conftest
+	  lt_save_allow_undefined_flag=$allow_undefined_flag
+	  allow_undefined_flag=
+	  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5
+  (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; }
+	  then
+	    lt_cv_archive_cmds_need_lc=no
+	  else
+	    lt_cv_archive_cmds_need_lc=yes
+	  fi
+	  allow_undefined_flag=$lt_save_allow_undefined_flag
+	else
+	  cat conftest.err 1>&5
+	fi
+	$RM -r conftest*
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc" >&5
+$as_echo "$lt_cv_archive_cmds_need_lc" >&6; }
+      archive_cmds_need_lc=$lt_cv_archive_cmds_need_lc
+      ;;
+    esac
+  fi
+  ;;
+esac
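+# In outline, the cached probe above does (a sketch, not verbatim):
+#   $CC -c conftest.c                          # build a trivial object
+#   eval "$archive_cmds" 2>&1 | $GREP ' -lc '  # link verbosely, look for -lc
+# If the driver already passes -lc to ld on its own, libtool must not add
+# another -lc ahead of -lgcc.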
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5
+$as_echo_n "checking dynamic linker characteristics... " >&6; }
+
+if test "$GCC" = yes; then
+  case $host_os in
+    darwin*) lt_awk_arg="/^libraries:/,/LR/" ;;
+    *) lt_awk_arg="/^libraries:/" ;;
+  esac
+  case $host_os in
+    mingw* | cegcc*) lt_sed_strip_eq="s,=\([A-Za-z]:\),\1,g" ;;
+    *) lt_sed_strip_eq="s,=/,/,g" ;;
+  esac
+  lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq`
+  case $lt_search_path_spec in
+  *\;*)
+    # If the path contains ";", assume it is the separator; otherwise
+    # default to the standard path separator (i.e. ":").  It is assumed
+    # that no part of a normal pathname contains ";", but that should be
+    # okay in the real world, where ";" in dirpaths is itself problematic.
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'`
+    ;;
+  *)
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"`
+    ;;
+  esac
+  # OK, now that we have the path separated by spaces, we can step through
+  # it and add the multilib dir if necessary.
+  lt_tmp_lt_search_path_spec=
+  lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null`
+  for lt_sys_path in $lt_search_path_spec; do
+    if test -d "$lt_sys_path/$lt_multi_os_dir"; then
+      lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir"
+    else
+      test -d "$lt_sys_path" && \
+	lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path"
+    fi
+  done
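+  # For example, on a hypothetical bi-arch host -print-multi-os-directory
+  # prints `../lib64', turning /usr/lib into /usr/lib/../lib64 before the
+  # awk pass below canonicalizes it.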
+  lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk '
+BEGIN {RS=" "; FS="/|\n";} {
+  lt_foo="";
+  lt_count=0;
+  for (lt_i = NF; lt_i > 0; lt_i--) {
+    if ($lt_i != "" && $lt_i != ".") {
+      if ($lt_i == "..") {
+        lt_count++;
+      } else {
+        if (lt_count == 0) {
+          lt_foo="/" $lt_i lt_foo;
+        } else {
+          lt_count--;
+        }
+      }
+    }
+  }
+  if (lt_foo != "") { lt_freq[lt_foo]++; }
+  if (lt_freq[lt_foo] == 1) { print lt_foo; }
+}'`
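+  # The awk program above canonicalizes each directory from the right,
+  # resolving `..' components and dropping duplicates, e.g.
+  #   /usr/lib/../lib64 /usr/lib64  ->  /usr/lib64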
+  # AWK program above erroneously prepends '/' to C:/dos/paths
+  # for these hosts.
+  case $host_os in
+    mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\
+      $SED 's,/\([A-Za-z]:\),\1,g'` ;;
+  esac
+  sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP`
+else
+  sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
+fi
+library_names_spec=
+libname_spec='lib$name'
+soname_spec=
+shrext_cmds=".so"
+postinstall_cmds=
+postuninstall_cmds=
+finish_cmds=
+finish_eval=
+shlibpath_var=
+shlibpath_overrides_runpath=unknown
+version_type=none
+dynamic_linker="$host_os ld.so"
+sys_lib_dlsearch_path_spec="/lib /usr/lib"
+need_lib_prefix=unknown
+hardcode_into_libs=no
+
+# When you set need_version to no, make sure it does not cause -set_version
+# flags to be left without arguments.
+need_version=unknown
+
+case $host_os in
+aix3*)
+  version_type=linux
+  library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
+  shlibpath_var=LIBPATH
+
+  # AIX 3 has no versioning support, so we append a major version to the name.
+  soname_spec='${libname}${release}${shared_ext}$major'
+  ;;
+
+aix[4-9]*)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  hardcode_into_libs=yes
+  if test "$host_cpu" = ia64; then
+    # AIX 5 supports IA64
+    library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
+    shlibpath_var=LD_LIBRARY_PATH
+  else
+    # With GCC up to 2.95.x, collect2 would create an import file
+    # for dependent libraries.  The import file would start with
+    # the line `#! .'.  This would cause the generated library to
+    # depend on `.', always an invalid library.  This was fixed in
+    # development snapshots of GCC prior to 3.0.
+    case $host_os in
+      aix4 | aix4.[01] | aix4.[01].*)
+      if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
+	   echo ' yes '
+	   echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
+	:
+      else
+	can_build_shared=no
+      fi
+      ;;
+    esac
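+    # The here-document probe above merely asks the preprocessor whether
+    # __GNUC__/__GNUC_MINOR__ identify GCC >= 2.97; e.g. GCC 2.95.x fails
+    # the test, and shared libraries stay disabled on those aix4.[01] hosts.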
+    # AIX (on Power*) has no versioning support, so currently we cannot
+    # hardcode the correct soname into the executable.  We could probably
+    # add versioning support to collect2, so that additional links could
+    # be useful in the future.
+    if test "$aix_use_runtimelinking" = yes; then
+      # If using run time linking (on AIX 4.2 or later) use lib<name>.so
+      # instead of lib<name>.a to let people know that these are not
+      # typical AIX shared libraries.
+      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    else
+      # We preserve .a as the extension for shared libraries on AIX 4.2
+      # and later when we are not doing run-time linking.
+      library_names_spec='${libname}${release}.a $libname.a'
+      soname_spec='${libname}${release}${shared_ext}$major'
+    fi
+    shlibpath_var=LIBPATH
+  fi
+  ;;
+
+amigaos*)
+  case $host_cpu in
+  powerpc)
+    # Since July 2007 AmigaOS4 officially supports .so libraries.
+    # When compiling the executable, add -use-dynld -Lsobjs: to the
+    # compile line.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    ;;
+  m68k)
+    library_names_spec='$libname.ixlibrary $libname.a'
+    # Create ${libname}_ixlibrary.a entries in /sys/libs.
+    finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
+    ;;
+  esac
+  ;;
+
+beos*)
+  library_names_spec='${libname}${shared_ext}'
+  dynamic_linker="$host_os ld.so"
+  shlibpath_var=LIBRARY_PATH
+  ;;
+
+bsdi[45]*)
+  version_type=linux
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
+  sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
+  # The default ld.so.conf also contains /usr/contrib/lib and
+  # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
+  # libtool to hard-code these into programs.
+  ;;
+
+cygwin* | mingw* | pw32* | cegcc*)
+  version_type=windows
+  shrext_cmds=".dll"
+  need_version=no
+  need_lib_prefix=no
+
+  case $GCC,$host_os in
+  yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*)
+    library_names_spec='$libname.dll.a'
+    # DLL is installed to $(libdir)/../bin by postinstall_cmds
+    postinstall_cmds='base_file=`basename \${file}`~
+      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
+      dldir=$destdir/`dirname \$dlpath`~
+      test -d \$dldir || mkdir -p \$dldir~
+      $install_prog $dir/$dlname \$dldir/$dlname~
+      chmod a+x \$dldir/$dlname~
+      if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
+        eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
+      fi'
+    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+      dlpath=$dir/\$dldll~
+       $RM \$dlpath'
+    shlibpath_overrides_runpath=yes
+
+    case $host_os in
+    cygwin*)
+      # Cygwin DLLs use 'cyg' prefix rather than 'lib'
+      soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+
+      sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"
+      ;;
+    mingw* | cegcc*)
+      # MinGW DLLs use traditional 'lib' prefix
+      soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+      ;;
+    pw32*)
+      # pw32 DLLs use 'pw' prefix rather than 'lib'
+      library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
+      ;;
+    esac
+    ;;
+
+  *)
+    library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
+    ;;
+  esac
+  dynamic_linker='Win32 ld.exe'
+  # FIXME: first we should search . and the directory the executable is in
+  shlibpath_var=PATH
+  ;;
+
+darwin* | rhapsody*)
+  dynamic_linker="$host_os dyld"
+  version_type=darwin
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext ${libname}${release}${versuffix}$shared_ext'
+  soname_spec='${libname}${release}${major}$shared_ext'
+  shlibpath_overrides_runpath=yes
+  shlibpath_var=DYLD_LIBRARY_PATH
+  shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
+
+  sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"
+  sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
+  ;;
+
+dgux*)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+freebsd1*)
+  dynamic_linker=no
+  ;;
+
+freebsd* | dragonfly*)
+  # DragonFly does not have aout.  When/if they implement a new
+  # versioning mechanism, adjust this.
+  if test -x /usr/bin/objformat; then
+    objformat=`/usr/bin/objformat`
+  else
+    case $host_os in
+    freebsd[123]*) objformat=aout ;;
+    *) objformat=elf ;;
+    esac
+  fi
+  version_type=freebsd-$objformat
+  case $version_type in
+    freebsd-elf*)
+      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+      need_version=no
+      need_lib_prefix=no
+      ;;
+    freebsd-*)
+      library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+      need_version=yes
+      ;;
+  esac
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_os in
+  freebsd2*)
+    shlibpath_overrides_runpath=yes
+    ;;
+  freebsd3.[01]* | freebsdelf3.[01]*)
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  freebsd3.[2-9]* | freebsdelf3.[2-9]* | \
+  freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1)
+    shlibpath_overrides_runpath=no
+    hardcode_into_libs=yes
+    ;;
+  *) # from 4.6 on, and DragonFly
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  esac
+  ;;
+
+gnu*)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  hardcode_into_libs=yes
+  ;;
+
+haiku*)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  dynamic_linker="$host_os runtime_loader"
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
+  hardcode_into_libs=yes
+  ;;
+
+hpux9* | hpux10* | hpux11*)
+  # Give a soname corresponding to the major version so that dld.sl refuses to
+  # link against other versions.
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  case $host_cpu in
+  ia64*)
+    shrext_cmds='.so'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.so"
+    shlibpath_var=LD_LIBRARY_PATH
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    if test "X$HPUX_IA64_MODE" = X32; then
+      sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
+    else
+      sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
+    fi
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  hppa*64*)
+    shrext_cmds='.sl'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH?
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  *)
+    shrext_cmds='.sl'
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=SHLIB_PATH
+    shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    ;;
+  esac
+  # HP-UX runs *really* slowly unless shared libraries are mode 555, ...
+  postinstall_cmds='chmod 555 $lib'
+  # or fails outright, so override atomically:
+  install_override_mode=555
+  ;;
+
+interix[3-9]*)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $host_os in
+    nonstopux*) version_type=nonstopux ;;
+    *)
+	if test "$lt_cv_prog_gnu_ld" = yes; then
+		version_type=linux
+	else
+		version_type=irix
+	fi ;;
+  esac
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='${libname}${release}${shared_ext}$major'
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
+  case $host_os in
+  irix5* | nonstopux*)
+    libsuff= shlibsuff=
+    ;;
+  *)
+    case $LD in # libtool.m4 will add one of these switches to LD
+    *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
+      libsuff= shlibsuff= libmagic=32-bit;;
+    *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
+      libsuff=32 shlibsuff=N32 libmagic=N32;;
+    *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
+      libsuff=64 shlibsuff=64 libmagic=64-bit;;
+    *) libsuff= shlibsuff= libmagic=never-match;;
+    esac
+    ;;
+  esac
+  shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
+  shlibpath_overrides_runpath=no
+  sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
+  sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
+  hardcode_into_libs=yes
+  ;;
+
+# No shared lib support for Linux oldld, aout, or coff.
+linux*oldld* | linux*aout* | linux*coff*)
+  dynamic_linker=no
+  ;;
+
+# This must be Linux ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+
+  # Some binutils ld are patched to set DT_RUNPATH
+  if ${lt_cv_shlibpath_overrides_runpath+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  lt_cv_shlibpath_overrides_runpath=no
+    save_LDFLAGS=$LDFLAGS
+    save_libdir=$libdir
+    eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \
+	 LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\""
+    cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  if  ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then :
+  lt_cv_shlibpath_overrides_runpath=yes
+fi
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+    LDFLAGS=$save_LDFLAGS
+    libdir=$save_libdir
+
+fi
+
+  shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath
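+  # The cached probe above links a trivial program with $libdir forced to a
+  # dummy /foo and checks `$OBJDUMP -p' output for a line like
+  #   RUNPATH              /foo
+  # A recorded DT_RUNPATH means LD_LIBRARY_PATH takes precedence over the
+  # hardcoded path at run time, hence `yes'.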
+
+  # This implies no fast_install, which is unacceptable.
+  # Some rework will be needed to allow for fast_install
+  # before this can be enabled.
+  hardcode_into_libs=yes
+
+  # Append ld.so.conf contents to the search path
+  if test -f /etc/ld.so.conf; then
+    lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[	 ]*hwcap[	 ]/d;s/[:,	]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
+    sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
+  fi
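+  # Sketch of the pipeline above: given an /etc/ld.so.conf such as
+  #   include /etc/ld.so.conf.d/*.conf
+  #   /opt/lib
+  # awk inlines the include targets, sed strips comments and hwcap lines,
+  # and tr joins the rest into one space-separated list.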
+
+  # We used to test for /lib/ld.so.1 and disable shared libraries on
+  # powerpc, because MkLinux only supported shared libraries with the
+  # GNU dynamic linker.  That test broke cross compilation; since most
+  # powerpc-linux boxes support dynamic linking these days, and people
+  # can always --disable-shared, the test was removed and we simply
+  # assume the GNU/Linux dynamic linker is in use.
+  dynamic_linker='GNU/Linux ld.so'
+  ;;
+
+netbsd*)
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+    finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+    dynamic_linker='NetBSD (a.out) ld.so'
+  else
+    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+    soname_spec='${libname}${release}${shared_ext}$major'
+    dynamic_linker='NetBSD ld.elf_so'
+  fi
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  ;;
+
+newsos6)
+  version_type=linux
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  ;;
+
+*nto* | *qnx*)
+  version_type=qnx
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  dynamic_linker='ldqnx.so'
+  ;;
+
+openbsd*)
+  version_type=sunos
+  sys_lib_dlsearch_path_spec="/usr/lib"
+  need_lib_prefix=no
+  # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
+  case $host_os in
+    openbsd3.3 | openbsd3.3.*)	need_version=yes ;;
+    *)				need_version=no  ;;
+  esac
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
+    case $host_os in
+      openbsd2.[89] | openbsd2.[89].*)
+	shlibpath_overrides_runpath=no
+	;;
+      *)
+	shlibpath_overrides_runpath=yes
+	;;
+      esac
+  else
+    shlibpath_overrides_runpath=yes
+  fi
+  ;;
+
+os2*)
+  libname_spec='$name'
+  shrext_cmds=".dll"
+  need_lib_prefix=no
+  library_names_spec='$libname${shared_ext} $libname.a'
+  dynamic_linker='OS/2 ld.exe'
+  shlibpath_var=LIBPATH
+  ;;
+
+osf3* | osf4* | osf5*)
+  version_type=osf
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='${libname}${release}${shared_ext}$major'
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
+  sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
+  ;;
+
+rdos*)
+  dynamic_linker=no
+  ;;
+
+solaris*)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  # ldd complains unless libraries are executable
+  postinstall_cmds='chmod +x $lib'
+  ;;
+
+sunos4*)
+  version_type=sunos
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
+  finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  if test "$with_gnu_ld" = yes; then
+    need_lib_prefix=no
+  fi
+  need_version=yes
+  ;;
+
+sysv4 | sysv4.3*)
+  version_type=linux
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_vendor in
+    sni)
+      shlibpath_overrides_runpath=no
+      need_lib_prefix=no
+      runpath_var=LD_RUN_PATH
+      ;;
+    siemens)
+      need_lib_prefix=no
+      ;;
+    motorola)
+      need_lib_prefix=no
+      need_version=no
+      shlibpath_overrides_runpath=no
+      sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
+      ;;
+  esac
+  ;;
+
+sysv4*MP*)
+  if test -d /usr/nec; then
+    version_type=linux
+    library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
+    soname_spec='$libname${shared_ext}.$major'
+    shlibpath_var=LD_LIBRARY_PATH
+  fi
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  version_type=freebsd-elf
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  if test "$with_gnu_ld" = yes; then
+    sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
+  else
+    sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
+    case $host_os in
+      sco3.2v5*)
+        sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
+	;;
+    esac
+  fi
+  sys_lib_dlsearch_path_spec='/usr/lib'
+  ;;
+
+tpf*)
+  # TPF is a cross-target only.  Preferred cross-host = GNU/Linux.
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+uts4*)
+  version_type=linux
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+*)
+  dynamic_linker=no
+  ;;
+esac
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5
+$as_echo "$dynamic_linker" >&6; }
+test "$dynamic_linker" = no && can_build_shared=no
+
+variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
+if test "$GCC" = yes; then
+  variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
+fi
+
+if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
+  sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
+fi
+if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
+  sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
+fi
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5
+$as_echo_n "checking how to hardcode library paths into programs... " >&6; }
+hardcode_action=
+if test -n "$hardcode_libdir_flag_spec" ||
+   test -n "$runpath_var" ||
+   test "X$hardcode_automatic" = "Xyes" ; then
+
+  # We can hardcode non-existent directories.
+  if test "$hardcode_direct" != no &&
+     # If the only mechanism to avoid hardcoding is shlibpath_var, we
+     # have to relink, otherwise we might link with an installed library
+     # when we should be linking with a yet-to-be-installed one
+     ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no &&
+     test "$hardcode_minus_L" != no; then
+    # Linking always hardcodes the temporary library directory.
+    hardcode_action=relink
+  else
+    # We can link without hardcoding, and we can hardcode non-existent dirs.
+    hardcode_action=immediate
+  fi
+else
+  # We cannot hardcode anything, or else we can only hardcode existing
+  # directories.
+  hardcode_action=unsupported
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5
+$as_echo "$hardcode_action" >&6; }
+
+if test "$hardcode_action" = relink ||
+   test "$inherit_rpath" = yes; then
+  # Fast installation is not supported
+  enable_fast_install=no
+elif test "$shlibpath_overrides_runpath" = yes ||
+     test "$enable_shared" = no; then
+  # Fast installation is not necessary
+  enable_fast_install=needless
+fi
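+# That is: a `relink' action (or inherited rpaths) forces the slow install
+# path, whereas if LD_LIBRARY_PATH wins anyway, or nothing shared is built,
+# fast installation is simply not needed.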
+
+  if test "x$enable_dlopen" != xyes; then
+  enable_dlopen=unknown
+  enable_dlopen_self=unknown
+  enable_dlopen_self_static=unknown
+else
+  lt_cv_dlopen=no
+  lt_cv_dlopen_libs=
+
+  case $host_os in
+  beos*)
+    lt_cv_dlopen="load_add_on"
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+    ;;
+
+  mingw* | pw32* | cegcc*)
+    lt_cv_dlopen="LoadLibrary"
+    lt_cv_dlopen_libs=
+    ;;
+
+  cygwin*)
+    lt_cv_dlopen="dlopen"
+    lt_cv_dlopen_libs=
+    ;;
+
+  darwin*)
+  # If libdl is installed, we need to link against it.
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5
+$as_echo_n "checking for dlopen in -ldl... " >&6; }
+if ${ac_cv_lib_dl_dlopen+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-ldl  $LIBS"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dlopen ();
+int
+main ()
+{
+return dlopen ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_dl_dlopen=yes
+else
+  ac_cv_lib_dl_dlopen=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5
+$as_echo "$ac_cv_lib_dl_dlopen" >&6; }
+if test "x$ac_cv_lib_dl_dlopen" = xyes; then :
+  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"
+else
+
+    lt_cv_dlopen="dyld"
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+
+fi
+
+    ;;
+
+  *)
+    ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load"
+if test "x$ac_cv_func_shl_load" = xyes; then :
+  lt_cv_dlopen="shl_load"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5
+$as_echo_n "checking for shl_load in -ldld... " >&6; }
+if ${ac_cv_lib_dld_shl_load+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-ldld  $LIBS"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char shl_load ();
+int
+main ()
+{
+return shl_load ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_dld_shl_load=yes
+else
+  ac_cv_lib_dld_shl_load=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5
+$as_echo "$ac_cv_lib_dld_shl_load" >&6; }
+if test "x$ac_cv_lib_dld_shl_load" = xyes; then :
+  lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"
+else
+  ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen"
+if test "x$ac_cv_func_dlopen" = xyes; then :
+  lt_cv_dlopen="dlopen"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5
+$as_echo_n "checking for dlopen in -ldl... " >&6; }
+if ${ac_cv_lib_dl_dlopen+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-ldl  $LIBS"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dlopen ();
+int
+main ()
+{
+return dlopen ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_dl_dlopen=yes
+else
+  ac_cv_lib_dl_dlopen=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5
+$as_echo "$ac_cv_lib_dl_dlopen" >&6; }
+if test "x$ac_cv_lib_dl_dlopen" = xyes; then :
+  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5
+$as_echo_n "checking for dlopen in -lsvld... " >&6; }
+if ${ac_cv_lib_svld_dlopen+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-lsvld  $LIBS"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dlopen ();
+int
+main ()
+{
+return dlopen ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_svld_dlopen=yes
+else
+  ac_cv_lib_svld_dlopen=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5
+$as_echo "$ac_cv_lib_svld_dlopen" >&6; }
+if test "x$ac_cv_lib_svld_dlopen" = xyes; then :
+  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5
+$as_echo_n "checking for dld_link in -ldld... " >&6; }
+if ${ac_cv_lib_dld_dld_link+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_check_lib_save_LIBS=$LIBS
+LIBS="-ldld  $LIBS"
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
+#ifdef __cplusplus
+extern "C"
+#endif
+char dld_link ();
+int
+main ()
+{
+return dld_link ();
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_lib_dld_dld_link=yes
+else
+  ac_cv_lib_dld_dld_link=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+LIBS=$ac_check_lib_save_LIBS
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5
+$as_echo "$ac_cv_lib_dld_dld_link" >&6; }
+if test "x$ac_cv_lib_dld_dld_link" = xyes; then :
+  lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"
+fi
+
+
+fi
+
+
+fi
+
+
+fi
+
+
+fi
+
+
+fi
+
+    ;;
+  esac
+
+  if test "x$lt_cv_dlopen" != xno; then
+    enable_dlopen=yes
+  else
+    enable_dlopen=no
+  fi
+
+  case $lt_cv_dlopen in
+  dlopen)
+    save_CPPFLAGS="$CPPFLAGS"
+    test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H"
+
+    save_LDFLAGS="$LDFLAGS"
+    wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\"
+
+    save_LIBS="$LIBS"
+    LIBS="$lt_cv_dlopen_libs $LIBS"
+
+    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5
+$as_echo_n "checking whether a program can dlopen itself... " >&6; }
+if ${lt_cv_dlopen_self+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  	  if test "$cross_compiling" = yes; then :
+  lt_cv_dlopen_self=cross
+else
+  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+  lt_status=$lt_dlunknown
+  cat > conftest.$ac_ext <<_LT_EOF
+#line $LINENO "configure"
+#include "confdefs.h"
+
+#if HAVE_DLFCN_H
+#include <dlfcn.h>
+#endif
+
+#include <stdio.h>
+
+#ifdef RTLD_GLOBAL
+#  define LT_DLGLOBAL		RTLD_GLOBAL
+#else
+#  ifdef DL_GLOBAL
+#    define LT_DLGLOBAL		DL_GLOBAL
+#  else
+#    define LT_DLGLOBAL		0
+#  endif
+#endif
+
+/* We may have to define LT_DLLAZY_OR_NOW on the command line if we
+   find out it does not work on some platform. */
+#ifndef LT_DLLAZY_OR_NOW
+#  ifdef RTLD_LAZY
+#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
+#  else
+#    ifdef DL_LAZY
+#      define LT_DLLAZY_OR_NOW		DL_LAZY
+#    else
+#      ifdef RTLD_NOW
+#        define LT_DLLAZY_OR_NOW	RTLD_NOW
+#      else
+#        ifdef DL_NOW
+#          define LT_DLLAZY_OR_NOW	DL_NOW
+#        else
+#          define LT_DLLAZY_OR_NOW	0
+#        endif
+#      endif
+#    endif
+#  endif
+#endif
+
+/* When -fvisibility=hidden is used, assume the code has been annotated
+   correspondingly for the symbols needed.  */
+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+void fnord () __attribute__((visibility("default")));
+#endif
+
+void fnord () { int i=42; }
+int main ()
+{
+  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+  int status = $lt_dlunknown;
+
+  if (self)
+    {
+      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
+      else
+        {
+	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
+          else puts (dlerror ());
+	}
+      /* dlclose (self); */
+    }
+  else
+    puts (dlerror ());
+
+  return status;
+}
+_LT_EOF
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+  (eval $ac_link) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then
+    (./conftest; exit; ) >&5 2>/dev/null
+    lt_status=$?
+    case x$lt_status in
+      x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;;
+      x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;;
+      x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;;
+    esac
+  else :
+    # compilation failed
+    lt_cv_dlopen_self=no
+  fi
+fi
+rm -fr conftest*
+
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5
+$as_echo "$lt_cv_dlopen_self" >&6; }
+
+    if test "x$lt_cv_dlopen_self" = xyes; then
+      wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\"
+      { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5
+$as_echo_n "checking whether a statically linked program can dlopen itself... " >&6; }
+if ${lt_cv_dlopen_self_static+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  	  if test "$cross_compiling" = yes; then :
+  lt_cv_dlopen_self_static=cross
+else
+  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+  lt_status=$lt_dlunknown
+  cat > conftest.$ac_ext <<_LT_EOF
+#line $LINENO "configure"
+#include "confdefs.h"
+
+#if HAVE_DLFCN_H
+#include <dlfcn.h>
+#endif
+
+#include <stdio.h>
+
+#ifdef RTLD_GLOBAL
+#  define LT_DLGLOBAL		RTLD_GLOBAL
+#else
+#  ifdef DL_GLOBAL
+#    define LT_DLGLOBAL		DL_GLOBAL
+#  else
+#    define LT_DLGLOBAL		0
+#  endif
+#endif
+
+/* We may have to define LT_DLLAZY_OR_NOW on the command line if we
+   find out it does not work on some platform. */
+#ifndef LT_DLLAZY_OR_NOW
+#  ifdef RTLD_LAZY
+#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
+#  else
+#    ifdef DL_LAZY
+#      define LT_DLLAZY_OR_NOW		DL_LAZY
+#    else
+#      ifdef RTLD_NOW
+#        define LT_DLLAZY_OR_NOW	RTLD_NOW
+#      else
+#        ifdef DL_NOW
+#          define LT_DLLAZY_OR_NOW	DL_NOW
+#        else
+#          define LT_DLLAZY_OR_NOW	0
+#        endif
+#      endif
+#    endif
+#  endif
+#endif
+
+/* When -fvisibility=hidden is used, assume the code has been annotated
+   correspondingly for the symbols needed.  */
+#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+void fnord () __attribute__((visibility("default")));
+#endif
+
+void fnord () { int i=42; }
+int main ()
+{
+  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+  int status = $lt_dlunknown;
+
+  if (self)
+    {
+      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
+      else
+        {
+	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
+          else puts (dlerror ());
+	}
+      /* dlclose (self); */
+    }
+  else
+    puts (dlerror ());
+
+  return status;
+}
+_LT_EOF
+  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
+  (eval $ac_link) 2>&5
+  ac_status=$?
+  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
+  test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then
+    (./conftest; exit; ) >&5 2>/dev/null
+    lt_status=$?
+    case x$lt_status in
+      x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;;
+      x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;;
+      x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;;
+    esac
+  else :
+    # compilation failed
+    lt_cv_dlopen_self_static=no
+  fi
+fi
+rm -fr conftest*
+
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5
+$as_echo "$lt_cv_dlopen_self_static" >&6; }
+    fi
+
+    CPPFLAGS="$save_CPPFLAGS"
+    LDFLAGS="$save_LDFLAGS"
+    LIBS="$save_LIBS"
+    ;;
+  esac
+
+  case $lt_cv_dlopen_self in
+  yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;;
+  *) enable_dlopen_self=unknown ;;
+  esac
+
+  case $lt_cv_dlopen_self_static in
+  yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;;
+  *) enable_dlopen_self_static=unknown ;;
+  esac
+fi
+
+striplib=
+old_striplib=
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5
+$as_echo_n "checking whether stripping libraries is possible... " >&6; }
+if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then
+  test -z "$old_striplib" && old_striplib="$STRIP --strip-debug"
+  test -z "$striplib" && striplib="$STRIP --strip-unneeded"
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+else
+# FIXME - insert some real tests; host_os isn't really good enough
+  case $host_os in
+  darwin*)
+    if test -n "$STRIP" ; then
+      striplib="$STRIP -x"
+      old_striplib="$STRIP -S"
+      { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
+$as_echo "yes" >&6; }
+    else
+      { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+    fi
+    ;;
+  *)
+    { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+    ;;
+  esac
+fi
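+# Summary: only GNU strip (identified via `$STRIP -V') is trusted in
+# general; on Darwin the vendor strip is used with -x (remove local
+# symbols) and -S (remove debugging symbols), which are safe for shared
+# libraries.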
+
+  # Report which library types will actually be built
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5
+$as_echo_n "checking if libtool supports shared libraries... " >&6; }
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5
+$as_echo "$can_build_shared" >&6; }
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5
+$as_echo_n "checking whether to build shared libraries... " >&6; }
+  test "$can_build_shared" = "no" && enable_shared=no
+
+  # On AIX, shared libraries and static libraries use the same namespace, and
+  # are all built from PIC.
+  case $host_os in
+  aix3*)
+    test "$enable_shared" = yes && enable_static=no
+    if test -n "$RANLIB"; then
+      archive_cmds="$archive_cmds~\$RANLIB \$lib"
+      postinstall_cmds='$RANLIB $lib'
+    fi
+    ;;
+
+  aix[4-9]*)
+    if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
+      test "$enable_shared" = yes && enable_static=no
+    fi
+    ;;
+  esac
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5
+$as_echo "$enable_shared" >&6; }
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5
+$as_echo_n "checking whether to build static libraries... " >&6; }
+  # Make sure either enable_shared or enable_static is yes.
+  test "$enable_shared" = yes || enable_static=yes
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5
+$as_echo "$enable_static" >&6; }
+
+
+
+
+fi
+ac_ext=c
+ac_cpp='$CPP $CPPFLAGS'
+ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
+ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
+ac_compiler_gnu=$ac_cv_c_compiler_gnu
+
+CC="$lt_save_CC"
+
+        ac_config_commands="$ac_config_commands libtool"
+
+
+
+
+# Only expand once:
+
+
+ if test x$GCC = xyes; then
+  GCC_TRUE=
+  GCC_FALSE='#'
+else
+  GCC_TRUE='#'
+  GCC_FALSE=
+fi
+
+
+# Checks for libraries.
+
+# Checks for header files.
+
+# Checks for typedefs, structures, and compiler characteristics.
+ac_fn_c_find_intX_t "$LINENO" "32" "ac_cv_c_int32_t"
+case $ac_cv_c_int32_t in #(
+  no|yes) ;; #(
+  *)
+
+cat >>confdefs.h <<_ACEOF
+#define int32_t $ac_cv_c_int32_t
+_ACEOF
+;;
+esac
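+# If ac_fn_c_find_intX_t found a substitute type (e.g. plain `int'), the
+# confdefs fragment above makes int32_t usable via a #define; on a C99
+# system the check caches `yes' and nothing extra is defined.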
+
+
+
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for long long int" >&5
+$as_echo_n "checking for long long int... " >&6; }
+if ${ac_cv_type_long_long_int+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+  /* For now, do not test the preprocessor; as of 2007 there are too many
+	 implementations with broken preprocessors.  Perhaps this can
+	 be revisited in 2012.  In the meantime, code should not expect
+	 #if to work with literals wider than 32 bits.  */
+      /* Test literals.  */
+      long long int ll = 9223372036854775807ll;
+      long long int nll = -9223372036854775807LL;
+      unsigned long long int ull = 18446744073709551615ULL;
+      /* Test constant expressions.   */
+      typedef int a[((-9223372036854775807LL < 0 && 0 < 9223372036854775807ll)
+		     ? 1 : -1)];
+      typedef int b[(18446744073709551615ULL <= (unsigned long long int) -1
+		     ? 1 : -1)];
+      int i = 63;
+int
+main ()
+{
+/* Test availability of runtime routines for shift and division.  */
+      long long int llmax = 9223372036854775807ll;
+      unsigned long long int ullmax = 18446744073709551615ull;
+      return ((ll << 63) | (ll >> 63) | (ll < i) | (ll > i)
+	      | (llmax / ll) | (llmax % ll)
+	      | (ull << 63) | (ull >> 63) | (ull << i) | (ull >> i)
+	      | (ullmax / ull) | (ullmax % ull));
+  ;
+  return 0;
+}
+
+_ACEOF
+if ac_fn_c_try_link "$LINENO"; then :
+  			if test "$cross_compiling" = yes; then :
+  ac_cv_type_long_long_int=yes
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#include <limits.h>
+	       #ifndef LLONG_MAX
+	       # define HALF \
+			(1LL << (sizeof (long long int) * CHAR_BIT - 2))
+	       # define LLONG_MAX (HALF - 1 + HALF)
+	       #endif
+int
+main ()
+{
+long long int n = 1;
+	       int i;
+	       for (i = 0; ; i++)
+		 {
+		   long long int m = n << i;
+		   if (m >> i != n)
+		     return 1;
+		   if (LLONG_MAX / 2 < m)
+		     break;
+		 }
+	       return 0;
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_run "$LINENO"; then :
+  ac_cv_type_long_long_int=yes
+else
+  ac_cv_type_long_long_int=no
+fi
+rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
+  conftest.$ac_objext conftest.beam conftest.$ac_ext
+fi
+
+else
+  ac_cv_type_long_long_int=no
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext conftest.$ac_ext
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_type_long_long_int" >&5
+$as_echo "$ac_cv_type_long_long_int" >&6; }
+  if test $ac_cv_type_long_long_int = yes; then
+
+$as_echo "#define HAVE_LONG_LONG_INT 1" >>confdefs.h
+
+  fi
+
+case $ac_cv_type_long_long_int in
+     yes) json_have_long_long=1;;
+     *) json_have_long_long=0;;
+esac
+
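+# Sketch of how this result is consumed (an assumption based on the file list
+# below): src/jansson_config.h is among the configured files, so a template
+# line such as '#define JSON_INTEGER_IS_LONG_LONG @json_have_long_long@'
+# would be rewritten by config.status with the 1 or 0 chosen here.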
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for inline" >&5
+$as_echo_n "checking for inline... " >&6; }
+if ${ac_cv_c_inline+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  ac_cv_c_inline=no
+for ac_kw in inline __inline__ __inline; do
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+#ifndef __cplusplus
+typedef int foo_t;
+static $ac_kw foo_t static_foo () {return 0; }
+$ac_kw foo_t foo () {return 0; }
+#endif
+
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_c_inline=$ac_kw
+fi
+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
+  test "$ac_cv_c_inline" != no && break
+done
+
+fi
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_inline" >&5
+$as_echo "$ac_cv_c_inline" >&6; }
+
+case $ac_cv_c_inline in
+  inline | yes) ;;
+  *)
+    case $ac_cv_c_inline in
+      no) ac_val=;;
+      *) ac_val=$ac_cv_c_inline;;
+    esac
+    cat >>confdefs.h <<_ACEOF
+#ifndef __cplusplus
+#define inline $ac_val
+#endif
+_ACEOF
+    ;;
+esac
+
+case $ac_cv_c_inline in
+    yes) json_inline=inline;;
+    no) json_inline=;;
+    *) json_inline=$ac_cv_c_inline;;
+esac
+
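+# Similarly (a sketch, same assumption): a template line
+# '#define JSON_INLINE @json_inline@' in src/jansson_config.h.in would become
+# 'inline', '__inline__', '__inline', or empty, matching json_inline above.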
+
+# Checks for library functions.
+
+ac_config_files="$ac_config_files jansson.pc Makefile doc/Makefile src/Makefile src/jansson_config.h test/Makefile test/bin/Makefile test/suites/Makefile test/suites/api/Makefile"
+
+cat >confcache <<\_ACEOF
+# This file is a shell script that caches the results of configure
+# tests run on this system so they can be shared between configure
+# scripts and configure runs, see configure's option --config-cache.
+# It is not useful on other systems.  If it contains results you don't
+# want to keep, you may remove or edit it.
+#
+# config.status only pays attention to the cache file if you give it
+# the --recheck option to rerun configure.
+#
+# `ac_cv_env_foo' variables (set or unset) will be overridden when
+# loading this file, other *unset* `ac_cv_foo' will be assigned the
+# following values.
+
+_ACEOF
+
+# The following way of writing the cache mishandles newlines in values,
+# but we know of no workaround that is simple, portable, and efficient.
+# So, we kill variables containing newlines.
+# Ultrix sh set writes to stderr and can't be redirected directly,
+# and sets the high bit in the cache file unless we assign to the vars.
+(
+  for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do
+    eval ac_val=\$$ac_var
+    case $ac_val in #(
+    *${as_nl}*)
+      case $ac_var in #(
+      *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
+$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
+      esac
+      case $ac_var in #(
+      _ | IFS | as_nl) ;; #(
+      BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
+      *) { eval $ac_var=; unset $ac_var;} ;;
+      esac ;;
+    esac
+  done
+
+  (set) 2>&1 |
+    case $as_nl`(ac_space=' '; set) 2>&1` in #(
+    *${as_nl}ac_space=\ *)
+      # `set' does not quote correctly, so add quotes: double-quote
+      # substitution turns \\\\ into \\, and sed turns \\ into \.
+      sed -n \
+	"s/'/'\\\\''/g;
+	  s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p"
+      ;; #(
+    *)
+      # `set' quotes correctly as required by POSIX, so do not add quotes.
+      sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
+      ;;
+    esac |
+    sort
+) |
+  sed '
+     /^ac_cv_env_/b end
+     t clear
+     :clear
+     s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/
+     t end
+     s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/
+     :end' >>confcache
+if diff "$cache_file" confcache >/dev/null 2>&1; then :; else
+  if test -w "$cache_file"; then
+    if test "x$cache_file" != "x/dev/null"; then
+      { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5
+$as_echo "$as_me: updating cache $cache_file" >&6;}
+      if test ! -f "$cache_file" || test -h "$cache_file"; then
+	cat confcache >"$cache_file"
+      else
+        case $cache_file in #(
+        */* | ?:*)
+	  mv -f confcache "$cache_file"$$ &&
+	  mv -f "$cache_file"$$ "$cache_file" ;; #(
+        *)
+	  mv -f confcache "$cache_file" ;;
+	esac
+      fi
+    fi
+  else
+    { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5
+$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;}
+  fi
+fi
+rm -f confcache
+
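+# Sketch of the cache format written above: each entry is a self-defaulting
+# assignment, so sourcing the file never overrides a value already set in the
+# environment, e.g.
+#   ac_cv_c_inline=${ac_cv_c_inline=inline}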
+test "x$prefix" = xNONE && prefix=$ac_default_prefix
+# Let make expand exec_prefix.
+test "x$exec_prefix" = xNONE && exec_prefix='${prefix}'
+
+DEFS=-DHAVE_CONFIG_H
+
+ac_libobjs=
+ac_ltlibobjs=
+U=
+for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue
+  # 1. Remove the extension, and $U if already installed.
+  ac_script='s/\$U\././;s/\.o$//;s/\.obj$//'
+  ac_i=`$as_echo "$ac_i" | sed "$ac_script"`
+  # 2. Prepend LIBOBJDIR.  When used with automake>=1.10 LIBOBJDIR
+  #    will be set to the directory where LIBOBJS objects are built.
+  as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext"
+  as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo'
+done
+LIBOBJS=$ac_libobjs
+
+LTLIBOBJS=$ac_ltlibobjs
+
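+# Sketch: the loop above normalizes LIBOBJS entries, so LIBOBJS='foo.o bar.obj'
+# yields ac_libobjs=' ${LIBOBJDIR}foo$U.o ${LIBOBJDIR}bar$U.o' ($U is empty
+# unless ansi2knr is in use) plus the matching .lo list in ac_ltlibobjs.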
+
+if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then
+  as_fn_error $? "conditional \"AMDEP\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
+if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then
+  as_fn_error $? "conditional \"am__fastdepCC\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
+if test -z "${GCC_TRUE}" && test -z "${GCC_FALSE}"; then
+  as_fn_error $? "conditional \"GCC\" was never defined.
+Usually this means the macro was only invoked conditionally." "$LINENO" 5
+fi
+
+: "${CONFIG_STATUS=./config.status}"
+ac_write_fail=0
+ac_clean_files_save=$ac_clean_files
+ac_clean_files="$ac_clean_files $CONFIG_STATUS"
+{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5
+$as_echo "$as_me: creating $CONFIG_STATUS" >&6;}
+as_write_fail=0
+cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1
+#! $SHELL
+# Generated by $as_me.
+# Run this file to recreate the current configuration.
+# Compiler output produced by configure, useful for debugging
+# configure, is in config.log if it exists.
+
+debug=false
+ac_cs_recheck=false
+ac_cs_silent=false
+
+SHELL=\${CONFIG_SHELL-$SHELL}
+export SHELL
+_ASEOF
+cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1
+## -------------------- ##
+## M4sh Initialization. ##
+## -------------------- ##
+
+# Be more Bourne compatible
+DUALCASE=1; export DUALCASE # for MKS sh
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in #(
+  *posix*) :
+    set -o posix ;; #(
+  *) :
+     ;;
+esac
+fi
+
+
+as_nl='
+'
+export as_nl
+# Printing a long string crashes Solaris 7 /usr/bin/printf.
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
+as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
+# Prefer a ksh shell builtin over an external printf program on Solaris,
+# but without wasting forks for bash or zsh.
+if test -z "$BASH_VERSION$ZSH_VERSION" \
+    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='print -r --'
+  as_echo_n='print -rn --'
+elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
+  as_echo='printf %s\n'
+  as_echo_n='printf %s'
+else
+  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
+    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
+    as_echo_n='/usr/ucb/echo -n'
+  else
+    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
+    as_echo_n_body='eval
+      arg=$1;
+      case $arg in #(
+      *"$as_nl"*)
+	expr "X$arg" : "X\\(.*\\)$as_nl";
+	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
+      esac;
+      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
+    '
+    export as_echo_n_body
+    as_echo_n='sh -c $as_echo_n_body as_echo'
+  fi
+  export as_echo_body
+  as_echo='sh -c $as_echo_body as_echo'
+fi
+
+# The user is always right.
+if test "${PATH_SEPARATOR+set}" != set; then
+  PATH_SEPARATOR=:
+  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
+    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
+      PATH_SEPARATOR=';'
+  }
+fi
+
+
+# IFS
+# We need space, tab and new line, in precisely that order.  Quoting is
+# there to prevent editors from complaining about space-tab.
+# (If _AS_PATH_WALK were called with IFS unset, it would disable word
+# splitting by setting IFS to empty value.)
+IFS=" ""	$as_nl"
+
+# Find who we are.  Look in the path if we contain no directory separator.
+as_myself=
+case $0 in #((
+  *[\\/]* ) as_myself=$0 ;;
+  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
+  done
+IFS=$as_save_IFS
+
+     ;;
+esac
+# We did not find ourselves, most probably we were run as `sh COMMAND'
+# in which case we are not to be found in the path.
+if test "x$as_myself" = x; then
+  as_myself=$0
+fi
+if test ! -f "$as_myself"; then
+  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
+  exit 1
+fi
+
+# Unset variables that we do not need and which cause bugs (e.g. in
+# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
+# suppresses any "Segmentation fault" message there.  '((' could
+# trigger a bug in pdksh 5.2.14.
+for as_var in BASH_ENV ENV MAIL MAILPATH
+do eval test x\${$as_var+set} = xset \
+  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
+done
+PS1='$ '
+PS2='> '
+PS4='+ '
+
+# NLS nuisances.
+LC_ALL=C
+export LC_ALL
+LANGUAGE=C
+export LANGUAGE
+
+# CDPATH.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+
+# as_fn_error STATUS ERROR [LINENO LOG_FD]
+# ----------------------------------------
+# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
+# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
+# script with STATUS, using 1 if that was 0.
+as_fn_error ()
+{
+  as_status=$1; test $as_status -eq 0 && as_status=1
+  if test "$4"; then
+    as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+    $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
+  fi
+  $as_echo "$as_me: error: $2" >&2
+  as_fn_exit $as_status
+} # as_fn_error
+
+
+# as_fn_set_status STATUS
+# -----------------------
+# Set $? to STATUS, without forking.
+as_fn_set_status ()
+{
+  return $1
+} # as_fn_set_status
+
+# as_fn_exit STATUS
+# -----------------
+# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
+as_fn_exit ()
+{
+  set +e
+  as_fn_set_status $1
+  exit $1
+} # as_fn_exit
+
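+# Illustrative use (sketch): because as_fn_exit turns off 'set -e' first, the
+# requested status survives cleanup traps, e.g.
+#   (as_fn_exit 3); echo $?    # prints 3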
+# as_fn_unset VAR
+# ---------------
+# Portably unset VAR.
+as_fn_unset ()
+{
+  { eval $1=; unset $1;}
+}
+as_unset=as_fn_unset
+# as_fn_append VAR VALUE
+# ----------------------
+# Append the text in VALUE to the end of the definition contained in VAR. Take
+# advantage of any shell optimizations that allow amortized linear growth over
+# repeated appends, instead of the typical quadratic growth present in naive
+# implementations.
+if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
+  eval 'as_fn_append ()
+  {
+    eval $1+=\$2
+  }'
+else
+  as_fn_append ()
+  {
+    eval $1=\$$1\$2
+  }
+fi # as_fn_append
+
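+# Illustrative use of as_fn_append (sketch):
+#   flags=
+#   for f in -g -O2 -Wall; do as_fn_append flags " $f"; done
+#   # flags is now ' -g -O2 -Wall'; shells with += append in amortized O(1).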
+# as_fn_arith ARG...
+# ------------------
+# Perform arithmetic evaluation on the ARGs, and store the result in the
+# global $as_val. Take advantage of shells that can avoid forks. The arguments
+# must be portable across $(()) and expr.
+if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
+  eval 'as_fn_arith ()
+  {
+    as_val=$(( $* ))
+  }'
+else
+  as_fn_arith ()
+  {
+    as_val=`expr "$@" || test $? -eq 1`
+  }
+fi # as_fn_arith
+
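+# Illustrative use of as_fn_arith (sketch): the result is left in $as_val, and
+# the arguments must be valid for both $(( )) and expr, e.g.
+#   as_fn_arith 40 + 2; test "$as_val" = 42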
+
+if expr a : '\(a\)' >/dev/null 2>&1 &&
+   test "X`expr 00001 : '.*\(...\)'`" = X001; then
+  as_expr=expr
+else
+  as_expr=false
+fi
+
+if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
+  as_basename=basename
+else
+  as_basename=false
+fi
+
+if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
+  as_dirname=dirname
+else
+  as_dirname=false
+fi
+
+as_me=`$as_basename -- "$0" ||
+$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
+	 X"$0" : 'X\(//\)$' \| \
+	 X"$0" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X/"$0" |
+    sed '/^.*\/\([^/][^/]*\)\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\/\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+
+# Avoid depending upon Character Ranges.
+as_cr_letters='abcdefghijklmnopqrstuvwxyz'
+as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+as_cr_Letters=$as_cr_letters$as_cr_LETTERS
+as_cr_digits='0123456789'
+as_cr_alnum=$as_cr_Letters$as_cr_digits
+
+ECHO_C= ECHO_N= ECHO_T=
+case `echo -n x` in #(((((
+-n*)
+  case `echo 'xy\c'` in
+  *c*) ECHO_T='	';;	# ECHO_T is single tab character.
+  xy)  ECHO_C='\c';;
+  *)   echo `echo ksh88 bug on AIX 6.1` > /dev/null
+       ECHO_T='	';;
+  esac;;
+*)
+  ECHO_N='-n';;
+esac
+
+rm -f conf$$ conf$$.exe conf$$.file
+if test -d conf$$.dir; then
+  rm -f conf$$.dir/conf$$.file
+else
+  rm -f conf$$.dir
+  mkdir conf$$.dir 2>/dev/null
+fi
+if (echo >conf$$.file) 2>/dev/null; then
+  if ln -s conf$$.file conf$$ 2>/dev/null; then
+    as_ln_s='ln -s'
+    # ... but there are two gotchas:
+    # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
+    # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
+    # In both cases, we have to default to `cp -p'.
+    ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
+      as_ln_s='cp -p'
+  elif ln conf$$.file conf$$ 2>/dev/null; then
+    as_ln_s=ln
+  else
+    as_ln_s='cp -p'
+  fi
+else
+  as_ln_s='cp -p'
+fi
+rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
+rmdir conf$$.dir 2>/dev/null
+
+
+# as_fn_mkdir_p
+# -------------
+# Create "$as_dir" as a directory, including parents if necessary.
+as_fn_mkdir_p ()
+{
+
+  case $as_dir in #(
+  -*) as_dir=./$as_dir;;
+  esac
+  test -d "$as_dir" || eval $as_mkdir_p || {
+    as_dirs=
+    while :; do
+      case $as_dir in #(
+      *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
+      *) as_qdir=$as_dir;;
+      esac
+      as_dirs="'$as_qdir' $as_dirs"
+      as_dir=`$as_dirname -- "$as_dir" ||
+$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$as_dir" : 'X\(//\)[^/]' \| \
+	 X"$as_dir" : 'X\(//\)$' \| \
+	 X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$as_dir" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+      test -d "$as_dir" && break
+    done
+    test -z "$as_dirs" || eval "mkdir $as_dirs"
+  } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
+
+
+} # as_fn_mkdir_p
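+# Calling convention (sketch): the target directory is passed via $as_dir
+# rather than as an argument, mirroring the uses later in this script:
+#   as_dir=some/nested/dir; as_fn_mkdir_p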
+if mkdir -p . 2>/dev/null; then
+  as_mkdir_p='mkdir -p "$as_dir"'
+else
+  test -d ./-p && rmdir ./-p
+  as_mkdir_p=false
+fi
+
+if test -x / >/dev/null 2>&1; then
+  as_test_x='test -x'
+else
+  if ls -dL / >/dev/null 2>&1; then
+    as_ls_L_option=L
+  else
+    as_ls_L_option=
+  fi
+  as_test_x='
+    eval sh -c '\''
+      if test -d "$1"; then
+	test -d "$1/.";
+      else
+	case $1 in #(
+	-*)set "./$1";;
+	esac;
+	case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #((
+	???[sx]*):;;*)false;;esac;fi
+    '\'' sh
+  '
+fi
+as_executable_p=$as_test_x
+
+# Sed expression to map a string onto a valid CPP name.
+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
+
+# Sed expression to map a string onto a valid variable name.
+as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
+
+
+exec 6>&1
+## ----------------------------------- ##
+## Main body of $CONFIG_STATUS script. ##
+## ----------------------------------- ##
+_ASEOF
+test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+# Save the log message, to keep $0 and so on meaningful, and to
+# report actual input values of CONFIG_FILES etc. instead of their
+# values after options handling.
+ac_log="
+This file was extended by jansson $as_me 2.1, which was
+generated by GNU Autoconf 2.68.  Invocation command line was
+
+  CONFIG_FILES    = $CONFIG_FILES
+  CONFIG_HEADERS  = $CONFIG_HEADERS
+  CONFIG_LINKS    = $CONFIG_LINKS
+  CONFIG_COMMANDS = $CONFIG_COMMANDS
+  $ $0 $@
+
+on `(hostname || uname -n) 2>/dev/null | sed 1q`
+"
+
+_ACEOF
+
+case $ac_config_files in *"
+"*) set x $ac_config_files; shift; ac_config_files=$*;;
+esac
+
+case $ac_config_headers in *"
+"*) set x $ac_config_headers; shift; ac_config_headers=$*;;
+esac
+
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+# Files that config.status was made for.
+config_files="$ac_config_files"
+config_headers="$ac_config_headers"
+config_commands="$ac_config_commands"
+
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+ac_cs_usage="\
+\`$as_me' instantiates files and other configuration actions
+from templates according to the current configuration.  Unless the files
+and actions are specified as TAGs, all are instantiated by default.
+
+Usage: $0 [OPTION]... [TAG]...
+
+  -h, --help       print this help, then exit
+  -V, --version    print version number and configuration settings, then exit
+      --config     print configuration, then exit
+  -q, --quiet, --silent
+                   do not print progress messages
+  -d, --debug      don't remove temporary files
+      --recheck    update $as_me by reconfiguring in the same conditions
+      --file=FILE[:TEMPLATE]
+                   instantiate the configuration file FILE
+      --header=FILE[:TEMPLATE]
+                   instantiate the configuration header FILE
+
+Configuration files:
+$config_files
+
+Configuration headers:
+$config_headers
+
+Configuration commands:
+$config_commands
+
+Report bugs to <petri@digip.org>."
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
+ac_cs_version="\\
+jansson config.status 2.1
+configured by $0, generated by GNU Autoconf 2.68,
+  with options \\"\$ac_cs_config\\"
+
+Copyright (C) 2010 Free Software Foundation, Inc.
+This config.status script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it."
+
+ac_pwd='$ac_pwd'
+srcdir='$srcdir'
+INSTALL='$INSTALL'
+MKDIR_P='$MKDIR_P'
+AWK='$AWK'
+test -n "\$AWK" || AWK=awk
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+# The default lists apply if the user does not specify any file.
+ac_need_defaults=:
+while test $# != 0
+do
+  case $1 in
+  --*=?*)
+    ac_option=`expr "X$1" : 'X\([^=]*\)='`
+    ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'`
+    ac_shift=:
+    ;;
+  --*=)
+    ac_option=`expr "X$1" : 'X\([^=]*\)='`
+    ac_optarg=
+    ac_shift=:
+    ;;
+  *)
+    ac_option=$1
+    ac_optarg=$2
+    ac_shift=shift
+    ;;
+  esac
+
+  case $ac_option in
+  # Handling of the options.
+  -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r)
+    ac_cs_recheck=: ;;
+  --version | --versio | --versi | --vers | --ver | --ve | --v | -V )
+    $as_echo "$ac_cs_version"; exit ;;
+  --config | --confi | --conf | --con | --co | --c )
+    $as_echo "$ac_cs_config"; exit ;;
+  --debug | --debu | --deb | --de | --d | -d )
+    debug=: ;;
+  --file | --fil | --fi | --f )
+    $ac_shift
+    case $ac_optarg in
+    *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
+    '') as_fn_error $? "missing file argument" ;;
+    esac
+    as_fn_append CONFIG_FILES " '$ac_optarg'"
+    ac_need_defaults=false;;
+  --header | --heade | --head | --hea )
+    $ac_shift
+    case $ac_optarg in
+    *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
+    esac
+    as_fn_append CONFIG_HEADERS " '$ac_optarg'"
+    ac_need_defaults=false;;
+  --he | --h)
+    # Conflict between --help and --header
+    as_fn_error $? "ambiguous option: \`$1'
+Try \`$0 --help' for more information.";;
+  --help | --hel | -h )
+    $as_echo "$ac_cs_usage"; exit ;;
+  -q | -quiet | --quiet | --quie | --qui | --qu | --q \
+  | -silent | --silent | --silen | --sile | --sil | --si | --s)
+    ac_cs_silent=: ;;
+
+  # This is an error.
+  -*) as_fn_error $? "unrecognized option: \`$1'
+Try \`$0 --help' for more information." ;;
+
+  *) as_fn_append ac_config_targets " $1"
+     ac_need_defaults=false ;;
+
+  esac
+  shift
+done
+
+ac_configure_extra_args=
+
+if $ac_cs_silent; then
+  exec 6>/dev/null
+  ac_configure_extra_args="$ac_configure_extra_args --silent"
+fi
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+if \$ac_cs_recheck; then
+  set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion
+  shift
+  \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6
+  CONFIG_SHELL='$SHELL'
+  export CONFIG_SHELL
+  exec "\$@"
+fi
+
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+exec 5>>config.log
+{
+  echo
+  sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX
+## Running $as_me. ##
+_ASBOX
+  $as_echo "$ac_log"
+} >&5
+
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+#
+# INIT-COMMANDS
+#
+AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"
+
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+sed_quote_subst='$sed_quote_subst'
+double_quote_subst='$double_quote_subst'
+delay_variable_subst='$delay_variable_subst'
+macro_version='`$ECHO "$macro_version" | $SED "$delay_single_quote_subst"`'
+macro_revision='`$ECHO "$macro_revision" | $SED "$delay_single_quote_subst"`'
+enable_shared='`$ECHO "$enable_shared" | $SED "$delay_single_quote_subst"`'
+enable_static='`$ECHO "$enable_static" | $SED "$delay_single_quote_subst"`'
+pic_mode='`$ECHO "$pic_mode" | $SED "$delay_single_quote_subst"`'
+enable_fast_install='`$ECHO "$enable_fast_install" | $SED "$delay_single_quote_subst"`'
+SHELL='`$ECHO "$SHELL" | $SED "$delay_single_quote_subst"`'
+ECHO='`$ECHO "$ECHO" | $SED "$delay_single_quote_subst"`'
+host_alias='`$ECHO "$host_alias" | $SED "$delay_single_quote_subst"`'
+host='`$ECHO "$host" | $SED "$delay_single_quote_subst"`'
+host_os='`$ECHO "$host_os" | $SED "$delay_single_quote_subst"`'
+build_alias='`$ECHO "$build_alias" | $SED "$delay_single_quote_subst"`'
+build='`$ECHO "$build" | $SED "$delay_single_quote_subst"`'
+build_os='`$ECHO "$build_os" | $SED "$delay_single_quote_subst"`'
+SED='`$ECHO "$SED" | $SED "$delay_single_quote_subst"`'
+Xsed='`$ECHO "$Xsed" | $SED "$delay_single_quote_subst"`'
+GREP='`$ECHO "$GREP" | $SED "$delay_single_quote_subst"`'
+EGREP='`$ECHO "$EGREP" | $SED "$delay_single_quote_subst"`'
+FGREP='`$ECHO "$FGREP" | $SED "$delay_single_quote_subst"`'
+LD='`$ECHO "$LD" | $SED "$delay_single_quote_subst"`'
+NM='`$ECHO "$NM" | $SED "$delay_single_quote_subst"`'
+LN_S='`$ECHO "$LN_S" | $SED "$delay_single_quote_subst"`'
+max_cmd_len='`$ECHO "$max_cmd_len" | $SED "$delay_single_quote_subst"`'
+ac_objext='`$ECHO "$ac_objext" | $SED "$delay_single_quote_subst"`'
+exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
+lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
+lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
+lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
+reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
+reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
+OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
+deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
+file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
+AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
+AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
+STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
+RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
+old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+old_postuninstall_cmds='`$ECHO "$old_postuninstall_cmds" | $SED "$delay_single_quote_subst"`'
+old_archive_cmds='`$ECHO "$old_archive_cmds" | $SED "$delay_single_quote_subst"`'
+lock_old_archive_extraction='`$ECHO "$lock_old_archive_extraction" | $SED "$delay_single_quote_subst"`'
+CC='`$ECHO "$CC" | $SED "$delay_single_quote_subst"`'
+CFLAGS='`$ECHO "$CFLAGS" | $SED "$delay_single_quote_subst"`'
+compiler='`$ECHO "$compiler" | $SED "$delay_single_quote_subst"`'
+GCC='`$ECHO "$GCC" | $SED "$delay_single_quote_subst"`'
+lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$delay_single_quote_subst"`'
+lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
+lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
+objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
+MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
+lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
+lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
+lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
+lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
+need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
+DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
+NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
+LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
+OTOOL='`$ECHO "$OTOOL" | $SED "$delay_single_quote_subst"`'
+OTOOL64='`$ECHO "$OTOOL64" | $SED "$delay_single_quote_subst"`'
+libext='`$ECHO "$libext" | $SED "$delay_single_quote_subst"`'
+shrext_cmds='`$ECHO "$shrext_cmds" | $SED "$delay_single_quote_subst"`'
+extract_expsyms_cmds='`$ECHO "$extract_expsyms_cmds" | $SED "$delay_single_quote_subst"`'
+archive_cmds_need_lc='`$ECHO "$archive_cmds_need_lc" | $SED "$delay_single_quote_subst"`'
+enable_shared_with_static_runtimes='`$ECHO "$enable_shared_with_static_runtimes" | $SED "$delay_single_quote_subst"`'
+export_dynamic_flag_spec='`$ECHO "$export_dynamic_flag_spec" | $SED "$delay_single_quote_subst"`'
+whole_archive_flag_spec='`$ECHO "$whole_archive_flag_spec" | $SED "$delay_single_quote_subst"`'
+compiler_needs_object='`$ECHO "$compiler_needs_object" | $SED "$delay_single_quote_subst"`'
+old_archive_from_new_cmds='`$ECHO "$old_archive_from_new_cmds" | $SED "$delay_single_quote_subst"`'
+old_archive_from_expsyms_cmds='`$ECHO "$old_archive_from_expsyms_cmds" | $SED "$delay_single_quote_subst"`'
+archive_cmds='`$ECHO "$archive_cmds" | $SED "$delay_single_quote_subst"`'
+archive_expsym_cmds='`$ECHO "$archive_expsym_cmds" | $SED "$delay_single_quote_subst"`'
+module_cmds='`$ECHO "$module_cmds" | $SED "$delay_single_quote_subst"`'
+module_expsym_cmds='`$ECHO "$module_expsym_cmds" | $SED "$delay_single_quote_subst"`'
+with_gnu_ld='`$ECHO "$with_gnu_ld" | $SED "$delay_single_quote_subst"`'
+allow_undefined_flag='`$ECHO "$allow_undefined_flag" | $SED "$delay_single_quote_subst"`'
+no_undefined_flag='`$ECHO "$no_undefined_flag" | $SED "$delay_single_quote_subst"`'
+hardcode_libdir_flag_spec='`$ECHO "$hardcode_libdir_flag_spec" | $SED "$delay_single_quote_subst"`'
+hardcode_libdir_flag_spec_ld='`$ECHO "$hardcode_libdir_flag_spec_ld" | $SED "$delay_single_quote_subst"`'
+hardcode_libdir_separator='`$ECHO "$hardcode_libdir_separator" | $SED "$delay_single_quote_subst"`'
+hardcode_direct='`$ECHO "$hardcode_direct" | $SED "$delay_single_quote_subst"`'
+hardcode_direct_absolute='`$ECHO "$hardcode_direct_absolute" | $SED "$delay_single_quote_subst"`'
+hardcode_minus_L='`$ECHO "$hardcode_minus_L" | $SED "$delay_single_quote_subst"`'
+hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_quote_subst"`'
+hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
+inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
+link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
+fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`'
+always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
+export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
+exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
+include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
+prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
+file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
+variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
+need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
+need_version='`$ECHO "$need_version" | $SED "$delay_single_quote_subst"`'
+version_type='`$ECHO "$version_type" | $SED "$delay_single_quote_subst"`'
+runpath_var='`$ECHO "$runpath_var" | $SED "$delay_single_quote_subst"`'
+shlibpath_var='`$ECHO "$shlibpath_var" | $SED "$delay_single_quote_subst"`'
+shlibpath_overrides_runpath='`$ECHO "$shlibpath_overrides_runpath" | $SED "$delay_single_quote_subst"`'
+libname_spec='`$ECHO "$libname_spec" | $SED "$delay_single_quote_subst"`'
+library_names_spec='`$ECHO "$library_names_spec" | $SED "$delay_single_quote_subst"`'
+soname_spec='`$ECHO "$soname_spec" | $SED "$delay_single_quote_subst"`'
+install_override_mode='`$ECHO "$install_override_mode" | $SED "$delay_single_quote_subst"`'
+postinstall_cmds='`$ECHO "$postinstall_cmds" | $SED "$delay_single_quote_subst"`'
+postuninstall_cmds='`$ECHO "$postuninstall_cmds" | $SED "$delay_single_quote_subst"`'
+finish_cmds='`$ECHO "$finish_cmds" | $SED "$delay_single_quote_subst"`'
+finish_eval='`$ECHO "$finish_eval" | $SED "$delay_single_quote_subst"`'
+hardcode_into_libs='`$ECHO "$hardcode_into_libs" | $SED "$delay_single_quote_subst"`'
+sys_lib_search_path_spec='`$ECHO "$sys_lib_search_path_spec" | $SED "$delay_single_quote_subst"`'
+sys_lib_dlsearch_path_spec='`$ECHO "$sys_lib_dlsearch_path_spec" | $SED "$delay_single_quote_subst"`'
+hardcode_action='`$ECHO "$hardcode_action" | $SED "$delay_single_quote_subst"`'
+enable_dlopen='`$ECHO "$enable_dlopen" | $SED "$delay_single_quote_subst"`'
+enable_dlopen_self='`$ECHO "$enable_dlopen_self" | $SED "$delay_single_quote_subst"`'
+enable_dlopen_self_static='`$ECHO "$enable_dlopen_self_static" | $SED "$delay_single_quote_subst"`'
+old_striplib='`$ECHO "$old_striplib" | $SED "$delay_single_quote_subst"`'
+striplib='`$ECHO "$striplib" | $SED "$delay_single_quote_subst"`'
+
+LTCC='$LTCC'
+LTCFLAGS='$LTCFLAGS'
+compiler='$compiler_DEFAULT'
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+\$1
+_LTECHO_EOF'
+}
+
+# Quote evaled strings.
+for var in SHELL \
+ECHO \
+SED \
+GREP \
+EGREP \
+FGREP \
+LD \
+NM \
+LN_S \
+lt_SP2NL \
+lt_NL2SP \
+reload_flag \
+OBJDUMP \
+deplibs_check_method \
+file_magic_cmd \
+AR \
+AR_FLAGS \
+STRIP \
+RANLIB \
+CC \
+CFLAGS \
+compiler \
+lt_cv_sys_global_symbol_pipe \
+lt_cv_sys_global_symbol_to_cdecl \
+lt_cv_sys_global_symbol_to_c_name_address \
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
+lt_prog_compiler_no_builtin_flag \
+lt_prog_compiler_wl \
+lt_prog_compiler_pic \
+lt_prog_compiler_static \
+lt_cv_prog_compiler_c_o \
+need_locks \
+DSYMUTIL \
+NMEDIT \
+LIPO \
+OTOOL \
+OTOOL64 \
+shrext_cmds \
+export_dynamic_flag_spec \
+whole_archive_flag_spec \
+compiler_needs_object \
+with_gnu_ld \
+allow_undefined_flag \
+no_undefined_flag \
+hardcode_libdir_flag_spec \
+hardcode_libdir_flag_spec_ld \
+hardcode_libdir_separator \
+fix_srcfile_path \
+exclude_expsyms \
+include_expsyms \
+file_list_spec \
+variables_saved_for_relink \
+libname_spec \
+library_names_spec \
+soname_spec \
+install_override_mode \
+finish_eval \
+old_striplib \
+striplib; do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[\\\\\\\`\\"\\\$]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\""
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+# Double-quote double-evaled strings.
+for var in reload_cmds \
+old_postinstall_cmds \
+old_postuninstall_cmds \
+old_archive_cmds \
+extract_expsyms_cmds \
+old_archive_from_new_cmds \
+old_archive_from_expsyms_cmds \
+archive_cmds \
+archive_expsym_cmds \
+module_cmds \
+module_expsym_cmds \
+export_symbols_cmds \
+prelink_cmds \
+postinstall_cmds \
+postuninstall_cmds \
+finish_cmds \
+sys_lib_search_path_spec \
+sys_lib_dlsearch_path_spec; do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[\\\\\\\`\\"\\\$]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+ac_aux_dir='$ac_aux_dir'
+xsi_shell='$xsi_shell'
+lt_shell_append='$lt_shell_append'
+
+# See if we are running on zsh, and set the options which allow our
+# commands through without removal of \ escapes.
+if test -n "\${ZSH_VERSION+set}" ; then
+   setopt NO_GLOB_SUBST
+fi
+
+
+    PACKAGE='$PACKAGE'
+    VERSION='$VERSION'
+    TIMESTAMP='$TIMESTAMP'
+    RM='$RM'
+    ofile='$ofile'
+
+
+
+
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+
+# Handling of arguments.
+for ac_config_target in $ac_config_targets
+do
+  case $ac_config_target in
+    "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;;
+    "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;;
+    "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;;
+    "jansson.pc") CONFIG_FILES="$CONFIG_FILES jansson.pc" ;;
+    "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;;
+    "doc/Makefile") CONFIG_FILES="$CONFIG_FILES doc/Makefile" ;;
+    "src/Makefile") CONFIG_FILES="$CONFIG_FILES src/Makefile" ;;
+    "src/jansson_config.h") CONFIG_FILES="$CONFIG_FILES src/jansson_config.h" ;;
+    "test/Makefile") CONFIG_FILES="$CONFIG_FILES test/Makefile" ;;
+    "test/bin/Makefile") CONFIG_FILES="$CONFIG_FILES test/bin/Makefile" ;;
+    "test/suites/Makefile") CONFIG_FILES="$CONFIG_FILES test/suites/Makefile" ;;
+    "test/suites/api/Makefile") CONFIG_FILES="$CONFIG_FILES test/suites/api/Makefile" ;;
+
+  *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
+  esac
+done
+
+
+# If the user did not use the arguments to specify the items to instantiate,
+# then the envvar interface is used.  Set only those that are not.
+# We use the long form for the default assignment because of an extremely
+# bizarre bug on SunOS 4.1.3.
+if $ac_need_defaults; then
+  test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files
+  test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers
+  test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands
+fi
+
+# Have a temporary directory for convenience.  Make it in the build tree
+# simply because there is no reason against having it here, and in addition,
+# creating and moving files from /tmp can sometimes cause problems.
+# Hook for its removal unless debugging.
+# Note that there is a small window in which the directory will not be cleaned:
+# after its creation but before its name has been assigned to `$tmp'.
+$debug ||
+{
+  tmp= ac_tmp=
+  trap 'exit_status=$?
+  : "${ac_tmp:=$tmp}"
+  { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status
+' 0
+  trap 'as_fn_exit 1' 1 2 13 15
+}
+# Create a (secure) tmp directory for tmp files.
+
+{
+  tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` &&
+  test -d "$tmp"
+}  ||
+{
+  tmp=./conf$$-$RANDOM
+  (umask 077 && mkdir "$tmp")
+} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5
+ac_tmp=$tmp
+
+# Set up the scripts for CONFIG_FILES section.
+# No need to generate them if there are no CONFIG_FILES.
+# This happens for instance with `./config.status config.h'.
+if test -n "$CONFIG_FILES"; then
+
+
+ac_cr=`echo X | tr X '\015'`
+# On cygwin, bash can eat \r inside `` if the user requested igncr.
+# But we know of no other shell where ac_cr would be empty at this
+# point, so we can use a bashism as a fallback.
+if test "x$ac_cr" = x; then
+  eval ac_cr=\$\'\\r\'
+fi
+ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' </dev/null 2>/dev/null`
+if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then
+  ac_cs_awk_cr='\\r'
+else
+  ac_cs_awk_cr=$ac_cr
+fi
+
+echo 'BEGIN {' >"$ac_tmp/subs1.awk" &&
+_ACEOF
+
+
+{
+  echo "cat >conf$$subs.awk <<_ACEOF" &&
+  echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' &&
+  echo "_ACEOF"
+} >conf$$subs.sh ||
+  as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'`
+ac_delim='%!_!# '
+for ac_last_try in false false false false false :; do
+  . ./conf$$subs.sh ||
+    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+
+  ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X`
+  if test $ac_delim_n = $ac_delim_num; then
+    break
+  elif $ac_last_try; then
+    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+  else
+    ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
+  fi
+done
+rm -f conf$$subs.sh
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK &&
+_ACEOF
+sed -n '
+h
+s/^/S["/; s/!.*/"]=/
+p
+g
+s/^[^!]*!//
+:repl
+t repl
+s/'"$ac_delim"'$//
+t delim
+:nl
+h
+s/\(.\{148\}\)..*/\1/
+t more1
+s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/
+p
+n
+b repl
+:more1
+s/["\\]/\\&/g; s/^/"/; s/$/"\\/
+p
+g
+s/.\{148\}//
+t nl
+:delim
+h
+s/\(.\{148\}\)..*/\1/
+t more2
+s/["\\]/\\&/g; s/^/"/; s/$/"/
+p
+b
+:more2
+s/["\\]/\\&/g; s/^/"/; s/$/"\\/
+p
+g
+s/.\{148\}//
+t delim
+' <conf$$subs.awk | sed '
+/^[^""]/{
+  N
+  s/\n//
+}
+' >>$CONFIG_STATUS || ac_write_fail=1
+rm -f conf$$subs.awk
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+_ACAWK
+cat >>"\$ac_tmp/subs1.awk" <<_ACAWK &&
+  for (key in S) S_is_set[key] = 1
+  FS = ""
+
+}
+{
+  line = $ 0
+  nfields = split(line, field, "@")
+  substed = 0
+  len = length(field[1])
+  for (i = 2; i < nfields; i++) {
+    key = field[i]
+    keylen = length(key)
+    if (S_is_set[key]) {
+      value = S[key]
+      line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3)
+      len += length(value) + length(field[++i])
+      substed = 1
+    } else
+      len += 1 + keylen
+  }
+
+  print line
+}
+
+_ACAWK
+_ACEOF
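+# Sketch of the substitution engine above: S[] maps placeholder names to
+# values, so a template line such as
+#   prefix = @prefix@
+# becomes 'prefix = /usr/local' when S["prefix"] is "/usr/local"; any @name@
+# whose name is not in S is passed through unchanged.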
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then
+  sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g"
+else
+  cat
+fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \
+  || as_fn_error $? "could not setup config files machinery" "$LINENO" 5
+_ACEOF
+
+# VPATH may cause trouble with some makes, so we remove sole $(srcdir),
+# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and
+# trailing colons and then remove the whole line if VPATH becomes empty
+# (actually we leave an empty line to preserve line numbers).
+if test "x$srcdir" = x.; then
+  ac_vpsub='/^[	 ]*VPATH[	 ]*=[	 ]*/{
+h
+s///
+s/^/:/
+s/[	 ]*$/:/
+s/:\$(srcdir):/:/g
+s/:\${srcdir}:/:/g
+s/:@srcdir@:/:/g
+s/^:*//
+s/:*$//
+x
+s/\(=[	 ]*\).*/\1/
+G
+s/\n//
+s/^[^=]*=[	 ]*$//
+}'
+fi
+
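+# Sketch of ac_vpsub's effect when building in the source tree: a Makefile
+# line 'VPATH = $(srcdir):extra' becomes 'VPATH = extra', and a line that
+# reduces to 'VPATH =' is emptied (a blank line is kept to preserve line
+# numbers).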
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+fi # test -n "$CONFIG_FILES"
+
+# Set up the scripts for CONFIG_HEADERS section.
+# No need to generate them if there are no CONFIG_HEADERS.
+# This happens for instance with `./config.status Makefile'.
+if test -n "$CONFIG_HEADERS"; then
+cat >"$ac_tmp/defines.awk" <<\_ACAWK ||
+BEGIN {
+_ACEOF
+
+# Transform confdefs.h into an awk script `defines.awk', embedded as
+# here-document in config.status, that substitutes the proper values into
+# config.h.in to produce config.h.
+
+# Create a delimiter string that does not exist in confdefs.h, to ease
+# handling of long lines.
+ac_delim='%!_!# '
+for ac_last_try in false false :; do
+  ac_tt=`sed -n "/$ac_delim/p" confdefs.h`
+  if test -z "$ac_tt"; then
+    break
+  elif $ac_last_try; then
+    as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5
+  else
+    ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
+  fi
+done
+
+# For the awk script, D is an array of macro values keyed by name,
+# likewise P contains macro parameters if any.  Preserve backslash
+# newline sequences.
+
+ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]*
+sed -n '
+s/.\{148\}/&'"$ac_delim"'/g
+t rset
+:rset
+s/^[	 ]*#[	 ]*define[	 ][	 ]*/ /
+t def
+d
+:def
+s/\\$//
+t bsnl
+s/["\\]/\\&/g
+s/^ \('"$ac_word_re"'\)\(([^()]*)\)[	 ]*\(.*\)/P["\1"]="\2"\
+D["\1"]=" \3"/p
+s/^ \('"$ac_word_re"'\)[	 ]*\(.*\)/D["\1"]=" \2"/p
+d
+:bsnl
+s/["\\]/\\&/g
+s/^ \('"$ac_word_re"'\)\(([^()]*)\)[	 ]*\(.*\)/P["\1"]="\2"\
+D["\1"]=" \3\\\\\\n"\\/p
+t cont
+s/^ \('"$ac_word_re"'\)[	 ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p
+t cont
+d
+:cont
+n
+s/.\{148\}/&'"$ac_delim"'/g
+t clear
+:clear
+s/\\$//
+t bsnlc
+s/["\\]/\\&/g; s/^/"/; s/$/"/p
+d
+:bsnlc
+s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p
+b cont
+' <confdefs.h | sed '
+s/'"$ac_delim"'/"\\\
+"/g' >>$CONFIG_STATUS || ac_write_fail=1
+
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+  for (key in D) D_is_set[key] = 1
+  FS = ""
+}
+/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ {
+  line = \$ 0
+  split(line, arg, " ")
+  if (arg[1] == "#") {
+    defundef = arg[2]
+    mac1 = arg[3]
+  } else {
+    defundef = substr(arg[1], 2)
+    mac1 = arg[2]
+  }
+  split(mac1, mac2, "(") #)
+  macro = mac2[1]
+  prefix = substr(line, 1, index(line, defundef) - 1)
+  if (D_is_set[macro]) {
+    # Preserve the white space surrounding the "#".
+    print prefix "define", macro P[macro] D[macro]
+    next
+  } else {
+    # Replace #undef with comments.  This is necessary, for example,
+    # in the case of _POSIX_SOURCE, which is predefined and required
+    # on some systems where configure will not decide to define it.
+    if (defundef == "undef") {
+      print "/*", prefix defundef, macro, "*/"
+      next
+    }
+  }
+}
+{ print }
+_ACAWK
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+  as_fn_error $? "could not setup config headers machinery" "$LINENO" 5
+fi # test -n "$CONFIG_HEADERS"
+
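+# Sketch of defines.awk at work: a confdefs.h line
+#   #define HAVE_LONG_LONG_INT 1
+# sets D["HAVE_LONG_LONG_INT"]=" 1", so a config.h.in line
+#   #undef HAVE_LONG_LONG_INT
+# is emitted as '#define HAVE_LONG_LONG_INT 1'; an #undef with no matching
+# define is emitted as the comment '/* #undef NAME */'.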
+
+eval set X "  :F $CONFIG_FILES  :H $CONFIG_HEADERS    :C $CONFIG_COMMANDS"
+shift
+for ac_tag
+do
+  case $ac_tag in
+  :[FHLC]) ac_mode=$ac_tag; continue;;
+  esac
+  case $ac_mode$ac_tag in
+  :[FHL]*:*);;
+  :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;;
+  :[FH]-) ac_tag=-:-;;
+  :[FH]*) ac_tag=$ac_tag:$ac_tag.in;;
+  esac
+  ac_save_IFS=$IFS
+  IFS=:
+  set x $ac_tag
+  IFS=$ac_save_IFS
+  shift
+  ac_file=$1
+  shift
+
+  case $ac_mode in
+  :L) ac_source=$1;;
+  :[FH])
+    ac_file_inputs=
+    for ac_f
+    do
+      case $ac_f in
+      -) ac_f="$ac_tmp/stdin";;
+      *) # Look for the file first in the build tree, then in the source tree
+	 # (if the path is not absolute).  The absolute path cannot be DOS-style,
+	 # because $ac_f cannot contain `:'.
+	 test -f "$ac_f" ||
+	   case $ac_f in
+	   [\\/$]*) false;;
+	   *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";;
+	   esac ||
+	   as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;;
+      esac
+      case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac
+      as_fn_append ac_file_inputs " '$ac_f'"
+    done
+
+    # Let's still pretend it is `configure' which instantiates (i.e., don't
+    # use $as_me); people would be surprised to read:
+    #    /* config.h.  Generated by config.status.  */
+    configure_input='Generated from '`
+	  $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g'
+	`' by configure.'
+    if test x"$ac_file" != x-; then
+      configure_input="$ac_file.  $configure_input"
+      { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5
+$as_echo "$as_me: creating $ac_file" >&6;}
+    fi
+    # Neutralize special characters interpreted by sed in replacement strings.
+    case $configure_input in #(
+    *\&* | *\|* | *\\* )
+       ac_sed_conf_input=`$as_echo "$configure_input" |
+       sed 's/[\\\\&|]/\\\\&/g'`;; #(
+    *) ac_sed_conf_input=$configure_input;;
+    esac
+
+    case $ac_tag in
+    *:-:* | *:-) cat >"$ac_tmp/stdin" \
+      || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;;
+    esac
+    ;;
+  esac
+
+  ac_dir=`$as_dirname -- "$ac_file" ||
+$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$ac_file" : 'X\(//\)[^/]' \| \
+	 X"$ac_file" : 'X\(//\)$' \| \
+	 X"$ac_file" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$ac_file" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+  as_dir="$ac_dir"; as_fn_mkdir_p
+  ac_builddir=.
+
+case "$ac_dir" in
+.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
+*)
+  ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
+  # A ".." for each directory in $ac_dir_suffix.
+  ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
+  case $ac_top_builddir_sub in
+  "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
+  *)  ac_top_build_prefix=$ac_top_builddir_sub/ ;;
+  esac ;;
+esac
+ac_abs_top_builddir=$ac_pwd
+ac_abs_builddir=$ac_pwd$ac_dir_suffix
+# for backward compatibility:
+ac_top_builddir=$ac_top_build_prefix
+
+case $srcdir in
+  .)  # We are building in place.
+    ac_srcdir=.
+    ac_top_srcdir=$ac_top_builddir_sub
+    ac_abs_top_srcdir=$ac_pwd ;;
+  [\\/]* | ?:[\\/]* )  # Absolute name.
+    ac_srcdir=$srcdir$ac_dir_suffix;
+    ac_top_srcdir=$srcdir
+    ac_abs_top_srcdir=$srcdir ;;
+  *) # Relative name.
+    ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
+    ac_top_srcdir=$ac_top_build_prefix$srcdir
+    ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
+esac
+ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
+
+
+  case $ac_mode in
+  :F)
+  #
+  # CONFIG_FILE
+  #
+
+  case $INSTALL in
+  [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;;
+  *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;;
+  esac
+  ac_MKDIR_P=$MKDIR_P
+  case $MKDIR_P in
+  [\\/$]* | ?:[\\/]* ) ;;
+  */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;;
+  esac
+_ACEOF
+
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+# If the template does not know about datarootdir, expand it.
+# FIXME: This hack should be removed a few years after 2.60.
+ac_datarootdir_hack=; ac_datarootdir_seen=
+ac_sed_dataroot='
+/datarootdir/ {
+  p
+  q
+}
+/@datadir@/p
+/@docdir@/p
+/@infodir@/p
+/@localedir@/p
+/@mandir@/p'
+case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in
+*datarootdir*) ac_datarootdir_seen=yes;;
+*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*)
+  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5
+$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;}
+_ACEOF
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+  ac_datarootdir_hack='
+  s&@datadir@&$datadir&g
+  s&@docdir@&$docdir&g
+  s&@infodir@&$infodir&g
+  s&@localedir@&$localedir&g
+  s&@mandir@&$mandir&g
+  s&\\\${datarootdir}&$datarootdir&g' ;;
+esac
+_ACEOF
+
+# Neutralize VPATH when `$srcdir' = `.'.
+# Shell code in configure.ac might set extrasub.
+# FIXME: do we really want to maintain this feature?
+cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
+ac_sed_extra="$ac_vpsub
+$extrasub
+_ACEOF
+cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
+:t
+/@[a-zA-Z_][a-zA-Z_0-9]*@/!b
+s|@configure_input@|$ac_sed_conf_input|;t t
+s&@top_builddir@&$ac_top_builddir_sub&;t t
+s&@top_build_prefix@&$ac_top_build_prefix&;t t
+s&@srcdir@&$ac_srcdir&;t t
+s&@abs_srcdir@&$ac_abs_srcdir&;t t
+s&@top_srcdir@&$ac_top_srcdir&;t t
+s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t
+s&@builddir@&$ac_builddir&;t t
+s&@abs_builddir@&$ac_abs_builddir&;t t
+s&@abs_top_builddir@&$ac_abs_top_builddir&;t t
+s&@INSTALL@&$ac_INSTALL&;t t
+s&@MKDIR_P@&$ac_MKDIR_P&;t t
+$ac_datarootdir_hack
+"
+eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \
+  >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+
+test -z "$ac_datarootdir_hack$ac_datarootdir_seen" &&
+  { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } &&
+  { ac_out=`sed -n '/^[	 ]*datarootdir[	 ]*:*=/p' \
+      "$ac_tmp/out"`; test -z "$ac_out"; } &&
+  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir'
+which seems to be undefined.  Please make sure it is defined" >&5
+$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir'
+which seems to be undefined.  Please make sure it is defined" >&2;}
+
+  rm -f "$ac_tmp/stdin"
+  case $ac_file in
+  -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";;
+  *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";;
+  esac \
+  || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+ ;;
+  :H)
+  #
+  # CONFIG_HEADER
+  #
+  if test x"$ac_file" != x-; then
+    {
+      $as_echo "/* $configure_input  */" \
+      && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs"
+    } >"$ac_tmp/config.h" \
+      || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+    if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then
+      { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5
+$as_echo "$as_me: $ac_file is unchanged" >&6;}
+    else
+      rm -f "$ac_file"
+      mv "$ac_tmp/config.h" "$ac_file" \
+	|| as_fn_error $? "could not create $ac_file" "$LINENO" 5
+    fi
+  else
+    $as_echo "/* $configure_input  */" \
+      && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \
+      || as_fn_error $? "could not create -" "$LINENO" 5
+  fi
+# Compute "$ac_file"'s index in $config_headers.
+_am_stamp_count=1
+for _am_header in $config_headers :; do
+  case $_am_header in
+    "$ac_file" | "$ac_file":* )
+      break ;;
+    * )
+      _am_stamp_count=`expr $_am_stamp_count + 1` ;;
+  esac
+done
+echo "timestamp for "$ac_file"" >`$as_dirname -- "$ac_file" ||
+$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$ac_file" : 'X\(//\)[^/]' \| \
+	 X"$ac_file" : 'X\(//\)$' \| \
+	 X"$ac_file" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$ac_file" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`/stamp-h$_am_stamp_count
+ ;;
+
+  :C)  { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5
+$as_echo "$as_me: executing $ac_file commands" >&6;}
+ ;;
+  esac
+
+
+  case $ac_file$ac_mode in
+    "depfiles":C) test x"$AMDEP_TRUE" != x"" || for mf in $CONFIG_FILES; do
+  # Strip MF so we end up with the name of the file.
+  mf=`echo "$mf" | sed -e 's/:.*$//'`
+  # Check whether this is an Automake generated Makefile or not.
+  # We used to match only the files named `Makefile.in', but
+  # some people rename them; so instead we look at the file content.
+  # Grep'ing the first line is not enough: some people post-process
+  # each Makefile.in and add a new line on top of each file to say so.
+  # Grep'ing the whole file is not good either: AIX grep has a line
+  # limit of 2048, but all sed's we know understand at least 4000.
+  if sed 10q "$mf" | grep '^#.*generated by automake' > /dev/null 2>&1; then
+    dirpart=`$as_dirname -- "$mf" ||
+$as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$mf" : 'X\(//\)[^/]' \| \
+	 X"$mf" : 'X\(//\)$' \| \
+	 X"$mf" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$mf" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+  else
+    continue
+  fi
+  # Extract the definition of DEPDIR, am__include, and am__quote
+  # from the Makefile without running `make'.
+  DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
+  test -z "$DEPDIR" && continue
+  am__include=`sed -n 's/^am__include = //p' < "$mf"`
+  test -z "am__include" && continue
+  am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
+  # When using ansi2knr, U may be empty or an underscore; expand it
+  U=`sed -n 's/^U = //p' < "$mf"`
+  # Find all dependency output files, they are included files with
+  # $(DEPDIR) in their names.  We invoke sed twice because it is the
+  # simplest approach to changing $(DEPDIR) to its actual value in the
+  # expansion.
+  for file in `sed -n "
+    s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
+       sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do
+    # Make sure the directory exists.
+    test -f "$dirpart/$file" && continue
+    fdir=`$as_dirname -- "$file" ||
+$as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	 X"$file" : 'X\(//\)[^/]' \| \
+	 X"$file" : 'X\(//\)$' \| \
+	 X"$file" : 'X\(/\)' \| . 2>/dev/null ||
+$as_echo X"$file" |
+    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)[^/].*/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\/\)$/{
+	    s//\1/
+	    q
+	  }
+	  /^X\(\/\).*/{
+	    s//\1/
+	    q
+	  }
+	  s/.*/./; q'`
+    as_dir=$dirpart/$fdir; as_fn_mkdir_p
+    # echo "creating $dirpart/$file"
+    echo '# dummy' > "$dirpart/$file"
+  done
+done
+ ;;
+    "libtool":C)
+
+    # See if we are running on zsh, and set the options which allow our
+    # commands through without removal of \ escapes.
+    if test -n "${ZSH_VERSION+set}" ; then
+      setopt NO_GLOB_SUBST
+    fi
+
+    cfgfile="${ofile}T"
+    trap "$RM -f \"$cfgfile\"; exit 1" 1 2 15
+    $RM -f "$cfgfile"
+
+    cat <<_LT_EOF >> "$cfgfile"
+#! $SHELL
+
+# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services.
+# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION
+# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`:
+# NOTE: Changes made to this file will be lost: look at ltmain.sh.
+#
+#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
+#                 2006, 2007, 2008, 2009, 2010 Free Software Foundation,
+#                 Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+#   This file is part of GNU Libtool.
+#
+# GNU Libtool is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation; either version 2 of
+# the License, or (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
+# obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+
+# The names of the tagged configurations supported by this script.
+available_tags=""
+
+# ### BEGIN LIBTOOL CONFIG
+
+# Which release of libtool.m4 was used?
+macro_version=$macro_version
+macro_revision=$macro_revision
+
+# Whether or not to build shared libraries.
+build_libtool_libs=$enable_shared
+
+# Whether or not to build static libraries.
+build_old_libs=$enable_static
+
+# What type of objects to build.
+pic_mode=$pic_mode
+
+# Whether or not to optimize for fast installation.
+fast_install=$enable_fast_install
+
+# Shell to use when invoking shell scripts.
+SHELL=$lt_SHELL
+
+# An echo program that protects backslashes.
+ECHO=$lt_ECHO
+
+# The host system.
+host_alias=$host_alias
+host=$host
+host_os=$host_os
+
+# The build system.
+build_alias=$build_alias
+build=$build
+build_os=$build_os
+
+# A sed program that does not truncate output.
+SED=$lt_SED
+
+# Sed that helps us avoid accidentally triggering echo(1) options like -n.
+Xsed="\$SED -e 1s/^X//"
+
+# A grep program that handles long lines.
+GREP=$lt_GREP
+
+# An ERE matcher.
+EGREP=$lt_EGREP
+
+# A literal string matcher.
+FGREP=$lt_FGREP
+
+# A BSD- or MS-compatible name lister.
+NM=$lt_NM
+
+# Whether we need soft or hard links.
+LN_S=$lt_LN_S
+
+# What is the maximum length of a command?
+max_cmd_len=$max_cmd_len
+
+# Object file suffix (normally "o").
+objext=$ac_objext
+
+# Executable file suffix (normally "").
+exeext=$exeext
+
+# whether the shell understands "unset".
+lt_unset=$lt_unset
+
+# turn spaces into newlines.
+SP2NL=$lt_lt_SP2NL
+
+# turn newlines into spaces.
+NL2SP=$lt_lt_NL2SP
+
+# An object symbol dumper.
+OBJDUMP=$lt_OBJDUMP
+
+# Method to check whether dependent libraries are shared objects.
+deplibs_check_method=$lt_deplibs_check_method
+
+# Command to use when deplibs_check_method == "file_magic".
+file_magic_cmd=$lt_file_magic_cmd
+
+# The archiver.
+AR=$lt_AR
+AR_FLAGS=$lt_AR_FLAGS
+
+# A symbol stripping program.
+STRIP=$lt_STRIP
+
+# Commands used to install an old-style archive.
+RANLIB=$lt_RANLIB
+old_postinstall_cmds=$lt_old_postinstall_cmds
+old_postuninstall_cmds=$lt_old_postuninstall_cmds
+
+# Whether to use a lock for old archive extraction.
+lock_old_archive_extraction=$lock_old_archive_extraction
+
+# A C compiler.
+LTCC=$lt_CC
+
+# LTCC compiler flags.
+LTCFLAGS=$lt_CFLAGS
+
+# Take the output of nm and produce a listing of raw symbols and C names.
+global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe
+
+# Transform the output of nm in a proper C declaration.
+global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl
+
+# Transform the output of nm in a C name address pair.
+global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
+
+# Transform the output of nm in a C name address pair when lib prefix is needed.
+global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
+
+# The name of the directory that contains temporary libtool files.
+objdir=$objdir
+
+# Used to examine libraries when file_magic_cmd begins with "file".
+MAGIC_CMD=$MAGIC_CMD
+
+# Must we lock files when doing compilation?
+need_locks=$lt_need_locks
+
+# Tool to manipulate archived DWARF debug symbol files on Mac OS X.
+DSYMUTIL=$lt_DSYMUTIL
+
+# Tool to change global to local symbols on Mac OS X.
+NMEDIT=$lt_NMEDIT
+
+# Tool to manipulate fat objects and archives on Mac OS X.
+LIPO=$lt_LIPO
+
+# ldd/readelf like tool for Mach-O binaries on Mac OS X.
+OTOOL=$lt_OTOOL
+
+# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4.
+OTOOL64=$lt_OTOOL64
+
+# Old archive suffix (normally "a").
+libext=$libext
+
+# Shared library suffix (normally ".so").
+shrext_cmds=$lt_shrext_cmds
+
+# The commands to extract the exported symbol list from a shared archive.
+extract_expsyms_cmds=$lt_extract_expsyms_cmds
+
+# Variables whose values should be saved in libtool wrapper scripts and
+# restored at link time.
+variables_saved_for_relink=$lt_variables_saved_for_relink
+
+# Do we need the "lib" prefix for modules?
+need_lib_prefix=$need_lib_prefix
+
+# Do we need a version for libraries?
+need_version=$need_version
+
+# Library versioning type.
+version_type=$version_type
+
+# Shared library runtime path variable.
+runpath_var=$runpath_var
+
+# Shared library path variable.
+shlibpath_var=$shlibpath_var
+
+# Is shlibpath searched before the hard-coded library search path?
+shlibpath_overrides_runpath=$shlibpath_overrides_runpath
+
+# Format of library name prefix.
+libname_spec=$lt_libname_spec
+
+# List of archive names.  First name is the real one, the rest are links.
+# The last name is the one that the linker finds with -lNAME
+library_names_spec=$lt_library_names_spec
+
+# The coded name of the library, if different from the real name.
+soname_spec=$lt_soname_spec
+
+# Permission mode override for installation of shared libraries.
+install_override_mode=$lt_install_override_mode
+
+# Command to use after installation of a shared archive.
+postinstall_cmds=$lt_postinstall_cmds
+
+# Command to use after uninstallation of a shared archive.
+postuninstall_cmds=$lt_postuninstall_cmds
+
+# Commands used to finish a libtool library installation in a directory.
+finish_cmds=$lt_finish_cmds
+
+# As "finish_cmds", except a single script fragment to be evaled but
+# not shown.
+finish_eval=$lt_finish_eval
+
+# Whether we should hardcode library paths into libraries.
+hardcode_into_libs=$hardcode_into_libs
+
+# Compile-time system search path for libraries.
+sys_lib_search_path_spec=$lt_sys_lib_search_path_spec
+
+# Run-time system search path for libraries.
+sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec
+
+# Whether dlopen is supported.
+dlopen_support=$enable_dlopen
+
+# Whether dlopen of programs is supported.
+dlopen_self=$enable_dlopen_self
+
+# Whether dlopen of statically linked programs is supported.
+dlopen_self_static=$enable_dlopen_self_static
+
+# Commands to strip libraries.
+old_striplib=$lt_old_striplib
+striplib=$lt_striplib
+
+
+# The linker used to build libraries.
+LD=$lt_LD
+
+# How to create reloadable object files.
+reload_flag=$lt_reload_flag
+reload_cmds=$lt_reload_cmds
+
+# Commands used to build an old-style archive.
+old_archive_cmds=$lt_old_archive_cmds
+
+# A language specific compiler.
+CC=$lt_compiler
+
+# Is the compiler the GNU compiler?
+with_gcc=$GCC
+
+# Compiler flag to turn off builtin functions.
+no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
+
+# How to pass a linker flag through the compiler.
+wl=$lt_lt_prog_compiler_wl
+
+# Additional compiler flags for building library objects.
+pic_flag=$lt_lt_prog_compiler_pic
+
+# Compiler flag to prevent dynamic linking.
+link_static_flag=$lt_lt_prog_compiler_static
+
+# Does compiler simultaneously support -c and -o options?
+compiler_c_o=$lt_lt_cv_prog_compiler_c_o
+
+# Whether or not to add -lc for building shared libraries.
+build_libtool_need_lc=$archive_cmds_need_lc
+
+# Whether or not to disallow shared libs when runtime libs are static.
+allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes
+
+# Compiler flag to allow reflexive dlopens.
+export_dynamic_flag_spec=$lt_export_dynamic_flag_spec
+
+# Compiler flag to generate shared objects directly from archives.
+whole_archive_flag_spec=$lt_whole_archive_flag_spec
+
+# Whether the compiler copes with passing no objects directly.
+compiler_needs_object=$lt_compiler_needs_object
+
+# Create an old-style archive from a shared archive.
+old_archive_from_new_cmds=$lt_old_archive_from_new_cmds
+
+# Create a temporary old-style archive to link instead of a shared archive.
+old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds
+
+# Commands used to build a shared archive.
+archive_cmds=$lt_archive_cmds
+archive_expsym_cmds=$lt_archive_expsym_cmds
+
+# Commands used to build a loadable module if different from building
+# a shared archive.
+module_cmds=$lt_module_cmds
+module_expsym_cmds=$lt_module_expsym_cmds
+
+# Whether we are building with GNU ld or not.
+with_gnu_ld=$lt_with_gnu_ld
+
+# Flag that allows shared libraries with undefined symbols to be built.
+allow_undefined_flag=$lt_allow_undefined_flag
+
+# Flag that enforces no undefined symbols.
+no_undefined_flag=$lt_no_undefined_flag
+
+# Flag to hardcode \$libdir into a binary during linking.
+# This must work even if \$libdir does not exist
+hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec
+
+# If ld is used when linking, flag to hardcode \$libdir into a binary
+# during linking.  This must work even if \$libdir does not exist.
+hardcode_libdir_flag_spec_ld=$lt_hardcode_libdir_flag_spec_ld
+
+# Whether we need a single "-rpath" flag with a separated argument.
+hardcode_libdir_separator=$lt_hardcode_libdir_separator
+
+# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
+# DIR into the resulting binary.
+hardcode_direct=$hardcode_direct
+
+# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
+# DIR into the resulting binary and the resulting library dependency is
+# "absolute",i.e impossible to change by setting \${shlibpath_var} if the
+# library is relocated.
+hardcode_direct_absolute=$hardcode_direct_absolute
+
+# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+# into the resulting binary.
+hardcode_minus_L=$hardcode_minus_L
+
+# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+# into the resulting binary.
+hardcode_shlibpath_var=$hardcode_shlibpath_var
+
+# Set to "yes" if building a shared library automatically hardcodes DIR
+# into the library and all subsequent libraries and executables linked
+# against it.
+hardcode_automatic=$hardcode_automatic
+
+# Set to yes if linker adds runtime paths of dependent libraries
+# to runtime path list.
+inherit_rpath=$inherit_rpath
+
+# Whether libtool must link a program against all its dependency libraries.
+link_all_deplibs=$link_all_deplibs
+
+# Fix the shell variable \$srcfile for the compiler.
+fix_srcfile_path=$lt_fix_srcfile_path
+
+# Set to "yes" if exported symbols are required.
+always_export_symbols=$always_export_symbols
+
+# The commands to list exported symbols.
+export_symbols_cmds=$lt_export_symbols_cmds
+
+# Symbols that should not be listed in the preloaded symbols.
+exclude_expsyms=$lt_exclude_expsyms
+
+# Symbols that must always be exported.
+include_expsyms=$lt_include_expsyms
+
+# Commands necessary for linking programs (against libraries) with templates.
+prelink_cmds=$lt_prelink_cmds
+
+# Specify filename containing input files.
+file_list_spec=$lt_file_list_spec
+
+# How to hardcode a shared library path into an executable.
+hardcode_action=$hardcode_action
+
+# ### END LIBTOOL CONFIG
+
+_LT_EOF
+
+  case $host_os in
+  aix3*)
+    cat <<\_LT_EOF >> "$cfgfile"
+# AIX sometimes has problems with the GCC collect2 program.  For some
+# reason, if we set the COLLECT_NAMES environment variable, the problems
+# vanish in a puff of smoke.
+if test "X${COLLECT_NAMES+set}" != Xset; then
+  COLLECT_NAMES=
+  export COLLECT_NAMES
+fi
+_LT_EOF
+    ;;
+  esac
+
+
+ltmain="$ac_aux_dir/ltmain.sh"
+
+
+  # We use sed instead of cat because bash on DJGPP gets confused if
+  # it finds mixed CR/LF and LF-only lines.  Since sed operates in
+  # text mode, it properly converts lines to CR/LF.  This bash problem
+  # is reportedly fixed, but why not run on old versions too?
+  sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \
+    || (rm -f "$cfgfile"; exit 1)
+
+  case $xsi_shell in
+  yes)
+    cat << \_LT_EOF >> "$cfgfile"
+
+# func_dirname file append nondir_replacement
+# Compute the dirname of FILE.  If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+func_dirname ()
+{
+  case ${1} in
+    */*) func_dirname_result="${1%/*}${2}" ;;
+    *  ) func_dirname_result="${3}" ;;
+  esac
+}
+
+# func_basename file
+func_basename ()
+{
+  func_basename_result="${1##*/}"
+}
+
+# func_dirname_and_basename file append nondir_replacement
+# perform func_basename and func_dirname in a single function
+# call:
+#   dirname:  Compute the dirname of FILE.  If nonempty,
+#             add APPEND to the result, otherwise set result
+#             to NONDIR_REPLACEMENT.
+#             value returned in "$func_dirname_result"
+#   basename: Compute filename of FILE.
+#             value returned in "$func_basename_result"
+# Implementation must be kept synchronized with func_dirname
+# and func_basename. For efficiency, we do not delegate to
+# those functions but instead duplicate the functionality here.
+func_dirname_and_basename ()
+{
+  case ${1} in
+    */*) func_dirname_result="${1%/*}${2}" ;;
+    *  ) func_dirname_result="${3}" ;;
+  esac
+  func_basename_result="${1##*/}"
+}
+
+# func_stripname prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+func_stripname ()
+{
+  # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
+  # positional parameters, so assign one to ordinary parameter first.
+  func_stripname_result=${3}
+  func_stripname_result=${func_stripname_result#"${1}"}
+  func_stripname_result=${func_stripname_result%"${2}"}
+}
+
+# func_opt_split
+func_opt_split ()
+{
+  func_opt_split_opt=${1%%=*}
+  func_opt_split_arg=${1#*=}
+}
+
+# func_lo2o object
+func_lo2o ()
+{
+  case ${1} in
+    *.lo) func_lo2o_result=${1%.lo}.${objext} ;;
+    *)    func_lo2o_result=${1} ;;
+  esac
+}
+
+# func_xform libobj-or-source
+func_xform ()
+{
+  func_xform_result=${1%.*}.lo
+}
+
+# func_arith arithmetic-term...
+func_arith ()
+{
+  func_arith_result=$(( $* ))
+}
+
+# func_len string
+# STRING may not start with a hyphen.
+func_len ()
+{
+  func_len_result=${#1}
+}
+
+_LT_EOF
+    ;;
+  *) # Bourne compatible functions.
+    cat << \_LT_EOF >> "$cfgfile"
+
+# func_dirname file append nondir_replacement
+# Compute the dirname of FILE.  If nonempty, add APPEND to the result,
+# otherwise set result to NONDIR_REPLACEMENT.
+func_dirname ()
+{
+  # Extract subdirectory from the argument.
+  func_dirname_result=`$ECHO "${1}" | $SED "$dirname"`
+  if test "X$func_dirname_result" = "X${1}"; then
+    func_dirname_result="${3}"
+  else
+    func_dirname_result="$func_dirname_result${2}"
+  fi
+}
+
+# func_basename file
+func_basename ()
+{
+  func_basename_result=`$ECHO "${1}" | $SED "$basename"`
+}
+
+
+# func_stripname prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+# func_strip_suffix prefix name
+func_stripname ()
+{
+  case ${2} in
+    .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
+    *)  func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
+  esac
+}
+
+# sed scripts:
+my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q'
+my_sed_long_arg='1s/^-[^=]*=//'
+
+# func_opt_split
+func_opt_split ()
+{
+  func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"`
+  func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"`
+}
+
+# func_lo2o object
+func_lo2o ()
+{
+  func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"`
+}
+
+# func_xform libobj-or-source
+func_xform ()
+{
+  func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'`
+}
+
+# func_arith arithmetic-term...
+func_arith ()
+{
+  func_arith_result=`expr "$@"`
+}
+
+# func_len string
+# STRING may not start with a hyphen.
+func_len ()
+{
+  func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
+}
+
+_LT_EOF
+esac
+
+case $lt_shell_append in
+  yes)
+    cat << \_LT_EOF >> "$cfgfile"
+
+# func_append var value
+# Append VALUE to the end of shell variable VAR.
+func_append ()
+{
+  eval "$1+=\$2"
+}
+_LT_EOF
+    ;;
+  *)
+    cat << \_LT_EOF >> "$cfgfile"
+
+# func_append var value
+# Append VALUE to the end of shell variable VAR.
+func_append ()
+{
+  eval "$1=\$$1\$2"
+}
+
+_LT_EOF
+    ;;
+  esac
+
+
+  sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \
+    || (rm -f "$cfgfile"; exit 1)
+
+  mv -f "$cfgfile" "$ofile" ||
+    (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+  chmod +x "$ofile"
+
+ ;;
+
+  esac
+done # for ac_tag
+
+
+as_fn_exit 0
+_ACEOF
+ac_clean_files=$ac_clean_files_save
+
+test $ac_write_fail = 0 ||
+  as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5
+
+
+# configure is writing to config.log, and then calls config.status.
+# config.status does its own redirection, appending to config.log.
+# Unfortunately, on DOS this fails, as config.log is still kept open
+# by configure, so config.status won't be able to write to it; its
+# output is simply discarded.  So we exec the FD to /dev/null,
+# effectively closing config.log, so it can be properly (re)opened and
+# appended to by config.status.  When coming back to configure, we
+# need to make the FD available again.
+if test "$no_create" != yes; then
+  ac_cs_success=:
+  ac_config_status_args=
+  test "$silent" = yes &&
+    ac_config_status_args="$ac_config_status_args --quiet"
+  exec 5>/dev/null
+  $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false
+  exec 5>>config.log
+  # Use ||, not &&, to avoid exiting from the if with $? = 1, which
+  # would make configure fail if this is the last instruction.
+  $ac_cs_success || as_fn_exit 1
+fi
+if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5
+$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;}
+fi
+
diff --git a/lang/c/jansson/configure.ac b/lang/c/jansson/configure.ac
new file mode 100644
index 0000000..da04bf8
--- /dev/null
+++ b/lang/c/jansson/configure.ac
@@ -0,0 +1,49 @@
+AC_PREREQ([2.60])
+AC_INIT([jansson], [2.1], [petri at digip.org])
+
+AM_INIT_AUTOMAKE([1.10 foreign])
+
+AC_CONFIG_SRCDIR([src/value.c])
+AC_CONFIG_HEADERS([config.h])
+
+# Checks for programs.
+AC_PROG_CC
+AC_PROG_LIBTOOL
+AM_CONDITIONAL([GCC], [test x$GCC = xyes])
+
+# Checks for libraries.
+
+# Checks for header files.
+
+# Checks for typedefs, structures, and compiler characteristics.
+AC_TYPE_INT32_T
+
+AC_TYPE_LONG_LONG_INT
+case $ac_cv_type_long_long_int in
+     yes) json_have_long_long=1;;
+     *) json_have_long_long=0;;
+esac
+AC_SUBST([json_have_long_long])
+
+AC_C_INLINE
+case $ac_cv_c_inline in
+    yes) json_inline=inline;;
+    no) json_inline=;;
+    *) json_inline=$ac_cv_c_inline;;
+esac
+AC_SUBST([json_inline])
+
+# Checks for library functions.
+
+AC_CONFIG_FILES([
+        jansson.pc
+        Makefile
+        doc/Makefile
+        src/Makefile
+        src/jansson_config.h
+        test/Makefile
+        test/bin/Makefile
+        test/suites/Makefile
+        test/suites/api/Makefile
+])
+AC_OUTPUT
diff --git a/lang/c/jansson/depcomp b/lang/c/jansson/depcomp
new file mode 100755
index 0000000..ca5ea4e
--- /dev/null
+++ b/lang/c/jansson/depcomp
@@ -0,0 +1,584 @@
+#! /bin/sh
+# depcomp - compile a program generating dependencies as side-effects
+
+scriptversion=2006-10-15.18
+
+# Copyright (C) 1999, 2000, 2003, 2004, 2005, 2006 Free Software
+# Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+# 02110-1301, USA.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# Originally written by Alexandre Oliva <oliva at dcc.unicamp.br>.
+
+case $1 in
+  '')
+     echo "$0: No command.  Try \`$0 --help' for more information." 1>&2
+     exit 1;
+     ;;
+  -h | --h*)
+    cat <<\EOF
+Usage: depcomp [--help] [--version] PROGRAM [ARGS]
+
+Run PROGRAMS ARGS to compile a file, generating dependencies
+as side-effects.
+
+Environment variables:
+  depmode     Dependency tracking mode.
+  source      Source file read by `PROGRAMS ARGS'.
+  object      Object file output by `PROGRAMS ARGS'.
+  DEPDIR      directory where to store dependencies.
+  depfile     Dependency file to output.
+  tmpdepfile  Temporary file to use when outputting dependencies.
+  libtool     Whether libtool is used (yes/no).
+
+Report bugs to <bug-automake at gnu.org>.
+EOF
+    exit $?
+    ;;
+  -v | --v*)
+    echo "depcomp $scriptversion"
+    exit $?
+    ;;
+esac
+
+if test -z "$depmode" || test -z "$source" || test -z "$object"; then
+  echo "depcomp: Variables source, object and depmode must be set" 1>&2
+  exit 1
+fi
+
+# Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po.
+depfile=${depfile-`echo "$object" |
+  sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`}
+tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`}
+
+rm -f "$tmpdepfile"
+
+# Some modes work just like other modes, but use different flags.  We
+# parameterize here, but still list the modes in the big case below,
+# to make depend.m4 easier to write.  Note that we *cannot* use a case
+# here, because this file can only contain one case statement.
+if test "$depmode" = hp; then
+  # HP compiler uses -M and no extra arg.
+  gccflag=-M
+  depmode=gcc
+fi
+
+if test "$depmode" = dashXmstdout; then
+   # This is just like dashmstdout with a different argument.
+   dashmflag=-xM
+   depmode=dashmstdout
+fi
+
+case "$depmode" in
+gcc3)
+## gcc 3 implements dependency tracking that does exactly what
+## we want.  Yay!  Note: for some reason libtool 1.4 doesn't like
+## it if -MD -MP comes after the -MF stuff.  Hmm.
+## Unfortunately, FreeBSD c89 acceptance of flags depends upon
+## the command line argument order; so add the flags where they
+## appear in depend2.am.  Note that the slowdown incurred here
+## affects only configure: in makefiles, %FASTDEP% shortcuts this.
+  for arg
+  do
+    case $arg in
+    -c) set fnord "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" "$arg" ;;
+    *)  set fnord "$@" "$arg" ;;
+    esac
+    shift # fnord
+    shift # $arg
+  done
+  "$@"
+  stat=$?
+  if test $stat -eq 0; then :
+  else
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  mv "$tmpdepfile" "$depfile"
+  ;;
+
+gcc)
+## There are various ways to get dependency output from gcc.  Here's
+## why we pick this rather obscure method:
+## - Don't want to use -MD because we'd like the dependencies to end
+##   up in a subdir.  Having to rename by hand is ugly.
+##   (We might end up doing this anyway to support other compilers.)
+## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like
+##   -MM, not -M (despite what the docs say).
+## - Using -M directly means running the compiler twice (even worse
+##   than renaming).
+  if test -z "$gccflag"; then
+    gccflag=-MD,
+  fi
+  "$@" -Wp,"$gccflag$tmpdepfile"
+  stat=$?
+  if test $stat -eq 0; then :
+  else
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+  echo "$object : \\" > "$depfile"
+  alpha=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz
+## The second -e expression handles DOS-style file names with drive letters.
+  sed -e 's/^[^:]*: / /' \
+      -e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile"
+## This next piece of magic avoids the `deleted header file' problem.
+## The problem is that when a header file which appears in a .P file
+## is deleted, the dependency causes make to die (because there is
+## typically no way to rebuild the header).  We avoid this by adding
+## dummy dependencies for each header file.  Too bad gcc doesn't do
+## this for us directly.
+  tr ' ' '
+' < "$tmpdepfile" |
+## Some versions of gcc put a space before the `:'.  On the theory
+## that the space means something, we add a space to the output as
+## well.
+## Some versions of the HPUX 10.20 sed can't process this invocation
+## correctly.  Breaking it into two sed invocations is a workaround.
+    sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+hp)
+  # This case exists only to let depend.m4 do its work.  It works by
+  # looking at the text of this script.  This case will never be run,
+  # since it is checked for above.
+  exit 1
+  ;;
+
+sgi)
+  if test "$libtool" = yes; then
+    "$@" "-Wp,-MDupdate,$tmpdepfile"
+  else
+    "$@" -MDupdate "$tmpdepfile"
+  fi
+  stat=$?
+  if test $stat -eq 0; then :
+  else
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+
+  if test -f "$tmpdepfile"; then  # yes, the sourcefile depend on other files
+    echo "$object : \\" > "$depfile"
+
+    # Clip off the initial element (the dependent).  Don't try to be
+    # clever and replace this with sed code, as IRIX sed won't handle
+    # lines with more than a fixed number of characters (4096 in
+    # IRIX 6.2 sed, 8192 in IRIX 6.5).  We also remove comment lines;
+    # the IRIX cc adds comments like `#:fec' to the end of the
+    # dependency line.
+    tr ' ' '
+' < "$tmpdepfile" \
+    | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' | \
+    tr '
+' ' ' >> $depfile
+    echo >> $depfile
+
+    # The second pass generates a dummy entry for each header file.
+    tr ' ' '
+' < "$tmpdepfile" \
+   | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \
+   >> $depfile
+  else
+    # The sourcefile does not contain any dependencies, so just
+    # store a dummy comment line, to avoid errors with the Makefile
+    # "include basename.Plo" scheme.
+    echo "#dummy" > "$depfile"
+  fi
+  rm -f "$tmpdepfile"
+  ;;
+
+aix)
+  # The C for AIX Compiler uses -M and outputs the dependencies
+  # in a .u file.  In older versions, this file always lives in the
+  # current directory.  Also, the AIX compiler puts `$object:' at the
+  # start of each line; $object doesn't have directory information.
+  # Version 6 uses the directory in both cases.
+  stripped=`echo "$object" | sed 's/\(.*\)\..*$/\1/'`
+  tmpdepfile="$stripped.u"
+  if test "$libtool" = yes; then
+    "$@" -Wc,-M
+  else
+    "$@" -M
+  fi
+  stat=$?
+
+  if test -f "$tmpdepfile"; then :
+  else
+    stripped=`echo "$stripped" | sed 's,^.*/,,'`
+    tmpdepfile="$stripped.u"
+  fi
+
+  if test $stat -eq 0; then :
+  else
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+
+  if test -f "$tmpdepfile"; then
+    outname="$stripped.o"
+    # Each line is of the form `foo.o: dependent.h'.
+    # Do two passes, one to just change these to
+    # `$object: dependent.h' and one to simply `dependent.h:'.
+    sed -e "s,^$outname:,$object :," < "$tmpdepfile" > "$depfile"
+    sed -e "s,^$outname: \(.*\)$,\1:," < "$tmpdepfile" >> "$depfile"
+  else
+    # The sourcefile does not contain any dependencies, so just
+    # store a dummy comment line, to avoid errors with the Makefile
+    # "include basename.Plo" scheme.
+    echo "#dummy" > "$depfile"
+  fi
+  rm -f "$tmpdepfile"
+  ;;
+
+icc)
+  # Intel's C compiler understands `-MD -MF file'.  However on
+  #    icc -MD -MF foo.d -c -o sub/foo.o sub/foo.c
+  # ICC 7.0 will fill foo.d with something like
+  #    foo.o: sub/foo.c
+  #    foo.o: sub/foo.h
+  # which is wrong.  We want:
+  #    sub/foo.o: sub/foo.c
+  #    sub/foo.o: sub/foo.h
+  #    sub/foo.c:
+  #    sub/foo.h:
+  # ICC 7.1 will output
+  #    foo.o: sub/foo.c sub/foo.h
+  # and will wrap long lines using \ :
+  #    foo.o: sub/foo.c ... \
+  #     sub/foo.h ... \
+  #     ...
+
+  "$@" -MD -MF "$tmpdepfile"
+  stat=$?
+  if test $stat -eq 0; then :
+  else
+    rm -f "$tmpdepfile"
+    exit $stat
+  fi
+  rm -f "$depfile"
+  # Each line is of the form `foo.o: dependent.h',
+  # or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'.
+  # Do two passes, one to just change these to
+  # `$object: dependent.h' and one to simply `dependent.h:'.
+  sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile"
+  # Some versions of the HPUX 10.20 sed can't process this invocation
+  # correctly.  Breaking it into two sed invocations is a workaround.
+  sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" |
+    sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+hp2)
+  # The "hp" stanza above does not work with aCC (C++) and HP's ia64
+  # compilers, which have integrated preprocessors.  The correct option
+  # to use with these is +Maked; it writes dependencies to a file named
+  # 'foo.d', which lands next to the object file, wherever that
+  # happens to be.
+  # Much of this is similar to the tru64 case; see comments there.
+  dir=`echo "$object" | sed -e 's|/[^/]*$|/|'`
+  test "x$dir" = "x$object" && dir=
+  base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'`
+  if test "$libtool" = yes; then
+    tmpdepfile1=$dir$base.d
+    tmpdepfile2=$dir.libs/$base.d
+    "$@" -Wc,+Maked
+  else
+    tmpdepfile1=$dir$base.d
+    tmpdepfile2=$dir$base.d
+    "$@" +Maked
+  fi
+  stat=$?
+  if test $stat -eq 0; then :
+  else
+     rm -f "$tmpdepfile1" "$tmpdepfile2"
+     exit $stat
+  fi
+
+  for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2"
+  do
+    test -f "$tmpdepfile" && break
+  done
+  if test -f "$tmpdepfile"; then
+    sed -e "s,^.*\.[a-z]*:,$object:," "$tmpdepfile" > "$depfile"
+    # Add `dependent.h:' lines.
+    sed -ne '2,${; s/^ *//; s/ \\*$//; s/$/:/; p;}' "$tmpdepfile" >> "$depfile"
+  else
+    echo "#dummy" > "$depfile"
+  fi
+  rm -f "$tmpdepfile" "$tmpdepfile2"
+  ;;
+
+tru64)
+   # The Tru64 compiler uses -MD to generate dependencies as a side
+   # effect.  `cc -MD -o foo.o ...' puts the dependencies into `foo.o.d'.
+   # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put
+   # dependencies in `foo.d' instead, so we check for that too.
+   # Subdirectories are respected.
+   dir=`echo "$object" | sed -e 's|/[^/]*$|/|'`
+   test "x$dir" = "x$object" && dir=
+   base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'`
+
+   if test "$libtool" = yes; then
+      # With Tru64 cc, shared objects can also be used to make a
+      # static library.  This mechanism is used in libtool 1.4 series to
+      # handle both shared and static libraries in a single compilation.
+      # With libtool 1.4, dependencies were output in $dir.libs/$base.lo.d.
+      #
+      # With libtool 1.5 this exception was removed, and libtool now
+      # generates 2 separate objects for the 2 libraries.  These two
+      # compilations output dependencies in $dir.libs/$base.o.d and
+      # in $dir$base.o.d.  We have to check for both files, because
+      # one of the two compilations can be disabled.  We should prefer
+      # $dir$base.o.d over $dir.libs/$base.o.d because the latter is
+      # automatically cleaned when .libs/ is deleted, while ignoring
+      # the former would cause a distcleancheck panic.
+      tmpdepfile1=$dir.libs/$base.lo.d   # libtool 1.4
+      tmpdepfile2=$dir$base.o.d          # libtool 1.5
+      tmpdepfile3=$dir.libs/$base.o.d    # libtool 1.5
+      tmpdepfile4=$dir.libs/$base.d      # Compaq CCC V6.2-504
+      "$@" -Wc,-MD
+   else
+      tmpdepfile1=$dir$base.o.d
+      tmpdepfile2=$dir$base.d
+      tmpdepfile3=$dir$base.d
+      tmpdepfile4=$dir$base.d
+      "$@" -MD
+   fi
+
+   stat=$?
+   if test $stat -eq 0; then :
+   else
+      rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4"
+      exit $stat
+   fi
+
+   for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4"
+   do
+     test -f "$tmpdepfile" && break
+   done
+   if test -f "$tmpdepfile"; then
+      sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile"
+      # That's a tab and a space in the [].
+      sed -e 's,^.*\.[a-z]*:[	 ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile"
+   else
+      echo "#dummy" > "$depfile"
+   fi
+   rm -f "$tmpdepfile"
+   ;;
+
+#nosideeffect)
+  # This comment above is used by automake to tell side-effect
+  # dependency tracking mechanisms from slower ones.
+
+dashmstdout)
+  # Important note: in order to support this mode, a compiler *must*
+  # always write the preprocessed file to stdout, regardless of -o.
+  "$@" || exit $?
+
+  # Remove the call to Libtool.
+  if test "$libtool" = yes; then
+    while test $1 != '--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+
+  # Remove `-o $object'.
+  IFS=" "
+  for arg
+  do
+    case $arg in
+    -o)
+      shift
+      ;;
+    $object)
+      shift
+      ;;
+    *)
+      set fnord "$@" "$arg"
+      shift # fnord
+      shift # $arg
+      ;;
+    esac
+  done
+
+  test -z "$dashmflag" && dashmflag=-M
+  # Require at least two characters before searching for `:'
+  # in the target name.  This is to cope with DOS-style filenames:
+  # a dependency such as `c:/foo/bar' could be seen as target `c' otherwise.
+  "$@" $dashmflag |
+    sed 's:^[  ]*[^: ][^:][^:]*\:[    ]*:'"$object"'\: :' > "$tmpdepfile"
+  rm -f "$depfile"
+  cat < "$tmpdepfile" > "$depfile"
+  tr ' ' '
+' < "$tmpdepfile" | \
+## Some versions of the HPUX 10.20 sed can't process this invocation
+## correctly.  Breaking it into two sed invocations is a workaround.
+    sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+dashXmstdout)
+  # This case only exists to satisfy depend.m4.  It is never actually
+  # run, as this mode is specially recognized in the preamble.
+  exit 1
+  ;;
+
+makedepend)
+  "$@" || exit $?
+  # Remove any Libtool call
+  if test "$libtool" = yes; then
+    while test $1 != '--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+  # X makedepend
+  shift
+  cleared=no
+  for arg in "$@"; do
+    case $cleared in
+    no)
+      set ""; shift
+      cleared=yes ;;
+    esac
+    case "$arg" in
+    -D*|-I*)
+      set fnord "$@" "$arg"; shift ;;
+    # Strip any option that makedepend may not understand.  Remove
+    # the object too, otherwise makedepend will parse it as a source file.
+    -*|$object)
+      ;;
+    *)
+      set fnord "$@" "$arg"; shift ;;
+    esac
+  done
+  obj_suffix="`echo $object | sed 's/^.*\././'`"
+  touch "$tmpdepfile"
+  ${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@"
+  rm -f "$depfile"
+  cat < "$tmpdepfile" > "$depfile"
+  sed '1,2d' "$tmpdepfile" | tr ' ' '
+' | \
+## Some versions of the HPUX 10.20 sed can't process this invocation
+## correctly.  Breaking it into two sed invocations is a workaround.
+    sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile" "$tmpdepfile".bak
+  ;;
+
+cpp)
+  # Important note: in order to support this mode, a compiler *must*
+  # always write the preprocessed file to stdout.
+  "$@" || exit $?
+
+  # Remove the call to Libtool.
+  if test "$libtool" = yes; then
+    while test $1 != '--mode=compile'; do
+      shift
+    done
+    shift
+  fi
+
+  # Remove `-o $object'.
+  IFS=" "
+  for arg
+  do
+    case $arg in
+    -o)
+      shift
+      ;;
+    $object)
+      shift
+      ;;
+    *)
+      set fnord "$@" "$arg"
+      shift # fnord
+      shift # $arg
+      ;;
+    esac
+  done
+
+  "$@" -E |
+    sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \
+       -e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' |
+    sed '$ s: \\$::' > "$tmpdepfile"
+  rm -f "$depfile"
+  echo "$object : \\" > "$depfile"
+  cat < "$tmpdepfile" >> "$depfile"
+  sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+msvisualcpp)
+  # Important note: in order to support this mode, a compiler *must*
+  # always write the preprocessed file to stdout, regardless of -o,
+  # because we must use -o when running libtool.
+  "$@" || exit $?
+  IFS=" "
+  for arg
+  do
+    case "$arg" in
+    "-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI")
+	set fnord "$@"
+	shift
+	shift
+	;;
+    *)
+	set fnord "$@" "$arg"
+	shift
+	shift
+	;;
+    esac
+  done
+  "$@" -E |
+  sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::echo "`cygpath -u \\"\1\\"`":p' | sort | uniq > "$tmpdepfile"
+  rm -f "$depfile"
+  echo "$object : \\" > "$depfile"
+  . "$tmpdepfile" | sed 's% %\\ %g' | sed -n '/^\(.*\)$/ s::	\1 \\:p' >> "$depfile"
+  echo "	" >> "$depfile"
+  . "$tmpdepfile" | sed 's% %\\ %g' | sed -n '/^\(.*\)$/ s::\1\::p' >> "$depfile"
+  rm -f "$tmpdepfile"
+  ;;
+
+none)
+  exec "$@"
+  ;;
+
+*)
+  echo "Unknown depmode $depmode" 1>&2
+  exit 1
+  ;;
+esac
+
+exit 0
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-end: "$"
+# End:
diff --git a/lang/c/jansson/doc/Makefile.am b/lang/c/jansson/doc/Makefile.am
new file mode 100644
index 0000000..6c79b54
--- /dev/null
+++ b/lang/c/jansson/doc/Makefile.am
@@ -0,0 +1,20 @@
+EXTRA_DIST = conf.py apiref.rst changes.rst conformance.rst		\
+	gettingstarted.rst github_commits.c index.rst tutorial.rst	\
+	upgrading.rst ext/refcounting.py
+
+SPHINXBUILD = sphinx-build
+SPHINXOPTS = -d _build/doctrees $(SPHINXOPTS_EXTRA)
+
+html-local:
+	$(SPHINXBUILD) -b html $(SPHINXOPTS) $(srcdir) _build/html
+
+install-html-local: html
+	mkdir -p $(DESTDIR)$(htmldir)
+	cp -r _build/html $(DESTDIR)$(htmldir)
+
+uninstall-local:
+	rm -rf $(DESTDIR)$(htmldir)
+
+clean-local:
+	rm -rf _build
+	rm -f ext/refcounting.pyc
diff --git a/lang/c/jansson/doc/Makefile.in b/lang/c/jansson/doc/Makefile.in
new file mode 100644
index 0000000..2fa9a3a
--- /dev/null
+++ b/lang/c/jansson/doc/Makefile.in
@@ -0,0 +1,352 @@
+# Makefile.in generated by automake 1.10 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006  Free Software Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+ at SET_MAKE@
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+subdir = doc
+DIST_COMMON = README $(srcdir)/Makefile.am $(srcdir)/Makefile.in
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+	$(ACLOCAL_M4)
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = $(top_builddir)/config.h
+CONFIG_CLEAN_FILES =
+SOURCES =
+DIST_SOURCES =
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+LD = @LD@
+LDFLAGS = @LDFLAGS@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+MAKEINFO = @MAKEINFO@
+MKDIR_P = @MKDIR_P@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+RANLIB = @RANLIB@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+json_have_long_long = @json_have_long_long@
+json_inline = @json_inline@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+EXTRA_DIST = conf.py apiref.rst changes.rst conformance.rst		\
+	gettingstarted.rst github_commits.c index.rst tutorial.rst	\
+	upgrading.rst ext/refcounting.py
+
+SPHINXBUILD = sphinx-build
+SPHINXOPTS = -d _build/doctrees $(SPHINXOPTS_EXTRA)
+all: all-am
+
+.SUFFIXES:
+$(srcdir)/Makefile.in:  $(srcdir)/Makefile.am  $(am__configure_deps)
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+	      cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \
+		&& exit 0; \
+	      exit 1;; \
+	  esac; \
+	done; \
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign  doc/Makefile'; \
+	cd $(top_srcdir) && \
+	  $(AUTOMAKE) --foreign  doc/Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+	@case '$?' in \
+	  *config.status*) \
+	    cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+	  *) \
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
+	esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure:  $(am__configure_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4):  $(am__aclocal_m4_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-libtool:
+	-rm -rf .libs _libs
+tags: TAGS
+TAGS:
+
+ctags: CTAGS
+CTAGS:
+
+
+distdir: $(DISTFILES)
+	@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	list='$(DISTFILES)'; \
+	  dist_files=`for file in $$list; do echo $$file; done | \
+	  sed -e "s|^$$srcdirstrip/||;t" \
+	      -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+	case $$dist_files in \
+	  */*) $(MKDIR_P) `echo "$$dist_files" | \
+			   sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+			   sort -u` ;; \
+	esac; \
+	for file in $$dist_files; do \
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+	  if test -d $$d/$$file; then \
+	    dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \
+	    fi; \
+	    cp -pR $$d/$$file $(distdir)$$dir || exit 1; \
+	  else \
+	    test -f $(distdir)/$$file \
+	    || cp -p $$d/$$file $(distdir)/$$file \
+	    || exit 1; \
+	  fi; \
+	done
+check-am: all-am
+check: check-am
+all-am: Makefile
+installdirs:
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+	$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	  install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	  `test -z '$(STRIP)' || \
+	    echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+
+maintainer-clean-generic:
+	@echo "This command is intended for maintainers to use"
+	@echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-generic clean-libtool clean-local mostlyclean-am
+
+distclean: distclean-am
+	-rm -f Makefile
+distclean-am: clean-am distclean-generic
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+html-am: html-local
+
+info: info-am
+
+info-am:
+
+install-data-am:
+
+install-dvi: install-dvi-am
+
+install-exec-am:
+
+install-html: install-html-am
+
+install-html-am: install-html-local
+
+install-info: install-info-am
+
+install-man:
+
+install-pdf: install-pdf-am
+
+install-ps: install-ps-am
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+	-rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-generic mostlyclean-libtool
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am: uninstall-local
+
+.MAKE: install-am install-strip
+
+.PHONY: all all-am check check-am clean clean-generic clean-libtool \
+	clean-local distclean distclean-generic distclean-libtool \
+	distdir dvi dvi-am html html-am html-local info info-am \
+	install install-am install-data install-data-am install-dvi \
+	install-dvi-am install-exec install-exec-am install-html \
+	install-html-am install-html-local install-info \
+	install-info-am install-man install-pdf install-pdf-am \
+	install-ps install-ps-am install-strip installcheck \
+	installcheck-am installdirs maintainer-clean \
+	maintainer-clean-generic mostlyclean mostlyclean-generic \
+	mostlyclean-libtool pdf pdf-am ps ps-am uninstall uninstall-am \
+	uninstall-local
+
+
+html-local:
+	$(SPHINXBUILD) -b html $(SPHINXOPTS) $(srcdir) _build/html
+
+install-html-local: html
+	mkdir -p $(DESTDIR)$(htmldir)
+	cp -r _build/html $(DESTDIR)$(htmldir)
+
+uninstall-local:
+	rm -rf $(DESTDIR)$(htmldir)
+
+clean-local:
+	rm -rf _build
+	rm -f ext/refcounting.pyc
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/lang/c/jansson/doc/README b/lang/c/jansson/doc/README
new file mode 100644
index 0000000..930b3bf
--- /dev/null
+++ b/lang/c/jansson/doc/README
@@ -0,0 +1,5 @@
+To build the documentation, invoke
+
+    make html
+
+Then point your browser to _build/html/index.html.
diff --git a/lang/c/jansson/doc/apiref.rst b/lang/c/jansson/doc/apiref.rst
new file mode 100644
index 0000000..48ff979
--- /dev/null
+++ b/lang/c/jansson/doc/apiref.rst
@@ -0,0 +1,1221 @@
+.. _apiref:
+
+*************
+API Reference
+*************
+
+.. highlight:: c
+
+Preliminaries
+=============
+
+All declarations are in :file:`jansson.h`, so it's enough to
+
+::
+
+   #include <jansson.h>
+
+in each source file.
+
+All constants are prefixed with ``JSON_`` (except for those describing
+the library version, prefixed with ``JANSSON_``). Other identifiers
+are prefixed with ``json_``. Type names are suffixed with ``_t`` and
+``typedef``\ 'd so that the ``struct`` keyword need not be used.
+
+
+Library Version
+===============
+
+The Jansson version is of the form *A.B.C*, where *A* is the major
+version, *B* is the minor version and *C* is the micro version. If the
+micro version is zero, it's omitted from the version string, i.e. the
+version string is just *A.B*.
+
+When a new release only fixes bugs and doesn't add new features or
+functionality, the micro version is incremented. When new features are
+added in a backwards compatible way, the minor version is incremented
+and the micro version is set to zero. When there are backwards
+incompatible changes, the major version is incremented and others are
+set to zero.
+
+The following preprocessor constants specify the current version of
+the library:
+
+``JANSSON_VERSION_MAJOR``, ``JANSSON_VERSION_MINOR``, ``JANSSON_VERSION_MICRO``
+  Integers specifying the major, minor and micro versions,
+  respectively.
+
+``JANSSON_VERSION``
+  A string representation of the current version, e.g. ``"1.2.1"`` or
+  ``"1.3"``.
+
+``JANSSON_VERSION_HEX``
+  A 3-byte hexadecimal representation of the version, e.g.
+  ``0x010201`` for version 1.2.1 and ``0x010300`` for version 1.3.
+  This is useful in numeric comparisons, e.g.::
+
+      #if JANSSON_VERSION_HEX >= 0x010300
+      /* Code specific to version 1.3 and above */
+      #endif
+
+
+Value Representation
+====================
+
+The JSON specification (:rfc:`4627`) defines the following data types:
+*object*, *array*, *string*, *number*, *boolean*, and *null*. JSON
+types are used dynamically; arrays and objects can hold any other data
+type, including themselves. For this reason, Jansson's type system is
+also dynamic in nature. There's one C type to represent all JSON
+values, and this structure knows the type of the JSON value it holds.
+
+.. type:: json_t
+
+  This data structure is used throughout the library to represent all
+  JSON values. It always contains the type of the JSON value it holds
+  and the value's reference count. The rest depends on the type of the
+  value.
+
+Objects of :type:`json_t` are always used through a pointer. There
+are APIs for querying the type, manipulating the reference count, and
+for constructing and manipulating values of different types.
+
+Unless noted otherwise, all API functions return an error value if an
+error occurs. Depending on the function's signature, the error value
+is either *NULL* or -1. Invalid arguments or invalid input are
+obvious sources of errors. Memory allocation and I/O operations may
+also cause errors.
+
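+As a minimal sketch of this convention, assuming the object functions
+``json_object()`` and ``json_object_set_new()`` (a reference-stealing
+``_new`` function, see below)::
+
+  json_t *obj = json_object();
+  if (!obj) {
+      /* pointer-returning functions signal errors with NULL */
+  }
+
+  if (json_object_set_new(obj, "key", json_null()) == -1) {
+      /* int-returning functions signal errors with -1 */
+  }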
+
+Type
+----
+
+The type of a JSON value is queried and tested using the following
+functions:
+
+.. type:: enum json_type
+
+   The type of a JSON value. The following members are defined:
+
+   +--------------------+
+   | ``JSON_OBJECT``    |
+   +--------------------+
+   | ``JSON_ARRAY``     |
+   +--------------------+
+   | ``JSON_STRING``    |
+   +--------------------+
+   | ``JSON_INTEGER``   |
+   +--------------------+
+   | ``JSON_REAL``      |
+   +--------------------+
+   | ``JSON_TRUE``      |
+   +--------------------+
+   | ``JSON_FALSE``     |
+   +--------------------+
+   | ``JSON_NULL``      |
+   +--------------------+
+
+   These correspond to JSON object, array, string, number, boolean and
+   null. A number is represented by either a value of the type
+   ``JSON_INTEGER`` or of the type ``JSON_REAL``. A true boolean value
+   is represented by a value of the type ``JSON_TRUE`` and false by a
+   value of the type ``JSON_FALSE``.
+
+.. function:: int json_typeof(const json_t *json)
+
+   Return the type of the JSON value (a :type:`json_type` cast to
+   :type:`int`). *json* MUST NOT be *NULL*. This function is actually
+   implemented as a macro for speed.
+
+.. function:: json_is_object(const json_t *json)
+               json_is_array(const json_t *json)
+               json_is_string(const json_t *json)
+               json_is_integer(const json_t *json)
+               json_is_real(const json_t *json)
+               json_is_true(const json_t *json)
+               json_is_false(const json_t *json)
+               json_is_null(const json_t *json)
+
+   These functions (actually macros) return true (non-zero) for values
+   of the given type, and false (zero) for values of other types and
+   for *NULL*.
+
+.. function:: json_is_number(const json_t *json)
+
+   Returns true for values of types ``JSON_INTEGER`` and
+   ``JSON_REAL``, and false for other types and for *NULL*.
+
+.. function:: json_is_boolean(const json_t *json)
+
+   Returns true for types ``JSON_TRUE`` and ``JSON_FALSE``, and false
+   for values of other types and for *NULL*.
+
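+As a sketch, a dispatch on the type of a value (``value`` is assumed
+to be a valid, non-*NULL* ``json_t`` pointer obtained elsewhere) might
+look like this::
+
+  if (json_is_string(value)) {
+      /* a JSON string */
+  } else if (json_is_number(value)) {
+      /* JSON_INTEGER or JSON_REAL */
+  } else if (json_is_boolean(value)) {
+      /* JSON_TRUE or JSON_FALSE */
+  }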
+
+.. _apiref-reference-count:
+
+Reference Count
+---------------
+
+The reference count is used to track whether a value is still in use
+or not. When a value is created, its reference count is set to 1. If
+a reference to a value is kept (e.g. a value is stored somewhere for
+later use), its reference count is incremented, and when the value is
+no longer needed, the reference count is decremented. When the
+reference count drops to zero, there are no references left, and the
+value can be destroyed.
+
+The following functions are used to manipulate the reference count.
+
+.. function:: json_t *json_incref(json_t *json)
+
+   Increment the reference count of *json* if it's not *NULL*.
+   Returns *json*.
+
+.. function:: void json_decref(json_t *json)
+
+   Decrement the reference count of *json*. As soon as a call to
+   :func:`json_decref()` drops the reference count to zero, the value
+   is destroyed and it can no longer be used.
+
+Functions creating new JSON values set the reference count to 1. These
+functions are said to return a **new reference**. Other functions
+returning (existing) JSON values do not normally increase the
+reference count. These functions are said to return a **borrowed
+reference**. So, a user who wants to keep a reference to a value returned
+as a borrowed reference must call :func:`json_incref`. As soon as
+the value is no longer needed, :func:`json_decref` should be called
+to release the reference.
+
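+For example, assuming ``json_object_get()`` (which returns a borrowed
+reference) and an object ``obj`` holding the key ``"foo"``, a sketch
+of keeping a borrowed reference looks like this::
+
+  json_t *value = json_object_get(obj, "foo");  /* borrowed reference */
+  json_incref(value);                           /* keep it */
+  /* ... use value ... */
+  json_decref(value);                           /* release it */
+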
+Normally, all functions accepting a JSON value as an argument will
+manage the reference, i.e. increase and decrease the reference count
+as needed. However, some functions **steal** the reference, i.e. they
+have the same result as if the user called :func:`json_decref()` on
+the argument right after calling the function. These functions are
+suffixed with ``_new`` or have ``_new_`` somewhere in their name.
+
+For example, the following code creates a new JSON array and appends
+an integer to it::
+
+  json_t *array, *integer;
+
+  array = json_array();
+  integer = json_integer(42);
+
+  json_array_append(array, integer);
+  json_decref(integer);
+
+Note how the caller has to release the reference to the integer value
+by calling :func:`json_decref()`. By using a reference stealing
+function :func:`json_array_append_new()` instead of
+:func:`json_array_append()`, the code becomes much simpler::
+
+  json_t *array = json_array();
+  json_array_append_new(array, json_integer(42));
+
+In this case, the user doesn't have to explicitly release the
+reference to the integer value, as :func:`json_array_append_new()`
+steals the reference when appending the value to the array.
+
+In the following sections it is clearly documented whether a function
+will return a new or borrowed reference or steal a reference to its
+argument.
+
+
+Circular References
+-------------------
+
+A circular reference is created when an object or an array is,
+directly or indirectly, inserted inside itself. The direct case is
+simple::
+
+  json_t *obj = json_object();
+  json_object_set(obj, "foo", obj);
+
+Jansson will refuse to do this, and :func:`json_object_set()` (and
+all the other such functions for objects and arrays) will return with
+an error status. The indirect case is the dangerous one::
+
+  json_t *arr1 = json_array(), *arr2 = json_array();
+  json_array_append(arr1, arr2);
+  json_array_append(arr2, arr1);
+
+In this example, the array ``arr2`` is contained in the array
+``arr1``, and vice versa. Jansson cannot check for this kind of
+indirect circular reference without a performance hit, so it's up to
+the user to avoid them.
+
+If a circular reference is created, the memory consumed by the values
+cannot be freed by :func:`json_decref()`. The reference counts never
+drop to zero because the values keep references to each other.
+Moreover, trying to encode the values with any of the encoding
+functions will fail. The encoder detects circular references and
+returns an error status.
+
+
+True, False and Null
+====================
+
+These values are implemented as singletons, so each of these functions
+returns the same value each time.
+
+.. function:: json_t *json_true(void)
+
+   .. refcounting:: new
+
+   Returns the JSON true value.
+
+.. function:: json_t *json_false(void)
+
+   .. refcounting:: new
+
+   Returns the JSON false value.
+
+.. function:: json_t *json_null(void)
+
+   .. refcounting:: new
+
+   Returns the JSON null value.
+
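+Because of the singleton behavior, each call returns the same pointer,
+so identity comparison works. A minimal sketch (``assert()`` is used
+here only for illustration)::
+
+  json_t *val = json_true();
+  assert(val == json_true());   /* the same singleton every time */
+  assert(json_is_boolean(val));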
+
+String
+======
+
+Jansson uses UTF-8 as the character encoding. All JSON strings must be
+valid UTF-8 (or ASCII, as it's a subset of UTF-8). Normal null
+terminated C strings are used, so JSON strings may not contain
+embedded null characters. All other Unicode codepoints U+0001 through
+U+10FFFF are allowed.
+
+.. function:: json_t *json_string(const char *value)
+
+   .. refcounting:: new
+
+   Returns a new JSON string, or *NULL* on error. *value* must be a
+   valid UTF-8 encoded Unicode string.
+
+.. function:: json_t *json_string_nocheck(const char *value)
+
+   .. refcounting:: new
+
+   Like :func:`json_string`, but doesn't check that *value* is valid
+   UTF-8. Use this function only if you are certain that this really
+   is the case (e.g. you have already checked it by other means).
+
+.. function:: const char *json_string_value(const json_t *string)
+
+   Returns the associated value of *string* as a null terminated UTF-8
+   encoded string, or *NULL* if *string* is not a JSON string.
+
+   The returned value is read-only and must not be modified or freed by
+   the user. It is valid as long as *string* exists, i.e. as long as
+   its reference count has not dropped to zero.
+
+.. function:: int json_string_set(json_t *string, const char *value)
+
+   Sets the associated value of *string* to *value*. *value* must be a
+   valid UTF-8 encoded Unicode string. Returns 0 on success and -1 on
+   error.
+
+.. function:: int json_string_set_nocheck(json_t *string, const char *value)
+
+   Like :func:`json_string_set`, but doesn't check that *value* is
+   valid UTF-8. Use this function only if you are certain that this
+   really is the case (e.g. you have already checked it by other
+   means).
+
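+A short example combining the string functions above (a sketch; error
+handling is omitted for brevity)::
+
+  json_t *s = json_string("hello");
+  printf("%s\n", json_string_value(s));   /* prints "hello" */
+
+  json_string_set(s, "world");
+  printf("%s\n", json_string_value(s));   /* prints "world" */
+
+  json_decref(s);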
+
+Number
+======
+
+The JSON specification only contains one numeric type, "number". The C
+programming language has distinct types for integer and floating-point
+numbers, so for practical reasons Jansson also has distinct types for
+the two. They are called "integer" and "real", respectively. For more
+information, see :ref:`rfc-conformance`.
+
+.. type:: json_int_t
+
+   This is the C type that is used to store JSON integer values. It
+   represents the widest integer type available on your system. In
+   practice it's just a typedef of ``long long`` if your compiler
+   supports it, otherwise ``long``.
+
+   Usually, you can safely use plain ``int`` in place of
+   ``json_int_t``, and the implicit C integer conversion handles the
+   rest. Only when you know that you need the full 64-bit range
+   should you use ``json_int_t`` explicitly.
+
+``JSON_INTEGER_IS_LONG_LONG``
+
+   This is a preprocessor variable that holds the value 1 if
+   :type:`json_int_t` is ``long long``, and 0 if it's ``long``. It
+   can be used as follows::
+
+       #if JSON_INTEGER_IS_LONG_LONG
+       /* Code specific for long long */
+       #else
+       /* Code specific for long */
+       #endif
+
+``JSON_INTEGER_FORMAT``
+
+   This is a macro that expands to a :func:`printf()` conversion
+   specifier that corresponds to :type:`json_int_t`, without the
+   leading ``%`` sign, i.e. either ``"lld"`` or ``"ld"``. This macro
+   is required because the actual type of :type:`json_int_t` can be
+   either ``long`` or ``long long``, and :func:`printf()` requires
+   different length modifiers for the two.
+
+   Example::
+
+       json_int_t x = 123123123;
+       printf("x is %" JSON_INTEGER_FORMAT "\n", x);
+
+
+.. function:: json_t *json_integer(json_int_t value)
+
+   .. refcounting:: new
+
+   Returns a new JSON integer, or *NULL* on error.
+
+.. function:: json_int_t json_integer_value(const json_t *integer)
+
+   Returns the associated value of *integer*, or 0 if *json* is not a
+   JSON integer.
+
+.. function:: int json_integer_set(json_t *integer, json_int_t value)
+
+   Sets the associated value of *integer* to *value*. Returns 0 on
+   success and -1 if *integer* is not a JSON integer.
+
+.. function:: json_t *json_real(double value)
+
+   .. refcounting:: new
+
+   Returns a new JSON real, or *NULL* on error.
+
+.. function:: double json_real_value(const json_t *real)
+
+   Returns the associated value of *real*, or 0.0 if *real* is not a
+   JSON real.
+
+.. function:: int json_real_set(json_t *real, double value)
+
+   Sets the associated value of *real* to *value*. Returns 0 on
+   success and -1 if *real* is not a JSON real.
+
+In addition to the functions above, there's a common query function
+for integers and reals:
+
+.. function:: double json_number_value(const json_t *json)
+
+   Returns the associated value of the JSON integer or JSON real
+   *json*, cast to double regardless of the actual type. If *json* is
+   neither JSON real nor JSON integer, 0.0 is returned.
+
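+A short example of the number functions above (a sketch; error
+handling is omitted for brevity)::
+
+  json_t *i = json_integer(42);
+  json_t *r = json_real(3.25);
+
+  /* 42, using the printf conversion specifier described above */
+  printf("%" JSON_INTEGER_FORMAT "\n", json_integer_value(i));
+
+  printf("%f\n", json_real_value(r));     /* 3.250000 */
+  printf("%f\n", json_number_value(i));   /* 42.000000, cast to double */
+
+  json_decref(i);
+  json_decref(r);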
+
+Array
+=====
+
+A JSON array is an ordered collection of other JSON values.
+
+.. function:: json_t *json_array(void)
+
+   .. refcounting:: new
+
+   Returns a new JSON array, or *NULL* on error. Initially, the array
+   is empty.
+
+.. function:: size_t json_array_size(const json_t *array)
+
+   Returns the number of elements in *array*, or 0 if *array* is *NULL*
+   or not a JSON array.
+
+.. function:: json_t *json_array_get(const json_t *array, size_t index)
+
+   .. refcounting:: borrow
+
+   Returns the element in *array* at position *index*. The valid range
+   for *index* is from 0 to the return value of
+   :func:`json_array_size()` minus 1. If *array* is not a JSON array,
+   if *array* is *NULL*, or if *index* is out of range, *NULL* is
+   returned.
+
+.. function:: int json_array_set(json_t *array, size_t index, json_t *value)
+
+   Replaces the element in *array* at position *index* with *value*.
+   The valid range for *index* is from 0 to the return value of
+   :func:`json_array_size()` minus 1. Returns 0 on success and -1 on
+   error.
+
+.. function:: int json_array_set_new(json_t *array, size_t index, json_t *value)
+
+   Like :func:`json_array_set()` but steals the reference to *value*.
+   This is useful when *value* is newly created and not used after
+   the call.
+
+.. function:: int json_array_append(json_t *array, json_t *value)
+
+   Appends *value* to the end of *array*, growing the size of *array*
+   by 1. Returns 0 on success and -1 on error.
+
+.. function:: int json_array_append_new(json_t *array, json_t *value)
+
+   Like :func:`json_array_append()` but steals the reference to
+   *value*. This is useful when *value* is newly created and not used
+   after the call.
+
+.. function:: int json_array_insert(json_t *array, size_t index, json_t *value)
+
+   Inserts *value* into *array* at position *index*, shifting the
+   elements at *index* and after it one position towards the end of
+   the array. Returns 0 on success and -1 on error.
+
+.. function:: int json_array_insert_new(json_t *array, size_t index, json_t *value)
+
+   Like :func:`json_array_insert()` but steals the reference to
+   *value*. This is useful when *value* is newly created and not used
+   after the call.
+
+.. function:: int json_array_remove(json_t *array, size_t index)
+
+   Removes the element in *array* at position *index*, shifting the
+   elements after *index* one position towards the start of the array.
+   Returns 0 on success and -1 on error.
+
+.. function:: int json_array_clear(json_t *array)
+
+   Removes all elements from *array*. Returns 0 on success and -1 on
+   error.
+
+.. function:: int json_array_extend(json_t *array, json_t *other_array)
+
+   Appends all elements in *other_array* to the end of *array*.
+   Returns 0 on success and -1 on error.
+
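+For example, building a small array and iterating over it (a sketch;
+error handling is omitted for brevity)::
+
+  size_t i;
+  json_t *arr = json_array();
+
+  json_array_append_new(arr, json_integer(1));
+  json_array_append_new(arr, json_integer(2));
+
+  for(i = 0; i < json_array_size(arr); i++)
+  {
+      /* json_array_get returns a borrowed reference */
+      json_t *elem = json_array_get(arr, i);
+      printf("%" JSON_INTEGER_FORMAT "\n", json_integer_value(elem));
+  }
+
+  json_decref(arr);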
+
+Object
+======
+
+A JSON object is a dictionary of key-value pairs, where the key is a
+Unicode string and the value is any JSON value.
+
+.. function:: json_t *json_object(void)
+
+   .. refcounting:: new
+
+   Returns a new JSON object, or *NULL* on error. Initially, the
+   object is empty.
+
+.. function:: size_t json_object_size(const json_t *object)
+
+   Returns the number of elements in *object*, or 0 if *object* is not
+   a JSON object.
+
+.. function:: json_t *json_object_get(const json_t *object, const char *key)
+
+   .. refcounting:: borrow
+
+   Get a value corresponding to *key* from *object*. Returns *NULL*
+   if *key* is not found or on error.
+
+.. function:: int json_object_set(json_t *object, const char *key, json_t *value)
+
+   Set the value of *key* to *value* in *object*. *key* must be a
+   valid null terminated UTF-8 encoded Unicode string. If there
+   already is a value for *key*, it is replaced by the new value.
+   Returns 0 on success and -1 on error.
+
+.. function:: int json_object_set_nocheck(json_t *object, const char *key, json_t *value)
+
+   Like :func:`json_object_set`, but doesn't check that *key* is
+   valid UTF-8. Use this function only if you are certain that this
+   really is the case (e.g. you have already checked it by other
+   means).
+
+.. function:: int json_object_set_new(json_t *object, const char *key, json_t *value)
+
+   Like :func:`json_object_set()` but steals the reference to
+   *value*. This is useful when *value* is newly created and not used
+   after the call.
+
+.. function:: int json_object_set_new_nocheck(json_t *object, const char *key, json_t *value)
+
+   Like :func:`json_object_set_new`, but doesn't check that *key* is
+   valid UTF-8. Use this function only if you are certain that this
+   really is the case (e.g. you have already checked it by other
+   means).
+
+.. function:: int json_object_del(json_t *object, const char *key)
+
+   Delete *key* from *object* if it exists. Returns 0 on success, or
+   -1 if *key* was not found.
+
+
+.. function:: int json_object_clear(json_t *object)
+
+   Remove all elements from *object*. Returns 0 on success and -1 if
+   *object* is not a JSON object.
+
+.. function:: int json_object_update(json_t *object, json_t *other)
+
+   Update *object* with the key-value pairs from *other*, overwriting
+   existing keys. Returns 0 on success or -1 on error.
+
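+For example, building an object and reading a value back (a sketch;
+error handling is omitted for brevity)::
+
+  json_t *obj = json_object();
+
+  json_object_set_new(obj, "name", json_string("Jansson"));
+  json_object_set_new(obj, "awesome", json_true());
+
+  /* json_object_get returns a borrowed reference */
+  printf("%s\n", json_string_value(json_object_get(obj, "name")));
+
+  json_decref(obj);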
+
+The following functions implement an iteration protocol for objects,
+allowing iteration through all key-value pairs in an object. The
+items are not returned in any particular order, as this would require
+sorting due to the internal object representation.
+
+.. function:: void *json_object_iter(json_t *object)
+
+   Returns an opaque iterator which can be used to iterate over all
+   key-value pairs in *object*, or *NULL* if *object* is empty.
+
+.. function:: void *json_object_iter_at(json_t *object, const char *key)
+
+   Like :func:`json_object_iter()`, but returns an iterator to the
+   key-value pair in *object* whose key is equal to *key*, or *NULL* if
+   *key* is not found in *object*. Iterating forward to the end of
+   *object* only yields all key-value pairs of the object if *key*
+   happens to be the first key in the underlying hash table.
+
+.. function:: void *json_object_iter_next(json_t *object, void *iter)
+
+   Returns an iterator pointing to the next key-value pair in *object*
+   after *iter*, or *NULL* if the whole object has been iterated
+   through.
+
+.. function:: const char *json_object_iter_key(void *iter)
+
+   Extract the associated key from *iter*.
+
+.. function:: json_t *json_object_iter_value(void *iter)
+
+   .. refcounting:: borrow
+
+   Extract the associated value from *iter*.
+
+.. function:: int json_object_iter_set(json_t *object, void *iter, json_t *value)
+
+   Set the value of the key-value pair in *object* that is pointed
+   to by *iter* to *value*.
+
+.. function:: int json_object_iter_set_new(json_t *object, void *iter, json_t *value)
+
+   Like :func:`json_object_iter_set()`, but steals the reference to
+   *value*. This is useful when *value* is newly created and not used
+   after the call.
+
+The iteration protocol can be used for example as follows::
+
+   /* obj is a JSON object */
+   const char *key;
+   json_t *value;
+   void *iter = json_object_iter(obj);
+   while(iter)
+   {
+       key = json_object_iter_key(iter);
+       value = json_object_iter_value(iter);
+       /* use key and value ... */
+       iter = json_object_iter_next(obj, iter);
+   }
+
+
+Error reporting
+===============
+
+Jansson uses a single struct type to pass error information to the
+user. See sections :ref:`apiref-decoding`, :ref:`apiref-pack` and
+:ref:`apiref-unpack` for functions that pass error information using
+this struct.
+
+.. type:: json_error_t
+
+   .. member:: char text[]
+
+      The error message (in UTF-8), or an empty string if a message is
+      not available.
+
+   .. member:: char source[]
+
+      Source of the error. This can be (a part of) the file name or a
+      special identifier in angle brackets (e.g. ``<string>``).
+
+   .. member:: int line
+
+      The line number on which the error occurred.
+
+   .. member:: int column
+
+      The column on which the error occurred. Note that this is the
+      *character column*, not the byte column, i.e. a multibyte UTF-8
+      character counts as one column.
+
+   .. member:: size_t position
+
+      The position in bytes from the start of the input. This is
+      useful for debugging Unicode encoding problems.
+
+The normal use of :type:`json_error_t` is to allocate it on the stack,
+and pass a pointer to it to a function. Example::
+
+   int main() {
+       json_t *json;
+       json_error_t error;
+
+       json = json_load_file("/path/to/file.json", 0, &error);
+       if(!json) {
+           /* the error variable contains error information */
+       }
+       ...
+   }
+
+Also note that if the call succeeded (``json != NULL`` in the above
+example), the contents of ``error`` are unspecified.
+
+All functions also accept *NULL* as the :type:`json_error_t` pointer,
+in which case no error information is returned to the caller.
+
+
+Encoding
+========
+
+This section describes the functions that can be used to encode
+values to JSON. By default, only objects and arrays can be encoded
+directly, since they are the only valid *root* values of a JSON text.
+To encode any JSON value, use the ``JSON_ENCODE_ANY`` flag (see
+below).
+
+By default, the output has no newlines, and spaces are used between
+array and object elements for a readable output. This behavior can be
+altered by using the ``JSON_INDENT`` and ``JSON_COMPACT`` flags
+described below. A newline is never appended to the end of the encoded
+JSON data.
+
+Each function takes a *flags* parameter that controls some aspects of
+how the data is encoded. Its default value is 0. The following macros
+can be ORed together to obtain *flags*.
+
+``JSON_INDENT(n)``
+   Pretty-print the result, using newlines between array and object
+   items, and indenting with *n* spaces. The valid range for *n* is
+   between 0 and 32, other values result in an undefined output. If
+   ``JSON_INDENT`` is not used or *n* is 0, no newlines are inserted
+   between array and object items.
+
+``JSON_COMPACT``
+   This flag enables a compact representation, i.e. sets the separator
+   between array and object items to ``","`` and between object keys
+   and values to ``":"``. Without this flag, the corresponding
+   separators are ``", "`` and ``": "`` for more readable output.
+
+``JSON_ENSURE_ASCII``
+   If this flag is used, the output is guaranteed to consist only of
+   ASCII characters. This is achieved by escaping all Unicode
+   characters outside the ASCII range.
+
+``JSON_SORT_KEYS``
+   If this flag is used, all the objects in output are sorted by key.
+   This is useful e.g. if two JSON texts are diffed or visually
+   compared.
+
+``JSON_PRESERVE_ORDER``
+   If this flag is used, object keys in the output are sorted into the
+   same order in which they were first inserted to the object. For
+   example, decoding a JSON text and then encoding with this flag
+   preserves the order of object keys.
+
+``JSON_ENCODE_ANY``
+   Specifying this flag makes it possible to encode any JSON value on
+   its own. Without it, only objects and arrays can be passed as the
+   *root* value to the encoding functions.
+
+   **Note:** Encoding any value may be useful in some scenarios, but
+   it's generally discouraged as it violates strict compatibility with
+   :rfc:`4627`. If you use this flag, don't expect interoperability
+   with other JSON systems. Even Jansson itself doesn't have any means
+   to decode JSON texts whose root value is not object or array.
+
+   .. versionadded:: 2.1
+
+The following functions perform the actual JSON encoding. The result
+is in UTF-8.
+
+.. function:: char *json_dumps(const json_t *root, size_t flags)
+
+   Returns the JSON representation of *root* as a string, or *NULL* on
+   error. *flags* is described above. The return value must be freed
+   by the caller using :func:`free()`.
+
+.. function:: int json_dumpf(const json_t *root, FILE *output, size_t flags)
+
+   Write the JSON representation of *root* to the stream *output*.
+   *flags* is described above. Returns 0 on success and -1 on error.
+   If an error occurs, something may have already been written to
+   *output*. In this case, the output is undefined and most likely not
+   valid JSON.
+
+.. function:: int json_dump_file(const json_t *json, const char *path, size_t flags)
+
+   Write the JSON representation of *json* to the file *path*. If
+   *path* already exists, it is overwritten. *flags* is described
+   above. Returns 0 on success and -1 on error.
+
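+For example, a sketch of encoding with pretty-printing and sorted
+keys, assuming ``obj`` is a JSON object::
+
+  char *output = json_dumps(obj, JSON_INDENT(4) | JSON_SORT_KEYS);
+  if(output)
+  {
+      printf("%s\n", output);
+      free(output);   /* the caller owns the returned string */
+  }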
+
+.. _apiref-decoding:
+
+Decoding
+========
+
+This section describes the functions that can be used to decode JSON
+text to the Jansson representation of JSON data. The JSON
+specification requires that a JSON text is either a serialized array
+or object, and this requirement is also enforced with the following
+functions. In other words, the top level value in the JSON text being
+decoded must be either array or object.
+
+See :ref:`rfc-conformance` for a discussion on Jansson's conformance
+to the JSON specification. It explains many design decisions that
+affect especially the behavior of the decoder.
+
+Each function takes a *flags* parameter that can be used to control
+the behavior of the decoder. Its default value is 0. The following
+macros can be ORed together to obtain *flags*.
+
+``JSON_REJECT_DUPLICATES``
+   Issue a decoding error if any JSON object in the input text
+   contains duplicate keys. Without this flag, the value of the last
+   occurrence of each key ends up in the result. Key equivalence is
+   checked byte-by-byte, without special Unicode comparison
+   algorithms.
+
+   .. versionadded:: 2.1
+
+``JSON_DISABLE_EOF_CHECK``
+   By default, the decoder expects that its whole input constitutes a
+   valid JSON text, and issues an error if there's extra data after
+   the otherwise valid JSON input. With this flag enabled, the decoder
+   stops after decoding a valid JSON array or object, and thus allows
+   extra data after the JSON text.
+
+   .. versionadded:: 2.1
+
+The following functions perform the actual JSON decoding.
+
+.. function:: json_t *json_loads(const char *input, size_t flags, json_error_t *error)
+
+   .. refcounting:: new
+
+   Decodes the JSON string *input* and returns the array or object it
+   contains, or *NULL* on error, in which case *error* is filled with
+   information about the error. *flags* is described above.
+
+.. function:: json_t *json_loadb(const char *buffer, size_t buflen, size_t flags, json_error_t *error)
+
+   .. refcounting:: new
+
+   Decodes the JSON string *buffer*, whose length is *buflen*, and
+   returns the array or object it contains, or *NULL* on error, in
+   which case *error* is filled with information about the error. This
+   is similar to :func:`json_loads()` except that the string doesn't
+   need to be null-terminated. *flags* is described above.
+
+   .. versionadded:: 2.1
+
+.. function:: json_t *json_loadf(FILE *input, size_t flags, json_error_t *error)
+
+   .. refcounting:: new
+
+   Decodes the JSON text in stream *input* and returns the array or
+   object it contains, or *NULL* on error, in which case *error* is
+   filled with information about the error. *flags* is described
+   above.
+
+.. function:: json_t *json_load_file(const char *path, size_t flags, json_error_t *error)
+
+   .. refcounting:: new
+
+   Decodes the JSON text in file *path* and returns the array or
+   object it contains, or *NULL* on error, in which case *error* is
+   filled with information about the error. *flags* is described
+   above.
+
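+For example, a sketch of decoding a JSON text from a string literal::
+
+  json_error_t error;
+  json_t *root = json_loads("[1, 2, 3]", 0, &error);
+
+  if(!root)
+      fprintf(stderr, "error on line %d: %s\n", error.line, error.text);
+  else
+      json_decref(root);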
+
+.. _apiref-pack:
+
+Building Values
+===============
+
+This section describes functions that help to create, or *pack*,
+complex JSON values, especially nested objects and arrays. Value
+building is based on a *format string* that is used to tell the
+functions about the expected arguments.
+
+For example, the format string ``"i"`` specifies a single integer
+value, while the format string ``"[ssb]"`` or the equivalent ``"[s, s,
+b]"`` specifies an array value with two integers and a boolean as its
+items::
+
+    /* Create the JSON integer 42 */
+    json_pack("i", 42);
+
+    /* Create the JSON array ["foo", "bar", true] */
+    json_pack("[ssb]", "foo", "bar", 1);
+
+Here's the full list of format characters. The type in parentheses
+denotes the resulting JSON type, and the type in brackets (if any)
+denotes the C type that is expected as the corresponding argument.
+
+``s`` (string) [const char \*]
+    Convert a NULL terminated UTF-8 string to a JSON string.
+
+``n`` (null)
+    Output a JSON null value. No argument is consumed.
+
+``b`` (boolean) [int]
+    Convert a C :type:`int` to JSON boolean value. Zero is converted
+    to ``false`` and non-zero to ``true``.
+
+``i`` (integer) [int]
+    Convert a C :type:`int` to JSON integer.
+
+``I`` (integer) [json_int_t]
+    Convert a C :type:`json_int_t` to JSON integer.
+
+``f`` (real) [double]
+    Convert a C :type:`double` to JSON real.
+
+``o`` (any value) [json_t \*]
+    Output any given JSON value as-is. If the value is added to an
+    array or object, the reference to the value passed to ``o`` is
+    stolen by the container.
+
+``O`` (any value) [json_t \*]
+    Like ``o``, but the argument's reference count is incremented.
+    This is useful if you pack into an array or object and want to
+    keep the reference for the JSON value consumed by ``O`` to
+    yourself.
+
+``[fmt]`` (array)
+    Build an array with contents from the inner format string. ``fmt``
+    may contain objects and arrays, i.e. recursive value building is
+    supported.
+
+``{fmt}`` (object)
+    Build an object with contents from the inner format string
+    ``fmt``. The first, third, etc. format characters represent keys,
+    and must be ``s`` (as object keys are always strings). The second,
+    fourth, etc. format characters represent values. Any value may be
+    an object or array, i.e. recursive value building is supported.
+
+The following functions compose the value building API:
+
+.. function:: json_t *json_pack(const char *fmt, ...)
+
+   .. refcounting:: new
+
+   Build a new JSON value according to the format string *fmt*. For
+   each format character (except for ``{}[]n``), one argument is
+   consumed and used to build the corresponding value. Returns *NULL*
+   on error.
+
+.. function:: json_t *json_pack_ex(json_error_t *error, size_t flags, const char *fmt, ...)
+              json_t *json_vpack_ex(json_error_t *error, size_t flags, const char *fmt, va_list ap)
+
+   .. refcounting:: new
+
+   Like :func:`json_pack()`, but in the case of an error, an error
+   message is written to *error*, if it's not *NULL*. The *flags*
+   parameter is currently unused and should be set to 0.
+
+   As only errors in the format string (and out-of-memory errors) can
+   be caught by the packer, these two functions are most likely only
+   useful for debugging format strings.
+
+More examples::
+
+  /* Build an empty JSON object */
+  json_pack("{}");
+
+  /* Build the JSON object {"foo": 42, "bar": 7} */
+  json_pack("{sisb}", "foo", 42, "bar", 7);
+
+  /* Like above, ':', ',' and whitespace are ignored */
+  json_pack("{s:i, s:b}", "foo", 42, "bar", 7);
+
+  /* Build the JSON array [[1, 2], {"cool": true}] */
+  json_pack("[[i,i],{s:b]]", 1, 2, "cool", 1);
+
+
+.. _apiref-unpack:
+
+Parsing and Validating Values
+=============================
+
+This section describes functions that help to validate complex values
+and extract, or *unpack*, data from them. Like :ref:`building values
+<apiref-pack>`, this is also based on format strings.
+
+While a JSON value is unpacked, the type specified in the format
+string is checked to match that of the JSON value. This is the
+validation part of the process. In addition to this, the unpacking
+functions can also check that all items of arrays and objects are
+unpacked. This check can be enabled with the format character ``!`` or by
+using the flag ``JSON_STRICT``. See below for details.
+
+Here's the full list of format characters. The type in parentheses
+denotes the JSON type, and the type in brackets (if any) denotes the C
+type whose address should be passed.
+
+``s`` (string) [const char \*]
+    Convert a JSON string to a pointer to a NULL terminated UTF-8
+    string.
+
+``n`` (null)
+    Expect a JSON null value. Nothing is extracted.
+
+``b`` (boolean) [int]
+    Convert a JSON boolean value to a C :type:`int`, so that ``true``
+    is converted to 1 and ``false`` to 0.
+
+``i`` (integer) [int]
+    Convert a JSON integer to C :type:`int`.
+
+``I`` (integer) [json_int_t]
+    Convert a JSON integer to C :type:`json_int_t`.
+
+``f`` (real) [double]
+    Convert a JSON real to C :type:`double`.
+
+``F`` (integer or real) [double]
+    Convert a JSON number (integer or real) to C :type:`double`.
+
+``o`` (any value) [json_t \*]
+    Store a JSON value with no conversion to a :type:`json_t` pointer.
+
+``O`` (any value) [json_t \*]
+    Like ``o``, but the JSON value's reference count is incremented.
+
+``[fmt]`` (array)
+    Convert each item in the JSON array according to the inner format
+    string. ``fmt`` may contain objects and arrays, i.e. recursive
+    value extraction is supported.
+
+``{fmt}`` (object)
+    Convert each item in the JSON object according to the inner format
+    string ``fmt``. The first, third, etc. format characters represent
+    keys, and must be ``s``. The corresponding argument to the unpack
+    functions is read as the object key. The second, fourth, etc.
+    format characters represent values and are written to the addresses
+    given as the corresponding arguments. **Note** that every other
+    argument is read from and every other is written to.
+
+    ``fmt`` may contain objects and arrays as values, i.e. recursive
+    value extraction is supported.
+
+``!``
+    This special format character is used to enable the check that
+    all object and array items are accessed, on a per-value basis. It
+    must appear inside an array or object as the last format character
+    before the closing bracket or brace. To enable the check globally,
+    use the ``JSON_STRICT`` unpacking flag.
+
+``*``
+    This special format character is the opposite of ``!``. If the
+    ``JSON_STRICT`` flag is used, ``*`` can be used to disable the
+    strict check on a per-value basis. It must appear inside an array
+    or object as the last format character before the closing bracket
+    or brace.
+
+The following functions compose the parsing and validation API:
+
+.. function:: int json_unpack(json_t *root, const char *fmt, ...)
+
+   Validate and unpack the JSON value *root* according to the format
+   string *fmt*. Returns 0 on success and -1 on failure.
+
+.. function:: int json_unpack_ex(json_t *root, json_error_t *error, size_t flags, const char *fmt, ...)
+              int json_vunpack_ex(json_t *root, json_error_t *error, size_t flags, const char *fmt, va_list ap)
+
+   Validate and unpack the JSON value *root* according to the format
+   string *fmt*. If an error occurs and *error* is not *NULL*, write
+   error information to *error*. *flags* can be used to control the
+   behaviour of the unpacker, see below for the flags. Returns 0 on
+   success and -1 on failure.
+
+The following unpacking flags are available:
+
+``JSON_STRICT``
+    Enable the extra validation step checking that all object and
+    array items are unpacked. This is equivalent to appending the
+    format character ``!`` to the end of every array and object in the
+    format string.
+
+``JSON_VALIDATE_ONLY``
+    Don't extract any data, just validate the JSON value against the
+    given format string. Note that object keys must still be specified
+    after the format string.
+
+Examples::
+
+    /* root is the JSON integer 42 */
+    int myint;
+    json_unpack(root, "i", &myint);
+    assert(myint == 42);
+
+    /* root is the JSON object {"foo": "bar", "quux": true} */
+    const char *str;
+    int boolean;
+    json_unpack(root, "{s:s, s:b}", "foo", &str, "quux", &boolean);
+    assert(strcmp(str, "bar") == 0 && boolean == 1);
+
+    /* root is the JSON array [[1, 2], {"baz": null} */
+    json_error_t error;
+    json_unpack_ex(root, &error, JSON_VALIDATE_ONLY, "[[i,i], {s:n}]", "baz");
+    /* returns 0 for validation success, nothing is extracted */
+
+    /* root is the JSON array [1, 2, 3, 4, 5] */
+    int myint1, myint2;
+    json_unpack(root, "[ii!]", &myint1, &myint2);
+    /* returns -1 for failed validation */
+
+
+Equality
+========
+
+Testing for equality of two JSON values cannot, in general, be
+achieved using the ``==`` operator. Equality in the terms of the
+``==`` operator states that the two :type:`json_t` pointers point to
+exactly the same JSON value. However, two JSON values can be equal not
+only if they are exactly the same value, but also if they have equal
+"contents":
+
+* Two integer or real values are equal if their contained numeric
+  values are equal. An integer value is never equal to a real value,
+  though.
+
+* Two strings are equal if their contained UTF-8 strings are equal,
+  byte by byte. Unicode comparison algorithms are not implemented.
+
+* Two arrays are equal if they have the same number of elements and
+  each element in the first array is equal to the corresponding
+  element in the second array.
+
+* Two objects are equal if they have exactly the same keys and the
+  value for each key in the first object is equal to the value of the
+  corresponding key in the second object.
+
+* Two true, false or null values have no "contents", so they are equal
+  if their types are equal. (Because these values are singletons,
+  their equality can actually be tested with ``==``.)
+
+The following function can be used to test whether two JSON values are
+equal.
+
+.. function:: int json_equal(json_t *value1, json_t *value2)
+
+   Returns 1 if *value1* and *value2* are equal, as defined above.
+   Returns 0 if they are not equal, or if one or both of the pointers
+   *NULL*.
+
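+For example, two separately created strings with the same contents
+are equal, although the pointers differ (a sketch, using ``assert()``
+only for illustration)::
+
+  json_t *a = json_string("foo");
+  json_t *b = json_string("foo");
+
+  assert(a != b);            /* two distinct values... */
+  assert(json_equal(a, b));  /* ...with equal contents */
+
+  json_decref(a);
+  json_decref(b);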
+
+Copying
+=======
+
+Because of reference counting, passing JSON values around doesn't
+require copying them. But sometimes a fresh copy of a JSON value is
+needed. For example, if you need to modify an array, but still want to
+use the original afterwards, you should take a copy of it first.
+
+Jansson supports two kinds of copying: shallow and deep. There is a
+difference between these methods only for arrays and objects. Shallow
+copying only copies the first level value (array or object) and uses
+the same child values in the copied value. Deep copying makes a fresh
+copy of the child values, too. Moreover, all the child values are deep
+copied in a recursive fashion.
+
+.. function:: json_t *json_copy(json_t *value)
+
+   .. refcounting:: new
+
+   Returns a shallow copy of *value*, or *NULL* on error.
+
+.. function:: json_t *json_deep_copy(json_t *value)
+
+   .. refcounting:: new
+
+   Returns a deep copy of *value*, or *NULL* on error.
+
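+For example, a sketch contrasting the two methods, assuming ``arr``
+is a JSON array whose first element is a string::
+
+  json_t *shallow = json_copy(arr);
+  json_t *deep = json_deep_copy(arr);
+
+  /* a shallow copy shares its child values with arr... */
+  assert(json_array_get(shallow, 0) == json_array_get(arr, 0));
+
+  /* ...while a deep copy has fresh copies of them */
+  assert(json_array_get(deep, 0) != json_array_get(arr, 0));
+
+  json_decref(shallow);
+  json_decref(deep);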
+
+Custom Memory Allocation
+========================
+
+By default, Jansson uses :func:`malloc()` and :func:`free()` for
+memory allocation. These functions can be overridden if custom
+behavior is needed.
+
+.. type:: json_malloc_t
+
+   A typedef for a function pointer with :func:`malloc()`'s
+   signature::
+
+       typedef void *(*json_malloc_t)(size_t);
+
+.. type:: json_free_t
+
+   A typedef for a function pointer with :func:`free()`'s
+   signature::
+
+       typedef void (*json_free_t)(void *);
+
+.. function:: void json_set_alloc_funcs(json_malloc_t malloc_fn, json_free_t free_fn)
+
+   Use *malloc_fn* instead of :func:`malloc()` and *free_fn* instead
+   of :func:`free()`. This function has to be called before any other
+   Jansson API functions to ensure that all memory operations use
+   the same functions.
+
+Examples:
+
+Use `Boehm's conservative garbage collector`_ for memory
+operations::
+
+    json_set_alloc_funcs(GC_malloc, GC_free);
+
+.. _Boehm's conservative garbage collector: http://www.hpl.hp.com/personal/Hans_Boehm/gc/
+
+Allow storing sensitive data (e.g. passwords or encryption keys) in
+JSON structures by zeroing all memory when freed::
+
+    static void *secure_malloc(size_t size)
+    {
+        /* Store the memory area size in the beginning of the block */
+        char *ptr = malloc(size + 8);
+        if(!ptr)
+            return NULL;
+        *((size_t *)ptr) = size;
+        return ptr + 8;
+    }
+
+    static void secure_free(void *ptr)
+    {
+        size_t size;
+
+        ptr = (char *)ptr - 8;   /* step back to the stored size */
+        size = *((size_t *)ptr);
+
+        guaranteed_memset(ptr, 0, size);
+        free(ptr);
+    }
+
+    int main()
+    {
+        json_set_alloc_funcs(secure_malloc, secure_free);
+        /* ... */
+    }
+
+For more information about the issues of storing sensitive data in
+memory, see
+http://www.dwheeler.com/secure-programs/Secure-Programs-HOWTO/protect-secrets.html.
+The page also explains the :func:`guaranteed_memset()` function used
+in the example and gives a sample implementation for it.
diff --git a/lang/c/jansson/doc/changes.rst b/lang/c/jansson/doc/changes.rst
new file mode 100644
index 0000000..ea56843
--- /dev/null
+++ b/lang/c/jansson/doc/changes.rst
@@ -0,0 +1,5 @@
+******************
+Changes in Jansson
+******************
+
+.. include:: ../CHANGES
diff --git a/lang/c/jansson/doc/conf.py b/lang/c/jansson/doc/conf.py
new file mode 100644
index 0000000..ff3ba2e
--- /dev/null
+++ b/lang/c/jansson/doc/conf.py
@@ -0,0 +1,217 @@
+# -*- coding: utf-8 -*-
+#
+# Jansson documentation build configuration file, created by
+# sphinx-quickstart on Sun Sep  5 21:47:20 2010.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath('ext'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['refcounting']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Jansson'
+copyright = u'2009-2011, Petri Lehtinen'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '2.1'
+# The full version, including alpha/beta/rc tags.
+release = '2.1'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+default_role = 'c:func'
+primary_domain = 'c'
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+#html_theme = 'default'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+#html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Janssondoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+  ('index', 'Jansson.tex', u'Jansson Documentation',
+   u'Petri Lehtinen', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    ('index', 'jansson', u'Jansson Documentation',
+     [u'Petri Lehtinen'], 1)
+]
diff --git a/lang/c/jansson/doc/conformance.rst b/lang/c/jansson/doc/conformance.rst
new file mode 100644
index 0000000..34d60bd
--- /dev/null
+++ b/lang/c/jansson/doc/conformance.rst
@@ -0,0 +1,112 @@
+.. _rfc-conformance:
+
+***************
+RFC Conformance
+***************
+
+JSON is specified in :rfc:`4627`, *"The application/json Media Type
+for JavaScript Object Notation (JSON)"*. This chapter discusses
+Jansson's conformance to this specification.
+
+Character Encoding
+==================
+
+Jansson only supports UTF-8 encoded JSON texts. It does not support or
+auto-detect any of the other encodings mentioned in the RFC, namely
+UTF-16LE, UTF-16BE, UTF-32LE or UTF-32BE. Pure ASCII is supported, as
+it's a subset of UTF-8.
+
+Strings
+=======
+
+JSON strings are mapped to C-style null-terminated character arrays,
+and UTF-8 encoding is used internally. Strings may not contain
+embedded null characters, not even escaped ones.
+
+For example, trying to decode the following JSON text leads to a parse
+error::
+
+    ["this string contains the null character: \u0000"]
+
+All other Unicode codepoints U+0001 through U+10FFFF are allowed.
+
+Unicode normalization or any other transformation is never performed
+on any strings (string values or object keys). When checking for
+equivalence of strings or object keys, the comparison is performed
+byte by byte between the original UTF-8 representations of the
+strings.
+
+Numbers
+=======
+
+Real vs. Integer
+----------------
+
+JSON makes no distinction between real and integer numbers; Jansson
+does. Real numbers are mapped to the ``double`` type and integers to
+the ``json_int_t`` type, which is a typedef of ``long long`` or
+``long``, depending on whether ``long long`` is supported by your
+compiler or not.
+
+A JSON number is considered to be a real number if its lexical
+representation includes one of ``e``, ``E``, or ``.``, regardless of whether
+its actual numeric value is a true integer (e.g., all of ``1E6``,
+``3.0``, ``400E-2``, and ``3.14E3`` are mathematical integers, but
+will be treated as real values).
+
+All other JSON numbers are considered integers.
+
+When encoding to JSON, real values are always represented
+with a fractional part; e.g., the ``double`` value 3.0 will be
+represented in JSON as ``3.0``, not ``3``.
+
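+For example, a sketch of how the distinction shows up when decoding
+(both numbers have the same numeric value, but only the first has an
+exponent in its lexical representation)::
+
+    json_t *arr = json_loads("[1E6, 1000000]", 0, NULL);
+
+    assert(json_is_real(json_array_get(arr, 0)));
+    assert(json_is_integer(json_array_get(arr, 1)));
+
+    json_decref(arr);
+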
+Overflow, Underflow & Precision
+-------------------------------
+
+Real numbers whose absolute values are too small to be represented in
+a C ``double`` will be silently approximated as 0.0. Thus, depending on
+platform, JSON numbers very close to zero such as 1E-999 may result in
+0.0.
+
+Real numbers whose absolute values are too large to be represented in
+a C ``double`` will result in an overflow error (a JSON decoding
+error). Thus, depending on platform, JSON numbers like 1E+999 or
+-1E+999 may result in a parsing error.
+
+Likewise, integer numbers whose absolute values are too large to be
+represented in the ``json_int_t`` type (see above) will result in an
+overflow error (a JSON decoding error). Thus, depending on platform,
+JSON numbers like 1000000000000000 may result in a parsing error.
+
+Parsing JSON real numbers may result in a loss of precision. As long
+as overflow does not occur (i.e. a total loss of precision), the
+rounded approximate value is silently used. Thus the JSON number
+1.000000000000000005 may, depending on platform, result in the
+``double`` value 1.0.
+
+Signed zeros
+------------
+
+JSON makes no statement about what a number means; however JavaScript
+(ECMAScript) does state that +0.0 and -0.0 must be treated as being
+distinct values, i.e. -0.0 |not-equal| 0.0. Jansson relies on the
+underlying floating point library in the C environment in which it is
+compiled. Therefore it is platform-dependent whether 0.0 and -0.0 will
+be distinct values. Most platforms that use the IEEE 754
+floating-point standard will support signed zeros.
+
+Note that this only applies to floating-point; neither JSON, C, nor
+IEEE supports the concept of signed integer zeros.
+
+.. |not-equal| unicode:: U+2260
+
+Types
+-----
+
+No support is provided in Jansson for any C numeric types other than
+``json_int_t`` and ``double``. This excludes things such as unsigned
+types, ``long double``, etc. Obviously, shorter types like ``short``,
+``int``, ``long`` (if ``json_int_t`` is ``long long``) and ``float``
+are implicitly handled via the ordinary C type coercion rules (subject
+to overflow semantics). Also, no support or hooks are provided for any
+supplemental "bignum" type add-on packages.
diff --git a/lang/c/jansson/doc/ext/refcounting.py b/lang/c/jansson/doc/ext/refcounting.py
new file mode 100644
index 0000000..5bf4aab
--- /dev/null
+++ b/lang/c/jansson/doc/ext/refcounting.py
@@ -0,0 +1,59 @@
+"""
+    refcounting
+    ~~~~~~~~~~~
+
+    Reference count annotations for C API functions. Has the same
+    result as the sphinx.ext.refcounting extension but works for all
+    functions regardless of the signature, and the reference counting
+    information is written inline with the documentation instead of a
+    separate file.
+
+    Adds a new directive "refcounting". The directive has no content
+    and one required positional parameter: "new" or "borrow".
+
+    Example:
+
+    .. cfunction:: json_t *json_object(void)
+
+       .. refcounting:: new
+
+       <description of the json_object function>
+
+    :copyright: Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+    :license: MIT, see LICENSE for details.
+"""
+
+from docutils import nodes
+
+class refcounting(nodes.emphasis): pass
+
+def visit(self, node):
+    self.visit_emphasis(node)
+
+def depart(self, node):
+    self.depart_emphasis(node)
+
+def html_visit(self, node):
+    self.body.append(self.starttag(node, 'em', '', CLASS='refcount'))
+
+def html_depart(self, node):
+    self.body.append('</em>')
+
+
+def refcounting_directive(name, arguments, options, content, lineno,
+                   content_offset, block_text, state, state_machine):
+    if arguments[0] == 'borrow':
+        text = 'Return value: Borrowed reference.'
+    elif arguments[0] == 'new':
+        text = 'Return value: New reference.'
+    else:
+        raise ValueError('Valid arguments: new, borrow')
+
+    return [refcounting(text, text)]
+
+def setup(app):
+    app.add_node(refcounting,
+                 html=(html_visit, html_depart),
+                 latex=(visit, depart),
+                 text=(visit, depart))
+    app.add_directive('refcounting', refcounting_directive, 0, (1, 0, 0))
diff --git a/lang/c/jansson/doc/gettingstarted.rst b/lang/c/jansson/doc/gettingstarted.rst
new file mode 100644
index 0000000..eb5f683
--- /dev/null
+++ b/lang/c/jansson/doc/gettingstarted.rst
@@ -0,0 +1,123 @@
+***************
+Getting Started
+***************
+
+.. highlight:: c
+
+Compiling and Installing Jansson
+================================
+
+The Jansson source is available at
+http://www.digip.org/jansson/releases/.
+
+Unix-like systems
+-----------------
+
+Unpack the source tarball and change to the source directory:
+
+.. parsed-literal::
+
+    bunzip2 -c jansson-|release|.tar.bz2 | tar xf -
+    cd jansson-|release|
+
+The source uses GNU Autotools (autoconf_, automake_, libtool_), so
+compiling and installing is extremely simple::
+
+    ./configure
+    make
+    make check
+    make install
+
+To change the destination directory (``/usr/local`` by default), use
+the ``--prefix=DIR`` argument to ``./configure``. See ``./configure
+--help`` for the list of all possible installation options. (There are
+no options to customize the resulting Jansson binary.)
+
+The command ``make check`` runs the test suite distributed with
+Jansson. This step is not strictly necessary, but it may find possible
+problems that Jansson has on your platform. If any problems are found,
+please report them.
+
+If you obtained the source from a Git repository (or any other source
+control system), there's no ``./configure`` script as it's not kept in
+version control. To create the script, the build system needs to be
+bootstrapped. There are many ways to do this, but the easiest one is
+to use ``autoreconf``::
+
+    autoreconf -vi
+
+This command creates the ``./configure`` script, which can then be
+used as described above.
+
+.. _autoconf: http://www.gnu.org/software/autoconf/
+.. _automake: http://www.gnu.org/software/automake/
+.. _libtool: http://www.gnu.org/software/libtool/
+
+
+Other Systems
+-------------
+
+On Windows and other non-Unix-like systems, you may be unable to run
+the ``./configure`` script. In this case, follow these steps. All the
+files mentioned can be found in the ``src/`` directory.
+
+1. Create ``jansson_config.h``. This file has some platform-specific
+   parameters that are normally filled in by the ``./configure``
+   script:
+
+   - On Windows, rename ``jansson_config.h.win32`` to ``jansson_config.h``.
+
+   - On other systems, edit ``jansson_config.h.in``, replacing all
+     ``@variable@`` placeholders, and rename the file to
+     ``jansson_config.h``.
+
+2. Make ``jansson.h`` and ``jansson_config.h`` available to the
+   compiler, so that they can be found when compiling programs that
+   use Jansson.
+
+3. Compile all the ``.c`` files (in the ``src/`` directory) into a
+   library file. Make the library available to the compiler, as in
+   step 2.
+
+
+Building the Documentation
+--------------------------
+
+(This subsection describes how to build the HTML documentation you are
+currently reading, so it can be safely skipped.)
+
+Documentation is in the ``doc/`` subdirectory. It's written in
+reStructuredText_ with Sphinx_ annotations. To generate the HTML
+documentation, invoke::
+
+   make html
+
+and point your browser to ``doc/_build/html/index.html``. Sphinx_ 1.0
+or newer is required to generate the documentation.
+
+.. _reStructuredText: http://docutils.sourceforge.net/rst.html
+.. _Sphinx: http://sphinx.pocoo.org/
+
+
+Compiling Programs that Use Jansson
+===================================
+
+Jansson involves one C header file, :file:`jansson.h`, so it's enough
+to put the line
+
+::
+
+    #include <jansson.h>
+
+in the beginning of every source file that uses Jansson.
+
+There's also just one library to link with, ``libjansson``. Compile and
+link the program as follows::
+
+    cc -o prog prog.c -ljansson
+
+Starting from version 1.2, there's also support for pkg-config_::
+
+    cc -o prog prog.c `pkg-config --cflags --libs jansson`
+
+.. _pkg-config: http://pkg-config.freedesktop.org/
diff --git a/lang/c/jansson/doc/github_commits.c b/lang/c/jansson/doc/github_commits.c
new file mode 100644
index 0000000..9ba36b7
--- /dev/null
+++ b/lang/c/jansson/doc/github_commits.c
@@ -0,0 +1,171 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <stdlib.h>
+#include <string.h>
+
+#include <jansson.h>
+#include <curl/curl.h>
+
+#define BUFFER_SIZE  (256 * 1024)  /* 256 KB */
+
+#define URL_FORMAT   "http://github.com/api/v2/json/commits/list/%s/%s/master"
+#define URL_SIZE     256
+
+/* Return the offset of the first newline in text or the length of
+   text if there's no newline */
+static int newline_offset(const char *text)
+{
+    const char *newline = strchr(text, '\n');
+    if(!newline)
+        return strlen(text);
+    else
+        return (int)(newline - text);
+}
+
+struct write_result
+{
+    char *data;
+    int pos;
+};
+
+static size_t write_response(void *ptr, size_t size, size_t nmemb, void *stream)
+{
+    struct write_result *result = (struct write_result *)stream;
+
+    if(result->pos + size * nmemb >= BUFFER_SIZE - 1)
+    {
+        fprintf(stderr, "error: too small buffer\n");
+        return 0;
+    }
+
+    memcpy(result->data + result->pos, ptr, size * nmemb);
+    result->pos += size * nmemb;
+
+    return size * nmemb;
+}
+
+static char *request(const char *url)
+{
+    CURL *curl;
+    CURLcode status;
+    char *data;
+    long code;
+
+    curl = curl_easy_init();
+    data = malloc(BUFFER_SIZE);
+    if(!curl || !data)
+        return NULL;
+
+    struct write_result write_result = {
+        .data = data,
+        .pos = 0
+    };
+
+    curl_easy_setopt(curl, CURLOPT_URL, url);
+    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_response);
+    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &write_result);
+
+    status = curl_easy_perform(curl);
+    if(status != 0)
+    {
+        fprintf(stderr, "error: unable to request data from %s:\n", url);
+        fprintf(stderr, "%s\n", curl_easy_strerror(status));
+        return NULL;
+    }
+
+    curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &code);
+    if(code != 200)
+    {
+        fprintf(stderr, "error: server responded with code %ld\n", code);
+        return NULL;
+    }
+
+    curl_easy_cleanup(curl);
+    curl_global_cleanup();
+
+    /* zero-terminate the result */
+    data[write_result.pos] = '\0';
+
+    return data;
+}
+
+int main(int argc, char *argv[])
+{
+    size_t i;
+    char *text;
+    char url[URL_SIZE];
+
+    json_t *root;
+    json_error_t error;
+    json_t *commits;
+
+    if(argc != 3)
+    {
+        fprintf(stderr, "usage: %s USER REPOSITORY\n\n", argv[0]);
+        fprintf(stderr, "List commits at USER's REPOSITORY.\n\n");
+        return 2;
+    }
+
+    snprintf(url, URL_SIZE, URL_FORMAT, argv[1], argv[2]);
+
+    text = request(url);
+    if(!text)
+        return 1;
+
+    root = json_loads(text, 0, &error);
+    free(text);
+
+    if(!root)
+    {
+        fprintf(stderr, "error: on line %d: %s\n", error.line, error.text);
+        return 1;
+    }
+
+    commits = json_object_get(root, "commits");
+    if(!json_is_array(commits))
+    {
+        fprintf(stderr, "error: commits is not an array\n");
+        return 1;
+    }
+
+    for(i = 0; i < json_array_size(commits); i++)
+    {
+        json_t *commit, *id, *message;
+        const char *message_text;
+
+        commit = json_array_get(commits, i);
+        if(!json_is_object(commit))
+        {
+            fprintf(stderr, "error: commit %d is not an object\n", i + 1);
+            return 1;
+        }
+
+        id = json_object_get(commit, "id");
+        if(!json_is_string(id))
+        {
+            fprintf(stderr, "error: commit %d: id is not a string\n", i + 1);
+            return 1;
+        }
+
+        message = json_object_get(commit, "message");
+        if(!json_is_string(message))
+        {
+            fprintf(stderr, "error: commit %d: message is not a string\n", i + 1);
+            return 1;
+        }
+
+        message_text = json_string_value(message);
+        printf("%.8s %.*s\n",
+               json_string_value(id),
+               newline_offset(message_text),
+               message_text);
+    }
+
+    json_decref(root);
+    return 0;
+}
diff --git a/lang/c/jansson/doc/index.rst b/lang/c/jansson/doc/index.rst
new file mode 100644
index 0000000..b5a3be8
--- /dev/null
+++ b/lang/c/jansson/doc/index.rst
@@ -0,0 +1,47 @@
+Jansson Documentation
+=====================
+
+This is the documentation for Jansson_ |release|, last updated |today|.
+
+Introduction
+------------
+
+Jansson_ is a C library for encoding, decoding and manipulating JSON
+data. Its main features and design principles are:
+
+- Simple and intuitive API and data model
+
+- Comprehensive documentation
+
+- No dependencies on other libraries
+
+- Full Unicode support (UTF-8)
+
+- Extensive test suite
+
+Jansson is licensed under the `MIT license`_; see LICENSE in the
+source distribution for details.
+
+
+.. _`MIT license`: http://www.opensource.org/licenses/mit-license.php
+.. _Jansson: http://www.digip.org/jansson/
+
+Contents
+--------
+
+.. toctree::
+   :maxdepth: 2
+
+   gettingstarted
+   upgrading
+   tutorial
+   conformance
+   apiref
+   changes
+
+
+Indices and Tables
+==================
+
+* :ref:`genindex`
+* :ref:`search`
diff --git a/lang/c/jansson/doc/tutorial.rst b/lang/c/jansson/doc/tutorial.rst
new file mode 100644
index 0000000..dd7ae19
--- /dev/null
+++ b/lang/c/jansson/doc/tutorial.rst
@@ -0,0 +1,275 @@
+.. _tutorial:
+
+********
+Tutorial
+********
+
+.. highlight:: c
+
+In this tutorial, we create a program that fetches the latest commits
+of a GitHub_ repository over the web. One of the response formats
+supported by `GitHub API`_ is JSON, so the result can be parsed using
+Jansson.
+
+To stay within the scope of this tutorial, we will only cover the
+parts of the program related to handling JSON data. For reference,
+the full source code is available:
+:download:`github_commits.c`. To compile it (on Unix-like systems with
+gcc), use the following command::
+
+    gcc -o github_commits github_commits.c -ljansson -lcurl
+
+libcurl_ is used to communicate over the web, so it is required to
+compile the program.
+
+The command line syntax is::
+
+    github_commits USER REPOSITORY
+
+``USER`` is a GitHub user ID and ``REPOSITORY`` is the repository
+name. Please note that the GitHub API is rate limited, so if you run
+the program too many times within a short period of time, the server
+starts to respond with an error.
+
+.. _GitHub: http://github.com/
+.. _GitHub API: http://develop.github.com/
+.. _libcurl: http://curl.haxx.se/
+
+
+.. _tutorial-github-commits-api:
+
+The GitHub Commits API
+======================
+
+The `GitHub commits API`_ is used by sending HTTP requests to URLs
+starting with ``http://github.com/api/v2/json/commits/``. Our program
+only lists the latest commits, so the rest of the URL is
+``list/USER/REPOSITORY/BRANCH``, where ``USER``, ``REPOSITORY`` and
+``BRANCH`` are the GitHub user ID, the name of the repository, and the
+name of the branch whose commits are to be listed, respectively.
+
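+For example, the latest commits of Jansson's own repository (the one
+we will use for testing at the end of this tutorial) would be listed
+via ``http://github.com/api/v2/json/commits/list/akheron/jansson/master``.
+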
+GitHub responds with a JSON object of the following form:
+
+.. code-block:: none
+
+    {
+        "commits": [
+            {
+                "id": "<the commit ID>",
+                "message": "<the commit message>",
+                <more fields, not important to this tutorial>
+            },
+            {
+                "id": "<the commit ID>",
+                "message": "<the commit message>",
+                <more fields, not important to this tutorial>
+            },
+            <more commits...>
+        ]
+    }
+
+In our program, the HTTP request is sent using the following
+function::
+
+    static char *request(const char *url);
+
+It takes the URL as a parameter, performs an HTTP GET request, and
+returns a newly allocated string that contains the response body. If
+the request fails, an error message is printed to stderr and the
+return value is *NULL*. For full details, refer to :download:`the code
+<github_commits.c>`, as the actual implementation is not important
+here.
+
+.. _GitHub commits API: http://develop.github.com/p/commits.html
+
+.. _tutorial-the-program:
+
+The Program
+===========
+
+First the includes::
+
+    #include <string.h>
+    #include <jansson.h>
+
+Like all programs using Jansson, we need to include
+:file:`jansson.h`.
+
+The following definitions are used to build the GitHub commits API
+request URL::
+
+   #define URL_FORMAT   "http://github.com/api/v2/json/commits/list/%s/%s/master"
+   #define URL_SIZE     256
+
+The following function is used when formatting the result to find the
+first newline in the commit message::
+
+    /* Return the offset of the first newline in text or the length of
+       text if there's no newline */
+    static int newline_offset(const char *text)
+    {
+        const char *newline = strchr(text, '\n');
+        if(!newline)
+            return strlen(text);
+        else
+            return (int)(newline - text);
+    }
+
+The main function follows. At the beginning, we declare a handful of
+variables and check the command line parameters::
+
+    size_t i;
+    char *text;
+    char url[URL_SIZE];
+
+    json_t *root;
+    json_error_t error;
+    json_t *commits;
+
+    if(argc != 3)
+    {
+        fprintf(stderr, "usage: %s USER REPOSITORY\n\n", argv[0]);
+        fprintf(stderr, "List commits at USER's REPOSITORY.\n\n");
+        return 2;
+    }
+
+Then we build the request URL using the user and repository names
+given as command line parameters::
+
+    snprintf(url, URL_SIZE, URL_FORMAT, argv[1], argv[2]);
+
+This uses the ``URL_SIZE`` and ``URL_FORMAT`` constants defined above.
+Now we're ready to actually request the JSON data over the web::
+
+    text = request(url);
+    if(!text)
+        return 1;
+
+If an error occurs, our function ``request`` prints the error and
+returns *NULL*, so it's enough to just return 1 from the main
+function.
+
+Next we'll call :func:`json_loads()` to decode the JSON text we got
+as a response::
+
+    root = json_loads(text, 0, &error);
+    free(text);
+
+    if(!root)
+    {
+        fprintf(stderr, "error: on line %d: %s\n", error.line, error.text);
+        return 1;
+    }
+
+We don't need the JSON text anymore, so we can free the ``text``
+variable right after decoding it. If :func:`json_loads()` fails, it
+returns *NULL* and stores error information in the :type:`json_error_t`
+structure given as the third parameter. In this case, our program
+prints the error information out and returns 1 from the main function.
+
+Now we're ready to extract the data out of the decoded JSON response.
+The structure of the response JSON was explained in section
+:ref:`tutorial-github-commits-api`.
+
+First, we'll extract the ``commits`` array from the JSON response::
+
+    commits = json_object_get(root, "commits");
+    if(!json_is_array(commits))
+    {
+        fprintf(stderr, "error: commits is not an array\n");
+        return 1;
+    }
+
+This is the array that contains objects describing the latest commits in
+the repository. We check that the returned value really is an array.
+If the key ``commits`` doesn't exist, :func:`json_object_get()`
+returns *NULL*, but :func:`json_is_array()` handles this case, too.
+
+Then we proceed to loop over all the commits in the array::
+
+    for(i = 0; i < json_array_size(commits); i++)
+    {
+        json_t *commit, *id, *message;
+        const char *message_text;
+
+        commit = json_array_get(commits, i);
+        if(!json_is_object(commit))
+        {
+            fprintf(stderr, "error: commit %d is not an object\n", i + 1);
+            return 1;
+        }
+    ...
+
+The function :func:`json_array_size()` returns the size of a JSON
+array. First, we again declare some variables and then extract the
+i'th element of the ``commits`` array using :func:`json_array_get()`.
+We also check that the resulting value is a JSON object.
+
+Next we'll extract the commit ID and commit message, and check that
+they both are JSON strings::
+
+        id = json_object_get(commit, "id");
+        if(!json_is_string(id))
+        {
+            fprintf(stderr, "error: commit %d: id is not a string\n", i + 1);
+            return 1;
+        }
+
+        message = json_object_get(commit, "message");
+        if(!json_is_string(message))
+        {
+            fprintf(stderr, "error: commit %d: message is not a string\n", i + 1);
+            return 1;
+        }
+    ...
+
+And finally, we'll print the first 8 characters of the commit ID and
+the first line of the commit message. A C-style string is extracted
+from a JSON string using :func:`json_string_value()`::
+
+        message_text = json_string_value(message);
+        printf("%.8s %.*s\n",
+               json_string_value(id),
+               newline_offset(message_text),
+               message_text);
+    }
+
+After sending the HTTP request, we decoded the JSON text using
+:func:`json_loads()`, remember? It returns a *new reference* to the
+JSON value it decodes. When we're finished with the value, we'll need
+to decrease the reference count using :func:`json_decref()`. This way
+Jansson can release the resources::
+
+    json_decref(root);
+    return 0;
+
+For a detailed explanation of reference counting in Jansson, see
+:ref:`apiref-reference-count` in :ref:`apiref`.
+
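+This is also why the ``commit``, ``id`` and ``message`` values above
+did not need a :func:`json_decref()` of their own:
+:func:`json_object_get()` and :func:`json_array_get()` return
+*borrowed* references that are owned by ``root`` and released along
+with it. A minimal sketch of the difference, using a small
+hypothetical object::
+
+    json_t *obj = json_loads("{\"n\": 1}", 0, NULL);  /* new reference */
+    json_t *n = json_object_get(obj, "n");            /* borrowed reference */
+    printf("%" JSON_INTEGER_FORMAT "\n", json_integer_value(n));
+    json_decref(obj);  /* releases obj and, with it, n */
+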
+The program is now ready; let's test it and view the latest commits in
+Jansson's repository::
+
+    $ ./github_commits akheron jansson
+    86dc1d62 Fix indentation
+    b67e130f json_dumpf: Document the output shortage on error
+    4cd77771 Enhance handling of circular references
+    79009e62 json_dumps: Close the strbuffer if dumping fails
+    76999799 doc: Fix a small typo in apiref
+    22af193a doc/Makefile.am: Remove *.pyc in clean
+    951d091f Make integer, real and string mutable
+    185e107d Don't use non-portable asprintf()
+    ca7703fb Merge branch '1.0'
+    12cd4e8c jansson 1.0.4
+    <etc...>
+
+
+Conclusion
+==========
+
+In this tutorial, we implemented a program that fetches the latest
+commits of a GitHub repository using the GitHub commits API. Jansson
+was used to decode the JSON response and to extract the commit data.
+
+This tutorial only covered a small part of Jansson. For example, we
+did not create or manipulate JSON values at all. Proceed to
+:ref:`apiref` to explore all features of Jansson.
diff --git a/lang/c/jansson/doc/upgrading.rst b/lang/c/jansson/doc/upgrading.rst
new file mode 100644
index 0000000..9b49046
--- /dev/null
+++ b/lang/c/jansson/doc/upgrading.rst
@@ -0,0 +1,76 @@
+.. highlight:: c
+
+******************
+Upgrading from 1.x
+******************
+
+This chapter lists the backwards incompatible changes introduced in
+Jansson 2.0, and the steps that are needed for upgrading your code.
+
+**The incompatibilities are not dramatic.** The biggest change is that
+all decoding functions now require an extra parameter. Most programs
+can be modified to work with 2.0 by adding a ``0`` as the second
+parameter to all calls of :func:`json_loads()`, :func:`json_loadf()`
+and :func:`json_load_file()`.
+
+
+Compatibility
+=============
+
+Jansson 2.0 is backwards incompatible with the Jansson 1.x releases.
+It is ABI incompatible, i.e. all programs dynamically linking to the
+Jansson library need to be recompiled. It's also API incompatible,
+i.e. the source code of programs using Jansson 1.x may need
+modifications to make them compile against Jansson 2.0.
+
+All the 2.x releases are guaranteed to be backwards compatible for
+both ABI and API, so no recompilation or source changes are needed
+when upgrading from 2.x to 2.y.
+
+
+List of Incompatible Changes
+============================
+
+**Decoding flags**
+    For future needs, a ``flags`` parameter was added as the second
+    parameter to all decoding functions, i.e. :func:`json_loads()`,
+    :func:`json_loadf()` and :func:`json_load_file()`. All calls to
+    these functions need to be changed by adding a ``0`` as the second
+    argument. For example::
+
+        /* old code */
+        json_loads(input, &error);
+
+        /* new code */
+        json_loads(input, 0, &error);
+
+
+**Underlying type of JSON integers**
+    The underlying C type of JSON integers has been changed from
+    :type:`int` to the widest available signed integer type, i.e.
+    :type:`long long` or :type:`long`, depending on whether
+    :type:`long long` is supported on your system or not. This makes
+    the whole 64-bit integer range available on most modern systems.
+
+    ``jansson.h`` has a typedef :type:`json_int_t` to the underlying
+    integer type. :type:`int` should still be used in most cases when
+    dealing with smallish JSON integers, as the compiler handles
+    implicit type coercion. :type:`json_int_t` should be used
+    explicitly only when the full 64-bit range is needed.
+
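+    A minimal sketch of handling a large integer with the 2.0 API,
+    using :func:`json_integer_value()` (which returns
+    :type:`json_int_t`) and the ``JSON_INTEGER_FORMAT`` printf format
+    macro defined in ``jansson.h``::
+
+        json_t *big = json_integer(4294967296);  /* does not fit in 32 bits */
+        json_int_t value = json_integer_value(big);
+        printf("value: %" JSON_INTEGER_FORMAT "\n", value);
+        json_decref(big);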
+
+**Maximum encoder indentation depth**
+    The maximum argument of the ``JSON_INDENT()`` macro has been
+    changed from 255 to 31, to free up bits from the ``flags``
+    parameter of :func:`json_dumps()`, :func:`json_dumpf()` and
+    :func:`json_dump_file()`. If your code uses a bigger indentation
+    than 31, it needs to be changed.
+
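+    For example, code that used an indentation of 40 must now use 31
+    or less::
+
+        /* old code */
+        json_dumps(root, JSON_INDENT(40));
+
+        /* new code */
+        json_dumps(root, JSON_INDENT(31));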
+
+**Unsigned integers in API functions**
+    Version 2.0 unifies unsigned integer usage in the API. All uses of
+    :type:`unsigned int` and :type:`unsigned long` have been replaced
+    with :type:`size_t`. This includes flags, container sizes, etc.
+    This should not require source code changes, as both
+    :type:`unsigned int` and :type:`unsigned long` are usually
+    compatible with :type:`size_t`.
diff --git a/lang/c/jansson/install-sh b/lang/c/jansson/install-sh
new file mode 100755
index 0000000..4fbbae7
--- /dev/null
+++ b/lang/c/jansson/install-sh
@@ -0,0 +1,507 @@
+#!/bin/sh
+# install - install a program, script, or datafile
+
+scriptversion=2006-10-14.15
+
+# This originates from X11R5 (mit/util/scripts/install.sh), which was
+# later released in X11R6 (xc/config/util/install.sh) with the
+# following copyright and license.
+#
+# Copyright (C) 1994 X Consortium
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to
+# deal in the Software without restriction, including without limitation the
+# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+# sell copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
+# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
+# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC-
+# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+# Except as contained in this notice, the name of the X Consortium shall not
+# be used in advertising or otherwise to promote the sale, use or other deal-
+# ings in this Software without prior written authorization from the X Consor-
+# tium.
+#
+#
+# FSF changes to this file are in the public domain.
+#
+# Calling this script install-sh is preferred over install.sh, to prevent
+# `make' implicit rules from creating a file called install from it
+# when there is no Makefile.
+#
+# This script is compatible with the BSD install script, but was written
+# from scratch.
+
+nl='
+'
+IFS=" ""	$nl"
+
+# set DOITPROG to echo to test this script
+
+# Don't use :- since 4.3BSD and earlier shells don't like it.
+doit="${DOITPROG-}"
+if test -z "$doit"; then
+  doit_exec=exec
+else
+  doit_exec=$doit
+fi
+
+# Put in absolute file names if you don't have them in your path;
+# or use environment vars.
+
+mvprog="${MVPROG-mv}"
+cpprog="${CPPROG-cp}"
+chmodprog="${CHMODPROG-chmod}"
+chownprog="${CHOWNPROG-chown}"
+chgrpprog="${CHGRPPROG-chgrp}"
+stripprog="${STRIPPROG-strip}"
+rmprog="${RMPROG-rm}"
+mkdirprog="${MKDIRPROG-mkdir}"
+
+posix_glob=
+posix_mkdir=
+
+# Desired mode of installed file.
+mode=0755
+
+chmodcmd=$chmodprog
+chowncmd=
+chgrpcmd=
+stripcmd=
+rmcmd="$rmprog -f"
+mvcmd="$mvprog"
+src=
+dst=
+dir_arg=
+dstarg=
+no_target_directory=
+
+usage="Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE
+   or: $0 [OPTION]... SRCFILES... DIRECTORY
+   or: $0 [OPTION]... -t DIRECTORY SRCFILES...
+   or: $0 [OPTION]... -d DIRECTORIES...
+
+In the 1st form, copy SRCFILE to DSTFILE.
+In the 2nd and 3rd, copy all SRCFILES to DIRECTORY.
+In the 4th, create DIRECTORIES.
+
+Options:
+-c         (ignored)
+-d         create directories instead of installing files.
+-g GROUP   $chgrpprog installed files to GROUP.
+-m MODE    $chmodprog installed files to MODE.
+-o USER    $chownprog installed files to USER.
+-s         $stripprog installed files.
+-t DIRECTORY  install into DIRECTORY.
+-T         report an error if DSTFILE is a directory.
+--help     display this help and exit.
+--version  display version info and exit.
+
+Environment variables override the default commands:
+  CHGRPPROG CHMODPROG CHOWNPROG CPPROG MKDIRPROG MVPROG RMPROG STRIPPROG
+"
+
+while test $# -ne 0; do
+  case $1 in
+    -c) shift
+        continue;;
+
+    -d) dir_arg=true
+        shift
+        continue;;
+
+    -g) chgrpcmd="$chgrpprog $2"
+        shift
+        shift
+        continue;;
+
+    --help) echo "$usage"; exit $?;;
+
+    -m) mode=$2
+        shift
+        shift
+	case $mode in
+	  *' '* | *'	'* | *'
+'*	  | *'*'* | *'?'* | *'['*)
+	    echo "$0: invalid mode: $mode" >&2
+	    exit 1;;
+	esac
+        continue;;
+
+    -o) chowncmd="$chownprog $2"
+        shift
+        shift
+        continue;;
+
+    -s) stripcmd=$stripprog
+        shift
+        continue;;
+
+    -t) dstarg=$2
+	shift
+	shift
+	continue;;
+
+    -T) no_target_directory=true
+	shift
+	continue;;
+
+    --version) echo "$0 $scriptversion"; exit $?;;
+
+    --)	shift
+	break;;
+
+    -*)	echo "$0: invalid option: $1" >&2
+	exit 1;;
+
+    *)  break;;
+  esac
+done
+
+if test $# -ne 0 && test -z "$dir_arg$dstarg"; then
+  # When -d is used, all remaining arguments are directories to create.
+  # When -t is used, the destination is already specified.
+  # Otherwise, the last argument is the destination.  Remove it from $@.
+  for arg
+  do
+    if test -n "$dstarg"; then
+      # $@ is not empty: it contains at least $arg.
+      set fnord "$@" "$dstarg"
+      shift # fnord
+    fi
+    shift # arg
+    dstarg=$arg
+  done
+fi
+
+if test $# -eq 0; then
+  if test -z "$dir_arg"; then
+    echo "$0: no input file specified." >&2
+    exit 1
+  fi
+  # It's OK to call `install-sh -d' without argument.
+  # This can happen when creating conditional directories.
+  exit 0
+fi
+
+if test -z "$dir_arg"; then
+  trap '(exit $?); exit' 1 2 13 15
+
+  # Set umask so as not to create temps with too-generous modes.
+  # However, 'strip' requires both read and write access to temps.
+  case $mode in
+    # Optimize common cases.
+    *644) cp_umask=133;;
+    *755) cp_umask=22;;
+
+    *[0-7])
+      if test -z "$stripcmd"; then
+	u_plus_rw=
+      else
+	u_plus_rw='% 200'
+      fi
+      cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;;
+    *)
+      if test -z "$stripcmd"; then
+	u_plus_rw=
+      else
+	u_plus_rw=,u+rw
+      fi
+      cp_umask=$mode$u_plus_rw;;
+  esac
+fi
+
+for src
+do
+  # Protect names starting with `-'.
+  case $src in
+    -*) src=./$src ;;
+  esac
+
+  if test -n "$dir_arg"; then
+    dst=$src
+    dstdir=$dst
+    test -d "$dstdir"
+    dstdir_status=$?
+  else
+
+    # Waiting for this to be detected by the "$cpprog $src $dsttmp" command
+    # might cause directories to be created, which would be especially bad
+    # if $src (and thus $dsttmp) contains '*'.
+    if test ! -f "$src" && test ! -d "$src"; then
+      echo "$0: $src does not exist." >&2
+      exit 1
+    fi
+
+    if test -z "$dstarg"; then
+      echo "$0: no destination specified." >&2
+      exit 1
+    fi
+
+    dst=$dstarg
+    # Protect names starting with `-'.
+    case $dst in
+      -*) dst=./$dst ;;
+    esac
+
+    # If destination is a directory, append the input filename; won't work
+    # if double slashes aren't ignored.
+    if test -d "$dst"; then
+      if test -n "$no_target_directory"; then
+	echo "$0: $dstarg: Is a directory" >&2
+	exit 1
+      fi
+      dstdir=$dst
+      dst=$dstdir/`basename "$src"`
+      dstdir_status=0
+    else
+      # Prefer dirname, but fall back on a substitute if dirname fails.
+      dstdir=`
+	(dirname "$dst") 2>/dev/null ||
+	expr X"$dst" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
+	     X"$dst" : 'X\(//\)[^/]' \| \
+	     X"$dst" : 'X\(//\)$' \| \
+	     X"$dst" : 'X\(/\)' \| . 2>/dev/null ||
+	echo X"$dst" |
+	    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
+		   s//\1/
+		   q
+		 }
+		 /^X\(\/\/\)[^/].*/{
+		   s//\1/
+		   q
+		 }
+		 /^X\(\/\/\)$/{
+		   s//\1/
+		   q
+		 }
+		 /^X\(\/\).*/{
+		   s//\1/
+		   q
+		 }
+		 s/.*/./; q'
+      `
+
+      test -d "$dstdir"
+      dstdir_status=$?
+    fi
+  fi
+
+  obsolete_mkdir_used=false
+
+  if test $dstdir_status != 0; then
+    case $posix_mkdir in
+      '')
+	# Create intermediate dirs using mode 755 as modified by the umask.
+	# This is like FreeBSD 'install' as of 1997-10-28.
+	umask=`umask`
+	case $stripcmd.$umask in
+	  # Optimize common cases.
+	  *[2367][2367]) mkdir_umask=$umask;;
+	  .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;;
+
+	  *[0-7])
+	    mkdir_umask=`expr $umask + 22 \
+	      - $umask % 100 % 40 + $umask % 20 \
+	      - $umask % 10 % 4 + $umask % 2
+	    `;;
+	  *) mkdir_umask=$umask,go-w;;
+	esac
+
+	# With -d, create the new directory with the user-specified mode.
+	# Otherwise, rely on $mkdir_umask.
+	if test -n "$dir_arg"; then
+	  mkdir_mode=-m$mode
+	else
+	  mkdir_mode=
+	fi
+
+	posix_mkdir=false
+	case $umask in
+	  *[123567][0-7][0-7])
+	    # POSIX mkdir -p sets u+wx bits regardless of umask, which
+	    # is incompatible with FreeBSD 'install' when (umask & 300) != 0.
+	    ;;
+	  *)
+	    tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$
+	    trap 'ret=$?; rmdir "$tmpdir/d" "$tmpdir" 2>/dev/null; exit $ret' 0
+
+	    if (umask $mkdir_umask &&
+		exec $mkdirprog $mkdir_mode -p -- "$tmpdir/d") >/dev/null 2>&1
+	    then
+	      if test -z "$dir_arg" || {
+		   # Check for POSIX incompatibilities with -m.
+		   # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or
+		   # other-writeable bit of parent directory when it shouldn't.
+		   # FreeBSD 6.1 mkdir -m -p sets mode of existing directory.
+		   ls_ld_tmpdir=`ls -ld "$tmpdir"`
+		   case $ls_ld_tmpdir in
+		     d????-?r-*) different_mode=700;;
+		     d????-?--*) different_mode=755;;
+		     *) false;;
+		   esac &&
+		   $mkdirprog -m$different_mode -p -- "$tmpdir" && {
+		     ls_ld_tmpdir_1=`ls -ld "$tmpdir"`
+		     test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1"
+		   }
+		 }
+	      then posix_mkdir=:
+	      fi
+	      rmdir "$tmpdir/d" "$tmpdir"
+	    else
+	      # Remove any dirs left behind by ancient mkdir implementations.
+	      rmdir ./$mkdir_mode ./-p ./-- 2>/dev/null
+	    fi
+	    trap '' 0;;
+	esac;;
+    esac
+
+    if
+      $posix_mkdir && (
+	umask $mkdir_umask &&
+	$doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir"
+      )
+    then :
+    else
+
+      # The umask is ridiculous, or mkdir does not conform to POSIX,
+      # or it failed possibly due to a race condition.  Create the
+      # directory the slow way, step by step, checking for races as we go.
+
+      case $dstdir in
+	/*) prefix=/ ;;
+	-*) prefix=./ ;;
+	*)  prefix= ;;
+      esac
+
+      case $posix_glob in
+        '')
+	  if (set -f) 2>/dev/null; then
+	    posix_glob=true
+	  else
+	    posix_glob=false
+	  fi ;;
+      esac
+
+      oIFS=$IFS
+      IFS=/
+      $posix_glob && set -f
+      set fnord $dstdir
+      shift
+      $posix_glob && set +f
+      IFS=$oIFS
+
+      prefixes=
+
+      for d
+      do
+	test -z "$d" && continue
+
+	prefix=$prefix$d
+	if test -d "$prefix"; then
+	  prefixes=
+	else
+	  if $posix_mkdir; then
+	    (umask=$mkdir_umask &&
+	     $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break
+	    # Don't fail if two instances are running concurrently.
+	    test -d "$prefix" || exit 1
+	  else
+	    case $prefix in
+	      *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;;
+	      *) qprefix=$prefix;;
+	    esac
+	    prefixes="$prefixes '$qprefix'"
+	  fi
+	fi
+	prefix=$prefix/
+      done
+
+      if test -n "$prefixes"; then
+	# Don't fail if two instances are running concurrently.
+	(umask $mkdir_umask &&
+	 eval "\$doit_exec \$mkdirprog $prefixes") ||
+	  test -d "$dstdir" || exit 1
+	obsolete_mkdir_used=true
+      fi
+    fi
+  fi
+
+  if test -n "$dir_arg"; then
+    { test -z "$chowncmd" || $doit $chowncmd "$dst"; } &&
+    { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } &&
+    { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false ||
+      test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1
+  else
+
+    # Make a couple of temp file names in the proper directory.
+    dsttmp=$dstdir/_inst.$$_
+    rmtmp=$dstdir/_rm.$$_
+
+    # Trap to clean up those temp files at exit.
+    trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0
+
+    # Copy the file name to the temp name.
+    (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") &&
+
+    # and set any options; do chmod last to preserve setuid bits.
+    #
+    # If any of these fail, we abort the whole thing.  If we want to
+    # ignore errors from any of these, just make sure not to ignore
+    # errors from the above "$doit $cpprog $src $dsttmp" command.
+    #
+    { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } \
+      && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } \
+      && { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } \
+      && { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } &&
+
+    # Now rename the file to the real destination.
+    { $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null \
+      || {
+	   # The rename failed, perhaps because mv can't rename something else
+	   # to itself, or perhaps because mv is so ancient that it does not
+	   # support -f.
+
+	   # Now remove or move aside any old file at destination location.
+	   # We try this two ways since rm can't unlink itself on some
+	   # systems and the destination file might be busy for other
+	   # reasons.  In this case, the final cleanup might fail but the new
+	   # file should still install successfully.
+	   {
+	     if test -f "$dst"; then
+	       $doit $rmcmd -f "$dst" 2>/dev/null \
+	       || { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null \
+		     && { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; }; }\
+	       || {
+		 echo "$0: cannot unlink or rename $dst" >&2
+		 (exit 1); exit 1
+	       }
+	     else
+	       :
+	     fi
+	   } &&
+
+	   # Now rename the file to the real destination.
+	   $doit $mvcmd "$dsttmp" "$dst"
+	 }
+    } || exit 1
+
+    trap '' 0
+  fi
+done
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-end: "$"
+# End:
diff --git a/lang/c/jansson/jansson.pc.in b/lang/c/jansson/jansson.pc.in
new file mode 100644
index 0000000..d9bf4da
--- /dev/null
+++ b/lang/c/jansson/jansson.pc.in
@@ -0,0 +1,10 @@
+prefix=@prefix@
+exec_prefix=@exec_prefix@
+libdir=@libdir@
+includedir=${prefix}/include
+
+Name: Jansson
+Description: Library for encoding, decoding and manipulating JSON data
+Version: @VERSION@
+Libs: -L${libdir} -ljansson
+Cflags: -I${includedir}
diff --git a/lang/c/jansson/ltmain.sh b/lang/c/jansson/ltmain.sh
new file mode 100755
index 0000000..c856b8d
--- /dev/null
+++ b/lang/c/jansson/ltmain.sh
@@ -0,0 +1,8745 @@
+# Generated from ltmain.m4sh.
+
+# libtool (GNU libtool) 2.2.10
+# Written by Gordon Matzigkeit <gord at gnu.ai.mit.edu>, 1996
+
+# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006,
+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+# This is free software; see the source for copying conditions.  There is NO
+# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# GNU Libtool is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# As a special exception to the GNU General Public License,
+# if you distribute this file as part of a program or library that
+# is built using GNU Libtool, you may include this file under the
+# same distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Libtool; see the file COPYING.  If not, a copy
+# can be downloaded from http://www.gnu.org/licenses/gpl.html,
+# or obtained by writing to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+# Usage: $progname [OPTION]... [MODE-ARG]...
+#
+# Provide generalized library-building support services.
+#
+#       --config             show all configuration variables
+#       --debug              enable verbose shell tracing
+#   -n, --dry-run            display commands without modifying any files
+#       --features           display basic configuration information and exit
+#       --mode=MODE          use operation mode MODE
+#       --preserve-dup-deps  don't remove duplicate dependency libraries
+#       --quiet, --silent    don't print informational messages
+#       --no-quiet, --no-silent
+#                            print informational messages (default)
+#       --tag=TAG            use configuration variables from tag TAG
+#   -v, --verbose            print more informational messages than default
+#       --no-verbose         don't print the extra informational messages
+#       --version            print version information
+#   -h, --help, --help-all   print short, long, or detailed help message
+#
+# MODE must be one of the following:
+#
+#         clean              remove files from the build directory
+#         compile            compile a source file into a libtool object
+#         execute            automatically set library path, then run a program
+#         finish             complete the installation of libtool libraries
+#         install            install libraries or executables
+#         link               create a library or an executable
+#         uninstall          remove libraries from an installed directory
+#
+# MODE-ARGS vary depending on the MODE.  When passed as first option,
+# `--mode=MODE' may be abbreviated as `MODE' or a unique abbreviation of that.
+# Try `$progname --help --mode=MODE' for a more detailed description of MODE.
+#
+# When reporting a bug, please describe a test case to reproduce it and
+# include the following information:
+#
+#         host-triplet:	$host
+#         shell:		$SHELL
+#         compiler:		$LTCC
+#         compiler flags:		$LTCFLAGS
+#         linker:		$LD (gnu? $with_gnu_ld)
+#         $progname:	(GNU libtool) 2.2.10
+#         automake:	$automake_version
+#         autoconf:	$autoconf_version
+#
+# Report bugs to <bug-libtool at gnu.org>.
+
+PROGRAM=libtool
+PACKAGE=libtool
+VERSION=2.2.10
+TIMESTAMP=""
+package_revision=1.3175
+
+# Be Bourne compatible
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then
+  emulate sh
+  NULLCMD=:
+  # Zsh 3.x and 4.x perform word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+else
+  case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+$1
+_LTECHO_EOF'
+}
+
+# NLS nuisances: We save the old values to restore during execute mode.
+lt_user_locale=
+lt_safe_locale=
+for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+do
+  eval "if test \"\${$lt_var+set}\" = set; then
+          save_$lt_var=\$$lt_var
+          $lt_var=C
+	  export $lt_var
+	  lt_user_locale=\"$lt_var=\\\$save_\$lt_var; \$lt_user_locale\"
+	  lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\"
+	fi"
+done
+LC_ALL=C
+LANGUAGE=C
+export LANGUAGE LC_ALL
+
+$lt_unset CDPATH
+
+
+# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
+# is ksh but when the shell is invoked as "sh" and the current value of
+# the _XPG environment variable is not equal to 1 (one), the special
+# positional parameter $0, within a function call, is the name of the
+# function.
+progpath="$0"
+
+
+
+: ${CP="cp -f"}
+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'}
+: ${EGREP="/usr/bin/grep -E"}
+: ${FGREP="/usr/bin/grep -F"}
+: ${GREP="/usr/bin/grep"}
+: ${LN_S="ln -s"}
+: ${MAKE="make"}
+: ${MKDIR="mkdir"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+: ${SED="/usr/bin/sed"}
+: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
+: ${Xsed="$SED -e 1s/^X//"}
+
+# Global variables:
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+EXIT_MISMATCH=63  # $? = 63 is used to indicate version mismatch to missing.
+EXIT_SKIP=77	  # $? = 77 is used to indicate a skipped test to automake.
+
+exit_status=$EXIT_SUCCESS
+
+# Make sure IFS has a sensible default
+lt_nl='
+'
+IFS=" 	$lt_nl"
+
+dirname="s,/[^/]*$,,"
+basename="s,^.*/,,"
+
+# func_dirname_and_basename file append nondir_replacement
+# perform func_basename and func_dirname in a single function
+# call:
+#   dirname:  Compute the dirname of FILE.  If nonempty,
+#             add APPEND to the result, otherwise set result
+#             to NONDIR_REPLACEMENT.
+#             value returned in "$func_dirname_result"
+#   basename: Compute filename of FILE.
+#             value returned in "$func_basename_result"
+# Implementation must be kept synchronized with func_dirname
+# and func_basename. For efficiency, we do not delegate to
+# those functions but instead duplicate the functionality here.
+func_dirname_and_basename ()
+{
+  # Extract subdirectory from the argument.
+  func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"`
+  if test "X$func_dirname_result" = "X${1}"; then
+    func_dirname_result="${3}"
+  else
+    func_dirname_result="$func_dirname_result${2}"
+  fi
+  func_basename_result=`$ECHO "${1}" | $SED -e "$basename"`
+}
+
+# Generated shell functions inserted here.
+
+# These SED scripts presuppose an absolute path with a trailing slash.
+pathcar='s,^/\([^/]*\).*$,\1,'
+pathcdr='s,^/[^/]*,,'
+removedotparts=':dotsl
+		s@/\./@/@g
+		t dotsl
+		s,/\.$,/,'
+collapseslashes='s@/\{1,\}@/@g'
+finalslash='s,/*$,/,'
+
+# func_normal_abspath PATH
+# Remove doubled-up and trailing slashes, "." path components,
+# and cancel out any ".." path components in PATH after making
+# it an absolute path.
+#             value returned in "$func_normal_abspath_result"
+func_normal_abspath ()
+{
+  # Start from root dir and reassemble the path.
+  func_normal_abspath_result=
+  func_normal_abspath_tpath=$1
+  func_normal_abspath_altnamespace=
+  case $func_normal_abspath_tpath in
+    "")
+      # Empty path, that just means $cwd.
+      func_stripname '' '/' "`pwd`"
+      func_normal_abspath_result=$func_stripname_result
+      return
+    ;;
+    # The next three entries are used to spot a run of precisely
+    # two leading slashes without using negated character classes;
+    # we take advantage of case's first-match behaviour.
+    ///*)
+      # Unusual form of absolute path, do nothing.
+    ;;
+    //*)
+      # Not necessarily an ordinary path; POSIX reserves leading '//'
+      # and for example Cygwin uses it to access remote file shares
+      # over CIFS/SMB, so we conserve a leading double slash if found.
+      func_normal_abspath_altnamespace=/
+    ;;
+    /*)
+      # Absolute path, do nothing.
+    ;;
+    *)
+      # Relative path, prepend $cwd.
+      func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
+    ;;
+  esac
+  # Cancel out all the simple stuff to save iterations.  We also want
+  # the path to end with a slash for ease of parsing, so make sure
+  # there is one (and only one) here.
+  func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"`
+  while :; do
+    # Processed it all yet?
+    if test "$func_normal_abspath_tpath" = / ; then
+      # If we ascended to the root using ".." the result may be empty now.
+      if test -z "$func_normal_abspath_result" ; then
+        func_normal_abspath_result=/
+      fi
+      break
+    fi
+    func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcar"`
+    func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
+        -e "$pathcdr"`
+    # Figure out what to do with it
+    case $func_normal_abspath_tcomponent in
+      "")
+        # Trailing empty path component, ignore it.
+      ;;
+      ..)
+        # Parent dir; strip last assembled component from result.
+        func_dirname "$func_normal_abspath_result"
+        func_normal_abspath_result=$func_dirname_result
+      ;;
+      *)
+        # Actual path component, append it.
+        func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent
+      ;;
+    esac
+  done
+  # Restore leading double-slash if one was found on entry.
+  func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
+}
+
+# func_relative_path SRCDIR DSTDIR
+# generates a relative path from SRCDIR to DSTDIR, with a trailing
+# slash if non-empty, suitable for immediately appending a filename
+# without needing to append a separator.
+#             value returned in "$func_relative_path_result"
+func_relative_path ()
+{
+  func_relative_path_result=
+  func_normal_abspath "$1"
+  func_relative_path_tlibdir=$func_normal_abspath_result
+  func_normal_abspath "$2"
+  func_relative_path_tbindir=$func_normal_abspath_result
+
+  # Ascend the tree starting from libdir
+  while :; do
+    # check if we have found a prefix of bindir
+    case $func_relative_path_tbindir in
+      $func_relative_path_tlibdir)
+        # found an exact match
+        func_relative_path_tcancelled=
+        break
+        ;;
+      $func_relative_path_tlibdir*)
+        # found a matching prefix
+        func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
+        func_relative_path_tcancelled=$func_stripname_result
+        if test -z "$func_relative_path_result"; then
+          func_relative_path_result=.
+        fi
+        break
+        ;;
+      *)
+        func_dirname $func_relative_path_tlibdir
+        func_relative_path_tlibdir=${func_dirname_result}
+        if test "x$func_relative_path_tlibdir" = x ; then
+          # Have to descend all the way to the root!
+          func_relative_path_result=../$func_relative_path_result
+          func_relative_path_tcancelled=$func_relative_path_tbindir
+          break
+        fi
+        func_relative_path_result=../$func_relative_path_result
+        ;;
+    esac
+  done
+
+  # Now calculate path; take care to avoid doubling-up slashes.
+  func_stripname '' '/' "$func_relative_path_result"
+  func_relative_path_result=$func_stripname_result
+  func_stripname '/' '/' "$func_relative_path_tcancelled"
+  if test "x$func_stripname_result" != x ; then
+    func_relative_path_result=${func_relative_path_result}/${func_stripname_result}
+  fi
+
+  # Normalisation. If bindir is libdir, return empty string,
+  # else relative path ending with a slash; either way, target
+  # file name can be directly appended.
+  if test ! -z "$func_relative_path_result"; then
+    func_stripname './' '' "$func_relative_path_result/"
+    func_relative_path_result=$func_stripname_result
+  fi
+}
+
+# The name of this program:
+func_dirname_and_basename "$progpath"
+progname=$func_basename_result
+
+# Make sure we have an absolute path for reexecution:
+case $progpath in
+  [\\/]*|[A-Za-z]:\\*) ;;
+  *[\\/]*)
+     progdir=$func_dirname_result
+     progdir=`cd "$progdir" && pwd`
+     progpath="$progdir/$progname"
+     ;;
+  *)
+     save_IFS="$IFS"
+     IFS=:
+     for progdir in $PATH; do
+       IFS="$save_IFS"
+       test -x "$progdir/$progname" && break
+     done
+     IFS="$save_IFS"
+     test -n "$progdir" || progdir=`pwd`
+     progpath="$progdir/$progname"
+     ;;
+esac
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+Xsed="${SED}"' -e 1s/^X//'
+sed_quote_subst='s/\([`"$\\]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\(["`\\]\)/\\\1/g'
+
+# Re-`\' parameter expansions in output of double_quote_subst that were
+# `\'-ed in input to the same.  If an odd number of `\' preceded a '$'
+# in input to double_quote_subst, that '$' was protected from expansion.
+# Since each input `\' is now two `\'s, look for any number of runs of
+# four `\'s followed by two `\'s and then a '$'.  `\' that '$'.
+bs='\\'
+bs2='\\\\'
+bs4='\\\\\\\\'
+dollar='\$'
+sed_double_backslash="\
+  s/$bs4/&\\
+/g
+  s/^$bs2$dollar/$bs&/
+  s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g
+  s/\n//g"
+
+# Standard options:
+opt_dry_run=false
+opt_help=false
+opt_quiet=false
+opt_verbose=false
+opt_warning=:
+
+# func_echo arg...
+# Echo program name prefixed message, along with the current mode
+# name if it has been set yet.
+func_echo ()
+{
+    $ECHO "$progname${mode+: }$mode: $*"
+}
+
+# func_verbose arg...
+# Echo program name prefixed message in verbose mode only.
+func_verbose ()
+{
+    $opt_verbose && func_echo ${1+"$@"}
+
+    # A bug in bash halts the script if the last line of a function
+    # fails when set -e is in force, so we need another command to
+    # work around that:
+    :
+}
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*"
+}
+
+# func_error arg...
+# Echo program name prefixed message to standard error.
+func_error ()
+{
+    $ECHO "$progname${mode+: }$mode: "${1+"$@"} 1>&2
+}
+
+# func_warning arg...
+# Echo program name prefixed warning message to standard error.
+func_warning ()
+{
+    $opt_warning && $ECHO "$progname${mode+: }$mode: warning: "${1+"$@"} 1>&2
+
+    # bash bug again:
+    :
+}
+
+# func_fatal_error arg...
+# Echo program name prefixed message to standard error, and exit.
+func_fatal_error ()
+{
+    func_error ${1+"$@"}
+    exit $EXIT_FAILURE
+}
+
+# func_fatal_help arg...
+# Echo program name prefixed message to standard error, followed by
+# a help hint, and exit.
+func_fatal_help ()
+{
+    func_error ${1+"$@"}
+    func_fatal_error "$help"
+}
+help="Try \`$progname --help' for more information."  ## default
+
+
+# func_grep expression filename
+# Check whether EXPRESSION matches any line of FILENAME, without output.
+func_grep ()
+{
+    $GREP "$1" "$2" >/dev/null 2>&1
+}
+
+
+# func_mkdir_p directory-path
+# Make sure the entire path to DIRECTORY-PATH is available.
+func_mkdir_p ()
+{
+    my_directory_path="$1"
+    my_dir_list=
+
+    if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then
+
+      # Protect directory names starting with `-'
+      case $my_directory_path in
+        -*) my_directory_path="./$my_directory_path" ;;
+      esac
+
+      # While some portion of DIR does not yet exist...
+      while test ! -d "$my_directory_path"; do
+        # ...make a list in topmost first order.  Use a colon delimited
+	# list in case some portion of path contains whitespace.
+        my_dir_list="$my_directory_path:$my_dir_list"
+
+        # If the last portion added has no slash in it, the list is done
+        case $my_directory_path in */*) ;; *) break ;; esac
+
+        # ...otherwise throw away the child directory and loop
+        my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"`
+      done
+      my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'`
+
+      save_mkdir_p_IFS="$IFS"; IFS=':'
+      for my_dir in $my_dir_list; do
+	IFS="$save_mkdir_p_IFS"
+        # mkdir can fail with a `File exists' error if two processes
+        # try to create one of the directories concurrently.  Don't
+        # stop in that case!
+        $MKDIR "$my_dir" 2>/dev/null || :
+      done
+      IFS="$save_mkdir_p_IFS"
+
+      # Bail out if we (or some other process) failed to create a directory.
+      test -d "$my_directory_path" || \
+        func_fatal_error "Failed to create \`$1'"
+    fi
+}
+
+
+# func_mktempdir [string]
+# Make a temporary directory that won't clash with other running
+# libtool processes, and avoids race conditions if possible.  If
+# given, STRING is the basename for that directory.
+func_mktempdir ()
+{
+    my_template="${TMPDIR-/tmp}/${1-$progname}"
+
+    if test "$opt_dry_run" = ":"; then
+      # Return a directory name, but don't create it in dry-run mode
+      my_tmpdir="${my_template}-$$"
+    else
+
+      # If mktemp works, use that first and foremost
+      my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null`
+
+      if test ! -d "$my_tmpdir"; then
+        # Failing that, at least try and use $RANDOM to avoid a race
+        my_tmpdir="${my_template}-${RANDOM-0}$$"
+
+        save_mktempdir_umask=`umask`
+        umask 0077
+        $MKDIR "$my_tmpdir"
+        umask $save_mktempdir_umask
+      fi
+
+      # If we're not in dry-run mode, bomb out on failure
+      test -d "$my_tmpdir" || \
+        func_fatal_error "cannot create temporary directory \`$my_tmpdir'"
+    fi
+
+    $ECHO "$my_tmpdir"
+}
+
+
+# func_quote_for_eval arg
+# Aesthetically quote ARG to be evaled later.
+# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT
+# is double-quoted, suitable for a subsequent eval, whereas
+# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters
+# which are still active within double quotes backslashified.
+func_quote_for_eval ()
+{
+    case $1 in
+      *[\\\`\"\$]*)
+	func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;;
+      *)
+        func_quote_for_eval_unquoted_result="$1" ;;
+    esac
+
+    case $func_quote_for_eval_unquoted_result in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting, command substitution and variable
+      # expansion for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\""
+        ;;
+      *)
+        func_quote_for_eval_result="$func_quote_for_eval_unquoted_result"
+    esac
+}
+
+
+# func_quote_for_expand arg
+# Aesthetically quote ARG to be evaled later; same as above,
+# but do not quote variable references.
+func_quote_for_expand ()
+{
+    case $1 in
+      *[\\\`\"]*)
+	my_arg=`$ECHO "$1" | $SED \
+	    -e "$double_quote_subst" -e "$sed_double_backslash"` ;;
+      *)
+        my_arg="$1" ;;
+    esac
+
+    case $my_arg in
+      # Double-quote args containing shell metacharacters to delay
+      # word splitting and command substitution for a subsequent eval.
+      # Many Bourne shells cannot handle close brackets correctly
+      # in scan sets, so we specify it separately.
+      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
+        my_arg="\"$my_arg\""
+        ;;
+    esac
+
+    func_quote_for_expand_result="$my_arg"
+}
+
+
+# func_show_eval cmd [fail_exp]
+# Unless opt_silent is true, then output CMD.  Then, if opt_dryrun is
+# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.
+func_show_eval ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$my_cmd"
+      my_status=$?
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+
+# func_show_eval_locale cmd [fail_exp]
+# Unless opt_silent is true, then output CMD.  Then, if opt_dryrun is
+# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
+# is given, then evaluate it.  Use the saved locale for evaluation.
+func_show_eval_locale ()
+{
+    my_cmd="$1"
+    my_fail_exp="${2-:}"
+
+    ${opt_silent-false} || {
+      func_quote_for_expand "$my_cmd"
+      eval "func_echo $func_quote_for_expand_result"
+    }
+
+    if ${opt_dry_run-false}; then :; else
+      eval "$lt_user_locale
+	    $my_cmd"
+      my_status=$?
+      eval "$lt_safe_locale"
+      if test "$my_status" -eq 0; then :; else
+	eval "(exit $my_status); $my_fail_exp"
+      fi
+    fi
+}
+
+
+# func_version
+# Echo version message to standard output and exit.
+func_version ()
+{
+    $SED -n '/(C)/!b go
+	:more
+	/\./!{
+	  N
+	  s/\n# / /
+	  b more
+	}
+	:go
+	/^# '$PROGRAM' (GNU /,/# warranty; / {
+        s/^# //
+	s/^# *$//
+        s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/
+        p
+     }' < "$progpath"
+     exit $?
+}
+
+# func_usage
+# Echo short help message to standard output and exit.
+func_usage ()
+{
+    $SED -n '/^# Usage:/,/^#  *.*--help/ {
+        s/^# //
+	s/^# *$//
+	s/\$progname/'$progname'/
+	p
+    }' < "$progpath"
+    echo
+    $ECHO "run \`$progname --help | more' for full usage"
+    exit $?
+}
+
+# func_help [NOEXIT]
+# Echo long help message to standard output and exit,
+# unless 'noexit' is passed as argument.
+func_help ()
+{
+    $SED -n '/^# Usage:/,/# Report bugs to/ {
+        s/^# //
+	s/^# *$//
+	s*\$progname*'$progname'*
+	s*\$host*'"$host"'*
+	s*\$SHELL*'"$SHELL"'*
+	s*\$LTCC*'"$LTCC"'*
+	s*\$LTCFLAGS*'"$LTCFLAGS"'*
+	s*\$LD*'"$LD"'*
+	s/\$with_gnu_ld/'"$with_gnu_ld"'/
+	s/\$automake_version/'"`(automake --version) 2>/dev/null |$SED 1q`"'/
+	s/\$autoconf_version/'"`(autoconf --version) 2>/dev/null |$SED 1q`"'/
+	p
+     }' < "$progpath"
+    ret=$?
+    if test -z "$1"; then
+      exit $ret
+    fi
+}
+
+# func_missing_arg argname
+# Echo program name prefixed message to standard error and set global
+# exit_cmd.
+func_missing_arg ()
+{
+    func_error "missing argument for $1."
+    exit_cmd=exit
+}
+
+exit_cmd=:
+
+
+
+
+
+
+magic="%%%MAGIC variable%%%"
+magic_exe="%%%MAGIC EXE variable%%%"
+
+# Global variables.
+# $mode is unset
+nonopt=
+execute_dlfiles=
+preserve_args=
+lo2o="s/\\.lo\$/.${objext}/"
+o2lo="s/\\.${objext}\$/.lo/"
+extracted_archives=
+extracted_serial=0
+
+opt_dry_run=false
+opt_duplicate_deps=false
+opt_silent=false
+opt_debug=:
+
+# If this variable is set in any of the actions, the command in it
+# will be execed at the end.  This prevents here-documents from being
+# left over by shells.
+exec_cmd=
+
+# func_fatal_configuration arg...
+# Echo program name prefixed message to standard error, followed by
+# a configuration failure hint, and exit.
+func_fatal_configuration ()
+{
+    func_error ${1+"$@"}
+    func_error "See the $PACKAGE documentation for more information."
+    func_fatal_error "Fatal configuration error."
+}
+
+
+# func_config
+# Display the configuration for all the tags in this script.
+func_config ()
+{
+    re_begincf='^# ### BEGIN LIBTOOL'
+    re_endcf='^# ### END LIBTOOL'
+
+    # Default configuration.
+    $SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath"
+
+    # Now print the configurations for the tags.
+    for tagname in $taglist; do
+      $SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath"
+    done
+
+    exit $?
+}
+
+# func_features
+# Display the features supported by this script.
+func_features ()
+{
+    echo "host: $host"
+    if test "$build_libtool_libs" = yes; then
+      echo "enable shared libraries"
+    else
+      echo "disable shared libraries"
+    fi
+    if test "$build_old_libs" = yes; then
+      echo "enable static libraries"
+    else
+      echo "disable static libraries"
+    fi
+
+    exit $?
+}
+
+# func_enable_tag tagname
+# Verify that TAGNAME is valid, and either flag an error and exit, or
+# enable the TAGNAME tag.  We also add TAGNAME to the global $taglist
+# variable here.
+func_enable_tag ()
+{
+  # Global variable:
+  tagname="$1"
+
+  re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$"
+  re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$"
+  sed_extractcf="/$re_begincf/,/$re_endcf/p"
+
+  # Validate tagname.
+  case $tagname in
+    *[!-_A-Za-z0-9,/]*)
+      func_fatal_error "invalid tag name: $tagname"
+      ;;
+  esac
+
+  # Don't test for the "default" C tag, as we know it's
+  # there but not specially marked.
+  case $tagname in
+    CC) ;;
+    *)
+      if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then
+	taglist="$taglist $tagname"
+
+	# Evaluate the configuration.  Be careful to quote the path
+	# and the sed script, to avoid splitting on whitespace, but
+	# also don't use non-portable quotes within backquotes within
+	# quotes; we have to do it in 2 steps:
+	extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"`
+	eval "$extractedcf"
+      else
+	func_error "ignoring unknown tag $tagname"
+      fi
+      ;;
+  esac
+}
+
+# Parse options once, thoroughly.  This comes as soon as possible in
+# the script to make things like `libtool --version' happen quickly.
+{
+
+  # Shorthand for --mode=foo, only valid as the first argument
+  case $1 in
+  clean|clea|cle|cl)
+    shift; set dummy --mode clean ${1+"$@"}; shift
+    ;;
+  compile|compil|compi|comp|com|co|c)
+    shift; set dummy --mode compile ${1+"$@"}; shift
+    ;;
+  execute|execut|execu|exec|exe|ex|e)
+    shift; set dummy --mode execute ${1+"$@"}; shift
+    ;;
+  finish|finis|fini|fin|fi|f)
+    shift; set dummy --mode finish ${1+"$@"}; shift
+    ;;
+  install|instal|insta|inst|ins|in|i)
+    shift; set dummy --mode install ${1+"$@"}; shift
+    ;;
+  link|lin|li|l)
+    shift; set dummy --mode link ${1+"$@"}; shift
+    ;;
+  uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
+    shift; set dummy --mode uninstall ${1+"$@"}; shift
+    ;;
+  esac
+
+  # Parse non-mode specific arguments:
+  while test "$#" -gt 0; do
+    opt="$1"
+    shift
+
+    case $opt in
+      --config)		func_config					;;
+
+      --debug)		preserve_args="$preserve_args $opt"
+			func_echo "enabling shell trace mode"
+			opt_debug='set -x'
+			$opt_debug
+			;;
+
+      -dlopen)		test "$#" -eq 0 && func_missing_arg "$opt" && break
+			execute_dlfiles="$execute_dlfiles $1"
+			shift
+			;;
+
+      --dry-run | -n)	opt_dry_run=:					;;
+      --features)       func_features					;;
+      --finish)		mode="finish"					;;
+
+      --mode)		test "$#" -eq 0 && func_missing_arg "$opt" && break
+			case $1 in
+			  # Valid mode arguments:
+			  clean)	;;
+			  compile)	;;
+			  execute)	;;
+			  finish)	;;
+			  install)	;;
+			  link)		;;
+			  relink)	;;
+			  uninstall)	;;
+
+			  # Catch anything else as an error
+			  *) func_error "invalid argument for $opt"
+			     exit_cmd=exit
+			     break
+			     ;;
+		        esac
+
+			mode="$1"
+			shift
+			;;
+
+      --preserve-dup-deps)
+			opt_duplicate_deps=:				;;
+
+      --quiet|--silent)	preserve_args="$preserve_args $opt"
+			opt_silent=:
+			opt_verbose=false
+			;;
+
+      --no-quiet|--no-silent)
+			preserve_args="$preserve_args $opt"
+			opt_silent=false
+			;;
+
+      --verbose| -v)	preserve_args="$preserve_args $opt"
+			opt_silent=false
+			opt_verbose=:
+			;;
+
+      --no-verbose)	preserve_args="$preserve_args $opt"
+			opt_verbose=false
+			;;
+
+      --tag)		test "$#" -eq 0 && func_missing_arg "$opt" && break
+			preserve_args="$preserve_args $opt $1"
+			func_enable_tag "$1"	# tagname is set here
+			shift
+			;;
+
+      # Separate optargs to long options:
+      -dlopen=*|--mode=*|--tag=*)
+			func_opt_split "$opt"
+			set dummy "$func_opt_split_opt" "$func_opt_split_arg" ${1+"$@"}
+			shift
+			;;
+
+      -\?|-h)		func_usage					;;
+      --help)		opt_help=:					;;
+      --help-all)	opt_help=': help-all'				;;
+      --version)	func_version					;;
+
+      -*)		func_fatal_help "unrecognized option \`$opt'"	;;
+
+      *)		nonopt="$opt"
+			break
+			;;
+    esac
+  done
+
+
+  case $host in
+    *cygwin* | *mingw* | *pw32* | *cegcc*)
+      # don't eliminate duplications in $postdeps and $predeps
+      opt_duplicate_compiler_generated_deps=:
+      ;;
+    *)
+      opt_duplicate_compiler_generated_deps=$opt_duplicate_deps
+      ;;
+  esac
+
+  # Having warned about all mis-specified options, bail out if
+  # anything was wrong.
+  $exit_cmd $EXIT_FAILURE
+}
+
+# func_check_version_match
+# Ensure that we are using m4 macros, and libtool script from the same
+# release of libtool.
+func_check_version_match ()
+{
+  if test "$package_revision" != "$macro_revision"; then
+    if test "$VERSION" != "$macro_version"; then
+      if test -z "$macro_version"; then
+        cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from an older release.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+      else
+        cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, but the
+$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
+$progname: and run autoconf again.
+_LT_EOF
+      fi
+    else
+      cat >&2 <<_LT_EOF
+$progname: Version mismatch error.  This is $PACKAGE $VERSION, revision $package_revision,
+$progname: but the definition of this LT_INIT comes from revision $macro_revision.
+$progname: You should recreate aclocal.m4 with macros from revision $package_revision
+$progname: of $PACKAGE $VERSION and run autoconf again.
+_LT_EOF
+    fi
+
+    exit $EXIT_MISMATCH
+  fi
+}
+
+
+## ----------- ##
+##    Main.    ##
+## ----------- ##
+
+$opt_help || {
+  # Sanity checks first:
+  func_check_version_match
+
+  if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then
+    func_fatal_configuration "not configured to build any kind of library"
+  fi
+
+  test -z "$mode" && func_fatal_error "error: you must specify a MODE."
+
+
+  # Darwin needs the shared library extension evaluated at run time.
+  eval std_shrext=\"$shrext_cmds\"
+
+
+  # Only execute mode is allowed to have -dlopen flags.
+  if test -n "$execute_dlfiles" && test "$mode" != execute; then
+    func_error "unrecognized option \`-dlopen'"
+    $ECHO "$help" 1>&2
+    exit $EXIT_FAILURE
+  fi
+
+  # Change the help message to a mode-specific one.
+  generic_help="$help"
+  help="Try \`$progname --help --mode=$mode' for more information."
+}
+
+
+# func_lalib_p file
+# True iff FILE is a libtool `.la' library or `.lo' object file.
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_lalib_p ()
+{
+    test -f "$1" &&
+      $SED -e 4q "$1" 2>/dev/null \
+        | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1
+}
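+# Sketch of what this matches (names illustrative): a real `libfoo.la'
+# starts with a header such as
+#   # libfoo.la - a libtool library file
+#   # Generated by ltmain.sh (GNU libtool) ...
+# so checking the first four lines for "^# Generated by .*$PACKAGE" suffices.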
+
+# func_lalib_unsafe_p file
+# True iff FILE is a libtool `.la' library or `.lo' object file.
+# This function implements the same check as func_lalib_p without
+# resorting to external programs.  To this end, it redirects stdin and
+# closes it afterwards, without saving the original file descriptor.
+# As a safety measure, use it only where a negative result would be
+# fatal anyway.  Works if `file' does not exist.
+func_lalib_unsafe_p ()
+{
+    lalib_p=no
+    if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then
+	for lalib_p_l in 1 2 3 4
+	do
+	    read lalib_p_line
+	    case "$lalib_p_line" in
+		\#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;;
+	    esac
+	done
+	exec 0<&5 5<&-
+    fi
+    test "$lalib_p" = yes
+}
+
+# func_ltwrapper_script_p file
+# True iff FILE is a libtool wrapper script
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_script_p ()
+{
+    func_lalib_p "$1"
+}
+
+# func_ltwrapper_executable_p file
+# True iff FILE is a libtool wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_executable_p ()
+{
+    func_ltwrapper_exec_suffix=
+    case $1 in
+    *.exe) ;;
+    *) func_ltwrapper_exec_suffix=.exe ;;
+    esac
+    $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1
+}
+
+# func_ltwrapper_scriptname file
+# Assumes FILE is an ltwrapper_executable; uses $file to determine the
+# appropriate filename for a temporary ltwrapper_script.
+func_ltwrapper_scriptname ()
+{
+    func_ltwrapper_scriptname_result=""
+    if func_ltwrapper_executable_p "$1"; then
+	func_dirname_and_basename "$1" "" "."
+	func_stripname '' '.exe' "$func_basename_result"
+	func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper"
+    fi
+}
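+# Example (paths illustrative): for `./foo.exe' with objdir=.libs this sets
+#   func_ltwrapper_scriptname_result=./.libs/foo_ltshwrapper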
+
+# func_ltwrapper_p file
+# True iff FILE is a libtool wrapper script or wrapper executable
+# This function is only a basic sanity check; it will hardly flush out
+# determined imposters.
+func_ltwrapper_p ()
+{
+    func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1"
+}
+
+
+# func_execute_cmds commands fail_cmd
+# Execute tilde-delimited COMMANDS.
+# If FAIL_CMD is given, eval that upon failure.
+# FAIL_CMD may read-access the current command in variable CMD!
+func_execute_cmds ()
+{
+    $opt_debug
+    save_ifs=$IFS; IFS='~'
+    for cmd in $1; do
+      IFS=$save_ifs
+      eval cmd=\"$cmd\"
+      func_show_eval "$cmd" "${2-:}"
+    done
+    IFS=$save_ifs
+}
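+# Usage sketch (commands illustrative):
+#   func_execute_cmds 'echo one~echo two' 'exit $?'
+# runs the two commands in order, splitting the first argument only on `~'.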
+
+
+# func_source file
+# Source FILE, adding directory component if necessary.
+# Note that it is not necessary on cygwin/mingw to append a dot to
+# FILE even if both FILE and FILE.exe exist: automatic-append-.exe
+# behavior happens only for exec(3), not for open(2)!  Also, sourcing
+# `FILE.' does not work on cygwin managed mounts.
+func_source ()
+{
+    $opt_debug
+    case $1 in
+    */* | *\\*)	. "$1" ;;
+    *)		. "./$1" ;;
+    esac
+}
+
+
+# func_infer_tag arg
+# Infer tagged configuration to use if any are available and
+# if one wasn't chosen via the "--tag" command line option.
+# Only attempt this if the compiler in the base compile
+# command doesn't match the default compiler.
+# arg is usually of the form 'gcc ...'
+func_infer_tag ()
+{
+    $opt_debug
+    if test -n "$available_tags" && test -z "$tagname"; then
+      CC_quoted=
+      for arg in $CC; do
+        func_quote_for_eval "$arg"
+	CC_quoted="$CC_quoted $func_quote_for_eval_result"
+      done
+      CC_expanded=`func_echo_all $CC`
+      CC_quoted_expanded=`func_echo_all $CC_quoted`
+      case $@ in
+      # Blanks in the command may have been stripped by the calling shell,
+      # but not from the CC environment variable when configure was run.
+      " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+      " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) ;;
+      # Blanks at the start of $base_compile will cause this to fail
+      # if we don't check for them as well.
+      *)
+	for z in $available_tags; do
+	  if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then
+	    # Evaluate the configuration.
+	    eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`"
+	    CC_quoted=
+	    for arg in $CC; do
+	      # Double-quote args containing other shell metacharacters.
+	      func_quote_for_eval "$arg"
+	      CC_quoted="$CC_quoted $func_quote_for_eval_result"
+	    done
+	    CC_expanded=`func_echo_all $CC`
+	    CC_quoted_expanded=`func_echo_all $CC_quoted`
+	    case "$@ " in
+	    " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
+	    " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*)
+	      # The compiler in the base compile command matches
+	      # the one in the tagged configuration.
+	      # Assume this is the tagged configuration we want.
+	      tagname=$z
+	      break
+	      ;;
+	    esac
+	  fi
+	done
+	# If $tagname still isn't set, then no tagged configuration
+	# was found, so let the user know that the "--tag" command
+	# line option must be used.
+	if test -z "$tagname"; then
+	  func_echo "unable to infer tagged configuration"
+	  func_fatal_error "specify a tag with \`--tag'"
+#	else
+#	  func_verbose "using $tagname tagged configuration"
+	fi
+	;;
+      esac
+    fi
+}
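+# Sketch: with base_compile `g++ -c foo.cpp' (illustrative) and a configured
+# CXX tag whose CC is g++, the loop above picks tagname=CXX even though no
+# explicit --tag was given.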
+
+
+
+# func_write_libtool_object output_name pic_name nonpic_name
+# Create a libtool object file (analogous to a ".la" file),
+# but don't create it if we're doing a dry run.
+func_write_libtool_object ()
+{
+    write_libobj=${1}
+    if test "$build_libtool_libs" = yes; then
+      write_lobj=\'${2}\'
+    else
+      write_lobj=none
+    fi
+
+    if test "$build_old_libs" = yes; then
+      write_oldobj=\'${3}\'
+    else
+      write_oldobj=none
+    fi
+
+    $opt_dry_run || {
+      cat >${write_libobj}T <<EOF
+# $write_libobj - a libtool object file
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# Name of the PIC object.
+pic_object=$write_lobj
+
+# Name of the non-PIC object
+non_pic_object=$write_oldobj
+
+EOF
+      $MV "${write_libobj}T" "${write_libobj}"
+    }
+}
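+# A resulting `foo.lo' (name illustrative) therefore looks roughly like:
+#   # foo.lo - a libtool object file
+#   pic_object='.libs/foo.o'
+#   non_pic_object='foo.o'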
+
+# func_mode_compile arg...
+func_mode_compile ()
+{
+    $opt_debug
+    # Get the compilation command and the source file.
+    base_compile=
+    srcfile="$nonopt"  #  always keep a non-empty value in "srcfile"
+    suppress_opt=yes
+    suppress_output=
+    arg_mode=normal
+    libobj=
+    later=
+    pie_flag=
+
+    for arg
+    do
+      case $arg_mode in
+      arg  )
+	# do not "continue".  Instead, add this to base_compile
+	lastarg="$arg"
+	arg_mode=normal
+	;;
+
+      target )
+	libobj="$arg"
+	arg_mode=normal
+	continue
+	;;
+
+      normal )
+	# Accept any command-line options.
+	case $arg in
+	-o)
+	  test -n "$libobj" && \
+	    func_fatal_error "you cannot specify \`-o' more than once"
+	  arg_mode=target
+	  continue
+	  ;;
+
+	-pie | -fpie | -fPIE)
+          pie_flag="$pie_flag $arg"
+	  continue
+	  ;;
+
+	-shared | -static | -prefer-pic | -prefer-non-pic)
+	  later="$later $arg"
+	  continue
+	  ;;
+
+	-no-suppress)
+	  suppress_opt=no
+	  continue
+	  ;;
+
+	-Xcompiler)
+	  arg_mode=arg  #  the next one goes into the "base_compile" arg list
+	  continue      #  The current "srcfile" will either be retained or
+	  ;;            #  replaced later.  I would guess that would be a bug.
+
+	-Wc,*)
+	  func_stripname '-Wc,' '' "$arg"
+	  args=$func_stripname_result
+	  lastarg=
+	  save_ifs="$IFS"; IFS=','
+	  for arg in $args; do
+	    IFS="$save_ifs"
+	    func_quote_for_eval "$arg"
+	    lastarg="$lastarg $func_quote_for_eval_result"
+	  done
+	  IFS="$save_ifs"
+	  func_stripname ' ' '' "$lastarg"
+	  lastarg=$func_stripname_result
+
+	  # Add the arguments to base_compile.
+	  base_compile="$base_compile $lastarg"
+	  continue
+	  ;;
+
+	*)
+	  # Accept the current argument as the source file.
+	  # The previous "srcfile" becomes the current argument.
+	  #
+	  lastarg="$srcfile"
+	  srcfile="$arg"
+	  ;;
+	esac  #  case $arg
+	;;
+      esac    #  case $arg_mode
+
+      # Aesthetically quote the previous argument.
+      func_quote_for_eval "$lastarg"
+      base_compile="$base_compile $func_quote_for_eval_result"
+    done # for arg
+
+    case $arg_mode in
+    arg)
+      func_fatal_error "you must specify an argument for -Xcompile"
+      ;;
+    target)
+      func_fatal_error "you must specify a target with \`-o'"
+      ;;
+    *)
+      # Get the name of the library object.
+      test -z "$libobj" && {
+	func_basename "$srcfile"
+	libobj="$func_basename_result"
+      }
+      ;;
+    esac
+
+    # Recognize several different file suffixes.
+    # If the user specifies -o file.o, it is replaced with file.lo
+    case $libobj in
+    *.[cCFSifmso] | \
+    *.ada | *.adb | *.ads | *.asm | \
+    *.c++ | *.cc | *.ii | *.class | *.cpp | *.cxx | \
+    *.[fF][09]? | *.for | *.java | *.obj | *.sx | *.cu | *.cup)
+      func_xform "$libobj"
+      libobj=$func_xform_result
+      ;;
+    esac
+
+    case $libobj in
+    *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;;
+    *)
+      func_fatal_error "cannot determine name of library object from \`$libobj'"
+      ;;
+    esac
+
+    func_infer_tag $base_compile
+
+    for arg in $later; do
+      case $arg in
+      -shared)
+	test "$build_libtool_libs" != yes && \
+	  func_fatal_configuration "can not build a shared library"
+	build_old_libs=no
+	continue
+	;;
+
+      -static)
+	build_libtool_libs=no
+	build_old_libs=yes
+	continue
+	;;
+
+      -prefer-pic)
+	pic_mode=yes
+	continue
+	;;
+
+      -prefer-non-pic)
+	pic_mode=no
+	continue
+	;;
+      esac
+    done
+
+    func_quote_for_eval "$libobj"
+    test "X$libobj" != "X$func_quote_for_eval_result" \
+      && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"'	 &()|`$[]' \
+      && func_warning "libobj name \`$libobj' may not contain shell special characters."
+    func_dirname_and_basename "$obj" "/" ""
+    objname="$func_basename_result"
+    xdir="$func_dirname_result"
+    lobj=${xdir}$objdir/$objname
+
+    test -z "$base_compile" && \
+      func_fatal_help "you must specify a compilation command"
+
+    # Delete any leftover library objects.
+    if test "$build_old_libs" = yes; then
+      removelist="$obj $lobj $libobj ${libobj}T"
+    else
+      removelist="$lobj $libobj ${libobj}T"
+    fi
+
+    # On Cygwin there's no "real" PIC flag so we must build both object types
+    case $host_os in
+    cygwin* | mingw* | pw32* | os2* | cegcc*)
+      pic_mode=default
+      ;;
+    esac
+    if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then
+      # non-PIC code in shared libraries is not supported
+      pic_mode=default
+    fi
+
+    # Calculate the filename of the output object if the compiler
+    # does not support -o with -c
+    if test "$compiler_c_o" = no; then
+      output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.${objext}
+      lockfile="$output_obj.lock"
+    else
+      output_obj=
+      need_locks=no
+      lockfile=
+    fi
+
+    # Lock this critical section if it is needed
+    # We use this script file to make the link; this avoids creating a new file
+    if test "$need_locks" = yes; then
+      until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+	func_echo "Waiting for $lockfile to be removed"
+	sleep 2
+      done
+    elif test "$need_locks" = warn; then
+      if test -f "$lockfile"; then
+	$ECHO "\
+*** ERROR, $lockfile exists and contains:
+`cat $lockfile 2>/dev/null`
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+      removelist="$removelist $output_obj"
+      $ECHO "$srcfile" > "$lockfile"
+    fi
+
+    $opt_dry_run || $RM $removelist
+    removelist="$removelist $lockfile"
+    trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15
+
+    if test -n "$fix_srcfile_path"; then
+      eval srcfile=\"$fix_srcfile_path\"
+    fi
+    func_quote_for_eval "$srcfile"
+    qsrcfile=$func_quote_for_eval_result
+
+    # Only build a PIC object if we are building libtool libraries.
+    if test "$build_libtool_libs" = yes; then
+      # Without this assignment, base_compile gets emptied.
+      fbsd_hideous_sh_bug=$base_compile
+
+      if test "$pic_mode" != no; then
+	command="$base_compile $qsrcfile $pic_flag"
+      else
+	# Don't build PIC code
+	command="$base_compile $qsrcfile"
+      fi
+
+      func_mkdir_p "$xdir$objdir"
+
+      if test -z "$output_obj"; then
+	# Place PIC objects in $objdir
+	command="$command -o $lobj"
+      fi
+
+      func_show_eval_locale "$command"	\
+          'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE'
+
+      if test "$need_locks" = warn &&
+	 test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+	$ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+
+      # Just move the object if needed, then go on to compile the next one
+      if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then
+	func_show_eval '$MV "$output_obj" "$lobj"' \
+	  'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+      fi
+
+      # Allow error messages only from the first compilation.
+      if test "$suppress_opt" = yes; then
+	suppress_output=' >/dev/null 2>&1'
+      fi
+    fi
+
+    # Only build a position-dependent object if we build old libraries.
+    if test "$build_old_libs" = yes; then
+      if test "$pic_mode" != yes; then
+	# Don't build PIC code
+	command="$base_compile $qsrcfile$pie_flag"
+      else
+	command="$base_compile $qsrcfile $pic_flag"
+      fi
+      if test "$compiler_c_o" = yes; then
+	command="$command -o $obj"
+      fi
+
+      # Suppress compiler output if we already did a PIC compilation.
+      command="$command$suppress_output"
+      func_show_eval_locale "$command" \
+        '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE'
+
+      if test "$need_locks" = warn &&
+	 test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
+	$ECHO "\
+*** ERROR, $lockfile contains:
+`cat $lockfile 2>/dev/null`
+
+but it should contain:
+$srcfile
+
+This indicates that another process is trying to use the same
+temporary object file, and libtool could not work around it because
+your compiler does not support \`-c' and \`-o' together.  If you
+repeat this compilation, it may succeed, by chance, but you had better
+avoid parallel builds (make -j) on this platform, or get a better
+compiler."
+
+	$opt_dry_run || $RM $removelist
+	exit $EXIT_FAILURE
+      fi
+
+      # Just move the object if needed
+      if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then
+	func_show_eval '$MV "$output_obj" "$obj"' \
+	  'error=$?; $opt_dry_run || $RM $removelist; exit $error'
+      fi
+    fi
+
+    $opt_dry_run || {
+      func_write_libtool_object "$libobj" "$objdir/$objname" "$objname"
+
+      # Unlock the critical section if it was locked
+      if test "$need_locks" != no; then
+	removelist=$lockfile
+        $RM "$lockfile"
+      fi
+    }
+
+    exit $EXIT_SUCCESS
+}
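+# Typical compile-mode invocation (file names illustrative):
+#   libtool --mode=compile gcc -O2 -c foo.c
+# produces foo.lo plus, per the configuration, the PIC object .libs/foo.o
+# and/or the non-PIC object foo.o, as built by the two passes above.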
+
+$opt_help || {
+  test "$mode" = compile && func_mode_compile ${1+"$@"}
+}
+
+func_mode_help ()
+{
+    # We need to display help for each of the modes.
+    case $mode in
+      "")
+        # Generic help is extracted from the usage comments
+        # at the start of this file.
+        func_help
+        ;;
+
+      clean)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE...
+
+Remove files from the build directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically \`/bin/rm').  RM-OPTIONS are options (such as \`-f') to be passed
+to RM.
+
+If FILE is a libtool library, object or program, all the files associated
+with it are deleted. Otherwise, only FILE itself is deleted using RM."
+        ;;
+
+      compile)
+      $ECHO \
+"Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE
+
+Compile a source file into a libtool library object.
+
+This mode accepts the following additional options:
+
+  -o OUTPUT-FILE    set the output file name to OUTPUT-FILE
+  -no-suppress      do not suppress compiler output for multiple passes
+  -prefer-pic       try to build PIC objects only
+  -prefer-non-pic   try to build non-PIC objects only
+  -shared           do not build a \`.o' file suitable for static linking
+  -static           only build a \`.o' file suitable for static linking
+  -Wc,FLAG          pass FLAG directly to the compiler
+
+COMPILE-COMMAND is a command to be used in creating a \`standard' object file
+from the given SOURCEFILE.
+
+The output file name is determined by removing the directory component from
+SOURCEFILE, then substituting the C source code suffix \`.c' with the
+library object suffix, \`.lo'."
+        ;;
+
+      execute)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]...
+
+Automatically set library path, then run a program.
+
+This mode accepts the following additional options:
+
+  -dlopen FILE      add the directory containing FILE to the library path
+
+This mode sets the library path environment variable according to \`-dlopen'
+flags.
+
+If any of the ARGS are libtool executable wrappers, then they are translated
+into their corresponding uninstalled binary, and any of their required library
+directories are added to the library path.
+
+Then, COMMAND is executed, with ARGS as arguments."
+        ;;
+
+      finish)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=finish [LIBDIR]...
+
+Complete the installation of libtool libraries.
+
+Each LIBDIR is a directory that contains libtool libraries.
+
+The commands that this mode executes may require superuser privileges.  Use
+the \`--dry-run' option if you just want to see what would be executed."
+        ;;
+
+      install)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND...
+
+Install executables or libraries.
+
+INSTALL-COMMAND is the installation command.  The first component should be
+either the \`install' or \`cp' program.
+
+The following components of INSTALL-COMMAND are treated specially:
+
+  -inst-prefix-dir PREFIX-DIR  Use PREFIX-DIR as a staging area for installation
+
+The rest of the components are interpreted as arguments to that command (only
+BSD-compatible install options are recognized)."
+        ;;
+
+      link)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=link LINK-COMMAND...
+
+Link object files or libraries together to form another library, or to
+create an executable program.
+
+LINK-COMMAND is a command using the C compiler that you would use to create
+a program from several object files.
+
+The following components of LINK-COMMAND are treated specially:
+
+  -all-static       do not do any dynamic linking at all
+  -avoid-version    do not add a version suffix if possible
+  -bindir BINDIR    specify path to binaries directory (for systems where
+                    libraries must be found in the PATH setting at runtime)
+  -dlopen FILE      \`-dlpreopen' FILE if it cannot be dlopened at runtime
+  -dlpreopen FILE   link in FILE and add its symbols to lt_preloaded_symbols
+  -export-dynamic   allow symbols from OUTPUT-FILE to be resolved with dlsym(3)
+  -export-symbols SYMFILE
+                    try to export only the symbols listed in SYMFILE
+  -export-symbols-regex REGEX
+                    try to export only the symbols matching REGEX
+  -LLIBDIR          search LIBDIR for required installed libraries
+  -lNAME            OUTPUT-FILE requires the installed library libNAME
+  -module           build a library that can be dlopened
+  -no-fast-install  disable the fast-install mode
+  -no-install       link a non-installable executable
+  -no-undefined     declare that a library does not refer to external symbols
+  -o OUTPUT-FILE    create OUTPUT-FILE from the specified objects
+  -objectlist FILE  Use a list of object files found in FILE to specify objects
+  -precious-files-regex REGEX
+                    don't remove output files matching REGEX
+  -release RELEASE  specify package release information
+  -rpath LIBDIR     the created library will eventually be installed in LIBDIR
+  -R[ ]LIBDIR       add LIBDIR to the runtime path of programs and libraries
+  -shared           only do dynamic linking of libtool libraries
+  -shrext SUFFIX    override the standard shared library file extension
+  -static           do not do any dynamic linking of uninstalled libtool libraries
+  -static-libtool-libs
+                    do not do any dynamic linking of libtool libraries
+  -version-info CURRENT[:REVISION[:AGE]]
+                    specify library version info [each variable defaults to 0]
+  -weak LIBNAME     declare that the target provides the LIBNAME interface
+  -Wc,FLAG
+  -Xcompiler FLAG   pass linker-specific FLAG directly to the compiler
+  -Wl,FLAG
+  -Xlinker FLAG     pass linker-specific FLAG directly to the linker
+  -XCClinker FLAG   pass link-specific FLAG to the compiler driver (CC)
+
+All other options (arguments beginning with \`-') are ignored.
+
+Every other argument is treated as a filename.  Files ending in \`.la' are
+treated as uninstalled libtool libraries, other files are standard or library
+object files.
+
+If the OUTPUT-FILE ends in \`.la', then a libtool library is created,
+only library objects (\`.lo' files) may be specified, and \`-rpath' is
+required, except when creating a convenience library.
+
+If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created
+using \`ar' and \`ranlib', or on Windows using \`lib'.
+
+If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file
+is created, otherwise an executable program is created."
+        ;;
+
+      uninstall)
+        $ECHO \
+"Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE...
+
+Remove libraries from an installation directory.
+
+RM is the name of the program to use to delete files associated with each FILE
+(typically \`/bin/rm').  RM-OPTIONS are options (such as \`-f') to be passed
+to RM.
+
+If FILE is a libtool library, all the files associated with it are deleted.
+Otherwise, only FILE itself is deleted using RM."
+        ;;
+
+      *)
+        func_fatal_help "invalid operation mode \`$mode'"
+        ;;
+    esac
+
+    echo
+    $ECHO "Try \`$progname --help' for more information about other modes."
+}
+
+# Now that we've collected a possible --mode arg, show help if necessary
+if $opt_help; then
+  if test "$opt_help" = :; then
+    func_mode_help
+  else
+    {
+      func_help noexit
+      for mode in compile link execute install finish uninstall clean; do
+	func_mode_help
+      done
+    } | sed -n '1p; 2,$s/^Usage:/  or: /p'
+    {
+      func_help noexit
+      for mode in compile link execute install finish uninstall clean; do
+	echo
+	func_mode_help
+      done
+    } |
+    sed '1d
+      /^When reporting/,/^Report/{
+	H
+	d
+      }
+      $x
+      /information about other modes/d
+      /more detailed .*MODE/d
+      s/^Usage:.*--mode=\([^ ]*\) .*/Description of \1 mode:/'
+  fi
+  exit $?
+fi
+
+
+# func_mode_execute arg...
+func_mode_execute ()
+{
+    $opt_debug
+    # The first argument is the command name.
+    cmd="$nonopt"
+    test -z "$cmd" && \
+      func_fatal_help "you must specify a COMMAND"
+
+    # Handle -dlopen flags immediately.
+    for file in $execute_dlfiles; do
+      test -f "$file" \
+	|| func_fatal_help "\`$file' is not a file"
+
+      dir=
+      case $file in
+      *.la)
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$file" \
+	  || func_fatal_help "\`$lib' is not a valid libtool archive"
+
+	# Read the libtool library.
+	dlname=
+	library_names=
+	func_source "$file"
+
+	# Skip this library if it cannot be dlopened.
+	if test -z "$dlname"; then
+	  # Warn if it was a shared library.
+	  test -n "$library_names" && \
+	    func_warning "\`$file' was not linked with \`-export-dynamic'"
+	  continue
+	fi
+
+	func_dirname "$file" "" "."
+	dir="$func_dirname_result"
+
+	if test -f "$dir/$objdir/$dlname"; then
+	  dir="$dir/$objdir"
+	else
+	  if test ! -f "$dir/$dlname"; then
+	    func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'"
+	  fi
+	fi
+	;;
+
+      *.lo)
+	# Just add the directory containing the .lo file.
+	func_dirname "$file" "" "."
+	dir="$func_dirname_result"
+	;;
+
+      *)
+	func_warning "\`-dlopen' is ignored for non-libtool libraries and objects"
+	continue
+	;;
+      esac
+
+      # Get the absolute pathname.
+      absdir=`cd "$dir" && pwd`
+      test -n "$absdir" && dir="$absdir"
+
+      # Now add the directory to shlibpath_var.
+      if eval "test -z \"\$$shlibpath_var\""; then
+	eval "$shlibpath_var=\"\$dir\""
+      else
+	eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
+      fi
+    done
+
+    # This variable tells wrapper scripts just to set shlibpath_var
+    # rather than running their programs.
+    libtool_execute_magic="$magic"
+
+    # Check if any of the arguments is a wrapper script.
+    args=
+    for file
+    do
+      case $file in
+      -* | *.la | *.lo ) ;;
+      *)
+	# Do a test to see if this is really a libtool program.
+	if func_ltwrapper_script_p "$file"; then
+	  func_source "$file"
+	  # Transform arg to wrapped name.
+	  file="$progdir/$program"
+	elif func_ltwrapper_executable_p "$file"; then
+	  func_ltwrapper_scriptname "$file"
+	  func_source "$func_ltwrapper_scriptname_result"
+	  # Transform arg to wrapped name.
+	  file="$progdir/$program"
+	fi
+	;;
+      esac
+      # Quote arguments (to preserve shell metacharacters).
+      func_quote_for_eval "$file"
+      args="$args $func_quote_for_eval_result"
+    done
+
+    if test "X$opt_dry_run" = Xfalse; then
+      if test -n "$shlibpath_var"; then
+	# Export the shlibpath_var.
+	eval "export $shlibpath_var"
+      fi
+
+      # Restore saved environment variables
+      for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
+      do
+	eval "if test \"\${save_$lt_var+set}\" = set; then
+                $lt_var=\$save_$lt_var; export $lt_var
+	      else
+		$lt_unset $lt_var
+	      fi"
+      done
+
+      # Now prepare to actually exec the command.
+      exec_cmd="\$cmd$args"
+    else
+      # Display what would be done.
+      if test -n "$shlibpath_var"; then
+	eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\""
+	echo "export $shlibpath_var"
+      fi
+      $ECHO "$cmd$args"
+      exit $EXIT_SUCCESS
+    fi
+}
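+# Execute-mode sketch (program names illustrative):
+#   libtool --mode=execute gdb prog
+# exports $shlibpath_var (e.g. LD_LIBRARY_PATH) for any -dlopen'ed libraries
+# and substitutes wrapper scripts with their uninstalled binaries before exec.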
+
+test "$mode" = execute && func_mode_execute ${1+"$@"}
+
+
+# func_mode_finish arg...
+func_mode_finish ()
+{
+    $opt_debug
+    libdirs="$nonopt"
+    admincmds=
+
+    if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
+      for dir
+      do
+	libdirs="$libdirs $dir"
+      done
+
+      for libdir in $libdirs; do
+	if test -n "$finish_cmds"; then
+	  # Do each command in the finish commands.
+	  func_execute_cmds "$finish_cmds" 'admincmds="$admincmds
+'"$cmd"'"'
+	fi
+	if test -n "$finish_eval"; then
+	  # Do the single finish_eval.
+	  eval cmds=\"$finish_eval\"
+	  $opt_dry_run || eval "$cmds" || admincmds="$admincmds
+       $cmds"
+	fi
+      done
+    fi
+
+    # Exit here if they wanted silent mode.
+    $opt_silent && exit $EXIT_SUCCESS
+
+    echo "----------------------------------------------------------------------"
+    echo "Libraries have been installed in:"
+    for libdir in $libdirs; do
+      $ECHO "   $libdir"
+    done
+    echo
+    echo "If you ever happen to want to link against installed libraries"
+    echo "in a given directory, LIBDIR, you must either use libtool, and"
+    echo "specify the full pathname of the library, or use the \`-LLIBDIR'"
+    echo "flag during linking and do at least one of the following:"
+    if test -n "$shlibpath_var"; then
+      echo "   - add LIBDIR to the \`$shlibpath_var' environment variable"
+      echo "     during execution"
+    fi
+    if test -n "$runpath_var"; then
+      echo "   - add LIBDIR to the \`$runpath_var' environment variable"
+      echo "     during linking"
+    fi
+    if test -n "$hardcode_libdir_flag_spec"; then
+      libdir=LIBDIR
+      eval flag=\"$hardcode_libdir_flag_spec\"
+
+      $ECHO "   - use the \`$flag' linker flag"
+    fi
+    if test -n "$admincmds"; then
+      $ECHO "   - have your system administrator run these commands:$admincmds"
+    fi
+    if test -f /etc/ld.so.conf; then
+      echo "   - have your system administrator add LIBDIR to \`/etc/ld.so.conf'"
+    fi
+    echo
+
+    echo "See any operating system documentation about shared libraries for"
+    case $host in
+      solaris2.[6789]|solaris2.1[0-9])
+        echo "more information, such as the ld(1), crle(1) and ld.so(8) manual"
+	echo "pages."
+	;;
+      *)
+        echo "more information, such as the ld(1) and ld.so(8) manual pages."
+        ;;
+    esac
+    echo "----------------------------------------------------------------------"
+    exit $EXIT_SUCCESS
+}
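+# Finish-mode sketch (directory illustrative): `libtool --mode=finish
+# /usr/local/lib' runs the configured finish_cmds for that directory (often
+# an ldconfig invocation on ELF systems) and prints the advice above.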
+
+test "$mode" = finish && func_mode_finish ${1+"$@"}
+
+
+# func_mode_install arg...
+func_mode_install ()
+{
+    $opt_debug
+    # There may be an optional sh(1) argument at the beginning of
+    # install_prog (especially on Windows NT).
+    if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh ||
+       # Allow the use of GNU shtool's install command.
+       case $nonopt in *shtool*) :;; *) false;; esac; then
+      # Aesthetically quote it.
+      func_quote_for_eval "$nonopt"
+      install_prog="$func_quote_for_eval_result "
+      arg=$1
+      shift
+    else
+      install_prog=
+      arg=$nonopt
+    fi
+
+    # The real first argument should be the name of the installation program.
+    # Aesthetically quote it.
+    func_quote_for_eval "$arg"
+    install_prog="$install_prog$func_quote_for_eval_result"
+    install_shared_prog=$install_prog
+    case " $install_prog " in
+      *[\\\ /]cp\ *) install_cp=: ;;
+      *) install_cp=false ;;
+    esac
+
+    # We need to accept at least all the BSD install flags.
+    dest=
+    files=
+    opts=
+    prev=
+    install_type=
+    isdir=no
+    stripme=
+    no_mode=:
+    for arg
+    do
+      arg2=
+      if test -n "$dest"; then
+	files="$files $dest"
+	dest=$arg
+	continue
+      fi
+
+      case $arg in
+      -d) isdir=yes ;;
+      -f)
+	if $install_cp; then :; else
+	  prev=$arg
+	fi
+	;;
+      -g | -m | -o)
+	prev=$arg
+	;;
+      -s)
+	stripme=" -s"
+	continue
+	;;
+      -*)
+	;;
+      *)
+	# If the previous option needed an argument, then skip it.
+	if test -n "$prev"; then
+	  if test "x$prev" = x-m && test -n "$install_override_mode"; then
+	    arg2=$install_override_mode
+	    no_mode=false
+	  fi
+	  prev=
+	else
+	  dest=$arg
+	  continue
+	fi
+	;;
+      esac
+
+      # Aesthetically quote the argument.
+      func_quote_for_eval "$arg"
+      install_prog="$install_prog $func_quote_for_eval_result"
+      if test -n "$arg2"; then
+	func_quote_for_eval "$arg2"
+      fi
+      install_shared_prog="$install_shared_prog $func_quote_for_eval_result"
+    done
+
+    test -z "$install_prog" && \
+      func_fatal_help "you must specify an install program"
+
+    test -n "$prev" && \
+      func_fatal_help "the \`$prev' option requires an argument"
+
+    if test -n "$install_override_mode" && $no_mode; then
+      if $install_cp; then :; else
+	func_quote_for_eval "$install_override_mode"
+	install_shared_prog="$install_shared_prog -m $func_quote_for_eval_result"
+      fi
+    fi
+
+    if test -z "$files"; then
+      if test -z "$dest"; then
+	func_fatal_help "no file or destination specified"
+      else
+	func_fatal_help "you must specify a destination"
+      fi
+    fi
+
+    # Strip any trailing slash from the destination.
+    func_stripname '' '/' "$dest"
+    dest=$func_stripname_result
+
+    # Check to see that the destination is a directory.
+    test -d "$dest" && isdir=yes
+    if test "$isdir" = yes; then
+      destdir="$dest"
+      destname=
+    else
+      func_dirname_and_basename "$dest" "" "."
+      destdir="$func_dirname_result"
+      destname="$func_basename_result"
+
+      # Not a directory, so check to see that there is only one file specified.
+      set dummy $files; shift
+      test "$#" -gt 1 && \
+	func_fatal_help "\`$dest' is not a directory"
+    fi
+    case $destdir in
+    [\\/]* | [A-Za-z]:[\\/]*) ;;
+    *)
+      for file in $files; do
+	case $file in
+	*.lo) ;;
+	*)
+	  func_fatal_help "\`$destdir' must be an absolute directory name"
+	  ;;
+	esac
+      done
+      ;;
+    esac
+
+    # This variable tells wrapper scripts just to set variables rather
+    # than running their programs.
+    libtool_install_magic="$magic"
+
+    staticlibs=
+    future_libdirs=
+    current_libdirs=
+    for file in $files; do
+
+      # Do each installation.
+      case $file in
+      *.$libext)
+	# Do the static libraries later.
+	staticlibs="$staticlibs $file"
+	;;
+
+      *.la)
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$file" \
+	  || func_fatal_help "\`$file' is not a valid libtool archive"
+
+	library_names=
+	old_library=
+	relink_command=
+	func_source "$file"
+
+	# Add the libdir to current_libdirs if it is the destination.
+	if test "X$destdir" = "X$libdir"; then
+	  case "$current_libdirs " in
+	  *" $libdir "*) ;;
+	  *) current_libdirs="$current_libdirs $libdir" ;;
+	  esac
+	else
+	  # Note the libdir as a future libdir.
+	  case "$future_libdirs " in
+	  *" $libdir "*) ;;
+	  *) future_libdirs="$future_libdirs $libdir" ;;
+	  esac
+	fi
+
+	func_dirname "$file" "/" ""
+	dir="$func_dirname_result"
+	dir="$dir$objdir"
+
+	if test -n "$relink_command"; then
+	  # Determine the prefix the user has applied to our future dir.
+	  inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"`
+
+	  # Don't allow the user to place us outside of our expected
+	  # location because this prevents finding dependent libraries that
+	  # are installed to the same prefix.
+	  # At present, this check doesn't affect windows .dll's that
+	  # are installed into $libdir/../bin (currently, that works fine)
+	  # but it's something to keep an eye on.
+	  test "$inst_prefix_dir" = "$destdir" && \
+	    func_fatal_error "error: cannot install \`$file' to a directory not ending in $libdir"
+
+	  if test -n "$inst_prefix_dir"; then
+	    # Stick the inst_prefix_dir data into the link command.
+	    relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
+	  else
+	    relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"`
+	  fi
+
+	  func_warning "relinking \`$file'"
+	  func_show_eval "$relink_command" \
+	    'func_fatal_error "error: relink \`$file'\'' with the above command before installing it"'
+	fi
+
+	# See the names of the shared library.
+	set dummy $library_names; shift
+	if test -n "$1"; then
+	  realname="$1"
+	  shift
+
+	  srcname="$realname"
+	  test -n "$relink_command" && srcname="$realname"T
+
+	  # Install the shared library and build the symlinks.
+	  func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \
+	      'exit $?'
+	  tstripme="$stripme"
+	  case $host_os in
+	  cygwin* | mingw* | pw32* | cegcc*)
+	    case $realname in
+	    *.dll.a)
+	      tstripme=""
+	      ;;
+	    esac
+	    ;;
+	  esac
+	  if test -n "$tstripme" && test -n "$striplib"; then
+	    func_show_eval "$striplib $destdir/$realname" 'exit $?'
+	  fi
+
+	  if test "$#" -gt 0; then
+	    # Delete the old symlinks, and create new ones.
+	    # Try `ln -sf' first, because the `ln' binary might depend on
+	    # the symlink we replace!  Solaris /bin/ln does not understand -f,
+	    # so we also need to try rm && ln -s.
+	    for linkname
+	    do
+	      test "$linkname" != "$realname" \
+		&& func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })"
+	    done
+	  fi
+
+	  # Do each command in the postinstall commands.
+	  lib="$destdir/$realname"
+	  func_execute_cmds "$postinstall_cmds" 'exit $?'
+	fi
+
+	# Install the pseudo-library for information purposes.
+	func_basename "$file"
+	name="$func_basename_result"
+	instname="$dir/$name"i
+	func_show_eval "$install_prog $instname $destdir/$name" 'exit $?'
+
+	# Maybe install the static library, too.
+	test -n "$old_library" && staticlibs="$staticlibs $dir/$old_library"
+	;;
+
+      *.lo)
+	# Install (i.e. copy) a libtool object.
+
+	# Figure out destination file name, if it wasn't already specified.
+	if test -n "$destname"; then
+	  destfile="$destdir/$destname"
+	else
+	  func_basename "$file"
+	  destfile="$func_basename_result"
+	  destfile="$destdir/$destfile"
+	fi
+
+	# Deduce the name of the destination old-style object file.
+	case $destfile in
+	*.lo)
+	  func_lo2o "$destfile"
+	  staticdest=$func_lo2o_result
+	  ;;
+	*.$objext)
+	  staticdest="$destfile"
+	  destfile=
+	  ;;
+	*)
+	  func_fatal_help "cannot copy a libtool object to \`$destfile'"
+	  ;;
+	esac
+
+	# Install the libtool object if requested.
+	test -n "$destfile" && \
+	  func_show_eval "$install_prog $file $destfile" 'exit $?'
+
+	# Install the old object if enabled.
+	if test "$build_old_libs" = yes; then
+	  # Deduce the name of the old-style object file.
+	  func_lo2o "$file"
+	  staticobj=$func_lo2o_result
+	  func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?'
+	fi
+	exit $EXIT_SUCCESS
+	;;
+
+      *)
+	# Figure out destination file name, if it wasn't already specified.
+	if test -n "$destname"; then
+	  destfile="$destdir/$destname"
+	else
+	  func_basename "$file"
+	  destfile="$func_basename_result"
+	  destfile="$destdir/$destfile"
+	fi
+
+	# If the file is missing, and there is a .exe on the end, strip it
+	# because it is most likely a libtool script we actually want to
+	# install
+	stripped_ext=""
+	case $file in
+	  *.exe)
+	    if test ! -f "$file"; then
+	      func_stripname '' '.exe' "$file"
+	      file=$func_stripname_result
+	      stripped_ext=".exe"
+	    fi
+	    ;;
+	esac
+
+	# Do a test to see if this is really a libtool program.
+	case $host in
+	*cygwin* | *mingw*)
+	    if func_ltwrapper_executable_p "$file"; then
+	      func_ltwrapper_scriptname "$file"
+	      wrapper=$func_ltwrapper_scriptname_result
+	    else
+	      func_stripname '' '.exe' "$file"
+	      wrapper=$func_stripname_result
+	    fi
+	    ;;
+	*)
+	    wrapper=$file
+	    ;;
+	esac
+	if func_ltwrapper_script_p "$wrapper"; then
+	  notinst_deplibs=
+	  relink_command=
+
+	  func_source "$wrapper"
+
+	  # Check the variables that should have been set.
+	  test -z "$generated_by_libtool_version" && \
+	    func_fatal_error "invalid libtool wrapper script \`$wrapper'"
+
+	  finalize=yes
+	  for lib in $notinst_deplibs; do
+	    # Check to see that each library is installed.
+	    libdir=
+	    if test -f "$lib"; then
+	      func_source "$lib"
+	    fi
+	    libfile="$libdir/"`$ECHO "$lib" | $SED 's%^.*/%%g'` ### testsuite: skip nested quoting test
+	    if test -n "$libdir" && test ! -f "$libfile"; then
+	      func_warning "\`$lib' has not been installed in \`$libdir'"
+	      finalize=no
+	    fi
+	  done
+
+	  relink_command=
+	  func_source "$wrapper"
+
+	  outputname=
+	  if test "$fast_install" = no && test -n "$relink_command"; then
+	    $opt_dry_run || {
+	      if test "$finalize" = yes; then
+	        tmpdir=`func_mktempdir`
+		func_basename "$file$stripped_ext"
+		file="$func_basename_result"
+	        outputname="$tmpdir/$file"
+	        # Replace the output file specification.
+	        relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'`
+
+	        $opt_silent || {
+	          func_quote_for_expand "$relink_command"
+		  eval "func_echo $func_quote_for_expand_result"
+	        }
+	        if eval "$relink_command"; then :
+	          else
+		  func_error "error: relink \`$file' with the above command before installing it"
+		  $opt_dry_run || ${RM}r "$tmpdir"
+		  continue
+	        fi
+	        file="$outputname"
+	      else
+	        func_warning "cannot relink \`$file'"
+	      fi
+	    }
+	  else
+	    # Install the binary that we compiled earlier.
+	    file=`$ECHO "$file$stripped_ext" | $SED "s%\([^/]*\)$%$objdir/\1%"`
+	  fi
+	fi
+
+	# remove .exe since cygwin /usr/bin/install will append another
+	# one anyway
+	case $install_prog,$host in
+	*/usr/bin/install*,*cygwin*)
+	  case $file:$destfile in
+	  *.exe:*.exe)
+	    # this is ok
+	    ;;
+	  *.exe:*)
+	    destfile=$destfile.exe
+	    ;;
+	  *:*.exe)
+	    func_stripname '' '.exe' "$destfile"
+	    destfile=$func_stripname_result
+	    ;;
+	  esac
+	  ;;
+	esac
+	func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?'
+	$opt_dry_run || if test -n "$outputname"; then
+	  ${RM}r "$tmpdir"
+	fi
+	;;
+      esac
+    done
+
+    for file in $staticlibs; do
+      func_basename "$file"
+      name="$func_basename_result"
+
+      # Set up the ranlib parameters.
+      oldlib="$destdir/$name"
+
+      func_show_eval "$install_prog \$file \$oldlib" 'exit $?'
+
+      if test -n "$stripme" && test -n "$old_striplib"; then
+	func_show_eval "$old_striplib $oldlib" 'exit $?'
+      fi
+
+      # Do each command in the postinstall commands.
+      func_execute_cmds "$old_postinstall_cmds" 'exit $?'
+    done
+
+    test -n "$future_libdirs" && \
+      func_warning "remember to run \`$progname --finish$future_libdirs'"
+
+    if test -n "$current_libdirs"; then
+      # Maybe just do a dry run.
+      $opt_dry_run && current_libdirs=" -n$current_libdirs"
+      exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs'
+    else
+      exit $EXIT_SUCCESS
+    fi
+}
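+# Install-mode sketch (paths illustrative):
+#   libtool --mode=install /usr/bin/install -c libfoo.la /usr/local/lib/libfoo.la
+# installs the shared library and its symlinks, the `.la' file, and any
+# static archive, then arranges for `--finish /usr/local/lib' to run.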
+
+test "$mode" = install && func_mode_install ${1+"$@"}
+
+
+# func_generate_dlsyms outputname originator pic_p
+# Extract symbols from dlprefiles and create ${outputname}S.o with
+# a dlpreopen symbol table.
+func_generate_dlsyms ()
+{
+    $opt_debug
+    my_outputname="$1"
+    my_originator="$2"
+    my_pic_p="${3-no}"
+    my_prefix=`$ECHO "$my_originator" | sed 's%[^a-zA-Z0-9]%_%g'`
+    my_dlsyms=
+
+    if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+      if test -n "$NM" && test -n "$global_symbol_pipe"; then
+	my_dlsyms="${my_outputname}S.c"
+      else
+	func_error "not configured to extract global symbols from dlpreopened files"
+      fi
+    fi
+
+    if test -n "$my_dlsyms"; then
+      case $my_dlsyms in
+      "") ;;
+      *.c)
+	# Discover the nlist of each of the dlfiles.
+	nlist="$output_objdir/${my_outputname}.nm"
+
+	func_show_eval "$RM $nlist ${nlist}S ${nlist}T"
+
+	# Parse the name list into a source file.
+	func_verbose "creating $output_objdir/$my_dlsyms"
+
+	$opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\
+/* $my_dlsyms - symbol resolution table for \`$my_outputname' dlsym emulation. */
+/* Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION */
+
+#ifdef __cplusplus
+extern \"C\" {
+#endif
+
+#if defined(__GNUC__) && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4))
+#pragma GCC diagnostic ignored \"-Wstrict-prototypes\"
+#endif
+
+/* External symbol declarations for the compiler. */\
+"
+
+	if test "$dlself" = yes; then
+	  func_verbose "generating symbol list for \`$output'"
+
+	  $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist"
+
+	  # Add our own program objects to the symbol list.
+	  progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	  for progfile in $progfiles; do
+	    func_verbose "extracting global C symbols from \`$progfile'"
+	    $opt_dry_run || eval "$NM $progfile | $global_symbol_pipe >> '$nlist'"
+	  done
+
+	  if test -n "$exclude_expsyms"; then
+	    $opt_dry_run || {
+	      eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	    }
+	  fi
+
+	  if test -n "$export_symbols_regex"; then
+	    $opt_dry_run || {
+	      eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	    }
+	  fi
+
+	  # Prepare the list of exported symbols
+	  if test -z "$export_symbols"; then
+	    export_symbols="$output_objdir/$outputname.exp"
+	    $opt_dry_run || {
+	      $RM $export_symbols
+	      eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
+	      case $host in
+	      *cygwin* | *mingw* | *cegcc* )
+                eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+                eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"'
+	        ;;
+	      esac
+	    }
+	  else
+	    $opt_dry_run || {
+	      eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"'
+	      eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T'
+	      eval '$MV "$nlist"T "$nlist"'
+	      case $host in
+	        *cygwin* | *mingw* | *cegcc* )
+	          eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
+	          eval 'cat "$nlist" >> "$output_objdir/$outputname.def"'
+	          ;;
+	      esac
+	    }
+	  fi
+	fi
+
+	for dlprefile in $dlprefiles; do
+	  func_verbose "extracting global C symbols from \`$dlprefile'"
+	  func_basename "$dlprefile"
+	  name="$func_basename_result"
+	  $opt_dry_run || {
+	    eval '$ECHO ": $name " >> "$nlist"'
+	    eval "$NM $dlprefile 2>/dev/null | $global_symbol_pipe >> '$nlist'"
+	  }
+	done
+
+	$opt_dry_run || {
+	  # Make sure we have at least an empty file.
+	  test -f "$nlist" || : > "$nlist"
+
+	  if test -n "$exclude_expsyms"; then
+	    $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
+	    $MV "$nlist"T "$nlist"
+	  fi
+
+	  # Try sorting and uniquifying the output.
+	  if $GREP -v "^: " < "$nlist" |
+	      if sort -k 3 </dev/null >/dev/null 2>&1; then
+		sort -k 3
+	      else
+		sort +2
+	      fi |
+	      uniq > "$nlist"S; then
+	    :
+	  else
+	    $GREP -v "^: " < "$nlist" > "$nlist"S
+	  fi
+
+	  if test -f "$nlist"S; then
+	    eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"'
+	  else
+	    echo '/* NONE */' >> "$output_objdir/$my_dlsyms"
+	  fi
+
+	  echo >> "$output_objdir/$my_dlsyms" "\
+
+/* The mapping between symbol names and symbols.  */
+typedef struct {
+  const char *name;
+  void *address;
+} lt_dlsymlist;
+"
+	  case $host in
+	  *cygwin* | *mingw* | *cegcc* )
+	    echo >> "$output_objdir/$my_dlsyms" "\
+/* DATA imports from DLLs on WIN32 can't be const, because
+   runtime relocations are performed -- see ld's documentation
+   on pseudo-relocs.  */"
+	    lt_dlsym_const= ;;
+	  *osf5*)
+	    echo >> "$output_objdir/$my_dlsyms" "\
+/* This system does not cope well with relocations in const data */"
+	    lt_dlsym_const= ;;
+	  *)
+	    lt_dlsym_const=const ;;
+	  esac
+
+	  echo >> "$output_objdir/$my_dlsyms" "\
+extern $lt_dlsym_const lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[];
+$lt_dlsym_const lt_dlsymlist
+lt_${my_prefix}_LTX_preloaded_symbols[] =
+{\
+  { \"$my_originator\", (void *) 0 },"
+
+	  case $need_lib_prefix in
+	  no)
+	    eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms"
+	    ;;
+	  *)
+	    eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms"
+	    ;;
+	  esac
+	  echo >> "$output_objdir/$my_dlsyms" "\
+  {0, (void *) 0}
+};
+
+/* This works around a problem in the FreeBSD linker */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+  return lt_${my_prefix}_LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif\
+"
+	} # !$opt_dry_run
+
+	pic_flag_for_symtable=
+	case "$compile_command " in
+	*" -static "*) ;;
+	*)
+	  case $host in
+	  # compiling the symbol table file with pic_flag works around
+	  # a FreeBSD bug that causes programs to crash when -lm is
+	  # linked before any other PIC object.  But we must not use
+	  # pic_flag when linking with -static.  The problem exists in
+	  # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1.
+	  *-*-freebsd2*|*-*-freebsd3.0*|*-*-freebsdelf3.0*)
+	    pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;;
+	  *-*-hpux*)
+	    pic_flag_for_symtable=" $pic_flag"  ;;
+	  *)
+	    if test "X$my_pic_p" != Xno; then
+	      pic_flag_for_symtable=" $pic_flag"
+	    fi
+	    ;;
+	  esac
+	  ;;
+	esac
+	symtab_cflags=
+	for arg in $LTCFLAGS; do
+	  case $arg in
+	  -pie | -fpie | -fPIE) ;;
+	  *) symtab_cflags="$symtab_cflags $arg" ;;
+	  esac
+	done
+
+	# Now compile the dynamic symbol file.
+	func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?'
+
+	# Clean up the generated files.
+	func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T"'
+
+	# Transform the symbol file into the correct name.
+	symfileobj="$output_objdir/${my_outputname}S.$objext"
+	case $host in
+	*cygwin* | *mingw* | *cegcc* )
+	  if test -f "$output_objdir/$my_outputname.def"; then
+	    compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+	    finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
+	  else
+	    compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	    finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  fi
+	  ;;
+	*)
+	  compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
+	  ;;
+	esac
+	;;
+      *)
+	func_fatal_error "unknown suffix for \`$my_dlsyms'"
+	;;
+      esac
+    else
+      # We keep going just in case the user didn't refer to
+      # lt_preloaded_symbols.  The linker will fail if global_symbol_pipe
+      # really was required.
+
+      # Nullify the symbol file.
+      compile_command=`$ECHO "$compile_command" | $SED "s% @SYMFILE@%%"`
+      finalize_command=`$ECHO "$finalize_command" | $SED "s% @SYMFILE@%%"`
+    fi
+}
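+# Outline of the generated ${my_outputname}S.c (symbol names illustrative;
+# const-ness depends on the host, as chosen above):
+#   const lt_dlsymlist lt_<prefix>_LTX_preloaded_symbols[] = {
+#     { "<originator>", (void *) 0 },
+#     { "some_symbol", (void *) &some_symbol },
+#     {0, (void *) 0} };
+# libltdl's dlpreopen support consults this table instead of dlopen(3).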
+
+# func_win32_libid arg
+# return the library type of file 'arg'
+#
+# Need a lot of goo to handle *both* DLLs and import libs
+# Has to be a shell function in order to 'eat' the argument
+# that is supplied when $file_magic_command is called.
+# Despite the name, this also deals with 64-bit binaries.
+func_win32_libid ()
+{
+  $opt_debug
+  win32_libid_type="unknown"
+  win32_fileres=`file -L $1 2>/dev/null`
+  case $win32_fileres in
+  *ar\ archive\ import\ library*) # definitely import
+    win32_libid_type="x86 archive import"
+    ;;
+  *ar\ archive*) # could be an import, or static
+    # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD.
+    if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null |
+       $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then
+      win32_nmres=`eval $NM -f posix -A $1 |
+	$SED -n -e '
+	    1,100{
+		/ I /{
+		    s,.*,import,
+		    p
+		    q
+		}
+	    }'`
+      case $win32_nmres in
+      import*)  win32_libid_type="x86 archive import";;
+      *)        win32_libid_type="x86 archive static";;
+      esac
+    fi
+    ;;
+  *DLL*)
+    win32_libid_type="x86 DLL"
+    ;;
+  *executable*) # but shell scripts are "executable" too...
+    case $win32_fileres in
+    *MS\ Windows\ PE\ Intel*)
+      win32_libid_type="x86 DLL"
+      ;;
+    esac
+    ;;
+  esac
+  $ECHO "$win32_libid_type"
+}
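+# Sample classifications (file names hypothetical):
+#   func_win32_libid libfoo.dll.a  -> "x86 archive import"
+#   func_win32_libid foo.dll       -> "x86 DLL"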
+
+
+
+# func_extract_an_archive dir oldlib
+func_extract_an_archive ()
+{
+    $opt_debug
+    f_ex_an_ar_dir="$1"; shift
+    f_ex_an_ar_oldlib="$1"
+    if test "$lock_old_archive_extraction" = yes; then
+      lockfile=$f_ex_an_ar_oldlib.lock
+      until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
+	func_echo "Waiting for $lockfile to be removed"
+	sleep 2
+      done
+    fi
+    func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \
+		   'stat=$?; rm -f "$lockfile"; exit $stat'
+    if test "$lock_old_archive_extraction" = yes; then
+      $opt_dry_run || rm -f "$lockfile"
+    fi
+    if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then
+     :
+    else
+      func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib"
+    fi
+}
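+# Illustrative call (hypothetical paths; $AR comes from the libtool
+# configuration):
+#   func_extract_an_archive "$my_gentop/libfoo" "`pwd`/libfoo.a"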
+
+
+# func_extract_archives gentop oldlib ...
+func_extract_archives ()
+{
+    $opt_debug
+    my_gentop="$1"; shift
+    my_oldlibs=${1+"$@"}
+    my_oldobjs=""
+    my_xlib=""
+    my_xabs=""
+    my_xdir=""
+
+    for my_xlib in $my_oldlibs; do
+      # Extract the objects.
+      case $my_xlib in
+	[\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;;
+	*) my_xabs=`pwd`"/$my_xlib" ;;
+      esac
+      func_basename "$my_xlib"
+      my_xlib="$func_basename_result"
+      my_xlib_u=$my_xlib
+      while :; do
+        case " $extracted_archives " in
+	*" $my_xlib_u "*)
+	  func_arith $extracted_serial + 1
+	  extracted_serial=$func_arith_result
+	  my_xlib_u=lt$extracted_serial-$my_xlib ;;
+	*) break ;;
+	esac
+      done
+      extracted_archives="$extracted_archives $my_xlib_u"
+      my_xdir="$my_gentop/$my_xlib_u"
+
+      func_mkdir_p "$my_xdir"
+
+      case $host in
+      *-darwin*)
+	func_verbose "Extracting $my_xabs"
+	# Do not bother doing anything if just a dry run
+	$opt_dry_run || {
+	  darwin_orig_dir=`pwd`
+	  cd $my_xdir || exit $?
+	  darwin_archive=$my_xabs
+	  darwin_curdir=`pwd`
+	  darwin_base_archive=`basename "$darwin_archive"`
+	  darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true`
+	  if test -n "$darwin_arches"; then
+	    darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'`
+	    darwin_arch=
+	    func_verbose "$darwin_base_archive has multiple architectures $darwin_arches"
+	    for darwin_arch in  $darwin_arches ; do
+	      func_mkdir_p "unfat-$$/${darwin_base_archive}-${darwin_arch}"
+	      $LIPO -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}"
+	      cd "unfat-$$/${darwin_base_archive}-${darwin_arch}"
+	      func_extract_an_archive "`pwd`" "${darwin_base_archive}"
+	      cd "$darwin_curdir"
+	      $RM "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}"
+	    done # $darwin_arches
+            ## Okay now we've a bunch of thin objects, gotta fatten them up :)
+	    darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$basename" | sort -u`
+	    darwin_file=
+	    darwin_files=
+	    for darwin_file in $darwin_filelist; do
+	      darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP`
+	      $LIPO -create -output "$darwin_file" $darwin_files
+	    done # $darwin_filelist
+	    $RM -rf unfat-$$
+	    cd "$darwin_orig_dir"
+	  else
+	    cd $darwin_orig_dir
+	    func_extract_an_archive "$my_xdir" "$my_xabs"
+	  fi # $darwin_arches
+	} # !$opt_dry_run
+	;;
+      *)
+        func_extract_an_archive "$my_xdir" "$my_xabs"
+	;;
+      esac
+      my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP`
+    done
+
+    func_extract_archives_result="$my_oldobjs"
+}
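+# Illustrative use (hypothetical archives): after
+#   func_extract_archives "$gentop" libfoo.a libbar.a
+# $func_extract_archives_result holds the space-separated list of
+# objects extracted under $gentop.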
+
+
+# func_emit_wrapper [arg=no]
+#
+# Emit a libtool wrapper script on stdout.
+# Don't directly open a file because we may want to
+# incorporate the script contents within a cygwin/mingw
+# wrapper executable.  Must ONLY be called from within
+# func_mode_link because it depends on a number of variables
+# set therein.
+#
+# ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR
+# variable will take.  If 'yes', then the emitted script
+# will assume that the directory in which it is stored is
+# the $objdir directory.  This is a cygwin/mingw-specific
+# behavior.
+func_emit_wrapper ()
+{
+	func_emit_wrapper_arg1=${1-no}
+
+	$ECHO "\
+#! $SHELL
+
+# $output - temporary wrapper script for $objdir/$outputname
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# The $output program cannot be directly executed until all the libtool
+# libraries that it depends on are installed.
+#
+# This wrapper script should never be moved out of the build directory.
+# If it is, it will not operate correctly.
+
+# Sed substitution that helps us do robust quoting.  It backslashifies
+# metacharacters that are still active within double-quoted strings.
+sed_quote_subst='$sed_quote_subst'
+
+# Be Bourne compatible
+if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then
+  emulate sh
+  NULLCMD=:
+  # Zsh 3.x and 4.x perform word splitting on \${1+\"\$@\"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '\${1+\"\$@\"}'='\"\$@\"'
+  setopt NO_GLOB_SUBST
+else
+  case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac
+fi
+BIN_SH=xpg4; export BIN_SH # for Tru64
+DUALCASE=1; export DUALCASE # for MKS sh
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+relink_command=\"$relink_command\"
+
+# This environment variable determines our operation mode.
+if test \"\$libtool_install_magic\" = \"$magic\"; then
+  # install mode needs the following variables:
+  generated_by_libtool_version='$macro_version'
+  notinst_deplibs='$notinst_deplibs'
+else
+  # When we are sourced in execute mode, \$file and \$ECHO are already set.
+  if test \"\$libtool_execute_magic\" != \"$magic\"; then
+    file=\"\$0\""
+
+    qECHO=`$ECHO "$ECHO" | $SED "$sed_quote_subst"`
+    $ECHO "\
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+\$1
+_LTECHO_EOF'
+}
+    ECHO=\"$qECHO\"
+  fi
+
+# Very basic option parsing. These options are (a) specific to
+# the libtool wrapper, (b) identical between the wrapper
+# /script/ and the wrapper /executable/, which is used only on
+# Windows platforms, and (c) all begin with the string "--lt-"
+# (application programs are unlikely to have options which match
+# this pattern).
+#
+# There are only two supported options: --lt-debug and
+# --lt-dump-script. There is, deliberately, no --lt-help.
+#
+# The first argument to this parsing function should be the
+# script's $0 value, followed by "$@".
+lt_option_debug=
+func_parse_lt_options ()
+{
+  lt_script_arg0=\$0
+  shift
+  for lt_opt
+  do
+    case \"\$lt_opt\" in
+    --lt-debug) lt_option_debug=1 ;;
+    --lt-dump-script)
+        lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\`
+        test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=.
+        lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\`
+        cat \"\$lt_dump_D/\$lt_dump_F\"
+        exit 0
+      ;;
+    --lt-*)
+        \$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2
+        exit 1
+      ;;
+    esac
+  done
+
+  # Print the debug banner immediately:
+  if test -n \"\$lt_option_debug\"; then
+    echo \"${outputname}:${output}:\${LINENO}: libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\" 1>&2
+  fi
+}
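+# Illustrative: invoking the wrapper as './prog --lt-debug args'
+# prints the banner above and an argv dump to stderr before the
+# real program is launched.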
+
+# Used when --lt-debug. Prints its arguments to stdout
+# (redirection is the responsibility of the caller)
+func_lt_dump_args ()
+{
+  lt_dump_args_N=1;
+  for lt_arg
+  do
+    \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[\$lt_dump_args_N]: \$lt_arg\"
+    lt_dump_args_N=\`expr \$lt_dump_args_N + 1\`
+  done
+}
+
+# Core function for launching the target application
+func_exec_program_core ()
+{
+"
+  case $host in
+  # Backslashes separate directories on plain windows
+  *-*-mingw* | *-*-os2* | *-cegcc*)
+    $ECHO "\
+      if test -n \"\$lt_option_debug\"; then
+        \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir\\\\\$program\" 1>&2
+        func_lt_dump_args \${1+\"\$@\"} 1>&2
+      fi
+      exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
+"
+    ;;
+
+  *)
+    $ECHO "\
+      if test -n \"\$lt_option_debug\"; then
+        \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir/\$program\" 1>&2
+        func_lt_dump_args \${1+\"\$@\"} 1>&2
+      fi
+      exec \"\$progdir/\$program\" \${1+\"\$@\"}
+"
+    ;;
+  esac
+  $ECHO "\
+      \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
+      exit 1
+}
+
+# A function to encapsulate launching the target application
+# Strips options in the --lt-* namespace from \$@ and
+# launches target application with the remaining arguments.
+func_exec_program ()
+{
+  for lt_wr_arg
+  do
+    case \$lt_wr_arg in
+    --lt-*) ;;
+    *) set x \"\$@\" \"\$lt_wr_arg\"; shift;;
+    esac
+    shift
+  done
+  func_exec_program_core \${1+\"\$@\"}
+}
+
+  # Parse options
+  func_parse_lt_options \"\$0\" \${1+\"\$@\"}
+
+  # Find the directory that this script lives in.
+  thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\`
+  test \"x\$thisdir\" = \"x\$file\" && thisdir=.
+
+  # Follow symbolic links until we get to the real thisdir.
+  file=\`ls -ld \"\$file\" | $SED -n 's/.*-> //p'\`
+  while test -n \"\$file\"; do
+    destdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*\$%%'\`
+
+    # If there was a directory component, then change thisdir.
+    if test \"x\$destdir\" != \"x\$file\"; then
+      case \"\$destdir\" in
+      [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;;
+      *) thisdir=\"\$thisdir/\$destdir\" ;;
+      esac
+    fi
+
+    file=\`\$ECHO \"\$file\" | $SED 's%^.*/%%'\`
+    file=\`ls -ld \"\$thisdir/\$file\" | $SED -n 's/.*-> //p'\`
+  done
+
+  # Usually 'no', except on cygwin/mingw when embedded into
+  # the cwrapper.
+  WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_arg1
+  if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then
+    # special case for '.'
+    if test \"\$thisdir\" = \".\"; then
+      thisdir=\`pwd\`
+    fi
+    # remove .libs from thisdir
+    case \"\$thisdir\" in
+    *[\\\\/]$objdir ) thisdir=\`\$ECHO \"\$thisdir\" | $SED 's%[\\\\/][^\\\\/]*$%%'\` ;;
+    $objdir )   thisdir=. ;;
+    esac
+  fi
+
+  # Try to get the absolute directory name.
+  absdir=\`cd \"\$thisdir\" && pwd\`
+  test -n \"\$absdir\" && thisdir=\"\$absdir\"
+"
+
+	if test "$fast_install" = yes; then
+	  $ECHO "\
+  program=lt-'$outputname'$exeext
+  progdir=\"\$thisdir/$objdir\"
+
+  if test ! -f \"\$progdir/\$program\" ||
+     { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\
+       test \"X\$file\" != \"X\$progdir/\$program\"; }; then
+
+    file=\"\$\$-\$program\"
+
+    if test ! -d \"\$progdir\"; then
+      $MKDIR \"\$progdir\"
+    else
+      $RM \"\$progdir/\$file\"
+    fi"
+
+	  $ECHO "\
+
+    # relink executable if necessary
+    if test -n \"\$relink_command\"; then
+      if relink_command_output=\`eval \$relink_command 2>&1\`; then :
+      else
+	$ECHO \"\$relink_command_output\" >&2
+	$RM \"\$progdir/\$file\"
+	exit 1
+      fi
+    fi
+
+    $MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null ||
+    { $RM \"\$progdir/\$program\";
+      $MV \"\$progdir/\$file\" \"\$progdir/\$program\"; }
+    $RM \"\$progdir/\$file\"
+  fi"
+	else
+	  $ECHO "\
+  program='$outputname'
+  progdir=\"\$thisdir/$objdir\"
+"
+	fi
+
+	$ECHO "\
+
+  if test -f \"\$progdir/\$program\"; then"
+
+	# Export our shlibpath_var if we have one.
+	if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+	  $ECHO "\
+    # Add our own library path to $shlibpath_var
+    $shlibpath_var=\"$temp_rpath\$$shlibpath_var\"
+
+    # Some systems cannot cope with colon-terminated $shlibpath_var
+    # The second colon is a workaround for a bug in BeOS R4 sed
+    $shlibpath_var=\`\$ECHO \"\$$shlibpath_var\" | $SED 's/::*\$//'\`
+
+    export $shlibpath_var
+"
+	fi
+
+	# fixup the dll searchpath if we need to.
+	if test -n "$dllsearchpath"; then
+	  $ECHO "\
+    # Add the dll search path components to the executable PATH
+    PATH=$dllsearchpath:\$PATH
+"
+	fi
+
+	$ECHO "\
+    if test \"\$libtool_execute_magic\" != \"$magic\"; then
+      # Run the actual program with our arguments.
+      func_exec_program \${1+\"\$@\"}
+    fi
+  else
+    # The program doesn't exist.
+    \$ECHO \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2
+    \$ECHO \"This script is just a wrapper for \$program.\" 1>&2
+    \$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2
+    exit 1
+  fi
+fi\
+"
+}
+
+
+# func_to_host_path arg
+#
+# Convert paths to host format when used with build tools.
+# Intended for use with "native" mingw (where libtool itself
+# is running under the msys shell), or in the following cross-
+# build environments:
+#    $build          $host
+#    mingw (msys)    mingw  [e.g. native]
+#    cygwin          mingw
+#    *nix + wine     mingw
+# where wine is equipped with the `winepath' executable.
+# In the native mingw case, the (msys) shell automatically
+# converts paths for any non-msys applications it launches,
+# but that facility isn't available from inside the cwrapper.
+# Similar accommodations are necessary for $host mingw and
+# $build cygwin.  Calling this function does no harm for other
+# $host/$build combinations not listed above.
+#
+# ARG is the path (on $build) that should be converted to
+# the proper representation for $host. The result is stored
+# in $func_to_host_path_result.
+func_to_host_path ()
+{
+  func_to_host_path_result="$1"
+  if test -n "$1"; then
+    case $host in
+      *mingw* )
+        lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+        case $build in
+          *mingw* ) # actually, msys
+            # awkward: cmd appends spaces to result
+            func_to_host_path_result=`( cmd //c echo "$1" ) 2>/dev/null |
+              $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"`
+            ;;
+          *cygwin* )
+            func_to_host_path_result=`cygpath -w "$1" |
+	      $SED -e "$lt_sed_naive_backslashify"`
+            ;;
+          * )
+            # Unfortunately, winepath does not exit with a non-zero
+            # error code, so we are forced to check the contents of
+            # stdout. On the other hand, if the command is not
+            # found, the shell will set an exit code of 127 and print
+            # *an error message* to stdout. So we must check for both
+            # error code of zero AND non-empty stdout, which explains
+            # the odd construction:
+            func_to_host_path_tmp1=`winepath -w "$1" 2>/dev/null`
+            if test "$?" -eq 0 && test -n "${func_to_host_path_tmp1}"; then
+              func_to_host_path_result=`$ECHO "$func_to_host_path_tmp1" |
+                $SED -e "$lt_sed_naive_backslashify"`
+            else
+              # Allow warning below.
+              func_to_host_path_result=
+            fi
+            ;;
+        esac
+        if test -z "$func_to_host_path_result" ; then
+          func_error "Could not determine host path corresponding to"
+          func_error "  \`$1'"
+          func_error "Continuing, but uninstalled executables may not work."
+          # Fallback:
+          func_to_host_path_result="$1"
+        fi
+        ;;
+    esac
+  fi
+}
+# end: func_to_host_path
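+# Illustrative conversion (hypothetical path, $build=cygwin with the
+# default C:\cygwin mount, $host=mingw):
+#   func_to_host_path "/home/user/lib"
+#   # -> func_to_host_path_result='C:\\cygwin\\home\\user\\lib'
+# (backslashes are doubled by $lt_sed_naive_backslashify so they
+# survive later unquoting).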
+
+# func_to_host_pathlist arg
+#
+# Convert pathlists to host format when used with build tools.
+# See func_to_host_path(), above. This function supports the
+# following $build/$host combinations (but does no harm for
+# combinations not listed here):
+#    $build          $host
+#    mingw (msys)    mingw  [e.g. native]
+#    cygwin          mingw
+#    *nix + wine     mingw
+#
+# Path separators are also converted from $build format to
+# $host format. If ARG begins or ends with a path separator
+# character, it is preserved (but converted to $host format)
+# on output.
+#
+# ARG is a pathlist (on $build) that should be converted to
+# the proper representation on $host. The result is stored
+# in $func_to_host_pathlist_result.
+func_to_host_pathlist ()
+{
+  func_to_host_pathlist_result="$1"
+  if test -n "$1"; then
+    case $host in
+      *mingw* )
+        lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
+        # Remove leading and trailing path separator characters from
+        # ARG. msys behavior is inconsistent here: cygpath turns them
+        # into '.;' and ';.', and winepath ignores them completely.
+	func_stripname : : "$1"
+        func_to_host_pathlist_tmp1=$func_stripname_result
+        case $build in
+          *mingw* ) # Actually, msys.
+            # Awkward: cmd appends spaces to result.
+            func_to_host_pathlist_result=`
+	      ( cmd //c echo "$func_to_host_pathlist_tmp1" ) 2>/dev/null |
+	      $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"`
+            ;;
+          *cygwin* )
+            func_to_host_pathlist_result=`cygpath -w -p "$func_to_host_pathlist_tmp1" |
+              $SED -e "$lt_sed_naive_backslashify"`
+            ;;
+          * )
+            # unfortunately, winepath doesn't convert pathlists
+            func_to_host_pathlist_result=""
+            func_to_host_pathlist_oldIFS=$IFS
+            IFS=:
+            for func_to_host_pathlist_f in $func_to_host_pathlist_tmp1 ; do
+              IFS=$func_to_host_pathlist_oldIFS
+              if test -n "$func_to_host_pathlist_f" ; then
+                func_to_host_path "$func_to_host_pathlist_f"
+                if test -n "$func_to_host_path_result" ; then
+                  if test -z "$func_to_host_pathlist_result" ; then
+                    func_to_host_pathlist_result="$func_to_host_path_result"
+                  else
+                    func_append func_to_host_pathlist_result ";$func_to_host_path_result"
+                  fi
+                fi
+              fi
+            done
+            IFS=$func_to_host_pathlist_oldIFS
+            ;;
+        esac
+        if test -z "$func_to_host_pathlist_result"; then
+          func_error "Could not determine the host path(s) corresponding to"
+          func_error "  \`$1'"
+          func_error "Continuing, but uninstalled executables may not work."
+          # Fallback. This may break if $1 contains DOS-style drive
+          # specifications. The fix is not to complicate the expression
+          # below, but for the user to provide a working wine installation
+          # with winepath so that path translation in the cross-to-mingw
+          # case works properly.
+          lt_replace_pathsep_nix_to_dos="s|:|;|g"
+          func_to_host_pathlist_result=`echo "$func_to_host_pathlist_tmp1" |\
+            $SED -e "$lt_replace_pathsep_nix_to_dos"`
+        fi
+        # Now, add the leading and trailing path separators back
+        case "$1" in
+          :* ) func_to_host_pathlist_result=";$func_to_host_pathlist_result"
+            ;;
+        esac
+        case "$1" in
+          *: ) func_append func_to_host_pathlist_result ";"
+            ;;
+        esac
+        ;;
+    esac
+  fi
+}
+# end: func_to_host_pathlist
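+# Illustrative conversion (same hypothetical cygwin-to-mingw setup):
+#   func_to_host_pathlist "/usr/lib:/usr/local/lib"
+#   # -> func_to_host_pathlist_result='C:\\cygwin\\usr\\lib;C:\\cygwin\\usr\\local\\lib'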
+
+# func_emit_cwrapperexe_src
+# emit the source code for a wrapper executable on stdout
+# Must ONLY be called from within func_mode_link because
+# it depends on a number of variables set therein.
+func_emit_cwrapperexe_src ()
+{
+	cat <<EOF
+
+/* $cwrappersource - temporary wrapper executable for $objdir/$outputname
+   Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+
+   The $output program cannot be directly executed until all the libtool
+   libraries that it depends on are installed.
+
+   This wrapper executable should never be moved out of the build directory.
+   If it is, it will not operate correctly.
+*/
+EOF
+	    cat <<"EOF"
+#ifdef _MSC_VER
+# define _CRT_SECURE_NO_DEPRECATE 1
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#ifdef _MSC_VER
+# include <direct.h>
+# include <process.h>
+# include <io.h>
+#else
+# include <unistd.h>
+# include <stdint.h>
+# ifdef __CYGWIN__
+#  include <io.h>
+# endif
+#endif
+#include <malloc.h>
+#include <stdarg.h>
+#include <assert.h>
+#include <string.h>
+#include <ctype.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <sys/stat.h>
+
+/* declarations of non-ANSI functions */
+#if defined(__MINGW32__)
+# ifdef __STRICT_ANSI__
+int _putenv (const char *);
+# endif
+#elif defined(__CYGWIN__)
+# ifdef __STRICT_ANSI__
+char *realpath (const char *, char *);
+int putenv (char *);
+int setenv (const char *, const char *, int);
+# endif
+/* #elif defined (other platforms) ... */
+#endif
+
+/* portability defines, excluding path handling macros */
+#if defined(_MSC_VER)
+# define setmode _setmode
+# define stat    _stat
+# define chmod   _chmod
+# define getcwd  _getcwd
+# define putenv  _putenv
+# define S_IXUSR _S_IEXEC
+# ifndef _INTPTR_T_DEFINED
+#  define _INTPTR_T_DEFINED
+#  define intptr_t int
+# endif
+#elif defined(__MINGW32__)
+# define setmode _setmode
+# define stat    _stat
+# define chmod   _chmod
+# define getcwd  _getcwd
+# define putenv  _putenv
+#elif defined(__CYGWIN__)
+# define HAVE_SETENV
+# define FOPEN_WB "wb"
+/* #elif defined (other platforms) ... */
+#endif
+
+#if defined(PATH_MAX)
+# define LT_PATHMAX PATH_MAX
+#elif defined(MAXPATHLEN)
+# define LT_PATHMAX MAXPATHLEN
+#else
+# define LT_PATHMAX 1024
+#endif
+
+#ifndef S_IXOTH
+# define S_IXOTH 0
+#endif
+#ifndef S_IXGRP
+# define S_IXGRP 0
+#endif
+
+/* path handling portability macros */
+#ifndef DIR_SEPARATOR
+# define DIR_SEPARATOR '/'
+# define PATH_SEPARATOR ':'
+#endif
+
+#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \
+  defined (__OS2__)
+# define HAVE_DOS_BASED_FILE_SYSTEM
+# define FOPEN_WB "wb"
+# ifndef DIR_SEPARATOR_2
+#  define DIR_SEPARATOR_2 '\\'
+# endif
+# ifndef PATH_SEPARATOR_2
+#  define PATH_SEPARATOR_2 ';'
+# endif
+#endif
+
+#ifndef DIR_SEPARATOR_2
+# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR)
+#else /* DIR_SEPARATOR_2 */
+# define IS_DIR_SEPARATOR(ch) \
+	(((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2))
+#endif /* DIR_SEPARATOR_2 */
+
+#ifndef PATH_SEPARATOR_2
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR)
+#else /* PATH_SEPARATOR_2 */
+# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2)
+#endif /* PATH_SEPARATOR_2 */
+
+#ifndef FOPEN_WB
+# define FOPEN_WB "w"
+#endif
+#ifndef _O_BINARY
+# define _O_BINARY 0
+#endif
+
+#define XMALLOC(type, num)      ((type *) xmalloc ((num) * sizeof(type)))
+#define XFREE(stale) do { \
+  if (stale) { free ((void *) stale); stale = 0; } \
+} while (0)
+
+#if defined(LT_DEBUGWRAPPER)
+static int lt_debug = 1;
+#else
+static int lt_debug = 0;
+#endif
+
+const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */
+
+void *xmalloc (size_t num);
+char *xstrdup (const char *string);
+const char *base_name (const char *name);
+char *find_executable (const char *wrapper);
+char *chase_symlinks (const char *pathspec);
+int make_executable (const char *path);
+int check_executable (const char *path);
+char *strendzap (char *str, const char *pat);
+void lt_debugprintf (const char *file, int line, const char *fmt, ...);
+void lt_fatal (const char *file, int line, const char *message, ...);
+static const char *nonnull (const char *s);
+static const char *nonempty (const char *s);
+void lt_setenv (const char *name, const char *value);
+char *lt_extend_str (const char *orig_value, const char *add, int to_end);
+void lt_update_exe_path (const char *name, const char *value);
+void lt_update_lib_path (const char *name, const char *value);
+char **prepare_spawn (char **argv);
+void lt_dump_script (FILE *f);
+EOF
+
+	    cat <<EOF
+const char * MAGIC_EXE = "$magic_exe";
+const char * LIB_PATH_VARNAME = "$shlibpath_var";
+EOF
+
+	    if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
+              func_to_host_pathlist "$temp_rpath"
+	      cat <<EOF
+const char * LIB_PATH_VALUE   = "$func_to_host_pathlist_result";
+EOF
+	    else
+	      cat <<"EOF"
+const char * LIB_PATH_VALUE   = "";
+EOF
+	    fi
+
+	    if test -n "$dllsearchpath"; then
+              func_to_host_pathlist "$dllsearchpath:"
+	      cat <<EOF
+const char * EXE_PATH_VARNAME = "PATH";
+const char * EXE_PATH_VALUE   = "$func_to_host_pathlist_result";
+EOF
+	    else
+	      cat <<"EOF"
+const char * EXE_PATH_VARNAME = "";
+const char * EXE_PATH_VALUE   = "";
+EOF
+	    fi
+
+	    if test "$fast_install" = yes; then
+	      cat <<EOF
+const char * TARGET_PROGRAM_NAME = "lt-$outputname"; /* hopefully, no .exe */
+EOF
+	    else
+	      cat <<EOF
+const char * TARGET_PROGRAM_NAME = "$outputname"; /* hopefully, no .exe */
+EOF
+	    fi
+
+
+	    cat <<"EOF"
+
+#define LTWRAPPER_OPTION_PREFIX         "--lt-"
+
+static const char *ltwrapper_option_prefix = LTWRAPPER_OPTION_PREFIX;
+static const char *dumpscript_opt       = LTWRAPPER_OPTION_PREFIX "dump-script";
+static const char *debug_opt            = LTWRAPPER_OPTION_PREFIX "debug";
+
+int
+main (int argc, char *argv[])
+{
+  char **newargz;
+  int  newargc;
+  char *tmp_pathspec;
+  char *actual_cwrapper_path;
+  char *actual_cwrapper_name;
+  char *target_name;
+  char *lt_argv_zero;
+  intptr_t rval = 127;
+
+  int i;
+
+  program_name = (char *) xstrdup (base_name (argv[0]));
+  newargz = XMALLOC (char *, argc + 1);
+
+  /* very simple arg parsing; don't want to rely on getopt.
+   * also, copy all non-cwrapper options to newargz, except
+   * argv[0], which is handled differently.
+   */
+  newargc=0;
+  for (i = 1; i < argc; i++)
+    {
+      if (strcmp (argv[i], dumpscript_opt) == 0)
+	{
+EOF
+	    case "$host" in
+	      *mingw* | *cygwin* )
+		# make stdout use "unix" line endings
+		echo "          setmode(1,_O_BINARY);"
+		;;
+	      esac
+
+	    cat <<"EOF"
+	  lt_dump_script (stdout);
+	  return 0;
+	}
+      if (strcmp (argv[i], debug_opt) == 0)
+	{
+          lt_debug = 1;
+          continue;
+	}
+      if (strcmp (argv[i], ltwrapper_option_prefix) == 0)
+        {
+          /* however, if there is an option in the LTWRAPPER_OPTION_PREFIX
+             namespace, but it is not one of the ones we know about and
+             have already dealt with above (including dump-script), then
+             report an error. Otherwise, targets might begin to believe
+             they are allowed to use options in the LTWRAPPER_OPTION_PREFIX
+             namespace. The first time any user complains about this, we'll
+             need to make LTWRAPPER_OPTION_PREFIX a configure-time option
+             or a configure.ac-settable value.
+           */
+          lt_fatal (__FILE__, __LINE__,
+		    "unrecognized %s option: '%s'",
+                    ltwrapper_option_prefix, argv[i]);
+        }
+      /* otherwise ... */
+      newargz[++newargc] = xstrdup (argv[i]);
+    }
+  newargz[++newargc] = NULL;
+
+EOF
+	    cat <<EOF
+  /* The GNU banner must be the first non-error debug message */
+  lt_debugprintf (__FILE__, __LINE__, "libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\n");
+EOF
+	    cat <<"EOF"
+  lt_debugprintf (__FILE__, __LINE__, "(main) argv[0]: %s\n", argv[0]);
+  lt_debugprintf (__FILE__, __LINE__, "(main) program_name: %s\n", program_name);
+
+  tmp_pathspec = find_executable (argv[0]);
+  if (tmp_pathspec == NULL)
+    lt_fatal (__FILE__, __LINE__, "couldn't find %s", argv[0]);
+  lt_debugprintf (__FILE__, __LINE__,
+                  "(main) found exe (before symlink chase) at: %s\n",
+		  tmp_pathspec);
+
+  actual_cwrapper_path = chase_symlinks (tmp_pathspec);
+  lt_debugprintf (__FILE__, __LINE__,
+                  "(main) found exe (after symlink chase) at: %s\n",
+		  actual_cwrapper_path);
+  XFREE (tmp_pathspec);
+
+  actual_cwrapper_name = xstrdup (base_name (actual_cwrapper_path));
+  strendzap (actual_cwrapper_path, actual_cwrapper_name);
+
+  /* wrapper name transforms */
+  strendzap (actual_cwrapper_name, ".exe");
+  tmp_pathspec = lt_extend_str (actual_cwrapper_name, ".exe", 1);
+  XFREE (actual_cwrapper_name);
+  actual_cwrapper_name = tmp_pathspec;
+  tmp_pathspec = 0;
+
+  /* target_name transforms -- use actual target program name; might have lt- prefix */
+  target_name = xstrdup (base_name (TARGET_PROGRAM_NAME));
+  strendzap (target_name, ".exe");
+  tmp_pathspec = lt_extend_str (target_name, ".exe", 1);
+  XFREE (target_name);
+  target_name = tmp_pathspec;
+  tmp_pathspec = 0;
+
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(main) libtool target name: %s\n",
+		  target_name);
+EOF
+
+	    cat <<EOF
+  newargz[0] =
+    XMALLOC (char, (strlen (actual_cwrapper_path) +
+		    strlen ("$objdir") + 1 + strlen (actual_cwrapper_name) + 1));
+  strcpy (newargz[0], actual_cwrapper_path);
+  strcat (newargz[0], "$objdir");
+  strcat (newargz[0], "/");
+EOF
+
+	    cat <<"EOF"
+  /* stop here, and copy so we don't have to do this twice */
+  tmp_pathspec = xstrdup (newargz[0]);
+
+  /* do NOT want the lt- prefix here, so use actual_cwrapper_name */
+  strcat (newargz[0], actual_cwrapper_name);
+
+  /* DO want the lt- prefix here if it exists, so use target_name */
+  lt_argv_zero = lt_extend_str (tmp_pathspec, target_name, 1);
+  XFREE (tmp_pathspec);
+  tmp_pathspec = NULL;
+EOF
+
+	    case $host_os in
+	      mingw*)
+	    cat <<"EOF"
+  {
+    char* p;
+    while ((p = strchr (newargz[0], '\\')) != NULL)
+      {
+	*p = '/';
+      }
+    while ((p = strchr (lt_argv_zero, '\\')) != NULL)
+      {
+	*p = '/';
+      }
+  }
+EOF
+	    ;;
+	    esac
+
+	    cat <<"EOF"
+  XFREE (target_name);
+  XFREE (actual_cwrapper_path);
+  XFREE (actual_cwrapper_name);
+
+  lt_setenv ("BIN_SH", "xpg4"); /* for Tru64 */
+  lt_setenv ("DUALCASE", "1");  /* for MKS sh */
+  lt_update_lib_path (LIB_PATH_VARNAME, LIB_PATH_VALUE);
+  lt_update_exe_path (EXE_PATH_VARNAME, EXE_PATH_VALUE);
+
+  lt_debugprintf (__FILE__, __LINE__, "(main) lt_argv_zero: %s\n",
+		  nonnull (lt_argv_zero));
+  for (i = 0; i < newargc; i++)
+    {
+      lt_debugprintf (__FILE__, __LINE__, "(main) newargz[%d]: %s\n",
+		      i, nonnull (newargz[i]));
+    }
+
+EOF
+
+	    case $host_os in
+	      mingw*)
+		cat <<"EOF"
+  /* execv doesn't actually work on mingw as expected on unix */
+  newargz = prepare_spawn (newargz);
+  rval = _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz);
+  if (rval == -1)
+    {
+      /* failed to start process */
+      lt_debugprintf (__FILE__, __LINE__,
+		      "(main) failed to launch target \"%s\": %s\n",
+		      lt_argv_zero, nonnull (strerror (errno)));
+      return 127;
+    }
+  return rval;
+EOF
+		;;
+	      *)
+		cat <<"EOF"
+  execv (lt_argv_zero, newargz);
+  return rval; /* =127, but avoids unused variable warning */
+EOF
+		;;
+	    esac
+
+	    cat <<"EOF"
+}
+
+void *
+xmalloc (size_t num)
+{
+  void *p = (void *) malloc (num);
+  if (!p)
+    lt_fatal (__FILE__, __LINE__, "memory exhausted");
+
+  return p;
+}
+
+char *
+xstrdup (const char *string)
+{
+  return string ? strcpy ((char *) xmalloc (strlen (string) + 1),
+			  string) : NULL;
+}
+
+const char *
+base_name (const char *name)
+{
+  const char *base;
+
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+  /* Skip over the disk name in MSDOS pathnames. */
+  if (isalpha ((unsigned char) name[0]) && name[1] == ':')
+    name += 2;
+#endif
+
+  for (base = name; *name; name++)
+    if (IS_DIR_SEPARATOR (*name))
+      base = name + 1;
+  return base;
+}
+
+int
+check_executable (const char *path)
+{
+  struct stat st;
+
+  lt_debugprintf (__FILE__, __LINE__, "(check_executable): %s\n",
+                  nonempty (path));
+  if ((!path) || (!*path))
+    return 0;
+
+  if ((stat (path, &st) >= 0)
+      && (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH)))
+    return 1;
+  else
+    return 0;
+}
+
+int
+make_executable (const char *path)
+{
+  int rval = 0;
+  struct stat st;
+
+  lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n",
+                  nonempty (path));
+  if ((!path) || (!*path))
+    return 0;
+
+  if (stat (path, &st) >= 0)
+    {
+      rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR);
+    }
+  return rval;
+}
+
+/* Searches for the full path of the wrapper.  Returns
+   newly allocated full path name if found, NULL otherwise.
+   Does not chase symlinks, even on platforms that support them.
+*/
+char *
+find_executable (const char *wrapper)
+{
+  int has_slash = 0;
+  const char *p;
+  const char *p_next;
+  /* static buffer for getcwd */
+  char tmp[LT_PATHMAX + 1];
+  int tmp_len;
+  char *concat_name;
+
+  lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n",
+                  nonempty (wrapper));
+
+  if ((wrapper == NULL) || (*wrapper == '\0'))
+    return NULL;
+
+  /* Absolute path? */
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+  if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':')
+    {
+      concat_name = xstrdup (wrapper);
+      if (check_executable (concat_name))
+	return concat_name;
+      XFREE (concat_name);
+    }
+  else
+    {
+#endif
+      if (IS_DIR_SEPARATOR (wrapper[0]))
+	{
+	  concat_name = xstrdup (wrapper);
+	  if (check_executable (concat_name))
+	    return concat_name;
+	  XFREE (concat_name);
+	}
+#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
+    }
+#endif
+
+  for (p = wrapper; *p; p++)
+    if (*p == '/')
+      {
+	has_slash = 1;
+	break;
+      }
+  if (!has_slash)
+    {
+      /* no slashes; search PATH */
+      const char *path = getenv ("PATH");
+      if (path != NULL)
+	{
+	  for (p = path; *p; p = p_next)
+	    {
+	      const char *q;
+	      size_t p_len;
+	      for (q = p; *q; q++)
+		if (IS_PATH_SEPARATOR (*q))
+		  break;
+	      p_len = q - p;
+	      p_next = (*q == '\0' ? q : q + 1);
+	      if (p_len == 0)
+		{
+		  /* empty path: current directory */
+		  if (getcwd (tmp, LT_PATHMAX) == NULL)
+		    lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+                              nonnull (strerror (errno)));
+		  tmp_len = strlen (tmp);
+		  concat_name =
+		    XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+		  memcpy (concat_name, tmp, tmp_len);
+		  concat_name[tmp_len] = '/';
+		  strcpy (concat_name + tmp_len + 1, wrapper);
+		}
+	      else
+		{
+		  concat_name =
+		    XMALLOC (char, p_len + 1 + strlen (wrapper) + 1);
+		  memcpy (concat_name, p, p_len);
+		  concat_name[p_len] = '/';
+		  strcpy (concat_name + p_len + 1, wrapper);
+		}
+	      if (check_executable (concat_name))
+		return concat_name;
+	      XFREE (concat_name);
+	    }
+	}
+      /* not found in PATH; assume curdir */
+    }
+  /* Relative path | not found in path: prepend cwd */
+  if (getcwd (tmp, LT_PATHMAX) == NULL)
+    lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
+              nonnull (strerror (errno)));
+  tmp_len = strlen (tmp);
+  concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
+  memcpy (concat_name, tmp, tmp_len);
+  concat_name[tmp_len] = '/';
+  strcpy (concat_name + tmp_len + 1, wrapper);
+
+  if (check_executable (concat_name))
+    return concat_name;
+  XFREE (concat_name);
+  return NULL;
+}
+
+char *
+chase_symlinks (const char *pathspec)
+{
+#ifndef S_ISLNK
+  return xstrdup (pathspec);
+#else
+  char buf[LT_PATHMAX];
+  struct stat s;
+  char *tmp_pathspec = xstrdup (pathspec);
+  char *p;
+  int has_symlinks = 0;
+  while (strlen (tmp_pathspec) && !has_symlinks)
+    {
+      lt_debugprintf (__FILE__, __LINE__,
+		      "checking path component for symlinks: %s\n",
+		      tmp_pathspec);
+      if (lstat (tmp_pathspec, &s) == 0)
+	{
+	  if (S_ISLNK (s.st_mode) != 0)
+	    {
+	      has_symlinks = 1;
+	      break;
+	    }
+
+	  /* search backwards for last DIR_SEPARATOR */
+	  p = tmp_pathspec + strlen (tmp_pathspec) - 1;
+	  while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+	    p--;
+	  if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
+	    {
+	      /* no more DIR_SEPARATORS left */
+	      break;
+	    }
+	  *p = '\0';
+	}
+      else
+	{
+	  lt_fatal (__FILE__, __LINE__,
+		    "error accessing file \"%s\": %s",
+		    tmp_pathspec, nonnull (strerror (errno)));
+	}
+    }
+  XFREE (tmp_pathspec);
+
+  if (!has_symlinks)
+    {
+      return xstrdup (pathspec);
+    }
+
+  tmp_pathspec = realpath (pathspec, buf);
+  if (tmp_pathspec == 0)
+    {
+      lt_fatal (__FILE__, __LINE__,
+		"could not follow symlinks for %s", pathspec);
+    }
+  return xstrdup (tmp_pathspec);
+#endif
+}
+
+char *
+strendzap (char *str, const char *pat)
+{
+  size_t len, patlen;
+
+  assert (str != NULL);
+  assert (pat != NULL);
+
+  len = strlen (str);
+  patlen = strlen (pat);
+
+  if (patlen <= len)
+    {
+      str += len - patlen;
+      if (strcmp (str, pat) == 0)
+	*str = '\0';
+    }
+  return str;
+}
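+/* Illustrative: with char buf[] = "foo.exe", strendzap (buf, ".exe")
+   truncates buf to "foo" and returns a pointer to the position where
+   the suffix began.  */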
+
+void
+lt_debugprintf (const char *file, int line, const char *fmt, ...)
+{
+  va_list args;
+  if (lt_debug)
+    {
+      (void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line);
+      va_start (args, fmt);
+      (void) vfprintf (stderr, fmt, args);
+      va_end (args);
+    }
+}
+
+static void
+lt_error_core (int exit_status, const char *file,
+	       int line, const char *mode,
+	       const char *message, va_list ap)
+{
+  fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode);
+  vfprintf (stderr, message, ap);
+  fprintf (stderr, ".\n");
+
+  if (exit_status >= 0)
+    exit (exit_status);
+}
+
+void
+lt_fatal (const char *file, int line, const char *message, ...)
+{
+  va_list ap;
+  va_start (ap, message);
+  lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap);
+  va_end (ap);
+}
+
+static const char *
+nonnull (const char *s)
+{
+  return s ? s : "(null)";
+}
+
+static const char *
+nonempty (const char *s)
+{
+  return (s && !*s) ? "(empty)" : nonnull (s);
+}
+
+void
+lt_setenv (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_setenv) setting '%s' to '%s'\n",
+                  nonnull (name), nonnull (value));
+  {
+#ifdef HAVE_SETENV
+    /* always make a copy, for consistency with !HAVE_SETENV */
+    char *str = xstrdup (value);
+    setenv (name, str, 1);
+#else
+    int len = strlen (name) + 1 + strlen (value) + 1;
+    char *str = XMALLOC (char, len);
+    sprintf (str, "%s=%s", name, value);
+    if (putenv (str) != EXIT_SUCCESS)
+      {
+        XFREE (str);
+      }
+#endif
+  }
+}
+
+char *
+lt_extend_str (const char *orig_value, const char *add, int to_end)
+{
+  char *new_value;
+  if (orig_value && *orig_value)
+    {
+      int orig_value_len = strlen (orig_value);
+      int add_len = strlen (add);
+      new_value = XMALLOC (char, add_len + orig_value_len + 1);
+      if (to_end)
+        {
+          strcpy (new_value, orig_value);
+          strcpy (new_value + orig_value_len, add);
+        }
+      else
+        {
+          strcpy (new_value, add);
+          strcpy (new_value + add_len, orig_value);
+        }
+    }
+  else
+    {
+      new_value = xstrdup (add);
+    }
+  return new_value;
+}
+
+void
+lt_update_exe_path (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
+                  nonnull (name), nonnull (value));
+
+  if (name && *name && value && *value)
+    {
+      char *new_value = lt_extend_str (getenv (name), value, 0);
+      /* some systems can't cope with a ':'-terminated path #' */
+      int len;  /* assigned in the loop condition below */
+      while (((len = strlen (new_value)) > 0) && IS_PATH_SEPARATOR (new_value[len-1]))
+        {
+          new_value[len-1] = '\0';
+        }
+      lt_setenv (name, new_value);
+      XFREE (new_value);
+    }
+}
+
+void
+lt_update_lib_path (const char *name, const char *value)
+{
+  lt_debugprintf (__FILE__, __LINE__,
+		  "(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
+                  nonnull (name), nonnull (value));
+
+  if (name && *name && value && *value)
+    {
+      char *new_value = lt_extend_str (getenv (name), value, 0);
+      lt_setenv (name, new_value);
+      XFREE (new_value);
+    }
+}
+
+EOF
+	    case $host_os in
+	      mingw*)
+		cat <<"EOF"
+
+/* Prepares an argument vector before calling spawn().
+   Note that spawn() does not by itself call the command interpreter
+     (getenv ("COMSPEC") != NULL ? getenv ("COMSPEC") :
+      ({ OSVERSIONINFO v; v.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+         GetVersionEx(&v);
+         v.dwPlatformId == VER_PLATFORM_WIN32_NT;
+      }) ? "cmd.exe" : "command.com").
+   Instead it simply concatenates the arguments, separated by ' ', and calls
+   CreateProcess().  We must quote the arguments since Win32 CreateProcess()
+   interprets characters like ' ', '\t', '\\', '"' (but not '<' and '>') in a
+   special way:
+   - Space and tab are interpreted as delimiters. They are not treated as
+     delimiters if they are surrounded by double quotes: "...".
+   - Unescaped double quotes are removed from the input. Their only effect is
+     that within double quotes, space and tab are treated like normal
+     characters.
+   - Backslashes not followed by double quotes are not special.
+   - But 2*n+1 backslashes followed by a double quote become
+     n backslashes followed by a double quote (n >= 0):
+       \" -> "
+       \\\" -> \"
+       \\\\\" -> \\"
+ */
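+/* Illustrative application of these rules: the argument
+     he said "hi"
+   contains spaces and quotes, so prepare_spawn() rewrites it as
+     "he said \"hi\""
+   which CreateProcess() parses back into the original string.  */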
+#define SHELL_SPECIAL_CHARS "\"\\ \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+#define SHELL_SPACE_CHARS " \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
+char **
+prepare_spawn (char **argv)
+{
+  size_t argc;
+  char **new_argv;
+  size_t i;
+
+  /* Count number of arguments.  */
+  for (argc = 0; argv[argc] != NULL; argc++)
+    ;
+
+  /* Allocate new argument vector.  */
+  new_argv = XMALLOC (char *, argc + 1);
+
+  /* Put quoted arguments into the new argument vector.  */
+  for (i = 0; i < argc; i++)
+    {
+      const char *string = argv[i];
+
+      if (string[0] == '\0')
+	new_argv[i] = xstrdup ("\"\"");
+      else if (strpbrk (string, SHELL_SPECIAL_CHARS) != NULL)
+	{
+	  int quote_around = (strpbrk (string, SHELL_SPACE_CHARS) != NULL);
+	  size_t length;
+	  unsigned int backslashes;
+	  const char *s;
+	  char *quoted_string;
+	  char *p;
+
+	  length = 0;
+	  backslashes = 0;
+	  if (quote_around)
+	    length++;
+	  for (s = string; *s != '\0'; s++)
+	    {
+	      char c = *s;
+	      if (c == '"')
+		length += backslashes + 1;
+	      length++;
+	      if (c == '\\')
+		backslashes++;
+	      else
+		backslashes = 0;
+	    }
+	  if (quote_around)
+	    length += backslashes + 1;
+
+	  quoted_string = XMALLOC (char, length + 1);
+
+	  p = quoted_string;
+	  backslashes = 0;
+	  if (quote_around)
+	    *p++ = '"';
+	  for (s = string; *s != '\0'; s++)
+	    {
+	      char c = *s;
+	      if (c == '"')
+		{
+		  unsigned int j;
+		  for (j = backslashes + 1; j > 0; j--)
+		    *p++ = '\\';
+		}
+	      *p++ = c;
+	      if (c == '\\')
+		backslashes++;
+	      else
+		backslashes = 0;
+	    }
+	  if (quote_around)
+	    {
+	      unsigned int j;
+	      for (j = backslashes; j > 0; j--)
+		*p++ = '\\';
+	      *p++ = '"';
+	    }
+	  *p = '\0';
+
+	  new_argv[i] = quoted_string;
+	}
+      else
+	new_argv[i] = (char *) string;
+    }
+  new_argv[argc] = NULL;
+
+  return new_argv;
+}
+EOF
+		;;
+	    esac
+
+            cat <<"EOF"
+void lt_dump_script (FILE* f)
+{
+EOF
+	    func_emit_wrapper yes |
+              $SED -e 's/\([\\"]\)/\\\1/g' \
+	           -e 's/^/  fputs ("/' -e 's/$/\\n", f);/'
+
+            cat <<"EOF"
+}
+EOF
+}
+# end: func_emit_cwrapperexe_src
+
+# func_win32_import_lib_p ARG
+# True if ARG is an import lib, as indicated by $file_magic_cmd
+func_win32_import_lib_p ()
+{
+    $opt_debug
+    case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in
+    *import*) : ;;
+    *) false ;;
+    esac
+}
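+# Illustrative (hypothetical file): returns success for a typical
+# import library, so callers can write
+#   func_win32_import_lib_p "libfoo.dll.a" && echo "import lib"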
+
+# func_mode_link arg...
+func_mode_link ()
+{
+    $opt_debug
+    case $host in
+    *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+      # It is impossible to link a dll without this setting, and
+      # we shouldn't force the makefile maintainer to figure out
+      # which system we are compiling for in order to pass an extra
+      # flag for every libtool invocation.
+      # allow_undefined=no
+
+      # FIXME: Unfortunately, there are problems with the above when trying
+      # to make a dll which has undefined symbols, in which case not
+      # even a static library is built.  For now, we need to specify
+      # -no-undefined on the libtool link line when we can be certain
+      # that all symbols are satisfied, otherwise we get a static library.
+      allow_undefined=yes
+      ;;
+    *)
+      allow_undefined=yes
+      ;;
+    esac
+    libtool_args=$nonopt
+    base_compile="$nonopt $@"
+    compile_command=$nonopt
+    finalize_command=$nonopt
+
+    compile_rpath=
+    finalize_rpath=
+    compile_shlibpath=
+    finalize_shlibpath=
+    convenience=
+    old_convenience=
+    deplibs=
+    old_deplibs=
+    compiler_flags=
+    linker_flags=
+    dllsearchpath=
+    lib_search_path=`pwd`
+    inst_prefix_dir=
+    new_inherited_linker_flags=
+
+    avoid_version=no
+    bindir=
+    dlfiles=
+    dlprefiles=
+    dlself=no
+    export_dynamic=no
+    export_symbols=
+    export_symbols_regex=
+    generated=
+    libobjs=
+    ltlibs=
+    module=no
+    no_install=no
+    objs=
+    non_pic_objects=
+    precious_files_regex=
+    prefer_static_libs=no
+    preload=no
+    prev=
+    prevarg=
+    release=
+    rpath=
+    xrpath=
+    perm_rpath=
+    temp_rpath=
+    thread_safe=no
+    vinfo=
+    vinfo_number=no
+    weak_libs=
+    single_module="${wl}-single_module"
+    func_infer_tag $base_compile
+
+    # We need to know -static, to get the right output filenames.
+    for arg
+    do
+      case $arg in
+      -shared)
+	test "$build_libtool_libs" != yes && \
+	  func_fatal_configuration "can not build a shared library"
+	build_old_libs=no
+	break
+	;;
+      -all-static | -static | -static-libtool-libs)
+	case $arg in
+	-all-static)
+	  if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then
+	    func_warning "complete static linking is impossible in this configuration"
+	  fi
+	  if test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=yes
+	  ;;
+	-static)
+	  if test -z "$pic_flag" && test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=built
+	  ;;
+	-static-libtool-libs)
+	  if test -z "$pic_flag" && test -n "$link_static_flag"; then
+	    dlopen_self=$dlopen_self_static
+	  fi
+	  prefer_static_libs=yes
+	  ;;
+	esac
+	build_libtool_libs=no
+	build_old_libs=yes
+	break
+	;;
+      esac
+    done
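+    # Illustrative: a link line such as
+    #   libtool --mode=link $CC -static -o prog prog.o libfoo.la
+    # is caught by the '-static' branch above and prefers the built
+    # static libfoo over its shared counterpart.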
+
+    # See if our shared archives depend on static archives.
+    test -n "$old_archive_from_new_cmds" && build_old_libs=yes
+
+    # Go through the arguments, transforming them on the way.
+    while test "$#" -gt 0; do
+      arg="$1"
+      shift
+      func_quote_for_eval "$arg"
+      qarg=$func_quote_for_eval_unquoted_result
+      func_append libtool_args " $func_quote_for_eval_result"
+
+      # If the previous option needs an argument, assign it.
+      if test -n "$prev"; then
+	case $prev in
+	output)
+	  func_append compile_command " @OUTPUT@"
+	  func_append finalize_command " @OUTPUT@"
+	  ;;
+	esac
+
+	case $prev in
+	bindir)
+	  bindir="$arg"
+	  prev=
+	  continue
+	  ;;
+	dlfiles|dlprefiles)
+	  if test "$preload" = no; then
+	    # Add the symbol object into the linking commands.
+	    func_append compile_command " @SYMFILE@"
+	    func_append finalize_command " @SYMFILE@"
+	    preload=yes
+	  fi
+	  case $arg in
+	  *.la | *.lo) ;;  # We handle these cases below.
+	  force)
+	    if test "$dlself" = no; then
+	      dlself=needless
+	      export_dynamic=yes
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  self)
+	    if test "$prev" = dlprefiles; then
+	      dlself=yes
+	    elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then
+	      dlself=yes
+	    else
+	      dlself=needless
+	      export_dynamic=yes
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  *)
+	    if test "$prev" = dlfiles; then
+	      dlfiles="$dlfiles $arg"
+	    else
+	      dlprefiles="$dlprefiles $arg"
+	    fi
+	    prev=
+	    continue
+	    ;;
+	  esac
+	  ;;
+	expsyms)
+	  export_symbols="$arg"
+	  test -f "$arg" \
+	    || func_fatal_error "symbol file \`$arg' does not exist"
+	  prev=
+	  continue
+	  ;;
+	expsyms_regex)
+	  export_symbols_regex="$arg"
+	  prev=
+	  continue
+	  ;;
+	framework)
+	  case $host in
+	    *-*-darwin*)
+	      case "$deplibs " in
+		*" $qarg.ltframework "*) ;;
+		*) deplibs="$deplibs $qarg.ltframework" # this is fixed later
+		   ;;
+	      esac
+	      ;;
+	  esac
+	  prev=
+	  continue
+	  ;;
+	inst_prefix)
+	  inst_prefix_dir="$arg"
+	  prev=
+	  continue
+	  ;;
+	objectlist)
+	  if test -f "$arg"; then
+	    save_arg=$arg
+	    moreargs=
+	    for fil in `cat "$save_arg"`
+	    do
+#	      moreargs="$moreargs $fil"
+	      arg=$fil
+	      # A libtool-controlled object.
+
+	      # Check to see that this really is a libtool object.
+	      if func_lalib_unsafe_p "$arg"; then
+		pic_object=
+		non_pic_object=
+
+		# Read the .lo file
+		func_source "$arg"
+
+		if test -z "$pic_object" ||
+		   test -z "$non_pic_object" ||
+		   test "$pic_object" = none &&
+		   test "$non_pic_object" = none; then
+		  func_fatal_error "cannot find name of object for \`$arg'"
+		fi
+
+		# Extract subdirectory from the argument.
+		func_dirname "$arg" "/" ""
+		xdir="$func_dirname_result"
+
+		if test "$pic_object" != none; then
+		  # Prepend the subdirectory the object is found in.
+		  pic_object="$xdir$pic_object"
+
+		  if test "$prev" = dlfiles; then
+		    if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+		      dlfiles="$dlfiles $pic_object"
+		      prev=
+		      continue
+		    else
+		      # If libtool objects are unsupported, then we need to preload.
+		      prev=dlprefiles
+		    fi
+		  fi
+
+		  # CHECK ME:  I think I busted this.  -Ossama
+		  if test "$prev" = dlprefiles; then
+		    # Preload the old-style object.
+		    dlprefiles="$dlprefiles $pic_object"
+		    prev=
+		  fi
+
+		  # A PIC object.
+		  func_append libobjs " $pic_object"
+		  arg="$pic_object"
+		fi
+
+		# Non-PIC object.
+		if test "$non_pic_object" != none; then
+		  # Prepend the subdirectory the object is found in.
+		  non_pic_object="$xdir$non_pic_object"
+
+		  # A standard non-PIC object
+		  func_append non_pic_objects " $non_pic_object"
+		  if test -z "$pic_object" || test "$pic_object" = none ; then
+		    arg="$non_pic_object"
+		  fi
+		else
+		  # If the PIC object exists, use it instead.
+		  # $xdir was prepended to $pic_object above.
+		  non_pic_object="$pic_object"
+		  func_append non_pic_objects " $non_pic_object"
+		fi
+	      else
+		# Only an error if not doing a dry-run.
+		if $opt_dry_run; then
+		  # Extract subdirectory from the argument.
+		  func_dirname "$arg" "/" ""
+		  xdir="$func_dirname_result"
+
+		  func_lo2o "$arg"
+		  pic_object=$xdir$objdir/$func_lo2o_result
+		  non_pic_object=$xdir$func_lo2o_result
+		  func_append libobjs " $pic_object"
+		  func_append non_pic_objects " $non_pic_object"
+	        else
+		  func_fatal_error "\`$arg' is not a valid libtool object"
+		fi
+	      fi
+	    done
+	  else
+	    func_fatal_error "link input file \`$arg' does not exist"
+	  fi
+	  arg=$save_arg
+	  prev=
+	  continue
+	  ;;
+	precious_regex)
+	  precious_files_regex="$arg"
+	  prev=
+	  continue
+	  ;;
+	release)
+	  release="-$arg"
+	  prev=
+	  continue
+	  ;;
+	rpath | xrpath)
+	  # We need an absolute path.
+	  case $arg in
+	  [\\/]* | [A-Za-z]:[\\/]*) ;;
+	  *)
+	    func_fatal_error "only absolute run-paths are allowed"
+	    ;;
+	  esac
+	  if test "$prev" = rpath; then
+	    case "$rpath " in
+	    *" $arg "*) ;;
+	    *) rpath="$rpath $arg" ;;
+	    esac
+	  else
+	    case "$xrpath " in
+	    *" $arg "*) ;;
+	    *) xrpath="$xrpath $arg" ;;
+	    esac
+	  fi
+	  prev=
+	  continue
+	  ;;
+	shrext)
+	  shrext_cmds="$arg"
+	  prev=
+	  continue
+	  ;;
+	weak)
+	  weak_libs="$weak_libs $arg"
+	  prev=
+	  continue
+	  ;;
+	xcclinker)
+	  linker_flags="$linker_flags $qarg"
+	  compiler_flags="$compiler_flags $qarg"
+	  prev=
+	  func_append compile_command " $qarg"
+	  func_append finalize_command " $qarg"
+	  continue
+	  ;;
+	xcompiler)
+	  compiler_flags="$compiler_flags $qarg"
+	  prev=
+	  func_append compile_command " $qarg"
+	  func_append finalize_command " $qarg"
+	  continue
+	  ;;
+	xlinker)
+	  linker_flags="$linker_flags $qarg"
+	  compiler_flags="$compiler_flags $wl$qarg"
+	  prev=
+	  func_append compile_command " $wl$qarg"
+	  func_append finalize_command " $wl$qarg"
+	  continue
+	  ;;
+	*)
+	  eval "$prev=\"\$arg\""
+	  prev=
+	  continue
+	  ;;
+	esac
+      fi # test -n "$prev"
+
+      prevarg="$arg"
+
+      case $arg in
+      -all-static)
+	if test -n "$link_static_flag"; then
+	  # See comment for -static flag below, for more details.
+	  func_append compile_command " $link_static_flag"
+	  func_append finalize_command " $link_static_flag"
+	fi
+	continue
+	;;
+
+      -allow-undefined)
+	# FIXME: remove this flag sometime in the future.
+	func_fatal_error "\`-allow-undefined' must not be used because it is the default"
+	;;
+
+      -avoid-version)
+	avoid_version=yes
+	continue
+	;;
+
+      -bindir)
+	prev=bindir
+	continue
+	;;
+
+      -dlopen)
+	prev=dlfiles
+	continue
+	;;
+
+      -dlpreopen)
+	prev=dlprefiles
+	continue
+	;;
+
+      -export-dynamic)
+	export_dynamic=yes
+	continue
+	;;
+
+      -export-symbols | -export-symbols-regex)
+	if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
+	  func_fatal_error "more than one -export-symbols argument is not allowed"
+	fi
+	if test "X$arg" = "X-export-symbols"; then
+	  prev=expsyms
+	else
+	  prev=expsyms_regex
+	fi
+	continue
+	;;
+
+      -framework)
+	prev=framework
+	continue
+	;;
+
+      -inst-prefix-dir)
+	prev=inst_prefix
+	continue
+	;;
+
+      # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
+      # so, if we see these flags, be careful not to treat them like -L
+      -L[A-Z][A-Z]*:*)
+	case $with_gcc/$host in
+	no/*-*-irix* | /*-*-irix*)
+	  func_append compile_command " $arg"
+	  func_append finalize_command " $arg"
+	  ;;
+	esac
+	continue
+	;;
+
+      -L*)
+	func_stripname '-L' '' "$arg"
+	dir=$func_stripname_result
+	if test -z "$dir"; then
+	  if test "$#" -gt 0; then
+	    func_fatal_error "no space allowed between \`-L' and \`$1'"
+	  else
+	    func_fatal_error "need path for \`-L' option"
+	  fi
+	fi
+	# We need an absolute path.
+	case $dir in
+	[\\/]* | [A-Za-z]:[\\/]*) ;;
+	*)
+	  absdir=`cd "$dir" && pwd`
+	  test -z "$absdir" && \
+	    func_fatal_error "cannot determine absolute directory name of \`$dir'"
+	  dir="$absdir"
+	  ;;
+	esac
+	case "$deplibs " in
+	*" -L$dir "*) ;;
+	*)
+	  deplibs="$deplibs -L$dir"
+	  lib_search_path="$lib_search_path $dir"
+	  ;;
+	esac
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+	  testbindir=`$ECHO "$dir" | $SED 's*/lib$*/bin*'`
+	  case :$dllsearchpath: in
+	  *":$dir:"*) ;;
+	  ::) dllsearchpath=$dir;;
+	  *) dllsearchpath="$dllsearchpath:$dir";;
+	  esac
+	  case :$dllsearchpath: in
+	  *":$testbindir:"*) ;;
+	  ::) dllsearchpath=$testbindir;;
+	  *) dllsearchpath="$dllsearchpath:$testbindir";;
+	  esac
+	  ;;
+	esac
+	continue
+	;;
+
+      -l*)
+	if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then
+	  case $host in
+	  *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*)
+	    # These systems don't actually have a C or math library (as such)
+	    continue
+	    ;;
+	  *-*-os2*)
+	    # These systems don't actually have a C library (as such)
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	    # Do not include libc due to us having libc/libc_r.
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-rhapsody* | *-*-darwin1.[012])
+	    # Rhapsody C and math libraries are in the System framework
+	    deplibs="$deplibs System.ltframework"
+	    continue
+	    ;;
+	  *-*-sco3.2v5* | *-*-sco5v6*)
+	    # Causes problems with __ctype
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+	    # Compiler inserts libc in the correct place for threads to work
+	    test "X$arg" = "X-lc" && continue
+	    ;;
+	  esac
+	elif test "X$arg" = "X-lc_r"; then
+	 case $host in
+	 *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	   # Do not include libc_r directly, use -pthread flag.
+	   continue
+	   ;;
+	 esac
+	fi
+	deplibs="$deplibs $arg"
+	continue
+	;;
+
+      -module)
+	module=yes
+	continue
+	;;
+
+      # Tru64 UNIX uses -model [arg] to determine the layout of C++
+      # classes, name mangling, and exception handling.
+      # Darwin uses the -arch flag to determine output architecture.
+      -model|-arch|-isysroot)
+	compiler_flags="$compiler_flags $arg"
+	func_append compile_command " $arg"
+	func_append finalize_command " $arg"
+	prev=xcompiler
+	continue
+	;;
+
+      -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads)
+	compiler_flags="$compiler_flags $arg"
+	func_append compile_command " $arg"
+	func_append finalize_command " $arg"
+	case "$new_inherited_linker_flags " in
+	    *" $arg "*) ;;
+	    * ) new_inherited_linker_flags="$new_inherited_linker_flags $arg" ;;
+	esac
+	continue
+	;;
+
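+      # For illustration: a flag such as -pthread reaches both the compile
+      # and finalize commands and is recorded once in
+      # $new_inherited_linker_flags, so libraries that depend on this one
+      # can inherit it through their .la files.
+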
+      -multi_module)
+	single_module="${wl}-multi_module"
+	continue
+	;;
+
+      -no-fast-install)
+	fast_install=no
+	continue
+	;;
+
+      -no-install)
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*)
+	  # The PATH hackery in wrapper scripts is required on Windows
+	  # and Darwin in order for the loader to find any dlls it needs.
+	  func_warning "\`-no-install' is ignored for $host"
+	  func_warning "assuming \`-no-fast-install' instead"
+	  fast_install=no
+	  ;;
+	*) no_install=yes ;;
+	esac
+	continue
+	;;
+
+      -no-undefined)
+	allow_undefined=no
+	continue
+	;;
+
+      -objectlist)
+	prev=objectlist
+	continue
+	;;
+
+      -o) prev=output ;;
+
+      -precious-files-regex)
+	prev=precious_regex
+	continue
+	;;
+
+      -release)
+	prev=release
+	continue
+	;;
+
+      -rpath)
+	prev=rpath
+	continue
+	;;
+
+      -R)
+	prev=xrpath
+	continue
+	;;
+
+      -R*)
+	func_stripname '-R' '' "$arg"
+	dir=$func_stripname_result
+	# We need an absolute path.
+	case $dir in
+	[\\/]* | [A-Za-z]:[\\/]*) ;;
+	*)
+	  func_fatal_error "only absolute run-paths are allowed"
+	  ;;
+	esac
+	case "$xrpath " in
+	*" $dir "*) ;;
+	*) xrpath="$xrpath $dir" ;;
+	esac
+	continue
+	;;
+
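+      # Sketch of the -R handling above (path is hypothetical):
+      # -R/usr/local/lib is collected into $xrpath exactly once, while a
+      # relative path such as -Rfoo is rejected by the absolute-path check.
+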
+      -shared)
+	# The effects of -shared are defined in a previous loop.
+	continue
+	;;
+
+      -shrext)
+	prev=shrext
+	continue
+	;;
+
+      -static | -static-libtool-libs)
+	# The effects of -static are defined in a previous loop.
+	# We used to do the same as -all-static on platforms that
+	# didn't have a PIC flag, but the assumption that the effects
+	# would be equivalent was wrong.  It would break on at least
+	# Digital Unix and AIX.
+	continue
+	;;
+
+      -thread-safe)
+	thread_safe=yes
+	continue
+	;;
+
+      -version-info)
+	prev=vinfo
+	continue
+	;;
+
+      -version-number)
+	prev=vinfo
+	vinfo_number=yes
+	continue
+	;;
+
+      -weak)
+	prev=weak
+	continue
+	;;
+
+      -Wc,*)
+	func_stripname '-Wc,' '' "$arg"
+	args=$func_stripname_result
+	arg=
+	save_ifs="$IFS"; IFS=','
+	for flag in $args; do
+	  IFS="$save_ifs"
+          func_quote_for_eval "$flag"
+	  arg="$arg $func_quote_for_eval_result"
+	  compiler_flags="$compiler_flags $func_quote_for_eval_result"
+	done
+	IFS="$save_ifs"
+	func_stripname ' ' '' "$arg"
+	arg=$func_stripname_result
+	;;
+
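+      # Sketch of the -Wc handling above (flags are hypothetical):
+      # -Wc,-fno-common,-O2 is split on commas and each piece is quoted, so
+      # compiler_flags gains "-fno-common -O2" but linker_flags is untouched.
+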
+      -Wl,*)
+	func_stripname '-Wl,' '' "$arg"
+	args=$func_stripname_result
+	arg=
+	save_ifs="$IFS"; IFS=','
+	for flag in $args; do
+	  IFS="$save_ifs"
+          func_quote_for_eval "$flag"
+	  arg="$arg $wl$func_quote_for_eval_result"
+	  compiler_flags="$compiler_flags $wl$func_quote_for_eval_result"
+	  linker_flags="$linker_flags $func_quote_for_eval_result"
+	done
+	IFS="$save_ifs"
+	func_stripname ' ' '' "$arg"
+	arg=$func_stripname_result
+	;;
+
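+      # Sketch of the -Wl handling above (flags are hypothetical):
+      # -Wl,-rpath,/usr/local/lib is split on commas; each piece is kept
+      # bare in linker_flags and prefixed with $wl for the compiler driver:
+      #   compiler_flags="... ${wl}-rpath ${wl}/usr/local/lib"
+      #   linker_flags="... -rpath /usr/local/lib"
+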
+      -Xcompiler)
+	prev=xcompiler
+	continue
+	;;
+
+      -Xlinker)
+	prev=xlinker
+	continue
+	;;
+
+      -XCClinker)
+	prev=xcclinker
+	continue
+	;;
+
+      # -msg_* for osf cc
+      -msg_*)
+	func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+
+      # Flags to be passed through unchanged, with rationale:
+      # -64, -mips[0-9]      enable 64-bit mode for the SGI compiler
+      # -r[0-9][0-9]*        specify processor for the SGI compiler
+      # -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler
+      # +DA*, +DD*           enable 64-bit mode for the HP compiler
+      # -q*                  compiler args for the IBM compiler
+      # -m*, -t[45]*, -txscale* architecture-specific flags for GCC
+      # -F/path              path to uninstalled frameworks, gcc on darwin
+      # -p, -pg, --coverage, -fprofile-*  profiling flags for GCC
+      # @file                GCC response files
+      # -tp=*                Portland pgcc target processor selection
+      -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \
+      -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*)
+        func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+        func_append compile_command " $arg"
+        func_append finalize_command " $arg"
+        compiler_flags="$compiler_flags $arg"
+        continue
+        ;;
+
+      # Some other compiler flag.
+      -* | +*)
+        func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+
+      *.$objext)
+	# A standard object.
+	objs="$objs $arg"
+	;;
+
+      *.lo)
+	# A libtool-controlled object.
+
+	# Check to see that this really is a libtool object.
+	if func_lalib_unsafe_p "$arg"; then
+	  pic_object=
+	  non_pic_object=
+
+	  # Read the .lo file
+	  func_source "$arg"
+
+	  if test -z "$pic_object" ||
+	     test -z "$non_pic_object" ||
+	     test "$pic_object" = none &&
+	     test "$non_pic_object" = none; then
+	    func_fatal_error "cannot find name of object for \`$arg'"
+	  fi
+
+	  # Extract subdirectory from the argument.
+	  func_dirname "$arg" "/" ""
+	  xdir="$func_dirname_result"
+
+	  if test "$pic_object" != none; then
+	    # Prepend the subdirectory the object is found in.
+	    pic_object="$xdir$pic_object"
+
+	    if test "$prev" = dlfiles; then
+	      if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
+		dlfiles="$dlfiles $pic_object"
+		prev=
+		continue
+	      else
+		# If libtool objects are unsupported, then we need to preload.
+		prev=dlprefiles
+	      fi
+	    fi
+
+	    # CHECK ME:  I think I busted this.  -Ossama
+	    if test "$prev" = dlprefiles; then
+	      # Preload the old-style object.
+	      dlprefiles="$dlprefiles $pic_object"
+	      prev=
+	    fi
+
+	    # A PIC object.
+	    func_append libobjs " $pic_object"
+	    arg="$pic_object"
+	  fi
+
+	  # Non-PIC object.
+	  if test "$non_pic_object" != none; then
+	    # Prepend the subdirectory the object is found in.
+	    non_pic_object="$xdir$non_pic_object"
+
+	    # A standard non-PIC object
+	    func_append non_pic_objects " $non_pic_object"
+	    if test -z "$pic_object" || test "$pic_object" = none ; then
+	      arg="$non_pic_object"
+	    fi
+	  else
+	    # If the PIC object exists, use it instead.
+	    # $xdir was prepended to $pic_object above.
+	    non_pic_object="$pic_object"
+	    func_append non_pic_objects " $non_pic_object"
+	  fi
+	else
+	  # Only an error if not doing a dry-run.
+	  if $opt_dry_run; then
+	    # Extract subdirectory from the argument.
+	    func_dirname "$arg" "/" ""
+	    xdir="$func_dirname_result"
+
+	    func_lo2o "$arg"
+	    pic_object=$xdir$objdir/$func_lo2o_result
+	    non_pic_object=$xdir$func_lo2o_result
+	    func_append libobjs " $pic_object"
+	    func_append non_pic_objects " $non_pic_object"
+	  else
+	    func_fatal_error "\`$arg' is not a valid libtool object"
+	  fi
+	fi
+	;;
+
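+      # For reference, a .lo file is itself sourceable shell; the func_source
+      # call above picks up the two object names it defines, e.g.
+      # (illustrative contents):
+      #   # foo.lo - a libtool object file
+      #   pic_object='.libs/foo.o'
+      #   non_pic_object='foo.o'
+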
+      *.$libext)
+	# An archive.
+	deplibs="$deplibs $arg"
+	old_deplibs="$old_deplibs $arg"
+	continue
+	;;
+
+      *.la)
+	# A libtool-controlled library.
+
+	if test "$prev" = dlfiles; then
+	  # This library was specified with -dlopen.
+	  dlfiles="$dlfiles $arg"
+	  prev=
+	elif test "$prev" = dlprefiles; then
+	  # The library was specified with -dlpreopen.
+	  dlprefiles="$dlprefiles $arg"
+	  prev=
+	else
+	  deplibs="$deplibs $arg"
+	fi
+	continue
+	;;
+
+      # Some other compiler argument.
+      *)
+	# Unknown arguments in both finalize_command and compile_command need
+	# to be aesthetically quoted because they are evaled later.
+	func_quote_for_eval "$arg"
+	arg="$func_quote_for_eval_result"
+	;;
+      esac # arg
+
+      # Now actually substitute the argument into the commands.
+      if test -n "$arg"; then
+	func_append compile_command " $arg"
+	func_append finalize_command " $arg"
+      fi
+    done # argument parsing loop
+
+    test -n "$prev" && \
+      func_fatal_help "the \`$prevarg' option requires an argument"
+
+    if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then
+      eval arg=\"$export_dynamic_flag_spec\"
+      func_append compile_command " $arg"
+      func_append finalize_command " $arg"
+    fi
+
+    oldlibs=
+    # calculate the name of the file, without its directory
+    func_basename "$output"
+    outputname="$func_basename_result"
+    libobjs_save="$libobjs"
+
+    if test -n "$shlibpath_var"; then
+      # get the directories listed in $shlibpath_var
+      eval shlib_search_path=\`\$ECHO \"\${$shlibpath_var}\" \| \$SED \'s/:/ /g\'\`
+    else
+      shlib_search_path=
+    fi
+    eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
+    eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
+
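+    # Sketch: with shlibpath_var=LD_LIBRARY_PATH (typical for ELF hosts) and
+    # LD_LIBRARY_PATH=/opt/a:/opt/b (hypothetical), the eval above yields
+    # shlib_search_path="/opt/a /opt/b".
+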
+    func_dirname "$output" "/" ""
+    output_objdir="$func_dirname_result$objdir"
+    # Create the object directory.
+    func_mkdir_p "$output_objdir"
+
+    # Determine the type of output
+    case $output in
+    "")
+      func_fatal_help "you must specify an output file"
+      ;;
+    *.$libext) linkmode=oldlib ;;
+    *.lo | *.$objext) linkmode=obj ;;
+    *.la) linkmode=lib ;;
+    *) linkmode=prog ;; # Anything else should be a program.
+    esac
+
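+    # For example (names hypothetical): libfoo.la -> lib, libfoo.a -> oldlib,
+    # foo.lo or foo.o -> obj, and anything else (say `foo') -> prog.
+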
+    specialdeplibs=
+
+    libs=
+    # Find all interdependent deplibs by searching for libraries
+    # that are linked more than once (e.g. -la -lb -la)
+    for deplib in $deplibs; do
+      if $opt_duplicate_deps ; then
+	case "$libs " in
+	*" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
+	esac
+      fi
+      libs="$libs $deplib"
+    done
+
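+    # Sketch: given hypothetical deplibs `-la -lb -la', the second -la is
+    # already present in $libs, so -la lands in $specialdeplibs and its
+    # duplicate survives the later de-duplication pass.
+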
+    if test "$linkmode" = lib; then
+      libs="$predeps $libs $compiler_lib_search_path $postdeps"
+
+      # Compute libraries that are listed more than once in $predeps
+      # $postdeps and mark them as special (i.e., whose duplicates are
+      # not to be eliminated).
+      pre_post_deps=
+      if $opt_duplicate_compiler_generated_deps; then
+	for pre_post_dep in $predeps $postdeps; do
+	  case "$pre_post_deps " in
+	  *" $pre_post_dep "*) specialdeplibs="$specialdeplibs $pre_post_deps" ;;
+	  esac
+	  pre_post_deps="$pre_post_deps $pre_post_dep"
+	done
+      fi
+      pre_post_deps=
+    fi
+
+    deplibs=
+    newdependency_libs=
+    newlib_search_path=
+    need_relink=no # whether we're linking any uninstalled libtool libraries
+    notinst_deplibs= # not-installed libtool libraries
+    notinst_path= # paths that contain not-installed libtool libraries
+
+    case $linkmode in
+    lib)
+	passes="conv dlpreopen link"
+	for file in $dlfiles $dlprefiles; do
+	  case $file in
+	  *.la) ;;
+	  *)
+	    func_fatal_help "libraries can \`-dlopen' only libtool libraries: $file"
+	    ;;
+	  esac
+	done
+	;;
+    prog)
+	compile_deplibs=
+	finalize_deplibs=
+	alldeplibs=no
+	newdlfiles=
+	newdlprefiles=
+	passes="conv scan dlopen dlpreopen link"
+	;;
+    *)  passes="conv"
+	;;
+    esac
+
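+    # So a library link runs conv -> dlpreopen -> link, a program runs
+    # conv -> scan -> dlopen -> dlpreopen -> link, and archives and objects
+    # only need the conv pass.
+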
+    for pass in $passes; do
+      # The preopen pass in lib mode reverses $deplibs; put it back here
+      # so that, for instance, -L comes before the libs that need it...
+      if test "$linkmode,$pass" = "lib,link"; then
+	## FIXME: Find the place where the list is rebuilt in the wrong
+	##        order, and fix it there properly
+        tmp_deplibs=
+	for deplib in $deplibs; do
+	  tmp_deplibs="$deplib $tmp_deplibs"
+	done
+	deplibs="$tmp_deplibs"
+      fi
+
+      if test "$linkmode,$pass" = "lib,link" ||
+	 test "$linkmode,$pass" = "prog,scan"; then
+	libs="$deplibs"
+	deplibs=
+      fi
+      if test "$linkmode" = prog; then
+	case $pass in
+	dlopen) libs="$dlfiles" ;;
+	dlpreopen) libs="$dlprefiles" ;;
+	link) libs="$deplibs %DEPLIBS% $dependency_libs" ;;
+	esac
+      fi
+      if test "$linkmode,$pass" = "lib,dlpreopen"; then
+	# Collect and forward deplibs of preopened libtool libs
+	for lib in $dlprefiles; do
+	  # Ignore non-libtool-libs
+	  dependency_libs=
+	  case $lib in
+	  *.la)	func_source "$lib" ;;
+	  esac
+
+	  # Collect preopened libtool deplibs, except any this library
+	  # has declared as weak libs
+	  for deplib in $dependency_libs; do
+	    func_basename "$deplib"
+	    deplib_base=$func_basename_result
+	    case " $weak_libs " in
+	    *" $deplib_base "*) ;;
+	    *) deplibs="$deplibs $deplib" ;;
+	    esac
+	  done
+	done
+	libs="$dlprefiles"
+      fi
+      if test "$pass" = dlopen; then
+	# Collect dlpreopened libraries
+	save_deplibs="$deplibs"
+	deplibs=
+      fi
+
+      for deplib in $libs; do
+	lib=
+	found=no
+	case $deplib in
+	-mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads)
+	  if test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$deplib $compile_deplibs"
+	    finalize_deplibs="$deplib $finalize_deplibs"
+	  else
+	    compiler_flags="$compiler_flags $deplib"
+	    if test "$linkmode" = lib ; then
+		case "$new_inherited_linker_flags " in
+		    *" $deplib "*) ;;
+		    * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;;
+		esac
+	    fi
+	  fi
+	  continue
+	  ;;
+	-l*)
+	  if test "$linkmode" != lib && test "$linkmode" != prog; then
+	    func_warning "\`-l' is ignored for archives/objects"
+	    continue
+	  fi
+	  func_stripname '-l' '' "$deplib"
+	  name=$func_stripname_result
+	  if test "$linkmode" = lib; then
+	    searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path"
+	  else
+	    searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path"
+	  fi
+	  for searchdir in $searchdirs; do
+	    for search_ext in .la $std_shrext .so .a; do
+	      # Search the libtool library
+	      lib="$searchdir/lib${name}${search_ext}"
+	      if test -f "$lib"; then
+		if test "$search_ext" = ".la"; then
+		  found=yes
+		else
+		  found=no
+		fi
+		break 2
+	      fi
+	    done
+	  done
+	  if test "$found" != yes; then
+	    # deplib doesn't seem to be a libtool library
+	    if test "$linkmode,$pass" = "prog,link"; then
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    else
+	      deplibs="$deplib $deplibs"
+	      test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+	    fi
+	    continue
+	  else # deplib is a libtool library
+	    # If $allow_libtool_libs_with_static_runtimes and $deplib is a stdlib,
+	    # we need to do some special things here rather than later.
+	    if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+	      case " $predeps $postdeps " in
+	      *" $deplib "*)
+		if func_lalib_p "$lib"; then
+		  library_names=
+		  old_library=
+		  func_source "$lib"
+		  for l in $old_library $library_names; do
+		    ll="$l"
+		  done
+		  if test "X$ll" = "X$old_library" ; then # only static version available
+		    found=no
+		    func_dirname "$lib" "" "."
+		    ladir="$func_dirname_result"
+		    lib=$ladir/$old_library
+		    if test "$linkmode,$pass" = "prog,link"; then
+		      compile_deplibs="$deplib $compile_deplibs"
+		      finalize_deplibs="$deplib $finalize_deplibs"
+		    else
+		      deplibs="$deplib $deplibs"
+		      test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+		    fi
+		    continue
+		  fi
+		fi
+		;;
+	      *) ;;
+	      esac
+	    fi
+	  fi
+	  ;; # -l
+	*.ltframework)
+	  if test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$deplib $compile_deplibs"
+	    finalize_deplibs="$deplib $finalize_deplibs"
+	  else
+	    deplibs="$deplib $deplibs"
+	    if test "$linkmode" = lib ; then
+		case "$new_inherited_linker_flags " in
+		    *" $deplib "*) ;;
+		    * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;;
+		esac
+	    fi
+	  fi
+	  continue
+	  ;;
+	-L*)
+	  case $linkmode in
+	  lib)
+	    deplibs="$deplib $deplibs"
+	    test "$pass" = conv && continue
+	    newdependency_libs="$deplib $newdependency_libs"
+	    func_stripname '-L' '' "$deplib"
+	    newlib_search_path="$newlib_search_path $func_stripname_result"
+	    ;;
+	  prog)
+	    if test "$pass" = conv; then
+	      deplibs="$deplib $deplibs"
+	      continue
+	    fi
+	    if test "$pass" = scan; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    fi
+	    func_stripname '-L' '' "$deplib"
+	    newlib_search_path="$newlib_search_path $func_stripname_result"
+	    ;;
+	  *)
+	    func_warning "\`-L' is ignored for archives/objects"
+	    ;;
+	  esac # linkmode
+	  continue
+	  ;; # -L
+	-R*)
+	  if test "$pass" = link; then
+	    func_stripname '-R' '' "$deplib"
+	    dir=$func_stripname_result
+	    # Make sure the xrpath contains only unique directories.
+	    case "$xrpath " in
+	    *" $dir "*) ;;
+	    *) xrpath="$xrpath $dir" ;;
+	    esac
+	  fi
+	  deplibs="$deplib $deplibs"
+	  continue
+	  ;;
+	*.la) lib="$deplib" ;;
+	*.$libext)
+	  if test "$pass" = conv; then
+	    deplibs="$deplib $deplibs"
+	    continue
+	  fi
+	  case $linkmode in
+	  lib)
+	    # Linking convenience modules into shared libraries is allowed,
+	    # but linking other static libraries is non-portable.
+	    case " $dlpreconveniencelibs " in
+	    *" $deplib "*) ;;
+	    *)
+	      valid_a_lib=no
+	      case $deplibs_check_method in
+		match_pattern*)
+		  set dummy $deplibs_check_method; shift
+		  match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+		  if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \
+		    | $EGREP "$match_pattern_regex" > /dev/null; then
+		    valid_a_lib=yes
+		  fi
+		;;
+		pass_all)
+		  valid_a_lib=yes
+		;;
+	      esac
+	      if test "$valid_a_lib" != yes; then
+		echo
+		$ECHO "*** Warning: Trying to link with static lib archive $deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because the file extensions .$libext of this argument makes me believe"
+		echo "*** that it is just a static archive that I should not use here."
+	      else
+		echo
+		$ECHO "*** Warning: Linking the shared library $output against the"
+		$ECHO "*** static library $deplib is not portable!"
+		deplibs="$deplib $deplibs"
+	      fi
+	      ;;
+	    esac
+	    continue
+	    ;;
+	  prog)
+	    if test "$pass" != link; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    fi
+	    continue
+	    ;;
+	  esac # linkmode
+	  ;; # *.$libext
+	*.lo | *.$objext)
+	  if test "$pass" = conv; then
+	    deplibs="$deplib $deplibs"
+	  elif test "$linkmode" = prog; then
+	    if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
+	      # If there is no dlopen support or we're linking statically,
+	      # we need to preload.
+	      newdlprefiles="$newdlprefiles $deplib"
+	      compile_deplibs="$deplib $compile_deplibs"
+	      finalize_deplibs="$deplib $finalize_deplibs"
+	    else
+	      newdlfiles="$newdlfiles $deplib"
+	    fi
+	  fi
+	  continue
+	  ;;
+	%DEPLIBS%)
+	  alldeplibs=yes
+	  continue
+	  ;;
+	esac # case $deplib
+
+	if test "$found" = yes || test -f "$lib"; then :
+	else
+	  func_fatal_error "cannot find the library \`$lib' or unhandled argument \`$deplib'"
+	fi
+
+	# Check to see that this really is a libtool archive.
+	func_lalib_unsafe_p "$lib" \
+	  || func_fatal_error "\`$lib' is not a valid libtool archive"
+
+	func_dirname "$lib" "" "."
+	ladir="$func_dirname_result"
+
+	dlname=
+	dlopen=
+	dlpreopen=
+	libdir=
+	library_names=
+	old_library=
+	inherited_linker_flags=
+	# If the library was installed with an old release of libtool,
+	# it will not redefine the variables `installed' or `shouldnotlink',
+	# so provide defaults for them here.
+	installed=yes
+	shouldnotlink=no
+	avoidtemprpath=
+
+
+	# Read the .la file
+	func_source "$lib"
+
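+	# For reference, a .la file is sourceable shell defining the variables
+	# initialized above, e.g. (illustrative values):
+	#   dlname='libfoo.so.1'
+	#   library_names='libfoo.so.1.0.0 libfoo.so.1 libfoo.so'
+	#   old_library='libfoo.a'
+	#   libdir='/usr/local/lib'
+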
+	# Convert "-framework foo" to "foo.ltframework"
+	if test -n "$inherited_linker_flags"; then
+	  tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'`
+	  for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do
+	    case " $new_inherited_linker_flags " in
+	      *" $tmp_inherited_linker_flag "*) ;;
+	      *) new_inherited_linker_flags="$new_inherited_linker_flags $tmp_inherited_linker_flag";;
+	    esac
+	  done
+	fi
+	dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	if test "$linkmode,$pass" = "lib,link" ||
+	   test "$linkmode,$pass" = "prog,scan" ||
+	   { test "$linkmode" != prog && test "$linkmode" != lib; }; then
+	  test -n "$dlopen" && dlfiles="$dlfiles $dlopen"
+	  test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen"
+	fi
+
+	if test "$pass" = conv; then
+	  # Only check for convenience libraries
+	  deplibs="$lib $deplibs"
+	  if test -z "$libdir"; then
+	    if test -z "$old_library"; then
+	      func_fatal_error "cannot find name of link library for \`$lib'"
+	    fi
+	    # It is a libtool convenience library, so add in its objects.
+	    convenience="$convenience $ladir/$objdir/$old_library"
+	    old_convenience="$old_convenience $ladir/$objdir/$old_library"
+	  elif test "$linkmode" != prog && test "$linkmode" != lib; then
+	    func_fatal_error "\`$lib' is not a convenience library"
+	  fi
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    deplibs="$deplib $deplibs"
+	    if $opt_duplicate_deps ; then
+	      case "$tmp_libs " in
+	      *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
+	      esac
+	    fi
+	    tmp_libs="$tmp_libs $deplib"
+	  done
+	  continue
+	fi # $pass = conv
+
+
+	# Get the name of the library we link against.
+	linklib=
+	for l in $old_library $library_names; do
+	  linklib="$l"
+	done
+	if test -z "$linklib"; then
+	  func_fatal_error "cannot find name of link library for \`$lib'"
+	fi
+
+	# This library was specified with -dlopen.
+	if test "$pass" = dlopen; then
+	  if test -z "$libdir"; then
+	    func_fatal_error "cannot -dlopen a convenience library: \`$lib'"
+	  fi
+	  if test -z "$dlname" ||
+	     test "$dlopen_support" != yes ||
+	     test "$build_libtool_libs" = no; then
+	    # If there is no dlname, no dlopen support or we're linking
+	    # statically, we need to preload.  We also need to preload any
+	    # dependent libraries so libltdl's deplib preloader doesn't
+	    # bomb out in the load deplibs phase.
+	    dlprefiles="$dlprefiles $lib $dependency_libs"
+	  else
+	    newdlfiles="$newdlfiles $lib"
+	  fi
+	  continue
+	fi # $pass = dlopen
+
+	# We need an absolute path.
+	case $ladir in
+	[\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;;
+	*)
+	  abs_ladir=`cd "$ladir" && pwd`
+	  if test -z "$abs_ladir"; then
+	    func_warning "cannot determine absolute directory name of \`$ladir'"
+	    func_warning "passing it literally to the linker, although it might fail"
+	    abs_ladir="$ladir"
+	  fi
+	  ;;
+	esac
+	func_basename "$lib"
+	laname="$func_basename_result"
+
+	# Find the relevant object directory and library name.
+	if test "X$installed" = Xyes; then
+	  if test ! -f "$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+	    func_warning "library \`$lib' was moved."
+	    dir="$ladir"
+	    absdir="$abs_ladir"
+	    libdir="$abs_ladir"
+	  else
+	    dir="$libdir"
+	    absdir="$libdir"
+	  fi
+	  test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes
+	else
+	  if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then
+	    dir="$ladir"
+	    absdir="$abs_ladir"
+	    # Remove this search path later
+	    notinst_path="$notinst_path $abs_ladir"
+	  else
+	    dir="$ladir/$objdir"
+	    absdir="$abs_ladir/$objdir"
+	    # Remove this search path later
+	    notinst_path="$notinst_path $abs_ladir"
+	  fi
+	fi # $installed = yes
+	func_stripname 'lib' '.la' "$laname"
+	name=$func_stripname_result
+
+	# This library was specified with -dlpreopen.
+	if test "$pass" = dlpreopen; then
+	  if test -z "$libdir" && test "$linkmode" = prog; then
+	    func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'"
+	  fi
+	  # Prefer using a static library (so that no silly _DYNAMIC symbols
+	  # are required to link).
+	  if test -n "$old_library"; then
+	    newdlprefiles="$newdlprefiles $dir/$old_library"
+	    # Keep a list of preopened convenience libraries to check
+	    # that they are being used correctly in the link pass.
+	    test -z "$libdir" && \
+		dlpreconveniencelibs="$dlpreconveniencelibs $dir/$old_library"
+	  # Otherwise, use the dlname, so that lt_dlopen finds it.
+	  elif test -n "$dlname"; then
+	    newdlprefiles="$newdlprefiles $dir/$dlname"
+	  else
+	    newdlprefiles="$newdlprefiles $dir/$linklib"
+	  fi
+	fi # $pass = dlpreopen
+
+	if test -z "$libdir"; then
+	  # Link the convenience library
+	  if test "$linkmode" = lib; then
+	    deplibs="$dir/$old_library $deplibs"
+	  elif test "$linkmode,$pass" = "prog,link"; then
+	    compile_deplibs="$dir/$old_library $compile_deplibs"
+	    finalize_deplibs="$dir/$old_library $finalize_deplibs"
+	  else
+	    deplibs="$lib $deplibs" # used for prog,scan pass
+	  fi
+	  continue
+	fi
+
+
+	if test "$linkmode" = prog && test "$pass" != link; then
+	  newlib_search_path="$newlib_search_path $ladir"
+	  deplibs="$lib $deplibs"
+
+	  linkalldeplibs=no
+	  if test "$link_all_deplibs" != no || test -z "$library_names" ||
+	     test "$build_libtool_libs" = no; then
+	    linkalldeplibs=yes
+	  fi
+
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    case $deplib in
+	    -L*) func_stripname '-L' '' "$deplib"
+	         newlib_search_path="$newlib_search_path $func_stripname_result"
+		 ;;
+	    esac
+	    # Need to link against all dependency_libs?
+	    if test "$linkalldeplibs" = yes; then
+	      deplibs="$deplib $deplibs"
+	    else
+	      # Need to hardcode shared library paths
+	      # and/or link against static libraries
+	      newdependency_libs="$deplib $newdependency_libs"
+	    fi
+	    if $opt_duplicate_deps ; then
+	      case "$tmp_libs " in
+	      *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
+	      esac
+	    fi
+	    tmp_libs="$tmp_libs $deplib"
+	  done # for deplib
+	  continue
+	fi # $linkmode = prog...
+
+	if test "$linkmode,$pass" = "prog,link"; then
+	  if test -n "$library_names" &&
+	     { { test "$prefer_static_libs" = no ||
+	         test "$prefer_static_libs,$installed" = "built,yes"; } ||
+	       test -z "$old_library"; }; then
+	    # We need to hardcode the library path
+	    if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then
+	      # Make sure the rpath contains only unique directories.
+	      case "$temp_rpath:" in
+	      *"$absdir:"*) ;;
+	      *) temp_rpath="$temp_rpath$absdir:" ;;
+	      esac
+	    fi
+
+	    # Hardcode the library path.
+	    # Skip directories that are in the system default run-time
+	    # search path.
+	    case " $sys_lib_dlsearch_path " in
+	    *" $absdir "*) ;;
+	    *)
+	      case "$compile_rpath " in
+	      *" $absdir "*) ;;
+	      *) compile_rpath="$compile_rpath $absdir"
+	      esac
+	      ;;
+	    esac
+	    case " $sys_lib_dlsearch_path " in
+	    *" $libdir "*) ;;
+	    *)
+	      case "$finalize_rpath " in
+	      *" $libdir "*) ;;
+	      *) finalize_rpath="$finalize_rpath $libdir"
+	      esac
+	      ;;
+	    esac
+	  fi # $linkmode,$pass = prog,link...
+
+	  if test "$alldeplibs" = yes &&
+	     { test "$deplibs_check_method" = pass_all ||
+	       { test "$build_libtool_libs" = yes &&
+		 test -n "$library_names"; }; }; then
+	    # We only need to search for static libraries
+	    continue
+	  fi
+	fi
+
+	link_static=no # Whether the deplib will be linked statically
+	use_static_libs=$prefer_static_libs
+	if test "$use_static_libs" = built && test "$installed" = yes; then
+	  use_static_libs=no
+	fi
+	if test -n "$library_names" &&
+	   { test "$use_static_libs" = no || test -z "$old_library"; }; then
+	  case $host in
+	  *cygwin* | *mingw* | *cegcc*)
+	      # No point in relinking DLLs because paths are not encoded
+	      notinst_deplibs="$notinst_deplibs $lib"
+	      need_relink=no
+	    ;;
+	  *)
+	    if test "$installed" = no; then
+	      notinst_deplibs="$notinst_deplibs $lib"
+	      need_relink=yes
+	    fi
+	    ;;
+	  esac
+	  # This is a shared library
+
+	  # Warn about portability: we can't link against -module libraries on
+	  # some systems (darwin).  Don't bleat about dlopened modules, though!
+	  dlopenmodule=""
+	  for dlpremoduletest in $dlprefiles; do
+	    if test "X$dlpremoduletest" = "X$lib"; then
+	      dlopenmodule="$dlpremoduletest"
+	      break
+	    fi
+	  done
+	  if test -z "$dlopenmodule" && test "$shouldnotlink" = yes && test "$pass" = link; then
+	    echo
+	    if test "$linkmode" = prog; then
+	      $ECHO "*** Warning: Linking the executable $output against the loadable module"
+	    else
+	      $ECHO "*** Warning: Linking the shared library $output against the loadable module"
+	    fi
+	    $ECHO "*** $linklib is not portable!"
+	  fi
+	  if test "$linkmode" = lib &&
+	     test "$hardcode_into_libs" = yes; then
+	    # Hardcode the library path.
+	    # Skip directories that are in the system default run-time
+	    # search path.
+	    case " $sys_lib_dlsearch_path " in
+	    *" $absdir "*) ;;
+	    *)
+	      case "$compile_rpath " in
+	      *" $absdir "*) ;;
+	      *) compile_rpath="$compile_rpath $absdir"
+	      esac
+	      ;;
+	    esac
+	    case " $sys_lib_dlsearch_path " in
+	    *" $libdir "*) ;;
+	    *)
+	      case "$finalize_rpath " in
+	      *" $libdir "*) ;;
+	      *) finalize_rpath="$finalize_rpath $libdir"
+	      esac
+	      ;;
+	    esac
+	  fi
+
+	  if test -n "$old_archive_from_expsyms_cmds"; then
+	    # figure out the soname
+	    set dummy $library_names
+	    shift
+	    realname="$1"
+	    shift
+	    libname=`eval "\\$ECHO \"$libname_spec\""`
+	    # Use the dlname if we got it; it's perfectly good.
+	    if test -n "$dlname"; then
+	      soname="$dlname"
+	    elif test -n "$soname_spec"; then
+	      # Windows hosts fold the major version into the soname suffix.
+	      case $host in
+	      *cygwin* | mingw* | *cegcc*)
+	        func_arith $current - $age
+		major=$func_arith_result
+		versuffix="-$major"
+		;;
+	      esac
+	      eval soname=\"$soname_spec\"
+	    else
+	      soname="$realname"
+	    fi
+
+	    # Make a new name for the extract_expsyms_cmds to use
+	    soroot="$soname"
+	    func_basename "$soroot"
+	    soname="$func_basename_result"
+	    func_stripname 'lib' '.dll' "$soname"
+	    newlib=libimp-$func_stripname_result.a
+
+	    # If the library has no export list, then create one now
+	    if test -f "$output_objdir/$soname-def"; then :
+	    else
+	      func_verbose "extracting exported symbol list from \`$soname'"
+	      func_execute_cmds "$extract_expsyms_cmds" 'exit $?'
+	    fi
+
+	    # Create $newlib
+	    if test -f "$output_objdir/$newlib"; then :; else
+	      func_verbose "generating import library for \`$soname'"
+	      func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?'
+	    fi
+	    # make sure the library variables are pointing to the new library
+	    dir=$output_objdir
+	    linklib=$newlib
+	  fi # test -n "$old_archive_from_expsyms_cmds"
+
+	  if test "$linkmode" = prog || test "$mode" != relink; then
+	    add_shlibpath=
+	    add_dir=
+	    add=
+	    lib_linked=yes
+	    case $hardcode_action in
+	    immediate | unsupported)
+	      if test "$hardcode_direct" = no; then
+		add="$dir/$linklib"
+		case $host in
+		  *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;;
+		  *-*-sysv4*uw2*) add_dir="-L$dir" ;;
+		  *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \
+		    *-*-unixware7*) add_dir="-L$dir" ;;
+		  *-*-darwin* )
+		    # If the lib is a (non-dlopened) module then we cannot
+		    # link against it; someone is ignoring the earlier warnings.
+		    if /usr/bin/file -L $add 2> /dev/null |
+			 $GREP ": [^:]* bundle" >/dev/null ; then
+		      if test "X$dlopenmodule" != "X$lib"; then
+			$ECHO "*** Warning: lib $linklib is a module, not a shared library"
+			if test -z "$old_library" ; then
+			  echo
+			  echo "*** And there doesn't seem to be a static archive available"
+			  echo "*** The link will probably fail, sorry"
+			else
+			  add="$dir/$old_library"
+			fi
+		      elif test -n "$old_library"; then
+			add="$dir/$old_library"
+		      fi
+		    fi
+		esac
+	      elif test "$hardcode_minus_L" = no; then
+		case $host in
+		*-*-sunos*) add_shlibpath="$dir" ;;
+		esac
+		add_dir="-L$dir"
+		add="-l$name"
+	      elif test "$hardcode_shlibpath_var" = no; then
+		add_shlibpath="$dir"
+		add="-l$name"
+	      else
+		lib_linked=no
+	      fi
+	      ;;
+	    relink)
+	      if test "$hardcode_direct" = yes &&
+	         test "$hardcode_direct_absolute" = no; then
+		add="$dir/$linklib"
+	      elif test "$hardcode_minus_L" = yes; then
+		add_dir="-L$dir"
+		# Try looking first in the location we're being installed to.
+		if test -n "$inst_prefix_dir"; then
+		  case $libdir in
+		    [\\/]*)
+		      add_dir="$add_dir -L$inst_prefix_dir$libdir"
+		      ;;
+		  esac
+		fi
+		add="-l$name"
+	      elif test "$hardcode_shlibpath_var" = yes; then
+		add_shlibpath="$dir"
+		add="-l$name"
+	      else
+		lib_linked=no
+	      fi
+	      ;;
+	    *) lib_linked=no ;;
+	    esac
+
+	    if test "$lib_linked" != yes; then
+	      func_fatal_configuration "unsupported hardcode properties"
+	    fi
+
+	    if test -n "$add_shlibpath"; then
+	      case :$compile_shlibpath: in
+	      *":$add_shlibpath:"*) ;;
+	      *) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;;
+	      esac
+	    fi
+	    if test "$linkmode" = prog; then
+	      test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
+	      test -n "$add" && compile_deplibs="$add $compile_deplibs"
+	    else
+	      test -n "$add_dir" && deplibs="$add_dir $deplibs"
+	      test -n "$add" && deplibs="$add $deplibs"
+	      if test "$hardcode_direct" != yes &&
+		 test "$hardcode_minus_L" != yes &&
+		 test "$hardcode_shlibpath_var" = yes; then
+		case :$finalize_shlibpath: in
+		*":$libdir:"*) ;;
+		*) finalize_shlibpath="$finalize_shlibpath$libdir:" ;;
+		esac
+	      fi
+	    fi
+	  fi
+
+	  if test "$linkmode" = prog || test "$mode" = relink; then
+	    add_shlibpath=
+	    add_dir=
+	    add=
+	    # Finalize command for both is simple: just hardcode it.
+	    if test "$hardcode_direct" = yes &&
+	       test "$hardcode_direct_absolute" = no; then
+	      add="$libdir/$linklib"
+	    elif test "$hardcode_minus_L" = yes; then
+	      add_dir="-L$libdir"
+	      add="-l$name"
+	    elif test "$hardcode_shlibpath_var" = yes; then
+	      case :$finalize_shlibpath: in
+	      *":$libdir:"*) ;;
+	      *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;;
+	      esac
+	      add="-l$name"
+	    elif test "$hardcode_automatic" = yes; then
+	      if test -n "$inst_prefix_dir" &&
+		 test -f "$inst_prefix_dir$libdir/$linklib" ; then
+		add="$inst_prefix_dir$libdir/$linklib"
+	      else
+		add="$libdir/$linklib"
+	      fi
+	    else
+	      # We cannot seem to hardcode it; guess we'll fake it.
+	      add_dir="-L$libdir"
+	      # Try looking first in the location we're being installed to.
+	      if test -n "$inst_prefix_dir"; then
+		case $libdir in
+		  [\\/]*)
+		    add_dir="$add_dir -L$inst_prefix_dir$libdir"
+		    ;;
+		esac
+	      fi
+	      add="-l$name"
+	    fi
+
+	    if test "$linkmode" = prog; then
+	      test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
+	      test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
+	    else
+	      test -n "$add_dir" && deplibs="$add_dir $deplibs"
+	      test -n "$add" && deplibs="$add $deplibs"
+	    fi
+	  fi
+	elif test "$linkmode" = prog; then
+	  # Here we assume that one of hardcode_direct or hardcode_minus_L
+	  # is not unsupported.  This is valid on all known static and
+	  # shared platforms.
+	  if test "$hardcode_direct" != unsupported; then
+	    test -n "$old_library" && linklib="$old_library"
+	    compile_deplibs="$dir/$linklib $compile_deplibs"
+	    finalize_deplibs="$dir/$linklib $finalize_deplibs"
+	  else
+	    compile_deplibs="-l$name -L$dir $compile_deplibs"
+	    finalize_deplibs="-l$name -L$dir $finalize_deplibs"
+	  fi
+	elif test "$build_libtool_libs" = yes; then
+	  # Not a shared library
+	  if test "$deplibs_check_method" != pass_all; then
+	    # We're trying to link a shared library against a static one,
+	    # but the system doesn't support it.
+
+	    # Just print a warning and add the library to dependency_libs so
+	    # that the program can be linked against the static library.
+	    echo
+	    $ECHO "*** Warning: This system can not link to static lib archive $lib."
+	    echo "*** I have the capability to make that library automatically link in when"
+	    echo "*** you link to this library.  But I can only do this if you have a"
+	    echo "*** shared version of the library, which you do not appear to have."
+	    if test "$module" = yes; then
+	      echo "*** But as you try to build a module library, libtool will still create "
+	      echo "*** a static module, that should work as long as the dlopening application"
+	      echo "*** is linked with the -dlopen flag to resolve symbols at runtime."
+	      if test -z "$global_symbol_pipe"; then
+		echo
+		echo "*** However, this would only work if libtool was able to extract symbol"
+		echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
+		echo "*** not find such a program.  So, this module is probably useless."
+		echo "*** \`nm' from GNU binutils and a full rebuild may help."
+	      fi
+	      if test "$build_old_libs" = no; then
+		build_libtool_libs=module
+		build_old_libs=yes
+	      else
+		build_libtool_libs=no
+	      fi
+	    fi
+	  else
+	    deplibs="$dir/$old_library $deplibs"
+	    link_static=yes
+	  fi
+	fi # link shared/static library?
+
+	if test "$linkmode" = lib; then
+	  if test -n "$dependency_libs" &&
+	     { test "$hardcode_into_libs" != yes ||
+	       test "$build_old_libs" = yes ||
+	       test "$link_static" = yes; }; then
+	    # Extract -R from dependency_libs
+	    temp_deplibs=
+	    for libdir in $dependency_libs; do
+	      case $libdir in
+	      -R*) func_stripname '-R' '' "$libdir"
+	           temp_xrpath=$func_stripname_result
+		   case " $xrpath " in
+		   *" $temp_xrpath "*) ;;
+		   *) xrpath="$xrpath $temp_xrpath";;
+		   esac;;
+	      *) temp_deplibs="$temp_deplibs $libdir";;
+	      esac
+	    done
+	    dependency_libs="$temp_deplibs"
+	  fi
+
+	  newlib_search_path="$newlib_search_path $absdir"
+	  # Link against this library
+	  test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
+	  # ... and its dependency_libs
+	  tmp_libs=
+	  for deplib in $dependency_libs; do
+	    newdependency_libs="$deplib $newdependency_libs"
+	    if $opt_duplicate_deps ; then
+	      case "$tmp_libs " in
+	      *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
+	      esac
+	    fi
+	    tmp_libs="$tmp_libs $deplib"
+	  done
+
+	  if test "$link_all_deplibs" != no; then
+	    # Add the search paths of all dependency libraries
+	    for deplib in $dependency_libs; do
+	      path=
+	      case $deplib in
+	      -L*) path="$deplib" ;;
+	      *.la)
+	        func_dirname "$deplib" "" "."
+		dir="$func_dirname_result"
+		# We need an absolute path.
+		case $dir in
+		[\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;;
+		*)
+		  absdir=`cd "$dir" && pwd`
+		  if test -z "$absdir"; then
+		    func_warning "cannot determine absolute directory name of \`$dir'"
+		    absdir="$dir"
+		  fi
+		  ;;
+		esac
+		if $GREP "^installed=no" $deplib > /dev/null; then
+		case $host in
+		*-*-darwin*)
+		  depdepl=
+		  eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
+		  if test -n "$deplibrary_names" ; then
+		    for tmp in $deplibrary_names ; do
+		      depdepl=$tmp
+		    done
+		    if test -f "$absdir/$objdir/$depdepl" ; then
+		      depdepl="$absdir/$objdir/$depdepl"
+		      darwin_install_name=`${OTOOL} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
+		      if test -z "$darwin_install_name"; then
+		        darwin_install_name=`${OTOOL64} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
+		      fi
+		      compiler_flags="$compiler_flags ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}"
+		      linker_flags="$linker_flags -dylib_file ${darwin_install_name}:${depdepl}"
+		      path=
+		    fi
+		  fi
+		  ;;
+		*)
+		  path="-L$absdir/$objdir"
+		  ;;
+		esac
+		else
+		  eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
+		  test -z "$libdir" && \
+		    func_fatal_error "\`$deplib' is not a valid libtool archive"
+		  test "$absdir" != "$libdir" && \
+		    func_warning "\`$deplib' seems to be moved"
+
+		  path="-L$absdir"
+		fi
+		;;
+	      esac
+	      case " $deplibs " in
+	      *" $path "*) ;;
+	      *) deplibs="$path $deplibs" ;;
+	      esac
+	    done
+	  fi # link_all_deplibs != no
+	fi # linkmode = lib
+      done # for deplib in $libs
+      if test "$pass" = link; then
+	if test "$linkmode" = "prog"; then
+	  compile_deplibs="$new_inherited_linker_flags $compile_deplibs"
+	  finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs"
+	else
+	  compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	fi
+      fi
+      dependency_libs="$newdependency_libs"
+      if test "$pass" = dlpreopen; then
+	# Link the dlpreopened libraries before other libraries
+	for deplib in $save_deplibs; do
+	  deplibs="$deplib $deplibs"
+	done
+      fi
+      if test "$pass" != dlopen; then
+	if test "$pass" != conv; then
+	  # Make sure lib_search_path contains only unique directories.
+	  lib_search_path=
+	  for dir in $newlib_search_path; do
+	    case "$lib_search_path " in
+	    *" $dir "*) ;;
+	    *) lib_search_path="$lib_search_path $dir" ;;
+	    esac
+	  done
+	  newlib_search_path=
+	fi
+
+	if test "$linkmode,$pass" != "prog,link"; then
+	  vars="deplibs"
+	else
+	  vars="compile_deplibs finalize_deplibs"
+	fi
+	for var in $vars dependency_libs; do
+	  # Add libraries to $var in reverse order
+	  eval tmp_libs=\"\$$var\"
+	  new_libs=
+	  for deplib in $tmp_libs; do
+	    # FIXME: Pedantically, this is the right thing to do, so
+	    #        that some nasty dependency loop isn't accidentally
+	    #        broken:
+	    #new_libs="$deplib $new_libs"
+	    # Pragmatically, this seems to cause very few problems in
+	    # practice:
+	    case $deplib in
+	    -L*) new_libs="$deplib $new_libs" ;;
+	    -R*) ;;
+	    *)
+	      # And here is the reason: when a library appears more
+	      # than once as an explicit dependence of a library, or
+	      # is implicitly linked in more than once by the
+	      # compiler, it is considered special, and multiple
+	      # occurrences thereof are not removed.  Compare this
+	      # with having the same library being listed as a
+	      # dependency of multiple other libraries: in this case,
+	      # we know (pedantically, we assume) the library does not
+	      # need to be listed more than once, so we keep only the
+	      # last copy.  This is not always right, but it is rare
+	      # enough that we require users who really mean to play
+	      # such unportable linking tricks to link the library
+	      # using -Wl,-lname, so that libtool does not consider it
+	      # for duplicate removal.
+	      case " $specialdeplibs " in
+	      *" $deplib "*) new_libs="$deplib $new_libs" ;;
+	      *)
+		case " $new_libs " in
+		*" $deplib "*) ;;
+		*) new_libs="$deplib $new_libs" ;;
+		esac
+		;;
+	      esac
+	      ;;
+	    esac
+	  done
+	  tmp_libs=
+	  for deplib in $new_libs; do
+	    case $deplib in
+	    -L*)
+	      case " $tmp_libs " in
+	      *" $deplib "*) ;;
+	      *) tmp_libs="$tmp_libs $deplib" ;;
+	      esac
+	      ;;
+	    *) tmp_libs="$tmp_libs $deplib" ;;
+	    esac
+	  done
+	  eval $var=\"$tmp_libs\"
+	done # for var
+      fi
+      # Last step: remove runtime libs from dependency_libs
+      # (they stay in deplibs)
+      tmp_libs=
+      for i in $dependency_libs ; do
+	case " $predeps $postdeps $compiler_lib_search_path " in
+	*" $i "*)
+	  i=""
+	  ;;
+	esac
+	if test -n "$i" ; then
+	  tmp_libs="$tmp_libs $i"
+	fi
+      done
+      dependency_libs=$tmp_libs
+    done # for pass
+    if test "$linkmode" = prog; then
+      dlfiles="$newdlfiles"
+    fi
+    if test "$linkmode" = prog || test "$linkmode" = lib; then
+      dlprefiles="$newdlprefiles"
+    fi
+
+    case $linkmode in
+    oldlib)
+      if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+	func_warning "\`-dlopen' is ignored for archives"
+      fi
+
+      case " $deplibs" in
+      *\ -l* | *\ -L*)
+	func_warning "\`-l' and \`-L' are ignored for archives" ;;
+      esac
+
+      test -n "$rpath" && \
+	func_warning "\`-rpath' is ignored for archives"
+
+      test -n "$xrpath" && \
+	func_warning "\`-R' is ignored for archives"
+
+      test -n "$vinfo" && \
+	func_warning "\`-version-info/-version-number' is ignored for archives"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for archives"
+
+      test -n "$export_symbols$export_symbols_regex" && \
+	func_warning "\`-export-symbols' is ignored for archives"
+
+      # Now set the variables for building old libraries.
+      build_libtool_libs=no
+      oldlibs="$output"
+      objs="$objs$old_deplibs"
+      ;;
+
+    lib)
+      # Make sure we only generate libraries of the form `libNAME.la'.
+      case $outputname in
+      lib*)
+	func_stripname 'lib' '.la' "$outputname"
+	name=$func_stripname_result
+	eval shared_ext=\"$shrext_cmds\"
+	eval libname=\"$libname_spec\"
+	;;
+      *)
+	test "$module" = no && \
+	  func_fatal_help "libtool library \`$output' must begin with \`lib'"
+
+	if test "$need_lib_prefix" != no; then
+	  # Add the "lib" prefix for modules if required
+	  func_stripname '' '.la' "$outputname"
+	  name=$func_stripname_result
+	  eval shared_ext=\"$shrext_cmds\"
+	  eval libname=\"$libname_spec\"
+	else
+	  func_stripname '' '.la' "$outputname"
+	  libname=$func_stripname_result
+	fi
+	;;
+      esac
+
+      if test -n "$objs"; then
+	if test "$deplibs_check_method" != pass_all; then
+	  func_fatal_error "cannot build libtool library \`$output' from non-libtool objects on this host:$objs"
+	else
+	  echo
+	  $ECHO "*** Warning: Linking the shared library $output against the non-libtool"
+	  $ECHO "*** objects $objs is not portable!"
+	  libobjs="$libobjs $objs"
+	fi
+      fi
+
+      test "$dlself" != no && \
+	func_warning "\`-dlopen self' is ignored for libtool libraries"
+
+      set dummy $rpath
+      shift
+      test "$#" -gt 1 && \
+	func_warning "ignoring multiple \`-rpath's for a libtool library"
+
+      install_libdir="$1"
+
+      oldlibs=
+      if test -z "$rpath"; then
+	if test "$build_libtool_libs" = yes; then
+	  # Building a libtool convenience library.
+	  # Some compilers have problems with a `.al' extension, so
+	  # convenience libraries should have the same extension that an
+	  # archive normally would.
+	  oldlibs="$output_objdir/$libname.$libext $oldlibs"
+	  build_libtool_libs=convenience
+	  build_old_libs=yes
+	fi
+
+	test -n "$vinfo" && \
+	  func_warning "\`-version-info/-version-number' is ignored for convenience libraries"
+
+	test -n "$release" && \
+	  func_warning "\`-release' is ignored for convenience libraries"
+      else
+
+	# Parse the version information argument.
+	save_ifs="$IFS"; IFS=':'
+	set dummy $vinfo 0 0 0
+	shift
+	IFS="$save_ifs"
+
+	test -n "$7" && \
+	  func_fatal_help "too many parameters to \`-version-info'"
+
+	# Convert absolute version numbers to libtool ages.  This retains
+	# compatibility with .la files and attempts to make the code below
+	# a bit more comprehensible.
+
+	case $vinfo_number in
+	yes)
+	  number_major="$1"
+	  number_minor="$2"
+	  number_revision="$3"
+	  #
+	  # There are really only two kinds -- those that
+	  # use the current revision as the major version
+	  # and those that subtract age and use age as
+	  # a minor version.  But then there is irix,
+	  # which has an extra 1 added just for fun.
+	  #
+	  case $version_type in
+	  darwin|linux|osf|windows|none)
+	    func_arith $number_major + $number_minor
+	    current=$func_arith_result
+	    age="$number_minor"
+	    revision="$number_revision"
+	    ;;
+	  freebsd-aout|freebsd-elf|qnx|sunos)
+	    current="$number_major"
+	    revision="$number_minor"
+	    age="0"
+	    ;;
+	  irix|nonstopux)
+	    func_arith $number_major + $number_minor
+	    current=$func_arith_result
+	    age="$number_minor"
+	    revision="$number_minor"
+	    lt_irix_increment=no
+	    ;;
+	  esac
+	  ;;
+	no)
+	  current="$1"
+	  revision="$2"
+	  age="$3"
+	  ;;
+	esac
+
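+	# Worked example (hypothetical values): `-version-info 6:3:1' sets
+	# current=6 revision=3 age=1 directly, while on a linux-style host
+	# `-version-number 2:3:4' maps to current=2+3=5, age=3, revision=4,
+	# so both spellings feed the same variables below.
+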
+	# Check that each of the things are valid numbers.
+	case $current in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "CURRENT \`$current' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	case $revision in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "REVISION \`$revision' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	case $age in
+	0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
+	*)
+	  func_error "AGE \`$age' must be a nonnegative integer"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	  ;;
+	esac
+
+	if test "$age" -gt "$current"; then
+	  func_error "AGE \`$age' is greater than the current interface number \`$current'"
+	  func_fatal_error "\`$vinfo' is not valid version information"
+	fi
+
+	# Calculate the version variables.
+	major=
+	versuffix=
+	verstring=
+	case $version_type in
+	none) ;;
+
+	darwin)
+	  # Like Linux, but with the current version available in
+	  # verstring for coding it into the library header
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix="$major.$age.$revision"
+	  # Darwin ld doesn't like 0 for these options...
+	  func_arith $current + 1
+	  minor_current=$func_arith_result
+	  xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision"
+	  verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
+	  ;;
+
+	freebsd-aout)
+	  major=".$current"
+	  versuffix=".$current.$revision";
+	  ;;
+
+	freebsd-elf)
+	  major=".$current"
+	  versuffix=".$current"
+	  ;;
+
+	irix | nonstopux)
+	  if test "X$lt_irix_increment" = "Xno"; then
+	    func_arith $current - $age
+	  else
+	    func_arith $current - $age + 1
+	  fi
+	  major=$func_arith_result
+
+	  case $version_type in
+	    nonstopux) verstring_prefix=nonstopux ;;
+	    *)         verstring_prefix=sgi ;;
+	  esac
+	  verstring="$verstring_prefix$major.$revision"
+
+	  # Add in all the interfaces that we are compatible with.
+	  loop=$revision
+	  while test "$loop" -ne 0; do
+	    func_arith $revision - $loop
+	    iface=$func_arith_result
+	    func_arith $loop - 1
+	    loop=$func_arith_result
+	    verstring="$verstring_prefix$major.$iface:$verstring"
+	  done
+
+	  # Before this point, $major must not contain `.'.
+	  major=.$major
+	  versuffix="$major.$revision"
+	  ;;
+
+	linux)
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix="$major.$age.$revision"
+	  ;;
+
+	osf)
+	  func_arith $current - $age
+	  major=.$func_arith_result
+	  versuffix=".$current.$age.$revision"
+	  verstring="$current.$age.$revision"
+
+	  # Add in all the interfaces that we are compatible with.
+	  loop=$age
+	  while test "$loop" -ne 0; do
+	    func_arith $current - $loop
+	    iface=$func_arith_result
+	    func_arith $loop - 1
+	    loop=$func_arith_result
+	    verstring="$verstring:${iface}.0"
+	  done
+
+	  # Make executables depend on our current version.
+	  verstring="$verstring:${current}.0"
+	  ;;
+
+	qnx)
+	  major=".$current"
+	  versuffix=".$current"
+	  ;;
+
+	sunos)
+	  major=".$current"
+	  versuffix=".$current.$revision"
+	  ;;
+
+	windows)
+	  # Use '-' rather than '.', since we only want one
+	  # extension on DOS 8.3 filesystems.
+	  func_arith $current - $age
+	  major=$func_arith_result
+	  versuffix="-$major"
+	  ;;
+
+	*)
+	  func_fatal_configuration "unknown library version type \`$version_type'"
+	  ;;
+	esac
+
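+	# Continuing the `-version-info 6:3:1' example (library name
+	# hypothetical):
+	#   linux:   major=.5  versuffix=.5.1.3  ->  libfoo.so.5.1.3
+	#   windows: major=5   versuffix=-5      ->  libfoo-5.dll
+	#   sunos:   major=.6  versuffix=.6.3
+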
+	# Clear the version info if we defaulted, and they specified a release.
+	if test -z "$vinfo" && test -n "$release"; then
+	  major=
+	  case $version_type in
+	  darwin)
+	    # we can't check for "0.0" in archive_cmds due to quoting
+	    # problems, so we reset it completely
+	    verstring=
+	    ;;
+	  *)
+	    verstring="0.0"
+	    ;;
+	  esac
+	  if test "$need_version" = no; then
+	    versuffix=
+	  else
+	    versuffix=".0.0"
+	  fi
+	fi
+
+	# Remove version info from name if versioning should be avoided
+	if test "$avoid_version" = yes && test "$need_version" = no; then
+	  major=
+	  versuffix=
+	  verstring=""
+	fi
+
+	# Check to see if the archive will have undefined symbols.
+	if test "$allow_undefined" = yes; then
+	  if test "$allow_undefined_flag" = unsupported; then
+	    func_warning "undefined symbols not allowed in $host shared libraries"
+	    build_libtool_libs=no
+	    build_old_libs=yes
+	  fi
+	else
+	  # Don't allow undefined symbols.
+	  allow_undefined_flag="$no_undefined_flag"
+	fi
+
+      fi
+
+      func_generate_dlsyms "$libname" "$libname" "yes"
+      libobjs="$libobjs $symfileobj"
+      test "X$libobjs" = "X " && libobjs=
+
+      if test "$mode" != relink; then
+	# Remove our outputs, but don't remove object files since they
+	# may have been created when compiling PIC objects.
+	removelist=
+	tempremovelist=`$ECHO "$output_objdir/*"`
+	for p in $tempremovelist; do
+	  case $p in
+	    *.$objext | *.gcno)
+	       ;;
+	    $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*)
+	       if test "X$precious_files_regex" != "X"; then
+		 if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1
+		 then
+		   continue
+		 fi
+	       fi
+	       removelist="$removelist $p"
+	       ;;
+	    *) ;;
+	  esac
+	done
+	test -n "$removelist" && \
+	  func_show_eval "${RM}r \$removelist"
+      fi
+
+      # Now set the variables for building old libraries.
+      if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then
+	oldlibs="$oldlibs $output_objdir/$libname.$libext"
+
+	# Transform .lo files to .o files.
+	oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP`
+      fi
+
+      # Eliminate all temporary directories.
+      #for path in $notinst_path; do
+      #	lib_search_path=`$ECHO "$lib_search_path " | $SED "s% $path % %g"`
+      #	deplibs=`$ECHO "$deplibs " | $SED "s% -L$path % %g"`
+      #	dependency_libs=`$ECHO "$dependency_libs " | $SED "s% -L$path % %g"`
+      #done
+
+      if test -n "$xrpath"; then
+	# If the user specified any rpath flags, then add them.
+	temp_xrpath=
+	for libdir in $xrpath; do
+	  temp_xrpath="$temp_xrpath -R$libdir"
+	  case "$finalize_rpath " in
+	  *" $libdir "*) ;;
+	  *) finalize_rpath="$finalize_rpath $libdir" ;;
+	  esac
+	done
+	if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then
+	  dependency_libs="$temp_xrpath $dependency_libs"
+	fi
+      fi
+
+      # Make sure dlfiles contains only unique files that won't be dlpreopened
+      old_dlfiles="$dlfiles"
+      dlfiles=
+      for lib in $old_dlfiles; do
+	case " $dlprefiles $dlfiles " in
+	*" $lib "*) ;;
+	*) dlfiles="$dlfiles $lib" ;;
+	esac
+      done
+
+      # Make sure dlprefiles contains only unique files
+      old_dlprefiles="$dlprefiles"
+      dlprefiles=
+      for lib in $old_dlprefiles; do
+	case "$dlprefiles " in
+	*" $lib "*) ;;
+	*) dlprefiles="$dlprefiles $lib" ;;
+	esac
+      done
+
+      if test "$build_libtool_libs" = yes; then
+	if test -n "$rpath"; then
+	  case $host in
+	  *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*)
+	    # These systems don't actually have a C library (as such)!
+	    ;;
+	  *-*-rhapsody* | *-*-darwin1.[012])
+	    # Rhapsody C library is in the System framework
+	    deplibs="$deplibs System.ltframework"
+	    ;;
+	  *-*-netbsd*)
+	    # Don't link with libc until the a.out ld.so is fixed.
+	    ;;
+	  *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
+	    # Do not include libc, since these systems provide both libc and libc_r.
+	    ;;
+	  *-*-sco3.2v5* | *-*-sco5v6*)
+	    # Causes problems with __ctype
+	    ;;
+	  *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
+	    # Compiler inserts libc in the correct place for threads to work
+	    ;;
+	  *)
+	    # Add libc to deplibs on all other systems if necessary.
+	    if test "$build_libtool_need_lc" = "yes"; then
+	      deplibs="$deplibs -lc"
+	    fi
+	    ;;
+	  esac
+	fi
+
+	# Transform deplibs into only deplibs that can be linked in shared.
+	name_save=$name
+	libname_save=$libname
+	release_save=$release
+	versuffix_save=$versuffix
+	major_save=$major
+	# I'm not sure if I'm treating the release correctly.  I think
+	# the release should show up in -l (i.e., -lgmp5), so we don't
+	# want to add it in twice.  Is that correct?
+	release=""
+	versuffix=""
+	major=""
+	newdeplibs=
+	droppeddeps=no
+	case $deplibs_check_method in
+	pass_all)
+	  # Don't check for shared/static.  Everything works.
+	  # This might be a little naive.  We might want to check
+	  # whether the library exists or not.  But this is on
+	  # osf3 & osf4 and I'm not really sure... Just
+	  # implementing what was already the behavior.
+	  newdeplibs=$deplibs
+	  ;;
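+	# (Illustrative note, not from upstream libtool: configure picks the
+	# check method per host; modern ELF GNU/Linux hosts normally get
+	#   deplibs_check_method="pass_all"
+	# while other hosts may get a "file_magic <regex>" or
+	# "match_pattern <regex>" value, handled by the arms below.)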
+	test_compile)
+	  # This code stresses the "libraries are programs" paradigm to its
+	  # limits. Maybe even breaks it.  We compile a program, linking it
+	  # against the deplibs as a proxy for the library.  Then we can check
+	  # whether they linked in statically or dynamically with ldd.
+	  $opt_dry_run || $RM conftest.c
+	  cat > conftest.c <<EOF
+	  int main() { return 0; }
+EOF
+	  $opt_dry_run || $RM conftest
+	  if $LTCC $LTCFLAGS -o conftest conftest.c $deplibs; then
+	    ldd_output=`ldd conftest`
+	    for i in $deplibs; do
+	      case $i in
+	      -l*)
+		func_stripname -l '' "$i"
+		name=$func_stripname_result
+		if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		  case " $predeps $postdeps " in
+		  *" $i "*)
+		    newdeplibs="$newdeplibs $i"
+		    i=""
+		    ;;
+		  esac
+		fi
+		if test -n "$i" ; then
+		  libname=`eval "\\$ECHO \"$libname_spec\""`
+		  deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+		  set dummy $deplib_matches; shift
+		  deplib_match=$1
+		  if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+		    newdeplibs="$newdeplibs $i"
+		  else
+		    droppeddeps=yes
+		    echo
+		    $ECHO "*** Warning: dynamic linker does not accept needed library $i."
+		    echo "*** I have the capability to make that library automatically link in when"
+		    echo "*** you link to this library.  But I can only do this if you have a"
+		    echo "*** shared version of the library, which I believe you do not have"
+		    echo "*** because a test_compile revealed that the linker did not use it in"
+		    echo "*** the dynamic dependency list that programs resolve against at runtime."
+		  fi
+		fi
+		;;
+	      *)
+		newdeplibs="$newdeplibs $i"
+		;;
+	      esac
+	    done
+	  else
+	    # Error occurred in the first compile.  Let's try to salvage
+	    # the situation: Compile a separate program for each library.
+	    for i in $deplibs; do
+	      case $i in
+	      -l*)
+		func_stripname -l '' "$i"
+		name=$func_stripname_result
+		$opt_dry_run || $RM conftest
+		if $LTCC $LTCFLAGS -o conftest conftest.c $i; then
+		  ldd_output=`ldd conftest`
+		  if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		    case " $predeps $postdeps " in
+		    *" $i "*)
+		      newdeplibs="$newdeplibs $i"
+		      i=""
+		      ;;
+		    esac
+		  fi
+		  if test -n "$i" ; then
+		    libname=`eval "\\$ECHO \"$libname_spec\""`
+		    deplib_matches=`eval "\\$ECHO \"$library_names_spec\""`
+		    set dummy $deplib_matches; shift
+		    deplib_match=$1
+		    if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
+		      newdeplibs="$newdeplibs $i"
+		    else
+		      droppeddeps=yes
+		      echo
+		      $ECHO "*** Warning: dynamic linker does not accept needed library $i."
+		      echo "*** I have the capability to make that library automatically link in when"
+		      echo "*** you link to this library.  But I can only do this if you have a"
+		      echo "*** shared version of the library, which you do not appear to have"
+		      echo "*** because a test_compile revealed that the linker did not use this one"
+		      echo "*** as a dynamic dependency that programs resolve against at runtime."
+		    fi
+		  fi
+		else
+		  droppeddeps=yes
+		  echo
+		  $ECHO "*** Warning!  Library $i is needed by this library but I was not able to"
+		  echo "*** make it link in!  You will probably need to install it or some"
+		  echo "*** library that it depends on before this library will be fully"
+		  echo "*** functional.  Installing it before continuing would be even better."
+		fi
+		;;
+	      *)
+		newdeplibs="$newdeplibs $i"
+		;;
+	      esac
+	    done
+	  fi
+	  ;;
+	file_magic*)
+	  set dummy $deplibs_check_method; shift
+	  file_magic_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+	  for a_deplib in $deplibs; do
+	    case $a_deplib in
+	    -l*)
+	      func_stripname -l '' "$a_deplib"
+	      name=$func_stripname_result
+	      if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		case " $predeps $postdeps " in
+		*" $a_deplib "*)
+		  newdeplibs="$newdeplibs $a_deplib"
+		  a_deplib=""
+		  ;;
+		esac
+	      fi
+	      if test -n "$a_deplib" ; then
+		libname=`eval "\\$ECHO \"$libname_spec\""`
+		for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+		  potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
+		  for potent_lib in $potential_libs; do
+		      # Follow soft links.
+		      if ls -lLd "$potent_lib" 2>/dev/null |
+			 $GREP " -> " >/dev/null; then
+			continue
+		      fi
+		      # The statement above tries to avoid entering an
+		      # endless loop below, in case of cyclic links.
+		      # We might still enter an endless loop, since a link
+		      # loop can be closed while we follow links,
+		      # but so what?
+		      potlib="$potent_lib"
+		      while test -h "$potlib" 2>/dev/null; do
+			potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'`
+			case $potliblink in
+			[\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";;
+			*) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";;
+			esac
+		      done
+		      if eval $file_magic_cmd \"\$potlib\" 2>/dev/null |
+			 $SED -e 10q |
+			 $EGREP "$file_magic_regex" > /dev/null; then
+			newdeplibs="$newdeplibs $a_deplib"
+			a_deplib=""
+			break 2
+		      fi
+		  done
+		done
+	      fi
+	      if test -n "$a_deplib" ; then
+		droppeddeps=yes
+		echo
+		$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because I checked the linker path looking for a file starting"
+		if test -z "$potlib" ; then
+		  $ECHO "*** with $libname but no candidates were found. (...for file magic test)"
+		else
+		  $ECHO "*** with $libname and none of the candidates passed a file format test"
+		  $ECHO "*** using a file magic. Last file checked: $potlib"
+		fi
+	      fi
+	      ;;
+	    *)
+	      # Add a -L argument.
+	      newdeplibs="$newdeplibs $a_deplib"
+	      ;;
+	    esac
+	  done # Gone through all deplibs.
+	  ;;
+	match_pattern*)
+	  set dummy $deplibs_check_method; shift
+	  match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
+	  for a_deplib in $deplibs; do
+	    case $a_deplib in
+	    -l*)
+	      func_stripname -l '' "$a_deplib"
+	      name=$func_stripname_result
+	      if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+		case " $predeps $postdeps " in
+		*" $a_deplib "*)
+		  newdeplibs="$newdeplibs $a_deplib"
+		  a_deplib=""
+		  ;;
+		esac
+	      fi
+	      if test -n "$a_deplib" ; then
+		libname=`eval "\\$ECHO \"$libname_spec\""`
+		for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
+		  potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
+		  for potent_lib in $potential_libs; do
+		    potlib="$potent_lib" # see symlink-check above in file_magic test
+		    if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \
+		       $EGREP "$match_pattern_regex" > /dev/null; then
+		      newdeplibs="$newdeplibs $a_deplib"
+		      a_deplib=""
+		      break 2
+		    fi
+		  done
+		done
+	      fi
+	      if test -n "$a_deplib" ; then
+		droppeddeps=yes
+		echo
+		$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
+		echo "*** I have the capability to make that library automatically link in when"
+		echo "*** you link to this library.  But I can only do this if you have a"
+		echo "*** shared version of the library, which you do not appear to have"
+		echo "*** because I checked the linker path looking for a file starting"
+		if test -z "$potlib" ; then
+		  $ECHO "*** with $libname but no candidates were found. (...for regex pattern test)"
+		else
+		  $ECHO "*** with $libname and none of the candidates passed a file format test"
+		  $ECHO "*** using a regex pattern. Last file checked: $potlib"
+		fi
+	      fi
+	      ;;
+	    *)
+	      # Add a -L argument.
+	      newdeplibs="$newdeplibs $a_deplib"
+	      ;;
+	    esac
+	  done # Gone through all deplibs.
+	  ;;
+	none | unknown | *)
+	  newdeplibs=""
+	  tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'`
+	  if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
+	    for i in $predeps $postdeps ; do
+	      # can't use Xsed below, because $i might contain '/'
+	      tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s,$i,,"`
+	    done
+	  fi
+	  case $tmp_deplibs in
+	  *[!\	\ ]*)
+	    echo
+	    if test "X$deplibs_check_method" = "Xnone"; then
+	      echo "*** Warning: inter-library dependencies are not supported on this platform."
+	    else
+	      echo "*** Warning: inter-library dependencies are not known to be supported."
+	    fi
+	    echo "*** All declared inter-library dependencies are being dropped."
+	    droppeddeps=yes
+	    ;;
+	  esac
+	  ;;
+	esac
+	versuffix=$versuffix_save
+	major=$major_save
+	release=$release_save
+	libname=$libname_save
+	name=$name_save
+
+	case $host in
+	*-*-rhapsody* | *-*-darwin1.[012])
+	  # On Rhapsody replace the C library with the System framework
+	  newdeplibs=`$ECHO " $newdeplibs" | $SED 's/ -lc / System.ltframework /'`
+	  ;;
+	esac
+
+	if test "$droppeddeps" = yes; then
+	  if test "$module" = yes; then
+	    echo
+	    echo "*** Warning: libtool could not satisfy all declared inter-library"
+	    $ECHO "*** dependencies of module $libname.  Therefore, libtool will create"
+	    echo "*** a static module, that should work as long as the dlopening"
+	    echo "*** application is linked with the -dlopen flag."
+	    if test -z "$global_symbol_pipe"; then
+	      echo
+	      echo "*** However, this would only work if libtool was able to extract symbol"
+	      echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
+	      echo "*** not find such a program.  So, this module is probably useless."
+	      echo "*** \`nm' from GNU binutils and a full rebuild may help."
+	    fi
+	    if test "$build_old_libs" = no; then
+	      oldlibs="$output_objdir/$libname.$libext"
+	      build_libtool_libs=module
+	      build_old_libs=yes
+	    else
+	      build_libtool_libs=no
+	    fi
+	  else
+	    echo "*** The inter-library dependencies that have been dropped here will be"
+	    echo "*** automatically added whenever a program is linked with this library"
+	    echo "*** or is declared to -dlopen it."
+
+	    if test "$allow_undefined" = no; then
+	      echo
+	      echo "*** Since this library must not contain undefined symbols,"
+	      echo "*** because either the platform does not support them or"
+	      echo "*** it was explicitly requested with -no-undefined,"
+	      echo "*** libtool will only create a static version of it."
+	      if test "$build_old_libs" = no; then
+		oldlibs="$output_objdir/$libname.$libext"
+		build_libtool_libs=module
+		build_old_libs=yes
+	      else
+		build_libtool_libs=no
+	      fi
+	    fi
+	  fi
+	fi
+	# Done checking deplibs!
+	deplibs=$newdeplibs
+      fi
+      # Time to change all our "foo.ltframework" stuff back to "-framework foo"
+      case $host in
+	*-*-darwin*)
+	  newdeplibs=`$ECHO " $newdeplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  deplibs=`$ECHO " $deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	  ;;
+      esac
+
+      # move library search paths that coincide with paths to not yet
+      # installed libraries to the beginning of the library search list
+      new_libs=
+      for path in $notinst_path; do
+	case " $new_libs " in
+	*" -L$path/$objdir "*) ;;
+	*)
+	  case " $deplibs " in
+	  *" -L$path/$objdir "*)
+	    new_libs="$new_libs -L$path/$objdir" ;;
+	  esac
+	  ;;
+	esac
+      done
+      for deplib in $deplibs; do
+	case $deplib in
+	-L*)
+	  case " $new_libs " in
+	  *" $deplib "*) ;;
+	  *) new_libs="$new_libs $deplib" ;;
+	  esac
+	  ;;
+	*) new_libs="$new_libs $deplib" ;;
+	esac
+      done
+      deplibs="$new_libs"
+
+      # All the library-specific variables (install_libdir is set above).
+      library_names=
+      old_library=
+      dlname=
+
+      # Test again, we may have decided not to build it any more
+      if test "$build_libtool_libs" = yes; then
+	if test "$hardcode_into_libs" = yes; then
+	  # Hardcode the library paths
+	  hardcode_libdirs=
+	  dep_rpath=
+	  rpath="$finalize_rpath"
+	  test "$mode" != relink && rpath="$compile_rpath$rpath"
+	  for libdir in $rpath; do
+	    if test -n "$hardcode_libdir_flag_spec"; then
+	      if test -n "$hardcode_libdir_separator"; then
+		if test -z "$hardcode_libdirs"; then
+		  hardcode_libdirs="$libdir"
+		else
+		  # Just accumulate the unique libdirs.
+		  case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+		  *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		    ;;
+		  *)
+		    hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
+		    ;;
+		  esac
+		fi
+	      else
+		eval flag=\"$hardcode_libdir_flag_spec\"
+		dep_rpath="$dep_rpath $flag"
+	      fi
+	    elif test -n "$runpath_var"; then
+	      case "$perm_rpath " in
+	      *" $libdir "*) ;;
+	      *) perm_rpath="$perm_rpath $libdir" ;;
+	      esac
+	    fi
+	  done
+	  # Substitute the hardcoded libdirs into the rpath.
+	  if test -n "$hardcode_libdir_separator" &&
+	     test -n "$hardcode_libdirs"; then
+	    libdir="$hardcode_libdirs"
+	    if test -n "$hardcode_libdir_flag_spec_ld"; then
+	      eval dep_rpath=\"$hardcode_libdir_flag_spec_ld\"
+	    else
+	      eval dep_rpath=\"$hardcode_libdir_flag_spec\"
+	    fi
+	  fi
+	  if test -n "$runpath_var" && test -n "$perm_rpath"; then
+	    # We should set the runpath_var.
+	    rpath=
+	    for dir in $perm_rpath; do
+	      rpath="$rpath$dir:"
+	    done
+	    eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
+	  fi
+	  test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
+	fi
+
+	shlibpath="$finalize_shlibpath"
+	test "$mode" != relink && shlibpath="$compile_shlibpath$shlibpath"
+	if test -n "$shlibpath"; then
+	  eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
+	fi
+
+	# Get the real and link names of the library.
+	eval shared_ext=\"$shrext_cmds\"
+	eval library_names=\"$library_names_spec\"
+	set dummy $library_names
+	shift
+	realname="$1"
+	shift
+
+	if test -n "$soname_spec"; then
+	  eval soname=\"$soname_spec\"
+	else
+	  soname="$realname"
+	fi
+	if test -z "$dlname"; then
+	  dlname=$soname
+	fi
+
+	lib="$output_objdir/$realname"
+	linknames=
+	for link
+	do
+	  linknames="$linknames $link"
+	done
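+	# (Illustrative example, not from upstream: on a typical ELF host,
+	# library_names_spec can expand to
+	#   realname=libfoo.so.1.2.1  soname=libfoo.so.1
+	#   linknames=" libfoo.so.1 libfoo.so"
+	# so only the realname is built; the shorter names become symlinks
+	# to it near the end of this branch.  libfoo is a made-up name.)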
+
+	# Use standard objects if they are pic
+	test -z "$pic_flag" && libobjs=`$ECHO "$libobjs" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	test "X$libobjs" = "X " && libobjs=
+
+	delfiles=
+	if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	  $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp"
+	  export_symbols="$output_objdir/$libname.uexp"
+	  delfiles="$delfiles $export_symbols"
+	fi
+
+	orig_export_symbols=
+	case $host_os in
+	cygwin* | mingw* | cegcc*)
+	  if test -n "$export_symbols" && test -z "$export_symbols_regex"; then
+	    # exporting using user supplied symfile
+	    if test "x`$SED 1q $export_symbols`" != xEXPORTS; then
+	      # and it's NOT already a .def file. Must figure out
+	      # which of the given symbols are data symbols and tag
+	      # them as such. So, trigger use of export_symbols_cmds.
+	      # export_symbols gets reassigned inside the "prepare
+	      # the list of exported symbols" if statement, so the
+	      # include_expsyms logic still works.
+	      orig_export_symbols="$export_symbols"
+	      export_symbols=
+	      always_export_symbols=yes
+	    fi
+	  fi
+	  ;;
+	esac
+
+	# Prepare the list of exported symbols
+	if test -z "$export_symbols"; then
+	  if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then
+	    func_verbose "generating symbol list for \`$libname.la'"
+	    export_symbols="$output_objdir/$libname.exp"
+	    $opt_dry_run || $RM $export_symbols
+	    cmds=$export_symbols_cmds
+	    save_ifs="$IFS"; IFS='~'
+	    for cmd in $cmds; do
+	      IFS="$save_ifs"
+	      eval cmd=\"$cmd\"
+	      func_len " $cmd"
+	      len=$func_len_result
+	      if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+		func_show_eval "$cmd" 'exit $?'
+		skipped_export=false
+	      else
+		# The command line is too long to execute in one step.
+		func_verbose "using reloadable object file for export list..."
+		skipped_export=:
+		# Break out early, otherwise skipped_export may be
+		# set to false by a later but shorter cmd.
+		break
+	      fi
+	    done
+	    IFS="$save_ifs"
+	    if test -n "$export_symbols_regex" && test "X$skipped_export" != "X:"; then
+	      func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+	      func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+	    fi
+	  fi
+	fi
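+	# (Illustrative note, not from upstream: the generated $libname.exp
+	# is a plain list of symbol names, one per line, e.g.
+	#   avro_schema_from_json
+	#   avro_writer_new
+	# These names are made up.  A user-supplied w32 .def file instead
+	# starts with an EXPORTS line, which the $SED 1q check above detects.)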
+
+	if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	  tmp_export_symbols="$export_symbols"
+	  test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+	  $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+	fi
+
+	if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then
+	  # The given export_symbols file has to be filtered, so filter it.
+	  func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
+	  # FIXME: $output_objdir/$libname.filter potentially contains lots of
+	  # 's' commands which not all seds can handle. GNU sed should be fine
+	  # though. Also, the filter scales superlinearly with the number of
+	  # global variables. join(1) would be nice here, but unfortunately
+	  # isn't a blessed tool.
+	  $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+	  delfiles="$delfiles $export_symbols $output_objdir/$libname.filter"
+	  export_symbols=$output_objdir/$libname.def
+	  $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+	fi
+
+	tmp_deplibs=
+	for test_deplib in $deplibs; do
+	  case " $convenience " in
+	  *" $test_deplib "*) ;;
+	  *)
+	    tmp_deplibs="$tmp_deplibs $test_deplib"
+	    ;;
+	  esac
+	done
+	deplibs="$tmp_deplibs"
+
+	if test -n "$convenience"; then
+	  if test -n "$whole_archive_flag_spec" &&
+	    test "$compiler_needs_object" = yes &&
+	    test -z "$libobjs"; then
+	    # extract the archives, so we have objects to list.
+	    # TODO: could optimize this to just extract one archive.
+	    whole_archive_flag_spec=
+	  fi
+	  if test -n "$whole_archive_flag_spec"; then
+	    save_libobjs=$libobjs
+	    eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+	    test "X$libobjs" = "X " && libobjs=
+	  else
+	    gentop="$output_objdir/${outputname}x"
+	    generated="$generated $gentop"
+
+	    func_extract_archives $gentop $convenience
+	    libobjs="$libobjs $func_extract_archives_result"
+	    test "X$libobjs" = "X " && libobjs=
+	  fi
+	fi
+
+	if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then
+	  eval flag=\"$thread_safe_flag_spec\"
+	  linker_flags="$linker_flags $flag"
+	fi
+
+	# Make a backup of the uninstalled library when relinking
+	if test "$mode" = relink; then
+	  $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $?
+	fi
+
+	# Do each of the archive commands.
+	if test "$module" = yes && test -n "$module_cmds" ; then
+	  if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+	    eval test_cmds=\"$module_expsym_cmds\"
+	    cmds=$module_expsym_cmds
+	  else
+	    eval test_cmds=\"$module_cmds\"
+	    cmds=$module_cmds
+	  fi
+	else
+	  if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+	    eval test_cmds=\"$archive_expsym_cmds\"
+	    cmds=$archive_expsym_cmds
+	  else
+	    eval test_cmds=\"$archive_cmds\"
+	    cmds=$archive_cmds
+	  fi
+	fi
+
+	if test "X$skipped_export" != "X:" &&
+	   func_len " $test_cmds" &&
+	   len=$func_len_result &&
+	   test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+	  :
+	else
+	  # The command line is too long to link in one step, link piecewise
+	  # or, if using GNU ld and skipped_export is not :, use a linker
+	  # script.
+
+	  # Save the value of $output and $libobjs because we want to
+	  # use them later.  If we have whole_archive_flag_spec, we
+	  # want to use save_libobjs as it was before
+	  # whole_archive_flag_spec was expanded, because we can't
+	  # assume the linker understands whole_archive_flag_spec.
+	  # This may have to be revisited, in case too many
+	  # convenience libraries get linked in and end up exceeding
+	  # the spec.
+	  if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then
+	    save_libobjs=$libobjs
+	  fi
+	  save_output=$output
+	  func_basename "$output"
+	  output_la=$func_basename_result
+
+	  # Clear the reloadable object creation command queue and
+	  # initialize k to one.
+	  test_cmds=
+	  concat_cmds=
+	  objlist=
+	  last_robj=
+	  k=1
+
+	  if test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "$with_gnu_ld" = yes; then
+	    output=${output_objdir}/${output_la}.lnkscript
+	    func_verbose "creating GNU ld script: $output"
+	    echo 'INPUT (' > $output
+	    for obj in $save_libobjs
+	    do
+	      $ECHO "$obj" >> $output
+	    done
+	    echo ')' >> $output
+	    delfiles="$delfiles $output"
+	  elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then
+	    output=${output_objdir}/${output_la}.lnk
+	    func_verbose "creating linker input file list: $output"
+	    : > $output
+	    set x $save_libobjs
+	    shift
+	    firstobj=
+	    if test "$compiler_needs_object" = yes; then
+	      firstobj="$1 "
+	      shift
+	    fi
+	    for obj
+	    do
+	      $ECHO "$obj" >> $output
+	    done
+	    delfiles="$delfiles $output"
+	    output=$firstobj\"$file_list_spec$output\"
+	  else
+	    if test -n "$save_libobjs"; then
+	      func_verbose "creating reloadable object files..."
+	      output=$output_objdir/$output_la-${k}.$objext
+	      eval test_cmds=\"$reload_cmds\"
+	      func_len " $test_cmds"
+	      len0=$func_len_result
+	      len=$len0
+
+	      # Loop over the list of objects to be linked.
+	      for obj in $save_libobjs
+	      do
+		func_len " $obj"
+		func_arith $len + $func_len_result
+		len=$func_arith_result
+		if test "X$objlist" = X ||
+		   test "$len" -lt "$max_cmd_len"; then
+		  func_append objlist " $obj"
+		else
+		  # The command $test_cmds is almost too long; add a
+		  # command to the queue.
+		  if test "$k" -eq 1 ; then
+		    # The first file doesn't have a previous command to add.
+		    reload_objs=$objlist
+		    eval concat_cmds=\"$reload_cmds\"
+		  else
+		    # All subsequent reloadable object files will link in
+		    # the last one created.
+		    reload_objs="$objlist $last_robj"
+		    eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"
+		  fi
+		  last_robj=$output_objdir/$output_la-${k}.$objext
+		  func_arith $k + 1
+		  k=$func_arith_result
+		  output=$output_objdir/$output_la-${k}.$objext
+		  objlist=" $obj"
+		  func_len " $last_robj"
+		  func_arith $len0 + $func_len_result
+		  len=$func_arith_result
+		fi
+	      done
+	      # Handle the remaining objects by creating one last
+	      # reloadable object file.  All subsequent reloadable object
+	      # files will link in the last one created.
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      reload_objs="$objlist $last_robj"
+	      eval concat_cmds=\"\${concat_cmds}$reload_cmds\"
+	      if test -n "$last_robj"; then
+	        eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\"
+	      fi
+	      delfiles="$delfiles $output"
+
+	    else
+	      output=
+	    fi
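+	    # (Illustrative walk-through, not from upstream: with, say,
+	    # max_cmd_len=8192, the loop above packs objects into
+	    # libfoo.la-1.o, libfoo.la-2.o, ..., each reload command linking
+	    # in the previous partial object, so the final archive command
+	    # sees one short object list instead of the full $save_libobjs.)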
+
+	    if ${skipped_export-false}; then
+	      func_verbose "generating symbol list for \`$libname.la'"
+	      export_symbols="$output_objdir/$libname.exp"
+	      $opt_dry_run || $RM $export_symbols
+	      libobjs=$output
+	      # Append the command to create the export file.
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\"
+	      if test -n "$last_robj"; then
+		eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
+	      fi
+	    fi
+
+	    test -n "$save_libobjs" &&
+	      func_verbose "creating a temporary reloadable object file: $output"
+
+	    # Loop through the commands generated above and execute them.
+	    save_ifs="$IFS"; IFS='~'
+	    for cmd in $concat_cmds; do
+	      IFS="$save_ifs"
+	      $opt_silent || {
+		  func_quote_for_expand "$cmd"
+		  eval "func_echo $func_quote_for_expand_result"
+	      }
+	      $opt_dry_run || eval "$cmd" || {
+		lt_exit=$?
+
+		# Restore the uninstalled library and exit
+		if test "$mode" = relink; then
+		  ( cd "$output_objdir" && \
+		    $RM "${realname}T" && \
+		    $MV "${realname}U" "$realname" )
+		fi
+
+		exit $lt_exit
+	      }
+	    done
+	    IFS="$save_ifs"
+
+	    if test -n "$export_symbols_regex" && ${skipped_export-false}; then
+	      func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
+	      func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
+	    fi
+	  fi
+
+          if ${skipped_export-false}; then
+	    if test -n "$export_symbols" && test -n "$include_expsyms"; then
+	      tmp_export_symbols="$export_symbols"
+	      test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols"
+	      $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
+	    fi
+
+	    if test -n "$orig_export_symbols"; then
+	      # The given export_symbols file has to be filtered, so filter it.
+	      func_verbose "filter symbol list for \`$libname.la' to tag DATA exports"
+	      # FIXME: $output_objdir/$libname.filter potentially contains lots of
+	      # 's' commands which not all seds can handle. GNU sed should be fine
+	      # though. Also, the filter scales superlinearly with the number of
+	      # global variables. join(1) would be nice here, but unfortunately
+	      # isn't a blessed tool.
+	      $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
+	      delfiles="$delfiles $export_symbols $output_objdir/$libname.filter"
+	      export_symbols=$output_objdir/$libname.def
+	      $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
+	    fi
+	  fi
+
+	  libobjs=$output
+	  # Restore the value of output.
+	  output=$save_output
+
+	  if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
+	    eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
+	    test "X$libobjs" = "X " && libobjs=
+	  fi
+	  # Expand the library linking commands again to reset the
+	  # value of $libobjs for piecewise linking.
+
+	  # Do each of the archive commands.
+	  if test "$module" = yes && test -n "$module_cmds" ; then
+	    if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
+	      cmds=$module_expsym_cmds
+	    else
+	      cmds=$module_cmds
+	    fi
+	  else
+	    if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
+	      cmds=$archive_expsym_cmds
+	    else
+	      cmds=$archive_cmds
+	    fi
+	  fi
+	fi
+
+	if test -n "$delfiles"; then
+	  # Append the command to remove temporary files to $cmds.
+	  eval cmds=\"\$cmds~\$RM $delfiles\"
+	fi
+
+	# Add any objects from preloaded convenience libraries
+	if test -n "$dlprefiles"; then
+	  gentop="$output_objdir/${outputname}x"
+	  generated="$generated $gentop"
+
+	  func_extract_archives $gentop $dlprefiles
+	  libobjs="$libobjs $func_extract_archives_result"
+	  test "X$libobjs" = "X " && libobjs=
+	fi
+
+	save_ifs="$IFS"; IFS='~'
+	for cmd in $cmds; do
+	  IFS="$save_ifs"
+	  eval cmd=\"$cmd\"
+	  $opt_silent || {
+	    func_quote_for_expand "$cmd"
+	    eval "func_echo $func_quote_for_expand_result"
+	  }
+	  $opt_dry_run || eval "$cmd" || {
+	    lt_exit=$?
+
+	    # Restore the uninstalled library and exit
+	    if test "$mode" = relink; then
+	      ( cd "$output_objdir" && \
+	        $RM "${realname}T" && \
+		$MV "${realname}U" "$realname" )
+	    fi
+
+	    exit $lt_exit
+	  }
+	done
+	IFS="$save_ifs"
+
+	# Restore the uninstalled library and exit
+	if test "$mode" = relink; then
+	  $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $?
+
+	  if test -n "$convenience"; then
+	    if test -z "$whole_archive_flag_spec"; then
+	      func_show_eval '${RM}r "$gentop"'
+	    fi
+	  fi
+
+	  exit $EXIT_SUCCESS
+	fi
+
+	# Create links to the real library.
+	for linkname in $linknames; do
+	  if test "$realname" != "$linkname"; then
+	    func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?'
+	  fi
+	done
+
+	# If -module or -export-dynamic was specified, set the dlname.
+	if test "$module" = yes || test "$export_dynamic" = yes; then
+	  # On all known operating systems, these are identical.
+	  dlname="$soname"
+	fi
+      fi
+      ;;
+
+    obj)
+      if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
+	func_warning "\`-dlopen' is ignored for objects"
+      fi
+
+      case " $deplibs" in
+      *\ -l* | *\ -L*)
+	func_warning "\`-l' and \`-L' are ignored for objects" ;;
+      esac
+
+      test -n "$rpath" && \
+	func_warning "\`-rpath' is ignored for objects"
+
+      test -n "$xrpath" && \
+	func_warning "\`-R' is ignored for objects"
+
+      test -n "$vinfo" && \
+	func_warning "\`-version-info' is ignored for objects"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for objects"
+
+      case $output in
+      *.lo)
+	test -n "$objs$old_deplibs" && \
+	  func_fatal_error "cannot build library object \`$output' from non-libtool objects"
+
+	libobj=$output
+	func_lo2o "$libobj"
+	obj=$func_lo2o_result
+	;;
+      *)
+	libobj=
+	obj="$output"
+	;;
+      esac
+
+      # Delete the old objects.
+      $opt_dry_run || $RM $obj $libobj
+
+      # Objects from convenience libraries.  This assumes
+      # single-version convenience libraries.  Whenever we create
+      # different ones for PIC/non-PIC, we'll have to duplicate
+      # the extraction.
+      reload_conv_objs=
+      gentop=
+      # reload_cmds runs $LD directly, so get rid of -Wl from
+      # whole_archive_flag_spec and hope we can get by with turning
+      # commas into spaces.
+      wl=
+
+      if test -n "$convenience"; then
+	if test -n "$whole_archive_flag_spec"; then
+	  eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\"
+	  reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'`
+	else
+	  gentop="$output_objdir/${obj}x"
+	  generated="$generated $gentop"
+
+	  func_extract_archives $gentop $convenience
+	  reload_conv_objs="$reload_objs $func_extract_archives_result"
+	fi
+      fi
+
+      # Create the old-style object.
+      reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test
+
+      output="$obj"
+      func_execute_cmds "$reload_cmds" 'exit $?'
+
+      # Exit if we aren't doing a library object file.
+      if test -z "$libobj"; then
+	if test -n "$gentop"; then
+	  func_show_eval '${RM}r "$gentop"'
+	fi
+
+	exit $EXIT_SUCCESS
+      fi
+
+      if test "$build_libtool_libs" != yes; then
+	if test -n "$gentop"; then
+	  func_show_eval '${RM}r "$gentop"'
+	fi
+
+	# Create an invalid libtool object if no PIC, so that we don't
+	# accidentally link it into a program.
+	# $show "echo timestamp > $libobj"
+	# $opt_dry_run || eval "echo timestamp > $libobj" || exit $?
+	exit $EXIT_SUCCESS
+      fi
+
+      if test -n "$pic_flag" || test "$pic_mode" != default; then
+	# Only do commands if we really have different PIC objects.
+	reload_objs="$libobjs $reload_conv_objs"
+	output="$libobj"
+	func_execute_cmds "$reload_cmds" 'exit $?'
+      fi
+
+      if test -n "$gentop"; then
+	func_show_eval '${RM}r "$gentop"'
+      fi
+
+      exit $EXIT_SUCCESS
+      ;;
+
+    prog)
+      case $host in
+	*cygwin*) func_stripname '' '.exe' "$output"
+	          output=$func_stripname_result.exe;;
+      esac
+      test -n "$vinfo" && \
+	func_warning "\`-version-info' is ignored for programs"
+
+      test -n "$release" && \
+	func_warning "\`-release' is ignored for programs"
+
+      test "$preload" = yes \
+        && test "$dlopen_support" = unknown \
+	&& test "$dlopen_self" = unknown \
+	&& test "$dlopen_self_static" = unknown && \
+	  func_warning "\`LT_INIT([dlopen])' not used. Assuming no dlopen support."
+
+      case $host in
+      *-*-rhapsody* | *-*-darwin1.[012])
+	# On Rhapsody replace the C library with the System framework
+	compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's/ -lc / System.ltframework /'`
+	finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's/ -lc / System.ltframework /'`
+	;;
+      esac
+
+      case $host in
+      *-*-darwin*)
+	# Don't allow lazy linking: it breaks C++ global constructors,
+	# but it is supposedly fixed on 10.4 or later (yay!).
+	if test "$tagname" = CXX ; then
+	  case ${MACOSX_DEPLOYMENT_TARGET-10.0} in
+	    10.[0123])
+	      compile_command="$compile_command ${wl}-bind_at_load"
+	      finalize_command="$finalize_command ${wl}-bind_at_load"
+	    ;;
+	  esac
+	fi
+	# Time to change all our "foo.ltframework" stuff back to "-framework foo"
+	compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
+	;;
+      esac
+
+
+      # move library search paths that coincide with paths to not yet
+      # installed libraries to the beginning of the library search list
+      new_libs=
+      for path in $notinst_path; do
+	case " $new_libs " in
+	*" -L$path/$objdir "*) ;;
+	*)
+	  case " $compile_deplibs " in
+	  *" -L$path/$objdir "*)
+	    new_libs="$new_libs -L$path/$objdir" ;;
+	  esac
+	  ;;
+	esac
+      done
+      for deplib in $compile_deplibs; do
+	case $deplib in
+	-L*)
+	  case " $new_libs " in
+	  *" $deplib "*) ;;
+	  *) new_libs="$new_libs $deplib" ;;
+	  esac
+	  ;;
+	*) new_libs="$new_libs $deplib" ;;
+	esac
+      done
+      compile_deplibs="$new_libs"
+
+
+      compile_command="$compile_command $compile_deplibs"
+      finalize_command="$finalize_command $finalize_deplibs"
+
+      if test -n "$rpath$xrpath"; then
+	# If the user specified any rpath flags, then add them.
+	for libdir in $rpath $xrpath; do
+	  # This is the magic to use -rpath.
+	  case "$finalize_rpath " in
+	  *" $libdir "*) ;;
+	  *) finalize_rpath="$finalize_rpath $libdir" ;;
+	  esac
+	done
+      fi
+
+      # Now hardcode the library paths
+      rpath=
+      hardcode_libdirs=
+      for libdir in $compile_rpath $finalize_rpath; do
+	if test -n "$hardcode_libdir_flag_spec"; then
+	  if test -n "$hardcode_libdir_separator"; then
+	    if test -z "$hardcode_libdirs"; then
+	      hardcode_libdirs="$libdir"
+	    else
+	      # Just accumulate the unique libdirs.
+	      case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+	      *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		;;
+	      *)
+		hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
+		;;
+	      esac
+	    fi
+	  else
+	    eval flag=\"$hardcode_libdir_flag_spec\"
+	    rpath="$rpath $flag"
+	  fi
+	elif test -n "$runpath_var"; then
+	  case "$perm_rpath " in
+	  *" $libdir "*) ;;
+	  *) perm_rpath="$perm_rpath $libdir" ;;
+	  esac
+	fi
+	case $host in
+	*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
+	  testbindir=`${ECHO} "$libdir" | ${SED} -e 's*/lib$*/bin*'`
+	  case :$dllsearchpath: in
+	  *":$libdir:"*) ;;
+	  ::) dllsearchpath=$libdir;;
+	  *) dllsearchpath="$dllsearchpath:$libdir";;
+	  esac
+	  case :$dllsearchpath: in
+	  *":$testbindir:"*) ;;
+	  ::) dllsearchpath=$testbindir;;
+	  *) dllsearchpath="$dllsearchpath:$testbindir";;
+	  esac
+	  ;;
+	esac
+      done
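+      # (Illustrative example, not from upstream: for libdir=/usr/lib the
+      # sed above derives testbindir=/usr/bin, and each path is appended
+      # to dllsearchpath at most once, e.g.
+      #   dllsearchpath=/usr/lib:/usr/bin
+      # on PE hosts, where DLLs are located through a PATH-like search.)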
+      # Substitute the hardcoded libdirs into the rpath.
+      if test -n "$hardcode_libdir_separator" &&
+	 test -n "$hardcode_libdirs"; then
+	libdir="$hardcode_libdirs"
+	eval rpath=\" $hardcode_libdir_flag_spec\"
+      fi
+      compile_rpath="$rpath"
+
+      rpath=
+      hardcode_libdirs=
+      for libdir in $finalize_rpath; do
+	if test -n "$hardcode_libdir_flag_spec"; then
+	  if test -n "$hardcode_libdir_separator"; then
+	    if test -z "$hardcode_libdirs"; then
+	      hardcode_libdirs="$libdir"
+	    else
+	      # Just accumulate the unique libdirs.
+	      case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
+	      *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
+		;;
+	      *)
+		hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
+		;;
+	      esac
+	    fi
+	  else
+	    eval flag=\"$hardcode_libdir_flag_spec\"
+	    rpath="$rpath $flag"
+	  fi
+	elif test -n "$runpath_var"; then
+	  case "$finalize_perm_rpath " in
+	  *" $libdir "*) ;;
+	  *) finalize_perm_rpath="$finalize_perm_rpath $libdir" ;;
+	  esac
+	fi
+      done
+      # Substitute the hardcoded libdirs into the rpath.
+      if test -n "$hardcode_libdir_separator" &&
+	 test -n "$hardcode_libdirs"; then
+	libdir="$hardcode_libdirs"
+	eval rpath=\" $hardcode_libdir_flag_spec\"
+      fi
+      finalize_rpath="$rpath"
+
+      if test -n "$libobjs" && test "$build_old_libs" = yes; then
+	# Transform all the library objects into standard objects.
+	compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+	finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
+      fi
+
+      func_generate_dlsyms "$outputname" "@PROGRAM@" "no"
+
+      # template prelinking step
+      if test -n "$prelink_cmds"; then
+	func_execute_cmds "$prelink_cmds" 'exit $?'
+      fi
+
+      wrappers_required=yes
+      case $host in
+      *cegcc* | *mingw32ce*)
+        # Disable wrappers for cegcc and mingw32ce hosts; we are cross-compiling anyway.
+        wrappers_required=no
+        ;;
+      *cygwin* | *mingw* )
+        if test "$build_libtool_libs" != yes; then
+          wrappers_required=no
+        fi
+        ;;
+      *)
+        if test "$need_relink" = no || test "$build_libtool_libs" != yes; then
+          wrappers_required=no
+        fi
+        ;;
+      esac
+      if test "$wrappers_required" = no; then
+	# Replace the output file specification.
+	compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+	link_command="$compile_command$compile_rpath"
+
+	# We have no uninstalled library dependencies, so finalize right now.
+	exit_status=0
+	func_show_eval "$link_command" 'exit_status=$?'
+
+	# Delete the generated files.
+	if test -f "$output_objdir/${outputname}S.${objext}"; then
+	  func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"'
+	fi
+
+	exit $exit_status
+      fi
+
+      if test -n "$compile_shlibpath$finalize_shlibpath"; then
+	compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command"
+      fi
+      if test -n "$finalize_shlibpath"; then
+	finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command"
+      fi
+
+      compile_var=
+      finalize_var=
+      if test -n "$runpath_var"; then
+	if test -n "$perm_rpath"; then
+	  # We should set the runpath_var.
+	  rpath=
+	  for dir in $perm_rpath; do
+	    rpath="$rpath$dir:"
+	  done
+	  compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
+	fi
+	if test -n "$finalize_perm_rpath"; then
+	  # We should set the runpath_var.
+	  rpath=
+	  for dir in $finalize_perm_rpath; do
+	    rpath="$rpath$dir:"
+	  done
+	  finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
+	fi
+      fi
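+      # (Illustrative example: if configure set runpath_var=LD_RUN_PATH and
+      # perm_rpath="/opt/lib /usr/local/lib", the loop above yields
+      #   compile_var="LD_RUN_PATH=\"/opt/lib:/usr/local/lib:\$LD_RUN_PATH\" "
+      # which is prefixed to the link command below.  The variable name is
+      # host-dependent; LD_RUN_PATH is just a common case.)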
+
+      if test "$no_install" = yes; then
+	# We don't need to create a wrapper script.
+	link_command="$compile_var$compile_command$compile_rpath"
+	# Replace the output file specification.
+	link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
+	# Delete the old output file.
+	$opt_dry_run || $RM $output
+	# Link the executable and exit
+	func_show_eval "$link_command" 'exit $?'
+	exit $EXIT_SUCCESS
+      fi
+
+      if test "$hardcode_action" = relink; then
+	# Fast installation is not supported
+	link_command="$compile_var$compile_command$compile_rpath"
+	relink_command="$finalize_var$finalize_command$finalize_rpath"
+
+	func_warning "this platform does not like uninstalled shared libraries"
+	func_warning "\`$output' will be relinked during installation"
+      else
+	if test "$fast_install" != no; then
+	  link_command="$finalize_var$compile_command$finalize_rpath"
+	  if test "$fast_install" = yes; then
+	    relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'`
+	  else
+	    # fast_install is set to needless
+	    relink_command=
+	  fi
+	else
+	  link_command="$compile_var$compile_command$compile_rpath"
+	  relink_command="$finalize_var$finalize_command$finalize_rpath"
+	fi
+      fi
+
+      # Replace the output file specification.
+      link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'`
+
+      # Delete the old output files.
+      $opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname
+
+      func_show_eval "$link_command" 'exit $?'
+
+      # Now create the wrapper script.
+      func_verbose "creating $output"
+
+      # Quote the relink command for shipping.
+      if test -n "$relink_command"; then
+	# Preserve any variables that may affect compiler behavior
+	for var in $variables_saved_for_relink; do
+	  if eval test -z \"\${$var+set}\"; then
+	    relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+	  elif eval var_value=\$$var; test -z "$var_value"; then
+	    relink_command="$var=; export $var; $relink_command"
+	  else
+	    func_quote_for_eval "$var_value"
+	    relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
+	  fi
+	done
+	relink_command="(cd `pwd`; $relink_command)"
+	relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
+      fi
+
+      # Only actually do things if not in dry run mode.
+      $opt_dry_run || {
+	# win32 will think the script is a binary if it has
+	# a .exe suffix, so we strip it off here.
+	case $output in
+	  *.exe) func_stripname '' '.exe' "$output"
+	         output=$func_stripname_result ;;
+	esac
+	# Test for cygwin because mv fails without .exe extensions.
+	case $host in
+	  *cygwin*)
+	    exeext=.exe
+	    func_stripname '' '.exe' "$outputname"
+	    outputname=$func_stripname_result ;;
+	  *) exeext= ;;
+	esac
+	case $host in
+	  *cygwin* | *mingw* )
+	    func_dirname_and_basename "$output" "" "."
+	    output_name=$func_basename_result
+	    output_path=$func_dirname_result
+	    cwrappersource="$output_path/$objdir/lt-$output_name.c"
+	    cwrapper="$output_path/$output_name.exe"
+	    $RM $cwrappersource $cwrapper
+	    trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15
+
+	    func_emit_cwrapperexe_src > $cwrappersource
+
+	    # The wrapper executable is built using the $host compiler,
+	    # because it contains $host paths and files. If cross-
+	    # compiling, it, like the target executable, must be
+	    # executed on the $host or under an emulation environment.
+	    $opt_dry_run || {
+	      $LTCC $LTCFLAGS -o $cwrapper $cwrappersource
+	      $STRIP $cwrapper
+	    }
+
+	    # Now, create the wrapper script for func_source use:
+	    func_ltwrapper_scriptname $cwrapper
+	    $RM $func_ltwrapper_scriptname_result
+	    trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15
+	    $opt_dry_run || {
+	      # note: this script will not be executed, so do not chmod.
+	      if test "x$build" = "x$host" ; then
+		$cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result
+	      else
+		func_emit_wrapper no > $func_ltwrapper_scriptname_result
+	      fi
+	    }
+	  ;;
+	  * )
+	    $RM $output
+	    trap "$RM $output; exit $EXIT_FAILURE" 1 2 15
+
+	    func_emit_wrapper no > $output
+	    chmod +x $output
+	  ;;
+	esac
+      }
+      exit $EXIT_SUCCESS
+      ;;
+    esac
+
+    # See if we need to build an old-fashioned archive.
+    for oldlib in $oldlibs; do
+
+      if test "$build_libtool_libs" = convenience; then
+	oldobjs="$libobjs_save $symfileobj"
+	addlibs="$convenience"
+	build_libtool_libs=no
+      else
+	if test "$build_libtool_libs" = module; then
+	  oldobjs="$libobjs_save"
+	  build_libtool_libs=no
+	else
+	  oldobjs="$old_deplibs $non_pic_objects"
+	  if test "$preload" = yes && test -f "$symfileobj"; then
+	    oldobjs="$oldobjs $symfileobj"
+	  fi
+	fi
+	addlibs="$old_convenience"
+      fi
+
+      if test -n "$addlibs"; then
+	gentop="$output_objdir/${outputname}x"
+	generated="$generated $gentop"
+
+	func_extract_archives $gentop $addlibs
+	oldobjs="$oldobjs $func_extract_archives_result"
+      fi
+
+      # Do each command in the archive commands.
+      if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then
+	cmds=$old_archive_from_new_cmds
+      else
+
+	# Add any objects from preloaded convenience libraries
+	if test -n "$dlprefiles"; then
+	  gentop="$output_objdir/${outputname}x"
+	  generated="$generated $gentop"
+
+	  func_extract_archives $gentop $dlprefiles
+	  oldobjs="$oldobjs $func_extract_archives_result"
+	fi
+
+	# POSIX demands no paths to be encoded in archives.  We have
+	# to avoid creating archives with duplicate basenames if we
+	# might have to extract them afterwards, e.g., when creating a
+	# static archive out of a convenience library, or when linking
+	# the entirety of a libtool archive into another (currently
+	# not supported by libtool).
+	if (for obj in $oldobjs
+	    do
+	      func_basename "$obj"
+	      $ECHO "$func_basename_result"
+	    done | sort | sort -uc >/dev/null 2>&1); then
+	  :
+	else
+	  echo "copying selected object files to avoid basename conflicts..."
+	  gentop="$output_objdir/${outputname}x"
+	  generated="$generated $gentop"
+	  func_mkdir_p "$gentop"
+	  save_oldobjs=$oldobjs
+	  oldobjs=
+	  counter=1
+	  for obj in $save_oldobjs
+	  do
+	    func_basename "$obj"
+	    objbase="$func_basename_result"
+	    case " $oldobjs " in
+	    " ") oldobjs=$obj ;;
+	    *[\ /]"$objbase "*)
+	      while :; do
+		# Make sure we don't pick an alternate name that also
+		# overlaps.
+		newobj=lt$counter-$objbase
+		func_arith $counter + 1
+		counter=$func_arith_result
+		case " $oldobjs " in
+		*[\ /]"$newobj "*) ;;
+		*) if test ! -f "$gentop/$newobj"; then break; fi ;;
+		esac
+	      done
+	      func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj"
+	      oldobjs="$oldobjs $gentop/$newobj"
+	      ;;
+	    *) oldobjs="$oldobjs $obj" ;;
+	    esac
+	  done
+	fi
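+	# (Illustrative example: if $oldobjs contains both a/foo.o and
+	# b/foo.o, the second is hard-linked or copied to $gentop/lt1-foo.o
+	# above, so the archive never ends up with two members named foo.o.)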
+	eval cmds=\"$old_archive_cmds\"
+
+	func_len " $cmds"
+	len=$func_len_result
+	if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
+	  cmds=$old_archive_cmds
+	else
+	  # the command line is too long to link in one step, link in parts
+	  func_verbose "using piecewise archive linking..."
+	  save_RANLIB=$RANLIB
+	  RANLIB=:
+	  objlist=
+	  concat_cmds=
+	  save_oldobjs=$oldobjs
+	  oldobjs=
+	  # Is there a better way of finding the last object in the list?
+	  for obj in $save_oldobjs
+	  do
+	    last_oldobj=$obj
+	  done
+	  eval test_cmds=\"$old_archive_cmds\"
+	  func_len " $test_cmds"
+	  len0=$func_len_result
+	  len=$len0
+	  for obj in $save_oldobjs
+	  do
+	    func_len " $obj"
+	    func_arith $len + $func_len_result
+	    len=$func_arith_result
+	    func_append objlist " $obj"
+	    if test "$len" -lt "$max_cmd_len"; then
+	      :
+	    else
+	      # Flush the queued objects before the command line gets too long.
+	      oldobjs=$objlist
+	      if test "$obj" = "$last_oldobj" ; then
+		RANLIB=$save_RANLIB
+	      fi
+	      test -z "$concat_cmds" || concat_cmds=$concat_cmds~
+	      eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\"
+	      objlist=
+	      len=$len0
+	    fi
+	  done
+	  RANLIB=$save_RANLIB
+	  oldobjs=$objlist
+	  if test "X$oldobjs" = "X" ; then
+	    eval cmds=\"\$concat_cmds\"
+	  else
+	    eval cmds=\"\$concat_cmds~\$old_archive_cmds\"
+	  fi
+	fi
+      fi
+      func_execute_cmds "$cmds" 'exit $?'
+    done
+
+    test -n "$generated" && \
+      func_show_eval "${RM}r$generated"
+
+    # Now create the libtool archive.
+    case $output in
+    *.la)
+      old_library=
+      test "$build_old_libs" = yes && old_library="$libname.$libext"
+      func_verbose "creating $output"
+
+      # Preserve any variables that may affect compiler behavior
+      for var in $variables_saved_for_relink; do
+	if eval test -z \"\${$var+set}\"; then
+	  relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
+	elif eval var_value=\$$var; test -z "$var_value"; then
+	  relink_command="$var=; export $var; $relink_command"
+	else
+	  func_quote_for_eval "$var_value"
+	  relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
+	fi
+      done
+      # Quote the link command for shipping.
+      relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)"
+      relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
+      if test "$hardcode_automatic" = yes ; then
+	relink_command=
+      fi
+
+      # Only create the output if not a dry run.
+      $opt_dry_run || {
+	for installed in no yes; do
+	  if test "$installed" = yes; then
+	    if test -z "$install_libdir"; then
+	      break
+	    fi
+	    output="$output_objdir/$outputname"i
+	    # Replace all uninstalled libtool libraries with the installed ones
+	    newdependency_libs=
+	    for deplib in $dependency_libs; do
+	      case $deplib in
+	      *.la)
+		func_basename "$deplib"
+		name="$func_basename_result"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$deplib' is not a valid libtool archive"
+		newdependency_libs="$newdependency_libs $libdir/$name"
+		;;
+	      *) newdependency_libs="$newdependency_libs $deplib" ;;
+	      esac
+	    done
+	    dependency_libs="$newdependency_libs"
+	    newdlfiles=
+
+	    for lib in $dlfiles; do
+	      case $lib in
+	      *.la)
+	        func_basename "$lib"
+		name="$func_basename_result"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$lib' is not a valid libtool archive"
+		newdlfiles="$newdlfiles $libdir/$name"
+		;;
+	      *) newdlfiles="$newdlfiles $lib" ;;
+	      esac
+	    done
+	    dlfiles="$newdlfiles"
+	    newdlprefiles=
+	    for lib in $dlprefiles; do
+	      case $lib in
+	      *.la)
+		# Only pass preopened files to the pseudo-archive (for
+		# eventual linking with the application that links it) if we
+		# didn't already link the preopened objects directly into
+		# the library:
+		func_basename "$lib"
+		name="$func_basename_result"
+		eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
+		test -z "$libdir" && \
+		  func_fatal_error "\`$lib' is not a valid libtool archive"
+		newdlprefiles="$newdlprefiles $libdir/$name"
+		;;
+	      esac
+	    done
+	    dlprefiles="$newdlprefiles"
+	  else
+	    newdlfiles=
+	    for lib in $dlfiles; do
+	      case $lib in
+		[\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+		*) abs=`pwd`"/$lib" ;;
+	      esac
+	      newdlfiles="$newdlfiles $abs"
+	    done
+	    dlfiles="$newdlfiles"
+	    newdlprefiles=
+	    for lib in $dlprefiles; do
+	      case $lib in
+		[\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
+		*) abs=`pwd`"/$lib" ;;
+	      esac
+	      newdlprefiles="$newdlprefiles $abs"
+	    done
+	    dlprefiles="$newdlprefiles"
+	  fi
+	  $RM $output
+	  # place dlname in correct position for cygwin
+	  # In fact, it would be nice if we could use this code for all target
+	  # systems that can't hard-code library paths into their executables
+	  # and that have no shared library path variable independent of PATH,
+	  # but it turns out we can't easily determine that from inspecting
+	  # libtool variables, so we have to hard-code the OSs to which it
+	  # applies here; at the moment, that means platforms that use the PE
+	  # object format with DLL files.  See the long comment at the top of
+	  # tests/bindir.at for full details.
+	  tdlname=$dlname
+	  case $host,$output,$installed,$module,$dlname in
+	    *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll)
+	      # If a -bindir argument was supplied, place the dll there.
+	      if test "x$bindir" != x; then
+		func_relative_path "$install_libdir" "$bindir"
+		tdlname=$func_relative_path_result$dlname
+	      else
+		# Otherwise fall back on heuristic.
+		tdlname=../bin/$dlname
+	      fi
+	      ;;
+	  esac
+	  $ECHO > $output "\
+# $outputname - a libtool library file
+# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
+#
+# Please DO NOT delete this file!
+# It is necessary for linking the library.
+
+# The name that we can dlopen(3).
+dlname='$tdlname'
+
+# Names of this library.
+library_names='$library_names'
+
+# The name of the static archive.
+old_library='$old_library'
+
+# Linker flags that cannot go in dependency_libs.
+inherited_linker_flags='$new_inherited_linker_flags'
+
+# Libraries that this one depends upon.
+dependency_libs='$dependency_libs'
+
+# Names of additional weak libraries provided by this library
+weak_library_names='$weak_libs'
+
+# Version information for $libname.
+current=$current
+age=$age
+revision=$revision
+
+# Is this an already installed library?
+installed=$installed
+
+# Should we warn about portability when linking against -modules?
+shouldnotlink=$module
+
+# Files to dlopen/dlpreopen
+dlopen='$dlfiles'
+dlpreopen='$dlprefiles'
+
+# Directory that this library needs to be installed in:
+libdir='$install_libdir'"
+	  if test "$installed" = no && test "$need_relink" = yes; then
+	    $ECHO >> $output "\
+relink_command=\"$relink_command\""
+	  fi
+	done
+      }
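+      # (Illustrative sketch, with made-up values: on a GNU/Linux host the
+      # loop above writes the uninstalled libfoo.la and the installed
+      # variant $output_objdir/libfoo.lai, each containing lines such as
+      #   dlname='libfoo.so.1'
+      #   library_names='libfoo.so.1.2.1 libfoo.so.1 libfoo.so'
+      #   old_library='libfoo.a'
+      #   libdir='/usr/local/lib'
+      # where -version-info 3:1:2 gave current=3, revision=1, age=2.)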
+
+      # Do a symbolic link so that the libtool archive can be found in
+      # LD_LIBRARY_PATH before the program is installed.
+      func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?'
+      ;;
+    esac
+    exit $EXIT_SUCCESS
+}
+
+{ test "$mode" = link || test "$mode" = relink; } &&
+    func_mode_link ${1+"$@"}
+
+
+# func_mode_uninstall arg...
+func_mode_uninstall ()
+{
+    $opt_debug
+    RM="$nonopt"
+    files=
+    rmforce=
+    exit_status=0
+
+    # This variable tells wrapper scripts just to set variables rather
+    # than running their programs.
+    libtool_install_magic="$magic"
+
+    for arg
+    do
+      case $arg in
+      -f) RM="$RM $arg"; rmforce=yes ;;
+      -*) RM="$RM $arg" ;;
+      *) files="$files $arg" ;;
+      esac
+    done
+
+    test -z "$RM" && \
+      func_fatal_help "you must specify an RM program"
+
+    rmdirs=
+
+    origobjdir="$objdir"
+    for file in $files; do
+      func_dirname "$file" "" "."
+      dir="$func_dirname_result"
+      if test "X$dir" = X.; then
+	objdir="$origobjdir"
+      else
+	objdir="$dir/$origobjdir"
+      fi
+      func_basename "$file"
+      name="$func_basename_result"
+      test "$mode" = uninstall && objdir="$dir"
+
+      # Remember objdir for removal later, being careful to avoid duplicates
+      if test "$mode" = clean; then
+	case " $rmdirs " in
+	  *" $objdir "*) ;;
+	  *) rmdirs="$rmdirs $objdir" ;;
+	esac
+      fi
+
+      # Don't error if the file doesn't exist and rm -f was used.
+      if { test -L "$file"; } >/dev/null 2>&1 ||
+	 { test -h "$file"; } >/dev/null 2>&1 ||
+	 test -f "$file"; then
+	:
+      elif test -d "$file"; then
+	exit_status=1
+	continue
+      elif test "$rmforce" = yes; then
+	continue
+      fi
+
+      rmfiles="$file"
+
+      case $name in
+      *.la)
+	# Possibly a libtool archive, so verify it.
+	if func_lalib_p "$file"; then
+	  func_source $dir/$name
+
+	  # Delete the libtool libraries and symlinks.
+	  for n in $library_names; do
+	    rmfiles="$rmfiles $objdir/$n"
+	  done
+	  test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library"
+
+	  case "$mode" in
+	  clean)
+	    case "  $library_names " in
+	    # the leading "  " catches an empty $dlname
+	    *" $dlname "*) ;;
+	    *) rmfiles="$rmfiles $objdir/$dlname" ;;
+	    esac
+	    test -n "$libdir" && rmfiles="$rmfiles $objdir/$name $objdir/${name}i"
+	    ;;
+	  uninstall)
+	    if test -n "$library_names"; then
+	      # Do each command in the postuninstall commands.
+	      func_execute_cmds "$postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
+	    fi
+
+	    if test -n "$old_library"; then
+	      # Do each command in the old_postuninstall commands.
+	      func_execute_cmds "$old_postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1'
+	    fi
+	    # FIXME: should reinstall the best remaining shared library.
+	    ;;
+	  esac
+	fi
+	;;
+
+      *.lo)
+	# Possibly a libtool object, so verify it.
+	if func_lalib_p "$file"; then
+
+	  # Read the .lo file
+	  func_source $dir/$name
+
+	  # Add PIC object to the list of files to remove.
+	  if test -n "$pic_object" &&
+	     test "$pic_object" != none; then
+	    rmfiles="$rmfiles $dir/$pic_object"
+	  fi
+
+	  # Add non-PIC object to the list of files to remove.
+	  if test -n "$non_pic_object" &&
+	     test "$non_pic_object" != none; then
+	    rmfiles="$rmfiles $dir/$non_pic_object"
+	  fi
+	fi
+	;;
+
+      *)
+	if test "$mode" = clean ; then
+	  noexename=$name
+	  case $file in
+	  *.exe)
+	    func_stripname '' '.exe' "$file"
+	    file=$func_stripname_result
+	    func_stripname '' '.exe' "$name"
+	    noexename=$func_stripname_result
+	    # $file with .exe has already been added to rmfiles,
+	    # add $file without .exe
+	    rmfiles="$rmfiles $file"
+	    ;;
+	  esac
+	  # Do a test to see if this is a libtool program.
+	  if func_ltwrapper_p "$file"; then
+	    if func_ltwrapper_executable_p "$file"; then
+	      func_ltwrapper_scriptname "$file"
+	      relink_command=
+	      func_source $func_ltwrapper_scriptname_result
+	      rmfiles="$rmfiles $func_ltwrapper_scriptname_result"
+	    else
+	      relink_command=
+	      func_source $dir/$noexename
+	    fi
+
+	    # note $name still contains .exe if it was in $file originally
+	    # as does the version of $file that was added into $rmfiles
+	    rmfiles="$rmfiles $objdir/$name $objdir/${name}S.${objext}"
+	    if test "$fast_install" = yes && test -n "$relink_command"; then
+	      rmfiles="$rmfiles $objdir/lt-$name"
+	    fi
+	    if test "X$noexename" != "X$name" ; then
+	      rmfiles="$rmfiles $objdir/lt-${noexename}.c"
+	    fi
+	  fi
+	fi
+	;;
+      esac
+      func_show_eval "$RM $rmfiles" 'exit_status=1'
+    done
+    objdir="$origobjdir"
+
+    # Try to remove the ${objdir}s in the directories where we deleted files
+    for dir in $rmdirs; do
+      if test -d "$dir"; then
+	func_show_eval "rmdir $dir >/dev/null 2>&1"
+      fi
+    done
+
+    exit $exit_status
+}
+
+{ test "$mode" = uninstall || test "$mode" = clean; } &&
+    func_mode_uninstall ${1+"$@"}
+
+test -z "$mode" && {
+  help="$generic_help"
+  func_fatal_help "you must specify a MODE"
+}
+
+test -z "$exec_cmd" && \
+  func_fatal_help "invalid operation mode \`$mode'"
+
+if test -n "$exec_cmd"; then
+  eval exec "$exec_cmd"
+  exit $EXIT_FAILURE
+fi
+
+exit $exit_status
+
+
+# The TAGs below are defined such that we never get into a situation
+# in which we disable both kinds of libraries.  Given conflicting
+# choices, we go for a static library, which is the most portable,
+# since we can't tell whether shared libraries were disabled because
+# the user asked for that or because the platform doesn't support
+# them.  This is particularly important on AIX, because we don't
+# support having both static and shared libraries enabled at the same
+# time on that platform, so we default to a shared-only configuration.
+# If a disable-shared tag is given, we'll fall back to a static-only
+# configuration.  But we'll never go from static-only to shared-only.
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-shared
+build_libtool_libs=no
+build_old_libs=yes
+# ### END LIBTOOL TAG CONFIG: disable-shared
+
+# ### BEGIN LIBTOOL TAG CONFIG: disable-static
+build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac`
+# ### END LIBTOOL TAG CONFIG: disable-static
+
+# Local Variables:
+# mode:shell-script
+# sh-indentation:2
+# End:
+# vi:sw=2
+
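For orientation, the here-doc above emits an archive shaped like the following
sketch — all values hypothetical, for an imaginary libfoo built with
-version-info 3:3:2 (so major = current - age = 1):

    # libfoo.la - a libtool library file
    dlname='libfoo.so.1'
    library_names='libfoo.so.1.2.3 libfoo.so.1 libfoo.so'
    old_library='libfoo.a'
    inherited_linker_flags=''
    dependency_libs=' -lm'
    weak_library_names=''
    current=3
    age=2
    revision=3
    installed=yes
    shouldnotlink=no
    dlopen=''
    dlpreopen=''
    libdir='/usr/local/lib'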
diff --git a/lang/c/jansson/missing b/lang/c/jansson/missing
new file mode 100755
index 0000000..1c8ff70
--- /dev/null
+++ b/lang/c/jansson/missing
@@ -0,0 +1,367 @@
+#! /bin/sh
+# Common stub for a few missing GNU programs while installing.
+
+scriptversion=2006-05-10.23
+
+# Copyright (C) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005, 2006
+#   Free Software Foundation, Inc.
+# Originally by François Pinard <pinard@iro.umontreal.ca>, 1996.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+# 02110-1301, USA.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+if test $# -eq 0; then
+  echo 1>&2 "Try \`$0 --help' for more information"
+  exit 1
+fi
+
+run=:
+sed_output='s/.* --output[ =]\([^ ]*\).*/\1/p'
+sed_minuso='s/.* -o \([^ ]*\).*/\1/p'
+
+# In the cases where this matters, `missing' is being run in the
+# srcdir already.
+if test -f configure.ac; then
+  configure_ac=configure.ac
+else
+  configure_ac=configure.in
+fi
+
+msg="missing on your system"
+
+case $1 in
+--run)
+  # Try to run requested program, and just exit if it succeeds.
+  run=
+  shift
+  "$@" && exit 0
+  # Exit code 63 means version mismatch.  This often happens
+  # when the user tries to use an ancient version of a tool on
+  # a file that requires a minimum version.  In this case we
+  # should proceed as if the program had been absent, or as if
+  # --run hadn't been passed.
+  if test $? = 63; then
+    run=:
+    msg="probably too old"
+  fi
+  ;;
+
+  -h|--h|--he|--hel|--help)
+    echo "\
+$0 [OPTION]... PROGRAM [ARGUMENT]...
+
+Handle \`PROGRAM [ARGUMENT]...' for when PROGRAM is missing, or return an
+error status if there is no known handling for PROGRAM.
+
+Options:
+  -h, --help      display this help and exit
+  -v, --version   output version information and exit
+  --run           try to run the given command, and emulate it if it fails
+
+Supported PROGRAM values:
+  aclocal      touch file \`aclocal.m4'
+  autoconf     touch file \`configure'
+  autoheader   touch file \`config.h.in'
+  autom4te     touch the output file, or create a stub one
+  automake     touch all \`Makefile.in' files
+  bison        create \`y.tab.[ch]', if possible, from existing .[ch]
+  flex         create \`lex.yy.c', if possible, from existing .c
+  help2man     touch the output file
+  lex          create \`lex.yy.c', if possible, from existing .c
+  makeinfo     touch the output file
+  tar          try tar, gnutar, gtar, then tar without non-portable flags
+  yacc         create \`y.tab.[ch]', if possible, from existing .[ch]
+
+Send bug reports to <bug-automake@gnu.org>."
+    exit $?
+    ;;
+
+  -v|--v|--ve|--ver|--vers|--versi|--versio|--version)
+    echo "missing $scriptversion (GNU Automake)"
+    exit $?
+    ;;
+
+  -*)
+    echo 1>&2 "$0: Unknown \`$1' option"
+    echo 1>&2 "Try \`$0 --help' for more information"
+    exit 1
+    ;;
+
+esac
+
+# Now exit if we have it, but it failed.  Also exit now if we
+# don't have it and --version was passed (most likely to detect
+# the program).
+case $1 in
+  lex|yacc)
+    # Not GNU programs, they don't have --version.
+    ;;
+
+  tar)
+    if test -n "$run"; then
+       echo 1>&2 "ERROR: \`tar' requires --run"
+       exit 1
+    elif test "x$2" = "x--version" || test "x$2" = "x--help"; then
+       exit 1
+    fi
+    ;;
+
+  *)
+    if test -z "$run" && ($1 --version) > /dev/null 2>&1; then
+       # We have it, but it failed.
+       exit 1
+    elif test "x$2" = "x--version" || test "x$2" = "x--help"; then
+       # Could not run --version or --help.  This is probably someone
+       # running `$TOOL --version' or `$TOOL --help' to check whether
+       # $TOOL exists, not knowing that $TOOL uses missing.
+       exit 1
+    fi
+    ;;
+esac
+
+# If it does not exist, or fails to run (possibly an outdated version),
+# try to emulate it.
+case $1 in
+  aclocal*)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+         you modified \`acinclude.m4' or \`${configure_ac}'.  You might want
+         to install the \`Automake' and \`Perl' packages.  Grab them from
+         any GNU archive site."
+    touch aclocal.m4
+    ;;
+
+  autoconf)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+         you modified \`${configure_ac}'.  You might want to install the
+         \`Autoconf' and \`GNU m4' packages.  Grab them from any GNU
+         archive site."
+    touch configure
+    ;;
+
+  autoheader)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+         you modified \`acconfig.h' or \`${configure_ac}'.  You might want
+         to install the \`Autoconf' and \`GNU m4' packages.  Grab them
+         from any GNU archive site."
+    files=`sed -n 's/^[ ]*A[CM]_CONFIG_HEADER(\([^)]*\)).*/\1/p' ${configure_ac}`
+    test -z "$files" && files="config.h"
+    touch_files=
+    for f in $files; do
+      case $f in
+      *:*) touch_files="$touch_files "`echo "$f" |
+				       sed -e 's/^[^:]*://' -e 's/:.*//'`;;
+      *) touch_files="$touch_files $f.in";;
+      esac
+    done
+    touch $touch_files
+    ;;
+
+  automake*)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+         you modified \`Makefile.am', \`acinclude.m4' or \`${configure_ac}'.
+         You might want to install the \`Automake' and \`Perl' packages.
+         Grab them from any GNU archive site."
+    find . -type f -name Makefile.am -print |
+	   sed 's/\.am$/.in/' |
+	   while read f; do touch "$f"; done
+    ;;
+
+  autom4te)
+    echo 1>&2 "\
+WARNING: \`$1' is needed, but is $msg.
+         You might have modified some files without having the
+         proper tools for further handling them.
+         You can get \`$1' as part of \`Autoconf' from any GNU
+         archive site."
+
+    file=`echo "$*" | sed -n "$sed_output"`
+    test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"`
+    if test -f "$file"; then
+	touch $file
+    else
+	test -z "$file" || exec >$file
+	echo "#! /bin/sh"
+	echo "# Created by GNU Automake missing as a replacement of"
+	echo "#  $ $@"
+	echo "exit 0"
+	chmod +x $file
+	exit 1
+    fi
+    ;;
+
+  bison|yacc)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+         you modified a \`.y' file.  You may need the \`Bison' package
+         in order for those modifications to take effect.  You can get
+         \`Bison' from any GNU archive site."
+    rm -f y.tab.c y.tab.h
+    if test $# -ne 1; then
+        eval LASTARG="\${$#}"
+	case $LASTARG in
+	*.y)
+	    SRCFILE=`echo "$LASTARG" | sed 's/y$/c/'`
+	    if test -f "$SRCFILE"; then
+	         cp "$SRCFILE" y.tab.c
+	    fi
+	    SRCFILE=`echo "$LASTARG" | sed 's/y$/h/'`
+	    if test -f "$SRCFILE"; then
+	         cp "$SRCFILE" y.tab.h
+	    fi
+	  ;;
+	esac
+    fi
+    if test ! -f y.tab.h; then
+	echo >y.tab.h
+    fi
+    if test ! -f y.tab.c; then
+	echo 'main() { return 0; }' >y.tab.c
+    fi
+    ;;
+
+  lex|flex)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+         you modified a \`.l' file.  You may need the \`Flex' package
+         in order for those modifications to take effect.  You can get
+         \`Flex' from any GNU archive site."
+    rm -f lex.yy.c
+    if test $# -ne 1; then
+        eval LASTARG="\${$#}"
+	case $LASTARG in
+	*.l)
+	    SRCFILE=`echo "$LASTARG" | sed 's/l$/c/'`
+	    if test -f "$SRCFILE"; then
+	         cp "$SRCFILE" lex.yy.c
+	    fi
+	  ;;
+	esac
+    fi
+    if test ! -f lex.yy.c; then
+	echo 'main() { return 0; }' >lex.yy.c
+    fi
+    ;;
+
+  help2man)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+	 you modified a dependency of a manual page.  You may need the
+	 \`Help2man' package in order for those modifications to take
+	 effect.  You can get \`Help2man' from any GNU archive site."
+
+    file=`echo "$*" | sed -n "$sed_output"`
+    test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"`
+    if test -f "$file"; then
+	touch $file
+    else
+	test -z "$file" || exec >$file
+	echo ".ab help2man is required to generate this page"
+	exit 1
+    fi
+    ;;
+
+  makeinfo)
+    echo 1>&2 "\
+WARNING: \`$1' is $msg.  You should only need it if
+         you modified a \`.texi' or \`.texinfo' file, or any other file
+         indirectly affecting the appearance of the manual.  The spurious
+         call might also be the consequence of using a buggy \`make' (AIX,
+         DU, IRIX).  You might want to install the \`Texinfo' package or
+         the \`GNU make' package.  Grab either from any GNU archive site."
+    # The file to touch is that specified with -o ...
+    file=`echo "$*" | sed -n "$sed_output"`
+    test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"`
+    if test -z "$file"; then
+      # ... or it is the one specified with @setfilename ...
+      infile=`echo "$*" | sed 's/.* \([^ ]*\) *$/\1/'`
+      file=`sed -n '
+	/^@setfilename/{
+	  s/.* \([^ ]*\) *$/\1/
+	  p
+	  q
+	}' $infile`
+      # ... or it is derived from the source name (dir/f.texi becomes f.info)
+      test -z "$file" && file=`echo "$infile" | sed 's,.*/,,;s,.[^.]*$,,'`.info
+    fi
+    # If the file does not exist, the user really needs makeinfo;
+    # let's fail without touching anything.
+    test -f $file || exit 1
+    touch $file
+    ;;
+
+  tar)
+    shift
+
+    # We have already tried tar in the generic part.
+    # Look for gnutar/gtar before invocation to avoid ugly error
+    # messages.
+    if (gnutar --version > /dev/null 2>&1); then
+       gnutar "$@" && exit 0
+    fi
+    if (gtar --version > /dev/null 2>&1); then
+       gtar "$@" && exit 0
+    fi
+    firstarg="$1"
+    if shift; then
+	case $firstarg in
+	*o*)
+	    firstarg=`echo "$firstarg" | sed s/o//`
+	    tar "$firstarg" "$@" && exit 0
+	    ;;
+	esac
+	case $firstarg in
+	*h*)
+	    firstarg=`echo "$firstarg" | sed s/h//`
+	    tar "$firstarg" "$@" && exit 0
+	    ;;
+	esac
+    fi
+
+    echo 1>&2 "\
+WARNING: I can't seem to run \`tar' with the given arguments.
+         You may want to install GNU tar or Free paxutils, or check the
+         command line arguments."
+    exit 1
+    ;;
+
+  *)
+    echo 1>&2 "\
+WARNING: \`$1' is needed, and is $msg.
+         You might have modified some files without having the
+         proper tools for further handling them.  Check the \`README' file,
+         it often tells you about the needed prerequisites for installing
+         this package.  You may also peek at any GNU archive site, in case
+         some other package would contain this missing \`$1' program."
+    exit 1
+    ;;
+esac
+
+exit 0
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-end: "$"
+# End:
diff --git a/lang/c/jansson/src/Makefile.am b/lang/c/jansson/src/Makefile.am
new file mode 100644
index 0000000..cd163fd
--- /dev/null
+++ b/lang/c/jansson/src/Makefile.am
@@ -0,0 +1,27 @@
+#include_HEADERS = jansson.h jansson_config.h
+
+noinst_LTLIBRARIES = libjansson.la
+libjansson_la_SOURCES = \
+	jansson.h \
+	jansson_config.h \
+	dump.c \
+	error.c \
+	hashtable.c \
+	hashtable.h \
+	jansson_private.h \
+	load.c \
+	memory.c \
+	pack_unpack.c \
+	strbuffer.c \
+	strbuffer.h \
+	utf.c \
+	utf.h \
+	value.c
+libjansson_la_LDFLAGS = \
+	-export-symbols-regex '^json_' \
+	-version-info 5:0:1
+
+if GCC
+# These flags are gcc specific
+AM_CFLAGS = -Wall -Wextra -Wdeclaration-after-statement -Werror
+endif
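Two of the flags above deserve a gloss. -export-symbols-regex '^json_'
restricts the exported symbols to the public json_* API. -version-info 5:0:1
is libtool's current:revision:age interface version; on ELF platforms libtool
derives the library suffix as (current - age).(age).(revision), so 5:0:1 would
yield libjansson.so.4.1.0 with soname libjansson.so.4 if this were an
installed library. As a noinst_ convenience library it is linked statically
into its consumers, so the version info is dormant here.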
diff --git a/lang/c/jansson/src/Makefile.in b/lang/c/jansson/src/Makefile.in
new file mode 100644
index 0000000..d20abb0
--- /dev/null
+++ b/lang/c/jansson/src/Makefile.in
@@ -0,0 +1,477 @@
+# Makefile.in generated by automake 1.10 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006  Free Software Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+#include_HEADERS = jansson.h jansson_config.h
+
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+subdir = src
+DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in \
+	$(srcdir)/jansson_config.h.in
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+	$(ACLOCAL_M4)
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = $(top_builddir)/config.h
+CONFIG_CLEAN_FILES = jansson_config.h
+LTLIBRARIES = $(noinst_LTLIBRARIES)
+libjansson_la_LIBADD =
+am_libjansson_la_OBJECTS = dump.lo error.lo hashtable.lo load.lo \
+	memory.lo pack_unpack.lo strbuffer.lo utf.lo value.lo
+libjansson_la_OBJECTS = $(am_libjansson_la_OBJECTS)
+libjansson_la_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) \
+	$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+	$(libjansson_la_LDFLAGS) $(LDFLAGS) -o $@
+DEFAULT_INCLUDES = -I. -I$(top_builddir)@am__isrc@
+depcomp = $(SHELL) $(top_srcdir)/depcomp
+am__depfiles_maybe = depfiles
+COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+	$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+LTCOMPILE = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
+	--mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
+	$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+CCLD = $(CC)
+LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
+	--mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \
+	$(LDFLAGS) -o $@
+SOURCES = $(libjansson_la_SOURCES)
+DIST_SOURCES = $(libjansson_la_SOURCES)
+ETAGS = etags
+CTAGS = ctags
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+LD = @LD@
+LDFLAGS = @LDFLAGS@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+MAKEINFO = @MAKEINFO@
+MKDIR_P = @MKDIR_P@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+RANLIB = @RANLIB@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+json_have_long_long = @json_have_long_long@
+json_inline = @json_inline@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+noinst_LTLIBRARIES = libjansson.la
+libjansson_la_SOURCES = \
+	jansson.h \
+	jansson_config.h \
+	dump.c \
+	error.c \
+	hashtable.c \
+	hashtable.h \
+	jansson_private.h \
+	load.c \
+	memory.c \
+	pack_unpack.c \
+	strbuffer.c \
+	strbuffer.h \
+	utf.c \
+	utf.h \
+	value.c
+
+libjansson_la_LDFLAGS = \
+	-export-symbols-regex '^json_' \
+	-version-info 5:0:1
+
+
+# These flags are gcc specific
+@GCC_TRUE@AM_CFLAGS = -Wall -Wextra -Wdeclaration-after-statement -Werror
+all: all-am
+
+.SUFFIXES:
+.SUFFIXES: .c .lo .o .obj
+$(srcdir)/Makefile.in:  $(srcdir)/Makefile.am  $(am__configure_deps)
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+	      cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \
+		&& exit 0; \
+	      exit 1;; \
+	  esac; \
+	done; \
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign  src/Makefile'; \
+	cd $(top_srcdir) && \
+	  $(AUTOMAKE) --foreign  src/Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+	@case '$?' in \
+	  *config.status*) \
+	    cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+	  *) \
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
+	esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure:  $(am__configure_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4):  $(am__aclocal_m4_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+jansson_config.h: $(top_builddir)/config.status $(srcdir)/jansson_config.h.in
+	cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@
+
+clean-noinstLTLIBRARIES:
+	-test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES)
+	@list='$(noinst_LTLIBRARIES)'; for p in $$list; do \
+	  dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \
+	  test "$$dir" != "$$p" || dir=.; \
+	  echo "rm -f \"$${dir}/so_locations\""; \
+	  rm -f "$${dir}/so_locations"; \
+	done
+libjansson.la: $(libjansson_la_OBJECTS) $(libjansson_la_DEPENDENCIES) 
+	$(libjansson_la_LINK)  $(libjansson_la_OBJECTS) $(libjansson_la_LIBADD) $(LIBS)
+
+mostlyclean-compile:
+	-rm -f *.$(OBJEXT)
+
+distclean-compile:
+	-rm -f *.tab.c
+
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/dump.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/error.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/hashtable.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/load.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/memory.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/pack_unpack.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/strbuffer.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/utf.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/value.Plo@am__quote@
+
+.c.o:
+@am__fastdepCC_TRUE@	$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCC_TRUE@	mv -f $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(COMPILE) -c $<
+
+.c.obj:
+@am__fastdepCC_TRUE@	$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
+@am__fastdepCC_TRUE@	mv -f $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(COMPILE) -c `$(CYGPATH_W) '$<'`
+
+.c.lo:
+@am__fastdepCC_TRUE@	$(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCC_TRUE@	mv -f $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LTCOMPILE) -c -o $@ $<
+
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-libtool:
+	-rm -rf .libs _libs
+
+ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
+	list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	mkid -fID $$unique
+tags: TAGS
+
+TAGS:  $(HEADERS) $(SOURCES)  $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	list='$(SOURCES) $(HEADERS)  $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \
+	  test -n "$$unique" || unique=$$empty_fix; \
+	  $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	    $$tags $$unique; \
+	fi
+ctags: CTAGS
+CTAGS:  $(HEADERS) $(SOURCES)  $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	list='$(SOURCES) $(HEADERS)  $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	test -z "$(CTAGS_ARGS)$$tags$$unique" \
+	  || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+	     $$tags $$unique
+
+GTAGS:
+	here=`$(am__cd) $(top_builddir) && pwd` \
+	  && cd $(top_srcdir) \
+	  && gtags -i $(GTAGS_ARGS) $$here
+
+distclean-tags:
+	-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(DISTFILES)
+	@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	list='$(DISTFILES)'; \
+	  dist_files=`for file in $$list; do echo $$file; done | \
+	  sed -e "s|^$$srcdirstrip/||;t" \
+	      -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+	case $$dist_files in \
+	  */*) $(MKDIR_P) `echo "$$dist_files" | \
+			   sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+			   sort -u` ;; \
+	esac; \
+	for file in $$dist_files; do \
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+	  if test -d $$d/$$file; then \
+	    dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \
+	    fi; \
+	    cp -pR $$d/$$file $(distdir)$$dir || exit 1; \
+	  else \
+	    test -f $(distdir)/$$file \
+	    || cp -p $$d/$$file $(distdir)/$$file \
+	    || exit 1; \
+	  fi; \
+	done
+check-am: all-am
+check: check-am
+all-am: Makefile $(LTLIBRARIES)
+installdirs:
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+	$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	  install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	  `test -z '$(STRIP)' || \
+	    echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+
+maintainer-clean-generic:
+	@echo "This command is intended for maintainers to use;"
+	@echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-generic clean-libtool clean-noinstLTLIBRARIES \
+	mostlyclean-am
+
+distclean: distclean-am
+	-rm -rf ./$(DEPDIR)
+	-rm -f Makefile
+distclean-am: clean-am distclean-compile distclean-generic \
+	distclean-tags
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+info: info-am
+
+info-am:
+
+install-data-am:
+
+install-dvi: install-dvi-am
+
+install-exec-am:
+
+install-html: install-html-am
+
+install-info: install-info-am
+
+install-man:
+
+install-pdf: install-pdf-am
+
+install-ps: install-ps-am
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+	-rm -rf ./$(DEPDIR)
+	-rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-compile mostlyclean-generic \
+	mostlyclean-libtool
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am:
+
+.MAKE: install-am install-strip
+
+.PHONY: CTAGS GTAGS all all-am check check-am clean clean-generic \
+	clean-libtool clean-noinstLTLIBRARIES ctags distclean \
+	distclean-compile distclean-generic distclean-libtool \
+	distclean-tags distdir dvi dvi-am html html-am info info-am \
+	install install-am install-data install-data-am install-dvi \
+	install-dvi-am install-exec install-exec-am install-html \
+	install-html-am install-info install-info-am install-man \
+	install-pdf install-pdf-am install-ps install-ps-am \
+	install-strip installcheck installcheck-am installdirs \
+	maintainer-clean maintainer-clean-generic mostlyclean \
+	mostlyclean-compile mostlyclean-generic mostlyclean-libtool \
+	pdf pdf-am ps ps-am tags uninstall uninstall-am
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/lang/c/jansson/src/dump.c b/lang/c/jansson/src/dump.c
new file mode 100644
index 0000000..5e40b86
--- /dev/null
+++ b/lang/c/jansson/src/dump.c
@@ -0,0 +1,465 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri@digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#define _GNU_SOURCE
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <assert.h>
+
+#include <jansson.h>
+#include "jansson_private.h"
+#include "strbuffer.h"
+#include "utf.h"
+
+#define MAX_INTEGER_STR_LENGTH  100
+#define MAX_REAL_STR_LENGTH     100
+
+typedef int (*dump_func)(const char *buffer, int size, void *data);
+
+struct string
+{
+    char *buffer;
+    int length;
+    int size;
+};
+
+static int dump_to_strbuffer(const char *buffer, int size, void *data)
+{
+    return strbuffer_append_bytes((strbuffer_t *)data, buffer, size);
+}
+
+static int dump_to_file(const char *buffer, int size, void *data)
+{
+    FILE *dest = (FILE *)data;
+    if(fwrite(buffer, size, 1, dest) != 1)
+        return -1;
+    return 0;
+}
+
+/* 32 spaces (the maximum indentation size) */
+static char whitespace[] = "                                ";
+
+static int dump_indent(size_t flags, int depth, int space, dump_func dump, void *data)
+{
+    if(JSON_INDENT(flags) > 0)
+    {
+        int i, ws_count = JSON_INDENT(flags);
+
+        if(dump("\n", 1, data))
+            return -1;
+
+        for(i = 0; i < depth; i++)
+        {
+            if(dump(whitespace, ws_count, data))
+                return -1;
+        }
+    }
+    else if(space && !(flags & JSON_COMPACT))
+    {
+        return dump(" ", 1, data);
+    }
+    return 0;
+}
+
+static int dump_string(const char *str, int ascii, dump_func dump, void *data)
+{
+    const char *pos, *end;
+    int32_t codepoint;
+
+    if(dump("\"", 1, data))
+        return -1;
+
+    end = pos = str;
+    while(1)
+    {
+        const char *text;
+        char seq[13];
+        int length;
+
+        while(*end)
+        {
+            end = utf8_iterate(pos, &codepoint);
+            if(!end)
+                return -1;
+
+            /* mandatory escape or control char */
+            if(codepoint == '\\' || codepoint == '"' || codepoint < 0x20)
+                break;
+
+            /* non-ASCII */
+            if(ascii && codepoint > 0x7F)
+                break;
+
+            pos = end;
+        }
+
+        if(pos != str) {
+            if(dump(str, pos - str, data))
+                return -1;
+        }
+
+        if(end == pos)
+            break;
+
+        /* handle \, ", and control codes */
+        length = 2;
+        switch(codepoint)
+        {
+            case '\\': text = "\\\\"; break;
+            case '\"': text = "\\\""; break;
+            case '\b': text = "\\b"; break;
+            case '\f': text = "\\f"; break;
+            case '\n': text = "\\n"; break;
+            case '\r': text = "\\r"; break;
+            case '\t': text = "\\t"; break;
+            default:
+            {
+                /* codepoint is in BMP */
+                if(codepoint < 0x10000)
+                {
+                    sprintf(seq, "\\u%04x", (int) codepoint);
+                    length = 6;
+                }
+
+                /* not in BMP -> construct a UTF-16 surrogate pair */
+                else
+                {
+                    int32_t first, last;
+
+                    codepoint -= 0x10000;
+                    first = 0xD800 | ((codepoint & 0xffc00) >> 10);
+                    last = 0xDC00 | (codepoint & 0x003ff);
+
+                    sprintf(seq, "\\u%04x\\u%04x", (int) first, (int) last);
+                    length = 12;
+                }
+
+                text = seq;
+                break;
+            }
+        }
+
+        if(dump(text, length, data))
+            return -1;
+
+        str = pos = end;
+    }
+
+    return dump("\"", 1, data);
+}
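A worked instance of the surrogate-pair arithmetic above, for U+1D11E
(MUSICAL SYMBOL G CLEF):

    codepoint - 0x10000                 = 0x0D11E
    0xD800 | ((0x0D11E & 0xFFC00) >> 10) = 0xD834   (high surrogate)
    0xDC00 | (0x0D11E & 0x003FF)         = 0xDD1E   (low surrogate)

so dump_string() emits the 12-byte escape sequence \ud834\udd1e.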
+
+static int object_key_compare_keys(const void *key1, const void *key2)
+{
+    return strcmp((*(const object_key_t **)key1)->key,
+                  (*(const object_key_t **)key2)->key);
+}
+
+static int object_key_compare_serials(const void *key1, const void *key2)
+{
+    return (*(const object_key_t **)key1)->serial -
+           (*(const object_key_t **)key2)->serial;
+}
+
+static int do_dump(const json_t *json, size_t flags, int depth,
+                   dump_func dump, void *data)
+{
+    int ascii = flags & JSON_ENSURE_ASCII ? 1 : 0;
+
+    switch(json_typeof(json)) {
+        case JSON_NULL:
+            return dump("null", 4, data);
+
+        case JSON_TRUE:
+            return dump("true", 4, data);
+
+        case JSON_FALSE:
+            return dump("false", 5, data);
+
+        case JSON_INTEGER:
+        {
+            char buffer[MAX_INTEGER_STR_LENGTH];
+            int size;
+
+            size = snprintf(buffer, MAX_INTEGER_STR_LENGTH,
+                            "%" JSON_INTEGER_FORMAT,
+                            json_integer_value(json));
+            if(size >= MAX_INTEGER_STR_LENGTH)
+                return -1;
+
+            return dump(buffer, size, data);
+        }
+
+        case JSON_REAL:
+        {
+            char buffer[MAX_REAL_STR_LENGTH];
+            int size;
+
+            size = snprintf(buffer, MAX_REAL_STR_LENGTH, "%.17g",
+                            json_real_value(json));
+            if(size >= MAX_REAL_STR_LENGTH)
+                return -1;
+
+            /* Make sure there's a dot or 'e' in the output. Otherwise
+               a real is converted to an integer when decoding */
+            if(strchr(buffer, '.') == NULL &&
+               strchr(buffer, 'e') == NULL)
+            {
+                if(size + 2 >= MAX_REAL_STR_LENGTH) {
+                    /* No space to append ".0" */
+                    return -1;
+                }
+                buffer[size] = '.';
+                buffer[size + 1] = '0';
+                size += 2;
+            }
+
+            return dump(buffer, size, data);
+        }
+
+        case JSON_STRING:
+            return dump_string(json_string_value(json), ascii, dump, data);
+
+        case JSON_ARRAY:
+        {
+            int i;
+            int n;
+            json_array_t *array;
+
+            /* detect circular references */
+            array = json_to_array(json);
+            if(array->visited)
+                goto array_error;
+            array->visited = 1;
+
+            n = json_array_size(json);
+
+            if(dump("[", 1, data))
+                goto array_error;
+            if(n == 0) {
+                array->visited = 0;
+                return dump("]", 1, data);
+            }
+            if(dump_indent(flags, depth + 1, 0, dump, data))
+                goto array_error;
+
+            for(i = 0; i < n; ++i) {
+                if(do_dump(json_array_get(json, i), flags, depth + 1,
+                           dump, data))
+                    goto array_error;
+
+                if(i < n - 1)
+                {
+                    if(dump(",", 1, data) ||
+                       dump_indent(flags, depth + 1, 1, dump, data))
+                        goto array_error;
+                }
+                else
+                {
+                    if(dump_indent(flags, depth, 0, dump, data))
+                        goto array_error;
+                }
+            }
+
+            array->visited = 0;
+            return dump("]", 1, data);
+
+        array_error:
+            array->visited = 0;
+            return -1;
+        }
+
+        case JSON_OBJECT:
+        {
+            json_object_t *object;
+            void *iter;
+            const char *separator;
+            int separator_length;
+
+            if(flags & JSON_COMPACT) {
+                separator = ":";
+                separator_length = 1;
+            }
+            else {
+                separator = ": ";
+                separator_length = 2;
+            }
+
+            /* detect circular references */
+            object = json_to_object(json);
+            if(object->visited)
+                goto object_error;
+            object->visited = 1;
+
+            iter = json_object_iter((json_t *)json);
+
+            if(dump("{", 1, data))
+                goto object_error;
+            if(!iter) {
+                object->visited = 0;
+                return dump("}", 1, data);
+            }
+            if(dump_indent(flags, depth + 1, 0, dump, data))
+                goto object_error;
+
+            if(flags & JSON_SORT_KEYS || flags & JSON_PRESERVE_ORDER)
+            {
+                const object_key_t **keys;
+                size_t size, i;
+                int (*cmp_func)(const void *, const void *);
+
+                size = json_object_size(json);
+                keys = (const object_key_t **) jsonp_malloc(size * sizeof(object_key_t *));
+                if(!keys)
+                    goto object_error;
+
+                i = 0;
+                while(iter)
+                {
+                    keys[i] = jsonp_object_iter_fullkey(iter);
+                    iter = json_object_iter_next((json_t *)json, iter);
+                    i++;
+                }
+                assert(i == size);
+
+                if(flags & JSON_SORT_KEYS)
+                    cmp_func = object_key_compare_keys;
+                else
+                    cmp_func = object_key_compare_serials;
+
+                qsort(keys, size, sizeof(object_key_t *), cmp_func);
+
+                for(i = 0; i < size; i++)
+                {
+                    const char *key;
+                    json_t *value;
+
+                    key = keys[i]->key;
+                    value = json_object_get(json, key);
+                    assert(value);
+
+                    dump_string(key, ascii, dump, data);
+                    if(dump(separator, separator_length, data) ||
+                       do_dump(value, flags, depth + 1, dump, data))
+                    {
+                        jsonp_free(keys);
+                        goto object_error;
+                    }
+
+                    if(i < size - 1)
+                    {
+                        if(dump(",", 1, data) ||
+                           dump_indent(flags, depth + 1, 1, dump, data))
+                        {
+                            jsonp_free(keys);
+                            goto object_error;
+                        }
+                    }
+                    else
+                    {
+                        if(dump_indent(flags, depth, 0, dump, data))
+                        {
+                            jsonp_free(keys);
+                            goto object_error;
+                        }
+                    }
+                }
+
+                jsonp_free(keys);
+            }
+            else
+            {
+                /* Don't sort keys */
+
+                while(iter)
+                {
+                    void *next = json_object_iter_next((json_t *)json, iter);
+
+                    dump_string(json_object_iter_key(iter), ascii, dump, data);
+                    if(dump(separator, separator_length, data) ||
+                       do_dump(json_object_iter_value(iter), flags, depth + 1,
+                               dump, data))
+                        goto object_error;
+
+                    if(next)
+                    {
+                        if(dump(",", 1, data) ||
+                           dump_indent(flags, depth + 1, 1, dump, data))
+                            goto object_error;
+                    }
+                    else
+                    {
+                        if(dump_indent(flags, depth, 0, dump, data))
+                            goto object_error;
+                    }
+
+                    iter = next;
+                }
+            }
+
+            object->visited = 0;
+            return dump("}", 1, data);
+
+        object_error:
+            object->visited = 0;
+            return -1;
+        }
+
+        default:
+            /* not reached */
+            return -1;
+    }
+}
+
+
+char *json_dumps(const json_t *json, size_t flags)
+{
+    strbuffer_t strbuff;
+    char *result;
+
+    if(!(flags & JSON_ENCODE_ANY)) {
+        if(!json_is_array(json) && !json_is_object(json))
+           return NULL;
+    }
+
+    if(strbuffer_init(&strbuff))
+        return NULL;
+
+    if(do_dump(json, flags, 0, dump_to_strbuffer, (void *)&strbuff)) {
+        strbuffer_close(&strbuff);
+        return NULL;
+    }
+
+    result = jsonp_strdup(strbuffer_value(&strbuff));
+    strbuffer_close(&strbuff);
+
+    return result;
+}
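A minimal usage sketch for json_dumps(), with illustrative values; the
returned buffer belongs to the caller and, under the default allocators, is
released with free():

    json_t *obj = json_object();
    json_object_set_new(obj, "name", json_string("jansson"));
    json_object_set_new(obj, "major", json_integer(2));
    char *s = json_dumps(obj, JSON_INDENT(2) | JSON_SORT_KEYS);
    if(s) {
        puts(s);
        free(s);
    }
    json_decref(obj);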
+
+int json_dumpf(const json_t *json, FILE *output, size_t flags)
+{
+    if(!(flags & JSON_ENCODE_ANY)) {
+        if(!json_is_array(json) && !json_is_object(json))
+           return -1;
+    }
+
+    return do_dump(json, flags, 0, dump_to_file, (void *)output);
+}
+
+int json_dump_file(const json_t *json, const char *path, size_t flags)
+{
+    int result;
+
+    FILE *output = fopen(path, "w");
+    if(!output)
+        return -1;
+
+    result = json_dumpf(json, output, flags);
+
+    fclose(output);
+    return result;
+}
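The visited flags maintained in do_dump() make the encoder fail cleanly on
self-referencing structures instead of recursing forever; a sketch of that
behavior:

    json_t *arr = json_array();
    json_array_append(arr, arr);     /* the array now contains itself  */
    char *s = json_dumps(arr, 0);    /* returns NULL: cycle detected   */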
diff --git a/lang/c/jansson/src/error.c b/lang/c/jansson/src/error.c
new file mode 100644
index 0000000..a7c8cbb
--- /dev/null
+++ b/lang/c/jansson/src/error.c
@@ -0,0 +1,62 @@
+#include <string.h>
+#include "jansson_private.h"
+
+void jsonp_error_init(json_error_t *error, const char *source)
+{
+    if(error)
+    {
+        error->text[0] = '\0';
+        error->line = -1;
+        error->column = -1;
+        error->position = 0;
+        if(source)
+            jsonp_error_set_source(error, source);
+        else
+            error->source[0] = '\0';
+    }
+}
+
+void jsonp_error_set_source(json_error_t *error, const char *source)
+{
+    size_t length;
+
+    if(!error || !source)
+        return;
+
+    length = strlen(source);
+    if(length < JSON_ERROR_SOURCE_LENGTH)
+        strcpy(error->source, source);
+    else {
+        size_t extra = length - JSON_ERROR_SOURCE_LENGTH + 4;
+        strcpy(error->source, "...");
+        strcpy(error->source + 3, source + extra);
+    }
+}
+
+void jsonp_error_set(json_error_t *error, int line, int column,
+                     size_t position, const char *msg, ...)
+{
+    va_list ap;
+
+    va_start(ap, msg);
+    jsonp_error_vset(error, line, column, position, msg, ap);
+    va_end(ap);
+}
+
+void jsonp_error_vset(json_error_t *error, int line, int column,
+                      size_t position, const char *msg, va_list ap)
+{
+    if(!error)
+        return;
+
+    if(error->text[0] != '\0') {
+        /* error already set */
+        return;
+    }
+
+    error->line = line;
+    error->column = column;
+    error->position = position;
+
+    vsnprintf(error->text, JSON_ERROR_TEXT_LENGTH, msg, ap);
+}
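These private helpers populate the public json_error_t that the decoding
functions hand back; a typical consumer-side sketch (json_loads() is defined
in load.c, also part of this import):

    json_error_t err;
    json_t *val = json_loads("{\"truncated\": ", 0, &err);
    if(!val)
        fprintf(stderr, "parse error: %s (line %d, column %d)\n",
                err.text, err.line, err.column);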
diff --git a/lang/c/jansson/src/hashtable.c b/lang/c/jansson/src/hashtable.c
new file mode 100644
index 0000000..515e4e2
--- /dev/null
+++ b/lang/c/jansson/src/hashtable.c
@@ -0,0 +1,372 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri@digip.org>
+ *
+ * This library is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <stdlib.h>
+#include <jansson_config.h>   /* for JSON_INLINE */
+#include "jansson_private.h"  /* for container_of() */
+#include "hashtable.h"
+
+typedef struct hashtable_list list_t;
+typedef struct hashtable_pair pair_t;
+typedef struct hashtable_bucket bucket_t;
+
+#define list_to_pair(list_)  container_of(list_, pair_t, list)
+
+static JSON_INLINE void list_init(list_t *list)
+{
+    list->next = list;
+    list->prev = list;
+}
+
+static JSON_INLINE void list_insert(list_t *list, list_t *node)
+{
+    node->next = list;
+    node->prev = list->prev;
+    list->prev->next = node;
+    list->prev = node;
+}
+
+static JSON_INLINE void list_remove(list_t *list)
+{
+    list->prev->next = list->next;
+    list->next->prev = list->prev;
+}
+
+static JSON_INLINE int bucket_is_empty(hashtable_t *hashtable, bucket_t *bucket)
+{
+    return bucket->first == &hashtable->list && bucket->first == bucket->last;
+}
+
+static void insert_to_bucket(hashtable_t *hashtable, bucket_t *bucket,
+                             list_t *list)
+{
+    if(bucket_is_empty(hashtable, bucket))
+    {
+        list_insert(&hashtable->list, list);
+        bucket->first = bucket->last = list;
+    }
+    else
+    {
+        list_insert(bucket->first, list);
+        bucket->first = list;
+    }
+}
+
+static size_t primes[] = {
+    5, 13, 23, 53, 97, 193, 389, 769, 1543, 3079, 6151, 12289, 24593,
+    49157, 98317, 196613, 393241, 786433, 1572869, 3145739, 6291469,
+    12582917, 25165843, 50331653, 100663319, 201326611, 402653189,
+    805306457, 1610612741
+};
+static const size_t num_primes = sizeof(primes) / sizeof(size_t);
+
+static JSON_INLINE size_t num_buckets(hashtable_t *hashtable)
+{
+    return primes[hashtable->num_buckets];
+}
+
+
+static pair_t *hashtable_find_pair(hashtable_t *hashtable, bucket_t *bucket,
+                                   const void *key, size_t hash)
+{
+    list_t *list;
+    pair_t *pair;
+
+    if(bucket_is_empty(hashtable, bucket))
+        return NULL;
+
+    list = bucket->first;
+    while(1)
+    {
+        pair = list_to_pair(list);
+        if(pair->hash == hash && hashtable->cmp_keys(pair->key, key))
+            return pair;
+
+        if(list == bucket->last)
+            break;
+
+        list = list->next;
+    }
+
+    return NULL;
+}
+
+/* returns 0 on success, -1 if key was not found */
+static int hashtable_do_del(hashtable_t *hashtable,
+                            const void *key, size_t hash)
+{
+    pair_t *pair;
+    bucket_t *bucket;
+    size_t index;
+
+    index = hash % num_buckets(hashtable);
+    bucket = &hashtable->buckets[index];
+
+    pair = hashtable_find_pair(hashtable, bucket, key, hash);
+    if(!pair)
+        return -1;
+
+    if(&pair->list == bucket->first && &pair->list == bucket->last)
+        bucket->first = bucket->last = &hashtable->list;
+
+    else if(&pair->list == bucket->first)
+        bucket->first = pair->list.next;
+
+    else if(&pair->list == bucket->last)
+        bucket->last = pair->list.prev;
+
+    list_remove(&pair->list);
+
+    if(hashtable->free_key)
+        hashtable->free_key(pair->key);
+    if(hashtable->free_value)
+        hashtable->free_value(pair->value);
+
+    jsonp_free(pair);
+    hashtable->size--;
+
+    return 0;
+}
+
+static void hashtable_do_clear(hashtable_t *hashtable)
+{
+    list_t *list, *next;
+    pair_t *pair;
+
+    for(list = hashtable->list.next; list != &hashtable->list; list = next)
+    {
+        next = list->next;
+        pair = list_to_pair(list);
+        if(hashtable->free_key)
+            hashtable->free_key(pair->key);
+        if(hashtable->free_value)
+            hashtable->free_value(pair->value);
+        jsonp_free(pair);
+    }
+}
+
+static int hashtable_do_rehash(hashtable_t *hashtable)
+{
+    list_t *list, *next;
+    pair_t *pair;
+    size_t i, index, new_size;
+
+    jsonp_free(hashtable->buckets);
+
+    hashtable->num_buckets++;
+    new_size = num_buckets(hashtable);
+
+    hashtable->buckets = (struct hashtable_bucket *) jsonp_malloc(new_size * sizeof(bucket_t));
+    if(!hashtable->buckets)
+        return -1;
+
+    for(i = 0; i < num_buckets(hashtable); i++)
+    {
+        hashtable->buckets[i].first = hashtable->buckets[i].last =
+            &hashtable->list;
+    }
+
+    list = hashtable->list.next;
+    list_init(&hashtable->list);
+
+    for(; list != &hashtable->list; list = next) {
+        next = list->next;
+        pair = list_to_pair(list);
+        index = pair->hash % new_size;
+        insert_to_bucket(hashtable, &hashtable->buckets[index], &pair->list);
+    }
+
+    return 0;
+}
+
+
+hashtable_t *hashtable_create(key_hash_fn hash_key, key_cmp_fn cmp_keys,
+                              free_fn free_key, free_fn free_value)
+{
+    hashtable_t *hashtable = (hashtable_t *) jsonp_malloc(sizeof(hashtable_t));
+    if(!hashtable)
+        return NULL;
+
+    if(hashtable_init(hashtable, hash_key, cmp_keys, free_key, free_value))
+    {
+        jsonp_free(hashtable);
+        return NULL;
+    }
+
+    return hashtable;
+}
+
+void hashtable_destroy(hashtable_t *hashtable)
+{
+    hashtable_close(hashtable);
+    jsonp_free(hashtable);
+}
+
+int hashtable_init(hashtable_t *hashtable,
+                   key_hash_fn hash_key, key_cmp_fn cmp_keys,
+                   free_fn free_key, free_fn free_value)
+{
+    size_t i;
+
+    hashtable->size = 0;
+    hashtable->num_buckets = 0;  /* index to primes[] */
+    hashtable->buckets = (struct hashtable_bucket *) jsonp_malloc(num_buckets(hashtable) * sizeof(bucket_t));
+    if(!hashtable->buckets)
+        return -1;
+
+    list_init(&hashtable->list);
+
+    hashtable->hash_key = hash_key;
+    hashtable->cmp_keys = cmp_keys;
+    hashtable->free_key = free_key;
+    hashtable->free_value = free_value;
+
+    for(i = 0; i < num_buckets(hashtable); i++)
+    {
+        hashtable->buckets[i].first = hashtable->buckets[i].last =
+            &hashtable->list;
+    }
+
+    return 0;
+}
+
+void hashtable_close(hashtable_t *hashtable)
+{
+    hashtable_do_clear(hashtable);
+    jsonp_free(hashtable->buckets);
+}
+
+int hashtable_set(hashtable_t *hashtable, void *key, void *value)
+{
+    pair_t *pair;
+    bucket_t *bucket;
+    size_t hash, index;
+
+    /* rehash if the load ratio exceeds 1 */
+    if(hashtable->size >= num_buckets(hashtable))
+        if(hashtable_do_rehash(hashtable))
+            return -1;
+
+    hash = hashtable->hash_key(key);
+    index = hash % num_buckets(hashtable);
+    bucket = &hashtable->buckets[index];
+    pair = hashtable_find_pair(hashtable, bucket, key, hash);
+
+    if(pair)
+    {
+        if(hashtable->free_key)
+            hashtable->free_key(key);
+        if(hashtable->free_value)
+            hashtable->free_value(pair->value);
+        pair->value = value;
+    }
+    else
+    {
+        pair = (pair_t *) jsonp_malloc(sizeof(pair_t));
+        if(!pair)
+            return -1;
+
+        pair->key = key;
+        pair->value = value;
+        pair->hash = hash;
+        list_init(&pair->list);
+
+        insert_to_bucket(hashtable, bucket, &pair->list);
+
+        hashtable->size++;
+    }
+    return 0;
+}
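Note the ownership rules hashtable_set() encodes: when the key already exists,
the incoming key is freed (the stored key is kept) and the old value is
released before being replaced. A sketch with string keys — the hash and
comparison callbacks are illustrative, and cmp_keys must return non-zero for
equal keys:

    static size_t str_hash(const void *key)
    {
        size_t h = 5381;
        const char *s = (const char *)key;
        while(*s)
            h = h * 33 + (unsigned char)*s++;
        return h;
    }

    static int str_eq(const void *a, const void *b)
    {
        return strcmp((const char *)a, (const char *)b) == 0;
    }

    hashtable_t *ht = hashtable_create(str_hash, str_eq, NULL, NULL);
    hashtable_set(ht, (void *)"answer", (void *)"42");
    const char *v = (const char *)hashtable_get(ht, "answer");  /* "42" */
    hashtable_destroy(ht);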
+
+void *hashtable_get(hashtable_t *hashtable, const void *key)
+{
+    pair_t *pair;
+    size_t hash;
+    bucket_t *bucket;
+
+    hash = hashtable->hash_key(key);
+    bucket = &hashtable->buckets[hash % num_buckets(hashtable)];
+
+    pair = hashtable_find_pair(hashtable, bucket, key, hash);
+    if(!pair)
+        return NULL;
+
+    return pair->value;
+}
+
+int hashtable_del(hashtable_t *hashtable, const void *key)
+{
+    size_t hash = hashtable->hash_key(key);
+    return hashtable_do_del(hashtable, key, hash);
+}
+
+void hashtable_clear(hashtable_t *hashtable)
+{
+    size_t i;
+
+    hashtable_do_clear(hashtable);
+
+    for(i = 0; i < num_buckets(hashtable); i++)
+    {
+        hashtable->buckets[i].first = hashtable->buckets[i].last =
+            &hashtable->list;
+    }
+
+    list_init(&hashtable->list);
+    hashtable->size = 0;
+}
+
+void *hashtable_iter(hashtable_t *hashtable)
+{
+    return hashtable_iter_next(hashtable, &hashtable->list);
+}
+
+void *hashtable_iter_at(hashtable_t *hashtable, const void *key)
+{
+    pair_t *pair;
+    size_t hash;
+    bucket_t *bucket;
+
+    hash = hashtable->hash_key(key);
+    bucket = &hashtable->buckets[hash % num_buckets(hashtable)];
+
+    pair = hashtable_find_pair(hashtable, bucket, key, hash);
+    if(!pair)
+        return NULL;
+
+    return &pair->list;
+}
+
+void *hashtable_iter_next(hashtable_t *hashtable, void *iter)
+{
+    list_t *list = (list_t *)iter;
+    if(list->next == &hashtable->list)
+        return NULL;
+    return list->next;
+}
+
+void *hashtable_iter_key(void *iter)
+{
+    pair_t *pair = list_to_pair((list_t *)iter);
+    return pair->key;
+}
+
+void *hashtable_iter_value(void *iter)
+{
+    pair_t *pair = list_to_pair((list_t *)iter);
+    return pair->value;
+}
+
+void hashtable_iter_set(hashtable_t *hashtable, void *iter, void *value)
+{
+    pair_t *pair = list_to_pair((list_t *)iter);
+
+    if(hashtable->free_value)
+        hashtable->free_value(pair->value);
+
+    pair->value = value;
+}
diff --git a/lang/c/jansson/src/hashtable.h b/lang/c/jansson/src/hashtable.h
new file mode 100644
index 0000000..aa295e6
--- /dev/null
+++ b/lang/c/jansson/src/hashtable.h
@@ -0,0 +1,218 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * This library is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#ifndef HASHTABLE_H
+#define HASHTABLE_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#pragma GCC visibility push(hidden)
+
+typedef size_t (*key_hash_fn)(const void *key);
+typedef int (*key_cmp_fn)(const void *key1, const void *key2);
+typedef void (*free_fn)(void *key);
+
+struct hashtable_list {
+    struct hashtable_list *prev;
+    struct hashtable_list *next;
+};
+
+struct hashtable_pair {
+    void *key;
+    void *value;
+    size_t hash;
+    struct hashtable_list list;
+};
+
+struct hashtable_bucket {
+    struct hashtable_list *first;
+    struct hashtable_list *last;
+};
+
+typedef struct hashtable {
+    size_t size;
+    struct hashtable_bucket *buckets;
+    size_t num_buckets;  /* index to primes[] */
+    struct hashtable_list list;
+
+    key_hash_fn hash_key;
+    key_cmp_fn cmp_keys;  /* returns non-zero for equal keys */
+    free_fn free_key;
+    free_fn free_value;
+} hashtable_t;
+
+/**
+ * hashtable_create - Create a hashtable object
+ *
+ * @hash_key: The key hashing function
+ * @cmp_keys: The key compare function. Returns non-zero for equal and
+ *     zero for unequal keys
+ * @free_key: If non-NULL, called for a key that is no longer referenced.
+ * @free_value: If non-NULL, called for a value that is no longer referenced.
+ *
+ * Returns a new hashtable object that should be freed with
+ * hashtable_destroy when it's no longer used, or NULL on failure (out
+ * of memory).
+ */
+hashtable_t *hashtable_create(key_hash_fn hash_key, key_cmp_fn cmp_keys,
+                              free_fn free_key, free_fn free_value);
+
+/**
+ * hashtable_destroy - Destroy a hashtable object
+ *
+ * @hashtable: The hashtable
+ *
+ * Destroys a hashtable created with hashtable_create().
+ */
+void hashtable_destroy(hashtable_t *hashtable);
+
+/**
+ * hashtable_init - Initialize a hashtable object
+ *
+ * @hashtable: The (statically allocated) hashtable object
+ * @hash_key: The key hashing function
+ * @cmp_keys: The key compare function. Returns non-zero for equal and
+ *     zero for unequal keys
+ * @free_key: If non-NULL, called for a key that is no longer referenced.
+ * @free_value: If non-NULL, called for a value that is no longer referenced.
+ *
+ * Initializes a statically allocated hashtable object. The object
+ * should be cleared with hashtable_close when it's no longer used.
+ *
+ * Returns 0 on success, -1 on error (out of memory).
+ */
+int hashtable_init(hashtable_t *hashtable,
+                   key_hash_fn hash_key, key_cmp_fn cmp_keys,
+                   free_fn free_key, free_fn free_value);
+
+/**
+ * hashtable_close - Release all resources used by a hashtable object
+ *
+ * @hashtable: The hashtable
+ *
+ * Destroys a statically allocated hashtable object.
+ */
+void hashtable_close(hashtable_t *hashtable);
+
+/**
+ * hashtable_set - Add/modify value in hashtable
+ *
+ * @hashtable: The hashtable object
+ * @key: The key
+ * @value: The value
+ *
+ * If a pair with the given key already exists, its value is replaced
+ * with the new value.
+ *
+ * Key and value are "stolen" in the sense that the hashtable frees
+ * them automatically when they are no longer used. The freeing is
+ * accomplished by calling the free_key and free_value functions that
+ * were supplied to hashtable_create() or hashtable_init(). If one or
+ * both of the free functions is NULL, the corresponding item is not
+ * "stolen".
+ *
+ * Returns 0 on success, -1 on failure (out of memory).
+ */
+int hashtable_set(hashtable_t *hashtable, void *key, void *value);
+
+/**
+ * hashtable_get - Get a value associated with a key
+ *
+ * @hashtable: The hashtable object
+ * @key: The key
+ *
+ * Returns value if it is found, or NULL otherwise.
+ */
+void *hashtable_get(hashtable_t *hashtable, const void *key);
+
+/**
+ * hashtable_del - Remove a value from the hashtable
+ *
+ * @hashtable: The hashtable object
+ * @key: The key
+ *
+ * Returns 0 on success, or -1 if the key was not found.
+ */
+int hashtable_del(hashtable_t *hashtable, const void *key);
+
+/**
+ * hashtable_clear - Clear hashtable
+ *
+ * @hashtable: The hashtable object
+ *
+ * Removes all items from the hashtable.
+ */
+void hashtable_clear(hashtable_t *hashtable);
+
+/**
+ * hashtable_iter - Iterate over hashtable
+ *
+ * @hashtable: The hashtable object
+ *
+ * Returns an opaque iterator to the first element in the hashtable.
+ * The iterator should be passed to hashtable_iter_* functions.
+ * The hashtable items are not iterated over in any particular order.
+ *
+ * There's no need to free the iterator in any way. The iterator is
+ * valid as long as the item that is referenced by the iterator is not
+ * deleted. Other values may be added or deleted. In particular,
+ * hashtable_iter_next() may be called on an iterator, and after that
+ * the key/value pair pointed to by the old iterator may be deleted.
+ */
+void *hashtable_iter(hashtable_t *hashtable);
+
+/**
+ * hashtable_iter_at - Return an iterator at a specific key
+ *
+ * @hashtable: The hashtable object
+ * @key: The key that the iterator should point to
+ *
+ * Like hashtable_iter() but returns an iterator pointing to a
+ * specific key.
+ */
+void *hashtable_iter_at(hashtable_t *hashtable, const void *key);
+
+/**
+ * hashtable_iter_next - Advance an iterator
+ *
+ * @hashtable: The hashtable object
+ * @iter: The iterator
+ *
+ * Returns a new iterator pointing to the next element in the
+ * hashtable or NULL if the whole hashtable has been iterated over.
+ */
+void *hashtable_iter_next(hashtable_t *hashtable, void *iter);
+
+/**
+ * hashtable_iter_key - Retrieve the key pointed to by an iterator
+ *
+ * @iter: The iterator
+ */
+void *hashtable_iter_key(void *iter);
+
+/**
+ * hashtable_iter_value - Retrieve the value pointed to by an iterator
+ *
+ * @iter: The iterator
+ */
+void *hashtable_iter_value(void *iter);
+
+/**
+ * hashtable_iter_set - Set the value pointed to by an iterator
+ *
+ * @hashtable: The hashtable object
+ * @iter: The iterator
+ * @value: The value to set
+ */
+void hashtable_iter_set(hashtable_t *hashtable, void *iter, void *value);
+
+#pragma GCC visibility pop
+
+CLOSE_EXTERN
+#endif
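A hypothetical iteration sketch over the opaque iterator protocol declared above (not from the upstream tree). The helper dump_table is invented for this example and assumes a table whose keys and values are C strings:

    #include <stdio.h>
    #include "hashtable.h"

    static void dump_table(hashtable_t *table)
    {
        /* iteration order is unspecified; see hashtable_iter() above */
        void *it = hashtable_iter(table);
        while(it) {
            printf("%s -> %s\n",
                   (const char *)hashtable_iter_key(it),
                   (const char *)hashtable_iter_value(it));
            it = hashtable_iter_next(table, it);
        }
    }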
diff --git a/lang/c/jansson/src/jansson.h b/lang/c/jansson/src/jansson.h
new file mode 100644
index 0000000..a0e6785
--- /dev/null
+++ b/lang/c/jansson/src/jansson.h
@@ -0,0 +1,257 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#ifndef JANSSON_H
+#define JANSSON_H
+
+#include <stdio.h>
+#include <stdlib.h>  /* for size_t */
+#include <stdarg.h>
+
+#include <jansson_config.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#pragma GCC visibility push(hidden)
+
+/* version */
+
+#define JANSSON_MAJOR_VERSION  2
+#define JANSSON_MINOR_VERSION  1
+#define JANSSON_MICRO_VERSION  0
+
+/* Micro version is omitted if it's 0 */
+#define JANSSON_VERSION  "2.1"
+
+/* Version as a 3-byte hex number, e.g. 0x010201 == 1.2.1. Use this
+   for numeric comparisons, e.g. #if JANSSON_VERSION_HEX >= ... */
+#define JANSSON_VERSION_HEX  ((JANSSON_MAJOR_VERSION << 16) |   \
+                              (JANSSON_MINOR_VERSION << 8)  |   \
+                              (JANSSON_MICRO_VERSION << 0))
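Packing the three components into one number makes a range check a single comparison. An illustrative compile-time guard (not part of the header) that requires at least version 2.1:

    #if JANSSON_VERSION_HEX >= 0x020100
    /* code that relies on jansson >= 2.1 goes here */
    #endif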
+
+
+/* types */
+
+typedef enum {
+    JSON_OBJECT,
+    JSON_ARRAY,
+    JSON_STRING,
+    JSON_INTEGER,
+    JSON_REAL,
+    JSON_TRUE,
+    JSON_FALSE,
+    JSON_NULL
+} json_type;
+
+typedef struct {
+    json_type type;
+    size_t refcount;
+} json_t;
+
+#if JSON_INTEGER_IS_LONG_LONG
+#define JSON_INTEGER_FORMAT "lld"
+typedef long long json_int_t;
+#else
+#define JSON_INTEGER_FORMAT "ld"
+typedef long json_int_t;
+#endif /* JSON_INTEGER_IS_LONG_LONG */
+
+#define json_typeof(json)      ((json)->type)
+#define json_is_object(json)   (json && json_typeof(json) == JSON_OBJECT)
+#define json_is_array(json)    (json && json_typeof(json) == JSON_ARRAY)
+#define json_is_string(json)   (json && json_typeof(json) == JSON_STRING)
+#define json_is_integer(json)  (json && json_typeof(json) == JSON_INTEGER)
+#define json_is_real(json)     (json && json_typeof(json) == JSON_REAL)
+#define json_is_number(json)   (json_is_integer(json) || json_is_real(json))
+#define json_is_true(json)     (json && json_typeof(json) == JSON_TRUE)
+#define json_is_false(json)    (json && json_typeof(json) == JSON_FALSE)
+#define json_is_boolean(json)  (json_is_true(json) || json_is_false(json))
+#define json_is_null(json)     (json && json_typeof(json) == JSON_NULL)
+
+/* construction, destruction, reference counting */
+
+json_t *json_object(void);
+json_t *json_array(void);
+json_t *json_string(const char *value);
+json_t *json_string_nocheck(const char *value);
+json_t *json_integer(json_int_t value);
+json_t *json_real(double value);
+json_t *json_true(void);
+json_t *json_false(void);
+json_t *json_null(void);
+
+static JSON_INLINE
+json_t *json_incref(json_t *json)
+{
+    if(json && json->refcount != (size_t)-1)
+        ++json->refcount;
+    return json;
+}
+
+/* do not call json_delete directly */
+void json_delete(json_t *json);
+
+static JSON_INLINE
+void json_decref(json_t *json)
+{
+    if(json && json->refcount != (size_t)-1 && --json->refcount == 0)
+        json_delete(json);
+}
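To make the reference-counting contract concrete, a small hypothetical walk-through (refcount_demo is not part of the header). json_object_set(), defined further below, is the wrapper that increfs the value it stores:

    #include <jansson.h>

    static void refcount_demo(void)
    {
        json_t *obj = json_object();           /* new reference, refcount 1 */
        json_t *val = json_string("hi");       /* new reference, refcount 1 */

        json_object_set(obj, "greeting", val); /* increfs val: refcount 2 */
        json_decref(val);                      /* release our ref: refcount 1 */

        json_decref(obj);                      /* frees obj and its members */
    }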
+
+
+/* error reporting */
+
+#define JSON_ERROR_TEXT_LENGTH    160
+#define JSON_ERROR_SOURCE_LENGTH   80
+
+typedef struct {
+    int line;
+    int column;
+    int position;
+    char source[JSON_ERROR_SOURCE_LENGTH];
+    char text[JSON_ERROR_TEXT_LENGTH];
+} json_error_t;
+
+
+/* getters, setters, manipulation */
+
+size_t json_object_size(const json_t *object);
+json_t *json_object_get(const json_t *object, const char *key);
+int json_object_set_new(json_t *object, const char *key, json_t *value);
+int json_object_set_new_nocheck(json_t *object, const char *key, json_t *value);
+int json_object_del(json_t *object, const char *key);
+int json_object_clear(json_t *object);
+int json_object_update(json_t *object, json_t *other);
+void *json_object_iter(json_t *object);
+void *json_object_iter_at(json_t *object, const char *key);
+void *json_object_iter_next(json_t *object, void *iter);
+const char *json_object_iter_key(void *iter);
+json_t *json_object_iter_value(void *iter);
+int json_object_iter_set_new(json_t *object, void *iter, json_t *value);
+
+static JSON_INLINE
+int json_object_set(json_t *object, const char *key, json_t *value)
+{
+    return json_object_set_new(object, key, json_incref(value));
+}
+
+static JSON_INLINE
+int json_object_set_nocheck(json_t *object, const char *key, json_t *value)
+{
+    return json_object_set_new_nocheck(object, key, json_incref(value));
+}
+
+static JSON_INLINE
+int json_object_iter_set(json_t *object, void *iter, json_t *value)
+{
+    return json_object_iter_set_new(object, iter, json_incref(value));
+}
+
+size_t json_array_size(const json_t *array);
+json_t *json_array_get(const json_t *array, size_t index);
+int json_array_set_new(json_t *array, size_t index, json_t *value);
+int json_array_append_new(json_t *array, json_t *value);
+int json_array_insert_new(json_t *array, size_t index, json_t *value);
+int json_array_remove(json_t *array, size_t index);
+int json_array_clear(json_t *array);
+int json_array_extend(json_t *array, json_t *other);
+
+static JSON_INLINE
+int json_array_set(json_t *array, size_t index, json_t *value)
+{
+    return json_array_set_new(array, index, json_incref(value));
+}
+
+static JSON_INLINE
+int json_array_append(json_t *array, json_t *value)
+{
+    return json_array_append_new(array, json_incref(value));
+}
+
+static JSON_INLINE
+int json_array_insert(json_t *array, size_t index, json_t *value)
+{
+    return json_array_insert_new(array, index, json_incref(value));
+}
+
+const char *json_string_value(const json_t *string);
+json_int_t json_integer_value(const json_t *integer);
+double json_real_value(const json_t *real);
+double json_number_value(const json_t *json);
+
+int json_string_set(json_t *string, const char *value);
+int json_string_set_nocheck(json_t *string, const char *value);
+int json_integer_set(json_t *integer, json_int_t value);
+int json_real_set(json_t *real, double value);
+
+
+/* pack, unpack */
+
+json_t *json_pack(const char *fmt, ...);
+json_t *json_pack_ex(json_error_t *error, size_t flags, const char *fmt, ...);
+json_t *json_vpack_ex(json_error_t *error, size_t flags, const char *fmt, va_list ap);
+
+#define JSON_VALIDATE_ONLY  0x1
+#define JSON_STRICT         0x2
+
+int json_unpack(json_t *root, const char *fmt, ...);
+int json_unpack_ex(json_t *root, json_error_t *error, size_t flags, const char *fmt, ...);
+int json_vunpack_ex(json_t *root, json_error_t *error, size_t flags, const char *fmt, va_list ap);
+
+
+/* equality */
+
+int json_equal(json_t *value1, json_t *value2);
+
+
+/* copying */
+
+json_t *json_copy(json_t *value);
+json_t *json_deep_copy(json_t *value);
+
+
+/* decoding */
+
+#define JSON_REJECT_DUPLICATES 0x1
+#define JSON_DISABLE_EOF_CHECK 0x2
+
+json_t *json_loads(const char *input, size_t flags, json_error_t *error);
+json_t *json_loadb(const char *buffer, size_t buflen, size_t flags, json_error_t *error);
+json_t *json_loadf(FILE *input, size_t flags, json_error_t *error);
+json_t *json_load_file(const char *path, size_t flags, json_error_t *error);
+
+
+/* encoding */
+
+#define JSON_INDENT(n)      ((n) & 0x1F)
+#define JSON_COMPACT        0x20
+#define JSON_ENSURE_ASCII   0x40
+#define JSON_SORT_KEYS      0x80
+#define JSON_PRESERVE_ORDER 0x100
+#define JSON_ENCODE_ANY     0x200
+
+char *json_dumps(const json_t *json, size_t flags);
+int json_dumpf(const json_t *json, FILE *output, size_t flags);
+int json_dump_file(const json_t *json, const char *path, size_t flags);
+
+
+/* custom memory allocation */
+
+typedef void *(*json_malloc_t)(size_t);
+typedef void (*json_free_t)(void *);
+
+void json_set_alloc_funcs(json_malloc_t malloc_fn, json_free_t free_fn);
+
+#pragma GCC visibility pop
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
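A minimal end-to-end sketch of the decoding and encoding entry points declared above (illustrative only; it assumes the default allocators, so the buffer returned by json_dumps() is released with free()):

    #include <stdio.h>
    #include <stdlib.h>
    #include <jansson.h>

    int main(void)
    {
        json_error_t error;
        json_t *root;
        char *out;

        root = json_loads("{\"a\": [1, 2, 3]}", JSON_REJECT_DUPLICATES, &error);
        if(!root) {
            fprintf(stderr, "parse error on line %d: %s\n",
                    error.line, error.text);
            return 1;
        }

        out = json_dumps(root, JSON_INDENT(2) | JSON_SORT_KEYS);
        if(out) {
            puts(out);
            free(out);
        }
        json_decref(root);
        return 0;
    }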
diff --git a/lang/c/jansson/src/jansson_config.h b/lang/c/jansson/src/jansson_config.h
new file mode 100644
index 0000000..b3bb72a
--- /dev/null
+++ b/lang/c/jansson/src/jansson_config.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2010-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ *
+ *
+ * This file specifies a part of the site-specific configuration for
+ * Jansson, namely those things that affect the public API in
+ * jansson.h.
+ *
+ * The configure script copies this file to jansson_config.h and
+ * replaces @var@ substitutions by values that fit your system. If you
+ * cannot run the configure script, you can do the value substitution
+ * by hand.
+ */
+
+#ifndef JANSSON_CONFIG_H
+#define JANSSON_CONFIG_H
+
+/* If your compiler supports the inline keyword in C, JSON_INLINE is
+   defined to `inline', otherwise empty. In C++, inline is always
+   supported. */
+#ifdef __cplusplus
+#define JSON_INLINE inline
+#else
+#define JSON_INLINE inline
+#endif
+
+/* If your compiler supports the `long long` type,
+   JSON_INTEGER_IS_LONG_LONG is defined to 1, otherwise to 0. */
+#define JSON_INTEGER_IS_LONG_LONG 1
+
+#endif
diff --git a/lang/c/jansson/src/jansson_config.h.in b/lang/c/jansson/src/jansson_config.h.in
new file mode 100644
index 0000000..7f519cd
--- /dev/null
+++ b/lang/c/jansson/src/jansson_config.h.in
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2010-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ *
+ *
+ * This file specifies a part of the site-specific configuration for
+ * Jansson, namely those things that affect the public API in
+ * jansson.h.
+ *
+ * The configure script copies this file to jansson_config.h and
+ * replaces @var@ substitutions by values that fit your system. If you
+ * cannot run the configure script, you can do the value substitution
+ * by hand.
+ */
+
+#ifndef JANSSON_CONFIG_H
+#define JANSSON_CONFIG_H
+
+/* If your compiler supports the inline keyword in C, JSON_INLINE is
+   defined to `inline', otherwise empty. In C++, inline is always
+   supported. */
+#ifdef __cplusplus
+#define JSON_INLINE inline
+#else
+#define JSON_INLINE @json_inline@
+#endif
+
+/* If your compiler supports the `long long` type,
+   JSON_INTEGER_IS_LONG_LONG is defined to 1, otherwise to 0. */
+#define JSON_INTEGER_IS_LONG_LONG @json_have_long_long@
+
+#endif
diff --git a/lang/c/jansson/src/jansson_private.h b/lang/c/jansson/src/jansson_private.h
new file mode 100644
index 0000000..722220c
--- /dev/null
+++ b/lang/c/jansson/src/jansson_private.h
@@ -0,0 +1,106 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#ifndef JANSSON_PRIVATE_H
+#define JANSSON_PRIVATE_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <stddef.h>
+#include "jansson.h"
+#include "hashtable.h"
+
+#pragma GCC visibility push(hidden)
+
+#ifdef _WIN32
+#define snprintf _snprintf
+#endif
+
+#define container_of(ptr_, type_, member_)  \
+    ((type_ *)((char *)ptr_ - offsetof(type_, member_)))
+
+/* On some platforms, max() may already be defined */
+#ifndef max
+#define max(a, b)  ((a) > (b) ? (a) : (b))
+#endif
+
+/* va_copy is a C99 feature. In C89 implementations, it's sometimes
+   available as __va_copy. If not, memcpy() should do the trick. */
+#ifndef va_copy
+#ifdef __va_copy
+#define va_copy __va_copy
+#else
+#define va_copy(a, b)  memcpy(&(a), &(b), sizeof(va_list))
+#endif
+#endif
+
+typedef struct {
+    json_t json;
+    hashtable_t hashtable;
+    size_t serial;
+    int visited;
+} json_object_t;
+
+typedef struct {
+    json_t json;
+    size_t size;
+    size_t entries;
+    json_t **table;
+    int visited;
+} json_array_t;
+
+typedef struct {
+    json_t json;
+    char *value;
+} json_string_t;
+
+typedef struct {
+    json_t json;
+    double value;
+} json_real_t;
+
+typedef struct {
+    json_t json;
+    json_int_t value;
+} json_integer_t;
+
+#define json_to_object(json_)  container_of(json_, json_object_t, json)
+#define json_to_array(json_)   container_of(json_, json_array_t, json)
+#define json_to_string(json_)  container_of(json_, json_string_t, json)
+#define json_to_real(json_)   container_of(json_, json_real_t, json)
+#define json_to_integer(json_) container_of(json_, json_integer_t, json)
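The json_to_* macros above use container_of() to step from the embedded json_t member back to the enclosing wrapper struct. A hypothetical illustration (raw_string_value is invented for this example):

    /* Given a json_t that is known to be a string, recover the
       enclosing json_string_t and read its payload directly. */
    static char *raw_string_value(json_t *json)
    {
        return json_to_string(json)->value;
    }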
+
+size_t jsonp_hash_str(const void *ptr);
+int jsonp_str_equal(const void *ptr1, const void *ptr2);
+
+typedef struct {
+    size_t serial;
+    char key[1];
+} object_key_t;
+
+const object_key_t *jsonp_object_iter_fullkey(void *iter);
+
+void jsonp_error_init(json_error_t *error, const char *source);
+void jsonp_error_set_source(json_error_t *error, const char *source);
+void jsonp_error_set(json_error_t *error, int line, int column,
+                     size_t position, const char *msg, ...);
+void jsonp_error_vset(json_error_t *error, int line, int column,
+                      size_t position, const char *msg, va_list ap);
+
+/* Wrappers for custom memory functions */
+void* jsonp_malloc(size_t size);
+void jsonp_free(void *ptr);
+char *jsonp_strdup(const char *str);
+
+#pragma GCC visibility pop
+
+CLOSE_EXTERN
+#endif
diff --git a/lang/c/jansson/src/load.c b/lang/c/jansson/src/load.c
new file mode 100644
index 0000000..338f3c1
--- /dev/null
+++ b/lang/c/jansson/src/load.c
@@ -0,0 +1,964 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#define _GNU_SOURCE
+#include <ctype.h>
+#include <errno.h>
+#include <limits.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <assert.h>
+
+#include <jansson.h>
+#include "jansson_private.h"
+#include "strbuffer.h"
+#include "utf.h"
+
+#define STREAM_STATE_OK        0
+#define STREAM_STATE_EOF      -1
+#define STREAM_STATE_ERROR    -2
+
+#define TOKEN_INVALID         -1
+#define TOKEN_EOF              0
+#define TOKEN_STRING         256
+#define TOKEN_INTEGER        257
+#define TOKEN_REAL           258
+#define TOKEN_TRUE           259
+#define TOKEN_FALSE          260
+#define TOKEN_NULL           261
+
+/* Read one byte from the stream, convert it to an unsigned char and
+   then to int, and return it. Return EOF on end of file. This
+   corresponds to the behaviour of fgetc(). */
+typedef int (*get_func)(void *data);
+
+typedef struct {
+    get_func get;
+    void *data;
+    char buffer[5];
+    int buffer_pos;
+    int state;
+    int line;
+    int column, last_column;
+    size_t position;
+} stream_t;
+
+typedef struct {
+    stream_t stream;
+    strbuffer_t saved_text;
+    int token;
+    union {
+        char *string;
+        json_int_t integer;
+        double real;
+    } value;
+} lex_t;
+
+#define stream_to_lex(stream) container_of(stream, lex_t, stream)
+
+
+/*** error reporting ***/
+
+static void error_set(json_error_t *error, const lex_t *lex,
+                      const char *msg, ...)
+{
+    va_list ap;
+    char msg_text[JSON_ERROR_TEXT_LENGTH];
+
+    int line = -1, col = -1;
+    size_t pos = 0;
+    const char *result = msg_text;
+
+    if(!error)
+        return;
+
+    va_start(ap, msg);
+    vsnprintf(msg_text, JSON_ERROR_TEXT_LENGTH, msg, ap);
+    va_end(ap);
+
+    if(lex)
+    {
+        const char *saved_text = strbuffer_value(&lex->saved_text);
+        char msg_with_context[JSON_ERROR_TEXT_LENGTH];
+
+        line = lex->stream.line;
+        col = lex->stream.column;
+        pos = lex->stream.position;
+
+        if(saved_text && saved_text[0])
+        {
+            if(lex->saved_text.length <= 20) {
+                snprintf(msg_with_context, JSON_ERROR_TEXT_LENGTH,
+                         "%s near '%s'", msg_text, saved_text);
+                result = msg_with_context;
+            }
+        }
+        else
+        {
+            if(lex->stream.state == STREAM_STATE_ERROR) {
+                /* No context for UTF-8 decoding errors */
+                result = msg_text;
+            }
+            else {
+                snprintf(msg_with_context, JSON_ERROR_TEXT_LENGTH,
+                         "%s near end of file", msg_text);
+                result = msg_with_context;
+            }
+        }
+    }
+
+    jsonp_error_set(error, line, col, pos, "%s", result);
+}
+
+
+/*** lexical analyzer ***/
+
+static void
+stream_init(stream_t *stream, get_func get, void *data)
+{
+    stream->get = get;
+    stream->data = data;
+    stream->buffer[0] = '\0';
+    stream->buffer_pos = 0;
+
+    stream->state = STREAM_STATE_OK;
+    stream->line = 1;
+    stream->column = 0;
+    stream->position = 0;
+}
+
+static int stream_get(stream_t *stream, json_error_t *error)
+{
+    int c;
+
+    if(stream->state != STREAM_STATE_OK)
+        return stream->state;
+
+    if(!stream->buffer[stream->buffer_pos])
+    {
+        c = stream->get(stream->data);
+        if(c == EOF) {
+            stream->state = STREAM_STATE_EOF;
+            return STREAM_STATE_EOF;
+        }
+
+        stream->buffer[0] = c;
+        stream->buffer_pos = 0;
+
+        if(0x80 <= c && c <= 0xFF)
+        {
+            /* multi-byte UTF-8 sequence */
+            int i, count;
+
+            count = utf8_check_first(c);
+            if(!count)
+                goto out;
+
+            assert(count >= 2);
+
+            for(i = 1; i < count; i++)
+                stream->buffer[i] = stream->get(stream->data);
+
+            if(!utf8_check_full(stream->buffer, count, NULL))
+                goto out;
+
+            stream->buffer[count] = '\0';
+        }
+        else
+            stream->buffer[1] = '\0';
+    }
+
+    c = stream->buffer[stream->buffer_pos++];
+
+    stream->position++;
+    if(c == '\n') {
+        stream->line++;
+        stream->last_column = stream->column;
+        stream->column = 0;
+    }
+    else if(utf8_check_first(c)) {
+        /* track the Unicode character column, so increment only if
+           this is the first character of a UTF-8 sequence */
+        stream->column++;
+    }
+
+    return c;
+
+out:
+    stream->state = STREAM_STATE_ERROR;
+    error_set(error, stream_to_lex(stream), "unable to decode byte 0x%x", c);
+    return STREAM_STATE_ERROR;
+}
+
+static void stream_unget(stream_t *stream, int c)
+{
+    if(c == STREAM_STATE_EOF || c == STREAM_STATE_ERROR)
+        return;
+
+    stream->position--;
+    if(c == '\n') {
+        stream->line--;
+        stream->column = stream->last_column;
+    }
+    else if(utf8_check_first(c))
+        stream->column--;
+
+    assert(stream->buffer_pos > 0);
+    stream->buffer_pos--;
+    assert(stream->buffer[stream->buffer_pos] == c);
+}
+
+
+static int lex_get(lex_t *lex, json_error_t *error)
+{
+    return stream_get(&lex->stream, error);
+}
+
+static void lex_save(lex_t *lex, int c)
+{
+    strbuffer_append_byte(&lex->saved_text, c);
+}
+
+static int lex_get_save(lex_t *lex, json_error_t *error)
+{
+    int c = stream_get(&lex->stream, error);
+    if(c != STREAM_STATE_EOF && c != STREAM_STATE_ERROR)
+        lex_save(lex, c);
+    return c;
+}
+
+static void lex_unget(lex_t *lex, int c)
+{
+    stream_unget(&lex->stream, c);
+}
+
+static void lex_unget_unsave(lex_t *lex, int c)
+{
+    if(c != STREAM_STATE_EOF && c != STREAM_STATE_ERROR) {
+        char d;
+        stream_unget(&lex->stream, c);
+        d = strbuffer_pop(&lex->saved_text);
+        assert(c == d);
+    }
+}
+
+static void lex_save_cached(lex_t *lex)
+{
+    while(lex->stream.buffer[lex->stream.buffer_pos] != '\0')
+    {
+        lex_save(lex, lex->stream.buffer[lex->stream.buffer_pos]);
+        lex->stream.buffer_pos++;
+        lex->stream.position++;
+    }
+}
+
+/* assumes that str points to 'u' plus at least 4 valid hex digits */
+static int32_t decode_unicode_escape(const char *str)
+{
+    int i;
+    int32_t value = 0;
+
+    assert(str[0] == 'u');
+
+    for(i = 1; i <= 4; i++) {
+        char c = str[i];
+        value <<= 4;
+        if(isdigit(c))
+            value += c - '0';
+        else if(islower(c))
+            value += c - 'a' + 10;
+        else if(isupper(c))
+            value += c - 'A' + 10;
+        else
+            assert(0);
+    }
+
+    return value;
+}
+
+static void lex_scan_string(lex_t *lex, json_error_t *error)
+{
+    int c;
+    const char *p;
+    char *t;
+    int i;
+
+    lex->value.string = NULL;
+    lex->token = TOKEN_INVALID;
+
+    c = lex_get_save(lex, error);
+
+    while(c != '"') {
+        if(c == STREAM_STATE_ERROR)
+            goto out;
+
+        else if(c == STREAM_STATE_EOF) {
+            error_set(error, lex, "premature end of input");
+            goto out;
+        }
+
+        else if(0 <= c && c <= 0x1F) {
+            /* control character */
+            lex_unget_unsave(lex, c);
+            if(c == '\n')
+                error_set(error, lex, "unexpected newline", c);
+            else
+                error_set(error, lex, "control character 0x%x", c);
+            goto out;
+        }
+
+        else if(c == '\\') {
+            c = lex_get_save(lex, error);
+            if(c == 'u') {
+                c = lex_get_save(lex, error);
+                for(i = 0; i < 4; i++) {
+                    if(!isxdigit(c)) {
+                        error_set(error, lex, "invalid escape");
+                        goto out;
+                    }
+                    c = lex_get_save(lex, error);
+                }
+            }
+            else if(c == '"' || c == '\\' || c == '/' || c == 'b' ||
+                    c == 'f' || c == 'n' || c == 'r' || c == 't')
+                c = lex_get_save(lex, error);
+            else {
+                error_set(error, lex, "invalid escape");
+                goto out;
+            }
+        }
+        else
+            c = lex_get_save(lex, error);
+    }
+
+    /* the actual value is at most of the same length as the source
+       string, because:
+         - shortcut escapes (e.g. "\t") (length 2) are converted to 1 byte
+         - a single \uXXXX escape (length 6) is converted to at most 3 bytes
+         - two \uXXXX escapes (length 12) forming a UTF-16 surrogate pair
+           are converted to 4 bytes
+    */
+    lex->value.string = (char *) jsonp_malloc(lex->saved_text.length + 1);
+    if(!lex->value.string) {
+        /* this is not very nice, since TOKEN_INVALID is returned */
+        goto out;
+    }
+
+    /* the target */
+    t = lex->value.string;
+
+    /* + 1 to skip the " */
+    p = strbuffer_value(&lex->saved_text) + 1;
+
+    while(*p != '"') {
+        if(*p == '\\') {
+            p++;
+            if(*p == 'u') {
+                char buffer[4];
+                int length;
+                int32_t value;
+
+                value = decode_unicode_escape(p);
+                p += 5;
+
+                if(0xD800 <= value && value <= 0xDBFF) {
+                    /* surrogate pair */
+                    if(*p == '\\' && *(p + 1) == 'u') {
+                        int32_t value2 = decode_unicode_escape(++p);
+                        p += 5;
+
+                        if(0xDC00 <= value2 && value2 <= 0xDFFF) {
+                            /* valid second surrogate */
+                            value =
+                                ((value - 0xD800) << 10) +
+                                (value2 - 0xDC00) +
+                                0x10000;
+                        }
+                        else {
+                            /* invalid second surrogate */
+                            error_set(error, lex,
+                                      "invalid Unicode '\\u%04X\\u%04X'",
+                                      value, value2);
+                            goto out;
+                        }
+                    }
+                    else {
+                        /* no second surrogate */
+                        error_set(error, lex, "invalid Unicode '\\u%04X'",
+                                  value);
+                        goto out;
+                    }
+                }
+                else if(0xDC00 <= value && value <= 0xDFFF) {
+                    error_set(error, lex, "invalid Unicode '\\u%04X'", value);
+                    goto out;
+                }
+                else if(value == 0)
+                {
+                    error_set(error, lex, "\\u0000 is not allowed");
+                    goto out;
+                }
+
+                if(utf8_encode(value, buffer, &length))
+                    assert(0);
+
+                memcpy(t, buffer, length);
+                t += length;
+            }
+            else {
+                switch(*p) {
+                    case '"': case '\\': case '/':
+                        *t = *p; break;
+                    case 'b': *t = '\b'; break;
+                    case 'f': *t = '\f'; break;
+                    case 'n': *t = '\n'; break;
+                    case 'r': *t = '\r'; break;
+                    case 't': *t = '\t'; break;
+                    default: assert(0);
+                }
+                t++;
+                p++;
+            }
+        }
+        else
+            *(t++) = *(p++);
+    }
+    *t = '\0';
+    lex->token = TOKEN_STRING;
+    return;
+
+out:
+    jsonp_free(lex->value.string);
+}
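A worked instance of the surrogate-pair arithmetic above: for the input escape "\uD834\uDD1E",

    value  = 0xD834, value2 = 0xDD1E
    ((0xD834 - 0xD800) << 10) + (0xDD1E - 0xDC00) + 0x10000 = 0x1D11E

i.e. U+1D11E (MUSICAL SYMBOL G CLEF), which utf8_encode() then writes as a 4-byte UTF-8 sequence.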
+
+#if JSON_INTEGER_IS_LONG_LONG
+#ifdef _WIN32
+#define json_strtoint     _strtoi64
+#else
+#define json_strtoint     strtoll
+#endif
+#else
+#define json_strtoint     strtol
+#endif
+
+static int lex_scan_number(lex_t *lex, int c, json_error_t *error)
+{
+    const char *saved_text;
+    char *end;
+    double value;
+
+    lex->token = TOKEN_INVALID;
+
+    if(c == '-')
+        c = lex_get_save(lex, error);
+
+    if(c == '0') {
+        c = lex_get_save(lex, error);
+        if(isdigit(c)) {
+            lex_unget_unsave(lex, c);
+            goto out;
+        }
+    }
+    else if(isdigit(c)) {
+        c = lex_get_save(lex, error);
+        while(isdigit(c))
+            c = lex_get_save(lex, error);
+    }
+    else {
+        lex_unget_unsave(lex, c);
+        goto out;
+    }
+
+    if(c != '.' && c != 'E' && c != 'e') {
+        json_int_t value;
+
+        lex_unget_unsave(lex, c);
+
+        saved_text = strbuffer_value(&lex->saved_text);
+
+        errno = 0;
+        value = json_strtoint(saved_text, &end, 10);
+        if(errno == ERANGE) {
+            if(value < 0)
+                error_set(error, lex, "too big negative integer");
+            else
+                error_set(error, lex, "too big integer");
+            goto out;
+        }
+
+        assert(end == saved_text + lex->saved_text.length);
+
+        lex->token = TOKEN_INTEGER;
+        lex->value.integer = value;
+        return 0;
+    }
+
+    if(c == '.') {
+        c = lex_get(lex, error);
+        if(!isdigit(c)) {
+            lex_unget(lex, c);
+            goto out;
+        }
+        lex_save(lex, c);
+
+        c = lex_get_save(lex, error);
+        while(isdigit(c))
+            c = lex_get_save(lex, error);
+    }
+
+    if(c == 'E' || c == 'e') {
+        c = lex_get_save(lex, error);
+        if(c == '+' || c == '-')
+            c = lex_get_save(lex, error);
+
+        if(!isdigit(c)) {
+            lex_unget_unsave(lex, c);
+            goto out;
+        }
+
+        c = lex_get_save(lex, error);
+        while(isdigit(c))
+            c = lex_get_save(lex, error);
+    }
+
+    lex_unget_unsave(lex, c);
+
+    saved_text = strbuffer_value(&lex->saved_text);
+    errno = 0;
+    value = strtod(saved_text, &end);
+    assert(end == saved_text + lex->saved_text.length);
+
+    if(errno == ERANGE && value != 0) {
+        error_set(error, lex, "real number overflow");
+        goto out;
+    }
+
+    lex->token = TOKEN_REAL;
+    lex->value.real = value;
+    return 0;
+
+out:
+    return -1;
+}
+
+static int lex_scan(lex_t *lex, json_error_t *error)
+{
+    int c;
+
+    strbuffer_clear(&lex->saved_text);
+
+    if(lex->token == TOKEN_STRING) {
+        jsonp_free(lex->value.string);
+        lex->value.string = NULL;
+    }
+
+    c = lex_get(lex, error);
+    while(c == ' ' || c == '\t' || c == '\n' || c == '\r')
+        c = lex_get(lex, error);
+
+    if(c == STREAM_STATE_EOF) {
+        lex->token = TOKEN_EOF;
+        goto out;
+    }
+
+    if(c == STREAM_STATE_ERROR) {
+        lex->token = TOKEN_INVALID;
+        goto out;
+    }
+
+    lex_save(lex, c);
+
+    if(c == '{' || c == '}' || c == '[' || c == ']' || c == ':' || c == ',')
+        lex->token = c;
+
+    else if(c == '"')
+        lex_scan_string(lex, error);
+
+    else if(isdigit(c) || c == '-') {
+        if(lex_scan_number(lex, c, error))
+            goto out;
+    }
+
+    else if(isupper(c) || islower(c)) {
+        /* eat up the whole identifier for clearer error messages */
+        const char *saved_text;
+
+        c = lex_get_save(lex, error);
+        while(isupper(c) || islower(c))
+            c = lex_get_save(lex, error);
+        lex_unget_unsave(lex, c);
+
+        saved_text = strbuffer_value(&lex->saved_text);
+
+        if(strcmp(saved_text, "true") == 0)
+            lex->token = TOKEN_TRUE;
+        else if(strcmp(saved_text, "false") == 0)
+            lex->token = TOKEN_FALSE;
+        else if(strcmp(saved_text, "null") == 0)
+            lex->token = TOKEN_NULL;
+        else
+            lex->token = TOKEN_INVALID;
+    }
+
+    else {
+        /* save the rest of the input UTF-8 sequence so that the
+           error message contains only valid UTF-8 */
+        lex_save_cached(lex);
+        lex->token = TOKEN_INVALID;
+    }
+
+out:
+    return lex->token;
+}
+
+static char *lex_steal_string(lex_t *lex)
+{
+    char *result = NULL;
+    if(lex->token == TOKEN_STRING)
+    {
+        result = lex->value.string;
+        lex->value.string = NULL;
+    }
+    return result;
+}
+
+static int lex_init(lex_t *lex, get_func get, void *data)
+{
+    stream_init(&lex->stream, get, data);
+    if(strbuffer_init(&lex->saved_text))
+        return -1;
+
+    lex->token = TOKEN_INVALID;
+    return 0;
+}
+
+static void lex_close(lex_t *lex)
+{
+    if(lex->token == TOKEN_STRING)
+        jsonp_free(lex->value.string);
+    strbuffer_close(&lex->saved_text);
+}
+
+
+/*** parser ***/
+
+static json_t *parse_value(lex_t *lex, size_t flags, json_error_t *error);
+
+static json_t *parse_object(lex_t *lex, size_t flags, json_error_t *error)
+{
+    json_t *object = json_object();
+    if(!object)
+        return NULL;
+
+    lex_scan(lex, error);
+    if(lex->token == '}')
+        return object;
+
+    while(1) {
+        char *key;
+        json_t *value;
+
+        if(lex->token != TOKEN_STRING) {
+            error_set(error, lex, "string or '}' expected");
+            goto error;
+        }
+
+        key = lex_steal_string(lex);
+        if(!key)
+            return NULL;
+
+        if(flags & JSON_REJECT_DUPLICATES) {
+            if(json_object_get(object, key)) {
+                jsonp_free(key);
+                error_set(error, lex, "duplicate object key");
+                goto error;
+            }
+        }
+
+        lex_scan(lex, error);
+        if(lex->token != ':') {
+            jsonp_free(key);
+            error_set(error, lex, "':' expected");
+            goto error;
+        }
+
+        lex_scan(lex, error);
+        value = parse_value(lex, flags, error);
+        if(!value) {
+            jsonp_free(key);
+            goto error;
+        }
+
+        if(json_object_set_nocheck(object, key, value)) {
+            jsonp_free(key);
+            json_decref(value);
+            goto error;
+        }
+
+        json_decref(value);
+        jsonp_free(key);
+
+        lex_scan(lex, error);
+        if(lex->token != ',')
+            break;
+
+        lex_scan(lex, error);
+    }
+
+    if(lex->token != '}') {
+        error_set(error, lex, "'}' expected");
+        goto error;
+    }
+
+    return object;
+
+error:
+    json_decref(object);
+    return NULL;
+}
+
+static json_t *parse_array(lex_t *lex, size_t flags, json_error_t *error)
+{
+    json_t *array = json_array();
+    if(!array)
+        return NULL;
+
+    lex_scan(lex, error);
+    if(lex->token == ']')
+        return array;
+
+    while(lex->token) {
+        json_t *elem = parse_value(lex, flags, error);
+        if(!elem)
+            goto error;
+
+        if(json_array_append(array, elem)) {
+            json_decref(elem);
+            goto error;
+        }
+        json_decref(elem);
+
+        lex_scan(lex, error);
+        if(lex->token != ',')
+            break;
+
+        lex_scan(lex, error);
+    }
+
+    if(lex->token != ']') {
+        error_set(error, lex, "']' expected");
+        goto error;
+    }
+
+    return array;
+
+error:
+    json_decref(array);
+    return NULL;
+}
+
+static json_t *parse_value(lex_t *lex, size_t flags, json_error_t *error)
+{
+    json_t *json;
+
+    switch(lex->token) {
+        case TOKEN_STRING: {
+            json = json_string_nocheck(lex->value.string);
+            break;
+        }
+
+        case TOKEN_INTEGER: {
+            json = json_integer(lex->value.integer);
+            break;
+        }
+
+        case TOKEN_REAL: {
+            json = json_real(lex->value.real);
+            break;
+        }
+
+        case TOKEN_TRUE:
+            json = json_true();
+            break;
+
+        case TOKEN_FALSE:
+            json = json_false();
+            break;
+
+        case TOKEN_NULL:
+            json = json_null();
+            break;
+
+        case '{':
+            json = parse_object(lex, flags, error);
+            break;
+
+        case '[':
+            json = parse_array(lex, flags, error);
+            break;
+
+        case TOKEN_INVALID:
+            error_set(error, lex, "invalid token");
+            return NULL;
+
+        default:
+            error_set(error, lex, "unexpected token");
+            return NULL;
+    }
+
+    if(!json)
+        return NULL;
+
+    return json;
+}
+
+static json_t *parse_json(lex_t *lex, size_t flags, json_error_t *error)
+{
+    json_t *result;
+
+    lex_scan(lex, error);
+    if(lex->token != '[' && lex->token != '{') {
+        error_set(error, lex, "'[' or '{' expected");
+        return NULL;
+    }
+
+    result = parse_value(lex, flags, error);
+    if(!result)
+        return NULL;
+
+    if(!(flags & JSON_DISABLE_EOF_CHECK)) {
+        lex_scan(lex, error);
+        if(lex->token != TOKEN_EOF) {
+            error_set(error, lex, "end of file expected");
+            json_decref(result);
+            result = NULL;
+        }
+    }
+
+    return result;
+}
+
+typedef struct
+{
+    const char *data;
+    int pos;
+} string_data_t;
+
+static int string_get(void *data)
+{
+    char c;
+    string_data_t *stream = (string_data_t *)data;
+    c = stream->data[stream->pos];
+    if(c == '\0')
+        return EOF;
+    else
+    {
+        stream->pos++;
+        return (unsigned char)c;
+    }
+}
+
+json_t *json_loads(const char *string, size_t flags, json_error_t *error)
+{
+    lex_t lex;
+    json_t *result;
+    string_data_t stream_data;
+
+    stream_data.data = string;
+    stream_data.pos = 0;
+
+    if(lex_init(&lex, string_get, (void *)&stream_data))
+        return NULL;
+
+    jsonp_error_init(error, "<string>");
+    result = parse_json(&lex, flags, error);
+
+    lex_close(&lex);
+    return result;
+}
+
+typedef struct
+{
+    const char *data;
+    size_t len;
+    size_t pos;
+} buffer_data_t;
+
+static int buffer_get(void *data)
+{
+    char c;
+    buffer_data_t *stream = (buffer_data_t *) data;
+    if(stream->pos >= stream->len)
+      return EOF;
+
+    c = stream->data[stream->pos];
+    stream->pos++;
+    return (unsigned char)c;
+}
+
+json_t *json_loadb(const char *buffer, size_t buflen, size_t flags, json_error_t *error)
+{
+    lex_t lex;
+    json_t *result;
+    buffer_data_t stream_data;
+
+    stream_data.data = buffer;
+    stream_data.pos = 0;
+    stream_data.len = buflen;
+
+    if(lex_init(&lex, buffer_get, (void *)&stream_data))
+        return NULL;
+
+    jsonp_error_init(error, "<buffer>");
+    result = parse_json(&lex, flags, error);
+
+    lex_close(&lex);
+    return result;
+}
+
+json_t *json_loadf(FILE *input, size_t flags, json_error_t *error)
+{
+    lex_t lex;
+    const char *source;
+    json_t *result;
+
+    if(lex_init(&lex, (get_func)fgetc, input))
+        return NULL;
+
+    if(input == stdin)
+        source = "<stdin>";
+    else
+        source = "<stream>";
+
+    jsonp_error_init(error, source);
+    result = parse_json(&lex, flags, error);
+
+    lex_close(&lex);
+    return result;
+}
+
+json_t *json_load_file(const char *path, size_t flags, json_error_t *error)
+{
+    json_t *result;
+    FILE *fp;
+
+    jsonp_error_init(error, path);
+
+    fp = fopen(path, "r");
+    if(!fp)
+    {
+        error_set(error, NULL, "unable to open %s: %s",
+                  path, strerror(errno));
+        return NULL;
+    }
+
+    result = json_loadf(fp, flags, error);
+
+    fclose(fp);
+    return result;
+}
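A short usage sketch for the stream loader above (illustrative, not from the tree): reading from stdin sets the error source to "<stdin>", so diagnostics can be printed in a compiler-like format:

    #include <stdio.h>
    #include <jansson.h>

    int main(void)
    {
        json_error_t error;
        json_t *root = json_loadf(stdin, 0, &error);
        if(!root) {
            fprintf(stderr, "%s:%d:%d: %s\n",
                    error.source, error.line, error.column, error.text);
            return 1;
        }
        json_decref(root);
        return 0;
    }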
diff --git a/lang/c/jansson/src/memory.c b/lang/c/jansson/src/memory.c
new file mode 100644
index 0000000..0ed3de6
--- /dev/null
+++ b/lang/c/jansson/src/memory.c
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ * Copyright (c) 2011 Basile Starynkevitch  <basile at starynkevitch.net>
+ *
+ * Jansson is free software; you can redistribute it and/or modify it
+ * under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <stdlib.h>
+#include <string.h>
+
+#include <jansson.h>
+#include "jansson_private.h"
+
+/* memory function pointers */
+static json_malloc_t do_malloc = malloc;
+static json_free_t do_free = free;
+
+void *jsonp_malloc(size_t size)
+{
+    if(!size)
+        return NULL;
+
+    return (*do_malloc)(size);
+}
+
+void jsonp_free(void *ptr)
+{
+    if(!ptr)
+        return;
+
+    (*do_free)(ptr);
+}
+
+char *jsonp_strdup(const char *str)
+{
+    char *new_str;
+
+    new_str = (char *) jsonp_malloc(strlen(str) + 1);
+    if(!new_str)
+        return NULL;
+
+    strcpy(new_str, str);
+    return new_str;
+}
+
+void json_set_alloc_funcs(json_malloc_t malloc_fn, json_free_t free_fn)
+{
+    do_malloc = malloc_fn;
+    do_free = free_fn;
+}
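An illustrative pairing for json_set_alloc_funcs() above (counting_malloc and counting_free are invented for this sketch): allocators that track live bytes, installed before any json_t value is created. The size header is stored in front of each block; this assumes sizeof(size_t) satisfies the platform's alignment needs, which holds on common targets.

    #include <stdlib.h>
    #include <jansson.h>

    static size_t live = 0;

    static void *counting_malloc(size_t size)
    {
        size_t *p = (size_t *) malloc(size + sizeof(size_t));
        if(!p)
            return NULL;
        *p = size;
        live += size;
        return p + 1;
    }

    static void counting_free(void *ptr)
    {
        size_t *p = (size_t *)ptr - 1;
        live -= *p;
        free(p);
    }

    int main(void)
    {
        json_set_alloc_funcs(counting_malloc, counting_free);
        json_decref(json_pack("[i, i]", 1, 2));
        return live != 0;   /* exits 0 if everything was released */
    }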
diff --git a/lang/c/jansson/src/pack_unpack.c b/lang/c/jansson/src/pack_unpack.c
new file mode 100644
index 0000000..20d540b
--- /dev/null
+++ b/lang/c/jansson/src/pack_unpack.c
@@ -0,0 +1,610 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ * Copyright (c) 2011 Graeme Smecher <graeme.smecher at mail.mcgill.ca>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <string.h>
+#include <jansson.h>
+#include "jansson_private.h"
+#include "utf.h"
+
+typedef struct {
+    const char *start;
+    const char *fmt;
+    char token;
+    json_error_t *error;
+    size_t flags;
+    int line;
+    int column;
+} scanner_t;
+
+static const char *type_names[] = {
+    "object",
+    "array",
+    "string",
+    "integer",
+    "real",
+    "true",
+    "false",
+    "null"
+};
+
+#define type_name(x) type_names[json_typeof(x)]
+
+static const char *unpack_value_starters = "{[siIbfFOon";
+
+
+static void scanner_init(scanner_t *s, json_error_t *error,
+                         size_t flags, const char *fmt)
+{
+    s->error = error;
+    s->flags = flags;
+    s->fmt = s->start = fmt;
+    s->line = 1;
+    s->column = 0;
+}
+
+static void next_token(scanner_t *s)
+{
+    const char *t = s->fmt;
+    s->column++;
+
+    /* skip space and ignored chars */
+    while(*t == ' ' || *t == '\t' || *t == '\n' || *t == ',' || *t == ':') {
+        if(*t == '\n') {
+            s->line++;
+            s->column = 1;
+        }
+        else
+            s->column++;
+
+        t++;
+    }
+
+    s->token = *t;
+
+    t++;
+    s->fmt = t;
+}
+
+static void set_error(scanner_t *s, const char *source, const char *fmt, ...)
+{
+    va_list ap;
+    size_t pos;
+    va_start(ap, fmt);
+
+    pos = (size_t)(s->fmt - s->start);
+    jsonp_error_vset(s->error, s->line, s->column, pos, fmt, ap);
+
+    jsonp_error_set_source(s->error, source);
+
+    va_end(ap);
+}
+
+static json_t *pack(scanner_t *s, va_list *ap);
+
+static json_t *pack_object(scanner_t *s, va_list *ap)
+{
+    json_t *object = json_object();
+    next_token(s);
+
+    while(s->token != '}') {
+        const char *key;
+        json_t *value;
+
+        if(!s->token) {
+            set_error(s, "<format>", "Unexpected end of format string");
+            goto error;
+        }
+
+        if(s->token != 's') {
+            set_error(s, "<format>", "Expected format 's', got '%c'", s->token);
+            goto error;
+        }
+
+        key = va_arg(*ap, const char *);
+        if(!key) {
+            set_error(s, "<args>", "NULL object key");
+            goto error;
+        }
+
+        if(!utf8_check_string(key, -1)) {
+            set_error(s, "<args>", "Invalid UTF-8 in object key");
+            goto error;
+        }
+
+        next_token(s);
+
+        value = pack(s, ap);
+        if(!value)
+            goto error;
+
+        if(json_object_set_new_nocheck(object, key, value)) {
+            set_error(s, "<internal>", "Unable to add key \"%s\"", key);
+            goto error;
+        }
+
+        next_token(s);
+    }
+
+    return object;
+
+error:
+    json_decref(object);
+    return NULL;
+}
+
+static json_t *pack_array(scanner_t *s, va_list *ap)
+{
+    json_t *array = json_array();
+    next_token(s);
+
+    while(s->token != ']') {
+        json_t *value;
+
+        if(!s->token) {
+            set_error(s, "<format>", "Unexpected end of format string");
+            goto error;
+        }
+
+        value = pack(s, ap);
+        if(!value)
+            goto error;
+
+        if(json_array_append_new(array, value)) {
+            set_error(s, "<internal>", "Unable to append to array");
+            goto error;
+        }
+
+        next_token(s);
+    }
+    return array;
+
+error:
+    json_decref(array);
+    return NULL;
+}
+
+static json_t *pack(scanner_t *s, va_list *ap)
+{
+    switch(s->token) {
+        case '{':
+            return pack_object(s, ap);
+
+        case '[':
+            return pack_array(s, ap);
+
+        case 's': /* string */
+        {
+            const char *str = va_arg(*ap, const char *);
+            if(!str) {
+                set_error(s, "<args>", "NULL string argument");
+                return NULL;
+            }
+            if(!utf8_check_string(str, -1)) {
+                set_error(s, "<args>", "Invalid UTF-8 string");
+                return NULL;
+            }
+            return json_string_nocheck(str);
+        }
+
+        case 'n': /* null */
+            return json_null();
+
+        case 'b': /* boolean */
+            return va_arg(*ap, int) ? json_true() : json_false();
+
+        case 'i': /* integer from int */
+            return json_integer(va_arg(*ap, int));
+
+        case 'I': /* integer from json_int_t */
+            return json_integer(va_arg(*ap, json_int_t));
+
+        case 'f': /* real */
+            return json_real(va_arg(*ap, double));
+
+        case 'O': /* a json_t object; increments refcount */
+            return json_incref(va_arg(*ap, json_t *));
+
+        case 'o': /* a json_t object; doesn't increment refcount */
+            return va_arg(*ap, json_t *);
+
+        default:
+            set_error(s, "<format>", "Unexpected format character '%c'",
+                      s->token);
+            return NULL;
+    }
+}
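The format characters above compose recursively; an illustrative builder (build_example is invented for this sketch). Whitespace, ':' and ',' in the format string are skipped by next_token() and serve only readability:

    #include <jansson.h>

    /* builds {"name": "avro", "tags": ["json", "c"], "size": 42} */
    static json_t *build_example(void)
    {
        return json_pack("{s:s, s:[s,s], s:i}",
                         "name", "avro",
                         "tags", "json", "c",
                         "size", 42);
    }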
+
+static int unpack(scanner_t *s, json_t *root, va_list *ap);
+
+static int unpack_object(scanner_t *s, json_t *root, va_list *ap)
+{
+    int ret = -1;
+    int strict = 0;
+
+    /* Use a set (emulated by a hashtable) to check that all object
+       keys are accessed. Checking that the correct number of keys
+       was accessed is not enough, as the same key can be unpacked
+       multiple times.
+    */
+    hashtable_t key_set;
+
+    if(hashtable_init(&key_set, jsonp_hash_str, jsonp_str_equal, NULL, NULL)) {
+        set_error(s, "<internal>", "Out of memory");
+        return -1;
+    }
+
+    if(!json_is_object(root)) {
+        set_error(s, "<validation>", "Expected object, got %s",
+                  type_name(root));
+        goto out;
+    }
+    next_token(s);
+
+    while(s->token != '}') {
+        const char *key;
+        json_t *value;
+
+        if(strict != 0) {
+            set_error(s, "<format>", "Expected '}' after '%c', got '%c'",
+                      (strict == 1 ? '!' : '*'), s->token);
+            goto out;
+        }
+
+        if(!s->token) {
+            set_error(s, "<format>", "Unexpected end of format string");
+            goto out;
+        }
+
+        if(s->token == '!' || s->token == '*') {
+            strict = (s->token == '!' ? 1 : -1);
+            next_token(s);
+            continue;
+        }
+
+        if(s->token != 's') {
+            set_error(s, "<format>", "Expected format 's', got '%c'", s->token);
+            goto out;
+        }
+
+        key = va_arg(*ap, const char *);
+        if(!key) {
+            set_error(s, "<args>", "NULL object key");
+            goto out;
+        }
+
+        next_token(s);
+
+        value = json_object_get(root, key);
+        if(!value) {
+            set_error(s, "<validation>", "Object item not found: %s", key);
+            goto out;
+        }
+
+        if(unpack(s, value, ap))
+            goto out;
+
+        hashtable_set(&key_set, (void *)key, NULL);
+        next_token(s);
+    }
+
+    if(strict == 0 && (s->flags & JSON_STRICT))
+        strict = 1;
+
+    if(strict == 1 && key_set.size != json_object_size(root)) {
+        long diff = (long)json_object_size(root) - (long)key_set.size;
+        set_error(s, "<validation>", "%li object item(s) left unpacked", diff);
+        goto out;
+    }
+
+    ret = 0;
+
+out:
+    hashtable_close(&key_set);
+    return ret;
+}
+
+static int unpack_array(scanner_t *s, json_t *root, va_list *ap)
+{
+    size_t i = 0;
+    int strict = 0;
+
+    if(!json_is_array(root)) {
+        set_error(s, "<validation>", "Expected array, got %s", type_name(root));
+        return -1;
+    }
+    next_token(s);
+
+    while(s->token != ']') {
+        json_t *value;
+
+        if(strict != 0) {
+            set_error(s, "<format>", "Expected ']' after '%c', got '%c'",
+                      (strict == 1 ? '!' : '*'),
+                      s->token);
+            return -1;
+        }
+
+        if(!s->token) {
+            set_error(s, "<format>", "Unexpected end of format string");
+            return -1;
+        }
+
+        if(s->token == '!' || s->token == '*') {
+            strict = (s->token == '!' ? 1 : -1);
+            next_token(s);
+            continue;
+        }
+
+        if(!strchr(unpack_value_starters, s->token)) {
+            set_error(s, "<format>", "Unexpected format character '%c'",
+                      s->token);
+            return -1;
+        }
+
+        value = json_array_get(root, i);
+        if(!value) {
+            set_error(s, "<validation>", "Array index %lu out of range",
+                      (unsigned long)i);
+            return -1;
+        }
+
+        if(unpack(s, value, ap))
+            return -1;
+
+        next_token(s);
+        i++;
+    }
+
+    if(strict == 0 && (s->flags & JSON_STRICT))
+        strict = 1;
+
+    if(strict == 1 && i != json_array_size(root)) {
+        long diff = (long)json_array_size(root) - (long)i;
+        set_error(s, "<validation>", "%li array item(s) left unpacked", diff);
+        return -1;
+    }
+
+    return 0;
+}
+
+static int unpack(scanner_t *s, json_t *root, va_list *ap)
+{
+    switch(s->token)
+    {
+        case '{':
+            return unpack_object(s, root, ap);
+
+        case '[':
+            return unpack_array(s, root, ap);
+
+        case 's':
+            if(!json_is_string(root)) {
+                set_error(s, "<validation>", "Expected string, got %s",
+                          type_name(root));
+                return -1;
+            }
+
+            if(!(s->flags & JSON_VALIDATE_ONLY)) {
+                const char **str;
+
+                str = va_arg(*ap, const char **);
+                if(!str) {
+                    set_error(s, "<args>", "NULL string argument");
+                    return -1;
+                }
+
+                *str = json_string_value(root);
+            }
+            return 0;
+
+        case 'i':
+            if(!json_is_integer(root)) {
+                set_error(s, "<validation>", "Expected integer, got %s",
+                          type_name(root));
+                return -1;
+            }
+
+            if(!(s->flags & JSON_VALIDATE_ONLY))
+                *va_arg(*ap, int*) = json_integer_value(root);
+
+            return 0;
+
+        case 'I':
+            if(!json_is_integer(root)) {
+                set_error(s, "<validation>", "Expected integer, got %s",
+                          type_name(root));
+                return -1;
+            }
+
+            if(!(s->flags & JSON_VALIDATE_ONLY))
+                *va_arg(*ap, json_int_t*) = json_integer_value(root);
+
+            return 0;
+
+        case 'b':
+            if(!json_is_boolean(root)) {
+                set_error(s, "<validation>", "Expected true or false, got %s",
+                          type_name(root));
+                return -1;
+            }
+
+            if(!(s->flags & JSON_VALIDATE_ONLY))
+                *va_arg(*ap, int*) = json_is_true(root);
+
+            return 0;
+
+        case 'f':
+            if(!json_is_real(root)) {
+                set_error(s, "<validation>", "Expected real, got %s",
+                          type_name(root));
+                return -1;
+            }
+
+            if(!(s->flags & JSON_VALIDATE_ONLY))
+                *va_arg(*ap, double*) = json_real_value(root);
+
+            return 0;
+
+        case 'F':
+            if(!json_is_number(root)) {
+                set_error(s, "<validation>", "Expected real or integer, got %s",
+                          type_name(root));
+                return -1;
+            }
+
+            if(!(s->flags & JSON_VALIDATE_ONLY))
+                *va_arg(*ap, double*) = json_number_value(root);
+
+            return 0;
+
+        case 'O':
+            if(!(s->flags & JSON_VALIDATE_ONLY))
+                json_incref(root);
+            /* Fall through */
+
+        case 'o':
+            if(!(s->flags & JSON_VALIDATE_ONLY))
+                *va_arg(*ap, json_t**) = root;
+
+            return 0;
+
+        case 'n':
+            /* Never assign, just validate */
+            if(!json_is_null(root)) {
+                set_error(s, "<validation>", "Expected null, got %s",
+                          type_name(root));
+                return -1;
+            }
+            return 0;
+
+        default:
+            set_error(s, "<format>", "Unexpected format character '%c'",
+                      s->token);
+            return -1;
+    }
+}
+
+json_t *json_vpack_ex(json_error_t *error, size_t flags,
+                      const char *fmt, va_list ap)
+{
+    scanner_t s;
+    va_list ap_copy;
+    json_t *value;
+
+    if(!fmt || !*fmt) {
+        jsonp_error_init(error, "<format>");
+        jsonp_error_set(error, -1, -1, 0, "NULL or empty format string");
+        return NULL;
+    }
+    jsonp_error_init(error, NULL);
+
+    scanner_init(&s, error, flags, fmt);
+    next_token(&s);
+
+    va_copy(ap_copy, ap);
+    value = pack(&s, &ap_copy);
+    va_end(ap_copy);
+
+    if(!value)
+        return NULL;
+
+    next_token(&s);
+    if(s.token) {
+        json_decref(value);
+        set_error(&s, "<format>", "Garbage after format string");
+        return NULL;
+    }
+
+    return value;
+}
+
+json_t *json_pack_ex(json_error_t *error, size_t flags, const char *fmt, ...)
+{
+    json_t *value;
+    va_list ap;
+
+    va_start(ap, fmt);
+    value = json_vpack_ex(error, flags, fmt, ap);
+    va_end(ap);
+
+    return value;
+}
+
+json_t *json_pack(const char *fmt, ...)
+{
+    json_t *value;
+    va_list ap;
+
+    va_start(ap, fmt);
+    value = json_vpack_ex(NULL, 0, fmt, ap);
+    va_end(ap);
+
+    return value;
+}
+
+int json_vunpack_ex(json_t *root, json_error_t *error, size_t flags,
+                    const char *fmt, va_list ap)
+{
+    scanner_t s;
+    va_list ap_copy;
+
+    if(!root) {
+        jsonp_error_init(error, "<root>");
+        jsonp_error_set(error, -1, -1, 0, "NULL root value");
+        return -1;
+    }
+
+    if(!fmt || !*fmt) {
+        jsonp_error_init(error, "<format>");
+        jsonp_error_set(error, -1, -1, 0, "NULL or empty format string");
+        return -1;
+    }
+    jsonp_error_init(error, NULL);
+
+    scanner_init(&s, error, flags, fmt);
+    next_token(&s);
+
+    va_copy(ap_copy, ap);
+    if(unpack(&s, root, &ap_copy)) {
+        va_end(ap_copy);
+        return -1;
+    }
+    va_end(ap_copy);
+
+    next_token(&s);
+    if(s.token) {
+        set_error(&s, "<format>", "Garbage after format string");
+        return -1;
+    }
+
+    return 0;
+}
+
+int json_unpack_ex(json_t *root, json_error_t *error, size_t flags, const char *fmt, ...)
+{
+    int ret;
+    va_list ap;
+
+    va_start(ap, fmt);
+    ret = json_vunpack_ex(root, error, flags, fmt, ap);
+    va_end(ap);
+
+    return ret;
+}
+
+int json_unpack(json_t *root, const char *fmt, ...)
+{
+    int ret;
+    va_list ap;
+
+    va_start(ap, fmt);
+    ret = json_vunpack_ex(root, NULL, 0, fmt, ap);
+    va_end(ap);
+
+    return ret;
+}
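+
+/* Illustrative usage sketch (editorial note, not part of the upstream
+ * Jansson sources): the entry points above are driven by the same format
+ * characters unpack() dispatches on -- 's' strings, 'i'/'I' integers,
+ * 'b' booleans, 'f'/'F' reals, 'o'/'O' raw json_t values, 'n' null, and
+ * '{' / '[' containers -- assuming next_token() skips whitespace and the
+ * ':'/',' separators as in upstream Jansson.
+ *
+ *     json_t *obj;
+ *     const char *name;
+ *     int version;
+ *
+ *     obj = json_pack("{s:s, s:i}", "name", "avro", "version", 180);
+ *     if(obj && json_unpack(obj, "{s:s, s:i}",
+ *                           "name", &name, "version", &version) == 0)
+ *         printf("%s %d\n", name, version);
+ *     json_decref(obj);
+ */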
diff --git a/lang/c/jansson/src/strbuffer.c b/lang/c/jansson/src/strbuffer.c
new file mode 100644
index 0000000..ad009c7
--- /dev/null
+++ b/lang/c/jansson/src/strbuffer.c
@@ -0,0 +1,104 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri@digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#define _GNU_SOURCE
+#include <stdlib.h>
+#include <string.h>
+#include "jansson_private.h"
+#include "strbuffer.h"
+
+#define STRBUFFER_MIN_SIZE  16
+#define STRBUFFER_FACTOR    2
+
+int strbuffer_init(strbuffer_t *strbuff)
+{
+    strbuff->size = STRBUFFER_MIN_SIZE;
+    strbuff->length = 0;
+
+    strbuff->value = (char *) jsonp_malloc(strbuff->size);
+    if(!strbuff->value)
+        return -1;
+
+    /* initialize to empty */
+    strbuff->value[0] = '\0';
+    return 0;
+}
+
+void strbuffer_close(strbuffer_t *strbuff)
+{
+    jsonp_free(strbuff->value);
+    strbuff->size = 0;
+    strbuff->length = 0;
+    strbuff->value = NULL;
+}
+
+void strbuffer_clear(strbuffer_t *strbuff)
+{
+    strbuff->length = 0;
+    strbuff->value[0] = '\0';
+}
+
+const char *strbuffer_value(const strbuffer_t *strbuff)
+{
+    return strbuff->value;
+}
+
+char *strbuffer_steal_value(strbuffer_t *strbuff)
+{
+    char *result = strbuff->value;
+    strbuffer_init(strbuff);
+    return result;
+}
+
+int strbuffer_append(strbuffer_t *strbuff, const char *string)
+{
+    return strbuffer_append_bytes(strbuff, string, strlen(string));
+}
+
+int strbuffer_append_byte(strbuffer_t *strbuff, char byte)
+{
+    return strbuffer_append_bytes(strbuff, &byte, 1);
+}
+
+int strbuffer_append_bytes(strbuffer_t *strbuff, const char *data, int size)
+{
+    if(strbuff->length + size >= strbuff->size)
+    {
+        size_t new_size;
+        char *new_value;
+
+        new_size = max(strbuff->size * STRBUFFER_FACTOR,
+                       strbuff->length + size + 1);
+
+        new_value = (char *) jsonp_malloc(new_size);
+        if(!new_value)
+            return -1;
+
+        memcpy(new_value, strbuff->value, strbuff->length);
+
+        jsonp_free(strbuff->value);
+        strbuff->value = new_value;
+        strbuff->size = new_size;
+    }
+
+    memcpy(strbuff->value + strbuff->length, data, size);
+    strbuff->length += size;
+    strbuff->value[strbuff->length] = '\0';
+
+    return 0;
+}
+
+char strbuffer_pop(strbuffer_t *strbuff)
+{
+    if(strbuff->length > 0) {
+        char c = strbuff->value[--strbuff->length];
+        strbuff->value[strbuff->length] = '\0';
+        return c;
+    }
+    else
+        return '\0';
+}
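+
+/* Illustrative usage sketch (editorial note, not part of the upstream
+ * sources): the buffer grows geometrically in strbuffer_append_bytes()
+ * and is always NUL-terminated, so its running value can be read at any
+ * point.
+ *
+ *     strbuffer_t buf;
+ *     if(strbuffer_init(&buf) == 0) {
+ *         strbuffer_append(&buf, "hello");
+ *         strbuffer_append_byte(&buf, '!');
+ *         printf("%s (%d bytes)\n", strbuffer_value(&buf), buf.length);
+ *         strbuffer_close(&buf);
+ *     }
+ */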
diff --git a/lang/c/jansson/src/strbuffer.h b/lang/c/jansson/src/strbuffer.h
new file mode 100644
index 0000000..06a1522
--- /dev/null
+++ b/lang/c/jansson/src/strbuffer.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri@digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#ifndef STRBUFFER_H
+#define STRBUFFER_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#pragma GCC visibility push(hidden)
+
+typedef struct {
+    char *value;
+    int length;   /* bytes used */
+    int size;     /* bytes allocated */
+} strbuffer_t;
+
+int strbuffer_init(strbuffer_t *strbuff);
+void strbuffer_close(strbuffer_t *strbuff);
+
+void strbuffer_clear(strbuffer_t *strbuff);
+
+const char *strbuffer_value(const strbuffer_t *strbuff);
+char *strbuffer_steal_value(strbuffer_t *strbuff);
+
+int strbuffer_append(strbuffer_t *strbuff, const char *string);
+int strbuffer_append_byte(strbuffer_t *strbuff, char byte);
+int strbuffer_append_bytes(strbuffer_t *strbuff, const char *data, int size);
+
+char strbuffer_pop(strbuffer_t *strbuff);
+
+#pragma GCC visibility pop
+
+CLOSE_EXTERN
+#endif
diff --git a/lang/c/jansson/src/utf.c b/lang/c/jansson/src/utf.c
new file mode 100644
index 0000000..f48c2e7
--- /dev/null
+++ b/lang/c/jansson/src/utf.c
@@ -0,0 +1,190 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri@digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <string.h>
+#include "utf.h"
+
+int utf8_encode(int32_t codepoint, char *buffer, int *size)
+{
+    if(codepoint < 0)
+        return -1;
+    else if(codepoint < 0x80)
+    {
+        buffer[0] = (char)codepoint;
+        *size = 1;
+    }
+    else if(codepoint < 0x800)
+    {
+        buffer[0] = 0xC0 + ((codepoint & 0x7C0) >> 6);
+        buffer[1] = 0x80 + ((codepoint & 0x03F));
+        *size = 2;
+    }
+    else if(codepoint < 0x10000)
+    {
+        buffer[0] = 0xE0 + ((codepoint & 0xF000) >> 12);
+        buffer[1] = 0x80 + ((codepoint & 0x0FC0) >> 6);
+        buffer[2] = 0x80 + ((codepoint & 0x003F));
+        *size = 3;
+    }
+    else if(codepoint <= 0x10FFFF)
+    {
+        buffer[0] = 0xF0 + ((codepoint & 0x1C0000) >> 18);
+        buffer[1] = 0x80 + ((codepoint & 0x03F000) >> 12);
+        buffer[2] = 0x80 + ((codepoint & 0x000FC0) >> 6);
+        buffer[3] = 0x80 + ((codepoint & 0x00003F));
+        *size = 4;
+    }
+    else
+        return -1;
+
+    return 0;
+}
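+
+/* Worked example (editorial note, not part of the upstream sources):
+ * encoding U+20AC (the euro sign) takes the three-byte branch above:
+ *
+ *     buffer[0] = 0xE0 + ((0x20AC & 0xF000) >> 12) = 0xE2
+ *     buffer[1] = 0x80 + ((0x20AC & 0x0FC0) >>  6) = 0x82
+ *     buffer[2] = 0x80 + ((0x20AC & 0x003F))       = 0xAC
+ *
+ * which is the expected UTF-8 sequence E2 82 AC, with *size set to 3.
+ */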
+
+int utf8_check_first(char byte)
+{
+    unsigned char u = (unsigned char)byte;
+
+    if(u < 0x80)
+        return 1;
+
+    if(0x80 <= u && u <= 0xBF) {
+        /* second, third or fourth byte of a multi-byte
+           sequence, i.e. a "continuation byte" */
+        return 0;
+    }
+    else if(u == 0xC0 || u == 0xC1) {
+        /* overlong encoding of an ASCII byte */
+        return 0;
+    }
+    else if(0xC2 <= u && u <= 0xDF) {
+        /* 2-byte sequence */
+        return 2;
+    }
+
+    else if(0xE0 <= u && u <= 0xEF) {
+        /* 3-byte sequence */
+        return 3;
+    }
+    else if(0xF0 <= u && u <= 0xF4) {
+        /* 4-byte sequence */
+        return 4;
+    }
+    else { /* u >= 0xF5 */
+        /* Restricted (start of 4-, 5- or 6-byte sequence) or invalid
+           UTF-8 */
+        return 0;
+    }
+}
+
+int utf8_check_full(const char *buffer, int size, int32_t *codepoint)
+{
+    int i;
+    int32_t value = 0;
+    unsigned char u = (unsigned char)buffer[0];
+
+    if(size == 2)
+    {
+        value = u & 0x1F;
+    }
+    else if(size == 3)
+    {
+        value = u & 0xF;
+    }
+    else if(size == 4)
+    {
+        value = u & 0x7;
+    }
+    else
+        return 0;
+
+    for(i = 1; i < size; i++)
+    {
+        u = (unsigned char)buffer[i];
+
+        if(u < 0x80 || u > 0xBF) {
+            /* not a continuation byte */
+            return 0;
+        }
+
+        value = (value << 6) + (u & 0x3F);
+    }
+
+    if(value > 0x10FFFF) {
+        /* not in Unicode range */
+        return 0;
+    }
+
+    else if(0xD800 <= value && value <= 0xDFFF) {
+        /* invalid code point (UTF-16 surrogate halves) */
+        return 0;
+    }
+
+    else if((size == 2 && value < 0x80) ||
+            (size == 3 && value < 0x800) ||
+            (size == 4 && value < 0x10000)) {
+        /* overlong encoding */
+        return 0;
+    }
+
+    if(codepoint)
+        *codepoint = value;
+
+    return 1;
+}
+
+const char *utf8_iterate(const char *buffer, int32_t *codepoint)
+{
+    int count;
+    int32_t value;
+
+    if(!*buffer)
+        return buffer;
+
+    count = utf8_check_first(buffer[0]);
+    if(count <= 0)
+        return NULL;
+
+    if(count == 1)
+        value = (unsigned char)buffer[0];
+    else
+    {
+        if(!utf8_check_full(buffer, count, &value))
+            return NULL;
+    }
+
+    if(codepoint)
+        *codepoint = value;
+
+    return buffer + count;
+}
+
+int utf8_check_string(const char *string, int length)
+{
+    int i;
+
+    if(length == -1)
+        length = strlen(string);
+
+    for(i = 0; i < length; i++)
+    {
+        int count = utf8_check_first(string[i]);
+        if(count == 0)
+            return 0;
+        else if(count > 1)
+        {
+            if(i + count > length)
+                return 0;
+
+            if(!utf8_check_full(&string[i], count, NULL))
+                return 0;
+
+            i += count - 1;
+        }
+    }
+
+    return 1;
+}
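+
+/* Illustrative usage sketch (editorial note, not part of the upstream
+ * sources): utf8_iterate() walks a NUL-terminated buffer one code point
+ * at a time; it returns NULL on malformed input and the unchanged
+ * pointer once the terminating NUL is reached. Here 'input' stands for
+ * any NUL-terminated string.
+ *
+ *     const char *p = input;
+ *     int32_t cp;
+ *     while(*p) {
+ *         p = utf8_iterate(p, &cp);
+ *         if(!p)
+ *             break;              (stop: malformed UTF-8)
+ *         printf("U+%04X\n", (unsigned)cp);
+ *     }
+ */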
diff --git a/lang/c/jansson/src/utf.h b/lang/c/jansson/src/utf.h
new file mode 100644
index 0000000..8385978
--- /dev/null
+++ b/lang/c/jansson/src/utf.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri@digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#ifndef UTF_H
+#define UTF_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+
+#ifdef HAVE_INTTYPES_H
+/* inttypes.h includes stdint.h in a standard environment, so there's
+no need to include stdint.h separately. If inttypes.h doesn't define
+int32_t, it's defined in config.h. */
+#include <inttypes.h>
+#endif /* HAVE_INTTYPES_H */
+
+#else /* !HAVE_CONFIG_H */
+#ifdef _WIN32
+typedef int int32_t;
+#else /* !_WIN32 */
+/* Assume a standard environment */
+#include <inttypes.h>
+#endif /* _WIN32 */
+
+#endif /* HAVE_CONFIG_H */
+
+#pragma GCC visibility push(hidden)
+
+int utf8_encode(int32_t codepoint, char *buffer, int *size);
+
+int utf8_check_first(char byte);
+int utf8_check_full(const char *buffer, int size, int32_t *codepoint);
+const char *utf8_iterate(const char *buffer, int32_t *codepoint);
+
+int utf8_check_string(const char *string, int length);
+
+#pragma GCC visibility pop
+
+CLOSE_EXTERN
+#endif
diff --git a/lang/c/jansson/src/value.c b/lang/c/jansson/src/value.c
new file mode 100644
index 0000000..daffbbd
--- /dev/null
+++ b/lang/c/jansson/src/value.c
@@ -0,0 +1,983 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri@digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#define _GNU_SOURCE
+
+#include <stddef.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <jansson.h>
+#include "hashtable.h"
+#include "jansson_private.h"
+#include "utf.h"
+
+
+static JSON_INLINE void json_init(json_t *json, json_type type)
+{
+    json->type = type;
+    json->refcount = 1;
+}
+
+
+/*** object ***/
+
+/* From http://www.cse.yorku.ca/~oz/hash.html */
+size_t jsonp_hash_str(const void *ptr)
+{
+    const char *str = (const char *)ptr;
+
+    size_t hash = 5381;
+    size_t c;
+
+    while((c = (size_t)*str))
+    {
+        hash = ((hash << 5) + hash) + c;
+        str++;
+    }
+
+    return hash;
+}
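+
+/* Editorial note (not part of the upstream sources): this is the djb2
+ * string hash -- each iteration computes hash * 33 + c via the
+ * shift-and-add form above. For the one-character key "a":
+ * 5381 * 33 + 'a' = 177573 + 97 = 177670.
+ */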
+
+int jsonp_str_equal(const void *ptr1, const void *ptr2)
+{
+    return strcmp((const char *)ptr1, (const char *)ptr2) == 0;
+}
+
+/* This macro just returns a pointer that's a few bytes backwards from
+   string. This makes it possible to pass a pointer to object_key_t
+   when only the string inside it is used, without actually creating
+   an object_key_t instance. */
+#define string_to_key(string)  container_of(string, object_key_t, key)
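+
+/* Editorial note (not part of the upstream sources): assuming the
+ * object_key_t layout from jansson_private.h (a serial number followed
+ * by the flexible 'key' member), container_of() here subtracts
+ * offsetof(object_key_t, key) from the string pointer, recovering the
+ * address of the enclosing struct without allocating one.
+ */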
+
+static size_t hash_key(const void *ptr)
+{
+    return jsonp_hash_str(((const object_key_t *)ptr)->key);
+}
+
+static int key_equal(const void *ptr1, const void *ptr2)
+{
+    return jsonp_str_equal(((const object_key_t *)ptr1)->key,
+                           ((const object_key_t *)ptr2)->key);
+}
+
+static void value_decref(void *value)
+{
+    json_decref((json_t *)value);
+}
+
+json_t *json_object(void)
+{
+    json_object_t *object = (json_object_t *) jsonp_malloc(sizeof(json_object_t));
+    if(!object)
+        return NULL;
+    json_init(&object->json, JSON_OBJECT);
+
+    if(hashtable_init(&object->hashtable,
+                      hash_key, key_equal,
+                      jsonp_free, value_decref))
+    {
+        jsonp_free(object);
+        return NULL;
+    }
+
+    object->serial = 0;
+    object->visited = 0;
+
+    return &object->json;
+}
+
+static void json_delete_object(json_object_t *object)
+{
+    hashtable_close(&object->hashtable);
+    jsonp_free(object);
+}
+
+size_t json_object_size(const json_t *json)
+{
+    json_object_t *object;
+
+    if(!json_is_object(json))
+        return 0;
+
+    object = json_to_object(json);
+    return object->hashtable.size;
+}
+
+json_t *json_object_get(const json_t *json, const char *key)
+{
+    json_object_t *object;
+
+    if(!json_is_object(json))
+        return NULL;
+
+    object = json_to_object(json);
+    return (json_t *) hashtable_get(&object->hashtable, string_to_key(key));
+}
+
+int json_object_set_new_nocheck(json_t *json, const char *key, json_t *value)
+{
+    json_object_t *object;
+    object_key_t *k;
+
+    if(!key || !value)
+        return -1;
+
+    if(!json_is_object(json) || json == value)
+    {
+        json_decref(value);
+        return -1;
+    }
+    object = json_to_object(json);
+
+    /* offsetof(...) returns the size of object_key_t without the
+       last, flexible member. This way, the correct amount is
+       allocated. */
+    k = (object_key_t *) jsonp_malloc(offsetof(object_key_t, key) + strlen(key) + 1);
+    if(!k)
+    {
+        json_decref(value);
+        return -1;
+    }
+
+    k->serial = object->serial++;
+    strcpy(k->key, key);
+
+    if(hashtable_set(&object->hashtable, k, value))
+    {
+        json_decref(value);
+        return -1;
+    }
+
+    return 0;
+}
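+
+/* Editorial note (not part of the upstream sources): for the allocation
+ * above, offsetof(object_key_t, key) covers the 'serial' field plus any
+ * padding, so storing the key "name" requests
+ * offsetof(object_key_t, key) + 4 + 1 bytes -- exactly enough for the
+ * serial number, the four characters and the terminating NUL.
+ */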
+
+int json_object_set_new(json_t *json, const char *key, json_t *value)
+{
+    if(!key || !utf8_check_string(key, -1))
+    {
+        json_decref(value);
+        return -1;
+    }
+
+    return json_object_set_new_nocheck(json, key, value);
+}
+
+int json_object_del(json_t *json, const char *key)
+{
+    json_object_t *object;
+
+    if(!json_is_object(json))
+        return -1;
+
+    object = json_to_object(json);
+    return hashtable_del(&object->hashtable, string_to_key(key));
+}
+
+int json_object_clear(json_t *json)
+{
+    json_object_t *object;
+
+    if(!json_is_object(json))
+        return -1;
+
+    object = json_to_object(json);
+    hashtable_clear(&object->hashtable);
+
+    return 0;
+}
+
+int json_object_update(json_t *object, json_t *other)
+{
+    void *iter;
+
+    if(!json_is_object(object) || !json_is_object(other))
+        return -1;
+
+    iter = json_object_iter(other);
+    while(iter) {
+        const char *key;
+        json_t *value;
+
+        key = json_object_iter_key(iter);
+        value = json_object_iter_value(iter);
+
+        if(json_object_set_nocheck(object, key, value))
+            return -1;
+
+        iter = json_object_iter_next(other, iter);
+    }
+
+    return 0;
+}
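+
+/* Illustrative usage sketch (editorial note, not part of the upstream
+ * sources): json_object_update() copies key/value pairs by reference,
+ * overwriting keys that already exist in the target.
+ *
+ *     json_t *defaults = json_pack("{s:i, s:i}", "retries", 3, "port", 80);
+ *     json_t *overrides = json_pack("{s:i}", "port", 8080);
+ *     json_object_update(defaults, overrides);
+ *     (defaults now maps "retries" to 3 and "port" to 8080)
+ *     json_decref(overrides);
+ *     json_decref(defaults);
+ */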
+
+void *json_object_iter(json_t *json)
+{
+    json_object_t *object;
+
+    if(!json_is_object(json))
+        return NULL;
+
+    object = json_to_object(json);
+    return hashtable_iter(&object->hashtable);
+}
+
+void *json_object_iter_at(json_t *json, const char *key)
+{
+    json_object_t *object;
+
+    if(!key || !json_is_object(json))
+        return NULL;
+
+    object = json_to_object(json);
+    return hashtable_iter_at(&object->hashtable, string_to_key(key));
+}
+
+void *json_object_iter_next(json_t *json, void *iter)
+{
+    json_object_t *object;
+
+    if(!json_is_object(json) || iter == NULL)
+        return NULL;
+
+    object = json_to_object(json);
+    return hashtable_iter_next(&object->hashtable, iter);
+}
+
+const object_key_t *jsonp_object_iter_fullkey(void *iter)
+{
+    if(!iter)
+        return NULL;
+
+    return (const object_key_t *) hashtable_iter_key(iter);
+}
+
+const char *json_object_iter_key(void *iter)
+{
+    if(!iter)
+        return NULL;
+
+    return jsonp_object_iter_fullkey(iter)->key;
+}
+
+json_t *json_object_iter_value(void *iter)
+{
+    if(!iter)
+        return NULL;
+
+    return (json_t *)hashtable_iter_value(iter);
+}
+
+int json_object_iter_set_new(json_t *json, void *iter, json_t *value)
+{
+    json_object_t *object;
+
+    if(!json_is_object(json) || !iter || !value)
+        return -1;
+
+    object = json_to_object(json);
+    hashtable_iter_set(&object->hashtable, iter, value);
+
+    return 0;
+}
+
+static int json_object_equal(json_t *object1, json_t *object2)
+{
+    void *iter;
+
+    if(json_object_size(object1) != json_object_size(object2))
+        return 0;
+
+    iter = json_object_iter(object1);
+    while(iter)
+    {
+        const char *key;
+        json_t *value1, *value2;
+
+        key = json_object_iter_key(iter);
+        value1 = json_object_iter_value(iter);
+        value2 = json_object_get(object2, key);
+
+        if(!json_equal(value1, value2))
+            return 0;
+
+        iter = json_object_iter_next(object1, iter);
+    }
+
+    return 1;
+}
+
+static json_t *json_object_copy(json_t *object)
+{
+    json_t *result;
+    void *iter;
+
+    result = json_object();
+    if(!result)
+        return NULL;
+
+    iter = json_object_iter(object);
+    while(iter)
+    {
+        const char *key;
+        json_t *value;
+
+        key = json_object_iter_key(iter);
+        value = json_object_iter_value(iter);
+        json_object_set_nocheck(result, key, value);
+
+        iter = json_object_iter_next(object, iter);
+    }
+
+    return result;
+}
+
+static json_t *json_object_deep_copy(json_t *object)
+{
+    json_t *result;
+    void *iter;
+
+    result = json_object();
+    if(!result)
+        return NULL;
+
+    iter = json_object_iter(object);
+    while(iter)
+    {
+        const char *key;
+        json_t *value;
+
+        key = json_object_iter_key(iter);
+        value = json_object_iter_value(iter);
+        json_object_set_new_nocheck(result, key, json_deep_copy(value));
+
+        iter = json_object_iter_next(object, iter);
+    }
+
+    return result;
+}
+
+
+/*** array ***/
+
+json_t *json_array(void)
+{
+    json_array_t *array = (json_array_t *) jsonp_malloc(sizeof(json_array_t));
+    if(!array)
+        return NULL;
+    json_init(&array->json, JSON_ARRAY);
+
+    array->entries = 0;
+    array->size = 8;
+
+    array->table = (json_t **) jsonp_malloc(array->size * sizeof(json_t *));
+    if(!array->table) {
+        jsonp_free(array);
+        return NULL;
+    }
+
+    array->visited = 0;
+
+    return &array->json;
+}
+
+static void json_delete_array(json_array_t *array)
+{
+    size_t i;
+
+    for(i = 0; i < array->entries; i++)
+        json_decref(array->table[i]);
+
+    jsonp_free(array->table);
+    jsonp_free(array);
+}
+
+size_t json_array_size(const json_t *json)
+{
+    if(!json_is_array(json))
+        return 0;
+
+    return json_to_array(json)->entries;
+}
+
+json_t *json_array_get(const json_t *json, size_t index)
+{
+    json_array_t *array;
+    if(!json_is_array(json))
+        return NULL;
+    array = json_to_array(json);
+
+    if(index >= array->entries)
+        return NULL;
+
+    return array->table[index];
+}
+
+int json_array_set_new(json_t *json, size_t index, json_t *value)
+{
+    json_array_t *array;
+
+    if(!value)
+        return -1;
+
+    if(!json_is_array(json) || json == value)
+    {
+        json_decref(value);
+        return -1;
+    }
+    array = json_to_array(json);
+
+    if(index >= array->entries)
+    {
+        json_decref(value);
+        return -1;
+    }
+
+    json_decref(array->table[index]);
+    array->table[index] = value;
+
+    return 0;
+}
+
+static void array_move(json_array_t *array, size_t dest,
+                       size_t src, size_t count)
+{
+    memmove(&array->table[dest], &array->table[src], count * sizeof(json_t *));
+}
+
+static void array_copy(json_t **dest, size_t dpos,
+                       json_t **src, size_t spos,
+                       size_t count)
+{
+    memcpy(&dest[dpos], &src[spos], count * sizeof(json_t *));
+}
+
+static json_t **json_array_grow(json_array_t *array,
+                                size_t amount,
+                                int copy)
+{
+    size_t new_size;
+    json_t **old_table, **new_table;
+
+    if(array->entries + amount <= array->size)
+        return array->table;
+
+    old_table = array->table;
+
+    new_size = max(array->size + amount, array->size * 2);
+    new_table = (json_t **) jsonp_malloc(new_size * sizeof(json_t *));
+    if(!new_table)
+        return NULL;
+
+    array->size = new_size;
+    array->table = new_table;
+
+    if(copy) {
+        array_copy(array->table, 0, old_table, 0, array->entries);
+        jsonp_free(old_table);
+        return array->table;
+    }
+
+    return old_table;
+}
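+
+/* Editorial note (not part of the upstream sources): growth doubles the
+ * table but never allocates less than is required. Appending a ninth
+ * element to a full 8-slot array gives new_size = max(8 + 1, 8 * 2) = 16,
+ * while extending the same array by 20 entries at once gives
+ * max(8 + 20, 16) = 28.
+ */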
+
+int json_array_append_new(json_t *json, json_t *value)
+{
+    json_array_t *array;
+
+    if(!value)
+        return -1;
+
+    if(!json_is_array(json) || json == value)
+    {
+        json_decref(value);
+        return -1;
+    }
+    array = json_to_array(json);
+
+    if(!json_array_grow(array, 1, 1)) {
+        json_decref(value);
+        return -1;
+    }
+
+    array->table[array->entries] = value;
+    array->entries++;
+
+    return 0;
+}
+
+int json_array_insert_new(json_t *json, size_t index, json_t *value)
+{
+    json_array_t *array;
+    json_t **old_table;
+
+    if(!value)
+        return -1;
+
+    if(!json_is_array(json) || json == value) {
+        json_decref(value);
+        return -1;
+    }
+    array = json_to_array(json);
+
+    if(index > array->entries) {
+        json_decref(value);
+        return -1;
+    }
+
+    old_table = json_array_grow(array, 1, 0);
+    if(!old_table) {
+        json_decref(value);
+        return -1;
+    }
+
+    if(old_table != array->table) {
+        array_copy(array->table, 0, old_table, 0, index);
+        array_copy(array->table, index + 1, old_table, index,
+                   array->entries - index);
+        jsonp_free(old_table);
+    }
+    else
+        array_move(array, index + 1, index, array->entries - index);
+
+    array->table[index] = value;
+    array->entries++;
+
+    return 0;
+}
+
+int json_array_remove(json_t *json, size_t index)
+{
+    json_array_t *array;
+
+    if(!json_is_array(json))
+        return -1;
+    array = json_to_array(json);
+
+    if(index >= array->entries)
+        return -1;
+
+    json_decref(array->table[index]);
+
+    array_move(array, index, index + 1, array->entries - index - 1);
+    array->entries--;
+
+    return 0;
+}
+
+int json_array_clear(json_t *json)
+{
+    json_array_t *array;
+    size_t i;
+
+    if(!json_is_array(json))
+        return -1;
+    array = json_to_array(json);
+
+    for(i = 0; i < array->entries; i++)
+        json_decref(array->table[i]);
+
+    array->entries = 0;
+    return 0;
+}
+
+int json_array_extend(json_t *json, json_t *other_json)
+{
+    json_array_t *array, *other;
+    size_t i;
+
+    if(!json_is_array(json) || !json_is_array(other_json))
+        return -1;
+    array = json_to_array(json);
+    other = json_to_array(other_json);
+
+    if(!json_array_grow(array, other->entries, 1))
+        return -1;
+
+    for(i = 0; i < other->entries; i++)
+        json_incref(other->table[i]);
+
+    array_copy(array->table, array->entries, other->table, 0, other->entries);
+
+    array->entries += other->entries;
+    return 0;
+}
+
+static int json_array_equal(json_t *array1, json_t *array2)
+{
+    size_t i, size;
+
+    size = json_array_size(array1);
+    if(size != json_array_size(array2))
+        return 0;
+
+    for(i = 0; i < size; i++)
+    {
+        json_t *value1, *value2;
+
+        value1 = json_array_get(array1, i);
+        value2 = json_array_get(array2, i);
+
+        if(!json_equal(value1, value2))
+            return 0;
+    }
+
+    return 1;
+}
+
+static json_t *json_array_copy(json_t *array)
+{
+    json_t *result;
+    size_t i;
+
+    result = json_array();
+    if(!result)
+        return NULL;
+
+    for(i = 0; i < json_array_size(array); i++)
+        json_array_append(result, json_array_get(array, i));
+
+    return result;
+}
+
+static json_t *json_array_deep_copy(json_t *array)
+{
+    json_t *result;
+    size_t i;
+
+    result = json_array();
+    if(!result)
+        return NULL;
+
+    for(i = 0; i < json_array_size(array); i++)
+        json_array_append_new(result, json_deep_copy(json_array_get(array, i)));
+
+    return result;
+}
+
+/*** string ***/
+
+json_t *json_string_nocheck(const char *value)
+{
+    json_string_t *string;
+
+    if(!value)
+        return NULL;
+
+    string = (json_string_t *) jsonp_malloc(sizeof(json_string_t));
+    if(!string)
+        return NULL;
+    json_init(&string->json, JSON_STRING);
+
+    string->value = jsonp_strdup(value);
+    if(!string->value) {
+        jsonp_free(string);
+        return NULL;
+    }
+
+    return &string->json;
+}
+
+json_t *json_string(const char *value)
+{
+    if(!value || !utf8_check_string(value, -1))
+        return NULL;
+
+    return json_string_nocheck(value);
+}
+
+const char *json_string_value(const json_t *json)
+{
+    if(!json_is_string(json))
+        return NULL;
+
+    return json_to_string(json)->value;
+}
+
+int json_string_set_nocheck(json_t *json, const char *value)
+{
+    char *dup;
+    json_string_t *string;
+
+    dup = jsonp_strdup(value);
+    if(!dup)
+        return -1;
+
+    string = json_to_string(json);
+    jsonp_free(string->value);
+    string->value = dup;
+
+    return 0;
+}
+
+int json_string_set(json_t *json, const char *value)
+{
+    if(!value || !utf8_check_string(value, -1))
+        return -1;
+
+    return json_string_set_nocheck(json, value);
+}
+
+static void json_delete_string(json_string_t *string)
+{
+    jsonp_free(string->value);
+    jsonp_free(string);
+}
+
+static int json_string_equal(json_t *string1, json_t *string2)
+{
+    return strcmp(json_string_value(string1), json_string_value(string2)) == 0;
+}
+
+static json_t *json_string_copy(json_t *string)
+{
+    return json_string_nocheck(json_string_value(string));
+}
+
+
+/*** integer ***/
+
+json_t *json_integer(json_int_t value)
+{
+    json_integer_t *integer = (json_integer_t *) jsonp_malloc(sizeof(json_integer_t));
+    if(!integer)
+        return NULL;
+    json_init(&integer->json, JSON_INTEGER);
+
+    integer->value = value;
+    return &integer->json;
+}
+
+json_int_t json_integer_value(const json_t *json)
+{
+    if(!json_is_integer(json))
+        return 0;
+
+    return json_to_integer(json)->value;
+}
+
+int json_integer_set(json_t *json, json_int_t value)
+{
+    if(!json_is_integer(json))
+        return -1;
+
+    json_to_integer(json)->value = value;
+
+    return 0;
+}
+
+static void json_delete_integer(json_integer_t *integer)
+{
+    jsonp_free(integer);
+}
+
+static int json_integer_equal(json_t *integer1, json_t *integer2)
+{
+    return json_integer_value(integer1) == json_integer_value(integer2);
+}
+
+static json_t *json_integer_copy(json_t *integer)
+{
+    return json_integer(json_integer_value(integer));
+}
+
+
+/*** real ***/
+
+json_t *json_real(double value)
+{
+    json_real_t *real = (json_real_t *) jsonp_malloc(sizeof(json_real_t));
+    if(!real)
+        return NULL;
+    json_init(&real->json, JSON_REAL);
+
+    real->value = value;
+    return &real->json;
+}
+
+double json_real_value(const json_t *json)
+{
+    if(!json_is_real(json))
+        return 0;
+
+    return json_to_real(json)->value;
+}
+
+int json_real_set(json_t *json, double value)
+{
+    if(!json_is_real(json))
+        return -1;
+
+    json_to_real(json)->value = value;
+
+    return 0;
+}
+
+static void json_delete_real(json_real_t *real)
+{
+    jsonp_free(real);
+}
+
+static int json_real_equal(json_t *real1, json_t *real2)
+{
+    return json_real_value(real1) == json_real_value(real2);
+}
+
+static json_t *json_real_copy(json_t *real)
+{
+    return json_real(json_real_value(real));
+}
+
+
+/*** number ***/
+
+double json_number_value(const json_t *json)
+{
+    if(json_is_integer(json))
+        return (double) json_integer_value(json);
+    else if(json_is_real(json))
+        return json_real_value(json);
+    else
+        return 0.0;
+}
+
+
+/*** simple values ***/
+
+json_t *json_true(void)
+{
+    static json_t the_true = {JSON_TRUE, (size_t)-1};
+    return &the_true;
+}
+
+
+json_t *json_false(void)
+{
+    static json_t the_false = {JSON_FALSE, (size_t)-1};
+    return &the_false;
+}
+
+
+json_t *json_null(void)
+{
+    static json_t the_null = {JSON_NULL, (size_t)-1};
+    return &the_null;
+}
+
+
+/*** deletion ***/
+
+void json_delete(json_t *json)
+{
+    if(json_is_object(json))
+        json_delete_object(json_to_object(json));
+
+    else if(json_is_array(json))
+        json_delete_array(json_to_array(json));
+
+    else if(json_is_string(json))
+        json_delete_string(json_to_string(json));
+
+    else if(json_is_integer(json))
+        json_delete_integer(json_to_integer(json));
+
+    else if(json_is_real(json))
+        json_delete_real(json_to_real(json));
+
+    /* json_delete is not called for true, false or null */
+}
+
+
+/*** equality ***/
+
+int json_equal(json_t *json1, json_t *json2)
+{
+    if(!json1 || !json2)
+        return 0;
+
+    if(json_typeof(json1) != json_typeof(json2))
+        return 0;
+
+    /* this covers true, false and null as they are singletons */
+    if(json1 == json2)
+        return 1;
+
+    if(json_is_object(json1))
+        return json_object_equal(json1, json2);
+
+    if(json_is_array(json1))
+        return json_array_equal(json1, json2);
+
+    if(json_is_string(json1))
+        return json_string_equal(json1, json2);
+
+    if(json_is_integer(json1))
+        return json_integer_equal(json1, json2);
+
+    if(json_is_real(json1))
+        return json_real_equal(json1, json2);
+
+    return 0;
+}
+
+
+/*** copying ***/
+
+json_t *json_copy(json_t *json)
+{
+    if(!json)
+        return NULL;
+
+    if(json_is_object(json))
+        return json_object_copy(json);
+
+    if(json_is_array(json))
+        return json_array_copy(json);
+
+    if(json_is_string(json))
+        return json_string_copy(json);
+
+    if(json_is_integer(json))
+        return json_integer_copy(json);
+
+    if(json_is_real(json))
+        return json_real_copy(json);
+
+    if(json_is_true(json) || json_is_false(json) || json_is_null(json))
+        return json;
+
+    return NULL;
+}
+
+json_t *json_deep_copy(json_t *json)
+{
+    if(!json)
+        return NULL;
+
+    if(json_is_object(json))
+        return json_object_deep_copy(json);
+
+    if(json_is_array(json))
+        return json_array_deep_copy(json);
+
+    /* for the rest of the types, deep copying doesn't differ from
+       shallow copying */
+
+    if(json_is_string(json))
+        return json_string_copy(json);
+
+    if(json_is_integer(json))
+        return json_integer_copy(json);
+
+    if(json_is_real(json))
+        return json_real_copy(json);
+
+    if(json_is_true(json) || json_is_false(json) || json_is_null(json))
+        return json;
+
+    return NULL;
+}
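+
+/* Illustrative usage sketch (editorial note, not part of the upstream
+ * sources): json_copy() shares nested values by reference, while
+ * json_deep_copy() duplicates them recursively.
+ *
+ *     json_t *orig = json_pack("{s:[i]}", "xs", 1);
+ *     json_t *shallow = json_copy(orig);
+ *     json_t *deep = json_deep_copy(orig);
+ *
+ *     json_object_get(shallow, "xs") == json_object_get(orig, "xs")   (same array)
+ *     json_object_get(deep, "xs") != json_object_get(orig, "xs")      (fresh array)
+ */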
diff --git a/lang/c/jansson/test/.gitignore b/lang/c/jansson/test/.gitignore
new file mode 100644
index 0000000..0de6db6
--- /dev/null
+++ b/lang/c/jansson/test/.gitignore
@@ -0,0 +1,13 @@
+bin/json_process
+suites/api/test_array
+suites/api/test_copy
+suites/api/test_dump
+suites/api/test_equal
+suites/api/test_load
+suites/api/test_loadb
+suites/api/test_memory_funcs
+suites/api/test_number
+suites/api/test_object
+suites/api/test_pack
+suites/api/test_simple
+suites/api/test_unpack
diff --git a/lang/c/jansson/test/Makefile.am b/lang/c/jansson/test/Makefile.am
new file mode 100644
index 0000000..86d1614
--- /dev/null
+++ b/lang/c/jansson/test/Makefile.am
@@ -0,0 +1,10 @@
+SUBDIRS = bin suites
+EXTRA_DIST = scripts run-suites
+
+TESTS = run-suites
+TESTS_ENVIRONMENT = \
+	top_srcdir=$(top_srcdir) \
+	top_builddir=$(top_builddir)
+
+clean-local:
+	rm -rf logs
diff --git a/lang/c/jansson/test/Makefile.in b/lang/c/jansson/test/Makefile.in
new file mode 100644
index 0000000..822a1ec
--- /dev/null
+++ b/lang/c/jansson/test/Makefile.in
@@ -0,0 +1,569 @@
+# Makefile.in generated by automake 1.10 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006  Free Software Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+subdir = test
+DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+	$(ACLOCAL_M4)
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = $(top_builddir)/config.h
+CONFIG_CLEAN_FILES =
+SOURCES =
+DIST_SOURCES =
+RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \
+	html-recursive info-recursive install-data-recursive \
+	install-dvi-recursive install-exec-recursive \
+	install-html-recursive install-info-recursive \
+	install-pdf-recursive install-ps-recursive install-recursive \
+	installcheck-recursive installdirs-recursive pdf-recursive \
+	ps-recursive uninstall-recursive
+RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive	\
+  distclean-recursive maintainer-clean-recursive
+ETAGS = etags
+CTAGS = ctags
+DIST_SUBDIRS = $(SUBDIRS)
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+LD = @LD@
+LDFLAGS = @LDFLAGS@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+MAKEINFO = @MAKEINFO@
+MKDIR_P = @MKDIR_P@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+RANLIB = @RANLIB@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+json_have_long_long = @json_have_long_long@
+json_inline = @json_inline@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+SUBDIRS = bin suites
+EXTRA_DIST = scripts run-suites
+TESTS = run-suites
+TESTS_ENVIRONMENT = \
+	top_srcdir=$(top_srcdir) \
+	top_builddir=$(top_builddir)
+
+all: all-recursive
+
+.SUFFIXES:
+$(srcdir)/Makefile.in:  $(srcdir)/Makefile.am  $(am__configure_deps)
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+	      cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \
+		&& exit 0; \
+	      exit 1;; \
+	  esac; \
+	done; \
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign  test/Makefile'; \
+	cd $(top_srcdir) && \
+	  $(AUTOMAKE) --foreign  test/Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+	@case '$?' in \
+	  *config.status*) \
+	    cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+	  *) \
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
+	esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure:  $(am__configure_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4):  $(am__aclocal_m4_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-libtool:
+	-rm -rf .libs _libs
+
+# This directory's subdirectories are mostly independent; you can cd
+# into them and run `make' without going through this Makefile.
+# To change the values of `make' variables: instead of editing Makefiles,
+# (1) if the variable is set in `config.status', edit `config.status'
+#     (which will cause the Makefiles to be regenerated when you run `make');
+# (2) otherwise, pass the desired values on the `make' command line.
+$(RECURSIVE_TARGETS):
+	@failcom='exit 1'; \
+	for f in x $$MAKEFLAGS; do \
+	  case $$f in \
+	    *=* | --[!k]*);; \
+	    *k*) failcom='fail=yes';; \
+	  esac; \
+	done; \
+	dot_seen=no; \
+	target=`echo $@ | sed s/-recursive//`; \
+	list='$(SUBDIRS)'; for subdir in $$list; do \
+	  echo "Making $$target in $$subdir"; \
+	  if test "$$subdir" = "."; then \
+	    dot_seen=yes; \
+	    local_target="$$target-am"; \
+	  else \
+	    local_target="$$target"; \
+	  fi; \
+	  (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
+	  || eval $$failcom; \
+	done; \
+	if test "$$dot_seen" = "no"; then \
+	  $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \
+	fi; test -z "$$fail"
+
+$(RECURSIVE_CLEAN_TARGETS):
+	@failcom='exit 1'; \
+	for f in x $$MAKEFLAGS; do \
+	  case $$f in \
+	    *=* | --[!k]*);; \
+	    *k*) failcom='fail=yes';; \
+	  esac; \
+	done; \
+	dot_seen=no; \
+	case "$@" in \
+	  distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \
+	  *) list='$(SUBDIRS)' ;; \
+	esac; \
+	rev=''; for subdir in $$list; do \
+	  if test "$$subdir" = "."; then :; else \
+	    rev="$$subdir $$rev"; \
+	  fi; \
+	done; \
+	rev="$$rev ."; \
+	target=`echo $@ | sed s/-recursive//`; \
+	for subdir in $$rev; do \
+	  echo "Making $$target in $$subdir"; \
+	  if test "$$subdir" = "."; then \
+	    local_target="$$target-am"; \
+	  else \
+	    local_target="$$target"; \
+	  fi; \
+	  (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
+	  || eval $$failcom; \
+	done && test -z "$$fail"
+tags-recursive:
+	list='$(SUBDIRS)'; for subdir in $$list; do \
+	  test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \
+	done
+ctags-recursive:
+	list='$(SUBDIRS)'; for subdir in $$list; do \
+	  test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \
+	done
+
+ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
+	list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	mkid -fID $$unique
+tags: TAGS
+
+TAGS: tags-recursive $(HEADERS) $(SOURCES)  $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \
+	  include_option=--etags-include; \
+	  empty_fix=.; \
+	else \
+	  include_option=--include; \
+	  empty_fix=; \
+	fi; \
+	list='$(SUBDIRS)'; for subdir in $$list; do \
+	  if test "$$subdir" = .; then :; else \
+	    test ! -f $$subdir/TAGS || \
+	      tags="$$tags $$include_option=$$here/$$subdir/TAGS"; \
+	  fi; \
+	done; \
+	list='$(SOURCES) $(HEADERS)  $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \
+	  test -n "$$unique" || unique=$$empty_fix; \
+	  $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	    $$tags $$unique; \
+	fi
+ctags: CTAGS
+CTAGS: ctags-recursive $(HEADERS) $(SOURCES)  $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	list='$(SOURCES) $(HEADERS)  $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	test -z "$(CTAGS_ARGS)$$tags$$unique" \
+	  || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+	     $$tags $$unique
+
+GTAGS:
+	here=`$(am__cd) $(top_builddir) && pwd` \
+	  && cd $(top_srcdir) \
+	  && gtags -i $(GTAGS_ARGS) $$here
+
+distclean-tags:
+	-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+check-TESTS: $(TESTS)
+	@failed=0; all=0; xfail=0; xpass=0; skip=0; ws='[	 ]'; \
+	srcdir=$(srcdir); export srcdir; \
+	list=' $(TESTS) '; \
+	if test -n "$$list"; then \
+	  for tst in $$list; do \
+	    if test -f ./$$tst; then dir=./; \
+	    elif test -f $$tst; then dir=; \
+	    else dir="$(srcdir)/"; fi; \
+	    if $(TESTS_ENVIRONMENT) $${dir}$$tst; then \
+	      all=`expr $$all + 1`; \
+	      case " $(XFAIL_TESTS) " in \
+	      *$$ws$$tst$$ws*) \
+		xpass=`expr $$xpass + 1`; \
+		failed=`expr $$failed + 1`; \
+		echo "XPASS: $$tst"; \
+	      ;; \
+	      *) \
+		echo "PASS: $$tst"; \
+	      ;; \
+	      esac; \
+	    elif test $$? -ne 77; then \
+	      all=`expr $$all + 1`; \
+	      case " $(XFAIL_TESTS) " in \
+	      *$$ws$$tst$$ws*) \
+		xfail=`expr $$xfail + 1`; \
+		echo "XFAIL: $$tst"; \
+	      ;; \
+	      *) \
+		failed=`expr $$failed + 1`; \
+		echo "FAIL: $$tst"; \
+	      ;; \
+	      esac; \
+	    else \
+	      skip=`expr $$skip + 1`; \
+	      echo "SKIP: $$tst"; \
+	    fi; \
+	  done; \
+	  if test "$$failed" -eq 0; then \
+	    if test "$$xfail" -eq 0; then \
+	      banner="All $$all tests passed"; \
+	    else \
+	      banner="All $$all tests behaved as expected ($$xfail expected failures)"; \
+	    fi; \
+	  else \
+	    if test "$$xpass" -eq 0; then \
+	      banner="$$failed of $$all tests failed"; \
+	    else \
+	      banner="$$failed of $$all tests did not behave as expected ($$xpass unexpected passes)"; \
+	    fi; \
+	  fi; \
+	  dashes="$$banner"; \
+	  skipped=""; \
+	  if test "$$skip" -ne 0; then \
+	    skipped="($$skip tests were not run)"; \
+	    test `echo "$$skipped" | wc -c` -le `echo "$$banner" | wc -c` || \
+	      dashes="$$skipped"; \
+	  fi; \
+	  report=""; \
+	  if test "$$failed" -ne 0 && test -n "$(PACKAGE_BUGREPORT)"; then \
+	    report="Please report to $(PACKAGE_BUGREPORT)"; \
+	    test `echo "$$report" | wc -c` -le `echo "$$banner" | wc -c` || \
+	      dashes="$$report"; \
+	  fi; \
+	  dashes=`echo "$$dashes" | sed s/./=/g`; \
+	  echo "$$dashes"; \
+	  echo "$$banner"; \
+	  test -z "$$skipped" || echo "$$skipped"; \
+	  test -z "$$report" || echo "$$report"; \
+	  echo "$$dashes"; \
+	  test "$$failed" -eq 0; \
+	else :; fi
+
+distdir: $(DISTFILES)
+	@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	list='$(DISTFILES)'; \
+	  dist_files=`for file in $$list; do echo $$file; done | \
+	  sed -e "s|^$$srcdirstrip/||;t" \
+	      -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+	case $$dist_files in \
+	  */*) $(MKDIR_P) `echo "$$dist_files" | \
+			   sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+			   sort -u` ;; \
+	esac; \
+	for file in $$dist_files; do \
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+	  if test -d $$d/$$file; then \
+	    dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \
+	    fi; \
+	    cp -pR $$d/$$file $(distdir)$$dir || exit 1; \
+	  else \
+	    test -f $(distdir)/$$file \
+	    || cp -p $$d/$$file $(distdir)/$$file \
+	    || exit 1; \
+	  fi; \
+	done
+	list='$(DIST_SUBDIRS)'; for subdir in $$list; do \
+	  if test "$$subdir" = .; then :; else \
+	    test -d "$(distdir)/$$subdir" \
+	    || $(MKDIR_P) "$(distdir)/$$subdir" \
+	    || exit 1; \
+	    distdir=`$(am__cd) $(distdir) && pwd`; \
+	    top_distdir=`$(am__cd) $(top_distdir) && pwd`; \
+	    (cd $$subdir && \
+	      $(MAKE) $(AM_MAKEFLAGS) \
+	        top_distdir="$$top_distdir" \
+	        distdir="$$distdir/$$subdir" \
+		am__remove_distdir=: \
+		am__skip_length_check=: \
+	        distdir) \
+	      || exit 1; \
+	  fi; \
+	done
+check-am: all-am
+	$(MAKE) $(AM_MAKEFLAGS) check-TESTS
+check: check-recursive
+all-am: Makefile
+installdirs: installdirs-recursive
+installdirs-am:
+install: install-recursive
+install-exec: install-exec-recursive
+install-data: install-data-recursive
+uninstall: uninstall-recursive
+
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-recursive
+install-strip:
+	$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	  install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	  `test -z '$(STRIP)' || \
+	    echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+
+maintainer-clean-generic:
+	@echo "This command is intended for maintainers to use"
+	@echo "it deletes files that may require special tools to rebuild."
+clean: clean-recursive
+
+clean-am: clean-generic clean-libtool clean-local mostlyclean-am
+
+distclean: distclean-recursive
+	-rm -f Makefile
+distclean-am: clean-am distclean-generic distclean-tags
+
+dvi: dvi-recursive
+
+dvi-am:
+
+html: html-recursive
+
+info: info-recursive
+
+info-am:
+
+install-data-am:
+
+install-dvi: install-dvi-recursive
+
+install-exec-am:
+
+install-html: install-html-recursive
+
+install-info: install-info-recursive
+
+install-man:
+
+install-pdf: install-pdf-recursive
+
+install-ps: install-ps-recursive
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-recursive
+	-rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-recursive
+
+mostlyclean-am: mostlyclean-generic mostlyclean-libtool
+
+pdf: pdf-recursive
+
+pdf-am:
+
+ps: ps-recursive
+
+ps-am:
+
+uninstall-am:
+
+.MAKE: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) install-am \
+	install-strip
+
+.PHONY: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) CTAGS GTAGS \
+	all all-am check check-TESTS check-am clean clean-generic \
+	clean-libtool clean-local ctags ctags-recursive distclean \
+	distclean-generic distclean-libtool distclean-tags distdir dvi \
+	dvi-am html html-am info info-am install install-am \
+	install-data install-data-am install-dvi install-dvi-am \
+	install-exec install-exec-am install-html install-html-am \
+	install-info install-info-am install-man install-pdf \
+	install-pdf-am install-ps install-ps-am install-strip \
+	installcheck installcheck-am installdirs installdirs-am \
+	maintainer-clean maintainer-clean-generic mostlyclean \
+	mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
+	tags tags-recursive uninstall uninstall-am
+
+
+clean-local:
+	rm -rf logs
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/lang/c/jansson/test/bin/Makefile.am b/lang/c/jansson/test/bin/Makefile.am
new file mode 100644
index 0000000..346db5d
--- /dev/null
+++ b/lang/c/jansson/test/bin/Makefile.am
@@ -0,0 +1,6 @@
+check_PROGRAMS = json_process
+
+AM_CPPFLAGS = -I$(top_srcdir)/src
+AM_CFLAGS = -Wall -Werror
+LDFLAGS = -static  # for speed and Valgrind
+LDADD = $(top_builddir)/src/libjansson.la
diff --git a/lang/c/jansson/test/bin/Makefile.in b/lang/c/jansson/test/bin/Makefile.in
new file mode 100644
index 0000000..736e2c8
--- /dev/null
+++ b/lang/c/jansson/test/bin/Makefile.in
@@ -0,0 +1,439 @@
+# Makefile.in generated by automake 1.10 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006  Free Software Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+check_PROGRAMS = json_process$(EXEEXT)
+subdir = test/bin
+DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+	$(ACLOCAL_M4)
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = $(top_builddir)/config.h
+CONFIG_CLEAN_FILES =
+json_process_SOURCES = json_process.c
+json_process_OBJECTS = json_process.$(OBJEXT)
+json_process_LDADD = $(LDADD)
+json_process_DEPENDENCIES = $(top_builddir)/src/libjansson.la
+DEFAULT_INCLUDES = -I. -I$(top_builddir)@am__isrc@
+depcomp = $(SHELL) $(top_srcdir)/depcomp
+am__depfiles_maybe = depfiles
+COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+	$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+LTCOMPILE = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
+	--mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
+	$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+CCLD = $(CC)
+LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
+	--mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \
+	$(LDFLAGS) -o $@
+SOURCES = json_process.c
+DIST_SOURCES = json_process.c
+ETAGS = etags
+CTAGS = ctags
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+LD = @LD@
+LDFLAGS = -static  # for speed and Valgrind
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+MAKEINFO = @MAKEINFO@
+MKDIR_P = @MKDIR_P@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+RANLIB = @RANLIB@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+json_have_long_long = @json_have_long_long@
+json_inline = @json_inline@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+AM_CPPFLAGS = -I$(top_srcdir)/src
+AM_CFLAGS = -Wall -Werror
+LDADD = $(top_builddir)/src/libjansson.la
+all: all-am
+
+.SUFFIXES:
+.SUFFIXES: .c .lo .o .obj
+$(srcdir)/Makefile.in:  $(srcdir)/Makefile.am  $(am__configure_deps)
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+	      cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \
+		&& exit 0; \
+	      exit 1;; \
+	  esac; \
+	done; \
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign  test/bin/Makefile'; \
+	cd $(top_srcdir) && \
+	  $(AUTOMAKE) --foreign  test/bin/Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+	@case '$?' in \
+	  *config.status*) \
+	    cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+	  *) \
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
+	esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure:  $(am__configure_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4):  $(am__aclocal_m4_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+clean-checkPROGRAMS:
+	@list='$(check_PROGRAMS)'; for p in $$list; do \
+	  f=`echo $$p|sed 's/$(EXEEXT)$$//'`; \
+	  echo " rm -f $$p $$f"; \
+	  rm -f $$p $$f ; \
+	done
+json_process$(EXEEXT): $(json_process_OBJECTS) $(json_process_DEPENDENCIES) 
+	@rm -f json_process$(EXEEXT)
+	$(LINK) $(json_process_OBJECTS) $(json_process_LDADD) $(LIBS)
+
+mostlyclean-compile:
+	-rm -f *.$(OBJEXT)
+
+distclean-compile:
+	-rm -f *.tab.c
+
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/json_process.Po@am__quote@
+
+.c.o:
+@am__fastdepCC_TRUE@	$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCC_TRUE@	mv -f $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(COMPILE) -c $<
+
+.c.obj:
+@am__fastdepCC_TRUE@	$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
+@am__fastdepCC_TRUE@	mv -f $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(COMPILE) -c `$(CYGPATH_W) '$<'`
+
+.c.lo:
+@am__fastdepCC_TRUE@	$(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCC_TRUE@	mv -f $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LTCOMPILE) -c -o $@ $<
+
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-libtool:
+	-rm -rf .libs _libs
+
+ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
+	list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	mkid -fID $$unique
+tags: TAGS
+
+TAGS:  $(HEADERS) $(SOURCES)  $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	list='$(SOURCES) $(HEADERS)  $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \
+	  test -n "$$unique" || unique=$$empty_fix; \
+	  $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	    $$tags $$unique; \
+	fi
+ctags: CTAGS
+CTAGS:  $(HEADERS) $(SOURCES)  $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	list='$(SOURCES) $(HEADERS)  $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	test -z "$(CTAGS_ARGS)$$tags$$unique" \
+	  || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+	     $$tags $$unique
+
+GTAGS:
+	here=`$(am__cd) $(top_builddir) && pwd` \
+	  && cd $(top_srcdir) \
+	  && gtags -i $(GTAGS_ARGS) $$here
+
+distclean-tags:
+	-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(DISTFILES)
+	@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	list='$(DISTFILES)'; \
+	  dist_files=`for file in $$list; do echo $$file; done | \
+	  sed -e "s|^$$srcdirstrip/||;t" \
+	      -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+	case $$dist_files in \
+	  */*) $(MKDIR_P) `echo "$$dist_files" | \
+			   sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+			   sort -u` ;; \
+	esac; \
+	for file in $$dist_files; do \
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+	  if test -d $$d/$$file; then \
+	    dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \
+	    fi; \
+	    cp -pR $$d/$$file $(distdir)$$dir || exit 1; \
+	  else \
+	    test -f $(distdir)/$$file \
+	    || cp -p $$d/$$file $(distdir)/$$file \
+	    || exit 1; \
+	  fi; \
+	done
+check-am: all-am
+	$(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS)
+check: check-am
+all-am: Makefile
+installdirs:
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+	$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	  install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	  `test -z '$(STRIP)' || \
+	    echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+
+maintainer-clean-generic:
+	@echo "This command is intended for maintainers to use"
+	@echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-checkPROGRAMS clean-generic clean-libtool \
+	mostlyclean-am
+
+distclean: distclean-am
+	-rm -rf ./$(DEPDIR)
+	-rm -f Makefile
+distclean-am: clean-am distclean-compile distclean-generic \
+	distclean-tags
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+info: info-am
+
+info-am:
+
+install-data-am:
+
+install-dvi: install-dvi-am
+
+install-exec-am:
+
+install-html: install-html-am
+
+install-info: install-info-am
+
+install-man:
+
+install-pdf: install-pdf-am
+
+install-ps: install-ps-am
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+	-rm -rf ./$(DEPDIR)
+	-rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-compile mostlyclean-generic \
+	mostlyclean-libtool
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am:
+
+.MAKE: install-am install-strip
+
+.PHONY: CTAGS GTAGS all all-am check check-am clean \
+	clean-checkPROGRAMS clean-generic clean-libtool ctags \
+	distclean distclean-compile distclean-generic \
+	distclean-libtool distclean-tags distdir dvi dvi-am html \
+	html-am info info-am install install-am install-data \
+	install-data-am install-dvi install-dvi-am install-exec \
+	install-exec-am install-html install-html-am install-info \
+	install-info-am install-man install-pdf install-pdf-am \
+	install-ps install-ps-am install-strip installcheck \
+	installcheck-am installdirs maintainer-clean \
+	maintainer-clean-generic mostlyclean mostlyclean-compile \
+	mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
+	tags uninstall uninstall-am
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/lang/c/jansson/test/bin/json_process.c b/lang/c/jansson/test/bin/json_process.c
new file mode 100644
index 0000000..1ed0c57
--- /dev/null
+++ b/lang/c/jansson/test/bin/json_process.c
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri@digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <ctype.h>
+#include <jansson.h>
+
+static int getenv_int(const char *name)
+{
+    char *value, *end;
+    long result;
+
+    value = getenv(name);
+    if(!value)
+        return 0;
+
+    result = strtol(value, &end, 10);
+    if(*end != '\0')
+        return 0;
+
+    return (int)result;
+}
+
+/* Return a pointer to the first non-whitespace character of str.
+   Modifies str so that all trailing whitespace characters are
+   replaced by '\0'. */
+static const char *strip(char *str)
+{
+    size_t length;
+    char *result = str;
+    while(*result && isspace(*result))
+        result++;
+
+    length = strlen(result);
+    if(length == 0)
+        return result;
+
+    while(isspace(result[length - 1]))
+        result[--length] = '\0';
+
+    return result;
+}
+
+int main(int argc, char *argv[])
+{
+    int indent = 0;
+    size_t flags = 0;
+
+    json_t *json;
+    json_error_t error;
+
+    if(argc != 1) {
+        fprintf(stderr, "usage: %s\n", argv[0]);
+        return 2;
+    }
+
+    indent = getenv_int("JSON_INDENT");
+    if(indent < 0 || indent > 255) {
+        fprintf(stderr, "invalid value for JSON_INDENT: %d\n", indent);
+        return 2;
+    }
+
+    if(indent > 0)
+        flags |= JSON_INDENT(indent);
+
+    if(getenv_int("JSON_COMPACT") > 0)
+        flags |= JSON_COMPACT;
+
+    if(getenv_int("JSON_ENSURE_ASCII"))
+        flags |= JSON_ENSURE_ASCII;
+
+    if(getenv_int("JSON_PRESERVE_ORDER"))
+        flags |= JSON_PRESERVE_ORDER;
+
+    if(getenv_int("JSON_SORT_KEYS"))
+        flags |= JSON_SORT_KEYS;
+
+    if(getenv_int("STRIP")) {
+        /* Load to memory, strip leading and trailing whitespace */
+        size_t size = 0, used = 0;
+        char *buffer = NULL;
+
+        while(1) {
+            int count;
+
+            size = (size == 0 ? 128 : size * 2);
+            buffer = realloc(buffer, size);
+            if(!buffer) {
+                fprintf(stderr, "Unable to allocate %d bytes\n", (int)size);
+                return 1;
+            }
+
+            count = fread(buffer + used, 1, size - used, stdin);
+            if(count < size - used) {
+                buffer[used + count] = '\0';
+                break;
+            }
+            used += count;
+        }
+
+        json = json_loads(strip(buffer), 0, &error);
+        free(buffer);
+    }
+    else
+        json = json_loadf(stdin, 0, &error);
+
+    if(!json) {
+        fprintf(stderr, "%d %d %d\n%s\n",
+                error.line, error.column, error.position,
+                error.text);
+        return 1;
+    }
+
+    json_dumpf(json, stdout, flags);
+    json_decref(json);
+
+    return 0;
+}
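
A note on the helper above: json_process reads one JSON document from
stdin, builds jansson's encoding flags purely from environment
variables, and writes the re-encoded document to stdout (on a parse
failure it prints the line/column/position triple plus the error text
and exits 1). A minimal sketch of driving it by hand, where the input
file name is only a placeholder:

    JSON_INDENT=4 JSON_SORT_KEYS=1 ./test/bin/json_process < input.json
    STRIP=1 ./test/bin/json_process < input.json  # trim surrounding whitespace first
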
diff --git a/lang/c/jansson/test/run-suites b/lang/c/jansson/test/run-suites
new file mode 100755
index 0000000..ea35424
--- /dev/null
+++ b/lang/c/jansson/test/run-suites
@@ -0,0 +1,49 @@
+#!/bin/sh
+
+while [ -n "$1" ]; do
+    suite=$1
+    if [ -x $top_srcdir/test/suites/$suite/run ]; then
+        SUITES="$SUITES $suite"
+    else
+        echo "No such suite: $suite"
+        exit 1
+    fi
+    shift
+done
+
+if [ -z "$SUITES" ]; then
+    suitedirs=$top_srcdir/test/suites/*
+    for suitedir in $suitedirs; do
+        if [ -d $suitedir ]; then
+            SUITES="$SUITES `basename $suitedir`"
+        fi
+    done
+fi
+
+[ -z "$STOP" ] && STOP=0
+
+export suites_srcdir=$top_srcdir/test/suites
+export suites_builddir=suites
+export scriptdir=$top_srcdir/test/scripts
+export logdir=logs
+export bindir=bin
+
+passed=0
+failed=0
+for suite in $SUITES; do
+    echo "Suite: $suite"
+    if $suites_srcdir/$suite/run $suite; then
+        passed=$(($passed+1))
+    else
+        failed=$(($failed+1))
+        [ $STOP -eq 1 ] && break
+    fi
+done
+
+if [ $failed -gt 0 ]; then
+    echo "$failed of $((passed+failed)) test suites failed"
+    exit 1
+else
+    echo "$passed test suites passed"
+    rm -rf $logdir
+fi
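
For orientation: run-suites expects $top_srcdir in the environment (the
test Makefile normally exports it before calling the script), runs each
suite's run script in turn, and removes the logs directory only when
every suite passed. A rough sketch of invoking it directly, with the
relative path to the source tree assumed:

    cd test
    top_srcdir=.. ./run-suites valid invalid   # run only the named suites
    STOP=1 top_srcdir=.. ./run-suites          # run everything, stop at first failure
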
diff --git a/lang/c/jansson/test/scripts/run-tests.sh b/lang/c/jansson/test/scripts/run-tests.sh
new file mode 100644
index 0000000..adf34d4
--- /dev/null
+++ b/lang/c/jansson/test/scripts/run-tests.sh
@@ -0,0 +1,89 @@
+# Copyright (c) 2009-2011 Petri Lehtinen <petri@digip.org>
+#
+# Jansson is free software; you can redistribute it and/or modify
+# it under the terms of the MIT license. See LICENSE for details.
+
+json_process=$bindir/json_process
+
+suite_name=$1
+suite_srcdir=$suites_srcdir/$suite_name
+suite_builddir=$suites_builddir/$suite_name
+suite_log=$logdir/$suite_name
+
+
+[ -z "$VERBOSE" ] && VERBOSE=0
+[ -z "$STOP" ] && STOP=0
+
+. $scriptdir/valgrind.sh
+
+rm -rf $suite_log
+mkdir -p $suite_log
+
+for test_path in $suite_srcdir/*; do
+    test_name=$(basename $test_path)
+    test_builddir=$suite_builddir/$test_name
+    test_log=$suite_log/$test_name
+
+    [ "$test_name" = "run" ] && continue
+    is_test || continue
+
+    rm -rf $test_log
+    mkdir -p $test_log
+    if [ $VERBOSE -eq 1 ]; then
+        printf '%s... ' "$test_name"
+    fi
+
+    run_test
+    case $? in
+        0)
+            # Success
+            if [ $VERBOSE -eq 1 ]; then
+                printf 'ok\n'
+            else
+                printf '.'
+            fi
+            rm -rf $test_log
+            ;;
+
+        77)
+            # Skip
+            if [ $VERBOSE -eq 1 ]; then
+                printf 'skipped\n'
+            else
+                printf 'S'
+            fi
+            rm -rf $test_log
+            ;;
+
+        *)
+            # Failure
+            if [ $VERBOSE -eq 1 ]; then
+                printf 'FAILED\n'
+            else
+                printf 'F'
+            fi
+
+            [ $STOP -eq 1 ] && break
+            ;;
+    esac
+done
+
+if [ $VERBOSE -eq 0 ]; then
+    printf '\n'
+fi
+
+if [ -n "$(ls -A $suite_log)" ]; then
+    for test_log in $suite_log/*; do
+        test_name=$(basename $test_log)
+        test_path=$suite_srcdir/$test_name
+        echo "================================================================="
+        echo "$suite_name/$test_name"
+        echo "================================================================="
+        show_error
+        echo
+    done
+    echo "================================================================="
+    exit 1
+else
+    rm -rf $suite_log
+fi
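
The loop above treats run_test's exit status as the whole protocol: 0
is a pass, 77 is a skip (the same value automake's own test drivers use
for skipped tests), and anything else is a failure whose log directory
is kept for the summary printed at the end. A minimal sketch of a
suite-level run_test honoring that protocol, with the feature probe
purely hypothetical:

    run_test() {
        # skip when an optional prerequisite is missing (hypothetical probe)
        command -v some_optional_tool >/dev/null 2>&1 || return 77
        $test_runner $suite_builddir/$test_name \
            >$test_log/stdout 2>$test_log/stderr
    }
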
diff --git a/lang/c/jansson/test/scripts/valgrind.sh b/lang/c/jansson/test/scripts/valgrind.sh
new file mode 100644
index 0000000..0eb39f4
--- /dev/null
+++ b/lang/c/jansson/test/scripts/valgrind.sh
@@ -0,0 +1,35 @@
+# Copyright (c) 2009-2011 Petri Lehtinen <petri@digip.org>
+#
+# Jansson is free software; you can redistribute it and/or modify
+# it under the terms of the MIT license. See LICENSE for details.
+
+[ -z "$VALGRIND" ] && VALGRIND=0
+
+VALGRIND_CMDLINE="valgrind --leak-check=full --show-reachable=yes --track-origins=yes -q"
+
+if [ $VALGRIND -eq 1 ]; then
+    test_runner="$VALGRIND_CMDLINE"
+    json_process="$VALGRIND_CMDLINE $json_process"
+else
+    test_runner=""
+fi
+
+valgrind_check() {
+    if [ $VALGRIND -eq 1 ]; then
+        # Check for Valgrind error output. The valgrind option
+        # --error-exitcode is not enough because Valgrind doesn't
+        # think unfreed allocs are errors.
+        if grep -E -q '^==[0-9]+== ' $1; then
+            touch $test_log/valgrind_error
+            return 1
+        fi
+    fi
+}
+
+valgrind_show_error() {
+    if [ $VALGRIND -eq 1 -a -f $test_log/valgrind_error ]; then
+        echo "valgrind detected an error"
+        return 0
+    fi
+    return 1
+}
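
Putting the pieces together: setting VALGRIND=1 makes the harness
prepend the command line above to every test binary, and valgrind_check
then greps the captured stderr because, as the comment notes,
--error-exitcode alone would miss unfreed allocations; with -q,
Valgrind prints nothing at all unless it has an error or leak to
report, so any '==PID==' line signals a problem. A sketch of a
leak-checked run of one suite, paths assumed as in the earlier
examples:

    cd test
    VALGRIND=1 top_srcdir=.. ./run-suites api
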
diff --git a/lang/c/jansson/test/suites/Makefile.am b/lang/c/jansson/test/suites/Makefile.am
new file mode 100644
index 0000000..a53eb07
--- /dev/null
+++ b/lang/c/jansson/test/suites/Makefile.am
@@ -0,0 +1,2 @@
+SUBDIRS = api
+EXTRA_DIST = invalid invalid-unicode valid
diff --git a/lang/c/jansson/test/suites/Makefile.in b/lang/c/jansson/test/suites/Makefile.in
new file mode 100644
index 0000000..220087d
--- /dev/null
+++ b/lang/c/jansson/test/suites/Makefile.in
@@ -0,0 +1,487 @@
+# Makefile.in generated by automake 1.10 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006  Free Software Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+subdir = test/suites
+DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+	$(ACLOCAL_M4)
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = $(top_builddir)/config.h
+CONFIG_CLEAN_FILES =
+SOURCES =
+DIST_SOURCES =
+RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \
+	html-recursive info-recursive install-data-recursive \
+	install-dvi-recursive install-exec-recursive \
+	install-html-recursive install-info-recursive \
+	install-pdf-recursive install-ps-recursive install-recursive \
+	installcheck-recursive installdirs-recursive pdf-recursive \
+	ps-recursive uninstall-recursive
+RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive	\
+  distclean-recursive maintainer-clean-recursive
+ETAGS = etags
+CTAGS = ctags
+DIST_SUBDIRS = $(SUBDIRS)
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+LD = @LD@
+LDFLAGS = @LDFLAGS@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+MAKEINFO = @MAKEINFO@
+MKDIR_P = @MKDIR_P@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+RANLIB = @RANLIB@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+json_have_long_long = @json_have_long_long@
+json_inline = @json_inline@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+SUBDIRS = api
+EXTRA_DIST = invalid invalid-unicode valid
+all: all-recursive
+
+.SUFFIXES:
+$(srcdir)/Makefile.in:  $(srcdir)/Makefile.am  $(am__configure_deps)
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+	      cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \
+		&& exit 0; \
+	      exit 1;; \
+	  esac; \
+	done; \
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign  test/suites/Makefile'; \
+	cd $(top_srcdir) && \
+	  $(AUTOMAKE) --foreign  test/suites/Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+	@case '$?' in \
+	  *config.status*) \
+	    cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+	  *) \
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
+	esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure:  $(am__configure_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4):  $(am__aclocal_m4_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-libtool:
+	-rm -rf .libs _libs
+
+# This directory's subdirectories are mostly independent; you can cd
+# into them and run `make' without going through this Makefile.
+# To change the values of `make' variables: instead of editing Makefiles,
+# (1) if the variable is set in `config.status', edit `config.status'
+#     (which will cause the Makefiles to be regenerated when you run `make');
+# (2) otherwise, pass the desired values on the `make' command line.
+$(RECURSIVE_TARGETS):
+	@failcom='exit 1'; \
+	for f in x $$MAKEFLAGS; do \
+	  case $$f in \
+	    *=* | --[!k]*);; \
+	    *k*) failcom='fail=yes';; \
+	  esac; \
+	done; \
+	dot_seen=no; \
+	target=`echo $@ | sed s/-recursive//`; \
+	list='$(SUBDIRS)'; for subdir in $$list; do \
+	  echo "Making $$target in $$subdir"; \
+	  if test "$$subdir" = "."; then \
+	    dot_seen=yes; \
+	    local_target="$$target-am"; \
+	  else \
+	    local_target="$$target"; \
+	  fi; \
+	  (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
+	  || eval $$failcom; \
+	done; \
+	if test "$$dot_seen" = "no"; then \
+	  $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \
+	fi; test -z "$$fail"
+
+$(RECURSIVE_CLEAN_TARGETS):
+	@failcom='exit 1'; \
+	for f in x $$MAKEFLAGS; do \
+	  case $$f in \
+	    *=* | --[!k]*);; \
+	    *k*) failcom='fail=yes';; \
+	  esac; \
+	done; \
+	dot_seen=no; \
+	case "$@" in \
+	  distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \
+	  *) list='$(SUBDIRS)' ;; \
+	esac; \
+	rev=''; for subdir in $$list; do \
+	  if test "$$subdir" = "."; then :; else \
+	    rev="$$subdir $$rev"; \
+	  fi; \
+	done; \
+	rev="$$rev ."; \
+	target=`echo $@ | sed s/-recursive//`; \
+	for subdir in $$rev; do \
+	  echo "Making $$target in $$subdir"; \
+	  if test "$$subdir" = "."; then \
+	    local_target="$$target-am"; \
+	  else \
+	    local_target="$$target"; \
+	  fi; \
+	  (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
+	  || eval $$failcom; \
+	done && test -z "$$fail"
+tags-recursive:
+	list='$(SUBDIRS)'; for subdir in $$list; do \
+	  test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \
+	done
+ctags-recursive:
+	list='$(SUBDIRS)'; for subdir in $$list; do \
+	  test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \
+	done
+
+ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
+	list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	mkid -fID $$unique
+tags: TAGS
+
+TAGS: tags-recursive $(HEADERS) $(SOURCES)  $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \
+	  include_option=--etags-include; \
+	  empty_fix=.; \
+	else \
+	  include_option=--include; \
+	  empty_fix=; \
+	fi; \
+	list='$(SUBDIRS)'; for subdir in $$list; do \
+	  if test "$$subdir" = .; then :; else \
+	    test ! -f $$subdir/TAGS || \
+	      tags="$$tags $$include_option=$$here/$$subdir/TAGS"; \
+	  fi; \
+	done; \
+	list='$(SOURCES) $(HEADERS)  $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \
+	  test -n "$$unique" || unique=$$empty_fix; \
+	  $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	    $$tags $$unique; \
+	fi
+ctags: CTAGS
+CTAGS: ctags-recursive $(HEADERS) $(SOURCES)  $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	list='$(SOURCES) $(HEADERS)  $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	test -z "$(CTAGS_ARGS)$$tags$$unique" \
+	  || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+	     $$tags $$unique
+
+GTAGS:
+	here=`$(am__cd) $(top_builddir) && pwd` \
+	  && cd $(top_srcdir) \
+	  && gtags -i $(GTAGS_ARGS) $$here
+
+distclean-tags:
+	-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(DISTFILES)
+	@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	list='$(DISTFILES)'; \
+	  dist_files=`for file in $$list; do echo $$file; done | \
+	  sed -e "s|^$$srcdirstrip/||;t" \
+	      -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+	case $$dist_files in \
+	  */*) $(MKDIR_P) `echo "$$dist_files" | \
+			   sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+			   sort -u` ;; \
+	esac; \
+	for file in $$dist_files; do \
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+	  if test -d $$d/$$file; then \
+	    dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \
+	    fi; \
+	    cp -pR $$d/$$file $(distdir)$$dir || exit 1; \
+	  else \
+	    test -f $(distdir)/$$file \
+	    || cp -p $$d/$$file $(distdir)/$$file \
+	    || exit 1; \
+	  fi; \
+	done
+	list='$(DIST_SUBDIRS)'; for subdir in $$list; do \
+	  if test "$$subdir" = .; then :; else \
+	    test -d "$(distdir)/$$subdir" \
+	    || $(MKDIR_P) "$(distdir)/$$subdir" \
+	    || exit 1; \
+	    distdir=`$(am__cd) $(distdir) && pwd`; \
+	    top_distdir=`$(am__cd) $(top_distdir) && pwd`; \
+	    (cd $$subdir && \
+	      $(MAKE) $(AM_MAKEFLAGS) \
+	        top_distdir="$$top_distdir" \
+	        distdir="$$distdir/$$subdir" \
+		am__remove_distdir=: \
+		am__skip_length_check=: \
+	        distdir) \
+	      || exit 1; \
+	  fi; \
+	done
+check-am: all-am
+check: check-recursive
+all-am: Makefile
+installdirs: installdirs-recursive
+installdirs-am:
+install: install-recursive
+install-exec: install-exec-recursive
+install-data: install-data-recursive
+uninstall: uninstall-recursive
+
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-recursive
+install-strip:
+	$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	  install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	  `test -z '$(STRIP)' || \
+	    echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+
+maintainer-clean-generic:
+	@echo "This command is intended for maintainers to use"
+	@echo "it deletes files that may require special tools to rebuild."
+clean: clean-recursive
+
+clean-am: clean-generic clean-libtool mostlyclean-am
+
+distclean: distclean-recursive
+	-rm -f Makefile
+distclean-am: clean-am distclean-generic distclean-tags
+
+dvi: dvi-recursive
+
+dvi-am:
+
+html: html-recursive
+
+info: info-recursive
+
+info-am:
+
+install-data-am:
+
+install-dvi: install-dvi-recursive
+
+install-exec-am:
+
+install-html: install-html-recursive
+
+install-info: install-info-recursive
+
+install-man:
+
+install-pdf: install-pdf-recursive
+
+install-ps: install-ps-recursive
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-recursive
+	-rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-recursive
+
+mostlyclean-am: mostlyclean-generic mostlyclean-libtool
+
+pdf: pdf-recursive
+
+pdf-am:
+
+ps: ps-recursive
+
+ps-am:
+
+uninstall-am:
+
+.MAKE: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) install-am \
+	install-strip
+
+.PHONY: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) CTAGS GTAGS \
+	all all-am check check-am clean clean-generic clean-libtool \
+	ctags ctags-recursive distclean distclean-generic \
+	distclean-libtool distclean-tags distdir dvi dvi-am html \
+	html-am info info-am install install-am install-data \
+	install-data-am install-dvi install-dvi-am install-exec \
+	install-exec-am install-html install-html-am install-info \
+	install-info-am install-man install-pdf install-pdf-am \
+	install-ps install-ps-am install-strip installcheck \
+	installcheck-am installdirs installdirs-am maintainer-clean \
+	maintainer-clean-generic mostlyclean mostlyclean-generic \
+	mostlyclean-libtool pdf pdf-am ps ps-am tags tags-recursive \
+	uninstall uninstall-am
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/lang/c/jansson/test/suites/api/Makefile.am b/lang/c/jansson/test/suites/api/Makefile.am
new file mode 100644
index 0000000..58c31c6
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/Makefile.am
@@ -0,0 +1,32 @@
+EXTRA_DIST = run
+
+check_PROGRAMS = \
+	test_array \
+	test_copy \
+	test_dump \
+	test_equal \
+	test_load \
+	test_loadb \
+	test_memory_funcs \
+	test_number \
+	test_object \
+	test_pack \
+	test_simple \
+	test_unpack
+
+test_array_SOURCES = test_array.c util.h
+test_copy_SOURCES = test_copy.c util.h
+test_dump_SOURCES = test_dump.c util.h
+test_load_SOURCES = test_load.c util.h
+test_loadb_SOURCES = test_loadb.c util.h
+test_memory_funcs_SOURCES = test_memory_funcs.c util.h
+test_number_SOURCES = test_number.c util.h
+test_object_SOURCES = test_object.c util.h
+test_pack_SOURCES = test_pack.c util.h
+test_simple_SOURCES = test_simple.c util.h
+test_unpack_SOURCES = test_unpack.c util.h
+
+AM_CPPFLAGS = -I$(top_srcdir)/src
+AM_CFLAGS = -Wall -Werror
+LDFLAGS = -static  # for speed and Valgrind
+LDADD = $(top_builddir)/src/libjansson.la
diff --git a/lang/c/jansson/test/suites/api/Makefile.in b/lang/c/jansson/test/suites/api/Makefile.in
new file mode 100644
index 0000000..b84690b
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/Makefile.in
@@ -0,0 +1,553 @@
+# Makefile.in generated by automake 1.10 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006  Free Software Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+check_PROGRAMS = test_array$(EXEEXT) test_copy$(EXEEXT) \
+	test_dump$(EXEEXT) test_equal$(EXEEXT) test_load$(EXEEXT) \
+	test_loadb$(EXEEXT) test_memory_funcs$(EXEEXT) \
+	test_number$(EXEEXT) test_object$(EXEEXT) test_pack$(EXEEXT) \
+	test_simple$(EXEEXT) test_unpack$(EXEEXT)
+subdir = test/suites/api
+DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+	$(ACLOCAL_M4)
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = $(top_builddir)/config.h
+CONFIG_CLEAN_FILES =
+am_test_array_OBJECTS = test_array.$(OBJEXT)
+test_array_OBJECTS = $(am_test_array_OBJECTS)
+test_array_LDADD = $(LDADD)
+test_array_DEPENDENCIES = $(top_builddir)/src/libjansson.la
+am_test_copy_OBJECTS = test_copy.$(OBJEXT)
+test_copy_OBJECTS = $(am_test_copy_OBJECTS)
+test_copy_LDADD = $(LDADD)
+test_copy_DEPENDENCIES = $(top_builddir)/src/libjansson.la
+am_test_dump_OBJECTS = test_dump.$(OBJEXT)
+test_dump_OBJECTS = $(am_test_dump_OBJECTS)
+test_dump_LDADD = $(LDADD)
+test_dump_DEPENDENCIES = $(top_builddir)/src/libjansson.la
+test_equal_SOURCES = test_equal.c
+test_equal_OBJECTS = test_equal.$(OBJEXT)
+test_equal_LDADD = $(LDADD)
+test_equal_DEPENDENCIES = $(top_builddir)/src/libjansson.la
+am_test_load_OBJECTS = test_load.$(OBJEXT)
+test_load_OBJECTS = $(am_test_load_OBJECTS)
+test_load_LDADD = $(LDADD)
+test_load_DEPENDENCIES = $(top_builddir)/src/libjansson.la
+am_test_loadb_OBJECTS = test_loadb.$(OBJEXT)
+test_loadb_OBJECTS = $(am_test_loadb_OBJECTS)
+test_loadb_LDADD = $(LDADD)
+test_loadb_DEPENDENCIES = $(top_builddir)/src/libjansson.la
+am_test_memory_funcs_OBJECTS = test_memory_funcs.$(OBJEXT)
+test_memory_funcs_OBJECTS = $(am_test_memory_funcs_OBJECTS)
+test_memory_funcs_LDADD = $(LDADD)
+test_memory_funcs_DEPENDENCIES = $(top_builddir)/src/libjansson.la
+am_test_number_OBJECTS = test_number.$(OBJEXT)
+test_number_OBJECTS = $(am_test_number_OBJECTS)
+test_number_LDADD = $(LDADD)
+test_number_DEPENDENCIES = $(top_builddir)/src/libjansson.la
+am_test_object_OBJECTS = test_object.$(OBJEXT)
+test_object_OBJECTS = $(am_test_object_OBJECTS)
+test_object_LDADD = $(LDADD)
+test_object_DEPENDENCIES = $(top_builddir)/src/libjansson.la
+am_test_pack_OBJECTS = test_pack.$(OBJEXT)
+test_pack_OBJECTS = $(am_test_pack_OBJECTS)
+test_pack_LDADD = $(LDADD)
+test_pack_DEPENDENCIES = $(top_builddir)/src/libjansson.la
+am_test_simple_OBJECTS = test_simple.$(OBJEXT)
+test_simple_OBJECTS = $(am_test_simple_OBJECTS)
+test_simple_LDADD = $(LDADD)
+test_simple_DEPENDENCIES = $(top_builddir)/src/libjansson.la
+am_test_unpack_OBJECTS = test_unpack.$(OBJEXT)
+test_unpack_OBJECTS = $(am_test_unpack_OBJECTS)
+test_unpack_LDADD = $(LDADD)
+test_unpack_DEPENDENCIES = $(top_builddir)/src/libjansson.la
+DEFAULT_INCLUDES = -I. -I$(top_builddir)@am__isrc@
+depcomp = $(SHELL) $(top_srcdir)/depcomp
+am__depfiles_maybe = depfiles
+COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+	$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+LTCOMPILE = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
+	--mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
+	$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+CCLD = $(CC)
+LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
+	--mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \
+	$(LDFLAGS) -o $@
+SOURCES = $(test_array_SOURCES) $(test_copy_SOURCES) \
+	$(test_dump_SOURCES) test_equal.c $(test_load_SOURCES) \
+	$(test_loadb_SOURCES) $(test_memory_funcs_SOURCES) \
+	$(test_number_SOURCES) $(test_object_SOURCES) \
+	$(test_pack_SOURCES) $(test_simple_SOURCES) \
+	$(test_unpack_SOURCES)
+DIST_SOURCES = $(test_array_SOURCES) $(test_copy_SOURCES) \
+	$(test_dump_SOURCES) test_equal.c $(test_load_SOURCES) \
+	$(test_loadb_SOURCES) $(test_memory_funcs_SOURCES) \
+	$(test_number_SOURCES) $(test_object_SOURCES) \
+	$(test_pack_SOURCES) $(test_simple_SOURCES) \
+	$(test_unpack_SOURCES)
+ETAGS = etags
+CTAGS = ctags
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+LD = @LD@
+LDFLAGS = -static  # for speed and Valgrind
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+MAKEINFO = @MAKEINFO@
+MKDIR_P = @MKDIR_P@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+RANLIB = @RANLIB@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+json_have_long_long = @json_have_long_long@
+json_inline = @json_inline@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+EXTRA_DIST = run
+test_array_SOURCES = test_array.c util.h
+test_copy_SOURCES = test_copy.c util.h
+test_dump_SOURCES = test_dump.c util.h
+test_load_SOURCES = test_load.c util.h
+test_loadb_SOURCES = test_loadb.c util.h
+test_memory_funcs_SOURCES = test_memory_funcs.c util.h
+test_number_SOURCES = test_number.c util.h
+test_object_SOURCES = test_object.c util.h
+test_pack_SOURCES = test_pack.c util.h
+test_simple_SOURCES = test_simple.c util.h
+test_unpack_SOURCES = test_unpack.c util.h
+AM_CPPFLAGS = -I$(top_srcdir)/src
+AM_CFLAGS = -Wall -Werror
+LDADD = $(top_builddir)/src/libjansson.la
+all: all-am
+
+.SUFFIXES:
+.SUFFIXES: .c .lo .o .obj
+$(srcdir)/Makefile.in:  $(srcdir)/Makefile.am  $(am__configure_deps)
+	@for dep in $?; do \
+	  case '$(am__configure_deps)' in \
+	    *$$dep*) \
+	      cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \
+		&& exit 0; \
+	      exit 1;; \
+	  esac; \
+	done; \
+	echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign  test/suites/api/Makefile'; \
+	cd $(top_srcdir) && \
+	  $(AUTOMAKE) --foreign  test/suites/api/Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+	@case '$?' in \
+	  *config.status*) \
+	    cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+	  *) \
+	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
+	    cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
+	esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure:  $(am__configure_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4):  $(am__aclocal_m4_deps)
+	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+clean-checkPROGRAMS:
+	@list='$(check_PROGRAMS)'; for p in $$list; do \
+	  f=`echo $$p|sed 's/$(EXEEXT)$$//'`; \
+	  echo " rm -f $$p $$f"; \
+	  rm -f $$p $$f ; \
+	done
+test_array$(EXEEXT): $(test_array_OBJECTS) $(test_array_DEPENDENCIES) 
+	@rm -f test_array$(EXEEXT)
+	$(LINK) $(test_array_OBJECTS) $(test_array_LDADD) $(LIBS)
+test_copy$(EXEEXT): $(test_copy_OBJECTS) $(test_copy_DEPENDENCIES) 
+	@rm -f test_copy$(EXEEXT)
+	$(LINK) $(test_copy_OBJECTS) $(test_copy_LDADD) $(LIBS)
+test_dump$(EXEEXT): $(test_dump_OBJECTS) $(test_dump_DEPENDENCIES) 
+	@rm -f test_dump$(EXEEXT)
+	$(LINK) $(test_dump_OBJECTS) $(test_dump_LDADD) $(LIBS)
+test_equal$(EXEEXT): $(test_equal_OBJECTS) $(test_equal_DEPENDENCIES) 
+	@rm -f test_equal$(EXEEXT)
+	$(LINK) $(test_equal_OBJECTS) $(test_equal_LDADD) $(LIBS)
+test_load$(EXEEXT): $(test_load_OBJECTS) $(test_load_DEPENDENCIES) 
+	@rm -f test_load$(EXEEXT)
+	$(LINK) $(test_load_OBJECTS) $(test_load_LDADD) $(LIBS)
+test_loadb$(EXEEXT): $(test_loadb_OBJECTS) $(test_loadb_DEPENDENCIES) 
+	@rm -f test_loadb$(EXEEXT)
+	$(LINK) $(test_loadb_OBJECTS) $(test_loadb_LDADD) $(LIBS)
+test_memory_funcs$(EXEEXT): $(test_memory_funcs_OBJECTS) $(test_memory_funcs_DEPENDENCIES) 
+	@rm -f test_memory_funcs$(EXEEXT)
+	$(LINK) $(test_memory_funcs_OBJECTS) $(test_memory_funcs_LDADD) $(LIBS)
+test_number$(EXEEXT): $(test_number_OBJECTS) $(test_number_DEPENDENCIES) 
+	@rm -f test_number$(EXEEXT)
+	$(LINK) $(test_number_OBJECTS) $(test_number_LDADD) $(LIBS)
+test_object$(EXEEXT): $(test_object_OBJECTS) $(test_object_DEPENDENCIES) 
+	@rm -f test_object$(EXEEXT)
+	$(LINK) $(test_object_OBJECTS) $(test_object_LDADD) $(LIBS)
+test_pack$(EXEEXT): $(test_pack_OBJECTS) $(test_pack_DEPENDENCIES) 
+	@rm -f test_pack$(EXEEXT)
+	$(LINK) $(test_pack_OBJECTS) $(test_pack_LDADD) $(LIBS)
+test_simple$(EXEEXT): $(test_simple_OBJECTS) $(test_simple_DEPENDENCIES) 
+	@rm -f test_simple$(EXEEXT)
+	$(LINK) $(test_simple_OBJECTS) $(test_simple_LDADD) $(LIBS)
+test_unpack$(EXEEXT): $(test_unpack_OBJECTS) $(test_unpack_DEPENDENCIES) 
+	@rm -f test_unpack$(EXEEXT)
+	$(LINK) $(test_unpack_OBJECTS) $(test_unpack_LDADD) $(LIBS)
+
+mostlyclean-compile:
+	-rm -f *.$(OBJEXT)
+
+distclean-compile:
+	-rm -f *.tab.c
+
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_array.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_copy.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_dump.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_equal.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_load.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_loadb.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_memory_funcs.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_number.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_object.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_pack.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_simple.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_unpack.Po@am__quote@
+
+.c.o:
+@am__fastdepCC_TRUE@	$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCC_TRUE@	mv -f $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(COMPILE) -c $<
+
+.c.obj:
+@am__fastdepCC_TRUE@	$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
+@am__fastdepCC_TRUE@	mv -f $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(COMPILE) -c `$(CYGPATH_W) '$<'`
+
+.c.lo:
+@am__fastdepCC_TRUE@	$(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCC_TRUE@	mv -f $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LTCOMPILE) -c -o $@ $<
+
+mostlyclean-libtool:
+	-rm -f *.lo
+
+clean-libtool:
+	-rm -rf .libs _libs
+
+ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
+	list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	mkid -fID $$unique
+tags: TAGS
+
+TAGS:  $(HEADERS) $(SOURCES)  $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	list='$(SOURCES) $(HEADERS)  $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \
+	  test -n "$$unique" || unique=$$empty_fix; \
+	  $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+	    $$tags $$unique; \
+	fi
+ctags: CTAGS
+CTAGS:  $(HEADERS) $(SOURCES)  $(TAGS_DEPENDENCIES) \
+		$(TAGS_FILES) $(LISP)
+	tags=; \
+	here=`pwd`; \
+	list='$(SOURCES) $(HEADERS)  $(LISP) $(TAGS_FILES)'; \
+	unique=`for i in $$list; do \
+	    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+	  done | \
+	  $(AWK) '    { files[$$0] = 1; } \
+	       END { for (i in files) print i; }'`; \
+	test -z "$(CTAGS_ARGS)$$tags$$unique" \
+	  || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+	     $$tags $$unique
+
+GTAGS:
+	here=`$(am__cd) $(top_builddir) && pwd` \
+	  && cd $(top_srcdir) \
+	  && gtags -i $(GTAGS_ARGS) $$here
+
+distclean-tags:
+	-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(DISTFILES)
+	@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+	list='$(DISTFILES)'; \
+	  dist_files=`for file in $$list; do echo $$file; done | \
+	  sed -e "s|^$$srcdirstrip/||;t" \
+	      -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+	case $$dist_files in \
+	  */*) $(MKDIR_P) `echo "$$dist_files" | \
+			   sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+			   sort -u` ;; \
+	esac; \
+	for file in $$dist_files; do \
+	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+	  if test -d $$d/$$file; then \
+	    dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+	      cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \
+	    fi; \
+	    cp -pR $$d/$$file $(distdir)$$dir || exit 1; \
+	  else \
+	    test -f $(distdir)/$$file \
+	    || cp -p $$d/$$file $(distdir)/$$file \
+	    || exit 1; \
+	  fi; \
+	done
+check-am: all-am
+	$(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS)
+check: check-am
+all-am: Makefile
+installdirs:
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+	$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+	  install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+	  `test -z '$(STRIP)' || \
+	    echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+
+maintainer-clean-generic:
+	@echo "This command is intended for maintainers to use"
+	@echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-checkPROGRAMS clean-generic clean-libtool \
+	mostlyclean-am
+
+distclean: distclean-am
+	-rm -rf ./$(DEPDIR)
+	-rm -f Makefile
+distclean-am: clean-am distclean-compile distclean-generic \
+	distclean-tags
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+info: info-am
+
+info-am:
+
+install-data-am:
+
+install-dvi: install-dvi-am
+
+install-exec-am:
+
+install-html: install-html-am
+
+install-info: install-info-am
+
+install-man:
+
+install-pdf: install-pdf-am
+
+install-ps: install-ps-am
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+	-rm -rf ./$(DEPDIR)
+	-rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-compile mostlyclean-generic \
+	mostlyclean-libtool
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am:
+
+.MAKE: install-am install-strip
+
+.PHONY: CTAGS GTAGS all all-am check check-am clean \
+	clean-checkPROGRAMS clean-generic clean-libtool ctags \
+	distclean distclean-compile distclean-generic \
+	distclean-libtool distclean-tags distdir dvi dvi-am html \
+	html-am info info-am install install-am install-data \
+	install-data-am install-dvi install-dvi-am install-exec \
+	install-exec-am install-html install-html-am install-info \
+	install-info-am install-man install-pdf install-pdf-am \
+	install-ps install-ps-am install-strip installcheck \
+	installcheck-am installdirs maintainer-clean \
+	maintainer-clean-generic mostlyclean mostlyclean-compile \
+	mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
+	tags uninstall uninstall-am
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/lang/c/jansson/test/suites/api/run b/lang/c/jansson/test/suites/api/run
new file mode 100755
index 0000000..f1aed9b
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/run
@@ -0,0 +1,36 @@
+#!/bin/sh
+#
+# Copyright (c) 2009-2011 Petri Lehtinen <petri@digip.org>
+#
+# Jansson is free software; you can redistribute it and/or modify
+# it under the terms of the MIT license. See LICENSE for details.
+
+is_test() {
+    case "$test_name" in
+        *.c|check-exports)
+            return 0
+            ;;
+        *)
+            return 1
+            ;;
+    esac
+}
+
+run_test() {
+    if [ "$test_name" = "check-exports" ]; then
+        test_log=$test_log $test_path >$test_log/stdout 2>$test_log/stderr
+    else
+        $test_runner $suite_builddir/${test_name%.c} \
+            >$test_log/stdout \
+            2>$test_log/stderr \
+            || return 1
+        valgrind_check $test_log/stderr || return 1
+    fi
+}
+
+show_error() {
+    valgrind_show_error && return
+    cat $test_log/stderr
+}
+
+. $top_srcdir/test/scripts/run-tests.sh
diff --git a/lang/c/jansson/test/suites/api/test_array.c b/lang/c/jansson/test/suites/api/test_array.c
new file mode 100644
index 0000000..18b4652
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/test_array.c
@@ -0,0 +1,400 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <jansson.h>
+#include "util.h"
+
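+/* Reference-counting conventions exercised below: json_array_append()
+ * and json_array_set() add their own reference to the stored value
+ * (the caller keeps its reference), while the *_new variants steal
+ * the reference passed in. Hence five, seven and the array itself
+ * are decref'd explicitly at the end of each test. */
+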
+static void test_misc(void)
+{
+    json_t *array, *five, *seven, *value;
+    int i;
+
+    array = json_array();
+    five = json_integer(5);
+    seven = json_integer(7);
+
+    if(!array)
+        fail("unable to create array");
+    if(!five || !seven)
+        fail("unable to create integer");
+
+    if(json_array_size(array) != 0)
+        fail("empty array has nonzero size");
+
+    if(!json_array_append(array, NULL))
+        fail("able to append NULL");
+
+    if(json_array_append(array, five))
+        fail("unable to append");
+
+    if(json_array_size(array) != 1)
+        fail("wrong array size");
+
+    value = json_array_get(array, 0);
+    if(!value)
+        fail("unable to get item");
+    if(value != five)
+        fail("got wrong value");
+
+    if(json_array_append(array, seven))
+        fail("unable to append value");
+
+    if(json_array_size(array) != 2)
+        fail("wrong array size");
+
+    value = json_array_get(array, 1);
+    if(!value)
+        fail("unable to get item");
+    if(value != seven)
+        fail("got wrong value");
+
+    if(json_array_set(array, 0, seven))
+        fail("unable to set value");
+
+    if(!json_array_set(array, 0, NULL))
+        fail("able to set NULL");
+
+    if(json_array_size(array) != 2)
+        fail("wrong array size");
+
+    value = json_array_get(array, 0);
+    if(!value)
+        fail("unable to get item");
+    if(value != seven)
+        fail("got wrong value");
+
+    if(json_array_get(array, 2) != NULL)
+        fail("able to get value out of bounds");
+
+    if(!json_array_set(array, 2, seven))
+        fail("able to set value out of bounds");
+
+    for(i = 2; i < 30; i++) {
+        if(json_array_append(array, seven))
+            fail("unable to append value");
+
+        if(json_array_size(array) != i + 1)
+            fail("wrong array size");
+    }
+
+    for(i = 0; i < 30; i++) {
+        value = json_array_get(array, i);
+        if(!value)
+            fail("unable to get item");
+        if(value != seven)
+            fail("got wrong value");
+    }
+
+    if(json_array_set_new(array, 15, json_integer(123)))
+        fail("unable to set new value");
+
+    value = json_array_get(array, 15);
+    if(!json_is_integer(value) || json_integer_value(value) != 123)
+        fail("json_array_set_new works incorrectly");
+
+    if(!json_array_set_new(array, 15, NULL))
+        fail("able to set_new NULL value");
+
+    if(json_array_append_new(array, json_integer(321)))
+        fail("unable to append new value");
+
+    value = json_array_get(array, json_array_size(array) - 1);
+    if(!json_is_integer(value) || json_integer_value(value) != 321)
+        fail("json_array_append_new works incorrectly");
+
+    if(!json_array_append_new(array, NULL))
+        fail("able to append_new NULL value");
+
+    json_decref(five);
+    json_decref(seven);
+    json_decref(array);
+}
+
+static void test_insert(void)
+{
+    json_t *array, *five, *seven, *eleven, *value;
+    int i;
+
+    array = json_array();
+    five = json_integer(5);
+    seven = json_integer(7);
+    eleven = json_integer(11);
+
+    if(!array)
+        fail("unable to create array");
+    if(!five || !seven || !eleven)
+        fail("unable to create integer");
+
+
+    if(!json_array_insert(array, 1, five))
+        fail("able to insert value out of bounds");
+
+
+    if(json_array_insert(array, 0, five))
+        fail("unable to insert value in an empty array");
+
+    if(json_array_get(array, 0) != five)
+        fail("json_array_insert works incorrectly");
+
+    if(json_array_size(array) != 1)
+        fail("array size is invalid after insertion");
+
+
+    if(json_array_insert(array, 1, seven))
+        fail("unable to insert value at the end of an array");
+
+    if(json_array_get(array, 0) != five)
+        fail("json_array_insert works incorrectly");
+
+    if(json_array_get(array, 1) != seven)
+        fail("json_array_insert works incorrectly");
+
+    if(json_array_size(array) != 2)
+        fail("array size is invalid after insertion");
+
+
+    if(json_array_insert(array, 1, eleven))
+        fail("unable to insert value in the middle of an array");
+
+    if(json_array_get(array, 0) != five)
+        fail("json_array_insert works incorrectly");
+
+    if(json_array_get(array, 1) != eleven)
+        fail("json_array_insert works incorrectly");
+
+    if(json_array_get(array, 2) != seven)
+        fail("json_array_insert works incorrectly");
+
+    if(json_array_size(array) != 3)
+        fail("array size is invalid after insertion");
+
+
+    if(json_array_insert_new(array, 2, json_integer(123)))
+        fail("unable to insert value in the middle of an array");
+
+    value = json_array_get(array, 2);
+    if(!json_is_integer(value) || json_integer_value(value) != 123)
+        fail("json_array_insert_new works incorrectly");
+
+    if(json_array_size(array) != 4)
+        fail("array size is invalid after insertion");
+
+
+    for(i = 0; i < 20; i++) {
+        if(json_array_insert(array, 0, seven))
+            fail("unable to insert value at the begining of an array");
+    }
+
+    for(i = 0; i < 20; i++) {
+        if(json_array_get(array, i) != seven)
+            fail("json_aray_insert works incorrectly");
+    }
+
+    if(json_array_size(array) != 24)
+        fail("array size is invalid after loop insertion");
+
+    json_decref(five);
+    json_decref(seven);
+    json_decref(eleven);
+    json_decref(array);
+}
+
+static void test_remove(void)
+{
+    json_t *array, *five, *seven;
+
+    array = json_array();
+    five = json_integer(5);
+    seven = json_integer(7);
+
+    if(!array)
+        fail("unable to create array");
+    if(!five)
+        fail("unable to create integer");
+    if(!seven)
+        fail("unable to create integer");
+
+
+    if(!json_array_remove(array, 0))
+        fail("able to remove an unexisting index");
+
+
+    if(json_array_append(array, five))
+        fail("unable to append");
+
+    if(!json_array_remove(array, 1))
+        fail("able to remove an unexisting index");
+
+    if(json_array_remove(array, 0))
+        fail("unable to remove");
+
+    if(json_array_size(array) != 0)
+        fail("array size is invalid after removing");
+
+
+    if(json_array_append(array, five) ||
+       json_array_append(array, seven) ||
+       json_array_append(array, five) ||
+       json_array_append(array, seven))
+        fail("unable to append");
+
+    if(json_array_remove(array, 2))
+        fail("unable to remove");
+
+    if(json_array_size(array) != 3)
+        fail("array size is invalid after removing");
+
+    if(json_array_get(array, 0) != five ||
+       json_array_get(array, 1) != seven ||
+       json_array_get(array, 2) != seven)
+        fail("remove works incorrectly");
+
+    json_decref(five);
+    json_decref(seven);
+    json_decref(array);
+}
+
+static void test_clear(void)
+{
+    json_t *array, *five, *seven;
+    int i;
+
+    array = json_array();
+    five = json_integer(5);
+    seven = json_integer(7);
+
+    if(!array)
+        fail("unable to create array");
+    if(!five || !seven)
+        fail("unable to create integer");
+
+    for(i = 0; i < 10; i++) {
+        if(json_array_append(array, five))
+            fail("unable to append");
+    }
+    for(i = 0; i < 10; i++) {
+        if(json_array_append(array, seven))
+            fail("unable to append");
+    }
+
+    if(json_array_size(array) != 20)
+        fail("array size is invalid after appending");
+
+    if(json_array_clear(array))
+        fail("unable to clear");
+
+    if(json_array_size(array) != 0)
+        fail("array size is invalid after clearing");
+
+    json_decref(five);
+    json_decref(seven);
+    json_decref(array);
+}
+
+static void test_extend(void)
+{
+    json_t *array1, *array2, *five, *seven;
+    int i;
+
+    array1 = json_array();
+    array2 = json_array();
+    five = json_integer(5);
+    seven = json_integer(7);
+
+    if(!array1 || !array2)
+        fail("unable to create array");
+    if(!five || !seven)
+        fail("unable to create integer");
+
+    for(i = 0; i < 10; i++) {
+        if(json_array_append(array1, five))
+            fail("unable to append");
+    }
+    for(i = 0; i < 10; i++) {
+        if(json_array_append(array2, seven))
+            fail("unable to append");
+    }
+
+    if(json_array_size(array1) != 10 || json_array_size(array2) != 10)
+        fail("array size is invalid after appending");
+
+    if(json_array_extend(array1, array2))
+        fail("unable to extend");
+
+    for(i = 0; i < 10; i++) {
+        if(json_array_get(array1, i) != five)
+            fail("invalid array contents after extending");
+    }
+    for(i = 10; i < 20; i++) {
+        if(json_array_get(array1, i) != seven)
+            fail("invalid array contents after extending");
+    }
+
+    json_decref(five);
+    json_decref(seven);
+    json_decref(array1);
+    json_decref(array2);
+}
+
+static void test_circular()
+{
+    json_t *array1, *array2;
+
+    /* the simple cases are checked */
+
+    array1 = json_array();
+    if(!array1)
+        fail("unable to create array");
+
+    if(json_array_append(array1, array1) == 0)
+        fail("able to append self");
+
+    if(json_array_insert(array1, 0, array1) == 0)
+        fail("able to insert self");
+
+    if(json_array_append_new(array1, json_true()))
+        fail("failed to append true");
+
+    if(json_array_set(array1, 0, array1) == 0)
+        fail("able to set self");
+
+    json_decref(array1);
+
+
+    /* create circular references */
+
+    array1 = json_array();
+    array2 = json_array();
+    if(!array1 || !array2)
+        fail("unable to create array");
+
+    if(json_array_append(array1, array2) ||
+       json_array_append(array2, array1))
+        fail("unable to append");
+
+    /* circularity is detected when dumping */
+    if(json_dumps(array1, 0) != NULL)
+        fail("able to dump circulars");
+
+    /* decref twice to deal with the circular references */
+    json_decref(array1);
+    json_decref(array2);
+    json_decref(array1);
+}
+
+
+int main()
+{
+    test_misc();
+    test_insert();
+    test_remove();
+    test_clear();
+    test_extend();
+    test_circular();
+
+    return 0;
+}
diff --git a/lang/c/jansson/test/suites/api/test_copy.c b/lang/c/jansson/test/suites/api/test_copy.c
new file mode 100644
index 0000000..6310f69
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/test_copy.c
@@ -0,0 +1,319 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <string.h>
+#include <jansson.h>
+#include "util.h"
+
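+/* json_copy() duplicates only the top-level value; for arrays and
+ * objects the copy shares its children with the original, so the
+ * element pointers compare equal. json_deep_copy() recurses and
+ * duplicates every nested value as well. */
+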
+static void test_copy_simple(void)
+{
+    json_t *value, *copy;
+
+    if(json_copy(NULL))
+        fail("copying NULL doesn't return NULL");
+
+    /* true */
+    value = json_true();
+    copy = json_copy(value);
+    if(value != copy)
+        fail("copying true failed");
+    json_decref(value);
+    json_decref(copy);
+
+    /* false */
+    value = json_false();
+    copy = json_copy(value);
+    if(value != copy)
+        fail("copying false failed");
+    json_decref(value);
+    json_decref(copy);
+
+    /* null */
+    value = json_null();
+    copy = json_copy(value);
+    if(value != copy)
+        fail("copying null failed");
+    json_decref(value);
+    json_decref(copy);
+
+    /* string */
+    value = json_string("foo");
+    if(!value)
+        fail("unable to create a string");
+    copy = json_copy(value);
+    if(!copy)
+        fail("unable to copy a string");
+    if(copy == value)
+        fail("copying a string doesn't copy");
+    if(!json_equal(copy, value))
+        fail("copying a string produces an inequal copy");
+    if(value->refcount != 1 || copy->refcount != 1)
+        fail("invalid refcounts");
+    json_decref(value);
+    json_decref(copy);
+
+    /* integer */
+    value = json_integer(543);
+    if(!value)
+        fail("unable to create an integer");
+    copy = json_copy(value);
+    if(!copy)
+        fail("unable to copy an integer");
+    if(copy == value)
+        fail("copying an integer doesn't copy");
+    if(!json_equal(copy, value))
+        fail("copying an integer produces an inequal copy");
+    if(value->refcount != 1 || copy->refcount != 1)
+        fail("invalid refcounts");
+    json_decref(value);
+    json_decref(copy);
+
+    /* real */
+    value = json_real(123e9);
+    if(!value)
+        fail("unable to create a real");
+    copy = json_copy(value);
+    if(!copy)
+        fail("unable to copy a real");
+    if(copy == value)
+        fail("copying a real doesn't copy");
+    if(!json_equal(copy, value))
+        fail("copying a real produces an inequal copy");
+    if(value->refcount != 1 || copy->refcount != 1)
+        fail("invalid refcounts");
+    json_decref(value);
+    json_decref(copy);
+}
+
+static void test_deep_copy_simple(void)
+{
+    json_t *value, *copy;
+
+    if(json_deep_copy(NULL))
+        fail("deep copying NULL doesn't return NULL");
+
+    /* true */
+    value = json_true();
+    copy = json_deep_copy(value);
+    if(value != copy)
+        fail("deep copying true failed");
+    json_decref(value);
+    json_decref(copy);
+
+    /* false */
+    value = json_false();
+    copy = json_deep_copy(value);
+    if(value != copy)
+        fail("deep copying false failed");
+    json_decref(value);
+    json_decref(copy);
+
+    /* null */
+    value = json_null();
+    copy = json_deep_copy(value);
+    if(value != copy)
+        fail("deep copying null failed");
+    json_decref(value);
+    json_decref(copy);
+
+    /* string */
+    value = json_string("foo");
+    if(!value)
+        fail("unable to create a string");
+    copy = json_deep_copy(value);
+    if(!copy)
+        fail("unable to deep copy a string");
+    if(copy == value)
+        fail("deep copying a string doesn't copy");
+    if(!json_equal(copy, value))
+        fail("deep copying a string produces an inequal copy");
+    if(value->refcount != 1 || copy->refcount != 1)
+        fail("invalid refcounts");
+    json_decref(value);
+    json_decref(copy);
+
+    /* integer */
+    value = json_integer(543);
+    if(!value)
+        fail("unable to create an integer");
+    copy = json_deep_copy(value);
+    if(!copy)
+        fail("unable to deep copy an integer");
+    if(copy == value)
+        fail("deep copying an integer doesn't copy");
+    if(!json_equal(copy, value))
+        fail("deep copying an integer produces an inequal copy");
+    if(value->refcount != 1 || copy->refcount != 1)
+        fail("invalid refcounts");
+    json_decref(value);
+    json_decref(copy);
+
+    /* real */
+    value = json_real(123e9);
+    if(!value)
+        fail("unable to create a real");
+    copy = json_deep_copy(value);
+    if(!copy)
+        fail("unable to deep copy a real");
+    if(copy == value)
+        fail("deep copying a real doesn't copy");
+    if(!json_equal(copy, value))
+        fail("deep copying a real produces an inequal copy");
+    if(value->refcount != 1 || copy->refcount != 1)
+        fail("invalid refcounts");
+    json_decref(value);
+    json_decref(copy);
+}
+
+static void test_copy_array(void)
+{
+    const char *json_array_text = "[1, \"foo\", 3.141592, {\"foo\": \"bar\"}]";
+
+    json_t *array, *copy;
+    size_t i;
+
+    array = json_loads(json_array_text, 0, NULL);
+    if(!array)
+        fail("unable to parse an array");
+
+    copy = json_copy(array);
+    if(!copy)
+        fail("unable to copy an array");
+    if(copy == array)
+        fail("copying an array doesn't copy");
+    if(!json_equal(copy, array))
+        fail("copying an array produces an inequal copy");
+
+    for(i = 0; i < json_array_size(copy); i++)
+    {
+        if(json_array_get(array, i) != json_array_get(copy, i))
+            fail("copying an array modifies its elements");
+    }
+
+    json_decref(array);
+    json_decref(copy);
+}
+
+static void test_deep_copy_array(void)
+{
+    const char *json_array_text = "[1, \"foo\", 3.141592, {\"foo\": \"bar\"}]";
+
+    json_t *array, *copy;
+    size_t i;
+
+    array = json_loads(json_array_text, 0, NULL);
+    if(!array)
+        fail("unable to parse an array");
+
+    copy = json_deep_copy(array);
+    if(!copy)
+        fail("unable to deep copy an array");
+    if(copy == array)
+        fail("deep copying an array doesn't copy");
+    if(!json_equal(copy, array))
+        fail("deep copying an array produces an inequal copy");
+
+    for(i = 0; i < json_array_size(copy); i++)
+    {
+        if(json_array_get(array, i) == json_array_get(copy, i))
+            fail("deep copying an array doesn't copy its elements");
+    }
+
+    json_decref(array);
+    json_decref(copy);
+}
+
+static void test_copy_object(void)
+{
+    const char *json_object_text =
+        "{\"foo\": \"bar\", \"a\": 1, \"b\": 3.141592, \"c\": [1,2,3,4]}";
+
+    json_t *object, *copy;
+    void *iter;
+
+    object = json_loads(json_object_text, 0, NULL);
+    if(!object)
+        fail("unable to parse an object");
+
+    copy = json_copy(object);
+    if(!copy)
+        fail("unable to copy an object");
+    if(copy == object)
+        fail("copying an object doesn't copy");
+    if(!json_equal(copy, object))
+        fail("copying an object produces an inequal copy");
+
+    iter = json_object_iter(object);
+    while(iter)
+    {
+        const char *key;
+        json_t *value1, *value2;
+
+        key = json_object_iter_key(iter);
+        value1 = json_object_iter_value(iter);
+        value2 = json_object_get(copy, key);
+
+        if(value1 != value2)
+            fail("deep copying an object modifies its items");
+
+        iter = json_object_iter_next(object, iter);
+    }
+
+    json_decref(object);
+    json_decref(copy);
+}
+
+static void test_deep_copy_object(void)
+{
+    const char *json_object_text =
+        "{\"foo\": \"bar\", \"a\": 1, \"b\": 3.141592, \"c\": [1,2,3,4]}";
+
+    json_t *object, *copy;
+    void *iter;
+
+    object = json_loads(json_object_text, 0, NULL);
+    if(!object)
+        fail("unable to parse an object");
+
+    copy = json_deep_copy(object);
+    if(!copy)
+        fail("unable to deep copy an object");
+    if(copy == object)
+        fail("deep copying an object doesn't copy");
+    if(!json_equal(copy, object))
+        fail("deep copying an object produces an inequal copy");
+
+    iter = json_object_iter(object);
+    while(iter)
+    {
+        const char *key;
+        json_t *value1, *value2;
+
+        key = json_object_iter_key(iter);
+        value1 = json_object_iter_value(iter);
+        value2 = json_object_get(copy, key);
+
+        if(value1 == value2)
+            fail("deep copying an object doesn't copy its items");
+
+        iter = json_object_iter_next(object, iter);
+    }
+
+    json_decref(object);
+    json_decref(copy);
+}
+
+int main()
+{
+    test_copy_simple();
+    test_deep_copy_simple();
+    test_copy_array();
+    test_deep_copy_array();
+    test_copy_object();
+    test_deep_copy_object();
+    return 0;
+}
diff --git a/lang/c/jansson/test/suites/api/test_dump.c b/lang/c/jansson/test/suites/api/test_dump.c
new file mode 100644
index 0000000..17d588b
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/test_dump.c
@@ -0,0 +1,142 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <jansson.h>
+#include <string.h>
+#include "util.h"
+
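+/* json_dumps() returns a newly allocated string that the caller must
+ * free(), or NULL when the value cannot be encoded (for example a
+ * circular reference, or a non-container value without
+ * JSON_ENCODE_ANY). */
+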
+static void encode_twice()
+{
+    /* Encode an empty object/array, add an item, encode again */
+
+    json_t *json;
+    char *result;
+
+    json = json_object();
+    result = json_dumps(json, 0);
+    if(!result || strcmp(result, "{}"))
+      fail("json_dumps failed");
+    free(result);
+
+    json_object_set_new(json, "foo", json_integer(5));
+    result = json_dumps(json, 0);
+    if(!result || strcmp(result, "{\"foo\": 5}"))
+      fail("json_dumps failed");
+    free(result);
+
+    json_decref(json);
+
+    json = json_array();
+    result = json_dumps(json, 0);
+    if(!result || strcmp(result, "[]"))
+      fail("json_dumps failed");
+    free(result);
+
+    json_array_append_new(json, json_integer(5));
+    result = json_dumps(json, 0);
+    if(!result || strcmp(result, "[5]"))
+      fail("json_dumps failed");
+    free(result);
+
+    json_decref(json);
+}
+
+static void circular_references()
+{
+    /* Construct a JSON object/array with a circular reference:
+
+       object: {"a": {"b": {"c": <circular reference to $.a>}}}
+       array: [[[<circular reference to the $[0] array>]]]
+
+       Encode it, remove the circular reference and encode again.
+    */
+
+    json_t *json;
+    char *result;
+
+    json = json_object();
+    json_object_set_new(json, "a", json_object());
+    json_object_set_new(json_object_get(json, "a"), "b", json_object());
+    json_object_set(json_object_get(json_object_get(json, "a"), "b"), "c",
+                    json_object_get(json, "a"));
+
+    if(json_dumps(json, 0))
+        fail("json_dumps encoded a circular reference!");
+
+    json_object_del(json_object_get(json_object_get(json, "a"), "b"), "c");
+
+    result = json_dumps(json, 0);
+    if(!result || strcmp(result, "{\"a\": {\"b\": {}}}"))
+        fail("json_dumps failed!");
+    free(result);
+
+    json_decref(json);
+
+    json = json_array();
+    json_array_append_new(json, json_array());
+    json_array_append_new(json_array_get(json, 0), json_array());
+    json_array_append(json_array_get(json_array_get(json, 0), 0),
+                      json_array_get(json, 0));
+
+    if(json_dumps(json, 0))
+        fail("json_dumps encoded a circular reference!");
+
+    json_array_remove(json_array_get(json_array_get(json, 0), 0), 0);
+
+    result = json_dumps(json, 0);
+    if(!result || strcmp(result, "[[[]]]"))
+        fail("json_dumps failed!");
+    free(result);
+
+    json_decref(json);
+}
+
+static void encode_other_than_array_or_object()
+{
+    /* Encoding anything other than array or object should only
+     * succeed if the JSON_ENCODE_ANY flag is used */
+
+    json_t *json;
+    FILE *fp = NULL;
+    char *result;
+
+    json = json_string("foo");
+    if(json_dumps(json, 0) != NULL)
+        fail("json_dumps encoded a string!");
+    if(json_dumpf(json, fp, 0) == 0)
+        fail("json_dumpf encoded a string!");
+
+    result = json_dumps(json, JSON_ENCODE_ANY);
+    if(!result || strcmp(result, "\"foo\"") != 0)
+        fail("json_dumps failed to encode a string with JSON_ENCODE_ANY");
+
+    free(result);
+    json_decref(json);
+
+    json = json_integer(42);
+    if(json_dumps(json, 0) != NULL)
+        fail("json_dumps encoded an integer!");
+    if(json_dumpf(json, fp, 0) == 0)
+        fail("json_dumpf encoded an integer!");
+
+    result = json_dumps(json, JSON_ENCODE_ANY);
+    if(!result || strcmp(result, "42") != 0)
+        fail("json_dumps failed to encode an integer with JSON_ENCODE_ANY");
+
+    free(result);
+    json_decref(json);
+}
+
+int main()
+{
+    encode_twice();
+    circular_references();
+    encode_other_than_array_or_object();
+    return 0;
+}
diff --git a/lang/c/jansson/test/suites/api/test_equal.c b/lang/c/jansson/test/suites/api/test_equal.c
new file mode 100644
index 0000000..ba7ab43
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/test_equal.c
@@ -0,0 +1,190 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <jansson.h>
+#include "util.h"
+
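+/* json_equal() compares two values deeply by content; NULL arguments
+ * are never considered equal to anything. true, false and null are
+ * singletons, so comparing one of them with itself also covers
+ * pointer equality. */
+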
+static void test_equal_simple()
+{
+    json_t *value1, *value2;
+
+    if(json_equal(NULL, NULL))
+        fail("json_equal fails for two NULLs");
+
+    value1 = json_true();
+    if(json_equal(value1, NULL) || json_equal(NULL, value1))
+        fail("json_equal fails for NULL");
+
+    /* this covers true, false and null as they are singletons */
+    if(!json_equal(value1, value1))
+        fail("identical objects are not equal");
+    json_decref(value1);
+
+    /* integer */
+    value1 = json_integer(1);
+    value2 = json_integer(1);
+    if(!value1 || !value2)
+        fail("unable to create integers");
+    if(!json_equal(value1, value2))
+        fail("json_equal fails for two equal integers");
+    json_decref(value2);
+
+    value2 = json_integer(2);
+    if(!value2)
+        fail("unable to create an integer");
+    if(json_equal(value1, value2))
+        fail("json_equal fails for two inequal integers");
+
+    json_decref(value1);
+    json_decref(value2);
+
+    /* real */
+    value1 = json_real(1.2);
+    value2 = json_real(1.2);
+    if(!value1 || !value2)
+        fail("unable to create reals");
+    if(!json_equal(value1, value2))
+        fail("json_equal fails for two equal reals");
+    json_decref(value2);
+
+    value2 = json_real(3.141592);
+    if(!value2)
+        fail("unable to create an real");
+    if(json_equal(value1, value2))
+        fail("json_equal fails for two inequal reals");
+
+    json_decref(value1);
+    json_decref(value2);
+
+    /* string */
+    value1 = json_string("foo");
+    value2 = json_string("foo");
+    if(!value1 || !value2)
+        fail("unable to create strings");
+    if(!json_equal(value1, value2))
+        fail("json_equal fails for two equal strings");
+    json_decref(value2);
+
+    value2 = json_string("bar");
+    if(!value2)
+        fail("unable to create an string");
+    if(json_equal(value1, value2))
+        fail("json_equal fails for two inequal strings");
+
+    json_decref(value1);
+    json_decref(value2);
+}
+
+static void test_equal_array()
+{
+    json_t *array1, *array2;
+
+    array1 = json_array();
+    array2 = json_array();
+    if(!array1 || !array2)
+        fail("unable to create arrays");
+
+    if(!json_equal(array1, array2))
+        fail("json_equal fails for two empty arrays");
+
+    json_array_append_new(array1, json_integer(1));
+    json_array_append_new(array2, json_integer(1));
+    json_array_append_new(array1, json_string("foo"));
+    json_array_append_new(array2, json_string("foo"));
+    json_array_append_new(array1, json_integer(2));
+    json_array_append_new(array2, json_integer(2));
+    if(!json_equal(array1, array2))
+        fail("json_equal fails for two equal arrays");
+
+    json_array_remove(array2, 2);
+    if(json_equal(array1, array2))
+        fail("json_equal fails for two inequal arrays");
+
+    json_array_append_new(array2, json_integer(3));
+    if(json_equal(array1, array2))
+        fail("json_equal fails for two inequal arrays");
+
+    json_decref(array1);
+    json_decref(array2);
+}
+
+static void test_equal_object()
+{
+    json_t *object1, *object2;
+
+    object1 = json_object();
+    object2 = json_object();
+    if(!object1 || !object2)
+        fail("unable to create objects");
+
+    if(!json_equal(object1, object2))
+        fail("json_equal fails for two empty objects");
+
+    json_object_set_new(object1, "a", json_integer(1));
+    json_object_set_new(object2, "a", json_integer(1));
+    json_object_set_new(object1, "b", json_string("foo"));
+    json_object_set_new(object2, "b", json_string("foo"));
+    json_object_set_new(object1, "c", json_integer(2));
+    json_object_set_new(object2, "c", json_integer(2));
+    if(!json_equal(object1, object2))
+        fail("json_equal fails for two equal objects");
+
+    json_object_del(object2, "c");
+    if(json_equal(object1, object2))
+        fail("json_equal fails for two inequal objects");
+
+    json_object_set_new(object2, "c", json_integer(3));
+    if(json_equal(object1, object2))
+        fail("json_equal fails for two inequal objects");
+
+    json_object_del(object2, "c");
+    json_object_set_new(object2, "d", json_integer(2));
+    if(json_equal(object1, object2))
+        fail("json_equal fails for two inequal objects");
+
+    json_decref(object1);
+    json_decref(object2);
+}
+
+static void test_equal_complex()
+{
+    json_t *value1, *value2;
+
+    const char *complex_json =
+"{"
+"    \"integer\": 1, "
+"    \"real\": 3.141592, "
+"    \"string\": \"foobar\", "
+"    \"true\": true, "
+"    \"object\": {"
+"        \"array-in-object\": [1,true,\"foo\",{}],"
+"        \"object-in-object\": {\"foo\": \"bar\"}"
+"    },"
+"    \"array\": [\"foo\", false, null, 1.234]"
+"}";
+
+    value1 = json_loads(complex_json, 0, NULL);
+    value2 = json_loads(complex_json, 0, NULL);
+    if(!value1 || !value2)
+        fail("unable to parse JSON");
+    if(!json_equal(value1, value2))
+        fail("json_equal fails for two inequal strings");
+
+    json_decref(value1);
+    json_decref(value2);
+
+    /* TODO: There's no negative test case here */
+}
+
+int main()
+{
+    test_equal_simple();
+    test_equal_array();
+    test_equal_object();
+    test_equal_complex();
+    return 0;
+}
diff --git a/lang/c/jansson/test/suites/api/test_load.c b/lang/c/jansson/test/suites/api/test_load.c
new file mode 100644
index 0000000..2a60eb3
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/test_load.c
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <jansson.h>
+#include <string.h>
+#include "util.h"
+
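+/* On failure the decoding functions fill in the json_error_t
+ * argument: error.text holds a message and error.line the position
+ * in the input, or -1 when the error is not tied to any position. */
+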
+static void file_not_found()
+{
+    json_t *json;
+    json_error_t error;
+
+    json = json_load_file("/path/to/nonexistent/file.json", 0, &error);
+    if(json)
+        fail("json_load_file returned non-NULL for a nonexistent file");
+    if(error.line != -1)
+        fail("json_load_file returned an invalid line number");
+    if(strcmp(error.text, "unable to open /path/to/nonexistent/file.json: No such file or directory") != 0)
+        fail("json_load_file returned an invalid error message");
+}
+
+static void reject_duplicates()
+{
+    json_error_t error;
+
+    if(json_loads("{\"foo\": 1, \"foo\": 2}", JSON_REJECT_DUPLICATES, &error))
+        fail("json_loads did not detect a duplicate key");
+    check_error("duplicate object key near '\"foo\"'", "<string>", 1, 16, 16);
+}
+
+static void disable_eof_check()
+{
+    json_error_t error;
+    json_t *json;
+
+    const char *text = "{\"foo\": 1} garbage";
+
+    if(json_loads(text, 0, &error))
+        fail("json_loads did not detect garbage after JSON text");
+    check_error("end of file expected near 'garbage'", "<string>", 1, 18, 18);
+
+    json = json_loads(text, JSON_DISABLE_EOF_CHECK, &error);
+    if(!json)
+        fail("json_loads failed with JSON_DISABLE_EOF_CHECK");
+
+    json_decref(json);
+}
+
+int main()
+{
+    file_not_found();
+    reject_duplicates();
+    disable_eof_check();
+
+    return 0;
+}
diff --git a/lang/c/jansson/test/suites/api/test_loadb.c b/lang/c/jansson/test/suites/api/test_loadb.c
new file mode 100644
index 0000000..27ea575
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/test_loadb.c
@@ -0,0 +1,38 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <jansson.h>
+#include <string.h>
+#include "util.h"
+
+int main()
+{
+    json_t *json;
+    json_error_t error;
+    const char str[] = "[\"A\", {\"B\": \"C\"}, 1, 2, 3]garbage";
+    size_t len = strlen(str) - strlen("garbage");
+
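+    /* json_loadb() parses exactly len bytes of a buffer that need not
+     * be null-terminated, so the trailing "garbage" is never seen. */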
+    json = json_loadb(str, len, 0, &error);
+    if(!json) {
+        fail("json_loadb failed on a valid JSON buffer");
+    }
+    json_decref(json);
+
+    json = json_loadb(str, len - 1, 0, &error);
+    if (json) {
+        json_decref(json);
+        fail("json_loadb should have failed on an incomplete buffer, but it didn't");
+    }
+    if(error.line != 1) {
+        fail("json_loadb returned an invalid line number on fail");
+    }
+    if(strcmp(error.text, "']' expected near end of file") != 0) {
+        fail("json_loadb returned an invalid error message for an unclosed top-level array");
+    }
+
+    return 0;
+}
diff --git a/lang/c/jansson/test/suites/api/test_memory_funcs.c b/lang/c/jansson/test/suites/api/test_memory_funcs.c
new file mode 100644
index 0000000..1a6681f
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/test_memory_funcs.c
@@ -0,0 +1,84 @@
+#include <string.h>
+#include <jansson.h>
+
+#include "util.h"
+
+static int malloc_called = 0;
+static int free_called = 0;
+
+/* helper */
+static void create_and_free_complex_object()
+{
+    json_t *obj;
+
+    obj = json_pack("{s:i,s:n,s:b,s:b,s:{s:s},s:[i,i,i]",
+                    "foo", 42,
+                    "bar",
+                    "baz", 1,
+                    "qux", 0,
+                    "alice", "bar", "baz",
+                    "bob", 9, 8, 7);
+
+    json_decref(obj);
+}
+
+static void *my_malloc(size_t size)
+{
+    malloc_called += 1;
+    return malloc(size);
+}
+
+static void my_free(void *ptr)
+{
+    free_called += 1;
+    free(ptr);
+}
+
+static void test_simple()
+{
+    json_set_alloc_funcs(my_malloc, my_free);
+    create_and_free_complex_object();
+
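+    /* 27 is the allocation count this particular object graph happens
+     * to need; it depends on jansson internals, so the number may have
+     * to be adjusted if the implementation changes. */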
+    if(malloc_called != 27 || free_called != 27)
+        fail("Custom allocation failed");
+}
+
+
+/*
+  Test the secure memory functions code given in the API reference
+  documentation, but by using plain memset instead of
+  guaranteed_memset().
+*/
+
+static void *secure_malloc(size_t size)
+{
+    /* Store the memory area size in the beginning of the block. The
+       8-byte header assumes sizeof(size_t) <= 8 and that 8-byte
+       alignment is sufficient; pointer arithmetic is done on char *
+       to stay within standard C. */
+    char *ptr = malloc(size + 8);
+    *((size_t *)ptr) = size;
+    return ptr + 8;
+}
+
+static void secure_free(void *ptr)
+{
+    char *block = (char *)ptr - 8;
+    size_t size = *((size_t *)block);
+
+    /*guaranteed_*/memset(block, 0, size);
+    free(block);
+}
+
+static void test_secure_funcs(void)
+{
+    json_set_alloc_funcs(secure_malloc, secure_free);
+    create_and_free_complex_object();
+}
+
+int main()
+{
+    test_simple();
+    test_secure_funcs();
+
+    return 0;
+}
diff --git a/lang/c/jansson/test/suites/api/test_number.c b/lang/c/jansson/test/suites/api/test_number.c
new file mode 100644
index 0000000..ff0741e
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/test_number.c
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <jansson.h>
+#include "util.h"
+
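+/* json_integer_value()/json_real_value() read the exact stored value,
+ * while json_number_value() accepts both integers and reals and
+ * returns the result as a double. */
+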
+int main()
+{
+    json_t *integer, *real;
+    int i;
+    double d;
+
+    integer = json_integer(5);
+    real = json_real(100.1);
+
+    if(!integer)
+        fail("unable to create integer");
+    if(!real)
+        fail("unable to create real");
+
+    i = json_integer_value(integer);
+    if(i != 5)
+        fail("wrong integer value");
+
+    d = json_real_value(real);
+    if(d != 100.1)
+        fail("wrong real value");
+
+    d = json_number_value(integer);
+    if(d != 5.0)
+        fail("wrong number value");
+    d = json_number_value(real);
+    if(d != 100.1)
+        fail("wrong number value");
+
+    json_decref(integer);
+    json_decref(real);
+
+    return 0;
+}
diff --git a/lang/c/jansson/test/suites/api/test_object.c b/lang/c/jansson/test/suites/api/test_object.c
new file mode 100644
index 0000000..0499f76
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/test_object.c
@@ -0,0 +1,451 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <jansson.h>
+#include <string.h>
+#include "util.h"
+
+static void test_clear()
+{
+    json_t *object, *ten;
+
+    object = json_object();
+    ten = json_integer(10);
+
+    if(!object)
+        fail("unable to create object");
+    if(!ten)
+        fail("unable to create integer");
+
+    if(json_object_set(object, "a", ten) ||
+       json_object_set(object, "b", ten) ||
+       json_object_set(object, "c", ten) ||
+       json_object_set(object, "d", ten) ||
+       json_object_set(object, "e", ten))
+        fail("unable to set value");
+
+    if(json_object_size(object) != 5)
+        fail("invalid size");
+
+    json_object_clear(object);
+
+    if(json_object_size(object) != 0)
+        fail("invalid size after clear");
+
+    json_decref(ten);
+    json_decref(object);
+}
+
+static void test_update()
+{
+    json_t *object, *other, *nine, *ten;
+
+    object = json_object();
+    other = json_object();
+
+    nine = json_integer(9);
+    ten = json_integer(10);
+
+    if(!object || !other)
+        fail("unable to create object");
+    if(!nine || !ten)
+        fail("unable to create integer");
+
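+    /* json_object_update(object, other) copies every key from other
+     * into object, adding references and overwriting existing keys,
+     * while leaving other itself unchanged; the size and identity
+     * checks below verify exactly that. */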
+
+    /* update an empty object with an empty object */
+
+    if(json_object_update(object, other))
+        fail("unable to update an emtpy object with an empty object");
+
+    if(json_object_size(object) != 0)
+        fail("invalid size after update");
+
+    if(json_object_size(other) != 0)
+        fail("invalid size for updater after update");
+
+
+    /* update an empty object with a nonempty object */
+
+    if(json_object_set(other, "a", ten) ||
+       json_object_set(other, "b", ten) ||
+       json_object_set(other, "c", ten) ||
+       json_object_set(other, "d", ten) ||
+       json_object_set(other, "e", ten))
+        fail("unable to set value");
+
+    if(json_object_update(object, other))
+        fail("unable to update an empty object");
+
+    if(json_object_size(object) != 5)
+        fail("invalid size after update");
+
+    if(json_object_get(object, "a") != ten ||
+       json_object_get(object, "b") != ten ||
+       json_object_get(object, "c") != ten ||
+       json_object_get(object, "d") != ten ||
+       json_object_get(object, "e") != ten)
+        fail("update works incorrectly");
+
+
+    /* perform the same update again */
+
+    if(json_object_update(object, other))
+        fail("unable to update an empty object");
+
+    if(json_object_size(object) != 5)
+        fail("invalid size after update");
+
+    if(json_object_get(object, "a") != ten ||
+       json_object_get(object, "b") != ten ||
+       json_object_get(object, "c") != ten ||
+       json_object_get(object, "d") != ten ||
+       json_object_get(object, "e") != ten)
+        fail("update works incorrectly");
+
+
+    /* update a nonempty object with a nonempty object with both old
+       and new keys */
+
+    if(json_object_clear(other))
+        fail("clear failed");
+
+    if(json_object_set(other, "a", nine) ||
+       json_object_set(other, "b", nine) ||
+       json_object_set(other, "f", nine) ||
+       json_object_set(other, "g", nine) ||
+       json_object_set(other, "h", nine))
+        fail("unable to set value");
+
+    if(json_object_update(object, other))
+        fail("unable to update a nonempty object");
+
+    if(json_object_size(object) != 8)
+        fail("invalid size after update");
+
+    if(json_object_get(object, "a") != nine ||
+       json_object_get(object, "b") != nine ||
+       json_object_get(object, "f") != nine ||
+       json_object_get(object, "g") != nine ||
+       json_object_get(object, "h") != nine)
+        fail("update works incorrectly");
+
+    json_decref(nine);
+    json_decref(ten);
+    json_decref(other);
+    json_decref(object);
+}
+
+static void test_circular()
+{
+    json_t *object1, *object2;
+
+    object1 = json_object();
+    object2 = json_object();
+    if(!object1 || !object2)
+        fail("unable to create object");
+
+    /* the simple case is checked */
+    if(json_object_set(object1, "a", object1) == 0)
+        fail("able to set self");
+
+    /* create circular references */
+    if(json_object_set(object1, "a", object2) ||
+       json_object_set(object2, "a", object1))
+        fail("unable to set value");
+
+    /* circularity is detected when dumping */
+    if(json_dumps(object1, 0) != NULL)
+        fail("able to dump circulars");
+
+    /* decref twice to deal with the circular references */
+    json_decref(object1);
+    json_decref(object2);
+    json_decref(object1);
+}
+
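+/* The *_nocheck setters skip UTF-8 validation of the key, so keys
+ * that json_object_set() would reject (see test_misc below) are
+ * accepted here. */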
+static void test_set_nocheck()
+{
+    json_t *object, *string;
+
+    object = json_object();
+    string = json_string("bar");
+
+    if(!object)
+        fail("unable to create object");
+    if(!string)
+        fail("unable to create string");
+
+    if(json_object_set_nocheck(object, "foo", string))
+        fail("json_object_set_nocheck failed");
+    if(json_object_get(object, "foo") != string)
+        fail("json_object_get after json_object_set_nocheck failed");
+
+    /* invalid UTF-8 in key */
+    if(json_object_set_nocheck(object, "a\xefz", string))
+        fail("json_object_set_nocheck failed for invalid UTF-8");
+    if(json_object_get(object, "a\xefz") != string)
+        fail("json_object_get after json_object_set_nocheck failed");
+
+    if(json_object_set_new_nocheck(object, "bax", json_integer(123)))
+        fail("json_object_set_new_nocheck failed");
+    if(json_integer_value(json_object_get(object, "bax")) != 123)
+        fail("json_object_get after json_object_set_new_nocheck failed");
+
+    /* invalid UTF-8 in key */
+    if(json_object_set_new_nocheck(object, "asdf\xfe", json_integer(321)))
+        fail("json_object_set_new_nocheck failed for invalid UTF-8");
+    if(json_integer_value(json_object_get(object, "asdf\xfe")) != 321)
+        fail("json_object_get after json_object_set_new_nocheck failed");
+
+    json_decref(string);
+    json_decref(object);
+}
+
+static void test_iterators()
+{
+    json_t *object, *foo, *bar, *baz;
+    void *iter;
+
+    if(json_object_iter(NULL))
+        fail("able to iterate over NULL");
+
+    if(json_object_iter_next(NULL, NULL))
+        fail("able to increment an iterator on a NULL object");
+
+    object = json_object();
+    foo = json_string("foo");
+    bar = json_string("bar");
+    baz = json_string("baz");
+    if(!object || !foo || !bar || !baz)
+        fail("unable to create values");
+
+    if(json_object_iter_next(object, NULL))
+        fail("able to increment a NULL iterator");
+
+    if(json_object_set(object, "a", foo) ||
+       json_object_set(object, "b", bar) ||
+       json_object_set(object, "c", baz))
+        fail("unable to populate object");
+
+    iter = json_object_iter(object);
+    if(!iter)
+        fail("unable to get iterator");
+    if(strcmp(json_object_iter_key(iter), "a"))
+        fail("iterating failed: wrong key");
+    if(json_object_iter_value(iter) != foo)
+        fail("iterating failed: wrong value");
+
+    iter = json_object_iter_next(object, iter);
+    if(!iter)
+        fail("unable to increment iterator");
+    if(strcmp(json_object_iter_key(iter), "b"))
+        fail("iterating failed: wrong key");
+    if(json_object_iter_value(iter) != bar)
+        fail("iterating failed: wrong value");
+
+    iter = json_object_iter_next(object, iter);
+    if(!iter)
+        fail("unable to increment iterator");
+    if(strcmp(json_object_iter_key(iter), "c"))
+        fail("iterating failed: wrong key");
+    if(json_object_iter_value(iter) != baz)
+        fail("iterating failed: wrong value");
+
+    if(json_object_iter_next(object, iter) != NULL)
+        fail("able to iterate over the end");
+
+    if(json_object_iter_at(object, "foo"))
+        fail("json_object_iter_at() succeeds for non-existent key");
+
+    iter = json_object_iter_at(object, "b");
+    if(!iter)
+        fail("json_object_iter_at() fails for an existing key");
+
+    if(strcmp(json_object_iter_key(iter), "b"))
+        fail("iterating failed: wrong key");
+    if(json_object_iter_value(iter) != bar)
+        fail("iterating failed: wrong value");
+
+    iter = json_object_iter_next(object, iter);
+    if(!iter)
+        fail("unable to increment iterator");
+    if(strcmp(json_object_iter_key(iter), "c"))
+        fail("iterating failed: wrong key");
+    if(json_object_iter_value(iter) != baz)
+        fail("iterating failed: wrong value");
+
+    if(json_object_iter_set(object, iter, bar))
+        fail("unable to set value at iterator");
+
+    if(strcmp(json_object_iter_key(iter), "c"))
+        fail("json_object_iter_key() fails after json_object_iter_set()");
+    if(json_object_iter_value(iter) != bar)
+        fail("json_object_iter_value() fails after json_object_iter_set()");
+    if(json_object_get(object, "c") != bar)
+        fail("json_object_get() fails after json_object_iter_set()");
+
+    json_decref(object);
+    json_decref(foo);
+    json_decref(bar);
+    json_decref(baz);
+}
+
+static void test_misc()
+{
+    json_t *object, *string, *other_string, *value;
+
+    object = json_object();
+    string = json_string("test");
+    other_string = json_string("other");
+
+    if(!object)
+        fail("unable to create object");
+    if(!string || !other_string)
+        fail("unable to create string");
+
+    if(json_object_get(object, "a"))
+        fail("value for nonexisting key");
+
+    if(json_object_set(object, "a", string))
+        fail("unable to set value");
+
+    if(!json_object_set(object, NULL, string))
+        fail("able to set NULL key");
+
+    if(!json_object_set(object, "a", NULL))
+        fail("able to set NULL value");
+
+    /* invalid UTF-8 in key */
+    if(!json_object_set(object, "a\xefz", string))
+        fail("able to set invalid unicode key");
+
+    value = json_object_get(object, "a");
+    if(!value)
+        fail("no value for existing key");
+    if(value != string)
+        fail("got different value than what was added");
+
+    /* "a", "lp" and "px" collide in a five-bucket hashtable */
+    if(json_object_set(object, "b", string) ||
+       json_object_set(object, "lp", string) ||
+       json_object_set(object, "px", string))
+        fail("unable to set value");
+
+    value = json_object_get(object, "a");
+    if(!value)
+        fail("no value for existing key");
+    if(value != string)
+        fail("got different value than what was added");
+
+    if(json_object_set(object, "a", other_string))
+        fail("unable to replace an existing key");
+
+    value = json_object_get(object, "a");
+    if(!value)
+        fail("no value for existing key");
+    if(value != other_string)
+        fail("got different value than what was set");
+
+    if(!json_object_del(object, "nonexisting"))
+        fail("able to delete a nonexisting key");
+
+    if(json_object_del(object, "px"))
+        fail("unable to delete an existing key");
+
+    if(json_object_del(object, "a"))
+        fail("unable to delete an existing key");
+
+    if(json_object_del(object, "lp"))
+        fail("unable to delete an existing key");
+
+
+    /* add many keys to initiate rehashing */
+
+    if(json_object_set(object, "a", string))
+        fail("unable to set value");
+
+    if(json_object_set(object, "lp", string))
+        fail("unable to set value");
+
+    if(json_object_set(object, "px", string))
+        fail("unable to set value");
+
+    if(json_object_set(object, "c", string))
+        fail("unable to set value");
+
+    if(json_object_set(object, "d", string))
+        fail("unable to set value");
+
+    if(json_object_set(object, "e", string))
+        fail("unable to set value");
+
+
+    if(json_object_set_new(object, "foo", json_integer(123)))
+        fail("unable to set new value");
+
+    value = json_object_get(object, "foo");
+    if(!json_is_integer(value) || json_integer_value(value) != 123)
+        fail("json_object_set_new works incorrectly");
+
+    if(!json_object_set_new(object, NULL, json_integer(432)))
+        fail("able to set_new NULL key");
+
+    if(!json_object_set_new(object, "foo", NULL))
+        fail("able to set_new NULL value");
+
+    json_decref(string);
+    json_decref(other_string);
+    json_decref(object);
+}
+
+static void test_preserve_order()
+{
+    json_t *object;
+    char *result;
+
+    const char *expected = "{\"foobar\": 1, \"bazquux\": 6, \"lorem ipsum\": 3, \"sit amet\": 5, \"helicopter\": 7}";
+
+    object = json_object();
+
+    json_object_set_new(object, "foobar", json_integer(1));
+    json_object_set_new(object, "bazquux", json_integer(2));
+    json_object_set_new(object, "lorem ipsum", json_integer(3));
+    json_object_set_new(object, "dolor", json_integer(4));
+    json_object_set_new(object, "sit amet", json_integer(5));
+
+    /* changing a value should preserve the order */
+    json_object_set_new(object, "bazquux", json_integer(6));
+
+    /* deletion shouldn't change the order of others */
+    json_object_del(object, "dolor");
+
+    /* add a new item just to make sure */
+    json_object_set_new(object, "helicopter", json_integer(7));
+
+    result = json_dumps(object, JSON_PRESERVE_ORDER);
+
+    if(strcmp(expected, result) != 0) {
+        fprintf(stderr, "%s != %s", expected, result);
+        fail("JSON_PRESERVE_ORDER doesn't work");
+    }
+
+    free(result);
+    json_decref(object);
+}
+
+int main()
+{
+    test_misc();
+    test_clear();
+    test_update();
+    test_circular();
+    test_set_nocheck();
+    test_iterators();
+    test_preserve_order();
+
+    return 0;
+}
diff --git a/lang/c/jansson/test/suites/api/test_pack.c b/lang/c/jansson/test/suites/api/test_pack.c
new file mode 100644
index 0000000..ccab051
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/test_pack.c
@@ -0,0 +1,232 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ * Copyright (c) 2010-2011 Graeme Smecher <graeme.smecher at mail.mcgill.ca>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <string.h>
+#include <jansson.h>
+#include <stdio.h>
+#include "util.h"
+
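+/* Format characters used below: s = string, i = int, I = json_int_t,
+ * f = double, b = boolean, n = null, o = json_t * (steals the
+ * reference), O = json_t * (adds a reference); { } and [ ] build
+ * objects and arrays. */
+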
+int main()
+{
+    json_t *value;
+    int i;
+    json_error_t error;
+
+    /*
+     * Simple, valid json_pack cases
+     */
+
+    /* true */
+    value = json_pack("b", 1);
+    if(!json_is_true(value))
+        fail("json_pack boolean failed");
+    if(value->refcount != (ssize_t)-1)
+        fail("json_pack boolean refcount failed");
+    json_decref(value);
+
+    /* false */
+    value = json_pack("b", 0);
+    if(!json_is_false(value))
+        fail("json_pack boolean failed");
+    if(value->refcount != (ssize_t)-1)
+        fail("json_pack boolean refcount failed");
+    json_decref(value);
+
+    /* null */
+    value = json_pack("n");
+    if(!json_is_null(value))
+        fail("json_pack null failed");
+    if(value->refcount != (ssize_t)-1)
+        fail("json_pack null refcount failed");
+    json_decref(value);
+
+    /* integer */
+    value = json_pack("i", 1);
+    if(!json_is_integer(value) || json_integer_value(value) != 1)
+        fail("json_pack integer failed");
+    if(value->refcount != (ssize_t)1)
+        fail("json_pack integer refcount failed");
+    json_decref(value);
+
+    /* integer from json_int_t */
+    value = json_pack("I", (json_int_t)555555);
+    if(!json_is_integer(value) || json_integer_value(value) != 555555)
+        fail("json_pack json_int_t failed");
+    if(value->refcount != (ssize_t)1)
+        fail("json_pack integer refcount failed");
+    json_decref(value);
+
+    /* real */
+    value = json_pack("f", 1.0);
+    if(!json_is_real(value) || json_real_value(value) != 1.0)
+        fail("json_pack real failed");
+    if(value->refcount != (ssize_t)1)
+        fail("json_pack real refcount failed");
+    json_decref(value);
+
+    /* string */
+    value = json_pack("s", "test");
+    if(!json_is_string(value) || strcmp("test", json_string_value(value)))
+        fail("json_pack string failed");
+    if(value->refcount != (ssize_t)1)
+        fail("json_pack string refcount failed");
+    json_decref(value);
+
+    /* empty object */
+    value = json_pack("{}", 1.0);
+    if(!json_is_object(value) || json_object_size(value) != 0)
+        fail("json_pack empty object failed");
+    if(value->refcount != (ssize_t)1)
+        fail("json_pack empty object refcount failed");
+    json_decref(value);
+
+    /* empty list */
+    value = json_pack("[]", 1.0);
+    if(!json_is_array(value) || json_array_size(value) != 0)
+        fail("json_pack empty list failed");
+    if(value->refcount != (ssize_t)1)
+        fail("json_pack empty list failed");
+    json_decref(value);
+
+    /* non-incref'd object */
+    value = json_pack("o", json_integer(1));
+    if(!json_is_integer(value) || json_integer_value(value) != 1)
+        fail("json_pack object failed");
+    if(value->refcount != (ssize_t)1)
+        fail("json_pack integer refcount failed");
+    json_decref(value);
+
+    /* incref'd object */
+    value = json_pack("O", json_integer(1));
+    if(!json_is_integer(value) || json_integer_value(value) != 1)
+        fail("json_pack object failed");
+    if(value->refcount != (ssize_t)2)
+        fail("json_pack integer refcount failed");
+    json_decref(value);
+    json_decref(value);
+
+    /* simple object */
+    value = json_pack("{s:[]}", "foo");
+    if(!json_is_object(value) || json_object_size(value) != 1)
+        fail("json_pack array failed");
+    if(!json_is_array(json_object_get(value, "foo")))
+        fail("json_pack array failed");
+    if(json_object_get(value, "foo")->refcount != (ssize_t)1)
+        fail("json_pack object refcount failed");
+    json_decref(value);
+
+    /* simple array */
+    value = json_pack("[i,i,i]", 0, 1, 2);
+    if(!json_is_array(value) || json_array_size(value) != 3)
+        fail("json_pack object failed");
+    for(i=0; i<3; i++)
+    {
+        if(!json_is_integer(json_array_get(value, i)) ||
+           json_integer_value(json_array_get(value, i)) != i)
+            fail("json_pack integer array failed");
+    }
+    json_decref(value);
+
+    /* Whitespace; regular string */
+    value = json_pack(" s ", "test");
+    if(!json_is_string(value) || strcmp("test", json_string_value(value)))
+        fail("json_pack string (with whitespace) failed");
+    json_decref(value);
+
+    /* Whitespace; empty array */
+    value = json_pack("[ ]");
+    if(!json_is_array(value) || json_array_size(value) != 0)
+        fail("json_pack empty array (with whitespace) failed");
+    json_decref(value);
+
+    /* Whitespace; array */
+    value = json_pack("[ i , i,  i ] ", 1, 2, 3);
+    if(!json_is_array(value) || json_array_size(value) != 3)
+        fail("json_pack array (with whitespace) failed");
+    json_decref(value);
+
+    /*
+     * Invalid cases
+     */
+
+    /* newline in format string */
+    if(json_pack_ex(&error, 0, "{\n\n1"))
+        fail("json_pack failed to catch invalid format '1'");
+    check_error("Expected format 's', got '1'", "<format>", 3, 1, 4);
+
+    /* mismatched open/close array/object */
+    if(json_pack_ex(&error, 0, "[}"))
+        fail("json_pack failed to catch mismatched '}'");
+    check_error("Unexpected format character '}'", "<format>", 1, 2, 2);
+
+    if(json_pack_ex(&error, 0, "{]"))
+        fail("json_pack failed to catch mismatched ']'");
+    check_error("Expected format 's', got ']'", "<format>", 1, 2, 2);
+
+    /* missing close array */
+    if(json_pack_ex(&error, 0, "["))
+        fail("json_pack failed to catch missing ']'");
+    check_error("Unexpected end of format string", "<format>", 1, 2, 2);
+
+    /* missing close object */
+    if(json_pack_ex(&error, 0, "{"))
+        fail("json_pack failed to catch missing '}'");
+    check_error("Unexpected end of format string", "<format>", 1, 2, 2);
+
+    /* garbage after format string */
+    if(json_pack_ex(&error, 0, "[i]a", 42))
+        fail("json_pack failed to catch garbage after format string");
+    check_error("Garbage after format string", "<format>", 1, 4, 4);
+
+    if(json_pack_ex(&error, 0, "ia", 42))
+        fail("json_pack failed to catch garbage after format string");
+    check_error("Garbage after format string", "<format>", 1, 2, 2);
+
+    /* NULL string */
+    if(json_pack_ex(&error, 0, "s", NULL))
+        fail("json_pack failed to catch null argument string");
+    check_error("NULL string argument", "<args>", 1, 1, 1);
+
+    /* NULL format */
+    if(json_pack_ex(&error, 0, NULL))
+        fail("json_pack failed to catch NULL format string");
+    check_error("NULL or empty format string", "<format>", -1, -1, 0);
+
+    /* NULL key */
+    if(json_pack_ex(&error, 0, "{s:i}", NULL, 1))
+        fail("json_pack failed to catch NULL key");
+    check_error("NULL object key", "<args>", 1, 2, 2);
+
+    /* More complicated checks for row/columns */
+    if(json_pack_ex(&error, 0, "{ {}: s }", "foo"))
+        fail("json_pack failed to catch object as key");
+    check_error("Expected format 's', got '{'", "<format>", 1, 3, 3);
+
+    /* Complex object */
+    if(json_pack_ex(&error, 0, "{ s: {},  s:[ii{} }", "foo", "bar", 12, 13))
+        fail("json_pack failed to catch missing ]");
+    check_error("Unexpected format character '}'", "<format>", 1, 19, 19);
+
+    /* Complex array */
+    if(json_pack_ex(&error, 0, "[[[[[   [[[[[  [[[[ }]]]] ]]]] ]]]]]"))
+        fail("json_pack failed to catch extra }");
+    check_error("Unexpected format character '}'", "<format>", 1, 21, 21);
+
+    /* Invalid UTF-8 in object key */
+    if(json_pack_ex(&error, 0, "{s:i}", "\xff\xff", 42))
+        fail("json_pack failed to catch invalid UTF-8 in an object key");
+    check_error("Invalid UTF-8 in object key", "<args>", 1, 2, 2);
+
+    /* Invalid UTF-8 in a string */
+    if(json_pack_ex(&error, 0, "{s:s}", "foo", "\xff\xff"))
+        fail("json_pack failed to catch invalid UTF-8 in a string");
+    check_error("Invalid UTF-8 string", "<args>", 1, 4, 4);
+
+    return 0;
+}
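Editorial aside (not part of the imported upstream sources): the cases above
exercise jansson's pack format language, in which each format character
consumes C arguments and produces a JSON value, with json_pack_ex() reporting
failures through a json_error_t. A minimal sketch of that usage pattern,
assuming only the documented API; the function name make_point is illustrative:

    #include <stdio.h>
    #include <jansson.h>

    static json_t *make_point(double x, double y)
    {
        json_error_t error;
        /* "{s:f, s:f}" builds {"x": x, "y": y}; on a bad format or
           argument, json_pack_ex() returns NULL and fills in error. */
        json_t *point = json_pack_ex(&error, 0, "{s:f, s:f}", "x", x, "y", y);
        if(!point)
            fprintf(stderr, "pack failed: %s (column %d)\n",
                    error.text, error.column);
        return point; /* caller owns one reference; release with json_decref() */
    }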
diff --git a/lang/c/jansson/test/suites/api/test_simple.c b/lang/c/jansson/test/suites/api/test_simple.c
new file mode 100644
index 0000000..8c71329
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/test_simple.c
@@ -0,0 +1,185 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <string.h>
+#include <jansson.h>
+#include "util.h"
+
+/* Call the simple functions not covered by other tests of the public API */
+int main()
+{
+    json_t *value;
+
+    value = json_integer(1);
+    if(json_typeof(value) != JSON_INTEGER)
+        fail("json_typeof failed");
+
+    if(json_is_object(value))
+        fail("json_is_object failed");
+
+    if(json_is_array(value))
+        fail("json_is_array failed");
+
+    if(json_is_string(value))
+        fail("json_is_string failed");
+
+    if(!json_is_integer(value))
+        fail("json_is_integer failed");
+
+    if(json_is_real(value))
+        fail("json_is_real failed");
+
+    if(!json_is_number(value))
+        fail("json_is_number failed");
+
+    if(json_is_true(value))
+        fail("json_is_true failed");
+
+    if(json_is_false(value))
+        fail("json_is_false failed");
+
+    if(json_is_boolean(value))
+        fail("json_is_boolean failed");
+
+    if(json_is_null(value))
+        fail("json_is_null failed");
+
+    json_decref(value);
+
+
+    value = json_string("foo");
+    if(!value)
+        fail("json_string failed");
+    if(strcmp(json_string_value(value), "foo"))
+        fail("invalid string value");
+
+    if(json_string_set(value, "bar"))
+        fail("json_string_set failed");
+    if(strcmp(json_string_value(value), "bar"))
+        fail("invalid string value");
+
+    json_decref(value);
+
+    value = json_string(NULL);
+    if(value)
+        fail("json_string(NULL) failed");
+
+    /* invalid UTF-8  */
+    value = json_string("a\xefz");
+    if(value)
+        fail("json_string(<invalid utf-8>) failed");
+
+    value = json_string_nocheck("foo");
+    if(!value)
+        fail("json_string_nocheck failed");
+    if(strcmp(json_string_value(value), "foo"))
+        fail("invalid string value");
+
+    if(json_string_set_nocheck(value, "bar"))
+        fail("json_string_set_nocheck failed");
+    if(strcmp(json_string_value(value), "bar"))
+        fail("invalid string value");
+
+    json_decref(value);
+
+    /* invalid UTF-8 */
+    value = json_string_nocheck("qu\xff");
+    if(!value)
+        fail("json_string_nocheck failed");
+    if(strcmp(json_string_value(value), "qu\xff"))
+        fail("invalid string value");
+
+    if(json_string_set_nocheck(value, "\xfd\xfe\xff"))
+        fail("json_string_set_nocheck failed");
+    if(strcmp(json_string_value(value), "\xfd\xfe\xff"))
+        fail("invalid string value");
+
+    json_decref(value);
+
+
+    value = json_integer(123);
+    if(!value)
+        fail("json_integer failed");
+    if(json_integer_value(value) != 123)
+        fail("invalid integer value");
+    if(json_number_value(value) != 123.0)
+        fail("invalid number value");
+
+    if(json_integer_set(value, 321))
+        fail("json_integer_set failed");
+    if(json_integer_value(value) != 321)
+        fail("invalid integer value");
+    if(json_number_value(value) != 321.0)
+        fail("invalid number value");
+
+    json_decref(value);
+
+    value = json_real(123.123);
+    if(!value)
+        fail("json_real failed");
+    if(json_real_value(value) != 123.123)
+        fail("invalid integer value");
+    if(json_number_value(value) != 123.123)
+        fail("invalid number value");
+
+    if(json_real_set(value, 321.321))
+        fail("json_real_set failed");
+    if(json_real_value(value) != 321.321)
+        fail("invalid real value");
+    if(json_number_value(value) != 321.321)
+        fail("invalid number value");
+
+    json_decref(value);
+
+    value = json_true();
+    if(!value)
+        fail("json_true failed");
+    json_decref(value);
+
+    value = json_false();
+    if(!value)
+        fail("json_false failed");
+    json_decref(value);
+
+    value = json_null();
+    if(!value)
+        fail("json_null failed");
+    json_decref(value);
+
+    /* Test reference counting on singletons (true, false, null) */
+    value = json_true();
+    if(value->refcount != (size_t)-1)
+        fail("refcounting true works incorrectly");
+    json_decref(value);
+    if(value->refcount != (size_t)-1)
+        fail("refcounting true works incorrectly");
+    json_incref(value);
+    if(value->refcount != (size_t)-1)
+        fail("refcounting true works incorrectly");
+
+    value = json_false();
+    if(value->refcount != (size_t)-1)
+        fail("refcounting false works incorrectly");
+    json_decref(value);
+    if(value->refcount != (size_t)-1)
+        fail("refcounting false works incorrectly");
+    json_incref(value);
+    if(value->refcount != (size_t)-1)
+        fail("refcounting false works incorrectly");
+
+    value = json_null();
+    if(value->refcount != (size_t)-1)
+        fail("refcounting null works incorrectly");
+    json_decref(value);
+    if(value->refcount != (size_t)-1)
+        fail("refcounting null works incorrectly");
+    json_incref(value);
+    if(value->refcount != (size_t)-1)
+        fail("refcounting null works incorrectly");
+
+    return 0;
+}
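Editorial aside (not part of the imported upstream sources): the singleton
checks above depend on json_true(), json_false() and json_null() returning
shared values whose refcount is pinned at (size_t)-1, so json_incref() and
json_decref() are no-ops for them. For contrast, a small sketch of ordinary
reference counting on a heap-allocated value:

    #include <jansson.h>

    int main(void)
    {
        json_t *flag = json_true();            /* shared singleton */
        json_t *name = json_string("example"); /* fresh value, refcount 1 */

        json_incref(name); /* a second owner: refcount 2 */
        json_decref(name); /* back to refcount 1 */
        json_decref(name); /* refcount 0: the string is freed */
        json_decref(flag); /* harmless no-op on the singleton */
        return 0;
    }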
diff --git a/lang/c/jansson/test/suites/api/test_unpack.c b/lang/c/jansson/test/suites/api/test_unpack.c
new file mode 100644
index 0000000..9426104
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/test_unpack.c
@@ -0,0 +1,341 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ * Copyright (c) 2010-2011 Graeme Smecher <graeme.smecher at mail.mcgill.ca>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#include <string.h>
+#include <jansson.h>
+#include <stdio.h>
+#include "util.h"
+
+int main()
+{
+    json_t *j, *j2;
+    int i1, i2, i3;
+    json_int_t I1;
+    int rv;
+    double f;
+    char *s;
+
+    json_error_t error;
+
+    /*
+     * Simple, valid json_unpack cases
+     */
+
+    /* true */
+    rv = json_unpack(json_true(), "b", &i1);
+    if(rv || !i1)
+        fail("json_unpack boolean failed");
+
+    /* false */
+    rv = json_unpack(json_false(), "b", &i1);
+    if(rv || i1)
+        fail("json_unpack boolean failed");
+
+    /* null */
+    if(json_unpack(json_null(), "n"))
+        fail("json_unpack null failed");
+
+    /* integer */
+    j = json_integer(42);
+    rv = json_unpack(j, "i", &i1);
+    if(rv || i1 != 42)
+        fail("json_unpack integer failed");
+    json_decref(j);
+
+    /* json_int_t */
+    j = json_integer(5555555);
+    rv = json_unpack(j, "I", &I1);
+    if(rv || I1 != 5555555)
+        fail("json_unpack json_int_t failed");
+    json_decref(j);
+
+    /* real */
+    j = json_real(1.7);
+    rv = json_unpack(j, "f", &f);
+    if(rv || f != 1.7)
+        fail("json_unpack real failed");
+    json_decref(j);
+
+    /* number */
+    j = json_integer(12345);
+    rv = json_unpack(j, "F", &f);
+    if(rv || f != 12345.0)
+        fail("json_unpack (real or) integer failed");
+    json_decref(j);
+
+    j = json_real(1.7);
+    rv = json_unpack(j, "F", &f);
+    if(rv || f != 1.7)
+        fail("json_unpack real (or integer) failed");
+    json_decref(j);
+
+    /* string */
+    j = json_string("foo");
+    rv = json_unpack(j, "s", &s);
+    if(rv || strcmp(s, "foo"))
+        fail("json_unpack string failed");
+    json_decref(j);
+
+    /* empty object */
+    j = json_object();
+    if(json_unpack(j, "{}"))
+        fail("json_unpack empty object failed");
+    json_decref(j);
+
+    /* empty list */
+    j = json_array();
+    if(json_unpack(j, "[]"))
+        fail("json_unpack empty list failed");
+    json_decref(j);
+
+    /* non-incref'd object */
+    j = json_object();
+    rv = json_unpack(j, "o", &j2);
+    if(rv || j2 != j || j->refcount != 1)
+        fail("json_unpack object failed");
+    json_decref(j);
+
+    /* incref'd object */
+    j = json_object();
+    rv = json_unpack(j, "O", &j2);
+    if(rv || j2 != j || j->refcount != 2)
+        fail("json_unpack object failed");
+    json_decref(j);
+    json_decref(j);
+
+    /* simple object */
+    j = json_pack("{s:i}", "foo", 42);
+    rv = json_unpack(j, "{s:i}", "foo", &i1);
+    if(rv || i1 != 42)
+        fail("json_unpack simple object failed");
+    json_decref(j);
+
+    /* simple array */
+    j = json_pack("[iii]", 1, 2, 3);
+    rv = json_unpack(j, "[i,i,i]", &i1, &i2, &i3);
+    if(rv || i1 != 1 || i2 != 2 || i3 != 3)
+        fail("json_unpack simple array failed");
+    json_decref(j);
+
+    /* object with many items & strict checking */
+    j = json_pack("{s:i, s:i, s:i}", "a", 1, "b", 2, "c", 3);
+    rv = json_unpack(j, "{s:i, s:i, s:i}", "a", &i1, "b", &i2, "c", &i3);
+    if(rv || i1 != 1 || i2 != 2 || i3 != 3)
+        fail("json_unpack object with many items failed");
+    json_decref(j);
+
+    /*
+     * Invalid cases
+     */
+
+    j = json_integer(42);
+    if(!json_unpack_ex(j, &error, 0, "z"))
+        fail("json_unpack succeeded with invalid format character");
+    check_error("Unexpected format character 'z'", "<format>", 1, 1, 1);
+
+    if(!json_unpack_ex(NULL, &error, 0, "[i]"))
+        fail("json_unpack succeeded with NULL root");
+    check_error("NULL root value", "<root>", -1, -1, 0);
+    json_decref(j);
+
+    /* mismatched open/close array/object */
+    j = json_pack("[]");
+    if(!json_unpack_ex(j, &error, 0, "[}"))
+        fail("json_unpack failed to catch mismatched ']'");
+    check_error("Unexpected format character '}'", "<format>", 1, 2, 2);
+    json_decref(j);
+
+    j = json_pack("{}");
+    if(!json_unpack_ex(j, &error, 0, "{]"))
+        fail("json_unpack failed to catch mismatched '}'");
+    check_error("Expected format 's', got ']'", "<format>", 1, 2, 2);
+    json_decref(j);
+
+    /* missing close array */
+    j = json_pack("[]");
+    if(!json_unpack_ex(j, &error, 0, "["))
+        fail("json_unpack failed to catch missing ']'");
+    check_error("Unexpected end of format string", "<format>", 1, 2, 2);
+    json_decref(j);
+
+    /* missing close object */
+    j = json_pack("{}");
+    if(!json_unpack_ex(j, &error, 0, "{"))
+        fail("json_unpack failed to catch missing '}'");
+    check_error("Unexpected end of format string", "<format>", 1, 2, 2);
+    json_decref(j);
+
+    /* garbage after format string */
+    j = json_pack("[i]", 42);
+    if(!json_unpack_ex(j, &error, 0, "[i]a", &i1))
+        fail("json_unpack failed to catch garbage after format string");
+    check_error("Garbage after format string", "<format>", 1, 4, 4);
+    json_decref(j);
+
+    j = json_integer(12345);
+    if(!json_unpack_ex(j, &error, 0, "ia", &i1))
+        fail("json_unpack failed to catch garbage after format string");
+    check_error("Garbage after format string", "<format>", 1, 2, 2);
+    json_decref(j);
+
+    /* NULL format string */
+    j = json_pack("[]");
+    if(!json_unpack_ex(j, &error, 0, NULL))
+        fail("json_unpack failed to catch null format string");
+    check_error("NULL or empty format string", "<format>", -1, -1, 0);
+    json_decref(j);
+
+    /* NULL string pointer */
+    j = json_string("foobie");
+    if(!json_unpack_ex(j, &error, 0, "s", NULL))
+        fail("json_unpack failed to catch null string pointer");
+    check_error("NULL string argument", "<args>", 1, 1, 1);
+    json_decref(j);
+
+    /* invalid types */
+    j = json_integer(42);
+    j2 = json_string("foo");
+    if(!json_unpack_ex(j, &error, 0, "s"))
+        fail("json_unpack failed to catch invalid type");
+    check_error("Expected string, got integer", "<validation>", 1, 1, 1);
+
+    if(!json_unpack_ex(j, &error, 0, "n"))
+        fail("json_unpack failed to catch invalid type");
+    check_error("Expected null, got integer", "<validation>", 1, 1, 1);
+
+    if(!json_unpack_ex(j, &error, 0, "b"))
+        fail("json_unpack failed to catch invalid type");
+    check_error("Expected true or false, got integer", "<validation>", 1, 1, 1);
+
+    if(!json_unpack_ex(j2, &error, 0, "i"))
+        fail("json_unpack failed to catch invalid type");
+    check_error("Expected integer, got string", "<validation>", 1, 1, 1);
+
+    if(!json_unpack_ex(j2, &error, 0, "I"))
+        fail("json_unpack failed to catch invalid type");
+    check_error("Expected integer, got string", "<validation>", 1, 1, 1);
+
+    if(!json_unpack_ex(j, &error, 0, "f"))
+        fail("json_unpack failed to catch invalid type");
+    check_error("Expected real, got integer", "<validation>", 1, 1, 1);
+
+    if(!json_unpack_ex(j2, &error, 0, "F"))
+        fail("json_unpack failed to catch invalid type");
+    check_error("Expected real or integer, got string", "<validation>", 1, 1, 1);
+
+    if(!json_unpack_ex(j, &error, 0, "[i]"))
+        fail("json_unpack failed to catch invalid type");
+    check_error("Expected array, got integer", "<validation>", 1, 1, 1);
+
+    if(!json_unpack_ex(j, &error, 0, "{si}", "foo"))
+        fail("json_unpack failed to catch invalid type");
+    check_error("Expected object, got integer", "<validation>", 1, 1, 1);
+
+    json_decref(j);
+    json_decref(j2);
+
+    /* Array index out of range */
+    j = json_pack("[i]", 1);
+    if(!json_unpack_ex(j, &error, 0, "[ii]", &i1, &i2))
+        fail("json_unpack failed to catch index out of array bounds");
+    check_error("Array index 1 out of range", "<validation>", 1, 3, 3);
+    json_decref(j);
+
+    /* NULL object key */
+    j = json_pack("{si}", "foo", 42);
+    if(!json_unpack_ex(j, &error, 0, "{si}", NULL, &i1))
+        fail("json_unpack failed to catch null string pointer");
+    check_error("NULL object key", "<args>", 1, 2, 2);
+    json_decref(j);
+
+    /* Object key not found */
+    j = json_pack("{si}", "foo", 42);
+    if(!json_unpack_ex(j, &error, 0, "{si}", "baz", &i1))
+        fail("json_unpack failed to catch null string pointer");
+    check_error("Object item not found: baz", "<validation>", 1, 3, 3);
+    json_decref(j);
+
+    /*
+     * Strict validation
+     */
+
+    j = json_pack("[iii]", 1, 2, 3);
+    rv = json_unpack(j, "[iii!]", &i1, &i2, &i3);
+    if(rv || i1 != 1 || i2 != 2 || i3 != 3)
+        fail("json_unpack array with strict validation failed");
+    json_decref(j);
+
+    j = json_pack("[iii]", 1, 2, 3);
+    if(!json_unpack_ex(j, &error, 0, "[ii!]", &i1, &i2))
+        fail("json_unpack array with strict validation failed");
+    check_error("1 array item(s) left unpacked", "<validation>", 1, 5, 5);
+    json_decref(j);
+
+    /* Like above, but with JSON_STRICT instead of '!' format */
+    j = json_pack("[iii]", 1, 2, 3);
+    if(!json_unpack_ex(j, &error, JSON_STRICT, "[ii]", &i1, &i2))
+        fail("json_unpack array with strict validation failed");
+    check_error("1 array item(s) left unpacked", "<validation>", 1, 4, 4);
+    json_decref(j);
+
+    j = json_pack("{s:s, s:i}", "foo", "bar", "baz", 42);
+    rv = json_unpack(j, "{sssi!}", "foo", &s, "baz", &i1);
+    if(rv || strcmp(s, "bar") != 0 || i1 != 42)
+        fail("json_unpack object with strict validation failed");
+    json_decref(j);
+
+    /* Unpack the same item twice */
+    j = json_pack("{s:s, s:i}", "foo", "bar", "baz", 42);
+    if(!json_unpack_ex(j, &error, 0, "{s:s,s:s!}", "foo", &s, "foo", &s))
+        fail("json_unpack object with strict validation failed");
+    check_error("1 object item(s) left unpacked", "<validation>", 1, 10, 10);
+    json_decref(j);
+
+    j = json_pack("[i,{s:i,s:n},[i,i]]", 1, "foo", 2, "bar", 3, 4);
+    if(json_unpack_ex(j, NULL, JSON_STRICT | JSON_VALIDATE_ONLY,
+                      "[i{sisn}[ii]]", "foo", "bar"))
+        fail("json_unpack complex value with strict validation failed");
+    json_decref(j);
+
+    /* ! and * must be last */
+    j = json_pack("[ii]", 1, 2);
+    if(!json_unpack_ex(j, &error, 0, "[i!i]", &i1, &i2))
+        fail("json_unpack failed to catch ! in the middle of an array");
+    check_error("Expected ']' after '!', got 'i'", "<format>", 1, 4, 4);
+
+    if(!json_unpack_ex(j, &error, 0, "[i*i]", &i1, &i2))
+        fail("json_unpack failed to catch * in the middle of an array");
+    check_error("Expected ']' after '*', got 'i'", "<format>", 1, 4, 4);
+    json_decref(j);
+
+    j = json_pack("{sssi}", "foo", "bar", "baz", 42);
+    if(!json_unpack_ex(j, &error, 0, "{ss!si}", "foo", &s, "baz", &i1))
+        fail("json_unpack failed to catch ! in the middle of an object");
+    check_error("Expected '}' after '!', got 's'", "<format>", 1, 5, 5);
+
+    if(!json_unpack_ex(j, &error, 0, "{ss*si}", "foo", &s, "baz", &i1))
+        fail("json_unpack failed to catch ! in the middle of an object");
+    check_error("Expected '}' after '*', got 's'", "<format>", 1, 5, 5);
+    json_decref(j);
+
+    /* Error in nested object */
+    j = json_pack("{s{snsn}}", "foo", "bar", "baz");
+    if(!json_unpack_ex(j, &error, 0, "{s{sn!}}", "foo", "bar"))
+        fail("json_unpack nested object with strict validation failed");
+    check_error("1 object item(s) left unpacked", "<validation>", 1, 7, 7);
+    json_decref(j);
+
+    /* Error in nested array */
+    j = json_pack("[[ii]]", 1, 2);
+    if(!json_unpack_ex(j, &error, 0, "[[i!]]", &i1))
+        fail("json_unpack nested array with strict validation failed");
+    check_error("1 array item(s) left unpacked", "<validation>", 1, 5, 5);
+    json_decref(j);
+
+    return 0;
+}
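Editorial aside (not part of the imported upstream sources): a sketch of how
the strict-validation behaviour tested above is typically applied, here to a
hypothetical configuration object (the keys host and port are illustrative,
not from the diff):

    #include <stdio.h>
    #include <jansson.h>

    static int parse_config(json_t *conf, const char **host, int *port)
    {
        json_error_t error;
        /* The trailing '!' (equivalent to the JSON_STRICT flag) makes the
           unpack fail if any object item is left unmatched. */
        if(json_unpack_ex(conf, &error, 0, "{s:s, s:i!}",
                          "host", host, "port", port)) {
            fprintf(stderr, "bad config: %s\n", error.text);
            return -1;
        }
        return 0; /* *host borrows conf's string: valid while conf lives */
    }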
diff --git a/lang/c/jansson/test/suites/api/util.h b/lang/c/jansson/test/suites/api/util.h
new file mode 100644
index 0000000..83be721
--- /dev/null
+++ b/lang/c/jansson/test/suites/api/util.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+ *
+ * Jansson is free software; you can redistribute it and/or modify
+ * it under the terms of the MIT license. See LICENSE for details.
+ */
+
+#ifndef UTIL_H
+#define UTIL_H
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <jansson.h>
+
+#define failhdr fprintf(stderr, "%s:%s:%d: ", __FILE__, __FUNCTION__, __LINE__)
+
+#define fail(msg)                                                \
+    do {                                                         \
+        failhdr;                                                 \
+        fprintf(stderr, "%s\n", msg);                            \
+        exit(1);                                                 \
+    } while(0)
+
+/* Assumes json_error_t error */
+#define check_error(text_, source_, line_, column_, position_)          \
+    do {                                                                \
+        if(strcmp(error.text, text_) != 0) {                            \
+            failhdr;                                                    \
+            fprintf(stderr, "text: \"%s\" != \"%s\"\n", error.text, text_); \
+            exit(1);                                                    \
+        }                                                               \
+        if(strcmp(error.source, source_) != 0) {                        \
+            failhdr;                                                    \
+            fprintf(stderr, "source: \"%s\" != \"%s\"\n", error.source, source_); \
+            exit(1);                                                    \
+        }                                                               \
+        if(error.line != line_) {                                       \
+            failhdr;                                                    \
+            fprintf(stderr, "line: %d != %d\n", error.line, line_);     \
+            exit(1);                                                    \
+        }                                                               \
+        if(error.column != column_) {                                   \
+            failhdr;                                                    \
+            fprintf(stderr, "column: %d != %d\n", error.column, column_); \
+            exit(1);                                                    \
+        }                                                               \
+        if(error.position != position_) {                               \
+            failhdr;                                                    \
+            fprintf(stderr, "position: %d != %d\n", error.position, position_); \
+            exit(1);                                                    \
+        }                                                               \
+    } while(0)
+
+#endif
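Editorial aside (not part of the imported upstream sources): both macros
assume a json_error_t variable named exactly `error` in the calling scope,
and fail() terminates the test binary with a diagnostic. A minimal
suite-style test using them, mirroring the missing-']' case from test_pack.c
above:

    #include <string.h>
    #include <jansson.h>
    #include "util.h"

    int main(void)
    {
        json_error_t error; /* read by name inside check_error() */

        if(json_pack_ex(&error, 0, "["))
            fail("expected an error for an unterminated array format");
        /* text, source, line, column and position must all match */
        check_error("Unexpected end of format string", "<format>", 1, 2, 2);
        return 0;
    }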
diff --git a/lang/c/jansson/test/suites/invalid-unicode/encoded-surrogate-half/error b/lang/c/jansson/test/suites/invalid-unicode/encoded-surrogate-half/error
new file mode 100644
index 0000000..762d2c4
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/encoded-surrogate-half/error
@@ -0,0 +1,2 @@
+1 2 2
+unable to decode byte 0xed near '"'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/encoded-surrogate-half/input b/lang/c/jansson/test/suites/invalid-unicode/encoded-surrogate-half/input
new file mode 100644
index 0000000..515dd93
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/encoded-surrogate-half/input
@@ -0,0 +1 @@
+["� <-- encoded surrogate half"]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-after-backslash/error b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-after-backslash/error
new file mode 100644
index 0000000..b16dc17
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-after-backslash/error
@@ -0,0 +1,2 @@
+1 3 3
+unable to decode byte 0xe5 near '"\'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-after-backslash/input b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-after-backslash/input
new file mode 100644
index 0000000..57c8bee
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-after-backslash/input
@@ -0,0 +1 @@
+["\�"]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-array/error b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-array/error
new file mode 100644
index 0000000..be15386
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-array/error
@@ -0,0 +1,2 @@
+1 1 1
+unable to decode byte 0xe5
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-array/input b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-array/input
new file mode 100644
index 0000000..ebefcd6
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-array/input
@@ -0,0 +1 @@
+[�]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-bigger-int/error b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-bigger-int/error
new file mode 100644
index 0000000..01b4476
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-bigger-int/error
@@ -0,0 +1,2 @@
+1 4 4
+unable to decode byte 0xe5 near '123'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-bigger-int/input b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-bigger-int/input
new file mode 100644
index 0000000..e512f9a
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-bigger-int/input
@@ -0,0 +1 @@
+[123�]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-escape/error b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-escape/error
new file mode 100644
index 0000000..c13583d
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-escape/error
@@ -0,0 +1,2 @@
+1 4 4
+unable to decode byte 0xe5 near '"\u'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-escape/input b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-escape/input
new file mode 100644
index 0000000..2b271b8
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-escape/input
@@ -0,0 +1 @@
+["\u�"]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-exponent/error b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-exponent/error
new file mode 100644
index 0000000..c7b20b7
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-exponent/error
@@ -0,0 +1,2 @@
+1 4 4
+unable to decode byte 0xe5 near '1e1'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-exponent/input b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-exponent/input
new file mode 100644
index 0000000..d8e83c5
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-exponent/input
@@ -0,0 +1 @@
+[1e1�]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-identifier/error b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-identifier/error
new file mode 100644
index 0000000..33dfc23
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-identifier/error
@@ -0,0 +1,2 @@
+1 2 2
+unable to decode byte 0xe5 near 'a'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-identifier/input b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-identifier/input
new file mode 100644
index 0000000..ef03851
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-identifier/input
@@ -0,0 +1 @@
+[a�]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-int/error b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-int/error
new file mode 100644
index 0000000..8f08970
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-int/error
@@ -0,0 +1,2 @@
+1 2 2
+unable to decode byte 0xe5 near '0'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-int/input b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-int/input
new file mode 100644
index 0000000..371226e
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-int/input
@@ -0,0 +1 @@
+[0�]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-real-after-e/error b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-real-after-e/error
new file mode 100644
index 0000000..b7660e3
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-real-after-e/error
@@ -0,0 +1,2 @@
+1 3 3
+unable to decode byte 0xe5 near '1e'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-real-after-e/input b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-real-after-e/input
new file mode 100644
index 0000000..17fc29c
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-real-after-e/input
@@ -0,0 +1 @@
+[1e�]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-string/error b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-string/error
new file mode 100644
index 0000000..0b7039a
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-string/error
@@ -0,0 +1,2 @@
+1 2 2
+unable to decode byte 0xe5 near '"'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-string/input b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-string/input
new file mode 100644
index 0000000..00b79c0
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/invalid-utf-8-in-string/input
@@ -0,0 +1 @@
+["� <-- invalid UTF-8"]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/lone-invalid-utf-8/error b/lang/c/jansson/test/suites/invalid-unicode/lone-invalid-utf-8/error
new file mode 100644
index 0000000..8e9a511
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/lone-invalid-utf-8/error
@@ -0,0 +1,2 @@
+1 0 0
+unable to decode byte 0xe5
diff --git a/lang/c/jansson/test/suites/invalid-unicode/lone-invalid-utf-8/input b/lang/c/jansson/test/suites/invalid-unicode/lone-invalid-utf-8/input
new file mode 100644
index 0000000..eb80796
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/lone-invalid-utf-8/input
@@ -0,0 +1 @@
+�
diff --git a/lang/c/jansson/test/suites/invalid-unicode/lone-utf-8-continuation-byte/error b/lang/c/jansson/test/suites/invalid-unicode/lone-utf-8-continuation-byte/error
new file mode 100644
index 0000000..86bbad3
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/lone-utf-8-continuation-byte/error
@@ -0,0 +1,2 @@
+1 2 2
+unable to decode byte 0x81 near '"'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/lone-utf-8-continuation-byte/input b/lang/c/jansson/test/suites/invalid-unicode/lone-utf-8-continuation-byte/input
new file mode 100644
index 0000000..62a26b6
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/lone-utf-8-continuation-byte/input
@@ -0,0 +1 @@
+["�"]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/not-in-unicode-range/error b/lang/c/jansson/test/suites/invalid-unicode/not-in-unicode-range/error
new file mode 100644
index 0000000..d07ccb3
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/not-in-unicode-range/error
@@ -0,0 +1,2 @@
+1 2 2
+unable to decode byte 0xf4 near '"'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/not-in-unicode-range/input b/lang/c/jansson/test/suites/invalid-unicode/not-in-unicode-range/input
new file mode 100644
index 0000000..1216186
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/not-in-unicode-range/input
@@ -0,0 +1 @@
+["��"]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/overlong-3-byte-encoding/error b/lang/c/jansson/test/suites/invalid-unicode/overlong-3-byte-encoding/error
new file mode 100644
index 0000000..8a05aba
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/overlong-3-byte-encoding/error
@@ -0,0 +1,2 @@
+1 2 2
+unable to decode byte 0xe0 near '"'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/overlong-3-byte-encoding/input b/lang/c/jansson/test/suites/invalid-unicode/overlong-3-byte-encoding/input
new file mode 100644
index 0000000..0bf909f
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/overlong-3-byte-encoding/input
@@ -0,0 +1 @@
+["�� <-- overlong encoding"]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/overlong-4-byte-encoding/error b/lang/c/jansson/test/suites/invalid-unicode/overlong-4-byte-encoding/error
new file mode 100644
index 0000000..7e19c5f
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/overlong-4-byte-encoding/error
@@ -0,0 +1,2 @@
+1 2 2
+unable to decode byte 0xf0 near '"'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/overlong-4-byte-encoding/input b/lang/c/jansson/test/suites/invalid-unicode/overlong-4-byte-encoding/input
new file mode 100644
index 0000000..c6b6313
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/overlong-4-byte-encoding/input
@@ -0,0 +1 @@
+["�� <-- overlong encoding"]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/overlong-ascii-encoding/error b/lang/c/jansson/test/suites/invalid-unicode/overlong-ascii-encoding/error
new file mode 100644
index 0000000..1d382ed
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/overlong-ascii-encoding/error
@@ -0,0 +1,2 @@
+1 2 2
+unable to decode byte 0xc1 near '"'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/overlong-ascii-encoding/input b/lang/c/jansson/test/suites/invalid-unicode/overlong-ascii-encoding/input
new file mode 100644
index 0000000..ef6e10a
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/overlong-ascii-encoding/input
@@ -0,0 +1 @@
+["�"]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/restricted-utf-8/error b/lang/c/jansson/test/suites/invalid-unicode/restricted-utf-8/error
new file mode 100644
index 0000000..d018f5f
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/restricted-utf-8/error
@@ -0,0 +1,2 @@
+1 2 2
+unable to decode byte 0xfd near '"'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/restricted-utf-8/input b/lang/c/jansson/test/suites/invalid-unicode/restricted-utf-8/input
new file mode 100644
index 0000000..ba60170
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/restricted-utf-8/input
@@ -0,0 +1 @@
+["�"]
diff --git a/lang/c/jansson/test/suites/invalid-unicode/run b/lang/c/jansson/test/suites/invalid-unicode/run
new file mode 100755
index 0000000..7cb3243
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/run
@@ -0,0 +1,27 @@
+#!/bin/sh
+#
+# Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+#
+# Jansson is free software; you can redistribute it and/or modify
+# it under the terms of the MIT license. See LICENSE for details.
+
+is_test() {
+    test -d $test_path
+}
+
+run_test() {
+    $json_process <$test_path/input >$test_log/stdout 2>$test_log/stderr
+    valgrind_check $test_log/stderr || return 1
+    cmp -s $test_path/error $test_log/stderr
+}
+
+show_error() {
+    valgrind_show_error && return
+
+    echo "EXPECTED ERROR:"
+    nl -bn $test_path/error
+    echo "ACTUAL ERROR:"
+    nl -bn $test_log/stderr
+}
+
+. $top_srcdir/test/scripts/run-tests.sh
diff --git a/lang/c/jansson/test/suites/invalid-unicode/truncated-utf-8/error b/lang/c/jansson/test/suites/invalid-unicode/truncated-utf-8/error
new file mode 100644
index 0000000..8a05aba
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/truncated-utf-8/error
@@ -0,0 +1,2 @@
+1 2 2
+unable to decode byte 0xe0 near '"'
diff --git a/lang/c/jansson/test/suites/invalid-unicode/truncated-utf-8/input b/lang/c/jansson/test/suites/invalid-unicode/truncated-utf-8/input
new file mode 100644
index 0000000..bce9e18
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid-unicode/truncated-utf-8/input
@@ -0,0 +1 @@
+["�� <-- truncated UTF-8"]
diff --git a/lang/c/jansson/test/suites/invalid/apostrophe/error b/lang/c/jansson/test/suites/invalid/apostrophe/error
new file mode 100644
index 0000000..79bb2a0
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/apostrophe/error
@@ -0,0 +1,2 @@
+1 2 2
+invalid token near '''
diff --git a/lang/c/jansson/test/suites/invalid/apostrophe/input b/lang/c/jansson/test/suites/invalid/apostrophe/input
new file mode 100644
index 0000000..f2dd4d2
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/apostrophe/input
@@ -0,0 +1 @@
+['
diff --git a/lang/c/jansson/test/suites/invalid/ascii-unicode-identifier/error b/lang/c/jansson/test/suites/invalid/ascii-unicode-identifier/error
new file mode 100644
index 0000000..a4d8142
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/ascii-unicode-identifier/error
@@ -0,0 +1,2 @@
+1 1 1
+'[' or '{' expected near 'a'
diff --git a/lang/c/jansson/test/suites/invalid/ascii-unicode-identifier/input b/lang/c/jansson/test/suites/invalid/ascii-unicode-identifier/input
new file mode 100644
index 0000000..c2c0208
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/ascii-unicode-identifier/input
@@ -0,0 +1 @@
+aå
diff --git a/lang/c/jansson/test/suites/invalid/brace-comma/error b/lang/c/jansson/test/suites/invalid/brace-comma/error
new file mode 100644
index 0000000..ce04621
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/brace-comma/error
@@ -0,0 +1,2 @@
+1 2 2
+string or '}' expected near ','
diff --git a/lang/c/jansson/test/suites/invalid/brace-comma/input b/lang/c/jansson/test/suites/invalid/brace-comma/input
new file mode 100644
index 0000000..74a6628
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/brace-comma/input
@@ -0,0 +1 @@
+{,
diff --git a/lang/c/jansson/test/suites/invalid/bracket-comma/error b/lang/c/jansson/test/suites/invalid/bracket-comma/error
new file mode 100644
index 0000000..ce0a912
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/bracket-comma/error
@@ -0,0 +1,2 @@
+1 2 2
+unexpected token near ','
diff --git a/lang/c/jansson/test/suites/invalid/bracket-comma/input b/lang/c/jansson/test/suites/invalid/bracket-comma/input
new file mode 100644
index 0000000..5b911f1
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/bracket-comma/input
@@ -0,0 +1 @@
+[,
diff --git a/lang/c/jansson/test/suites/invalid/bracket-one-comma/error.normal b/lang/c/jansson/test/suites/invalid/bracket-one-comma/error.normal
new file mode 100644
index 0000000..0248b11
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/bracket-one-comma/error.normal
@@ -0,0 +1,2 @@
+2 0 4
+']' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/bracket-one-comma/error.strip b/lang/c/jansson/test/suites/invalid/bracket-one-comma/error.strip
new file mode 100644
index 0000000..f89b38f
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/bracket-one-comma/error.strip
@@ -0,0 +1,2 @@
+1 3 3
+']' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/bracket-one-comma/input b/lang/c/jansson/test/suites/invalid/bracket-one-comma/input
new file mode 100644
index 0000000..874691b
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/bracket-one-comma/input
@@ -0,0 +1 @@
+[1,
diff --git a/lang/c/jansson/test/suites/invalid/empty/error b/lang/c/jansson/test/suites/invalid/empty/error
new file mode 100644
index 0000000..f45da6f
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/empty/error
@@ -0,0 +1,2 @@
+1 0 0
+'[' or '{' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/empty/input b/lang/c/jansson/test/suites/invalid/empty/input
new file mode 100644
index 0000000..e69de29
diff --git a/lang/c/jansson/test/suites/invalid/escaped-null-byte-in-string/error b/lang/c/jansson/test/suites/invalid/escaped-null-byte-in-string/error
new file mode 100644
index 0000000..9795f18
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/escaped-null-byte-in-string/error
@@ -0,0 +1,2 @@
+1 33 33
+\u0000 is not allowed
diff --git a/lang/c/jansson/test/suites/invalid/escaped-null-byte-in-string/input b/lang/c/jansson/test/suites/invalid/escaped-null-byte-in-string/input
new file mode 100644
index 0000000..22ae82b
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/escaped-null-byte-in-string/input
@@ -0,0 +1 @@
+["\u0000 (null byte not allowed)"]
diff --git a/lang/c/jansson/test/suites/invalid/extra-comma-in-array/error b/lang/c/jansson/test/suites/invalid/extra-comma-in-array/error
new file mode 100644
index 0000000..cae86c2
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/extra-comma-in-array/error
@@ -0,0 +1,2 @@
+1 4 4
+unexpected token near ']'
diff --git a/lang/c/jansson/test/suites/invalid/extra-comma-in-array/input b/lang/c/jansson/test/suites/invalid/extra-comma-in-array/input
new file mode 100644
index 0000000..e8b1a17
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/extra-comma-in-array/input
@@ -0,0 +1 @@
+[1,]
diff --git a/lang/c/jansson/test/suites/invalid/extra-comma-in-multiline-array/error b/lang/c/jansson/test/suites/invalid/extra-comma-in-multiline-array/error
new file mode 100644
index 0000000..5baeea4
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/extra-comma-in-multiline-array/error
@@ -0,0 +1,2 @@
+6 1 17
+unexpected token near ']'
diff --git a/lang/c/jansson/test/suites/invalid/extra-comma-in-multiline-array/input b/lang/c/jansson/test/suites/invalid/extra-comma-in-multiline-array/input
new file mode 100644
index 0000000..bcb2a75
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/extra-comma-in-multiline-array/input
@@ -0,0 +1,6 @@
+[1,
+2,
+3,
+4,
+5,
+]
diff --git a/lang/c/jansson/test/suites/invalid/garbage-after-newline/error b/lang/c/jansson/test/suites/invalid/garbage-after-newline/error
new file mode 100644
index 0000000..5d2dec3
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/garbage-after-newline/error
@@ -0,0 +1,2 @@
+2 3 11
+end of file expected near 'foo'
diff --git a/lang/c/jansson/test/suites/invalid/garbage-after-newline/input b/lang/c/jansson/test/suites/invalid/garbage-after-newline/input
new file mode 100644
index 0000000..3614ac7
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/garbage-after-newline/input
@@ -0,0 +1,2 @@
+[1,2,3]
+foo
diff --git a/lang/c/jansson/test/suites/invalid/garbage-at-the-end/error b/lang/c/jansson/test/suites/invalid/garbage-at-the-end/error
new file mode 100644
index 0000000..cdd8175
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/garbage-at-the-end/error
@@ -0,0 +1,2 @@
+1 10 10
+end of file expected near 'foo'
diff --git a/lang/c/jansson/test/suites/invalid/garbage-at-the-end/input b/lang/c/jansson/test/suites/invalid/garbage-at-the-end/input
new file mode 100644
index 0000000..55aee53
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/garbage-at-the-end/input
@@ -0,0 +1 @@
+[1,2,3]foo
diff --git a/lang/c/jansson/test/suites/invalid/integer-starting-with-zero/error b/lang/c/jansson/test/suites/invalid/integer-starting-with-zero/error
new file mode 100644
index 0000000..64e0536
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/integer-starting-with-zero/error
@@ -0,0 +1,2 @@
+1 2 2
+invalid token near '0'
diff --git a/lang/c/jansson/test/suites/invalid/integer-starting-with-zero/input b/lang/c/jansson/test/suites/invalid/integer-starting-with-zero/input
new file mode 100644
index 0000000..12f67e2
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/integer-starting-with-zero/input
@@ -0,0 +1 @@
+[012]
diff --git a/lang/c/jansson/test/suites/invalid/invalid-escape/error b/lang/c/jansson/test/suites/invalid/invalid-escape/error
new file mode 100644
index 0000000..d9863f7
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/invalid-escape/error
@@ -0,0 +1,2 @@
+1 4 4
+invalid escape near '"\a'
diff --git a/lang/c/jansson/test/suites/invalid/invalid-escape/input b/lang/c/jansson/test/suites/invalid/invalid-escape/input
new file mode 100644
index 0000000..64c7b70
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/invalid-escape/input
@@ -0,0 +1 @@
+["\a <-- invalid escape"]
diff --git a/lang/c/jansson/test/suites/invalid/invalid-identifier/error b/lang/c/jansson/test/suites/invalid/invalid-identifier/error
new file mode 100644
index 0000000..496c6ab
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/invalid-identifier/error
@@ -0,0 +1,2 @@
+1 5 5
+invalid token near 'troo'
diff --git a/lang/c/jansson/test/suites/invalid/invalid-identifier/input b/lang/c/jansson/test/suites/invalid/invalid-identifier/input
new file mode 100644
index 0000000..3d2860d
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/invalid-identifier/input
@@ -0,0 +1 @@
+[troo
diff --git a/lang/c/jansson/test/suites/invalid/invalid-negative-integer/error b/lang/c/jansson/test/suites/invalid/invalid-negative-integer/error
new file mode 100644
index 0000000..f2526c5
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/invalid-negative-integer/error
@@ -0,0 +1,2 @@
+1 8 8
+']' expected near 'foo'
diff --git a/lang/c/jansson/test/suites/invalid/invalid-negative-integer/input b/lang/c/jansson/test/suites/invalid/invalid-negative-integer/input
new file mode 100644
index 0000000..6196980
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/invalid-negative-integer/input
@@ -0,0 +1 @@
+[-123foo]
diff --git a/lang/c/jansson/test/suites/invalid/invalid-negative-real/error b/lang/c/jansson/test/suites/invalid/invalid-negative-real/error
new file mode 100644
index 0000000..933158a
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/invalid-negative-real/error
@@ -0,0 +1,2 @@
+1 12 12
+']' expected near 'foo'
diff --git a/lang/c/jansson/test/suites/invalid/invalid-negative-real/input b/lang/c/jansson/test/suites/invalid/invalid-negative-real/input
new file mode 100644
index 0000000..3c763d3
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/invalid-negative-real/input
@@ -0,0 +1 @@
+[-123.123foo]
diff --git a/lang/c/jansson/test/suites/invalid/invalid-second-surrogate/error b/lang/c/jansson/test/suites/invalid/invalid-second-surrogate/error
new file mode 100644
index 0000000..e5a2359
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/invalid-second-surrogate/error
@@ -0,0 +1,2 @@
+1 62 62
+invalid Unicode '\uD888\u3210'
diff --git a/lang/c/jansson/test/suites/invalid/invalid-second-surrogate/input b/lang/c/jansson/test/suites/invalid/invalid-second-surrogate/input
new file mode 100644
index 0000000..b21453f
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/invalid-second-surrogate/input
@@ -0,0 +1 @@
+["\uD888\u3210 (first surrogate and invalid second surrogate)"]
diff --git a/lang/c/jansson/test/suites/invalid/lone-open-brace/error.normal b/lang/c/jansson/test/suites/invalid/lone-open-brace/error.normal
new file mode 100644
index 0000000..00dc765
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/lone-open-brace/error.normal
@@ -0,0 +1,2 @@
+2 0 2
+string or '}' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/lone-open-brace/error.strip b/lang/c/jansson/test/suites/invalid/lone-open-brace/error.strip
new file mode 100644
index 0000000..bb1c047
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/lone-open-brace/error.strip
@@ -0,0 +1,2 @@
+1 1 1
+string or '}' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/lone-open-brace/input b/lang/c/jansson/test/suites/invalid/lone-open-brace/input
new file mode 100644
index 0000000..98232c6
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/lone-open-brace/input
@@ -0,0 +1 @@
+{
diff --git a/lang/c/jansson/test/suites/invalid/lone-open-bracket/error.normal b/lang/c/jansson/test/suites/invalid/lone-open-bracket/error.normal
new file mode 100644
index 0000000..f463928
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/lone-open-bracket/error.normal
@@ -0,0 +1,2 @@
+2 0 2
+']' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/lone-open-bracket/error.strip b/lang/c/jansson/test/suites/invalid/lone-open-bracket/error.strip
new file mode 100644
index 0000000..2bc07ea
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/lone-open-bracket/error.strip
@@ -0,0 +1,2 @@
+1 1 1
+']' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/lone-open-bracket/input b/lang/c/jansson/test/suites/invalid/lone-open-bracket/input
new file mode 100644
index 0000000..558ed37
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/lone-open-bracket/input
@@ -0,0 +1 @@
+[
diff --git a/lang/c/jansson/test/suites/invalid/lone-second-surrogate/error b/lang/c/jansson/test/suites/invalid/lone-second-surrogate/error
new file mode 100644
index 0000000..bc5f34e
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/lone-second-surrogate/error
@@ -0,0 +1,2 @@
+1 40 40
+invalid Unicode '\uDFAA'
diff --git a/lang/c/jansson/test/suites/invalid/lone-second-surrogate/input b/lang/c/jansson/test/suites/invalid/lone-second-surrogate/input
new file mode 100644
index 0000000..328e35c
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/lone-second-surrogate/input
@@ -0,0 +1 @@
+["\uDFAA (second surrogate on it's own)"]
diff --git a/lang/c/jansson/test/suites/invalid/minus-sign-without-number/error b/lang/c/jansson/test/suites/invalid/minus-sign-without-number/error
new file mode 100644
index 0000000..b3a78b9
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/minus-sign-without-number/error
@@ -0,0 +1,2 @@
+1 2 2
+invalid token near '-'
diff --git a/lang/c/jansson/test/suites/invalid/minus-sign-without-number/input b/lang/c/jansson/test/suites/invalid/minus-sign-without-number/input
new file mode 100644
index 0000000..0337883
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/minus-sign-without-number/input
@@ -0,0 +1 @@
+[-foo]
diff --git a/lang/c/jansson/test/suites/invalid/negative-integer-starting-with-zero/error b/lang/c/jansson/test/suites/invalid/negative-integer-starting-with-zero/error
new file mode 100644
index 0000000..36adc34
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/negative-integer-starting-with-zero/error
@@ -0,0 +1,2 @@
+1 3 3
+invalid token near '-0'
diff --git a/lang/c/jansson/test/suites/invalid/negative-integer-starting-with-zero/input b/lang/c/jansson/test/suites/invalid/negative-integer-starting-with-zero/input
new file mode 100644
index 0000000..6fbb7a2
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/negative-integer-starting-with-zero/input
@@ -0,0 +1 @@
+[-012]
diff --git a/lang/c/jansson/test/suites/invalid/null-byte-in-string/error b/lang/c/jansson/test/suites/invalid/null-byte-in-string/error
new file mode 100644
index 0000000..45f9bd8
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/null-byte-in-string/error
@@ -0,0 +1,2 @@
+1 12 12
+control character 0x0 near '"null byte '
diff --git a/lang/c/jansson/test/suites/invalid/null-byte-in-string/input b/lang/c/jansson/test/suites/invalid/null-byte-in-string/input
new file mode 100644
index 0000000..268d1f1
Binary files /dev/null and b/lang/c/jansson/test/suites/invalid/null-byte-in-string/input differ
diff --git a/lang/c/jansson/test/suites/invalid/null-byte-in-string/nostrip b/lang/c/jansson/test/suites/invalid/null-byte-in-string/nostrip
new file mode 100644
index 0000000..80f4bf7
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/null-byte-in-string/nostrip
@@ -0,0 +1,2 @@
+The embedded NULL byte breaks json_loads(), which is used instead of
+json_loadf() in the stripped tests.
diff --git a/lang/c/jansson/test/suites/invalid/null-byte-outside-string/error b/lang/c/jansson/test/suites/invalid/null-byte-outside-string/error
new file mode 100644
index 0000000..44d4def
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/null-byte-outside-string/error
@@ -0,0 +1,2 @@
+1 2 2
+invalid token near end of file
diff --git a/lang/c/jansson/test/suites/invalid/null-byte-outside-string/input b/lang/c/jansson/test/suites/invalid/null-byte-outside-string/input
new file mode 100644
index 0000000..aa550eb
Binary files /dev/null and b/lang/c/jansson/test/suites/invalid/null-byte-outside-string/input differ
diff --git a/lang/c/jansson/test/suites/invalid/null-byte-outside-string/nostrip b/lang/c/jansson/test/suites/invalid/null-byte-outside-string/nostrip
new file mode 100644
index 0000000..80f4bf7
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/null-byte-outside-string/nostrip
@@ -0,0 +1,2 @@
+The embedded NULL byte breaks json_loads(), which is used instead of
+json_loadf() in the stripped tests.
diff --git a/lang/c/jansson/test/suites/invalid/null/error b/lang/c/jansson/test/suites/invalid/null/error
new file mode 100644
index 0000000..1f5d464
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/null/error
@@ -0,0 +1,2 @@
+1 4 4
+'[' or '{' expected near 'null'
diff --git a/lang/c/jansson/test/suites/invalid/null/input b/lang/c/jansson/test/suites/invalid/null/input
new file mode 100644
index 0000000..19765bd
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/null/input
@@ -0,0 +1 @@
+null
diff --git a/lang/c/jansson/test/suites/invalid/object-apostrophes/error b/lang/c/jansson/test/suites/invalid/object-apostrophes/error
new file mode 100644
index 0000000..23fab01
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-apostrophes/error
@@ -0,0 +1,2 @@
+1 2 2
+string or '}' expected near '''
diff --git a/lang/c/jansson/test/suites/invalid/object-apostrophes/input b/lang/c/jansson/test/suites/invalid/object-apostrophes/input
new file mode 100644
index 0000000..52b2905
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-apostrophes/input
@@ -0,0 +1 @@
+{'a'
diff --git a/lang/c/jansson/test/suites/invalid/object-garbage-at-end/error b/lang/c/jansson/test/suites/invalid/object-garbage-at-end/error
new file mode 100644
index 0000000..06c4ec1
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-garbage-at-end/error
@@ -0,0 +1,2 @@
+1 12 12
+'}' expected near '123'
diff --git a/lang/c/jansson/test/suites/invalid/object-garbage-at-end/input b/lang/c/jansson/test/suites/invalid/object-garbage-at-end/input
new file mode 100644
index 0000000..62c19d7
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-garbage-at-end/input
@@ -0,0 +1 @@
+{"a":"a" 123}
diff --git a/lang/c/jansson/test/suites/invalid/object-in-unterminated-array/error.normal b/lang/c/jansson/test/suites/invalid/object-in-unterminated-array/error.normal
new file mode 100644
index 0000000..0248b11
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-in-unterminated-array/error.normal
@@ -0,0 +1,2 @@
+2 0 4
+']' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/object-in-unterminated-array/error.strip b/lang/c/jansson/test/suites/invalid/object-in-unterminated-array/error.strip
new file mode 100644
index 0000000..f89b38f
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-in-unterminated-array/error.strip
@@ -0,0 +1,2 @@
+1 3 3
+']' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/object-in-unterminated-array/input b/lang/c/jansson/test/suites/invalid/object-in-unterminated-array/input
new file mode 100644
index 0000000..ca9ec37
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-in-unterminated-array/input
@@ -0,0 +1 @@
+[{}
diff --git a/lang/c/jansson/test/suites/invalid/object-no-colon/error.normal b/lang/c/jansson/test/suites/invalid/object-no-colon/error.normal
new file mode 100644
index 0000000..78d84f7
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-no-colon/error.normal
@@ -0,0 +1,2 @@
+2 0 5
+':' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/object-no-colon/error.strip b/lang/c/jansson/test/suites/invalid/object-no-colon/error.strip
new file mode 100644
index 0000000..528e266
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-no-colon/error.strip
@@ -0,0 +1,2 @@
+1 4 4
+':' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/object-no-colon/input b/lang/c/jansson/test/suites/invalid/object-no-colon/input
new file mode 100644
index 0000000..107e626
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-no-colon/input
@@ -0,0 +1 @@
+{"a"
diff --git a/lang/c/jansson/test/suites/invalid/object-no-value/error.normal b/lang/c/jansson/test/suites/invalid/object-no-value/error.normal
new file mode 100644
index 0000000..47ad902
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-no-value/error.normal
@@ -0,0 +1,2 @@
+2 0 6
+unexpected token near end of file
diff --git a/lang/c/jansson/test/suites/invalid/object-no-value/error.strip b/lang/c/jansson/test/suites/invalid/object-no-value/error.strip
new file mode 100644
index 0000000..b36c5e2
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-no-value/error.strip
@@ -0,0 +1,2 @@
+1 5 5
+unexpected token near end of file
diff --git a/lang/c/jansson/test/suites/invalid/object-no-value/input b/lang/c/jansson/test/suites/invalid/object-no-value/input
new file mode 100644
index 0000000..f68f262
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-no-value/input
@@ -0,0 +1 @@
+{"a":
diff --git a/lang/c/jansson/test/suites/invalid/object-unterminated-value/error.normal b/lang/c/jansson/test/suites/invalid/object-unterminated-value/error.normal
new file mode 100644
index 0000000..2ad76d4
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-unterminated-value/error.normal
@@ -0,0 +1,2 @@
+1 7 7
+unexpected newline near '"a'
diff --git a/lang/c/jansson/test/suites/invalid/object-unterminated-value/error.strip b/lang/c/jansson/test/suites/invalid/object-unterminated-value/error.strip
new file mode 100644
index 0000000..385afb5
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-unterminated-value/error.strip
@@ -0,0 +1,2 @@
+1 7 7
+premature end of input near '"a'
diff --git a/lang/c/jansson/test/suites/invalid/object-unterminated-value/input b/lang/c/jansson/test/suites/invalid/object-unterminated-value/input
new file mode 100644
index 0000000..b854d7e
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/object-unterminated-value/input
@@ -0,0 +1 @@
+{"a":"a
diff --git a/lang/c/jansson/test/suites/invalid/real-garbage-after-e/error b/lang/c/jansson/test/suites/invalid/real-garbage-after-e/error
new file mode 100644
index 0000000..b40ffa9
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/real-garbage-after-e/error
@@ -0,0 +1,2 @@
+1 3 3
+invalid token near '1e'
diff --git a/lang/c/jansson/test/suites/invalid/real-garbage-after-e/input b/lang/c/jansson/test/suites/invalid/real-garbage-after-e/input
new file mode 100644
index 0000000..6a945ac
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/real-garbage-after-e/input
@@ -0,0 +1 @@
+[1ea]
diff --git a/lang/c/jansson/test/suites/invalid/real-negative-overflow/error b/lang/c/jansson/test/suites/invalid/real-negative-overflow/error
new file mode 100644
index 0000000..d7f8e41
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/real-negative-overflow/error
@@ -0,0 +1,2 @@
+1 15 15
+real number overflow near '-123123e100000'
diff --git a/lang/c/jansson/test/suites/invalid/real-negative-overflow/input b/lang/c/jansson/test/suites/invalid/real-negative-overflow/input
new file mode 100644
index 0000000..b5bd21c
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/real-negative-overflow/input
@@ -0,0 +1 @@
+[-123123e100000]
diff --git a/lang/c/jansson/test/suites/invalid/real-positive-overflow/error b/lang/c/jansson/test/suites/invalid/real-positive-overflow/error
new file mode 100644
index 0000000..55883c9
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/real-positive-overflow/error
@@ -0,0 +1,2 @@
+1 14 14
+real number overflow near '123123e100000'
diff --git a/lang/c/jansson/test/suites/invalid/real-positive-overflow/input b/lang/c/jansson/test/suites/invalid/real-positive-overflow/input
new file mode 100644
index 0000000..524e53b
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/real-positive-overflow/input
@@ -0,0 +1 @@
+[123123e100000]
diff --git a/lang/c/jansson/test/suites/invalid/real-truncated-at-e/error b/lang/c/jansson/test/suites/invalid/real-truncated-at-e/error
new file mode 100644
index 0000000..b40ffa9
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/real-truncated-at-e/error
@@ -0,0 +1,2 @@
+1 3 3
+invalid token near '1e'
diff --git a/lang/c/jansson/test/suites/invalid/real-truncated-at-e/input b/lang/c/jansson/test/suites/invalid/real-truncated-at-e/input
new file mode 100644
index 0000000..1d67b7b
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/real-truncated-at-e/input
@@ -0,0 +1 @@
+[1e]
diff --git a/lang/c/jansson/test/suites/invalid/real-truncated-at-point/error b/lang/c/jansson/test/suites/invalid/real-truncated-at-point/error
new file mode 100644
index 0000000..db972e8
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/real-truncated-at-point/error
@@ -0,0 +1,2 @@
+1 3 3
+invalid token near '1.'
diff --git a/lang/c/jansson/test/suites/invalid/real-truncated-at-point/input b/lang/c/jansson/test/suites/invalid/real-truncated-at-point/input
new file mode 100644
index 0000000..b652b3f
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/real-truncated-at-point/input
@@ -0,0 +1 @@
+[1.]
diff --git a/lang/c/jansson/test/suites/invalid/run b/lang/c/jansson/test/suites/invalid/run
new file mode 100755
index 0000000..f8394d7
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/run
@@ -0,0 +1,57 @@
+#!/bin/sh
+#
+# Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+#
+# Jansson is free software; you can redistribute it and/or modify
+# it under the terms of the MIT license. See LICENSE for details.
+
+is_test() {
+    test -d $test_path
+}
+
+do_run() {
+    variant=$1
+    s=".$1"
+
+    strip=0
+    if [ "$variant" = "strip" ]; then
+        # This test should not be stripped
+        [ -f $test_path/nostrip ] && return
+        strip=1
+    fi
+
+    STRIP=$strip $json_process \
+        <$test_path/input >$test_log/stdout$s 2>$test_log/stderr$s
+    valgrind_check $test_log/stderr$s || return 1
+
+    ref=error
+    [ -f $test_path/error$s ] && ref=error$s
+
+    if ! cmp -s $test_path/$ref $test_log/stderr$s; then
+        echo $variant > $test_log/variant
+        return 1
+    fi
+}
+
+run_test() {
+    do_run normal && do_run strip
+}
+
+show_error() {
+    valgrind_show_error && return
+
+    read variant < $test_log/variant
+    s=".$variant"
+
+    echo "VARIANT: $variant"
+
+    echo "EXPECTED ERROR:"
+    ref=error
+    [ -f $test_path/error$s ] && ref=error$s
+    nl -bn $test_path/$ref
+
+    echo "ACTUAL ERROR:"
+    nl -bn $test_log/stderr$s
+}
+
+. $top_srcdir/test/scripts/run-tests.sh
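
The error fixtures in this suite follow a two-line format: `<line> <column> <byte position>` on the first line and the parser message on the second, which the run script above compares against the decoder's stderr. A minimal sketch of where those fields come from, assuming only jansson's public json_loads()/json_error_t API (the json_process harness the script invokes is not shown in this diff):

    /* Sketch (not the bundled json_process harness): parse an invalid
     * document and print the same "line column position" header and
     * message that the error fixtures encode. */
    #include <stdio.h>
    #include <jansson.h>

    int main(void)
    {
        json_error_t error;
        json_t *root = json_loads("{\"a\":\"a\" 123}", 0, &error);
        if (root == NULL) {
            /* e.g. "1 12 12" then "'}' expected near '123'" */
            printf("%d %d %d\n%s\n", error.line, error.column,
                   error.position, error.text);
            return 1;
        }
        json_decref(root);
        return 0;
    }
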
diff --git a/lang/c/jansson/test/suites/invalid/tab-character-in-string/error b/lang/c/jansson/test/suites/invalid/tab-character-in-string/error
new file mode 100644
index 0000000..9e2f76e
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/tab-character-in-string/error
@@ -0,0 +1,2 @@
+1 2 2
+control character 0x9 near '"'
diff --git a/lang/c/jansson/test/suites/invalid/tab-character-in-string/input b/lang/c/jansson/test/suites/invalid/tab-character-in-string/input
new file mode 100644
index 0000000..3ebae09
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/tab-character-in-string/input
@@ -0,0 +1 @@
+["	 <-- tab character"]
diff --git a/lang/c/jansson/test/suites/invalid/too-big-negative-integer/error b/lang/c/jansson/test/suites/invalid/too-big-negative-integer/error
new file mode 100644
index 0000000..a0640b9
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/too-big-negative-integer/error
@@ -0,0 +1,2 @@
+1 32 32
+too big negative integer
diff --git a/lang/c/jansson/test/suites/invalid/too-big-negative-integer/input b/lang/c/jansson/test/suites/invalid/too-big-negative-integer/input
new file mode 100644
index 0000000..d6c26f1
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/too-big-negative-integer/input
@@ -0,0 +1 @@
+[-123123123123123123123123123123]
diff --git a/lang/c/jansson/test/suites/invalid/too-big-positive-integer/error b/lang/c/jansson/test/suites/invalid/too-big-positive-integer/error
new file mode 100644
index 0000000..3bdbefd
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/too-big-positive-integer/error
@@ -0,0 +1,2 @@
+1 31 31
+too big integer
diff --git a/lang/c/jansson/test/suites/invalid/too-big-positive-integer/input b/lang/c/jansson/test/suites/invalid/too-big-positive-integer/input
new file mode 100644
index 0000000..27c8553
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/too-big-positive-integer/input
@@ -0,0 +1 @@
+[123123123123123123123123123123]
diff --git a/lang/c/jansson/test/suites/invalid/truncated-unicode-surrogate/error b/lang/c/jansson/test/suites/invalid/truncated-unicode-surrogate/error
new file mode 100644
index 0000000..1b99f06
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/truncated-unicode-surrogate/error
@@ -0,0 +1,2 @@
+1 46 46
+invalid Unicode '\uDADA'
diff --git a/lang/c/jansson/test/suites/invalid/truncated-unicode-surrogate/input b/lang/c/jansson/test/suites/invalid/truncated-unicode-surrogate/input
new file mode 100644
index 0000000..2b340f4
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/truncated-unicode-surrogate/input
@@ -0,0 +1 @@
+["\uDADA (first surrogate without the second)"]
diff --git a/lang/c/jansson/test/suites/invalid/unicode-identifier/error b/lang/c/jansson/test/suites/invalid/unicode-identifier/error
new file mode 100644
index 0000000..178b0dd
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unicode-identifier/error
@@ -0,0 +1,2 @@
+1 1 2
+'[' or '{' expected near 'å'
diff --git a/lang/c/jansson/test/suites/invalid/unicode-identifier/input b/lang/c/jansson/test/suites/invalid/unicode-identifier/input
new file mode 100644
index 0000000..aad321c
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unicode-identifier/input
@@ -0,0 +1 @@
+å
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-array-and-object/error.normal b/lang/c/jansson/test/suites/invalid/unterminated-array-and-object/error.normal
new file mode 100644
index 0000000..5b19804
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-array-and-object/error.normal
@@ -0,0 +1,2 @@
+2 0 3
+string or '}' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-array-and-object/error.strip b/lang/c/jansson/test/suites/invalid/unterminated-array-and-object/error.strip
new file mode 100644
index 0000000..da2bb22
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-array-and-object/error.strip
@@ -0,0 +1,2 @@
+1 2 2
+string or '}' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-array-and-object/input b/lang/c/jansson/test/suites/invalid/unterminated-array-and-object/input
new file mode 100644
index 0000000..cd9dc64
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-array-and-object/input
@@ -0,0 +1 @@
+[{
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-array/error.normal b/lang/c/jansson/test/suites/invalid/unterminated-array/error.normal
new file mode 100644
index 0000000..8025ed1
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-array/error.normal
@@ -0,0 +1,2 @@
+2 0 5
+']' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-array/error.strip b/lang/c/jansson/test/suites/invalid/unterminated-array/error.strip
new file mode 100644
index 0000000..495d0f7
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-array/error.strip
@@ -0,0 +1,2 @@
+1 4 4
+']' expected near end of file
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-array/input b/lang/c/jansson/test/suites/invalid/unterminated-array/input
new file mode 100644
index 0000000..727ee81
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-array/input
@@ -0,0 +1 @@
+["a"
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-empty-key/error.normal b/lang/c/jansson/test/suites/invalid/unterminated-empty-key/error.normal
new file mode 100644
index 0000000..3d646ab
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-empty-key/error.normal
@@ -0,0 +1,2 @@
+1 2 2
+unexpected newline near '"'
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-empty-key/error.strip b/lang/c/jansson/test/suites/invalid/unterminated-empty-key/error.strip
new file mode 100644
index 0000000..94f1947
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-empty-key/error.strip
@@ -0,0 +1,2 @@
+1 2 2
+premature end of input near '"'
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-empty-key/input b/lang/c/jansson/test/suites/invalid/unterminated-empty-key/input
new file mode 100644
index 0000000..4117452
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-empty-key/input
@@ -0,0 +1 @@
+{"
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-key/error.normal b/lang/c/jansson/test/suites/invalid/unterminated-key/error.normal
new file mode 100644
index 0000000..5f09b77
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-key/error.normal
@@ -0,0 +1,2 @@
+1 3 3
+unexpected newline near '"a'
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-key/error.strip b/lang/c/jansson/test/suites/invalid/unterminated-key/error.strip
new file mode 100644
index 0000000..8b6bec4
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-key/error.strip
@@ -0,0 +1,2 @@
+1 3 3
+premature end of input near '"a'
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-key/input b/lang/c/jansson/test/suites/invalid/unterminated-key/input
new file mode 100644
index 0000000..705948c
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-key/input
@@ -0,0 +1 @@
+{"a
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-object-and-array/error b/lang/c/jansson/test/suites/invalid/unterminated-object-and-array/error
new file mode 100644
index 0000000..ed97be7
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-object-and-array/error
@@ -0,0 +1,2 @@
+1 2 2
+string or '}' expected near '['
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-object-and-array/input b/lang/c/jansson/test/suites/invalid/unterminated-object-and-array/input
new file mode 100644
index 0000000..da35a86
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-object-and-array/input
@@ -0,0 +1 @@
+{[
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-string/error.normal b/lang/c/jansson/test/suites/invalid/unterminated-string/error.normal
new file mode 100644
index 0000000..5f09b77
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-string/error.normal
@@ -0,0 +1,2 @@
+1 3 3
+unexpected newline near '"a'
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-string/error.strip b/lang/c/jansson/test/suites/invalid/unterminated-string/error.strip
new file mode 100644
index 0000000..8b6bec4
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-string/error.strip
@@ -0,0 +1,2 @@
+1 3 3
+premature end of input near '"a'
diff --git a/lang/c/jansson/test/suites/invalid/unterminated-string/input b/lang/c/jansson/test/suites/invalid/unterminated-string/input
new file mode 100644
index 0000000..38ab6b0
--- /dev/null
+++ b/lang/c/jansson/test/suites/invalid/unterminated-string/input
@@ -0,0 +1 @@
+["a
diff --git a/lang/c/jansson/test/suites/valid/complex-array/input b/lang/c/jansson/test/suites/valid/complex-array/input
new file mode 100644
index 0000000..1b9bbb9
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/complex-array/input
@@ -0,0 +1,5 @@
+[1,2,3,4,
+"a", "b", "c",
+{"foo": "bar", "core": "dump"},
+true, false, true, true, null, false
+]
diff --git a/lang/c/jansson/test/suites/valid/complex-array/output b/lang/c/jansson/test/suites/valid/complex-array/output
new file mode 100644
index 0000000..7aefe56
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/complex-array/output
@@ -0,0 +1 @@
+[1, 2, 3, 4, "a", "b", "c", {"core": "dump", "foo": "bar"}, true, false, true, true, null, false]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/empty-array/input b/lang/c/jansson/test/suites/valid/empty-array/input
new file mode 100644
index 0000000..fe51488
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/empty-array/input
@@ -0,0 +1 @@
+[]
diff --git a/lang/c/jansson/test/suites/valid/empty-array/output b/lang/c/jansson/test/suites/valid/empty-array/output
new file mode 100644
index 0000000..0637a08
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/empty-array/output
@@ -0,0 +1 @@
+[]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/empty-object-in-array/input b/lang/c/jansson/test/suites/valid/empty-object-in-array/input
new file mode 100644
index 0000000..93d5140
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/empty-object-in-array/input
@@ -0,0 +1 @@
+[{}]
diff --git a/lang/c/jansson/test/suites/valid/empty-object-in-array/output b/lang/c/jansson/test/suites/valid/empty-object-in-array/output
new file mode 100644
index 0000000..ee1aac4
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/empty-object-in-array/output
@@ -0,0 +1 @@
+[{}]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/empty-object/input b/lang/c/jansson/test/suites/valid/empty-object/input
new file mode 100644
index 0000000..0967ef4
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/empty-object/input
@@ -0,0 +1 @@
+{}
diff --git a/lang/c/jansson/test/suites/valid/empty-object/output b/lang/c/jansson/test/suites/valid/empty-object/output
new file mode 100644
index 0000000..9e26dfe
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/empty-object/output
@@ -0,0 +1 @@
+{}
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/empty-string/input b/lang/c/jansson/test/suites/valid/empty-string/input
new file mode 100644
index 0000000..66a1e18
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/empty-string/input
@@ -0,0 +1 @@
+[""]
diff --git a/lang/c/jansson/test/suites/valid/empty-string/output b/lang/c/jansson/test/suites/valid/empty-string/output
new file mode 100644
index 0000000..93b6be2
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/empty-string/output
@@ -0,0 +1 @@
+[""]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/escaped-utf-control-char/input b/lang/c/jansson/test/suites/valid/escaped-utf-control-char/input
new file mode 100644
index 0000000..9a98545
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/escaped-utf-control-char/input
@@ -0,0 +1 @@
+["\u0012 escaped control character"]
diff --git a/lang/c/jansson/test/suites/valid/escaped-utf-control-char/output b/lang/c/jansson/test/suites/valid/escaped-utf-control-char/output
new file mode 100644
index 0000000..07221b7
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/escaped-utf-control-char/output
@@ -0,0 +1 @@
+["\u0012 escaped control character"]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/false/input b/lang/c/jansson/test/suites/valid/false/input
new file mode 100644
index 0000000..4343652
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/false/input
@@ -0,0 +1 @@
+[false]
diff --git a/lang/c/jansson/test/suites/valid/false/output b/lang/c/jansson/test/suites/valid/false/output
new file mode 100644
index 0000000..67b2f07
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/false/output
@@ -0,0 +1 @@
+[false]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/negative-int/input b/lang/c/jansson/test/suites/valid/negative-int/input
new file mode 100644
index 0000000..a96d5cd
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/negative-int/input
@@ -0,0 +1 @@
+[-123]
diff --git a/lang/c/jansson/test/suites/valid/negative-int/output b/lang/c/jansson/test/suites/valid/negative-int/output
new file mode 100644
index 0000000..8e30f8b
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/negative-int/output
@@ -0,0 +1 @@
+[-123]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/negative-one/input b/lang/c/jansson/test/suites/valid/negative-one/input
new file mode 100644
index 0000000..2363a1a
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/negative-one/input
@@ -0,0 +1 @@
+[-1]
diff --git a/lang/c/jansson/test/suites/valid/negative-one/output b/lang/c/jansson/test/suites/valid/negative-one/output
new file mode 100644
index 0000000..99d21a2
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/negative-one/output
@@ -0,0 +1 @@
+[-1]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/negative-zero/input b/lang/c/jansson/test/suites/valid/negative-zero/input
new file mode 100644
index 0000000..40fc49c
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/negative-zero/input
@@ -0,0 +1 @@
+[-0]
diff --git a/lang/c/jansson/test/suites/valid/negative-zero/output b/lang/c/jansson/test/suites/valid/negative-zero/output
new file mode 100644
index 0000000..6e7ea63
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/negative-zero/output
@@ -0,0 +1 @@
+[0]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/null/input b/lang/c/jansson/test/suites/valid/null/input
new file mode 100644
index 0000000..62864b3
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/null/input
@@ -0,0 +1 @@
+[null]
diff --git a/lang/c/jansson/test/suites/valid/null/output b/lang/c/jansson/test/suites/valid/null/output
new file mode 100644
index 0000000..500db4a
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/null/output
@@ -0,0 +1 @@
+[null]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/one-byte-utf-8/input b/lang/c/jansson/test/suites/valid/one-byte-utf-8/input
new file mode 100644
index 0000000..8bda468
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/one-byte-utf-8/input
@@ -0,0 +1 @@
+["\u002c one-byte UTF-8"]
diff --git a/lang/c/jansson/test/suites/valid/one-byte-utf-8/output b/lang/c/jansson/test/suites/valid/one-byte-utf-8/output
new file mode 100644
index 0000000..c33d250
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/one-byte-utf-8/output
@@ -0,0 +1 @@
+[", one-byte UTF-8"]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/real-capital-e-negative-exponent/input b/lang/c/jansson/test/suites/valid/real-capital-e-negative-exponent/input
new file mode 100644
index 0000000..1e9fa51
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-capital-e-negative-exponent/input
@@ -0,0 +1 @@
+[1E-2]
diff --git a/lang/c/jansson/test/suites/valid/real-capital-e-negative-exponent/output b/lang/c/jansson/test/suites/valid/real-capital-e-negative-exponent/output
new file mode 100644
index 0000000..75b9ef9
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-capital-e-negative-exponent/output
@@ -0,0 +1 @@
+[0.01]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/real-capital-e-positive-exponent/input b/lang/c/jansson/test/suites/valid/real-capital-e-positive-exponent/input
new file mode 100644
index 0000000..6a6ab93
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-capital-e-positive-exponent/input
@@ -0,0 +1 @@
+[1E+2]
diff --git a/lang/c/jansson/test/suites/valid/real-capital-e-positive-exponent/output b/lang/c/jansson/test/suites/valid/real-capital-e-positive-exponent/output
new file mode 100644
index 0000000..d8ff702
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-capital-e-positive-exponent/output
@@ -0,0 +1 @@
+[100.0]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/real-capital-e/input b/lang/c/jansson/test/suites/valid/real-capital-e/input
new file mode 100644
index 0000000..e703223
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-capital-e/input
@@ -0,0 +1 @@
+[1E22]
diff --git a/lang/c/jansson/test/suites/valid/real-capital-e/output b/lang/c/jansson/test/suites/valid/real-capital-e/output
new file mode 100644
index 0000000..88e90ce
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-capital-e/output
@@ -0,0 +1 @@
+[1e+22]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/real-exponent/input b/lang/c/jansson/test/suites/valid/real-exponent/input
new file mode 100644
index 0000000..b2a69b9
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-exponent/input
@@ -0,0 +1 @@
+[123e45]
diff --git a/lang/c/jansson/test/suites/valid/real-exponent/output b/lang/c/jansson/test/suites/valid/real-exponent/output
new file mode 100644
index 0000000..ac910d6
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-exponent/output
@@ -0,0 +1 @@
+[1.2299999999999999e+47]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/real-fraction-exponent/input b/lang/c/jansson/test/suites/valid/real-fraction-exponent/input
new file mode 100644
index 0000000..0c1660d
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-fraction-exponent/input
@@ -0,0 +1 @@
+[123.456e78]
diff --git a/lang/c/jansson/test/suites/valid/real-fraction-exponent/output b/lang/c/jansson/test/suites/valid/real-fraction-exponent/output
new file mode 100644
index 0000000..4b87bda
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-fraction-exponent/output
@@ -0,0 +1 @@
+[1.23456e+80]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/real-negative-exponent/input b/lang/c/jansson/test/suites/valid/real-negative-exponent/input
new file mode 100644
index 0000000..daa4af9
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-negative-exponent/input
@@ -0,0 +1 @@
+[1e-2]
diff --git a/lang/c/jansson/test/suites/valid/real-negative-exponent/output b/lang/c/jansson/test/suites/valid/real-negative-exponent/output
new file mode 100644
index 0000000..75b9ef9
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-negative-exponent/output
@@ -0,0 +1 @@
+[0.01]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/real-positive-exponent/input b/lang/c/jansson/test/suites/valid/real-positive-exponent/input
new file mode 100644
index 0000000..f378077
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-positive-exponent/input
@@ -0,0 +1 @@
+[1e+2]
diff --git a/lang/c/jansson/test/suites/valid/real-positive-exponent/output b/lang/c/jansson/test/suites/valid/real-positive-exponent/output
new file mode 100644
index 0000000..d8ff702
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-positive-exponent/output
@@ -0,0 +1 @@
+[100.0]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/real-underflow/input b/lang/c/jansson/test/suites/valid/real-underflow/input
new file mode 100644
index 0000000..dc70996
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-underflow/input
@@ -0,0 +1 @@
+[123e-10000000]
diff --git a/lang/c/jansson/test/suites/valid/real-underflow/output b/lang/c/jansson/test/suites/valid/real-underflow/output
new file mode 100644
index 0000000..92df1df
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/real-underflow/output
@@ -0,0 +1 @@
+[0.0]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/run b/lang/c/jansson/test/suites/valid/run
new file mode 100755
index 0000000..fa594e6
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/run
@@ -0,0 +1,55 @@
+#!/bin/sh
+#
+# Copyright (c) 2009-2011 Petri Lehtinen <petri at digip.org>
+#
+# Jansson is free software; you can redistribute it and/or modify
+# it under the terms of the MIT license. See LICENSE for details.
+
+export JSON_SORT_KEYS=1
+
+is_test() {
+    test -d $test_path
+}
+
+do_run() {
+    variant=$1
+    s=".$1"
+
+    strip=0
+    [ "$variant" = "strip" ] && strip=1
+
+    STRIP=$strip $json_process \
+        <$test_path/input >$test_log/stdout$s 2>$test_log/stderr$s
+    valgrind_check $test_log/stderr$s || return 1
+
+    ref=output
+    [ -f $test_path/output$s ] && ref=output$s
+
+    if ! cmp -s $test_path/$ref $test_log/stdout$s; then
+        echo $variant > $test_log/variant
+        return 1
+    fi
+}
+
+run_test() {
+    do_run normal && do_run strip
+}
+
+show_error() {
+    valgrind_show_error && return
+
+    read variant < $test_log/variant
+    s=".$variant"
+
+    echo "VARIANT: $variant"
+
+    echo "EXPECTED OUTPUT:"
+    ref=output
+    [ -f $test_path/output$s ] && ref=output$s
+    nl -bn $test_path/$ref
+
+    echo "ACTUAL OUTPUT:"
+    nl -bn $test_log/stdout$s
+}
+
+. $top_srcdir/test/scripts/run-tests.sh
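
The valid-suite runner exports JSON_SORT_KEYS=1, which is why outputs such as complex-array's serialize object keys in sorted order ({"core": "dump", "foo": "bar"}). A sketch of the corresponding encoder behavior, assuming the harness maps that environment variable onto jansson's JSON_SORT_KEYS dump flag:

    /* Sketch: round-trip a document the way the valid suite does,
     * with sorted object keys. JSON_SORT_KEYS is jansson's encoder
     * flag; the env-var-to-flag mapping lives in the (not shown)
     * json_process harness. */
    #include <stdio.h>
    #include <stdlib.h>
    #include <jansson.h>

    int main(void)
    {
        json_error_t error;
        json_t *root = json_loads("{\"foo\": \"bar\", \"core\": \"dump\"}",
                                  0, &error);
        if (root == NULL)
            return 1;

        char *out = json_dumps(root, JSON_SORT_KEYS);
        if (out != NULL) {
            printf("%s\n", out);   /* {"core": "dump", "foo": "bar"} */
            free(out);
        }
        json_decref(root);
        return 0;
    }
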
diff --git a/lang/c/jansson/test/suites/valid/short-string/input b/lang/c/jansson/test/suites/valid/short-string/input
new file mode 100644
index 0000000..0c3426d
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/short-string/input
@@ -0,0 +1 @@
+["a"]
diff --git a/lang/c/jansson/test/suites/valid/short-string/output b/lang/c/jansson/test/suites/valid/short-string/output
new file mode 100644
index 0000000..eac5f7b
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/short-string/output
@@ -0,0 +1 @@
+["a"]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/simple-ascii-string/input b/lang/c/jansson/test/suites/valid/simple-ascii-string/input
new file mode 100644
index 0000000..929b215
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/simple-ascii-string/input
@@ -0,0 +1 @@
+["abcdefghijklmnopqrstuvwxyz1234567890 "]
diff --git a/lang/c/jansson/test/suites/valid/simple-ascii-string/output b/lang/c/jansson/test/suites/valid/simple-ascii-string/output
new file mode 100644
index 0000000..90358ab
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/simple-ascii-string/output
@@ -0,0 +1 @@
+["abcdefghijklmnopqrstuvwxyz1234567890 "]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/simple-int-0/input b/lang/c/jansson/test/suites/valid/simple-int-0/input
new file mode 100644
index 0000000..111bb86
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/simple-int-0/input
@@ -0,0 +1 @@
+[0]
diff --git a/lang/c/jansson/test/suites/valid/simple-int-0/output b/lang/c/jansson/test/suites/valid/simple-int-0/output
new file mode 100644
index 0000000..6e7ea63
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/simple-int-0/output
@@ -0,0 +1 @@
+[0]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/simple-int-1/input b/lang/c/jansson/test/suites/valid/simple-int-1/input
new file mode 100644
index 0000000..7660873
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/simple-int-1/input
@@ -0,0 +1 @@
+[1]
diff --git a/lang/c/jansson/test/suites/valid/simple-int-1/output b/lang/c/jansson/test/suites/valid/simple-int-1/output
new file mode 100644
index 0000000..bace2a0
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/simple-int-1/output
@@ -0,0 +1 @@
+[1]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/simple-int-123/input b/lang/c/jansson/test/suites/valid/simple-int-123/input
new file mode 100644
index 0000000..3214bfe
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/simple-int-123/input
@@ -0,0 +1 @@
+[123]
diff --git a/lang/c/jansson/test/suites/valid/simple-int-123/output b/lang/c/jansson/test/suites/valid/simple-int-123/output
new file mode 100644
index 0000000..e47f69a
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/simple-int-123/output
@@ -0,0 +1 @@
+[123]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/simple-object/input b/lang/c/jansson/test/suites/valid/simple-object/input
new file mode 100644
index 0000000..a34fb49
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/simple-object/input
@@ -0,0 +1 @@
+{"a":[]}
diff --git a/lang/c/jansson/test/suites/valid/simple-object/output b/lang/c/jansson/test/suites/valid/simple-object/output
new file mode 100644
index 0000000..982abe8
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/simple-object/output
@@ -0,0 +1 @@
+{"a": []}
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/simple-real/input b/lang/c/jansson/test/suites/valid/simple-real/input
new file mode 100644
index 0000000..0fed7df
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/simple-real/input
@@ -0,0 +1 @@
+[123.456789]
diff --git a/lang/c/jansson/test/suites/valid/simple-real/output b/lang/c/jansson/test/suites/valid/simple-real/output
new file mode 100644
index 0000000..b02878e
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/simple-real/output
@@ -0,0 +1 @@
+[123.456789]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/string-escapes/input b/lang/c/jansson/test/suites/valid/string-escapes/input
new file mode 100644
index 0000000..d994564
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/string-escapes/input
@@ -0,0 +1 @@
+["\"\\\/\b\f\n\r\t"]
diff --git a/lang/c/jansson/test/suites/valid/string-escapes/output b/lang/c/jansson/test/suites/valid/string-escapes/output
new file mode 100644
index 0000000..ca5c1c6
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/string-escapes/output
@@ -0,0 +1 @@
+["\"\\/\b\f\n\r\t"]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/three-byte-utf-8/input b/lang/c/jansson/test/suites/valid/three-byte-utf-8/input
new file mode 100644
index 0000000..ccc0bfa
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/three-byte-utf-8/input
@@ -0,0 +1 @@
+["\u0821 three-byte UTF-8"]
diff --git a/lang/c/jansson/test/suites/valid/three-byte-utf-8/output b/lang/c/jansson/test/suites/valid/three-byte-utf-8/output
new file mode 100644
index 0000000..c44d124
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/three-byte-utf-8/output
@@ -0,0 +1 @@
+["ࠡ three-byte UTF-8"]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/true/input b/lang/c/jansson/test/suites/valid/true/input
new file mode 100644
index 0000000..29513c4
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/true/input
@@ -0,0 +1 @@
+[true]
diff --git a/lang/c/jansson/test/suites/valid/true/output b/lang/c/jansson/test/suites/valid/true/output
new file mode 100644
index 0000000..de601e3
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/true/output
@@ -0,0 +1 @@
+[true]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/two-byte-utf-8/input b/lang/c/jansson/test/suites/valid/two-byte-utf-8/input
new file mode 100644
index 0000000..05ae854
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/two-byte-utf-8/input
@@ -0,0 +1 @@
+["\u0123 two-byte UTF-8"]
diff --git a/lang/c/jansson/test/suites/valid/two-byte-utf-8/output b/lang/c/jansson/test/suites/valid/two-byte-utf-8/output
new file mode 100644
index 0000000..1f0988d
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/two-byte-utf-8/output
@@ -0,0 +1 @@
+["ģ two-byte UTF-8"]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/utf-8-string/input b/lang/c/jansson/test/suites/valid/utf-8-string/input
new file mode 100644
index 0000000..20dc64a
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/utf-8-string/input
@@ -0,0 +1 @@
+["€þıœəßð some utf-8 ĸʒ×ŋµåäö𝄞"]
diff --git a/lang/c/jansson/test/suites/valid/utf-8-string/output b/lang/c/jansson/test/suites/valid/utf-8-string/output
new file mode 100644
index 0000000..5372865
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/utf-8-string/output
@@ -0,0 +1 @@
+["€þıœəßð some utf-8 ĸʒ×ŋµåäö𝄞"]
\ No newline at end of file
diff --git a/lang/c/jansson/test/suites/valid/utf-surrogate-four-byte-encoding/input b/lang/c/jansson/test/suites/valid/utf-surrogate-four-byte-encoding/input
new file mode 100644
index 0000000..c598b41
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/utf-surrogate-four-byte-encoding/input
@@ -0,0 +1 @@
+["\uD834\uDD1E surrogate, four-byte UTF-8"]
diff --git a/lang/c/jansson/test/suites/valid/utf-surrogate-four-byte-encoding/output b/lang/c/jansson/test/suites/valid/utf-surrogate-four-byte-encoding/output
new file mode 100644
index 0000000..fa806d2
--- /dev/null
+++ b/lang/c/jansson/test/suites/valid/utf-surrogate-four-byte-encoding/output
@@ -0,0 +1 @@
+["𝄞 surrogate, four-byte UTF-8"]
\ No newline at end of file
diff --git a/lang/c/src/.gitignore b/lang/c/src/.gitignore
new file mode 100644
index 0000000..e278a8b
--- /dev/null
+++ b/lang/c/src/.gitignore
@@ -0,0 +1,2 @@
+avro-c.pc
+avropipe
diff --git a/lang/c/src/CMakeLists.txt b/lang/c/src/CMakeLists.txt
new file mode 100644
index 0000000..589296b
--- /dev/null
+++ b/lang/c/src/CMakeLists.txt
@@ -0,0 +1,164 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+set(AVRO_SRC
+    allocation.c
+    array.c
+    avro.h
+    avro/allocation.h
+    avro/basics.h
+    avro/consumer.h
+    avro/data.h
+    avro/errors.h
+    avro/generic.h
+    avro/io.h
+    avro/legacy.h
+    avro/refcount.h
+    avro/resolver.h
+    avro/schema.h
+    avro/value.h
+    avro_generic_internal.h
+    avro_private.h
+    codec.c
+    codec.h
+    consumer.c
+    consume-binary.c
+    datafile.c
+    datum.c
+    datum.h
+    datum_equal.c
+    datum_read.c
+    datum_size.c
+    datum_skip.c
+    datum_validate.c
+    datum_value.c
+    datum_write.c
+    dump.c
+    dump.h
+    encoding.h
+    encoding_binary.c
+    errors.c
+    generic.c
+    io.c
+    map.c
+    memoize.c
+    resolved-reader.c
+    resolved-writer.c
+    resolver.c
+    schema.c
+    schema.h
+    schema_equal.c
+    st.c
+    st.h
+    string.c
+    value.c
+    value-hash.c
+    value-json.c
+    value-read.c
+    value-sizeof.c
+    value-write.c
+    wrapped-buffer.c
+)
+
+set(JANSSON_SRC
+    ../jansson/src/dump.c
+    ../jansson/src/error.c
+    ../jansson/src/hashtable.c
+    ../jansson/src/hashtable.h
+    ../jansson/src/jansson.h
+    ../jansson/src/jansson_private.h
+    ../jansson/src/load.c
+    ../jansson/src/memory.c
+    ../jansson/src/pack_unpack.c
+    ../jansson/src/strbuffer.c
+    ../jansson/src/strbuffer.h
+    ../jansson/src/utf.c
+    ../jansson/src/utf.h
+    ../jansson/src/value.c
+)
+
+source_group(Avro FILES ${AVRO_SRC})
+source_group(Jansson FILES ${JANSSON_SRC})
+
+# The version.sh script gives us a SOVERSION that uses colon as a
+# separator; we need periods.
+
+string(REPLACE ":" "." LIBAVRO_DOT_VERSION ${LIBAVRO_VERSION})
+
+add_library(avro-static STATIC ${AVRO_SRC} ${JANSSON_SRC})
+target_link_libraries(avro-static ${CODEC_LIBRARIES} ${THREADS_LIBRARIES})
+set_target_properties(avro-static PROPERTIES OUTPUT_NAME avro)
+
+if (NOT WIN32)
+# TODO: Create Windows DLLs. See http://www.cmake.org/Wiki/BuildingWinDLL
+add_library(avro-shared SHARED ${AVRO_SRC} ${JANSSON_SRC})
+target_link_libraries(avro-shared ${CODEC_LIBRARIES} ${THREADS_LIBRARIES})
+set_target_properties(avro-shared PROPERTIES
+        OUTPUT_NAME avro
+        SOVERSION ${LIBAVRO_DOT_VERSION})
+endif(NOT WIN32)
+
+install(FILES
+        ${CMAKE_CURRENT_SOURCE_DIR}/avro.h
+        DESTINATION include)
+install(DIRECTORY
+        ${CMAKE_CURRENT_SOURCE_DIR}/avro
+        DESTINATION include
+        FILES_MATCHING PATTERN "*.h")
+
+if (WIN32)
+install(TARGETS avro-static
+        RUNTIME DESTINATION bin
+        LIBRARY DESTINATION lib
+        ARCHIVE DESTINATION lib
+       )
+else(WIN32)
+install(TARGETS avro-static avro-shared
+        RUNTIME DESTINATION bin
+        LIBRARY DESTINATION lib
+        ARCHIVE DESTINATION lib
+       )
+endif(WIN32)
+
+# Install pkg-config file
+
+set(prefix ${CMAKE_INSTALL_PREFIX})
+set(VERSION ${AVRO_VERSION})
+configure_file(avro-c.pc.in avro-c.pc)
+install(FILES ${CMAKE_CURRENT_BINARY_DIR}/avro-c.pc
+        DESTINATION lib/pkgconfig)
+
+add_executable(avrocat avrocat.c)
+target_link_libraries(avrocat avro-static)
+install(TARGETS avrocat RUNTIME DESTINATION bin)
+
+add_executable(avroappend avroappend.c)
+target_link_libraries(avroappend avro-static)
+install(TARGETS avroappend RUNTIME DESTINATION bin)
+
+if (NOT WIN32)
+#TODO: Port getopt() to Windows to compile avropipe.c and avromod.c
+add_executable(avropipe avropipe.c)
+target_link_libraries(avropipe avro-static)
+install(TARGETS avropipe RUNTIME DESTINATION bin)
+
+add_executable(avromod avromod.c)
+target_link_libraries(avromod avro-static)
+install(TARGETS avromod RUNTIME DESTINATION bin)
+endif(NOT WIN32)
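
This CMakeLists.txt builds static and (non-Windows) shared avro libraries, the command-line tools, and installs an avro-c.pc file, so downstream code can compile with something like `cc demo.c $(pkg-config --cflags --libs avro-c)`. A minimal sketch of such a consumer, using only public calls from the installed avro.h (the record schema here is illustrative):

    /* Sketch of a downstream consumer of the library built above. */
    #include <stdio.h>
    #include <avro.h>

    int main(void)
    {
        const char *json =
            "{\"type\": \"record\", \"name\": \"Point\","
            " \"fields\": [{\"name\": \"x\", \"type\": \"int\"},"
            "              {\"name\": \"y\", \"type\": \"int\"}]}";

        avro_schema_t schema;
        avro_schema_error_t error;
        /* the length argument is ignored by this version of the API */
        if (avro_schema_from_json(json, 0, &schema, &error) != 0) {
            fprintf(stderr, "bad schema: %s\n", avro_strerror());
            return 1;
        }
        printf("parsed record schema: %s\n", avro_schema_name(schema));
        avro_schema_decref(schema);
        return 0;
    }
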
diff --git a/lang/c/src/allocation.c b/lang/c/src/allocation.c
new file mode 100644
index 0000000..58af9bd
--- /dev/null
+++ b/lang/c/src/allocation.c
@@ -0,0 +1,103 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro_private.h"
+#include "avro/allocation.h"
+#include "avro/data.h"
+#include "avro/legacy.h"
+
+static void *
+avro_default_allocator(void *ud, void *ptr, size_t osize, size_t nsize)
+{
+	AVRO_UNUSED(ud);
+	AVRO_UNUSED(osize);
+
+	if (nsize == 0) {
+		free(ptr);
+		return NULL;
+	} else {
+		return realloc(ptr, nsize);
+	}
+}
+
+struct avro_allocator_state  AVRO_CURRENT_ALLOCATOR = {
+	avro_default_allocator,
+	NULL
+};
+
+void avro_set_allocator(avro_allocator_t alloc, void *user_data)
+{
+	AVRO_CURRENT_ALLOCATOR.alloc = alloc;
+	AVRO_CURRENT_ALLOCATOR.user_data = user_data;
+}
+
+void *avro_calloc(size_t count, size_t size)
+{
+	void  *ptr = avro_malloc(count * size);
+	if (ptr != NULL) {
+		memset(ptr, 0, count * size);
+	}
+	return ptr;
+}
+
+char *avro_str_alloc(size_t str_size)
+{
+	size_t  buf_size = str_size + sizeof(size_t);
+
+	void  *buf = avro_malloc(buf_size);
+	if (buf == NULL) {
+		return NULL;
+	}
+
+	size_t  *size = (size_t *) buf;
+	char  *new_str = (char *) (size + 1);
+
+	*size = buf_size;
+
+	return new_str;
+}
+
+char *avro_strdup(const char *str)
+{
+	if (str == NULL) {
+		return NULL;
+	}
+
+	size_t  str_size = strlen(str)+1;
+	char *new_str = avro_str_alloc(str_size);
+	if (new_str != NULL) memcpy(new_str, str, str_size);
+
+	//fprintf(stderr, "--- new  %" PRIsz " %p %s\n", *size, new_str, new_str);
+	return new_str;
+}
+
+void avro_str_free(char *str)
+{
+	size_t  *size = ((size_t *) str) - 1;
+	//fprintf(stderr, "--- free %" PRIsz " %p %s\n", *size, str, str);
+	avro_free(size, *size);
+}
+
+
+void
+avro_alloc_free_func(void *ptr, size_t sz)
+{
+	avro_free(ptr, sz);
+}
diff --git a/lang/c/src/array.c b/lang/c/src/array.c
new file mode 100644
index 0000000..1117d1a
--- /dev/null
+++ b/lang/c/src/array.c
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <errno.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro/allocation.h"
+#include "avro/data.h"
+#include "avro/errors.h"
+#include "avro_private.h"
+
+
+void avro_raw_array_init(avro_raw_array_t *array, size_t element_size)
+{
+	memset(array, 0, sizeof(avro_raw_array_t));
+	array->element_size = element_size;
+}
+
+
+void avro_raw_array_done(avro_raw_array_t *array)
+{
+	if (array->data) {
+		avro_free(array->data, array->allocated_size);
+	}
+	memset(array, 0, sizeof(avro_raw_array_t));
+}
+
+
+void avro_raw_array_clear(avro_raw_array_t *array)
+{
+	array->element_count = 0;
+}
+
+
+int
+avro_raw_array_ensure_size(avro_raw_array_t *array, size_t desired_count)
+{
+	size_t  required_size = array->element_size * desired_count;
+	if (array->allocated_size >= required_size) {
+		return 0;
+	}
+
+	/*
+	 * Double the old size when reallocating.
+	 */
+
+	size_t  new_size;
+	if (array->allocated_size == 0) {
+		/*
+		 * Start with an arbitrary 10 items.
+		 */
+
+		new_size = 10 * array->element_size;
+	} else {
+		new_size = array->allocated_size * 2;
+	}
+
+	if (required_size > new_size) {
+		new_size = required_size;
+	}
+
+	array->data = avro_realloc(array->data, array->allocated_size, new_size);
+	if (array->data == NULL) {
+		avro_set_error("Cannot allocate space in array for %" PRIsz " elements",
+			       desired_count);
+		return ENOMEM;
+	}
+	array->allocated_size = new_size;
+
+	return 0;
+}
+
+
+int
+avro_raw_array_ensure_size0(avro_raw_array_t *array, size_t desired_count)
+{
+	int  rval;
+	size_t  old_allocated_size = array->allocated_size;
+	check(rval, avro_raw_array_ensure_size(array, desired_count));
+
+	if (array->allocated_size > old_allocated_size) {
+		size_t  extra_space = array->allocated_size - old_allocated_size;
+		void  *buf = array->data;
+		memset((char *)buf + old_allocated_size, 0, extra_space);
+	}
+
+	return 0;
+}
+
+
+void *avro_raw_array_append(avro_raw_array_t *array)
+{
+	int  rval;
+
+	rval = avro_raw_array_ensure_size(array, array->element_count + 1);
+	if (rval) {
+		return NULL;
+	}
+
+	size_t  offset = array->element_size * array->element_count;
+	array->element_count++;
+	return (char *)array->data + offset;
+}
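
avro_raw_array_ensure_size grows geometrically: a 10-element floor, then doubling, falling back to the exact requirement for large jumps, so repeated appends stay amortized O(1). A usage sketch of the API defined above (avro_raw_array_t itself is declared in avro/data.h):

    /* Sketch: exercising the resizable-array API from this file. */
    #include <stdio.h>
    #include "avro/data.h"

    int main(void)
    {
        avro_raw_array_t  array;
        avro_raw_array_init(&array, sizeof(int));

        for (int i = 0; i < 100; i++) {
            int *slot = (int *) avro_raw_array_append(&array);
            if (slot == NULL) {
                avro_raw_array_done(&array);
                return 1;           /* allocation failed */
            }
            *slot = i * i;
        }

        /* element_count and data are fields of avro_raw_array_t */
        int *items = (int *) array.data;
        printf("%zu elements, last = %d\n",
               array.element_count, items[array.element_count - 1]);

        avro_raw_array_done(&array);
        return 0;
    }
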
diff --git a/lang/c/src/avro-c.pc.in b/lang/c/src/avro-c.pc.in
new file mode 100644
index 0000000..58b7b9d
--- /dev/null
+++ b/lang/c/src/avro-c.pc.in
@@ -0,0 +1,7 @@
+Name: avro-c
+Description: C library for parsing Avro data
+Version: @VERSION@
+URL: http://avro.apache.org/
+Libs: -L@prefix@/lib -lavro
+Cflags: -I@prefix@/include
+Requires: @CODEC_PKG@
diff --git a/lang/c/src/avro.h b/lang/c/src/avro.h
new file mode 100644
index 0000000..3c244e6
--- /dev/null
+++ b/lang/c/src/avro.h
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+#ifndef AVRO_H
+#define AVRO_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <avro/allocation.h>
+#include <avro/basics.h>
+#include <avro/consumer.h>
+#include <avro/data.h>
+#include <avro/errors.h>
+#include <avro/generic.h>
+#include <avro/io.h>
+#include <avro/legacy.h>
+#include <avro/platform.h>
+#include <avro/resolver.h>
+#include <avro/schema.h>
+#include <avro/value.h>
+
+CLOSE_EXTERN
+#endif
diff --git a/lang/c/src/avro/allocation.h b/lang/c/src/avro/allocation.h
new file mode 100644
index 0000000..ca69517
--- /dev/null
+++ b/lang/c/src/avro/allocation.h
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#ifndef AVRO_ALLOCATION_H
+#define AVRO_ALLOCATION_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <stdlib.h>
+
+/*
+ * Allocation interface.  You can provide a custom allocator for the
+ * library, should you wish.  The allocator is provided as a single
+ * generic function, which can emulate the standard malloc, realloc, and
+ * free functions.  The design of this allocation interface is inspired
+ * by the implementation of the Lua interpreter.
+ *
+ * The ptr parameter will be the location of any existing memory
+ * buffer.  The osize parameter will be the size of this existing
+ * buffer.  If ptr is NULL, then osize will be 0.  The nsize parameter
+ * will be the size of the new buffer, or 0 if the new buffer should be
+ * freed.
+ *
+ * If nsize is 0, then the allocation function must return NULL.  If
+ * nsize is not 0, then it should return NULL if the allocation fails.
+ */
+
+typedef void *
+(*avro_allocator_t)(void *user_data, void *ptr, size_t osize, size_t nsize);
+
+void avro_set_allocator(avro_allocator_t alloc, void *user_data);
+
+struct avro_allocator_state {
+	avro_allocator_t  alloc;
+	void  *user_data;
+};
+
+extern struct avro_allocator_state  AVRO_CURRENT_ALLOCATOR;
+
+#define avro_realloc(ptr, osz, nsz)          \
+	(AVRO_CURRENT_ALLOCATOR.alloc        \
+	 (AVRO_CURRENT_ALLOCATOR.user_data,  \
+	  (ptr), (osz), (nsz)))
+
+#define avro_malloc(sz) (avro_realloc(NULL, 0, (sz)))
+#define avro_free(ptr, osz) (avro_realloc((ptr), (osz), 0))
+
+#define avro_new(type) (avro_realloc(NULL, 0, sizeof(type)))
+#define avro_freet(type, ptr) (avro_realloc((ptr), sizeof(type), 0))
+
+void *avro_calloc(size_t count, size_t size);
+
+/*
+ * This is probably too clever for our own good, but when we duplicate a
+ * string, we actually store its size in the same allocated memory
+ * buffer.  That lets us free the string later, without having to call
+ * strlen to get its size, and without the containing struct having to
+ * manually store the string's length.
+ *
+ * This means that any string returned by avro_strdup MUST be freed using
+ * avro_str_free, and the only thing that can be passed into
+ * avro_str_free is a string created via avro_strdup.
+ */
+
+char *avro_str_alloc(size_t str_size);
+char *avro_strdup(const char *str);
+void avro_str_free(char *str);
+
+CLOSE_EXTERN
+#endif
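
The comment block in this header fixes the allocator contract: one Lua-style function covering malloc, realloc, and free, where nsize == 0 means free and must return NULL, and ptr == NULL arrives with osize == 0. A sketch of a custom allocator that tracks live bytes through user_data, installed with avro_set_allocator:

    /* Sketch: a custom allocator meeting the contract above, with
     * user_data pointing at a live-byte counter. */
    #include <stdio.h>
    #include <stdlib.h>
    #include "avro/allocation.h"

    static void *
    tracking_allocator(void *user_data, void *ptr, size_t osize, size_t nsize)
    {
        size_t *live = (size_t *) user_data;
        *live = *live + nsize - osize;

        if (nsize == 0) {
            free(ptr);
            return NULL;
        }
        return realloc(ptr, nsize);
    }

    int main(void)
    {
        static size_t live = 0;
        avro_set_allocator(tracking_allocator, &live);

        int *nums = (int *) avro_malloc(10 * sizeof(int));
        printf("live after malloc: %zu bytes\n", live);

        avro_free(nums, 10 * sizeof(int));
        printf("live after free:   %zu bytes\n", live);
        return 0;
    }
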
diff --git a/lang/c/src/avro/basics.h b/lang/c/src/avro/basics.h
new file mode 100644
index 0000000..d934af7
--- /dev/null
+++ b/lang/c/src/avro/basics.h
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#ifndef AVRO_BASICS_H
+#define AVRO_BASICS_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+
+enum avro_type_t {
+	AVRO_STRING,
+	AVRO_BYTES,
+	AVRO_INT32,
+	AVRO_INT64,
+	AVRO_FLOAT,
+	AVRO_DOUBLE,
+	AVRO_BOOLEAN,
+	AVRO_NULL,
+	AVRO_RECORD,
+	AVRO_ENUM,
+	AVRO_FIXED,
+	AVRO_MAP,
+	AVRO_ARRAY,
+	AVRO_UNION,
+	AVRO_LINK
+};
+typedef enum avro_type_t avro_type_t;
+
+enum avro_class_t {
+	AVRO_SCHEMA,
+	AVRO_DATUM
+};
+typedef enum avro_class_t avro_class_t;
+
+struct avro_obj_t {
+	avro_type_t type;
+	avro_class_t class_type;
+	volatile int  refcount;
+};
+
+#define avro_classof(obj)     ((obj)->class_type)
+#define is_avro_schema(obj)   (obj && avro_classof(obj) == AVRO_SCHEMA)
+#define is_avro_datum(obj)    (obj && avro_classof(obj) == AVRO_DATUM)
+
+#define avro_typeof(obj)      ((obj)->type)
+#define is_avro_string(obj)   (obj && avro_typeof(obj) == AVRO_STRING)
+#define is_avro_bytes(obj)    (obj && avro_typeof(obj) == AVRO_BYTES)
+#define is_avro_int32(obj)    (obj && avro_typeof(obj) == AVRO_INT32)
+#define is_avro_int64(obj)    (obj && avro_typeof(obj) == AVRO_INT64)
+#define is_avro_float(obj)    (obj && avro_typeof(obj) == AVRO_FLOAT)
+#define is_avro_double(obj)   (obj && avro_typeof(obj) == AVRO_DOUBLE)
+#define is_avro_boolean(obj)  (obj && avro_typeof(obj) == AVRO_BOOLEAN)
+#define is_avro_null(obj)     (obj && avro_typeof(obj) == AVRO_NULL)
+#define is_avro_primitive(obj)(is_avro_string(obj) \
+                             ||is_avro_bytes(obj) \
+                             ||is_avro_int32(obj) \
+                             ||is_avro_int64(obj) \
+                             ||is_avro_float(obj) \
+                             ||is_avro_double(obj) \
+                             ||is_avro_boolean(obj) \
+                             ||is_avro_null(obj))
+#define is_avro_record(obj)   (obj && avro_typeof(obj) == AVRO_RECORD)
+#define is_avro_enum(obj)     (obj && avro_typeof(obj) == AVRO_ENUM)
+#define is_avro_fixed(obj)    (obj && avro_typeof(obj) == AVRO_FIXED)
+#define is_avro_named_type(obj)(is_avro_record(obj) \
+                              ||is_avro_enum(obj) \
+                              ||is_avro_fixed(obj))
+#define is_avro_map(obj)      (obj && avro_typeof(obj) == AVRO_MAP)
+#define is_avro_array(obj)    (obj && avro_typeof(obj) == AVRO_ARRAY)
+#define is_avro_union(obj)    (obj && avro_typeof(obj) == AVRO_UNION)
+#define is_avro_complex_type(obj) (!(is_avro_primitive(obj)))
+#define is_avro_link(obj)     (obj && avro_typeof(obj) == AVRO_LINK)
+
+
+
+CLOSE_EXTERN
+#endif
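
Together with avro_typeof, these predicate macros let callers branch on a value's Avro type without touching struct internals (the is_avro_complex_type definition above needs its closing parenthesis, fixed here). A short dispatch sketch; avro_schema_string and avro_schema_decref come from avro/schema.h:

    /* Sketch: dispatching on avro_typeof() and the predicate macros
     * defined in this header. */
    #include <stdio.h>
    #include <avro.h>

    static const char *
    describe(avro_schema_t schema)
    {
        if (is_avro_primitive(schema))
            return "primitive";
        if (is_avro_named_type(schema))
            return "named (record, enum, or fixed)";
        return "other complex type";
    }

    int main(void)
    {
        avro_schema_t s = avro_schema_string();
        printf("string schema is %s (type %d)\n",
               describe(s), (int) avro_typeof(s));
        avro_schema_decref(s);
        return 0;
    }
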
diff --git a/lang/c/src/avro/consumer.h b/lang/c/src/avro/consumer.h
new file mode 100644
index 0000000..f7e836c
--- /dev/null
+++ b/lang/c/src/avro/consumer.h
@@ -0,0 +1,317 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.	 You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#ifndef AVRO_CONSUMER_H
+#define AVRO_CONSUMER_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <avro/platform.h>
+#include <stdlib.h>
+
+#include <avro/io.h>
+#include <avro/schema.h>
+
+
+/*---------------------------------------------------------------------
+ * Consumers
+ */
+
+/**
+ * A <i>consumer</i> is an object that knows how to process Avro data.
+ * There are consumer methods for each type of Avro data.  The
+ * <code>avro_consumer_t</code> struct is an abstract superclass, which
+ * you don't instantiate directly.  Later in this file, we define
+ * several consumer classes that know how to process Avro data in
+ * specific ways.
+ *
+ * For compound Avro values (records, arrays, maps, and unions), the
+ * consumer callbacks provide a nested consumer that should be used to
+ * process subvalues.  Each consumer instance, including these
+ * "subconsumers", contains a reference to the schema of the data that
+ * it expects to process.  This means that the functions that produce
+ * Avro data (such as avro_consume_binary) don't need to maintain their
+ * own references to any schemas, since they'll be encapsulated in the
+ * consumer that they pass their data off to.
+ */
+
+typedef struct avro_consumer_t avro_consumer_t;
+
+struct avro_consumer_t {
+	/**
+	 * The schema of the data that this consumer expects to process.
+	 */
+
+	avro_schema_t  schema;
+
+	/**
+	 * Called when this consumer is freed.  This function should
+	 * free any additional resources acquired by a consumer
+	 * subclass.
+	 */
+
+	void (*free)(avro_consumer_t *consumer);
+
+	/* PRIMITIVE VALUES */
+
+	/**
+	 * Called when a boolean value is encountered.
+	 */
+
+	int (*boolean_value)(avro_consumer_t *consumer,
+			     int value,
+			     void *user_data);
+
+	/**
+	 * Called when a bytes value is encountered. The @ref value
+	 * pointer is only guaranteed to be valid for the duration of
+	 * the callback function.  If you need to save the data for
+	 * processing later, you must copy it into another buffer.
+	 */
+
+	int (*bytes_value)(avro_consumer_t *consumer,
+			   const void *value, size_t value_len,
+			   void *user_data);
+
+	/**
+	 * Called when a double value is encountered.
+	 */
+
+	int (*double_value)(avro_consumer_t *consumer,
+			    double value,
+			    void *user_data);
+
+	/**
+	 * Called when a float value is encountered.
+	 */
+
+	int (*float_value)(avro_consumer_t *consumer,
+			   float value,
+			   void *user_data);
+
+	/**
+	 * Called when an int value is encountered.
+	 */
+
+	int (*int_value)(avro_consumer_t *consumer,
+			 int32_t value,
+			 void *user_data);
+
+	/**
+	 * Called when a long value is encountered.
+	 */
+
+	int (*long_value)(avro_consumer_t *consumer,
+			  int64_t value,
+			  void *user_data);
+
+	/**
+	 * Called when a null value is encountered.
+	 */
+
+	int (*null_value)(avro_consumer_t *consumer, void *user_data);
+
+	/**
+	 * Called when a string value is encountered.  The @ref value
+	 * pointer will point at UTF-8 encoded data.  (If the data
+	 * you're representing isn't a UTF-8 Unicode string, you
+	 * should use the bytes type.)	The @ref value_len parameter
+	 * gives the length of the data in bytes, not in Unicode
+	 * characters.	The @ref value pointer is only guaranteed to
+	 * be valid for the duration of the callback function.	If you
+	 * need to save the data for processing later, you must copy
+	 * it into another buffer.
+	 */
+
+	int (*string_value)(avro_consumer_t *consumer,
+			    const void *value, size_t value_len,
+			    void *user_data);
+
+	/* COMPOUND VALUES */
+
+	/**
+	 * Called when the beginning of an array block is encountered.
+	 * The @ref block_count parameter will contain the number of
+	 * elements in this block.
+	 */
+
+	int (*array_start_block)(avro_consumer_t *consumer,
+				 int is_first_block,
+				 unsigned int block_count,
+				 void *user_data);
+
+	/**
+	 * Called before each individual element of an array is
+	 * processed.  The index of the current element is passed into
+	 * the callback.  The callback should fill in @ref
+	 * element_consumer and @ref element_user_data with the consumer
+	 * and <code>user_data</code> pointer to use to process the
+	 * element.
+	 */
+
+	int (*array_element)(avro_consumer_t *consumer,
+			     unsigned int index,
+			     avro_consumer_t **element_consumer,
+			     void **element_user_data,
+			     void *user_data);
+
+	/**
+	 * Called when an enum value is encountered.
+	 */
+
+	int (*enum_value)(avro_consumer_t *consumer, int value,
+			  void *user_data);
+
+	/**
+	 * Called when a fixed value is encountered.  The @ref value
+	 * pointer is only guaranteed to be valid for the duration of
+	 * the callback function.  If you need to save the data for
+	 * processing later, you must copy it into another buffer.
+	 */
+
+	int (*fixed_value)(avro_consumer_t *consumer,
+			   const void *value, size_t value_len,
+			   void *user_data);
+
+	/**
+	 * Called when the beginning of a map block is encountered.
+	 * The @ref block_count parameter will contain the number of
+	 * elements in this block.
+	 */
+
+	int (*map_start_block)(avro_consumer_t *consumer,
+			       int is_first_block,
+			       unsigned int block_count,
+			       void *user_data);
+
+	/**
+	 * Called before each individual element of a map is
+	 * processed.  The index and key of the current element are
+	 * passed into the callback.  The key is only guaranteed to be
+	 * valid for the duration of the map_element_start callback,
+	 * and the map's subschema callback.  If you need to save it for
+	 * later use, you must copy the key into another memory
+	 * location.  The callback should fill in @ref value_consumer
+	 * and @ref value_user_data with the consumer and
+	 * <code>user_data</code> pointer to use to process the value.
+	 */
+
+	int (*map_element)(avro_consumer_t *consumer,
+			   unsigned int index,
+			   const char *key,
+			   avro_consumer_t **value_consumer,
+			   void **value_user_data,
+			   void *user_data);
+
+	/**
+	 * Called when the beginning of a record is encountered.
+	 */
+
+	int (*record_start)(avro_consumer_t *consumer,
+			    void *user_data);
+
+	/**
+	 * Called before each individual field of a record is
+	 * processed.  The index of the current field is passed into
+	 * the callback.  The callback should fill in @ref
+	 * field_consumer and @ref field_user_data with the consumer and
+	 * <code>user_data</code> pointer to use to process the field.
+	 */
+
+	int (*record_field)(avro_consumer_t *consumer,
+			    unsigned int index,
+			    avro_consumer_t **field_consumer,
+			    void **field_user_data,
+			    void *user_data);
+
+	/**
+	 * Called when a union value is encountered.  The callback
+	 * should fill in @ref branch_consumer and @ref branch_user_data
+	 * with the consumer and <code>user_data</code> pointer to use to
+	 * process the branch.
+	 */
+
+	int (*union_branch)(avro_consumer_t *consumer,
+			    unsigned int discriminant,
+			    avro_consumer_t **branch_consumer,
+			    void **branch_user_data,
+			    void *user_data);
+};
+
+
+/**
+ * Calls the given callback in consumer, if it's present.  If the
+ * callback is NULL, it just returns a success code.
+ */
+
+#define avro_consumer_call(consumer, callback, ...)	\
+	(((consumer)->callback == NULL)? 0:		\
+	 (consumer)->callback((consumer), __VA_ARGS__))
+
+
+/**
+ * Frees an @ref avro_consumer_t instance.  (This function works on
+ * consumer subclasses, too.)
+ */
+
+void avro_consumer_free(avro_consumer_t *consumer);
+
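+/**
+ * Editorial sketch: the smallest useful consumer "subclass".  It
+ * handles only boolean values; the avro_consumer_call macro's NULL
+ * check silently skips every other callback.  This is illustrative
+ * only (it assumes <stdio.h> and omits error handling):
+ *
+ * <pre>
+ * static int
+ * print_boolean(avro_consumer_t *consumer, int value, void *user_data)
+ * {
+ *     (void) consumer;
+ *     (void) user_data;
+ *     printf("boolean: %s\n", value ? "true" : "false");
+ *     return 0;
+ * }
+ *
+ * // Designated initializer: all other callbacks stay NULL.
+ * static avro_consumer_t  bool_consumer = {
+ *     .boolean_value = print_boolean
+ * };
+ *
+ * static void
+ * demo(void)
+ * {
+ *     // Dispatches to print_boolean; a NULL callback returns 0.
+ *     avro_consumer_call(&bool_consumer, boolean_value, 1, NULL);
+ * }
+ * </pre>
+ */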
+
+/*---------------------------------------------------------------------
+ * Resolvers
+ */
+
+/**
+ * A <i>resolver</i> is a special kind of consumer that knows how to
+ * implement Avro's schema resolution rules to translate between a
+ * writer schema and a reader schema.  The consumer callbacks line up
+ * with the writer schema; as each element of the datum is produced, the
+ * resolver fills in the contents of an @ref avro_datum_t instance.
+ * (The datum is provided as the user_data when you use the consumer.)
+ */
+
+avro_consumer_t *
+avro_resolver_new(avro_schema_t writer_schema,
+		  avro_schema_t reader_schema);
+
+
+/*---------------------------------------------------------------------
+ * Binary encoding
+ */
+
+/**
+ * Reads an Avro datum from the given @ref avro_reader_t.  As the
+ * datum is read, each portion of it is passed off to the appropriate
+ * callback in @ref consumer.
+ */
+
+int
+avro_consume_binary(avro_reader_t reader,
+		    avro_consumer_t *consumer,
+		    void *ud);
+
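+/**
+ * Editorial sketch of the intended flow: resolve binary data written
+ * with one schema into a datum of another.  Assumes wschema and
+ * rschema are valid avro_schema_t handles, buf/buf_len hold the
+ * binary encoding, and avro_datum_from_schema comes from
+ * avro/legacy.h; error handling omitted.
+ *
+ * <pre>
+ * avro_reader_t     reader   = avro_reader_memory(buf, buf_len);
+ * avro_consumer_t  *resolver = avro_resolver_new(wschema, rschema);
+ * avro_datum_t      datum    = avro_datum_from_schema(rschema);
+ *
+ * // The resolver fills in the datum that we pass as user_data.
+ * avro_consume_binary(reader, resolver, datum);
+ *
+ * avro_consumer_free(resolver);
+ * avro_reader_free(reader);
+ * </pre>
+ */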
+
+CLOSE_EXTERN
+#endif
diff --git a/lang/c/src/avro/data.h b/lang/c/src/avro/data.h
new file mode 100644
index 0000000..5355042
--- /dev/null
+++ b/lang/c/src/avro/data.h
@@ -0,0 +1,526 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#ifndef AVRO_DATA_H
+#define AVRO_DATA_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+
+/*
+ * This file defines some helper data structures that are used within
+ * Avro, and in the schema-specific types created by avrocc.
+ */
+
+
+/*---------------------------------------------------------------------
+ * Arrays
+ */
+
+/**
+ * A resizable array of fixed-size elements.
+ */
+
+typedef struct avro_raw_array {
+	size_t  element_size;
+	size_t  element_count;
+	size_t  allocated_size;
+	void  *data;
+} avro_raw_array_t;
+
+/**
+ * Initializes a new avro_raw_array_t that you've allocated yourself.
+ */
+
+void avro_raw_array_init(avro_raw_array_t *array, size_t element_size);
+
+/**
+ * Finalizes an avro_raw_array_t.
+ */
+
+void avro_raw_array_done(avro_raw_array_t *array);
+
+/**
+ * Clears an avro_raw_array_t.  This does not deallocate any space; this
+ * allows us to reuse the underlying array buffer as we start to re-add
+ * elements to the array.
+ */
+
+void avro_raw_array_clear(avro_raw_array_t *array);
+
+/**
+ * Ensures that there is enough allocated space to store the given
+ * number of elements in an avro_raw_array_t.  If we can't allocate that
+ * much space, we return ENOMEM.
+ */
+
+int
+avro_raw_array_ensure_size(avro_raw_array_t *array, size_t desired_count);
+
+/**
+ * Ensures that there is enough allocated space to store the given
+ * number of elements in an avro_raw_array_t.  If the array grows as a
+ * result of this operation, we will fill in any newly allocated space
+ * with 0 bytes.  If we can't allocate that much space, we return
+ * ENOMEM.
+ */
+
+int
+avro_raw_array_ensure_size0(avro_raw_array_t *array, size_t desired_count);
+
+/**
+ * Returns the number of elements in an avro_raw_array_t.
+ */
+
+#define avro_raw_array_size(array) ((array)->element_count)
+
+/**
+ * Returns the given element of an avro_raw_array_t as a <code>void
+ * *</code>.
+ */
+
+#define avro_raw_array_get_raw(array, index) \
+	((char *) (array)->data + (array)->element_size * (index))
+
+/**
+ * Returns the given element of an avro_raw_array_t, using element_type
+ * as the type of the elements.  The result is *not* a pointer to the
+ * element; you get the element itself.
+ */
+
+#define avro_raw_array_get(array, element_type, index) \
+	(((element_type *) (array)->data)[index])
+
+/**
+ * Appends a new element to an avro_raw_array_t, expanding it if
+ * necessary.  Returns a pointer to the new element, or NULL if we
+ * needed to reallocate the array and couldn't.
+ */
+
+void *avro_raw_array_append(avro_raw_array_t *array);
+
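+/**
+ * Editorial sketch: a dynamic array of int32_t values (error
+ * handling abbreviated):
+ *
+ * <pre>
+ * avro_raw_array_t  arr;
+ * avro_raw_array_init(&arr, sizeof(int32_t));
+ *
+ * int32_t  *slot = (int32_t *) avro_raw_array_append(&arr);
+ * if (slot != NULL) {
+ *     *slot = 42;
+ * }
+ *
+ * // Fetch the element back by value, not by pointer.
+ * int32_t  first = avro_raw_array_get(&arr, int32_t, 0);
+ *
+ * avro_raw_array_done(&arr);
+ * </pre>
+ */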
+
+/*---------------------------------------------------------------------
+ * Maps
+ */
+
+/**
+ * The type of the elements in a map's elements array.
+ */
+
+typedef struct avro_raw_map_entry {
+	const char  *key;
+} avro_raw_map_entry_t;
+
+/**
+ * A string-indexed map of fixed-size elements.
+ */
+
+typedef struct avro_raw_map {
+	avro_raw_array_t  elements;
+	void  *indices_by_key;
+} avro_raw_map_t;
+
+/**
+ * Initializes a new avro_raw_map_t that you've allocated yourself.
+ */
+
+void avro_raw_map_init(avro_raw_map_t *map, size_t element_size);
+
+/**
+ * Finalizes an avro_raw_map_t.
+ */
+
+void avro_raw_map_done(avro_raw_map_t *map);
+
+/**
+ * Clears an avro_raw_map_t.
+ */
+
+void avro_raw_map_clear(avro_raw_map_t *map);
+
+/**
+ * Ensures that there is enough allocated space to store the given
+ * number of elements in an avro_raw_map_t.  If we can't allocate that
+ * much space, we return ENOMEM.
+ */
+
+int
+avro_raw_map_ensure_size(avro_raw_map_t *map, size_t desired_count);
+
+/**
+ * Returns the number of elements in an avro_raw_map_t.
+ */
+
+#define avro_raw_map_size(map)  avro_raw_array_size(&((map)->elements))
+
+/**
+ * Returns the avro_raw_map_entry_t for a given index.
+ */
+
+#define avro_raw_get_entry(map, index) \
+	((avro_raw_map_entry_t *) \
+	 avro_raw_array_get_raw(&(map)->elements, index))
+
+/**
+ * Returns the given element of an avro_raw_map_t as a <code>void
+ * *</code>.  The indexes are assigned based on the order that the
+ * elements are added to the map.
+ */
+
+#define avro_raw_map_get_raw(map, index) \
+	(avro_raw_array_get_raw(&(map)->elements, index) + \
+	 sizeof(avro_raw_map_entry_t))
+
+/**
+ * Returns the element of an avro_raw_map_t with the given numeric
+ * index.  The indexes are assigned based on the order that the elements
+ * are added to the map.
+ */
+
+#define avro_raw_map_get_by_index(map, element_type, index) \
+	(*((element_type *) avro_raw_map_get_raw(map, index)))
+
+/**
+ * Returns the key of the element with the given numeric index.
+ */
+
+#define avro_raw_map_get_key(map, index) \
+	(avro_raw_get_entry(map, index)->key)
+
+/**
+ * Returns the element of an avro_raw_map_t with the given string key.
+ * If the given element doesn't exist, returns NULL.  If @ref index
+ * isn't NULL, it will be filled in with the index of the element.
+ */
+
+void *avro_raw_map_get(const avro_raw_map_t *map, const char *key,
+		       size_t *index);
+
+/**
+ * Retrieves the element of an avro_raw_map_t with the given string key,
+ * creating it if necessary.  A pointer to the element is placed into
+ * @ref element.  If @ref index isn't NULL, it will be filled in with
+ * the index of the element.  We return 1 if the element is new; 0 if
+ * it's not, and a negative error code if there was some problem.
+ */
+
+int avro_raw_map_get_or_create(avro_raw_map_t *map, const char *key,
+			       void **element, size_t *index);
+
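+/**
+ * Editorial sketch: counting word occurrences with an avro_raw_map_t
+ * of int counters (error handling omitted):
+ *
+ * <pre>
+ * avro_raw_map_t  counts;
+ * avro_raw_map_init(&counts, sizeof(int));
+ *
+ * int     *count;
+ * size_t  index;
+ * int     is_new = avro_raw_map_get_or_create(&counts, "hello",
+ *                                             (void **) &count, &index);
+ * if (is_new) {
+ *     *count = 0;   // freshly created element: initialize it
+ * }
+ * (*count)++;
+ *
+ * avro_raw_map_done(&counts);
+ * </pre>
+ */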
+
+/*---------------------------------------------------------------------
+ * Wrapped buffers
+ */
+
+/**
+ * A pointer to an unmodifiable external memory region, along with
+ * functions for freeing that buffer when it's no longer needed, and
+ * copying it.
+ */
+
+typedef struct avro_wrapped_buffer  avro_wrapped_buffer_t;
+
+struct avro_wrapped_buffer {
+	/** A pointer to the memory region */
+	const void  *buf;
+
+	/** The size of the memory region */
+	size_t  size;
+
+	/** Additional data needed by the methods below */
+	void  *user_data;
+
+	/**
+	 * A function that will be called when the memory region is no
+	 * longer needed.  This pointer can be NULL if nothing special
+	 * needs to be done to free the buffer.
+	 */
+	void
+	(*free)(avro_wrapped_buffer_t *self);
+
+	/**
+	 * A function that makes a copy of a portion of a wrapped
+	 * buffer.  This doesn't have to involve duplicating the memory
+	 * region, but it should ensure that the free method can be
+	 * safely called on both copies without producing any errors or
+	 * memory corruption.  If this function is NULL, then we'll use
+	 * a default implementation that calls @ref
+	 * avro_wrapped_buffer_new_copy.
+	 */
+	int
+	(*copy)(avro_wrapped_buffer_t *dest, const avro_wrapped_buffer_t *src,
+		size_t offset, size_t length);
+
+	/**
+	 * A function that "slices" a wrapped buffer, causing it to
+	 * point at a subset of the existing buffer.  Usually, this just
+	 * requires updating the @ref buf and @ref size fields.  If
+	 * you don't need to do anything other than this, this function
+	 * pointer can be left @c NULL.  The function can assume that
+	 * the @a offset and @a length parameters point to a valid
+	 * subset of the existing wrapped buffer.
+	 */
+	int
+	(*slice)(avro_wrapped_buffer_t *self, size_t offset, size_t length);
+};
+
+/**
+ * Free a wrapped buffer.
+ */
+
+#define avro_wrapped_buffer_free(self) \
+	do { \
+		if ((self)->free != NULL) { \
+			(self)->free((self)); \
+		} \
+	} while (0)
+
+/**
+ * A static initializer for an empty wrapped buffer.
+ */
+
+#define AVRO_WRAPPED_BUFFER_EMPTY  { NULL, 0, NULL, NULL, NULL, NULL }
+
+/**
+ * Moves a wrapped buffer.  After returning, @a dest will wrap the
+ * buffer that @a src used to point at, and @a src will be empty.
+ */
+
+void
+avro_wrapped_buffer_move(avro_wrapped_buffer_t *dest,
+			 avro_wrapped_buffer_t *src);
+
+/**
+ * Copies a buffer.
+ */
+
+int
+avro_wrapped_buffer_copy(avro_wrapped_buffer_t *dest,
+			 const avro_wrapped_buffer_t *src,
+			 size_t offset, size_t length);
+
+/**
+ * Slices a buffer.
+ */
+
+int
+avro_wrapped_buffer_slice(avro_wrapped_buffer_t *self,
+			  size_t offset, size_t length);
+
+/**
+ * Creates a new wrapped buffer wrapping the given memory region.  You
+ * have to ensure that buf stays around for as long as you need the new
+ * wrapped buffer.  If you copy the wrapped buffer (using
+ * avro_wrapped_buffer_copy), this will create a copy of the data.
+ * Additional copies will reuse this new copy.
+ */
+
+int
+avro_wrapped_buffer_new(avro_wrapped_buffer_t *dest,
+			const void *buf, size_t length);
+
+/**
+ * Creates a new wrapped buffer wrapping the given C string.
+ */
+
+#define avro_wrapped_buffer_new_string(dest, str) \
+    (avro_wrapped_buffer_new((dest), (str), strlen((str))+1))
+
+/**
+ * Creates a new wrapped buffer containing a copy of the given memory
+ * region.  This new copy will be reference counted; if you copy it
+ * further (using avro_wrapped_buffer_copy), the new copies will share a
+ * single underlying buffer.
+ */
+
+int
+avro_wrapped_buffer_new_copy(avro_wrapped_buffer_t *dest,
+			     const void *buf, size_t length);
+
+/**
+ * Creates a new wrapped buffer containing a copy of the given C string.
+ */
+
+#define avro_wrapped_buffer_new_string_copy(dest, str) \
+    (avro_wrapped_buffer_new_copy((dest), (str), strlen((str))+1))
+
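+/**
+ * Editorial sketch: wrapping a string constant and slicing off its
+ * first five bytes (assumes the string outlives the buffer; error
+ * handling omitted):
+ *
+ * <pre>
+ * static const char  GREETING[] = "hello, world";
+ *
+ * avro_wrapped_buffer_t  buf;
+ * avro_wrapped_buffer_new(&buf, GREETING, sizeof(GREETING));
+ * avro_wrapped_buffer_slice(&buf, 0, 5);   // buf now covers "hello"
+ * avro_wrapped_buffer_free(&buf);
+ * </pre>
+ */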
+
+/*---------------------------------------------------------------------
+ * Strings
+ */
+
+/**
+ * A resizable buffer for storing strings and bytes values.
+ */
+
+typedef struct avro_raw_string {
+	avro_wrapped_buffer_t  wrapped;
+} avro_raw_string_t;
+
+/**
+ * Initializes an avro_raw_string_t that you've allocated yourself.
+ */
+
+void avro_raw_string_init(avro_raw_string_t *str);
+
+/**
+ * Finalizes an avro_raw_string_t.
+ */
+
+void avro_raw_string_done(avro_raw_string_t *str);
+
+/**
+ * Returns the length of the data stored in an avro_raw_string_t.  If
+ * the buffer contains a C string, this length includes the NUL
+ * terminator.
+ */
+
+#define avro_raw_string_length(str)  ((str)->wrapped.size)
+
+/**
+ * Returns a pointer to the data stored in an avro_raw_string_t.
+ */
+
+#define avro_raw_string_get(str)  ((str)->wrapped.buf)
+
+/**
+ * Fills an avro_raw_string_t with a copy of the given buffer.
+ */
+
+void avro_raw_string_set_length(avro_raw_string_t *str,
+				const void *src,
+				size_t length);
+
+/**
+ * Fills an avro_raw_string_t with a copy of the given C string.
+ */
+
+void avro_raw_string_set(avro_raw_string_t *str, const char *src);
+
+/**
+ * Appends the given C string to an avro_raw_string_t.
+ */
+
+void avro_raw_string_append(avro_raw_string_t *str, const char *src);
+
+/**
+ * Appends the given C string to an avro_raw_string_t, using the
+ * provided length instead of calling strlen(src).
+ */
+
+void avro_raw_string_append_length(avro_raw_string_t *str,
+				   const void *src,
+				   size_t length);
+
+/**
+ * Gives control of a buffer to an avro_raw_string_t.
+ */
+
+void
+avro_raw_string_give(avro_raw_string_t *str,
+		     avro_wrapped_buffer_t *src);
+
+/**
+ * Returns an avro_wrapped_buffer_t for the content of the string,
+ * ideally without copying it.
+ */
+
+int
+avro_raw_string_grab(const avro_raw_string_t *str,
+		     avro_wrapped_buffer_t *dest);
+
+/**
+ * Clears an avro_raw_string_t.
+ */
+
+void avro_raw_string_clear(avro_raw_string_t *str);
+
+
+/**
+ * Tests two avro_raw_string_t instances for equality.
+ */
+
+int avro_raw_string_equals(const avro_raw_string_t *str1,
+			   const avro_raw_string_t *str2);
+
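+/**
+ * Editorial sketch: building and comparing two strings (error
+ * handling omitted):
+ *
+ * <pre>
+ * avro_raw_string_t  s1, s2;
+ * avro_raw_string_init(&s1);
+ * avro_raw_string_init(&s2);
+ *
+ * avro_raw_string_set(&s1, "foo");
+ * avro_raw_string_append(&s1, "bar");
+ * avro_raw_string_set(&s2, "foobar");
+ *
+ * int  same = avro_raw_string_equals(&s1, &s2);   // nonzero: equal
+ *
+ * avro_raw_string_done(&s1);
+ * avro_raw_string_done(&s2);
+ * </pre>
+ */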
+
+/*---------------------------------------------------------------------
+ * Memoization
+ */
+
+/**
+ * A specialized map that can be used to memoize the results of a
+ * function.  The API allows you to use two keys as the memoization
+ * keys; if you only need one key, just use NULL for the second key.
+ * The result of the function should be a single pointer, or an integer
+ * that can be cast into a pointer (i.e., an intptr_t).
+ */
+
+typedef struct avro_memoize {
+	void  *cache;
+} avro_memoize_t;
+
+/**
+ * Initializes an avro_memoize_t that you've allocated yourself.
+ */
+
+void
+avro_memoize_init(avro_memoize_t *mem);
+
+/**
+ * Finalizes an avro_memoize_t.
+ */
+
+void
+avro_memoize_done(avro_memoize_t *mem);
+
+/**
+ * Search for a cached value in an avro_memoize_t.  Returns a boolean
+ * indicating whether there's a value in the cache for the given keys.
+ * If there is, the cached result is placed into @ref result.
+ */
+
+int
+avro_memoize_get(avro_memoize_t *mem,
+		 void *key1, void *key2,
+		 void **result);
+
+/**
+ * Stores a new cached value into an avro_memoize_t, overwriting it if
+ * necessary.
+ */
+
+void
+avro_memoize_set(avro_memoize_t *mem,
+		 void *key1, void *key2,
+		 void *result);
+
+/**
+ * Removes any cached value for the given key from an avro_memoize_t.
+ */
+
+void
+avro_memoize_delete(avro_memoize_t *mem, void *key1, void *key2);
+
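+/**
+ * Editorial sketch: caching a derived pointer under a single key;
+ * the second key is NULL, as described above.  The helper
+ * expensive_computation is hypothetical:
+ *
+ * <pre>
+ * avro_memoize_t  mem;
+ * avro_memoize_init(&mem);
+ *
+ * void  *cached = NULL;
+ * if (!avro_memoize_get(&mem, key, NULL, &cached)) {
+ *     cached = expensive_computation(key);   // hypothetical helper
+ *     avro_memoize_set(&mem, key, NULL, cached);
+ * }
+ *
+ * avro_memoize_done(&mem);
+ * </pre>
+ */
+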
+CLOSE_EXTERN
+#endif
diff --git a/lang/c/src/avro/errors.h b/lang/c/src/avro/errors.h
new file mode 100644
index 0000000..ee544e0
--- /dev/null
+++ b/lang/c/src/avro/errors.h
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#ifndef AVRO_ERRORS_H
+#define AVRO_ERRORS_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+/*
+ * Returns a textual description of the last error condition returned by
+ * an Avro function.
+ */
+
+const char *avro_strerror(void);
+
+void
+avro_set_error(const char *fmt, ...);
+
+void
+avro_prefix_error(const char *fmt, ...);
+
+CLOSE_EXTERN
+#endif
diff --git a/lang/c/src/avro/generic.h b/lang/c/src/avro/generic.h
new file mode 100644
index 0000000..b1c0841
--- /dev/null
+++ b/lang/c/src/avro/generic.h
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#ifndef AVRO_GENERIC_H
+#define AVRO_GENERIC_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <avro/platform.h>
+#include <stdlib.h>
+
+#include <avro/schema.h>
+#include <avro/value.h>
+
+/*
+ * This file contains an avro_value_t implementation that can store
+ * values of any Avro schema.  It replaces the old avro_datum_t class.
+ */
+
+
+/**
+ * Return a generic avro_value_iface_t implementation for the given
+ * schema, regardless of what type it is.
+ */
+
+avro_value_iface_t *
+avro_generic_class_from_schema(avro_schema_t schema);
+
+/**
+ * Allocate a new instance of the given generic value class.  @a iface
+ * must have been created by @ref avro_generic_class_from_schema.
+ */
+
+int
+avro_generic_value_new(avro_value_iface_t *iface, avro_value_t *dest);
+
+
+/*
+ * These functions return an avro_value_iface_t implementation for each
+ * primitive schema type.  (For enum, fixed, and the compound types, you
+ * must use the @ref avro_generic_class_from_schema function.)
+ */
+
+avro_value_iface_t *avro_generic_boolean_class(void);
+avro_value_iface_t *avro_generic_bytes_class(void);
+avro_value_iface_t *avro_generic_double_class(void);
+avro_value_iface_t *avro_generic_float_class(void);
+avro_value_iface_t *avro_generic_int_class(void);
+avro_value_iface_t *avro_generic_long_class(void);
+avro_value_iface_t *avro_generic_null_class(void);
+avro_value_iface_t *avro_generic_string_class(void);
+
+
+/*
+ * These functions instantiate a new generic primitive value.
+ */
+
+int avro_generic_boolean_new(avro_value_t *value, int val);
+int avro_generic_bytes_new(avro_value_t *value, void *buf, size_t size);
+int avro_generic_double_new(avro_value_t *value, double val);
+int avro_generic_float_new(avro_value_t *value, float val);
+int avro_generic_int_new(avro_value_t *value, int32_t val);
+int avro_generic_long_new(avro_value_t *value, int64_t val);
+int avro_generic_null_new(avro_value_t *value);
+int avro_generic_string_new(avro_value_t *value, const char *val);
+int avro_generic_string_new_length(avro_value_t *value, const char *val, size_t size);
+
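+/*
+ * Editorial sketch: instantiating a generic value for an arbitrary
+ * schema.  Assumes schema is a valid avro_schema_t; the two decref
+ * cleanup calls are assumed to come from avro/value.h; error
+ * handling omitted.
+ *
+ *     avro_value_iface_t  *iface = avro_generic_class_from_schema(schema);
+ *     avro_value_t        value;
+ *     avro_generic_value_new(iface, &value);
+ *     // ... fill in and use the value via the avro_value_t API ...
+ *     avro_value_decref(&value);
+ *     avro_value_iface_decref(iface);
+ */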
+
+CLOSE_EXTERN
+#endif
diff --git a/lang/c/src/avro/io.h b/lang/c/src/avro/io.h
new file mode 100644
index 0000000..0cfb319
--- /dev/null
+++ b/lang/c/src/avro/io.h
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#ifndef AVRO_IO_H
+#define AVRO_IO_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <avro/platform.h>
+#include <stdio.h>
+
+#include <avro/basics.h>
+#include <avro/legacy.h>
+#include <avro/schema.h>
+#include <avro/value.h>
+
+typedef struct avro_reader_t_ *avro_reader_t;
+typedef struct avro_writer_t_ *avro_writer_t;
+
+/*
+ * io
+ */
+
+avro_reader_t avro_reader_file(FILE * fp);
+avro_reader_t avro_reader_file_fp(FILE * fp, int should_close);
+avro_writer_t avro_writer_file(FILE * fp);
+avro_writer_t avro_writer_file_fp(FILE * fp, int should_close);
+avro_reader_t avro_reader_memory(const char *buf, int64_t len);
+avro_writer_t avro_writer_memory(const char *buf, int64_t len);
+
+void
+avro_reader_memory_set_source(avro_reader_t reader, const char *buf, int64_t len);
+
+void
+avro_writer_memory_set_dest(avro_writer_t writer, const char *buf, int64_t len);
+
+int avro_read(avro_reader_t reader, void *buf, int64_t len);
+int avro_skip(avro_reader_t reader, int64_t len);
+int avro_write(avro_writer_t writer, void *buf, int64_t len);
+
+void avro_reader_reset(avro_reader_t reader);
+
+void avro_writer_reset(avro_writer_t writer);
+int64_t avro_writer_tell(avro_writer_t writer);
+void avro_writer_flush(avro_writer_t writer);
+
+void avro_writer_dump(avro_writer_t writer, FILE * fp);
+void avro_reader_dump(avro_reader_t reader, FILE * fp);
+
+int avro_reader_is_eof(avro_reader_t reader);
+
+void avro_reader_free(avro_reader_t reader);
+void avro_writer_free(avro_writer_t writer);
+
+int avro_schema_to_json(const avro_schema_t schema, avro_writer_t out);
+
+/*
+ * Reads a binary-encoded Avro value from the given reader object,
+ * storing the result into dest.
+ */
+
+int
+avro_value_read(avro_reader_t reader, avro_value_t *dest);
+
+/*
+ * Writes a binary-encoded Avro value to the given writer object.
+ */
+
+int
+avro_value_write(avro_writer_t writer, avro_value_t *src);
+
+/*
+ * Returns the size of the binary encoding of the given Avro value.
+ */
+
+int
+avro_value_sizeof(avro_value_t *src, size_t *size);
+
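+/*
+ * Editorial sketch: a binary round trip through memory buffers.
+ * Assumes value and dest are initialized avro_value_t instances with
+ * compatible schemas; error handling omitted.
+ *
+ *     char  buf[4096];
+ *     avro_writer_t  writer = avro_writer_memory(buf, sizeof(buf));
+ *     avro_value_write(writer, &value);
+ *
+ *     avro_reader_t  reader = avro_reader_memory(buf, avro_writer_tell(writer));
+ *     avro_value_read(reader, &dest);
+ *
+ *     avro_writer_free(writer);
+ *     avro_reader_free(reader);
+ */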
+
+/* File object container */
+typedef struct avro_file_reader_t_ *avro_file_reader_t;
+typedef struct avro_file_writer_t_ *avro_file_writer_t;
+
+int avro_file_writer_create(const char *path, avro_schema_t schema,
+			    avro_file_writer_t * writer);
+int avro_file_writer_create_fp(FILE *fp, const char *path, int should_close,
+				avro_schema_t schema, avro_file_writer_t * writer);
+int avro_file_writer_create_with_codec(const char *path,
+				avro_schema_t schema, avro_file_writer_t * writer,
+				const char *codec, size_t block_size);
+int avro_file_writer_create_with_codec_fp(FILE *fp, const char *path, int should_close,
+				avro_schema_t schema, avro_file_writer_t * writer,
+				const char *codec, size_t block_size);
+int avro_file_writer_open(const char *path, avro_file_writer_t * writer);
+int avro_file_writer_open_bs(const char *path, avro_file_writer_t * writer, size_t block_size);
+int avro_file_reader(const char *path, avro_file_reader_t * reader);
+int avro_file_reader_fp(FILE *fp, const char *path, int should_close,
+			avro_file_reader_t * reader);
+
+avro_schema_t
+avro_file_reader_get_writer_schema(avro_file_reader_t reader);
+
+int avro_file_writer_sync(avro_file_writer_t writer);
+int avro_file_writer_flush(avro_file_writer_t writer);
+int avro_file_writer_close(avro_file_writer_t writer);
+
+int avro_file_reader_close(avro_file_reader_t reader);
+
+int
+avro_file_reader_read_value(avro_file_reader_t reader, avro_value_t *dest);
+
+int
+avro_file_writer_append_value(avro_file_writer_t writer, avro_value_t *src);
+
+int
+avro_file_writer_append_encoded(avro_file_writer_t writer,
+				const void *buf, int64_t len);
+
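+/*
+ * Editorial sketch: writing a container file and reading it back.
+ * Assumes schema is valid, value holds data of that schema, and dest
+ * is an initialized value for the file's schema; error handling
+ * omitted.
+ *
+ *     avro_file_writer_t  writer;
+ *     avro_file_writer_create("data.avro", schema, &writer);
+ *     avro_file_writer_append_value(writer, &value);
+ *     avro_file_writer_close(writer);
+ *
+ *     avro_file_reader_t  reader;
+ *     avro_file_reader("data.avro", &reader);
+ *     while (avro_file_reader_read_value(reader, &dest) == 0) {
+ *         // process dest
+ *     }
+ *     avro_file_reader_close(reader);
+ */
+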
+/*
+ * Legacy avro_datum_t API
+ */
+
+int avro_read_data(avro_reader_t reader,
+		   avro_schema_t writer_schema,
+		   avro_schema_t reader_schema, avro_datum_t * datum);
+int avro_skip_data(avro_reader_t reader, avro_schema_t writer_schema);
+int avro_write_data(avro_writer_t writer,
+		    avro_schema_t writer_schema, avro_datum_t datum);
+int64_t avro_size_data(avro_writer_t writer,
+		       avro_schema_t writer_schema, avro_datum_t datum);
+
+int avro_file_writer_append(avro_file_writer_t writer, avro_datum_t datum);
+
+int avro_file_reader_read(avro_file_reader_t reader,
+			  avro_schema_t readers_schema, avro_datum_t * datum);
+
+CLOSE_EXTERN
+#endif
diff --git a/lang/c/src/avro/legacy.h b/lang/c/src/avro/legacy.h
new file mode 100644
index 0000000..6f65daa
--- /dev/null
+++ b/lang/c/src/avro/legacy.h
@@ -0,0 +1,264 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#ifndef AVRO_LEGACY_H
+#define AVRO_LEGACY_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <avro/platform.h>
+#include <stdio.h>
+
+#include <avro/basics.h>
+#include <avro/data.h>
+#include <avro/schema.h>
+#include <avro/value.h>
+
+/*
+ * This file defines the deprecated interface for handling Avro values.
+ * It's here solely for backwards compatibility.  New code should use
+ * the avro_value_t interface (defined in avro/value.h).  The
+ * avro_datum_t type has been replaced by the "generic" implementation
+ * of the value interface, which is defined in avro/generic.h.  You can
+ * also use your own application-specific types as Avro values by
+ * defining your own avro_value_t implementation for them.
+ */
+
+/**
+ * A function used to free a bytes, string, or fixed buffer once it is
+ * no longer needed by the datum that wraps it.
+ */
+
+typedef void
+(*avro_free_func_t)(void *ptr, size_t sz);
+
+/**
+ * An avro_free_func_t that frees the buffer using the custom allocator
+ * provided to avro_set_allocator.
+ */
+
+void
+avro_alloc_free_func(void *ptr, size_t sz);
+
+/*
+ * Datum constructors.  Each datum stores a reference to the schema that
+ * the datum is an instance of.  The primitive datum constructors don't
+ * need to take in an explicit avro_schema_t parameter, since there's
+ * only one schema that they could be an instance of.  The complex
+ * constructors do need an explicit schema parameter.
+ */
+
+typedef struct avro_obj_t *avro_datum_t;
+avro_datum_t avro_string(const char *str);
+avro_datum_t avro_givestring(const char *str,
+			     avro_free_func_t free);
+avro_datum_t avro_bytes(const char *buf, int64_t len);
+avro_datum_t avro_givebytes(const char *buf, int64_t len,
+			    avro_free_func_t free);
+avro_datum_t avro_int32(int32_t i);
+avro_datum_t avro_int64(int64_t l);
+avro_datum_t avro_float(float f);
+avro_datum_t avro_double(double d);
+avro_datum_t avro_boolean(int8_t i);
+avro_datum_t avro_null(void);
+avro_datum_t avro_record(avro_schema_t schema);
+avro_datum_t avro_enum(avro_schema_t schema, int i);
+avro_datum_t avro_fixed(avro_schema_t schema,
+			const char *bytes, const int64_t size);
+avro_datum_t avro_givefixed(avro_schema_t schema,
+			    const char *bytes, const int64_t size,
+			    avro_free_func_t free);
+avro_datum_t avro_map(avro_schema_t schema);
+avro_datum_t avro_array(avro_schema_t schema);
+avro_datum_t avro_union(avro_schema_t schema,
+			int64_t discriminant, const avro_datum_t datum);
+
+/**
+ * Returns the schema that the datum is an instance of.
+ */
+
+avro_schema_t avro_datum_get_schema(const avro_datum_t datum);
+
+/*
+ * Constructs a new avro_datum_t instance that's appropriate for holding
+ * values of the given schema.
+ */
+
+avro_datum_t avro_datum_from_schema(const avro_schema_t schema);
+
+/* getters */
+int avro_string_get(avro_datum_t datum, char **p);
+int avro_bytes_get(avro_datum_t datum, char **bytes, int64_t * size);
+int avro_int32_get(avro_datum_t datum, int32_t * i);
+int avro_int64_get(avro_datum_t datum, int64_t * l);
+int avro_float_get(avro_datum_t datum, float *f);
+int avro_double_get(avro_datum_t datum, double *d);
+int avro_boolean_get(avro_datum_t datum, int8_t * i);
+
+int avro_enum_get(const avro_datum_t datum);
+const char *avro_enum_get_name(const avro_datum_t datum);
+int avro_fixed_get(avro_datum_t datum, char **bytes, int64_t * size);
+int avro_record_get(const avro_datum_t record, const char *field_name,
+		    avro_datum_t * value);
+
+/*
+ * A helper macro that extracts the value of the given field of a
+ * record.
+ */
+
+#define avro_record_get_field_value(rc, rec, typ, fname, ...)	\
+	do {							\
+		avro_datum_t  field = NULL;			\
+		(rc) = avro_record_get((rec), (fname), &field);	\
+		if (rc) break;					\
+		(rc) = avro_##typ##_get(field, __VA_ARGS__);	\
+	} while (0)
+
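+/*
+ * Editorial sketch: reading an int32 field named "age" out of a
+ * record datum with the macro above.  The record rec and its "age"
+ * field are assumed for illustration:
+ *
+ *     int      rc;
+ *     int32_t  age;
+ *     avro_record_get_field_value(rc, rec, int32, "age", &age);
+ *     if (rc == 0) {
+ *         // age now holds the field's value
+ *     }
+ */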
+
+int avro_map_get(const avro_datum_t datum, const char *key,
+		 avro_datum_t * value);
+/*
+ * For maps, the "index" for each entry is based on the order that they
+ * were added to the map.
+ */
+int avro_map_get_key(const avro_datum_t datum, int index,
+		     const char **key);
+int avro_map_get_index(const avro_datum_t datum, const char *key,
+		       int *index);
+size_t avro_map_size(const avro_datum_t datum);
+int avro_array_get(const avro_datum_t datum, int64_t index, avro_datum_t * value);
+size_t avro_array_size(const avro_datum_t datum);
+
+/*
+ * These accessors allow you to query the current branch of a union
+ * value, returning either the branch's discriminant value or the
+ * avro_datum_t of the branch.  A union value can be uninitialized, in
+ * which case the discriminant will be -1 and the datum NULL.
+ */
+
+int64_t avro_union_discriminant(const avro_datum_t datum);
+avro_datum_t avro_union_current_branch(avro_datum_t datum);
+
+/* setters */
+int avro_string_set(avro_datum_t datum, const char *p);
+int avro_givestring_set(avro_datum_t datum, const char *p,
+			avro_free_func_t free);
+
+int avro_bytes_set(avro_datum_t datum, const char *bytes, const int64_t size);
+int avro_givebytes_set(avro_datum_t datum, const char *bytes,
+		       const int64_t size,
+		       avro_free_func_t free);
+
+int avro_int32_set(avro_datum_t datum, const int32_t i);
+int avro_int64_set(avro_datum_t datum, const int64_t l);
+int avro_float_set(avro_datum_t datum, const float f);
+int avro_double_set(avro_datum_t datum, const double d);
+int avro_boolean_set(avro_datum_t datum, const int8_t i);
+
+int avro_enum_set(avro_datum_t datum, const int symbol_value);
+int avro_enum_set_name(avro_datum_t datum, const char *symbol_name);
+int avro_fixed_set(avro_datum_t datum, const char *bytes, const int64_t size);
+int avro_givefixed_set(avro_datum_t datum, const char *bytes,
+		       const int64_t size,
+		       avro_free_func_t free);
+
+int avro_record_set(avro_datum_t record, const char *field_name,
+		    avro_datum_t value);
+
+/*
+ * A helper macro that sets the value of the given field of a record.
+ */
+
+#define avro_record_set_field_value(rc, rec, typ, fname, ...)	\
+	do {							\
+		avro_datum_t  field = NULL;			\
+		(rc) = avro_record_get((rec), (fname), &field);	\
+		if (rc) break;					\
+		(rc) = avro_##typ##_set(field, __VA_ARGS__);	\
+	} while (0)
+
+int avro_map_set(avro_datum_t map, const char *key,
+		 avro_datum_t value);
+int avro_array_append_datum(avro_datum_t array_datum,
+			    avro_datum_t datum);
+
+/*
+ * This function selects the active branch of a union value, and can be
+ * safely called on an existing union to change the current branch.  If
+ * the branch changes, we'll automatically construct a new avro_datum_t
+ * for the new branch's schema type.  If the desired branch is already
+ * the active branch of the union, we'll leave the existing datum
+ * instance as-is.  The branch datum will be placed into the "branch"
+ * parameter, regardless of whether we have to create a new datum
+ * instance or not.
+ */
+
+int avro_union_set_discriminant(avro_datum_t unionp,
+				int discriminant,
+				avro_datum_t *branch);
+
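+/*
+ * Editorial sketch: selecting branch 1 of a union datum u and setting
+ * it, assuming branch 1 of the union's schema is "string" (error
+ * handling omitted):
+ *
+ *     avro_datum_t  branch = NULL;
+ *     avro_union_set_discriminant(u, 1, &branch);
+ *     avro_string_set(branch, "hello");
+ */
+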
+/**
+ * Resets a datum instance.  For arrays and maps, this frees all
+ * elements and clears the container.  For records and unions, this
+ * recursively resets any child datum instances.
+ */
+
+int
+avro_datum_reset(avro_datum_t value);
+
+/* reference counting */
+avro_datum_t avro_datum_incref(avro_datum_t value);
+void avro_datum_decref(avro_datum_t value);
+
+void avro_datum_print(avro_datum_t value, FILE * fp);
+
+int avro_datum_equal(avro_datum_t a, avro_datum_t b);
+
+/*
+ * Returns a string containing the JSON encoding of an Avro value.  You
+ * must free this string when you're done with it, using the standard
+ * free() function.  (*Not* using the custom Avro allocator.)
+ */
+
+int avro_datum_to_json(const avro_datum_t datum,
+		       int one_line, char **json_str);
+
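+/*
+ * Editorial sketch, per the note above about using the standard
+ * free() on the returned string (assumes <stdio.h> and <stdlib.h>):
+ *
+ *     char  *json = NULL;
+ *     if (avro_datum_to_json(datum, 1, &json) == 0) {
+ *         printf("%s\n", json);
+ *         free(json);   // standard free(), not the Avro allocator
+ *     }
+ */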
+
+int avro_schema_datum_validate(avro_schema_t
+			       expected_schema, avro_datum_t datum);
+
+/*
+ * An avro_value_t implementation for avro_datum_t objects.
+ */
+
+avro_value_iface_t *
+avro_datum_class(void);
+
+/*
+ * Creates a new avro_value_t instance for the given datum.
+ */
+
+int
+avro_datum_as_value(avro_value_t *value, avro_datum_t src);
+
+
+CLOSE_EXTERN
+#endif
diff --git a/lang/c/src/avro/msinttypes.h b/lang/c/src/avro/msinttypes.h
new file mode 100644
index 0000000..29be14b
--- /dev/null
+++ b/lang/c/src/avro/msinttypes.h
@@ -0,0 +1,315 @@
+// ISO C9x  compliant inttypes.h for Microsoft Visual Studio
+// Based on ISO/IEC 9899:TC2 Committee draft (May 6, 2005) WG14/N1124 
+// 
+//  Copyright (c) 2006 Alexander Chemeris
+// 
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+// 
+//   1. Redistributions of source code must retain the above copyright notice,
+//      this list of conditions and the following disclaimer.
+// 
+//   2. Redistributions in binary form must reproduce the above copyright
+//      notice, this list of conditions and the following disclaimer in the
+//      documentation and/or other materials provided with the distribution.
+// 
+//   3. The name of the author may be used to endorse or promote products
+//      derived from this software without specific prior written permission.
+// 
+// THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+// EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
+// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+// OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// 
+///////////////////////////////////////////////////////////////////////////////
+
+#ifndef _MSC_VER // [
+#error "Use this header only with Microsoft Visual C++ compilers!"
+#endif // _MSC_VER ]
+
+#ifndef _MSC_INTTYPES_H_ // [
+#define _MSC_INTTYPES_H_
+
+#if _MSC_VER > 1000
+#pragma once
+#endif
+
+//////////////////////////////////////
+// Start AVRO specific modifications
+//////////////////////////////////////
+#include "msstdint.h"
+
+// Modification for AVRO of inttypes.h
+#define __STDC_FORMAT_MACROS (1)
+
+//////////////////////////////////////
+// End AVRO specific modifications
+//////////////////////////////////////
+
+// 7.8 Format conversion of integer types
+
+typedef struct {
+   intmax_t quot;
+   intmax_t rem;
+} imaxdiv_t;
+
+// 7.8.1 Macros for format specifiers
+
+#if !defined(__cplusplus) || defined(__STDC_FORMAT_MACROS) // [   See footnote 185 at page 198
+
+// The fprintf macros for signed integers are:
+#define PRId8       "d"
+#define PRIi8       "i"
+#define PRIdLEAST8  "d"
+#define PRIiLEAST8  "i"
+#define PRIdFAST8   "d"
+#define PRIiFAST8   "i"
+
+#define PRId16       "hd"
+#define PRIi16       "hi"
+#define PRIdLEAST16  "hd"
+#define PRIiLEAST16  "hi"
+#define PRIdFAST16   "hd"
+#define PRIiFAST16   "hi"
+
+#define PRId32       "I32d"
+#define PRIi32       "I32i"
+#define PRIdLEAST32  "I32d"
+#define PRIiLEAST32  "I32i"
+#define PRIdFAST32   "I32d"
+#define PRIiFAST32   "I32i"
+
+#define PRId64       "I64d"
+#define PRIi64       "I64i"
+#define PRIdLEAST64  "I64d"
+#define PRIiLEAST64  "I64i"
+#define PRIdFAST64   "I64d"
+#define PRIiFAST64   "I64i"
+
+#define PRIdMAX     "I64d"
+#define PRIiMAX     "I64i"
+
+#define PRIdPTR     "Id"
+#define PRIiPTR     "Ii"
+
+// The fprintf macros for unsigned integers are:
+#define PRIo8       "o"
+#define PRIu8       "u"
+#define PRIx8       "x"
+#define PRIX8       "X"
+#define PRIoLEAST8  "o"
+#define PRIuLEAST8  "u"
+#define PRIxLEAST8  "x"
+#define PRIXLEAST8  "X"
+#define PRIoFAST8   "o"
+#define PRIuFAST8   "u"
+#define PRIxFAST8   "x"
+#define PRIXFAST8   "X"
+
+#define PRIo16       "ho"
+#define PRIu16       "hu"
+#define PRIx16       "hx"
+#define PRIX16       "hX"
+#define PRIoLEAST16  "ho"
+#define PRIuLEAST16  "hu"
+#define PRIxLEAST16  "hx"
+#define PRIXLEAST16  "hX"
+#define PRIoFAST16   "ho"
+#define PRIuFAST16   "hu"
+#define PRIxFAST16   "hx"
+#define PRIXFAST16   "hX"
+
+#define PRIo32       "I32o"
+#define PRIu32       "I32u"
+#define PRIx32       "I32x"
+#define PRIX32       "I32X"
+#define PRIoLEAST32  "I32o"
+#define PRIuLEAST32  "I32u"
+#define PRIxLEAST32  "I32x"
+#define PRIXLEAST32  "I32X"
+#define PRIoFAST32   "I32o"
+#define PRIuFAST32   "I32u"
+#define PRIxFAST32   "I32x"
+#define PRIXFAST32   "I32X"
+
+#define PRIo64       "I64o"
+#define PRIu64       "I64u"
+#define PRIx64       "I64x"
+#define PRIX64       "I64X"
+#define PRIoLEAST64  "I64o"
+#define PRIuLEAST64  "I64u"
+#define PRIxLEAST64  "I64x"
+#define PRIXLEAST64  "I64X"
+#define PRIoFAST64   "I64o"
+#define PRIuFAST64   "I64u"
+#define PRIxFAST64   "I64x"
+#define PRIXFAST64   "I64X"
+
+#define PRIoMAX     "I64o"
+#define PRIuMAX     "I64u"
+#define PRIxMAX     "I64x"
+#define PRIXMAX     "I64X"
+
+#define PRIoPTR     "Io"
+#define PRIuPTR     "Iu"
+#define PRIxPTR     "Ix"
+#define PRIXPTR     "IX"
+
+// The fscanf macros for signed integers are:
+#define SCNd8       "d"
+#define SCNi8       "i"
+#define SCNdLEAST8  "d"
+#define SCNiLEAST8  "i"
+#define SCNdFAST8   "d"
+#define SCNiFAST8   "i"
+
+#define SCNd16       "hd"
+#define SCNi16       "hi"
+#define SCNdLEAST16  "hd"
+#define SCNiLEAST16  "hi"
+#define SCNdFAST16   "hd"
+#define SCNiFAST16   "hi"
+
+#define SCNd32       "ld"
+#define SCNi32       "li"
+#define SCNdLEAST32  "ld"
+#define SCNiLEAST32  "li"
+#define SCNdFAST32   "ld"
+#define SCNiFAST32   "li"
+
+#define SCNd64       "I64d"
+#define SCNi64       "I64i"
+#define SCNdLEAST64  "I64d"
+#define SCNiLEAST64  "I64i"
+#define SCNdFAST64   "I64d"
+#define SCNiFAST64   "I64i"
+
+#define SCNdMAX     "I64d"
+#define SCNiMAX     "I64i"
+
+#ifdef _WIN64 // [
+#  define SCNdPTR     "I64d"
+#  define SCNiPTR     "I64i"
+#else  // _WIN64 ][
+#  define SCNdPTR     "ld"
+#  define SCNiPTR     "li"
+#endif  // _WIN64 ]
+
+// The fscanf macros for unsigned integers are:
+#define SCNo8       "o"
+#define SCNu8       "u"
+#define SCNx8       "x"
+#define SCNX8       "X"
+#define SCNoLEAST8  "o"
+#define SCNuLEAST8  "u"
+#define SCNxLEAST8  "x"
+#define SCNXLEAST8  "X"
+#define SCNoFAST8   "o"
+#define SCNuFAST8   "u"
+#define SCNxFAST8   "x"
+#define SCNXFAST8   "X"
+
+#define SCNo16       "ho"
+#define SCNu16       "hu"
+#define SCNx16       "hx"
+#define SCNX16       "hX"
+#define SCNoLEAST16  "ho"
+#define SCNuLEAST16  "hu"
+#define SCNxLEAST16  "hx"
+#define SCNXLEAST16  "hX"
+#define SCNoFAST16   "ho"
+#define SCNuFAST16   "hu"
+#define SCNxFAST16   "hx"
+#define SCNXFAST16   "hX"
+
+#define SCNo32       "lo"
+#define SCNu32       "lu"
+#define SCNx32       "lx"
+#define SCNX32       "lX"
+#define SCNoLEAST32  "lo"
+#define SCNuLEAST32  "lu"
+#define SCNxLEAST32  "lx"
+#define SCNXLEAST32  "lX"
+#define SCNoFAST32   "lo"
+#define SCNuFAST32   "lu"
+#define SCNxFAST32   "lx"
+#define SCNXFAST32   "lX"
+
+#define SCNo64       "I64o"
+#define SCNu64       "I64u"
+#define SCNx64       "I64x"
+#define SCNX64       "I64X"
+#define SCNoLEAST64  "I64o"
+#define SCNuLEAST64  "I64u"
+#define SCNxLEAST64  "I64x"
+#define SCNXLEAST64  "I64X"
+#define SCNoFAST64   "I64o"
+#define SCNuFAST64   "I64u"
+#define SCNxFAST64   "I64x"
+#define SCNXFAST64   "I64X"
+
+#define SCNoMAX     "I64o"
+#define SCNuMAX     "I64u"
+#define SCNxMAX     "I64x"
+#define SCNXMAX     "I64X"
+
+#ifdef _WIN64 // [
+#  define SCNoPTR     "I64o"
+#  define SCNuPTR     "I64u"
+#  define SCNxPTR     "I64x"
+#  define SCNXPTR     "I64X"
+#else  // _WIN64 ][
+#  define SCNoPTR     "lo"
+#  define SCNuPTR     "lu"
+#  define SCNxPTR     "lx"
+#  define SCNXPTR     "lX"
+#endif  // _WIN64 ]
+
+#endif // __STDC_FORMAT_MACROS ]
+
+// 7.8.2 Functions for greatest-width integer types
+
+// 7.8.2.1 The imaxabs function
+#define imaxabs _abs64
+
+// 7.8.2.2 The imaxdiv function
+
+// This is a modified version of the div() function from Microsoft's div.c found
+// in %MSVC.NET%\crt\src\div.c
+#ifdef STATIC_IMAXDIV // [
+static
+#else // STATIC_IMAXDIV ][
+_inline
+#endif // STATIC_IMAXDIV ]
+imaxdiv_t __cdecl imaxdiv(intmax_t numer, intmax_t denom)
+{
+   imaxdiv_t result;
+
+   result.quot = numer / denom;
+   result.rem = numer % denom;
+
+   if (numer < 0 && result.rem > 0) {
+      // did division wrong; must fix up
+      ++result.quot;
+      result.rem -= denom;
+   }
+
+   return result;
+}
+
+// 7.8.2.3 The strtoimax and strtoumax functions
+#define strtoimax _strtoi64
+#define strtoumax _strtoui64
+
+// 7.8.2.4 The wcstoimax and wcstoumax functions
+#define wcstoimax _wcstoi64
+#define wcstoumax _wcstoui64
+
+
+#endif // _MSC_INTTYPES_H_ ]
diff --git a/lang/c/src/avro/msstdint.h b/lang/c/src/avro/msstdint.h
new file mode 100644
index 0000000..d02608a
--- /dev/null
+++ b/lang/c/src/avro/msstdint.h
@@ -0,0 +1,247 @@
+// ISO C9x  compliant stdint.h for Microsoft Visual Studio
+// Based on ISO/IEC 9899:TC2 Committee draft (May 6, 2005) WG14/N1124 
+// 
+//  Copyright (c) 2006-2008 Alexander Chemeris
+// 
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+// 
+//   1. Redistributions of source code must retain the above copyright notice,
+//      this list of conditions and the following disclaimer.
+// 
+//   2. Redistributions in binary form must reproduce the above copyright
+//      notice, this list of conditions and the following disclaimer in the
+//      documentation and/or other materials provided with the distribution.
+// 
+//   3. The name of the author may be used to endorse or promote products
+//      derived from this software without specific prior written permission.
+// 
+// THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+// EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
+// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+// OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// 
+///////////////////////////////////////////////////////////////////////////////
+
+#ifndef _MSC_VER // [
+#error "Use this header only with Microsoft Visual C++ compilers!"
+#endif // _MSC_VER ]
+
+#ifndef _MSC_STDINT_H_ // [
+#define _MSC_STDINT_H_
+
+#if _MSC_VER > 1000
+#pragma once
+#endif
+
+#include <limits.h>
+
+// For Visual Studio 6 in C++ mode and for many Visual Studio versions when
+// compiling for ARM we should wrap <wchar.h> include with 'extern "C++" {}'
+// or the compiler gives many errors like this:
+//   error C2733: second C linkage of overloaded function 'wmemchr' not allowed
+#ifdef __cplusplus
+extern "C" {
+#endif
+#  include <wchar.h>
+#ifdef __cplusplus
+}
+#endif
+
+// Define _W64 macros to mark types changing their size, like intptr_t.
+#ifndef _W64
+#  if !defined(__midl) && (defined(_X86_) || defined(_M_IX86)) && _MSC_VER >= 1300
+#     define _W64 __w64
+#  else
+#     define _W64
+#  endif
+#endif
+
+
+// 7.18.1 Integer types
+
+// 7.18.1.1 Exact-width integer types
+
+// Visual Studio 6 and Embedded Visual C++ 4 don't
+// realize that, e.g., char has the same size as __int8,
+// so we give up on __intX for them.
+#if (_MSC_VER < 1300)
+   typedef signed char       int8_t;
+   typedef signed short      int16_t;
+   typedef signed int        int32_t;
+   typedef unsigned char     uint8_t;
+   typedef unsigned short    uint16_t;
+   typedef unsigned int      uint32_t;
+#else
+   typedef signed __int8     int8_t;
+   typedef signed __int16    int16_t;
+   typedef signed __int32    int32_t;
+   typedef unsigned __int8   uint8_t;
+   typedef unsigned __int16  uint16_t;
+   typedef unsigned __int32  uint32_t;
+#endif
+typedef signed __int64       int64_t;
+typedef unsigned __int64     uint64_t;
+
+
+// 7.18.1.2 Minimum-width integer types
+typedef int8_t    int_least8_t;
+typedef int16_t   int_least16_t;
+typedef int32_t   int_least32_t;
+typedef int64_t   int_least64_t;
+typedef uint8_t   uint_least8_t;
+typedef uint16_t  uint_least16_t;
+typedef uint32_t  uint_least32_t;
+typedef uint64_t  uint_least64_t;
+
+// 7.18.1.3 Fastest minimum-width integer types
+typedef int8_t    int_fast8_t;
+typedef int16_t   int_fast16_t;
+typedef int32_t   int_fast32_t;
+typedef int64_t   int_fast64_t;
+typedef uint8_t   uint_fast8_t;
+typedef uint16_t  uint_fast16_t;
+typedef uint32_t  uint_fast32_t;
+typedef uint64_t  uint_fast64_t;
+
+// 7.18.1.4 Integer types capable of holding object pointers
+#ifdef _WIN64 // [
+   typedef signed __int64    intptr_t;
+   typedef unsigned __int64  uintptr_t;
+#else // _WIN64 ][
+   typedef _W64 signed int   intptr_t;
+   typedef _W64 unsigned int uintptr_t;
+#endif // _WIN64 ]
+
+// 7.18.1.5 Greatest-width integer types
+typedef int64_t   intmax_t;
+typedef uint64_t  uintmax_t;
+
+
+// 7.18.2 Limits of specified-width integer types
+
+#if !defined(__cplusplus) || defined(__STDC_LIMIT_MACROS) // [   See footnote 220 at page 257 and footnote 221 at page 259
+
+// 7.18.2.1 Limits of exact-width integer types
+#define INT8_MIN     ((int8_t)_I8_MIN)
+#define INT8_MAX     _I8_MAX
+#define INT16_MIN    ((int16_t)_I16_MIN)
+#define INT16_MAX    _I16_MAX
+#define INT32_MIN    ((int32_t)_I32_MIN)
+#define INT32_MAX    _I32_MAX
+#define INT64_MIN    ((int64_t)_I64_MIN)
+#define INT64_MAX    _I64_MAX
+#define UINT8_MAX    _UI8_MAX
+#define UINT16_MAX   _UI16_MAX
+#define UINT32_MAX   _UI32_MAX
+#define UINT64_MAX   _UI64_MAX
+
+// 7.18.2.2 Limits of minimum-width integer types
+#define INT_LEAST8_MIN    INT8_MIN
+#define INT_LEAST8_MAX    INT8_MAX
+#define INT_LEAST16_MIN   INT16_MIN
+#define INT_LEAST16_MAX   INT16_MAX
+#define INT_LEAST32_MIN   INT32_MIN
+#define INT_LEAST32_MAX   INT32_MAX
+#define INT_LEAST64_MIN   INT64_MIN
+#define INT_LEAST64_MAX   INT64_MAX
+#define UINT_LEAST8_MAX   UINT8_MAX
+#define UINT_LEAST16_MAX  UINT16_MAX
+#define UINT_LEAST32_MAX  UINT32_MAX
+#define UINT_LEAST64_MAX  UINT64_MAX
+
+// 7.18.2.3 Limits of fastest minimum-width integer types
+#define INT_FAST8_MIN    INT8_MIN
+#define INT_FAST8_MAX    INT8_MAX
+#define INT_FAST16_MIN   INT16_MIN
+#define INT_FAST16_MAX   INT16_MAX
+#define INT_FAST32_MIN   INT32_MIN
+#define INT_FAST32_MAX   INT32_MAX
+#define INT_FAST64_MIN   INT64_MIN
+#define INT_FAST64_MAX   INT64_MAX
+#define UINT_FAST8_MAX   UINT8_MAX
+#define UINT_FAST16_MAX  UINT16_MAX
+#define UINT_FAST32_MAX  UINT32_MAX
+#define UINT_FAST64_MAX  UINT64_MAX
+
+// 7.18.2.4 Limits of integer types capable of holding object pointers
+#ifdef _WIN64 // [
+#  define INTPTR_MIN   INT64_MIN
+#  define INTPTR_MAX   INT64_MAX
+#  define UINTPTR_MAX  UINT64_MAX
+#else // _WIN64 ][
+#  define INTPTR_MIN   INT32_MIN
+#  define INTPTR_MAX   INT32_MAX
+#  define UINTPTR_MAX  UINT32_MAX
+#endif // _WIN64 ]
+
+// 7.18.2.5 Limits of greatest-width integer types
+#define INTMAX_MIN   INT64_MIN
+#define INTMAX_MAX   INT64_MAX
+#define UINTMAX_MAX  UINT64_MAX
+
+// 7.18.3 Limits of other integer types
+
+#ifdef _WIN64 // [
+#  define PTRDIFF_MIN  _I64_MIN
+#  define PTRDIFF_MAX  _I64_MAX
+#else  // _WIN64 ][
+#  define PTRDIFF_MIN  _I32_MIN
+#  define PTRDIFF_MAX  _I32_MAX
+#endif  // _WIN64 ]
+
+#define SIG_ATOMIC_MIN  INT_MIN
+#define SIG_ATOMIC_MAX  INT_MAX
+
+#ifndef SIZE_MAX // [
+#  ifdef _WIN64 // [
+#     define SIZE_MAX  _UI64_MAX
+#  else // _WIN64 ][
+#     define SIZE_MAX  _UI32_MAX
+#  endif // _WIN64 ]
+#endif // SIZE_MAX ]
+
+// WCHAR_MIN and WCHAR_MAX are also defined in <wchar.h>
+#ifndef WCHAR_MIN // [
+#  define WCHAR_MIN  0
+#endif  // WCHAR_MIN ]
+#ifndef WCHAR_MAX // [
+#  define WCHAR_MAX  _UI16_MAX
+#endif  // WCHAR_MAX ]
+
+#define WINT_MIN  0
+#define WINT_MAX  _UI16_MAX
+
+#endif // __STDC_LIMIT_MACROS ]
+
+
+// 7.18.4 Limits of other integer types
+
+#if !defined(__cplusplus) || defined(__STDC_CONSTANT_MACROS) // [   See footnote 224 at page 260
+
+// 7.18.4.1 Macros for minimum-width integer constants
+
+#define INT8_C(val)  val##i8
+#define INT16_C(val) val##i16
+#define INT32_C(val) val##i32
+#define INT64_C(val) val##i64
+
+#define UINT8_C(val)  val##ui8
+#define UINT16_C(val) val##ui16
+#define UINT32_C(val) val##ui32
+#define UINT64_C(val) val##ui64
+
+// 7.18.4.2 Macros for greatest-width integer constants
+#define INTMAX_C   INT64_C
+#define UINTMAX_C  UINT64_C
+
+#endif // __STDC_CONSTANT_MACROS ]
+
+
+#endif // _MSC_STDINT_H_ ]
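
(Aside, not part of the patch: a minimal sketch of the 7.18.4.1 constant
macros above, which paste MSVC's literal suffixes onto the value; the
names big and mask are invented for illustration.)

    #include <avro/msinttypes.h>

    /* INT64_C/UINT32_C expand to MSVC-suffixed literals: */
    int64_t  big  = INT64_C(9223372036854775807);   /* 9223372036854775807i64 */
    uint32_t mask = UINT32_C(0xFFFFFFFF);           /* 0xFFFFFFFFui32 */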
diff --git a/lang/c/src/avro/platform.h b/lang/c/src/avro/platform.h
new file mode 100644
index 0000000..9efaa6f
--- /dev/null
+++ b/lang/c/src/avro/platform.h
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#ifndef AVRO_PLATFORM_H
+#define AVRO_PLATFORM_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+/* Use this header file to include platform specific definitions */
+
+#ifdef _WIN32
+  #include <avro/msinttypes.h>
+#else
+  #include <inttypes.h>
+#endif
+
+// Defines for printing size_t.
+#if defined(_WIN64)
+  #define PRIsz PRIu64
+#elif defined(_WIN32)
+  #define PRIsz PRIu32
+#else // POSIX (C99 <inttypes.h>)
+  #define PRIsz "zu"
+#endif
+
+CLOSE_EXTERN
+#endif
diff --git a/lang/c/src/avro/refcount.h b/lang/c/src/avro/refcount.h
new file mode 100644
index 0000000..69afa4f
--- /dev/null
+++ b/lang/c/src/avro/refcount.h
@@ -0,0 +1,305 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#ifndef AVRO_REFCOUNT_H
+#define AVRO_REFCOUNT_H
+
+#if defined(_WIN32) && defined(__cplusplus)
+/* Include the C++ file <intrin.h> outside the scope of extern "C" */
+#include <intrin.h>
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+/**
+ * Atomically sets the value of a reference count.
+ */
+
+static inline void
+avro_refcount_set(volatile int *refcount, int value);
+
+/**
+ * Increments a reference count, ensuring that its value doesn't
+ * overflow.
+ */
+
+static inline void
+avro_refcount_inc(volatile int *refcount);
+
+/**
+ * Decrements a reference count, and returns whether the resulting
+ * (decremented) value is 0.
+ */
+
+static inline int
+avro_refcount_dec(volatile int *refcount);
+
+
+/*-----------------------------------------------------------------------
+ * Non-Atomic Reference Count
+ */
+#if defined(AVRO_NON_ATOMIC_REFCOUNT)
+static inline void
+avro_refcount_set(volatile int *refcount, int value)
+{
+	*refcount = value;
+}
+
+static inline void
+avro_refcount_inc(volatile int *refcount)
+{
+	if (*refcount != (int) -1) {
+		*refcount += 1;
+	}
+}
+
+static inline int
+avro_refcount_dec(volatile int *refcount)
+{
+	if (*refcount != (int) -1) {
+		*refcount -= 1;
+		return (*refcount == 0);
+	}
+	return 0;
+}
+
+/*-----------------------------------------------------------------------
+ * Mac OS X
+ */
+
+#elif __ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__ >= 1050
+
+#include <libkern/OSAtomic.h>
+
+static inline void
+avro_refcount_set(volatile int *refcount, int value)
+{
+	*refcount = value;
+}
+
+static inline void
+avro_refcount_inc(volatile int *refcount)
+{
+	if (*refcount != (int) -1) {
+		OSAtomicIncrement32(refcount);
+	}
+}
+
+static inline int
+avro_refcount_dec(volatile int *refcount)
+{
+	if (*refcount != (int) -1) {
+		return (OSAtomicDecrement32(refcount) == 0);
+	}
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * GCC intrinsics
+ */
+
+#elif (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) > 40500
+
+static inline void
+avro_refcount_set(volatile int *refcount, int value)
+{
+	*refcount = value;
+}
+
+static inline void
+avro_refcount_inc(volatile int *refcount)
+{
+	if (*refcount != (int) -1) {
+		__sync_add_and_fetch(refcount, 1);
+	}
+}
+
+static inline int
+avro_refcount_dec(volatile int *refcount)
+{
+	if (*refcount != (int) -1) {
+		return (__sync_sub_and_fetch(refcount, 1) == 0);
+	}
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * Raw x86 assembly
+ */
+
+#elif defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
+
+/* determine the size of int */
+
+#include <limits.h>
+#include <avro/platform.h>
+#if INT_MAX == INT32_MAX
+#define REFCOUNT_SS "l"
+#elif INT_MAX == INT64_MAX
+#define REFCOUNT_SS "q"
+#else
+#error "Unknown int size"
+#endif
+
+static inline void
+avro_refcount_set(volatile int *refcount, int value)
+{
+	*refcount = value;
+}
+
+static inline void
+avro_refcount_inc(volatile int *refcount)
+{
+	if (*refcount != (int) -1) {
+		__asm__ __volatile__ ("lock ; inc" REFCOUNT_SS " %0"
+				      :"=m" (*refcount)
+				      :"m" (*refcount));
+	}
+}
+
+static inline int
+avro_refcount_dec(volatile int *refcount)
+{
+	if (*refcount != (int) -1) {
+		char result;
+		__asm__ __volatile__ ("lock ; dec" REFCOUNT_SS " %0; setz %1"
+				      :"=m" (*refcount), "=q" (result)
+				      :"m" (*refcount));
+		return result;
+	}
+	return 0;
+}
+
+#undef REFCOUNT_SS
+
+
+/*-----------------------------------------------------------------------
+ * Raw PPC assembly
+ */
+
+#elif defined(__GNUC__) && defined(__ppc__)
+
+static inline int
+avro_refcount_LL_int(volatile int *ptr)
+{
+	int val;
+	__asm__ __volatile__ ("lwarx %[val],0,%[ptr]"
+			      : [val] "=r" (val)
+			      : [ptr] "r" (ptr)
+			      : "cc");
+
+	return val;
+}
+
+/* Returns non-zero if the store was successful, zero otherwise. */
+static inline int
+avro_refcount_SC_int(volatile int *ptr, int val)
+{
+	int ret = 1; /* starts non-zero; reset to 0 below if the SC fails */
+	__asm__ __volatile__ ("stwcx. %[val],0,%[ptr];\n"
+			      "beq 1f;\n"
+			      "li %[ret], 0;\n"
+			      "1: ;\n"
+			      : [ret] "=r" (ret)
+			      : [ptr] "r" (ptr), [val] "r" (val), "0" (ret)
+			      : "cc", "memory");
+	return ret;
+}
+
+static inline void
+avro_refcount_set(volatile int *refcount, int value)
+{
+	*refcount = value;
+}
+
+static inline void
+avro_refcount_inc(volatile int *refcount)
+{
+	int prev;
+	do {
+		prev = avro_refcount_LL_int(refcount);
+		if (prev == (int) -1) {
+			return;
+		}
+	} while (!avro_refcount_SC_int(refcount, prev + 1));
+}
+
+static inline int
+avro_refcount_dec(volatile int *refcount)
+{
+	int prev;
+	do {
+		prev = avro_refcount_LL_int(refcount);
+		if (prev == (int) -1) {
+			return 0;
+		}
+	} while (!avro_refcount_SC_int(refcount, prev - 1));
+	return prev == 1;
+}
+
+
+/*-----------------------------------------------------------------------
+ * Windows intrinsics
+ */
+#elif defined(_WIN32)
+
+#ifdef __cplusplus
+// Note: <intrin.h> included outside the extern "C" wrappers above
+#else
+#include <windows.h>
+#include <intrin.h>
+#endif // __cplusplus
+
+static inline void
+avro_refcount_set(volatile int *refcount, int value)
+{
+	*refcount = value;
+}
+
+static inline void
+avro_refcount_inc(volatile int *refcount)
+{
+	if (*refcount != (int) -1) {
+		_InterlockedIncrement((volatile long *) refcount);
+	}
+}
+
+static inline int
+avro_refcount_dec(volatile int *refcount)
+{
+	if (*refcount != (int) -1) {
+		return (_InterlockedDecrement((volatile long *) refcount) == 0);
+	}
+	return 0;
+}
+
+/*-----------------------------------------------------------------------
+ * Fallback
+ */
+#else
+#error "No atomic implementation!"
+#endif
+
+CLOSE_EXTERN
+#endif
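
(Aside, not part of the patch: a sketch of how these primitives compose;
my_obj and its functions are invented for illustration.  Note that every
implementation above treats a count of (int) -1 as immortal: both
avro_refcount_inc and avro_refcount_dec leave it untouched.)

    #include <stdlib.h>
    #include <avro/refcount.h>

    typedef struct my_obj {
        volatile int  refcount;
        int  payload;
    } my_obj;

    static my_obj *my_obj_new(void)
    {
        my_obj  *obj = (my_obj *) malloc(sizeof(my_obj));
        if (obj != NULL) {
            avro_refcount_set(&obj->refcount, 1);
        }
        return obj;
    }

    static void my_obj_decref(my_obj *obj)
    {
        /* avro_refcount_dec returns nonzero when the count reaches 0 */
        if (avro_refcount_dec(&obj->refcount)) {
            free(obj);
        }
    }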
diff --git a/lang/c/src/avro/resolver.h b/lang/c/src/avro/resolver.h
new file mode 100644
index 0000000..63121c0
--- /dev/null
+++ b/lang/c/src/avro/resolver.h
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#ifndef AVRO_RESOLVER_H
+#define AVRO_RESOLVER_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <avro/schema.h>
+#include <avro/value.h>
+
+/*
+ * A <i>resolved value</i> is a special kind of value that knows how to
+ * implement Avro's schema resolution rules to translate between a
+ * writer schema and a reader schema.  A resolved value doesn't store or
+ * process data itself; instead, it wraps an existing value instance.
+ *
+ * There are two resolved value classes.  In the first (@ref
+ * avro_resolved_writer_t), the resolved value is an instance of the
+ * writer schema, and wraps an instance of the reader schema.  This is
+ * used, for instance, when reading from an Avro data file; you want the
+ * end result to be a reader schema value, and the resolved value allows
+ * the file reader to ignore the schema resolution and simply fill in
+ * the values of the writer schema.  You can only set the values of a
+ * resolved writer; you must use the original wrapped value to read.
+ *
+ * With the other class (@ref avro_resolved_reader_t), the resolved value is
+ * an instance of the reader schema, and wraps an instance of the writer
+ * schema.  This is used when resolving an existing Avro value to
+ * another schema; you've already got the value in the original (writer)
+ * schema, and want to transparently treat it as if it were an instance
+ * of the new (reader) schema.  You can only read the values of a
+ * resolved reader; you must use the original wrapped value to write.
+ *
+ * For both classes, the “self” pointer of the resolved value is an
+ * avro_value_t pointer, which points at the wrapped value.
+ */
+
+
+/**
+ * Create a new resolved writer implementation for the given writer and
+ * reader schemas.
+ */
+
+avro_value_iface_t *
+avro_resolved_writer_new(avro_schema_t writer_schema,
+			 avro_schema_t reader_schema);
+
+/**
+ * Creates a new resolved writer value.
+ */
+
+int
+avro_resolved_writer_new_value(avro_value_iface_t *iface,
+			       avro_value_t *value);
+
+/**
+ * Sets the wrapped value for a resolved writer.  This must be an
+ * instance of the reader schema.  We create our own reference to the
+ * destination value.
+ */
+
+void
+avro_resolved_writer_set_dest(avro_value_t *resolved,
+			      avro_value_t *dest);
+
+
+/**
+ * Clears the wrapped value for a resolved writer.
+ */
+
+void
+avro_resolved_writer_clear_dest(avro_value_t *resolved);
+
+
+/**
+ * Create a new resolved reader implementation for the given writer and
+ * reader schemas.
+ */
+
+avro_value_iface_t *
+avro_resolved_reader_new(avro_schema_t writer_schema,
+			 avro_schema_t reader_schema);
+
+/**
+ * Creates a new resolved reader value.
+ */
+
+int
+avro_resolved_reader_new_value(avro_value_iface_t *iface,
+			       avro_value_t *value);
+
+/**
+ * Sets the wrapped value for a resolved reader.  This must be an
+ * instance of the writer schema.  We create our own reference to the
+ * source value.
+ */
+
+void
+avro_resolved_reader_set_source(avro_value_t *resolved,
+				avro_value_t *src);
+
+
+/**
+ * Clears the wrapped value for a resolved reader.
+ */
+
+void
+avro_resolved_reader_clear_source(avro_value_t *resolved);
+
+CLOSE_EXTERN
+#endif
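
(Aside, not part of the patch: a sketch of wiring up a resolved writer.
make_resolved is an invented helper; it assumes dest is already an
instance of reader_schema, and error checking is kept minimal.)

    #include <errno.h>
    #include <avro.h>

    static int
    make_resolved(avro_schema_t writer_schema, avro_schema_t reader_schema,
                  avro_value_t *dest, avro_value_t *resolved_out)
    {
        avro_value_iface_t  *iface =
            avro_resolved_writer_new(writer_schema, reader_schema);
        if (iface == NULL) {
            return EINVAL;    /* the two schemas don't resolve */
        }
        avro_resolved_writer_new_value(iface, resolved_out);
        avro_resolved_writer_set_dest(resolved_out, dest);
        /* Setting fields of *resolved_out now fills in *dest, with the
         * schema resolution rules applied along the way. */
        return 0;
    }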
diff --git a/lang/c/src/avro/schema.h b/lang/c/src/avro/schema.h
new file mode 100644
index 0000000..b9c59cd
--- /dev/null
+++ b/lang/c/src/avro/schema.h
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#ifndef AVRO_SCHEMA_H
+#define AVRO_SCHEMA_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <avro/platform.h>
+#include <stdlib.h>
+
+#include <avro/basics.h>
+
+typedef struct avro_obj_t *avro_schema_t;
+
+avro_schema_t avro_schema_string(void);
+avro_schema_t avro_schema_bytes(void);
+avro_schema_t avro_schema_int(void);
+avro_schema_t avro_schema_long(void);
+avro_schema_t avro_schema_float(void);
+avro_schema_t avro_schema_double(void);
+avro_schema_t avro_schema_boolean(void);
+avro_schema_t avro_schema_null(void);
+
+avro_schema_t avro_schema_record(const char *name, const char *space);
+avro_schema_t avro_schema_record_field_get(const avro_schema_t record,
+					   const char *field_name);
+const char *avro_schema_record_field_name(const avro_schema_t schema, int index);
+int avro_schema_record_field_get_index(const avro_schema_t schema,
+				       const char *field_name);
+avro_schema_t avro_schema_record_field_get_by_index(const avro_schema_t record,
+						    int index);
+int avro_schema_record_field_append(const avro_schema_t record,
+				    const char *field_name,
+				    const avro_schema_t type);
+size_t avro_schema_record_size(const avro_schema_t record);
+
+avro_schema_t avro_schema_enum(const char *name);
+const char *avro_schema_enum_get(const avro_schema_t enump,
+				 int index);
+int avro_schema_enum_get_by_name(const avro_schema_t enump,
+				 const char *symbol_name);
+int avro_schema_enum_symbol_append(const avro_schema_t enump,
+				   const char *symbol);
+
+avro_schema_t avro_schema_fixed(const char *name, const int64_t len);
+int64_t avro_schema_fixed_size(const avro_schema_t fixed);
+
+avro_schema_t avro_schema_map(const avro_schema_t values);
+avro_schema_t avro_schema_map_values(avro_schema_t map);
+
+avro_schema_t avro_schema_array(const avro_schema_t items);
+avro_schema_t avro_schema_array_items(avro_schema_t array);
+
+avro_schema_t avro_schema_union(void);
+size_t avro_schema_union_size(const avro_schema_t union_schema);
+int avro_schema_union_append(const avro_schema_t union_schema,
+			     const avro_schema_t schema);
+avro_schema_t avro_schema_union_branch(avro_schema_t union_schema,
+				       int branch_index);
+avro_schema_t avro_schema_union_branch_by_name(avro_schema_t union_schema,
+					       int *branch_index,
+					       const char *name);
+
+avro_schema_t avro_schema_link(avro_schema_t schema);
+avro_schema_t avro_schema_link_target(avro_schema_t schema);
+
+typedef struct avro_schema_error_t_ *avro_schema_error_t;
+
+/* unused1 and unused2 are ignored; they remain only for API
+ * compatibility. */
+int avro_schema_from_json(const char *jsontext, int32_t unused1,
+			  avro_schema_t *schema, avro_schema_error_t *unused2);
+
+/* jsontext does not need to be NUL terminated.  length must *NOT*
+ * include the NUL terminator, if one is present. */
+int avro_schema_from_json_length(const char *jsontext, size_t length,
+				 avro_schema_t *schema);
+
+/* A helper macro for loading a schema from a string literal.  The
+ * literal must be declared as a char[], not a char *, since we use the
+ * sizeof operator to determine its length. */
+#define avro_schema_from_json_literal(json, schema) \
+    (avro_schema_from_json_length((json), sizeof((json))-1, (schema)))
+
+int avro_schema_to_specific(avro_schema_t schema, const char *prefix);
+
+avro_schema_t avro_schema_get_subschema(const avro_schema_t schema,
+         const char *name);
+const char *avro_schema_name(const avro_schema_t schema);
+const char *avro_schema_type_name(const avro_schema_t schema);
+avro_schema_t avro_schema_copy(avro_schema_t schema);
+int avro_schema_equal(avro_schema_t a, avro_schema_t b);
+
+avro_schema_t avro_schema_incref(avro_schema_t schema);
+int avro_schema_decref(avro_schema_t schema);
+
+int avro_schema_match(avro_schema_t writers_schema,
+		      avro_schema_t readers_schema);
+
+CLOSE_EXTERN
+#endif
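
(Aside, not part of the patch: two equivalent ways to obtain a simple
record schema with the API above; Person and example are invented names.)

    #include <avro.h>

    static avro_schema_t person_schema(void)
    {
        avro_schema_t  rec = avro_schema_record("Person", "example");
        avro_schema_record_field_append(rec, "name", avro_schema_string());
        avro_schema_record_field_append(rec, "age", avro_schema_int());
        return rec;
    }

    /* The _literal helper needs a char[], since it uses sizeof: */
    static const char  PERSON_JSON[] =
        "{\"type\": \"record\", \"name\": \"Person\","
        " \"namespace\": \"example\", \"fields\": ["
        "   {\"name\": \"name\", \"type\": \"string\"},"
        "   {\"name\": \"age\", \"type\": \"int\"}]}";

    static avro_schema_t person_schema_from_json(void)
    {
        avro_schema_t  schema = NULL;
        if (avro_schema_from_json_literal(PERSON_JSON, &schema)) {
            return NULL;    /* parse failed; see avro_strerror() */
        }
        return schema;
    }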
diff --git a/lang/c/src/avro/value.h b/lang/c/src/avro/value.h
new file mode 100644
index 0000000..ba01caa
--- /dev/null
+++ b/lang/c/src/avro/value.h
@@ -0,0 +1,498 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#ifndef AVRO_VALUE_H
+#define AVRO_VALUE_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <errno.h>
+#include <avro/platform.h>
+#include <stdlib.h>
+
+#include <avro/data.h>
+#include <avro/schema.h>
+
+/*
+ * This file defines an interface struct for Avro data.  Most of the
+ * interesting parts of this library will work with Avro data values
+ * that are expressed in whatever C type you want, as long as you can
+ * provide an implementation of this interface for that type.
+ */
+
+typedef struct avro_value_iface  avro_value_iface_t;
+
+typedef struct avro_value {
+	avro_value_iface_t  *iface;
+	void  *self;
+} avro_value_t;
+
+struct avro_value_iface {
+	/*-------------------------------------------------------------
+	 * "class" methods
+	 */
+
+	/**
+	 * Increment the reference count of the interface struct.  This
+	 * should be a no-op for static structs, since they don't need
+	 * reference counts.
+	 */
+	avro_value_iface_t *
+	(*incref_iface)(avro_value_iface_t *iface);
+
+	/**
+	 * Decrement the reference count of the interface struct.  If
+	 * the count falls to 0, free the struct.  This should be a
+	 * no-op for static structs, since they don't need reference
+	 * counts.
+	 */
+	void
+	(*decref_iface)(avro_value_iface_t *iface);
+
+	/*-------------------------------------------------------------
+	 * General "instance" methods
+	 */
+
+	/**
+	 * Increments the reference count of a value.
+	 */
+
+	void
+	(*incref)(avro_value_t *value);
+
+	/**
+	 * Decrements the reference count of a value, and frees the
+	 * value if the reference count drops to 0.  After calling this
+	 * method, your value instance is undefined, and cannot be used
+	 * anymore.
+	 */
+
+	void
+	(*decref)(avro_value_t *value);
+
+	/**
+	 * Reset the instance to its "empty", default value.  You don't
+	 * have to free the underlying storage, if you want to keep it
+	 * around for later values.
+	 */
+	int
+	(*reset)(const avro_value_iface_t *iface, void *self);
+
+	/**
+	 * Return the general Avro type of a value instance.
+	 */
+	avro_type_t
+	(*get_type)(const avro_value_iface_t *iface, const void *self);
+
+	/**
+	 * Return the Avro schema that a value is an instance of.
+	 */
+	avro_schema_t
+	(*get_schema)(const avro_value_iface_t *iface, const void *self);
+
+	/*-------------------------------------------------------------
+	 * Primitive value getters
+	 */
+	int (*get_boolean)(const avro_value_iface_t *iface,
+			   const void *self, int *out);
+	int (*get_bytes)(const avro_value_iface_t *iface,
+			 const void *self, const void **buf, size_t *size);
+	int (*grab_bytes)(const avro_value_iface_t *iface,
+			  const void *self, avro_wrapped_buffer_t *dest);
+	int (*get_double)(const avro_value_iface_t *iface,
+			  const void *self, double *out);
+	int (*get_float)(const avro_value_iface_t *iface,
+			 const void *self, float *out);
+	int (*get_int)(const avro_value_iface_t *iface,
+		       const void *self, int32_t *out);
+	int (*get_long)(const avro_value_iface_t *iface,
+			const void *self, int64_t *out);
+	int (*get_null)(const avro_value_iface_t *iface,
+			const void *self);
+	/* The result will be NUL-terminated; the size will INCLUDE the
+	 * NUL terminator.  str will never be NULL unless there's an
+	 * error. */
+	int (*get_string)(const avro_value_iface_t *iface,
+			  const void *self, const char **str, size_t *size);
+	int (*grab_string)(const avro_value_iface_t *iface,
+			   const void *self, avro_wrapped_buffer_t *dest);
+
+	int (*get_enum)(const avro_value_iface_t *iface,
+			const void *self, int *out);
+	int (*get_fixed)(const avro_value_iface_t *iface,
+			 const void *self, const void **buf, size_t *size);
+	int (*grab_fixed)(const avro_value_iface_t *iface,
+			  const void *self, avro_wrapped_buffer_t *dest);
+
+	/*-------------------------------------------------------------
+	 * Primitive value setters
+	 */
+
+	/*
+	 * The "give" setters can be used to give control of an existing
+	 * buffer to a bytes, fixed, or string value.  The free function
+	 * will be called when the buffer is no longer needed.  (It's
+	 * okay for free to be NULL; that just means that nothing
+	 * special needs to be done to free the buffer.  That's useful
+	 * for a static string, for instance.)
+	 *
+	 * If your class can't take control of an existing buffer, then
+	 * your give functions should pass the buffer into the
+	 * corresponding "set" method and then immediately free the
+	 * buffer.
+	 *
+	 * Note that for strings, the free function will be called with
+	 * a size that *includes* the NUL terminator, even though you
+	 * provide a size that does *not*.
+	 */
+
+	int (*set_boolean)(const avro_value_iface_t *iface,
+			   void *self, int val);
+	int (*set_bytes)(const avro_value_iface_t *iface,
+			 void *self, void *buf, size_t size);
+	int (*give_bytes)(const avro_value_iface_t *iface,
+			  void *self, avro_wrapped_buffer_t *buf);
+	int (*set_double)(const avro_value_iface_t *iface,
+			  void *self, double val);
+	int (*set_float)(const avro_value_iface_t *iface,
+			 void *self, float val);
+	int (*set_int)(const avro_value_iface_t *iface,
+		       void *self, int32_t val);
+	int (*set_long)(const avro_value_iface_t *iface,
+			void *self, int64_t val);
+	int (*set_null)(const avro_value_iface_t *iface, void *self);
+	/* The input must be NUL-terminated */
+	int (*set_string)(const avro_value_iface_t *iface,
+			  void *self, const char *str);
+	/* and size must INCLUDE the NUL terminator */
+	int (*set_string_len)(const avro_value_iface_t *iface,
+			      void *self, const char *str, size_t size);
+	int (*give_string_len)(const avro_value_iface_t *iface,
+			       void *self, avro_wrapped_buffer_t *buf);
+
+	int (*set_enum)(const avro_value_iface_t *iface,
+			void *self, int val);
+	int (*set_fixed)(const avro_value_iface_t *iface,
+			 void *self, void *buf, size_t size);
+	int (*give_fixed)(const avro_value_iface_t *iface,
+			  void *self, avro_wrapped_buffer_t *buf);
+
+	/*-------------------------------------------------------------
+	 * Compound value getters
+	 */
+
+	/* Number of elements in array/map, or the number of fields in a
+	 * record. */
+	int (*get_size)(const avro_value_iface_t *iface,
+			const void *self, size_t *size);
+
+	/*
+	 * For arrays and maps, returns the element with the given
+	 * index.  (For maps, the "index" is based on the order that the
+	 * keys were added to the map.)  For records, returns the field
+	 * with that index in the schema.
+	 *
+	 * For maps and records, the name parameter (if given) will be
+	 * filled in with the key or field name of the returned value.
+	 * For arrays, the name parameter will always be ignored.
+	 */
+	int (*get_by_index)(const avro_value_iface_t *iface,
+			    const void *self, size_t index,
+			    avro_value_t *child, const char **name);
+
+	/*
+	 * For maps, returns the element with the given key.  For
+	 * records, returns the element with the given field name.  If
+	 * index is given, it will be filled in with the numeric index
+	 * of the returned value.
+	 */
+	int (*get_by_name)(const avro_value_iface_t *iface,
+			   const void *self, const char *name,
+			   avro_value_t *child, size_t *index);
+
+	/* Discriminant of current union value */
+	int (*get_discriminant)(const avro_value_iface_t *iface,
+				const void *self, int *out);
+	/* Current union value */
+	int (*get_current_branch)(const avro_value_iface_t *iface,
+				  const void *self, avro_value_t *branch);
+
+	/*-------------------------------------------------------------
+	 * Compound value setters
+	 */
+
+	/*
+	 * For all of these, the value class should know which class to
+	 * use for its children.
+	 */
+
+	/* Creates a new array element. */
+	int (*append)(const avro_value_iface_t *iface,
+		      void *self, avro_value_t *child_out, size_t *new_index);
+
+	/* Creates a new map element, or returns an existing one. */
+	int (*add)(const avro_value_iface_t *iface,
+		   void *self, const char *key,
+		   avro_value_t *child, size_t *index, int *is_new);
+
+	/* Select a union branch. */
+	int (*set_branch)(const avro_value_iface_t *iface,
+			  void *self, int discriminant,
+			  avro_value_t *branch);
+};
+
+
+/**
+ * Increments the reference count of a value instance.  Normally you
+ * don't need to call this directly; you'll have a reference whenever
+ * you create the value, and @ref avro_value_copy and @ref
+ * avro_value_move update the reference counts correctly for you.
+ */
+
+void
+avro_value_incref(avro_value_t *value);
+
+/**
+ * Decrements the reference count of a value instance, freeing it if
+ * its reference count drops to 0.
+ */
+
+void
+avro_value_decref(avro_value_t *value);
+
+/**
+ * Copies a reference to a value.  This does not copy any of the data
+ * in the value; you get two avro_value_t references that point at the
+ * same underlying value instance.
+ */
+
+void
+avro_value_copy_ref(avro_value_t *dest, const avro_value_t *src);
+
+/**
+ * Moves a reference to a value.  This does not copy any of the data in
+ * the value.  The @ref src value is invalidated by this function; it's
+ * equivalent to the following:
+ *
+ * <code>
+ * avro_value_copy_ref(dest, src);
+ * avro_value_decref(src);
+ * </code>
+ */
+
+void
+avro_value_move_ref(avro_value_t *dest, avro_value_t *src);
+
+/**
+ * Compares two values for equality.  The two values don't need to have
+ * the same implementation of the value interface, but they do need to
+ * represent Avro values of the same schema.  This function ensures that
+ * the schemas match; if you want to skip this check, use
+ * avro_value_equal_fast.
+ */
+
+int
+avro_value_equal(avro_value_t *val1, avro_value_t *val2);
+
+/**
+ * Compares two values for equality.  The two values don't need to have
+ * the same implementation of the value interface, but they do need to
+ * represent Avro values of the same schema.  This function assumes that
+ * the schemas match; if you can't guarantee this, you should use
+ * avro_value_equal, which compares the schemas before comparing the
+ * values.
+ */
+
+int
+avro_value_equal_fast(avro_value_t *val1, avro_value_t *val2);
+
+/**
+ * Compares two values using the sort order defined in the Avro
+ * specification.  The two values don't need to have the same
+ * implementation of the value interface, but they do need to represent
+ * Avro values of the same schema.  This function ensures that the
+ * schemas match; if you want to skip this check, use
+ * avro_value_cmp_fast.
+ */
+
+int
+avro_value_cmp(avro_value_t *val1, avro_value_t *val2);
+
+/**
+ * Compares two values using the sort order defined in the Avro
+ * specification.  The two values don't need to have the same
+ * implementation of the value interface, but they do need to represent
+ * Avro values of the same schema.  This function assumes that the
+ * schemas match; if you can't guarantee this, you should use
+ * avro_value_cmp, which compares the schemas before comparing the
+ * values.
+ */
+
+int
+avro_value_cmp_fast(avro_value_t *val1, avro_value_t *val2);
+
+
+
+/**
+ * Copies the contents of src into dest.  The two values don't need to
+ * have the same implementation of the value interface, but they do need
+ * to represent Avro values of the same schema.  This function ensures
+ * that the schemas match; if you want to skip this check, use
+ * avro_value_copy_fast.
+ */
+
+int
+avro_value_copy(avro_value_t *dest, const avro_value_t *src);
+
+/**
+ * Copies the contents of src into dest.  The two values don't need to
+ * have the same implementation of the value interface, but they do need
+ * to represent Avro values of the same schema.  This function assumes
+ * that the schemas match; if you can't guarantee this, you should use
+ * avro_value_copy, which compares the schemas before comparing the
+ * values.
+ */
+
+int
+avro_value_copy_fast(avro_value_t *dest, const avro_value_t *src);
+
+/**
+ * Returns a hash value for a given Avro value.
+ */
+
+uint32_t
+avro_value_hash(avro_value_t *value);
+
+/**
+ * Stores the JSON encoding of an Avro value in *json_str.  You must
+ * free this string when you're done with it, using the standard
+ * free() function.  (*Not* using the custom Avro allocator.)
+ */
+
+int
+avro_value_to_json(const avro_value_t *value,
+		   int one_line, char **json_str);
+
+
+/**
+ * A helper macro for calling a given method in a value instance, if
+ * it's present.  If the value's class doesn't implement the given
+ * method, we return dflt.  You usually won't call this directly; it's
+ * just here to implement the macros below.
+ */
+
+#define avro_value_call0(value, method, dflt) \
+    ((value)->iface->method == NULL? (dflt): \
+     (value)->iface->method((value)->iface, (value)->self))
+
+#define avro_value_call(value, method, dflt, ...) \
+    ((value)->iface->method == NULL? (dflt): \
+     (value)->iface->method((value)->iface, (value)->self, __VA_ARGS__))
+
+
+#define avro_value_iface_incref(cls) \
+    ((cls)->incref_iface == NULL? (cls): (cls)->incref_iface((cls)))
+#define avro_value_iface_decref(cls) \
+    ((cls)->decref_iface == NULL? (void) 0: (cls)->decref_iface((cls)))
+
+#define avro_value_reset(value) \
+    avro_value_call0(value, reset, EINVAL)
+#define avro_value_get_type(value) \
+    avro_value_call0(value, get_type, (avro_type_t) -1)
+#define avro_value_get_schema(value) \
+    avro_value_call0(value, get_schema, NULL)
+
+#define avro_value_get_boolean(value, out) \
+    avro_value_call(value, get_boolean, EINVAL, out)
+#define avro_value_get_bytes(value, buf, size) \
+    avro_value_call(value, get_bytes, EINVAL, buf, size)
+#define avro_value_grab_bytes(value, dest) \
+    avro_value_call(value, grab_bytes, EINVAL, dest)
+#define avro_value_get_double(value, out) \
+    avro_value_call(value, get_double, EINVAL, out)
+#define avro_value_get_float(value, out) \
+    avro_value_call(value, get_float, EINVAL, out)
+#define avro_value_get_int(value, out) \
+    avro_value_call(value, get_int, EINVAL, out)
+#define avro_value_get_long(value, out) \
+    avro_value_call(value, get_long, EINVAL, out)
+#define avro_value_get_null(value) \
+    avro_value_call0(value, get_null, EINVAL)
+#define avro_value_get_string(value, str, size) \
+    avro_value_call(value, get_string, EINVAL, str, size)
+#define avro_value_grab_string(value, dest) \
+    avro_value_call(value, grab_string, EINVAL, dest)
+#define avro_value_get_enum(value, out) \
+    avro_value_call(value, get_enum, EINVAL, out)
+#define avro_value_get_fixed(value, buf, size) \
+    avro_value_call(value, get_fixed, EINVAL, buf, size)
+#define avro_value_grab_fixed(value, dest) \
+    avro_value_call(value, grab_fixed, EINVAL, dest)
+
+#define avro_value_set_boolean(value, val) \
+    avro_value_call(value, set_boolean, EINVAL, val)
+#define avro_value_set_bytes(value, buf, size) \
+    avro_value_call(value, set_bytes, EINVAL, buf, size)
+#define avro_value_give_bytes(value, buf) \
+    avro_value_call(value, give_bytes, EINVAL, buf)
+#define avro_value_set_double(value, val) \
+    avro_value_call(value, set_double, EINVAL, val)
+#define avro_value_set_float(value, val) \
+    avro_value_call(value, set_float, EINVAL, val)
+#define avro_value_set_int(value, val) \
+    avro_value_call(value, set_int, EINVAL, val)
+#define avro_value_set_long(value, val) \
+    avro_value_call(value, set_long, EINVAL, val)
+#define avro_value_set_null(value) \
+    avro_value_call0(value, set_null, EINVAL)
+#define avro_value_set_string(value, str) \
+    avro_value_call(value, set_string, EINVAL, str)
+#define avro_value_set_string_len(value, str, size) \
+    avro_value_call(value, set_string_len, EINVAL, str, size)
+#define avro_value_give_string_len(value, buf) \
+    avro_value_call(value, give_string_len, EINVAL, buf)
+#define avro_value_set_enum(value, val) \
+    avro_value_call(value, set_enum, EINVAL, val)
+#define avro_value_set_fixed(value, buf, size) \
+    avro_value_call(value, set_fixed, EINVAL, buf, size)
+#define avro_value_give_fixed(value, buf) \
+    avro_value_call(value, give_fixed, EINVAL, buf)
+
+#define avro_value_get_size(value, size) \
+    avro_value_call(value, get_size, EINVAL, size)
+#define avro_value_get_by_index(value, idx, child, name) \
+    avro_value_call(value, get_by_index, EINVAL, idx, child, name)
+#define avro_value_get_by_name(value, name, child, index) \
+    avro_value_call(value, get_by_name, EINVAL, name, child, index)
+#define avro_value_get_discriminant(value, out) \
+    avro_value_call(value, get_discriminant, EINVAL, out)
+#define avro_value_get_current_branch(value, branch) \
+    avro_value_call(value, get_current_branch, EINVAL, branch)
+
+#define avro_value_append(value, child, new_index) \
+    avro_value_call(value, append, EINVAL, child, new_index)
+#define avro_value_add(value, key, child, index, is_new) \
+    avro_value_call(value, add, EINVAL, key, child, index, is_new)
+#define avro_value_set_branch(value, discriminant, branch) \
+    avro_value_call(value, set_branch, EINVAL, discriminant, branch)
+
+CLOSE_EXTERN
+#endif
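
(Aside, not part of the patch: a sketch of driving the value interface
through the convenience macros above.  It assumes schema is the Person
record from the earlier aside; error checking is omitted.)

    #include <avro.h>

    static void demo(avro_schema_t schema)
    {
        avro_value_iface_t  *iface = avro_generic_class_from_schema(schema);
        avro_value_t  person;
        avro_value_t  field;

        avro_generic_value_new(iface, &person);
        avro_value_get_by_name(&person, "name", &field, NULL);
        avro_value_set_string(&field, "Alice");
        avro_value_get_by_name(&person, "age", &field, NULL);
        avro_value_set_int(&field, 30);

        const char  *name;
        size_t  size;    /* includes the NUL terminator */
        avro_value_get_by_name(&person, "name", &field, NULL);
        avro_value_get_string(&field, &name, &size);

        avro_value_decref(&person);
        avro_value_iface_decref(iface);
    }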
diff --git a/lang/c/src/avro_generic_internal.h b/lang/c/src/avro_generic_internal.h
new file mode 100644
index 0000000..dec7652
--- /dev/null
+++ b/lang/c/src/avro_generic_internal.h
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#ifndef AVRO_GENERIC_INTERNAL_H
+#define AVRO_GENERIC_INTERNAL_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include "avro/generic.h"
+#include "avro/schema.h"
+#include "avro/value.h"
+
+/*
+ * Each generic value implementation struct defines a couple of extra
+ * methods that we use to control the lifecycle of the value objects.
+ */
+
+typedef struct avro_generic_value_iface {
+	avro_value_iface_t  parent;
+
+	/**
+	 * Return the size of an instance of this value type.  If this
+	 * returns 0, then this value type can't be used with any
+	 * function or type (like avro_value_new) that expects to
+	 * allocate space for the value itself.
+	 */
+	size_t
+	(*instance_size)(const avro_value_iface_t *iface);
+
+	/**
+	 * Initialize a new value instance.
+	 */
+	int
+	(*init)(const avro_value_iface_t *iface, void *self);
+
+	/**
+	 * Finalize a value instance.
+	 */
+	void
+	(*done)(const avro_value_iface_t *iface, void *self);
+} avro_generic_value_iface_t;
+
+
+#define avro_value_instance_size(gcls) \
+    ((gcls)->instance_size == NULL? 0: (gcls)->instance_size(&(gcls)->parent))
+#define avro_value_init(gcls, self) \
+    ((gcls)->init == NULL? EINVAL: (gcls)->init(&(gcls)->parent, (self)))
+#define avro_value_done(gcls, self) \
+    ((gcls)->done == NULL? (void) 0: (gcls)->done(&(gcls)->parent, (self)))
+
+
+CLOSE_EXTERN
+#endif
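
(Aside, not part of the patch: an assumption about how a caller might use
the lifecycle methods above; this is illustrative, not the library's
actual implementation.)

    #include <errno.h>
    #include <stdlib.h>
    #include "avro_generic_internal.h"

    static int
    alloc_value(avro_generic_value_iface_t *giface, avro_value_t *value)
    {
        size_t  size = avro_value_instance_size(giface);
        if (size == 0) {
            return EINVAL;    /* type can't be allocated generically */
        }
        void  *self = malloc(size);
        if (self == NULL) {
            return ENOMEM;
        }
        int  rval = avro_value_init(giface, self);
        if (rval != 0) {
            free(self);
            return rval;
        }
        value->iface = &giface->parent;
        value->self = self;
        return 0;
    }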
diff --git a/lang/c/src/avro_private.h b/lang/c/src/avro_private.h
new file mode 100644
index 0000000..c6d4c87
--- /dev/null
+++ b/lang/c/src/avro_private.h
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+#ifndef AVRO_PRIVATE_H
+#define AVRO_PRIVATE_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <errno.h>
+
+#include "avro/errors.h"
+#include "avro/platform.h"
+
+#ifdef HAVE_CONFIG_H
+/* HAVE_CONFIG_H is currently only defined by the autotools build */
+#include "config.h"
+#endif
+
+#ifdef _WIN32
+#define snprintf _snprintf
+#endif
+
+/* Note that AVRO_PLATFORM_IS_BIG_ENDIAN is *always* defined. It is
+ * either TRUE (1) or FALSE (0).
+ */
+#ifdef _WIN32
+  #define AVRO_PLATFORM_IS_BIG_ENDIAN (0)
+#else // UNIX
+  #include <sys/param.h>
+  #if BYTE_ORDER == BIG_ENDIAN
+    #define AVRO_PLATFORM_IS_BIG_ENDIAN (1)
+  #else
+    #define AVRO_PLATFORM_IS_BIG_ENDIAN (0)
+  #endif
+#endif
+
+/* Add definition of EILSEQ if it is not defined in errno.h. */
+#include <errno.h>
+#ifndef EILSEQ
+#define EILSEQ 138
+#endif
+
+
+#define check(rval, call) { rval = call; if(rval) return rval; }
+
+#define check_set(rval, call, ...)			\
+	{						\
+		rval = call;				\
+		if (rval) {				\
+			avro_set_error(__VA_ARGS__);	\
+			return rval;			\
+		}					\
+	}
+
+#define check_prefix(rval, call, ...)			\
+	{						\
+		rval = call;				\
+		if (rval) {				\
+			avro_prefix_error(__VA_ARGS__);	\
+			return rval;			\
+		}					\
+	}
+
+#define check_param(result, test, name)					\
+	{								\
+		if (!(test)) {						\
+			avro_set_error("Invalid " name " in %s",	\
+				       __FUNCTION__);			\
+			return result;					\
+		}							\
+	}
+
+#define AVRO_UNUSED(var) (void)var;
+
+#define container_of(ptr_, type_, member_)  \
+    ((type_ *)((char *)ptr_ - (size_t)&((type_ *)0)->member_))
+
+#define nullstrcmp(s1, s2) \
+    (((s1) && (s2)) ? strcmp(s1, s2) : ((s1) || (s2)))
+
+CLOSE_EXTERN
+#endif
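
(Aside, not part of the patch: a hypothetical function showing the
error-propagation style that the check macros above enable; fill_person
and its field names are invented.)

    #include "avro.h"
    #include "avro_private.h"

    static int
    fill_person(avro_value_t *person)
    {
        int  rval;
        avro_value_t  field;
        check(rval, avro_value_get_by_name(person, "name", &field, NULL));
        check(rval, avro_value_set_string(&field, "Alice"));
        check_prefix(rval, avro_value_get_by_name(person, "age", &field, NULL),
                     "Cannot fetch age: ");
        check(rval, avro_value_set_int(&field, 30));
        return 0;
    }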
diff --git a/lang/c/src/avroappend.c b/lang/c/src/avroappend.c
new file mode 100644
index 0000000..e1afb95
--- /dev/null
+++ b/lang/c/src/avroappend.c
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <errno.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#ifndef _WIN32
+#include <unistd.h>
+#endif
+
+#include "avro.h"
+
+int process_file(const char *in_filename, const char *out_filename)
+{
+	avro_file_reader_t  reader;
+	avro_file_writer_t  writer;
+
+	if (in_filename == NULL) {
+		if (avro_file_reader_fp(stdin, "<stdin>", 0, &reader)) {
+			fprintf(stderr, "Error opening <stdin>:\n  %s\n",
+				avro_strerror());
+			return 1;
+		}
+	} else {
+		if (avro_file_reader(in_filename, &reader)) {
+			fprintf(stderr, "Error opening %s:\n  %s\n",
+				in_filename, avro_strerror());
+			return 1;
+		}
+	}
+
+	avro_schema_t  wschema;
+	wschema = avro_file_reader_get_writer_schema(reader);
+
+	/* Check that the reader schema is the same as the writer schema */
+	{
+		avro_schema_t oschema;
+		avro_file_reader_t oreader;
+
+		if (avro_file_reader(out_filename, &oreader)) {
+			fprintf(stderr, "Error opening %s:\n   %s\n",
+					out_filename, avro_strerror());
+			avro_file_reader_close(reader);
+			return 1;
+		}
+
+		oschema = avro_file_reader_get_writer_schema(oreader);
+
+		if (avro_schema_equal(oschema, wschema) == 0) {
+			fprintf(stderr, "Error: reader and writer schema are not equal.\n");
+			avro_file_reader_close(oreader);
+			avro_file_reader_close(reader);
+			return 1;
+		}
+
+		avro_file_reader_close(oreader);
+		avro_schema_decref(oschema);
+	}
+
+	if (avro_file_writer_open(out_filename, &writer)) {
+		fprintf(stderr, "Error opening %s:\n   %s\n",
+				out_filename, avro_strerror());
+		avro_file_reader_close(reader);
+		return 1;
+	}
+
+	avro_value_iface_t  *iface;
+	avro_value_t  value;
+
+	iface = avro_generic_class_from_schema(wschema);
+	avro_generic_value_new(iface, &value);
+
+	while (avro_file_reader_read_value(reader, &value) == 0) {
+		if (avro_file_writer_append_value(writer, &value)) {
+			fprintf(stderr, "Error writing to %s:\n  %s\n",
+				out_filename, avro_strerror());
+			return 1;
+		}
+		avro_value_reset(&value);
+	}
+
+	avro_file_reader_close(reader);
+	avro_file_writer_close(writer);
+	avro_value_decref(&value);
+	avro_value_iface_decref(iface);
+	avro_schema_decref(wschema);
+
+	return 0;
+}
+
+static void usage(void)
+{
+	fprintf(stderr,
+		"Usage: avroappend [<input avro file>] <output avro file>\n");
+}
+
+static int check_filenames(const char *in_filename, const char *out_filename)
+{
+	if (in_filename == NULL) {
+		return 0;
+	}
+
+	struct stat in_stat;
+	struct stat out_stat;
+
+	if (stat(in_filename, &in_stat) == -1) {
+		fprintf(stderr, "stat error on %s: %s\n", in_filename, strerror(errno));
+		return 2;
+	}
+
+	if (stat(out_filename, &out_stat) == -1) {
+		fprintf(stderr, "stat error on %s: %s\n", out_filename, strerror(errno));
+		return 2;
+	}
+
+	if (in_stat.st_dev == out_stat.st_dev && in_stat.st_ino == out_stat.st_ino) {
+		return 1;
+	}
+
+	return 0;
+}
+
+int main(int argc, char **argv)
+{
+	char *in_filename;
+	char *out_filename;
+
+	argc--;
+	argv++;
+
+	if (argc == 2) {
+		in_filename = argv[0];
+		out_filename = argv[1];
+	} else if (argc == 1) {
+		in_filename = NULL;
+		out_filename = argv[0];
+	} else {
+		fprintf(stderr, "Incorrect number of arguments\n\n");
+		usage();
+		exit(1);
+	}
+
+	int ret = check_filenames(in_filename, out_filename);
+
+	if (ret == 1) {
+		fprintf(stderr, "Files are the same.\n");
+	}
+
+	if (ret > 0) {
+		exit(1);
+	}
+
+	exit(process_file(in_filename, out_filename));
+}
diff --git a/lang/c/src/avrocat.c b/lang/c/src/avrocat.c
new file mode 100644
index 0000000..df3e33f
--- /dev/null
+++ b/lang/c/src/avrocat.c
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <errno.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro.h"
+#include "avro_private.h"
+
+
+/*-- PROCESSING A FILE --*/
+
+static void
+process_file(const char *filename)
+{
+	avro_file_reader_t  reader;
+	FILE *fp;
+	int  should_close;
+
+	if (filename == NULL) {
+		fp = stdin;
+		filename = "<stdin>";
+		should_close = 0;
+	} else {
+		fp = fopen(filename, "rb");
+		should_close = 1;
+
+		if (fp == NULL) {
+			fprintf(stderr, "Error opening %s:\n  %s\n",
+				filename, strerror(errno));
+			exit(1);
+		}
+	}
+
+	if (avro_file_reader_fp(fp, filename, 0, &reader)) {
+		fprintf(stderr, "Error opening %s:\n  %s\n",
+			filename, avro_strerror());
+		if (should_close) {
+			fclose(fp);
+		}
+		exit(1);
+	}
+
+	avro_schema_t  wschema;
+	avro_value_iface_t  *iface;
+	avro_value_t  value;
+
+	wschema = avro_file_reader_get_writer_schema(reader);
+	iface = avro_generic_class_from_schema(wschema);
+	avro_generic_value_new(iface, &value);
+
+	int rval;
+
+	while ((rval = avro_file_reader_read_value(reader, &value)) == 0) {
+		char  *json;
+
+		if (avro_value_to_json(&value, 1, &json)) {
+			fprintf(stderr, "Error converting value to JSON: %s\n",
+				avro_strerror());
+		} else {
+			printf("%s\n", json);
+			free(json);
+		}
+
+		avro_value_reset(&value);
+	}
+
+	// If it was not an EOF that caused it to fail,
+	// print the error.
+	if (rval != EOF) {
+		fprintf(stderr, "Error: %s\n", avro_strerror());
+	}
+
+	avro_file_reader_close(reader);
+	avro_value_decref(&value);
+	avro_value_iface_decref(iface);
+	avro_schema_decref(wschema);
+
+	if (should_close) {
+		fclose(fp);
+	}
+}
+
+
+/*-- MAIN PROGRAM --*/
+
+static void usage(void)
+{
+	fprintf(stderr,
+		"Usage: avrocat [<avro data file>]\n");
+}
+
+
+int main(int argc, char **argv)
+{
+	char  *data_filename;
+
+	if (argc == 2) {
+		data_filename = argv[1];
+	} else if (argc == 1) {
+		data_filename = NULL;
+	} else {
+		fprintf(stderr, "Can't read from multiple input files.\n");
+		usage();
+		exit(1);
+	}
+
+	/* Process the data file */
+	process_file(data_filename);
+	return 0;
+}
diff --git a/lang/c/src/avromod.c b/lang/c/src/avromod.c
new file mode 100644
index 0000000..5db1df2
--- /dev/null
+++ b/lang/c/src/avromod.c
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <errno.h>
+#include <getopt.h>
+#include <limits.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro.h"
+#include "avro_private.h"
+
+
+/* The compression codec to use. */
+static const char  *codec = "null";
+
+/* The block size to use. */
+static size_t  block_size = 0;
+
+/*-- PROCESSING A FILE --*/
+
+static void
+process_file(const char *in_filename, const char *out_filename)
+{
+	avro_file_reader_t  reader;
+	avro_file_writer_t  writer;
+
+	if (in_filename == NULL) {
+		if (avro_file_reader_fp(stdin, "<stdin>", 0, &reader)) {
+			fprintf(stderr, "Error opening <stdin>:\n  %s\n",
+				avro_strerror());
+			exit(1);
+		}
+	} else {
+		if (avro_file_reader(in_filename, &reader)) {
+			fprintf(stderr, "Error opening %s:\n  %s\n",
+				in_filename, avro_strerror());
+			exit(1);
+		}
+	}
+
+	avro_schema_t  wschema;
+	avro_value_iface_t  *iface;
+	avro_value_t  value;
+	int rval;
+
+	wschema = avro_file_reader_get_writer_schema(reader);
+	iface = avro_generic_class_from_schema(wschema);
+	avro_generic_value_new(iface, &value);
+
+	if (avro_file_writer_create_with_codec
+	    (out_filename, wschema, &writer, codec, block_size)) {
+		fprintf(stderr, "Error creating %s:\n  %s\n",
+			out_filename, avro_strerror());
+		exit(1);
+	}
+
+	while ((rval = avro_file_reader_read_value(reader, &value)) == 0) {
+		if (avro_file_writer_append_value(writer, &value)) {
+			fprintf(stderr, "Error writing to %s:\n  %s\n",
+				out_filename, avro_strerror());
+			exit(1);
+		}
+		avro_value_reset(&value);
+	}
+
+	if (rval != EOF) {
+		fprintf(stderr, "Error reading value: %s\n", avro_strerror());
+	}
+
+	avro_file_reader_close(reader);
+	avro_file_writer_close(writer);
+	avro_value_decref(&value);
+	avro_value_iface_decref(iface);
+	avro_schema_decref(wschema);
+}
+
+
+/*-- MAIN PROGRAM --*/
+
+static struct option longopts[] = {
+	{ "block-size", required_argument, NULL, 'b' },
+	{ "codec", required_argument, NULL, 'c' },
+	{ NULL, 0, NULL, 0 }
+};
+
+static void usage(void)
+{
+	fprintf(stderr,
+		"Usage: avromod [--codec=<compression codec>]\n"
+		"               [--block-size=<block size>]\n"
+		"               [<input avro file>]\n"
+		"                <output avro file>\n");
+}
+
+static void
+parse_block_size(const char *optarg)
+{
+	unsigned long  ul;
+	char  *end;
+
+	ul = strtoul(optarg, &end, 10);
+	if ((ul == 0 && end == optarg) ||
+	    (ul == ULONG_MAX && errno == ERANGE)) {
+		fprintf(stderr, "Invalid block size: %s\n\n", optarg);
+		usage();
+		exit(1);
+	}
+	block_size = ul;
+}
+
+
+int main(int argc, char **argv)
+{
+	char  *in_filename;
+	char  *out_filename;
+
+	int  ch;
+	while ((ch = getopt_long(argc, argv, "b:c:", longopts, NULL)) != -1) {
+		switch (ch) {
+			case 'b':
+				parse_block_size(optarg);
+				break;
+
+			case 'c':
+				codec = optarg;
+				break;
+
+			default:
+				usage();
+				exit(1);
+		}
+	}
+
+	argc -= optind;
+	argv += optind;
+
+	if (argc == 2) {
+		in_filename = argv[0];
+		out_filename = argv[1];
+	} else if (argc == 1) {
+		in_filename = NULL;
+		out_filename = argv[0];
+	} else {
+		fprintf(stderr, "Incorrect number of arguments.\n");
+		usage();
+		exit(1);
+	}
+
+	/* Process the data file */
+	process_file(in_filename, out_filename);
+	return 0;
+}
diff --git a/lang/c/src/avropipe.c b/lang/c/src/avropipe.c
new file mode 100644
index 0000000..a325cf4
--- /dev/null
+++ b/lang/c/src/avropipe.c
@@ -0,0 +1,432 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <ctype.h>
+#include <errno.h>
+#include <getopt.h>
+#include <avro/platform.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro.h"
+#include "avro_private.h"
+
+
+/* The path separator to use in the JSON output. */
+
+static const char  *separator = "/";
+
+
+/*-- PROCESSING A FILE --*/
+
+/**
+ * Fills in a raw string with the path to an element of an array.
+ */
+
+static void
+create_array_prefix(avro_raw_string_t *dest, const char *prefix, size_t index)
+{
+	static char  buf[100];
+	snprintf(buf, sizeof(buf), "%" PRIsz, index);
+	avro_raw_string_set(dest, prefix);
+	avro_raw_string_append(dest, separator);
+	avro_raw_string_append(dest, buf);
+}
+
+static void
+create_object_prefix(avro_raw_string_t *dest, const char *prefix, const char *key)
+{
+	/*
+	 * Make sure that the key doesn't contain the separator
+	 * character.
+	 */
+
+	if (strstr(key, separator) != NULL) {
+		fprintf(stderr,
+			"Error: Element \"%s\" in object %s "
+			"contains the separator character.\n"
+			"Please use the --separator option to choose another.\n",
+			key, prefix);
+		exit(1);
+	}
+
+	avro_raw_string_set(dest, prefix);
+	avro_raw_string_append(dest, separator);
+	avro_raw_string_append(dest, key);
+}
+
+static void
+print_bytes_value(const char *buf, size_t size)
+{
+	size_t  i;
+	printf("\"");
+	for (i = 0; i < size; i++)
+	{
+		if (buf[i] == '"') {
+			printf("\\\"");
+		} else if (buf[i] == '\\') {
+			printf("\\\\");
+		} else if (buf[i] == '\b') {
+			printf("\\b");
+		} else if (buf[i] == '\f') {
+			printf("\\f");
+		} else if (buf[i] == '\n') {
+			printf("\\n");
+		} else if (buf[i] == '\r') {
+			printf("\\r");
+		} else if (buf[i] == '\t') {
+			printf("\\t");
+		} else if (isprint(buf[i])) {
+			printf("%c", (int) buf[i]);
+		} else {
+			printf("\\u00%02x", (unsigned int) (unsigned char) buf[i]);
+		}
+	}
+	printf("\"");
+}
+
+static void
+process_value(const char *prefix, avro_value_t *value);
+
+static void
+process_array(const char *prefix, avro_value_t *value)
+{
+	printf("%s\t[]\n", prefix);
+	size_t  element_count;
+	avro_value_get_size(value, &element_count);
+
+	avro_raw_string_t  element_prefix;
+	avro_raw_string_init(&element_prefix);
+
+	size_t  i;
+	for (i = 0; i < element_count; i++) {
+		avro_value_t  element_value;
+		avro_value_get_by_index(value, i, &element_value, NULL);
+
+		create_array_prefix(&element_prefix, prefix, i);
+		process_value((const char *) avro_raw_string_get(&element_prefix), &element_value);
+	}
+
+	avro_raw_string_done(&element_prefix);
+}
+
+static void
+process_enum(const char *prefix, avro_value_t *value)
+{
+	int  val;
+	const char  *symbol_name;
+
+	avro_schema_t  schema = avro_value_get_schema(value);
+	avro_value_get_enum(value, &val);
+	symbol_name = avro_schema_enum_get(schema, val);
+	printf("%s\t", prefix);
+	print_bytes_value(symbol_name, strlen(symbol_name));
+	printf("\n");
+}
+
+static void
+process_map(const char *prefix, avro_value_t *value)
+{
+	printf("%s\t{}\n", prefix);
+	size_t  element_count;
+	avro_value_get_size(value, &element_count);
+
+	avro_raw_string_t  element_prefix;
+	avro_raw_string_init(&element_prefix);
+
+	size_t  i;
+	for (i = 0; i < element_count; i++) {
+		const char  *key;
+		avro_value_t  element_value;
+		avro_value_get_by_index(value, i, &element_value, &key);
+
+		create_object_prefix(&element_prefix, prefix, key);
+		process_value((const char *) avro_raw_string_get(&element_prefix), &element_value);
+	}
+
+	avro_raw_string_done(&element_prefix);
+}
+
+static void
+process_record(const char *prefix, avro_value_t *value)
+{
+	printf("%s\t{}\n", prefix);
+	size_t  field_count;
+	avro_value_get_size(value, &field_count);
+
+	avro_raw_string_t  field_prefix;
+	avro_raw_string_init(&field_prefix);
+
+	size_t  i;
+	for (i = 0; i < field_count; i++) {
+		avro_value_t  field_value;
+		const char  *field_name;
+		avro_value_get_by_index(value, i, &field_value, &field_name);
+
+		create_object_prefix(&field_prefix, prefix, field_name);
+		process_value((const char *) avro_raw_string_get(&field_prefix), &field_value);
+	}
+
+	avro_raw_string_done(&field_prefix);
+}
+
+static void
+process_union(const char *prefix, avro_value_t *value)
+{
+	avro_value_t  branch_value;
+	avro_value_get_current_branch(value, &branch_value);
+
+	/* nulls in a union aren't wrapped in a JSON object */
+	if (avro_value_get_type(&branch_value) == AVRO_NULL) {
+		printf("%s\tnull\n", prefix);
+		return;
+	}
+
+	int  discriminant;
+	avro_value_get_discriminant(value, &discriminant);
+
+	avro_schema_t  schema = avro_value_get_schema(value);
+	avro_schema_t  branch_schema = avro_schema_union_branch(schema, discriminant);
+	const char  *branch_name = avro_schema_type_name(branch_schema);
+
+	avro_raw_string_t  branch_prefix;
+	avro_raw_string_init(&branch_prefix);
+	create_object_prefix(&branch_prefix, prefix, branch_name);
+
+	printf("%s\t{}\n", prefix);
+	process_value((const char *) avro_raw_string_get(&branch_prefix), &branch_value);
+
+	avro_raw_string_done(&branch_prefix);
+}
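+
+/* For example (data hypothetical): a union ["null", "string"] holding
+ * the string "s" at path /0/f prints
+ *
+ *     /0/f	{}
+ *     /0/f/string	"s"
+ *
+ * while a null branch prints just "/0/f	null" with no wrapper
+ * object. */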
+
+static void
+process_value(const char *prefix, avro_value_t *value)
+{
+	avro_type_t  type = avro_value_get_type(value);
+	switch (type) {
+		case AVRO_BOOLEAN:
+		{
+			int  val;
+			avro_value_get_boolean(value, &val);
+			printf("%s\t%s\n", prefix, val? "true": "false");
+			return;
+		}
+
+		case AVRO_BYTES:
+		{
+			const void  *buf;
+			size_t  size;
+			avro_value_get_bytes(value, &buf, &size);
+			printf("%s\t", prefix);
+			print_bytes_value((const char *) buf, size);
+			printf("\n");
+			return;
+		}
+
+		case AVRO_DOUBLE:
+		{
+			double  val;
+			avro_value_get_double(value, &val);
+			printf("%s\t%lf\n", prefix, val);
+			return;
+		}
+
+		case AVRO_FLOAT:
+		{
+			float  val;
+			avro_value_get_float(value, &val);
+			printf("%s\t%f\n", prefix, val);
+			return;
+		}
+
+		case AVRO_INT32:
+		{
+			int32_t  val;
+			avro_value_get_int(value, &val);
+			printf("%s\t%" PRId32 "\n", prefix, val);
+			return;
+		}
+
+		case AVRO_INT64:
+		{
+			int64_t  val;
+			avro_value_get_long(value, &val);
+			printf("%s\t%" PRId64 "\n", prefix, val);
+			return;
+		}
+
+		case AVRO_NULL:
+		{
+			avro_value_get_null(value);
+			printf("%s\tnull\n", prefix);
+			return;
+		}
+
+		case AVRO_STRING:
+		{
+			/* TODO: Convert the UTF-8 to the current
+			 * locale's character set */
+			const char  *buf;
+			size_t  size;
+			avro_value_get_string(value, &buf, &size);
+			printf("%s\t", prefix);
+			/* For strings, size includes the NUL terminator. */
+			print_bytes_value(buf, size-1);
+			printf("\n");
+			return;
+		}
+
+		case AVRO_ARRAY:
+			process_array(prefix, value);
+			return;
+
+		case AVRO_ENUM:
+			process_enum(prefix, value);
+			return;
+
+		case AVRO_FIXED:
+		{
+			const void  *buf;
+			size_t  size;
+			avro_value_get_fixed(value, &buf, &size);
+			printf("%s\t", prefix);
+			print_bytes_value((const char *) buf, size);
+			printf("\n");
+			return;
+		}
+
+		case AVRO_MAP:
+			process_map(prefix, value);
+			return;
+
+		case AVRO_RECORD:
+			process_record(prefix, value);
+			return;
+
+		case AVRO_UNION:
+			process_union(prefix, value);
+			return;
+
+		default:
+		{
+			fprintf(stderr, "Unknown schema type\n");
+			exit(1);
+		}
+	}
+}
+
+static void
+process_file(const char *filename)
+{
+	avro_file_reader_t  reader;
+
+	if (filename == NULL) {
+		if (avro_file_reader_fp(stdin, "<stdin>", 0, &reader)) {
+			fprintf(stderr, "Error opening <stdin>:\n  %s\n",
+				avro_strerror());
+			exit(1);
+		}
+	} else {
+		if (avro_file_reader(filename, &reader)) {
+			fprintf(stderr, "Error opening %s:\n  %s\n",
+				filename, avro_strerror());
+			exit(1);
+		}
+	}
+
+	/* The JSON root is an array */
+	printf("%s\t[]\n", separator);
+
+	avro_raw_string_t  prefix;
+	avro_raw_string_init(&prefix);
+
+	avro_schema_t  wschema = avro_file_reader_get_writer_schema(reader);
+	avro_value_iface_t  *iface = avro_generic_class_from_schema(wschema);
+	avro_value_t  value;
+	avro_generic_value_new(iface, &value);
+
+	size_t  record_number = 0;
+	int rval;
+
+	for (; (rval = avro_file_reader_read_value(reader, &value)) == 0; record_number++) {
+		create_array_prefix(&prefix, "", record_number);
+		process_value((const char *) avro_raw_string_get(&prefix), &value);
+		avro_value_reset(&value);
+	}
+
+	if (rval != EOF) {
+		fprintf(stderr, "Error reading value: %s", avro_strerror());
+	}
+
+	avro_raw_string_done(&prefix);
+	avro_value_decref(&value);
+	avro_value_iface_decref(iface);
+	avro_file_reader_close(reader);
+	avro_schema_decref(wschema);
+}
+
+
+/*-- MAIN PROGRAM --*/
+static struct option longopts[] = {
+	{ "separator", required_argument, NULL, 's' },
+	{ NULL, 0, NULL, 0 }
+};
+
+static void usage(void)
+{
+	fprintf(stderr,
+		"Usage: avropipe [--separator=<separator>]\n"
+		"                <avro data file>\n");
+}
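+
+/* Illustrative invocations (file names hypothetical):
+ *
+ *     avropipe data.avro                  read a data file
+ *     avropipe --separator=. data.avro    use "." in output paths
+ *     some-cmd | avropipe                 no argument: read stdin
+ */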
+
+
+int main(int argc, char **argv)
+{
+	char  *data_filename;
+
+	int  ch;
+	while ((ch = getopt_long(argc, argv, "s:", longopts, NULL) ) != -1) {
+		switch (ch) {
+			case 's':
+				separator = optarg;
+				break;
+
+			default:
+				usage();
+				exit(1);
+		}
+	}
+
+	argc -= optind;
+	argv += optind;
+
+	if (argc == 1) {
+		data_filename = argv[0];
+	} else if (argc == 0) {
+		data_filename = NULL;
+	} else {
+		fprintf(stderr, "Can't read from multiple input files.\n");
+		usage();
+		exit(1);
+	}
+
+	/* Process the data file */
+	process_file(data_filename);
+	return 0;
+}
diff --git a/lang/c/src/codec.c b/lang/c/src/codec.c
new file mode 100644
index 0000000..4a2502b
--- /dev/null
+++ b/lang/c/src/codec.c
@@ -0,0 +1,614 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include <string.h>
+#ifdef SNAPPY_CODEC
+#include <snappy-c.h>
+#  if defined(__APPLE__)
+#    include <libkern/OSByteOrder.h>
+#    define __bswap_32 OSSwapInt32
+#  else
+#    include <byteswap.h>
+#  endif
+#endif
+#ifdef DEFLATE_CODEC
+#include <zlib.h>
+#endif
+#ifdef LZMA_CODEC
+#include <lzma.h>
+#endif
+#include "avro/errors.h"
+#include "avro/allocation.h"
+#include "codec.h"
+
+#define DEFAULT_BLOCK_SIZE	(16 * 1024)
+
+/* NULL codec */
+
+static int
+codec_null(avro_codec_t codec)
+{
+	codec->name = "null";
+	codec->type = AVRO_CODEC_NULL;
+	codec->block_size = 0;
+	codec->used_size = 0;
+	codec->block_data = NULL;
+	codec->codec_data = NULL;
+
+	return 0;
+}
+
+static int encode_null(avro_codec_t c, void * data, int64_t len)
+{
+	c->block_data = data;
+	c->block_size = len;
+	c->used_size = len;
+
+	return 0;
+}
+
+static int decode_null(avro_codec_t c, void * data, int64_t len)
+{
+	c->block_data = data;
+	c->block_size = len;
+	c->used_size = len;
+
+	return 0;
+}
+
+static int reset_null(avro_codec_t c)
+{
+	c->block_data = NULL;
+	c->block_size = 0;
+	c->used_size = 0;
+	c->codec_data = NULL;
+
+	return 0;
+}
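+
+/* The null codec never copies: encode and decode simply alias the
+ * caller's buffer, so there is nothing for reset to free. */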
+
+/* Snappy codec */
+
+#ifdef SNAPPY_CODEC
+
+static int
+codec_snappy(avro_codec_t codec)
+{
+	codec->name = "snappy";
+	codec->type = AVRO_CODEC_SNAPPY;
+	codec->block_size = 0;
+	codec->used_size = 0;
+	codec->block_data = NULL;
+	codec->codec_data = NULL;
+
+	return 0;
+}
+
+static int encode_snappy(avro_codec_t c, void * data, int64_t len)
+{
+	uint32_t crc;
+	size_t outlen = snappy_max_compressed_length(len);
+
+	if (!c->block_data) {
+		c->block_data = avro_malloc(outlen+4);
+		c->block_size = outlen+4;
+	} else if (c->block_size < (int64_t) (outlen+4)) {
+		c->block_data = avro_realloc(c->block_data, c->block_size, (outlen+4));
+		c->block_size = outlen+4;
+	}
+
+	if (!c->block_data) {
+		avro_set_error("Cannot allocate memory for snappy");
+		return 1;
+	}
+
+	if (snappy_compress(data, len, c->block_data, &outlen) != SNAPPY_OK) {
+		avro_set_error("Error compressing block with Snappy");
+		return 1;
+	}
+
+	crc = __bswap_32(crc32(0, data, len));
+	memcpy(c->block_data+outlen, &crc, 4);
+	c->used_size = outlen+4;
+
+	return 0;
+}
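+
+/* The "snappy" codec frames each block as the Snappy-compressed data
+ * followed by a 4-byte big-endian CRC-32 of the *uncompressed* data.
+ * The unconditional __bswap_32 above yields that byte order on
+ * little-endian hosts, which this code implicitly assumes. */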
+
+static int decode_snappy(avro_codec_t c, void * data, int64_t len)
+{
+	uint32_t crc;
+	size_t outlen;
+
+	if (snappy_uncompressed_length(data, len-4, &outlen) != SNAPPY_OK) {
+		avro_set_error("Uncompressed length error in snappy");
+		return 1;
+	}
+
+	if (!c->block_data) {
+		c->block_data = avro_malloc(outlen);
+		c->block_size = outlen;
+	} else if ((size_t) c->block_size < outlen) {
+		c->block_data = avro_realloc(c->block_data, c->block_size, outlen);
+		c->block_size = outlen;
+	}
+
+	if (!c->block_data) {
+		avro_set_error("Cannot allocate memory for snappy");
+		return 1;
+	}
+
+	if (snappy_uncompress(data, len-4, c->block_data, &outlen) != SNAPPY_OK) {
+		avro_set_error("Error uncompressing block with Snappy");
+		return 1;
+	}
+
+	crc = __bswap_32(crc32(0, c->block_data, outlen));
+	if (memcmp(&crc, (char*)data+len-4, 4)) {
+		avro_set_error("CRC32 check failure uncompressing block with Snappy");
+		return 1;
+	}
+
+	c->used_size = outlen;
+
+	return 0;
+}
+
+static int reset_snappy(avro_codec_t c)
+{
+	if (c->block_data) {
+		avro_free(c->block_data, c->block_size);
+	}
+
+	c->block_data = NULL;
+	c->block_size = 0;
+	c->used_size = 0;
+	c->codec_data = NULL;
+
+	return 0;
+}
+
+#endif // SNAPPY_CODEC
+
+/* Deflate codec */
+
+#ifdef DEFLATE_CODEC
+
+struct codec_data_deflate {
+	z_stream deflate;
+	z_stream inflate;
+};
+#define codec_data_deflate_stream(cd)	&((struct codec_data_deflate *)cd)->deflate
+#define codec_data_inflate_stream(cd)	&((struct codec_data_deflate *)cd)->inflate
+
+
+static int
+codec_deflate(avro_codec_t codec)
+{
+	codec->name = "deflate";
+	codec->type = AVRO_CODEC_DEFLATE;
+	codec->block_size = 0;
+	codec->used_size = 0;
+	codec->block_data = NULL;
+	codec->codec_data = avro_new(struct codec_data_deflate);
+
+	if (!codec->codec_data) {
+		avro_set_error("Cannot allocate memory for zlib");
+		return 1;
+	}
+
+	z_stream *ds = codec_data_deflate_stream(codec->codec_data);
+	z_stream *is = codec_data_inflate_stream(codec->codec_data);
+
+	memset(ds, 0, sizeof(z_stream));
+	memset(is, 0, sizeof(z_stream));
+
+	ds->zalloc = is->zalloc = Z_NULL;
+	ds->zfree  = is->zfree  = Z_NULL;
+	ds->opaque = is->opaque = Z_NULL;
+
+	if (deflateInit2(ds, Z_BEST_COMPRESSION, Z_DEFLATED, -15, 8, Z_DEFAULT_STRATEGY) != Z_OK) {
+		avro_freet(struct codec_data_deflate, codec->codec_data);
+		codec->codec_data = NULL;
+		avro_set_error("Cannot initialize zlib deflate");
+		return 1;
+	}
+
+	if (inflateInit2(is, -15) != Z_OK) {
+		avro_freet(struct codec_data_deflate, codec->codec_data);
+		codec->codec_data = NULL;
+		avro_set_error("Cannot initialize zlib inflate");
+		return 1;
+	}
+
+	return 0;
+}
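+
+/* The negative windowBits (-15) passed above select "raw" deflate
+ * streams with no zlib header or checksum, which is the RFC 1951
+ * format the Avro "deflate" codec stores. */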
+
+static int encode_deflate(avro_codec_t c, void * data, int64_t len)
+{
+	int err;
+	int64_t defl_len = compressBound((uLong)len * 1.2);
+
+	if (!c->block_data) {
+		c->block_data = avro_malloc(defl_len);
+		c->block_size = defl_len;
+	} else if ( c->block_size < defl_len) {
+		c->block_data = avro_realloc(c->block_data, c->block_size, defl_len);
+		c->block_size = defl_len;
+	}
+
+	if (!c->block_data)
+	{
+		avro_set_error("Cannot allocate memory for deflate");
+		return 1;
+	}
+
+	c->used_size = 0;
+
+	z_stream *s = codec_data_deflate_stream(c->codec_data);
+
+	s->next_in = (Bytef*)data;
+	s->avail_in = (uInt)len;
+
+	s->next_out = c->block_data;
+	s->avail_out = (uInt)c->block_size;
+
+	s->total_out = 0;
+
+	err = deflate(s, Z_FINISH);
+	if (err != Z_STREAM_END) {
+		deflateEnd(s);
+		if (err != Z_OK) {
+			avro_set_error("Error compressing block with deflate (%i)", err);
+			return 1;
+		}
+		return 0;
+	}
+
+	/* Record the compressed length reported by deflate().  Note that
+	 * this also shrinks the recorded block_size to the output length. */
+	c->block_size = s->total_out;
+	c->used_size = s->total_out;
+
+	if (deflateReset(s) != Z_OK) {
+		return 1;
+	}
+
+	return 0;
+}
+
+static int decode_deflate(avro_codec_t c, void * data, int64_t len)
+{
+	int err;
+	z_stream *s = codec_data_inflate_stream(c->codec_data);
+
+	if (!c->block_data) {
+		c->block_data = avro_malloc(DEFAULT_BLOCK_SIZE);
+		c->block_size = DEFAULT_BLOCK_SIZE;
+	}
+
+	if (!c->block_data)
+	{
+		avro_set_error("Cannot allocate memory for deflate");
+		return 1;
+	}
+
+	c->used_size = 0;
+
+	s->next_in = data;
+	s->avail_in = len;
+
+	s->next_out = c->block_data;
+	s->avail_out = c->block_size;
+
+	s->total_out = 0;
+
+	do
+	{
+		err = inflate(s, Z_FINISH);
+
+		// If inflate() reports Z_BUF_ERROR while output space is
+		// still available, the input itself is bad (the same
+		// heuristic cpython's zlibmodule.c applies).
+		if (err == Z_BUF_ERROR && s->avail_out > 0) {
+			inflateEnd(s);
+			avro_set_error("Error decompressing block with deflate, possible data error");
+			return 1;
+		}
+
+		// The output buffer was not big enough; double it and let
+		// inflate() continue where it left off.
+		if (err == Z_BUF_ERROR)
+		{
+			c->block_data = avro_realloc(c->block_data, c->block_size, c->block_size * 2);
+			s->next_out = c->block_data + s->total_out;
+			s->avail_out += c->block_size;
+			c->block_size = c->block_size * 2;
+		}
+	} while (err == Z_BUF_ERROR);
+
+	if (err != Z_STREAM_END) {
+		inflateEnd(s);
+		if (err != Z_OK) {
+			avro_set_error("Error decompressing block with deflate (%i)", err);
+			return 1;
+		}
+		return 0;
+	}
+
+	c->used_size = s->total_out;
+
+	if (inflateReset(s) != Z_OK) {
+		avro_set_error("Error resetting deflate decompression");
+		return 1;
+	}
+
+	return 0;
+}
+
+static int reset_deflate(avro_codec_t c)
+{
+	if (c->block_data) {
+		avro_free(c->block_data, c->block_size);
+	}
+	if (c->codec_data) {
+		deflateEnd(codec_data_deflate_stream(c->codec_data));
+		inflateEnd(codec_data_inflate_stream(c->codec_data));
+		avro_freet(struct codec_data_deflate, c->codec_data);
+	}
+
+	c->block_data = NULL;
+	c->block_size = 0;
+	c->used_size = 0;
+	c->codec_data = NULL;
+
+	return 0;
+}
+
+#endif // DEFLATE_CODEC
+
+/* LZMA codec */
+
+#ifdef LZMA_CODEC
+
+struct codec_data_lzma {
+	lzma_filter filters[2];
+	lzma_options_lzma options;
+};
+#define codec_data_lzma_filters(cd)	((struct codec_data_lzma *)cd)->filters
+#define codec_data_lzma_options(cd)	&((struct codec_data_lzma *)cd)->options
+
+static int
+codec_lzma(avro_codec_t codec)
+{
+	codec->name = "lzma";
+	codec->type = AVRO_CODEC_LZMA;
+	codec->block_size = 0;
+	codec->used_size = 0;
+	codec->block_data = NULL;
+	codec->codec_data = avro_new(struct codec_data_lzma);
+
+	if (!codec->codec_data) {
+		avro_set_error("Cannot allocate memory for lzma");
+		return 1;
+	}
+
+	lzma_options_lzma* opt = codec_data_lzma_options(codec->codec_data);
+	lzma_lzma_preset(opt, LZMA_PRESET_DEFAULT);
+
+	lzma_filter* filters = codec_data_lzma_filters(codec->codec_data);
+	filters[0].id = LZMA_FILTER_LZMA2;
+	filters[0].options = opt;
+	filters[1].id = LZMA_VLI_UNKNOWN;
+	filters[1].options = NULL;
+
+	return 0;
+}
+
+static int encode_lzma(avro_codec_t codec, void * data, int64_t len)
+{
+	lzma_ret ret;
+	size_t written = 0;
+	lzma_filter* filters = codec_data_lzma_filters(codec->codec_data);
+
+	int64_t buff_len = len + lzma_raw_encoder_memusage(filters);
+
+	if (!codec->block_data) {
+		codec->block_data = avro_malloc(buff_len);
+		codec->block_size = buff_len;
+	}
+
+	if (!codec->block_data)
+	{
+		avro_set_error("Cannot allocate memory for lzma encoder");
+		return 1;
+	}
+
+	ret = lzma_raw_buffer_encode(filters, NULL, data, len, codec->block_data, &written, codec->block_size);
+
+	codec->used_size = written;
+
+	if (ret != LZMA_OK) {
+		avro_set_error("Error in lzma encoder");
+		return 1;
+	}
+
+	return 0;
+}
+
+static int decode_lzma(avro_codec_t codec, void * data, int64_t len)
+{
+	size_t read_pos = 0;
+	size_t write_pos = 0;
+	lzma_ret ret;
+	lzma_filter* filters = codec_data_lzma_filters(codec->codec_data);
+
+	if (!codec->block_data) {
+		codec->block_data = avro_malloc(DEFAULT_BLOCK_SIZE);
+		codec->block_size = DEFAULT_BLOCK_SIZE;
+	}
+
+	if (!codec->block_data) {
+		avro_set_error("Cannot allocate memory for lzma decoder");
+		return 1;
+	}
+
+	do
+	{
+		ret = lzma_raw_buffer_decode(filters, NULL, data,
+			&read_pos, len, codec->block_data, &write_pos,
+			codec->block_size);
+
+		codec->used_size = write_pos;
+
+		// If the decoder ran out of output space, double the buffer;
+		// read_pos and write_pos let it resume where it left off.
+		if (ret == LZMA_BUF_ERROR) {
+			codec->block_data = avro_realloc(codec->block_data, codec->block_size, codec->block_size * 2);
+			codec->block_size = codec->block_size * 2;
+		}
+
+	} while (ret == LZMA_BUF_ERROR);
+
+	if (ret != LZMA_OK) {
+		avro_set_error("Error in lzma decoder");
+		return 1;
+	}
+
+	return 0;
+}
+
+static int reset_lzma(avro_codec_t c)
+{
+	if (c->block_data) {
+		avro_free(c->block_data, c->block_size);
+	}
+	if (c->codec_data) {
+		avro_freet(struct codec_data_lzma, c->codec_data);
+	}
+
+	c->block_data = NULL;
+	c->block_size = 0;
+	c->used_size = 0;
+	c->codec_data = NULL;
+
+	return 0;
+}
+
+#endif // LZMA_CODEC
+
+/* Common interface */
+
+int avro_codec(avro_codec_t codec, const char *type)
+{
+	if (type == NULL) {
+		return codec_null(codec);
+	}
+
+#ifdef SNAPPY_CODEC
+	if (strcmp("snappy", type) == 0) {
+		return codec_snappy(codec);
+	}
+#endif
+
+#ifdef DEFLATE_CODEC
+	if (strcmp("deflate", type) == 0) {
+		return codec_deflate(codec);
+	}
+#endif
+
+#ifdef LZMA_CODEC
+	if (strcmp("lzma", type) == 0) {
+		return codec_lzma(codec);
+	}
+#endif
+
+	if (strcmp("null", type) == 0) {
+		return codec_null(codec);
+	}
+
+	avro_set_error("Unknown codec %s", type);
+	return 1;
+}
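+
+/* A minimal usage sketch of this interface (assuming a build with
+ * DEFLATE_CODEC defined):
+ *
+ *     struct avro_codec_t_  c;
+ *     if (avro_codec(&c, "deflate") == 0) {
+ *             if (avro_codec_encode(&c, buf, len) == 0) {
+ *                     // compressed bytes in c.block_data,
+ *                     // c.used_size bytes long
+ *             }
+ *             avro_codec_reset(&c);   // release scratch buffers
+ *     }
+ */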
+
+int avro_codec_encode(avro_codec_t c, void * data, int64_t len)
+{
+	switch(c->type)
+	{
+	case AVRO_CODEC_NULL:
+		return encode_null(c, data, len);
+#ifdef SNAPPY_CODEC
+	case AVRO_CODEC_SNAPPY:
+		return encode_snappy(c, data, len);
+#endif
+#ifdef DEFLATE_CODEC
+	case AVRO_CODEC_DEFLATE:
+		return encode_deflate(c, data, len);
+#endif
+#ifdef LZMA_CODEC
+	case AVRO_CODEC_LZMA:
+		return encode_lzma(c, data, len);
+#endif
+	default:
+		return 1;
+	}
+}
+
+int avro_codec_decode(avro_codec_t c, void * data, int64_t len)
+{
+	switch(c->type)
+	{
+	case AVRO_CODEC_NULL:
+		return decode_null(c, data, len);
+#ifdef SNAPPY_CODEC
+	case AVRO_CODEC_SNAPPY:
+		return decode_snappy(c, data, len);
+#endif
+#ifdef DEFLATE_CODEC
+	case AVRO_CODEC_DEFLATE:
+		return decode_deflate(c, data, len);
+#endif
+#ifdef LZMA_CODEC
+	case AVRO_CODEC_LZMA:
+		return decode_lzma(c, data, len);
+#endif
+	default:
+		return 1;
+	}
+}
+
+int avro_codec_reset(avro_codec_t c)
+{
+	switch(c->type)
+	{
+	case AVRO_CODEC_NULL:
+		return reset_null(c);
+#ifdef SNAPPY_CODEC
+	case AVRO_CODEC_SNAPPY:
+		return reset_snappy(c);
+#endif
+#ifdef DEFLATE_CODEC
+	case AVRO_CODEC_DEFLATE:
+		return reset_deflate(c);
+#endif
+#ifdef LZMA_CODEC
+	case AVRO_CODEC_LZMA:
+		return reset_lzma(c);
+#endif
+	default:
+		return 1;
+	}
+}
diff --git a/lang/c/src/codec.h b/lang/c/src/codec.h
new file mode 100644
index 0000000..cd1510c
--- /dev/null
+++ b/lang/c/src/codec.h
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#ifndef AVRO_CODEC_H
+#define	AVRO_CODEC_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <avro/platform.h>
+
+enum avro_codec_type_t {
+	AVRO_CODEC_NULL,
+	AVRO_CODEC_DEFLATE,
+	AVRO_CODEC_LZMA,
+	AVRO_CODEC_SNAPPY
+};
+typedef enum avro_codec_type_t avro_codec_type_t;
+
+struct avro_codec_t_ {
+	const char * name;
+	avro_codec_type_t type;
+	int64_t block_size;
+	int64_t used_size;
+	void * block_data;
+	void * codec_data;
+};
+typedef struct avro_codec_t_* avro_codec_t;
+
+int avro_codec(avro_codec_t c, const char *type);
+int avro_codec_reset(avro_codec_t c);
+int avro_codec_encode(avro_codec_t c, void * data, int64_t len);
+int avro_codec_decode(avro_codec_t c, void * data, int64_t len);
+
+CLOSE_EXTERN
+#endif
diff --git a/lang/c/src/consume-binary.c b/lang/c/src/consume-binary.c
new file mode 100644
index 0000000..3571526
--- /dev/null
+++ b/lang/c/src/consume-binary.c
@@ -0,0 +1,328 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include "avro_private.h"
+#include "avro/allocation.h"
+#include "avro/consumer.h"
+#include "avro/errors.h"
+#include "avro/resolver.h"
+#include "avro/value.h"
+#include <stdlib.h>
+#include <errno.h>
+#include <string.h>
+#include "encoding.h"
+#include "schema.h"
+#include "datum.h"
+
+
+static int
+read_enum(avro_reader_t reader, const avro_encoding_t * enc,
+	  avro_consumer_t *consumer, void *ud)
+{
+	int rval;
+	int64_t index;
+
+	check_prefix(rval, enc->read_long(reader, &index),
+		     "Cannot read enum value: ");
+	return avro_consumer_call(consumer, enum_value, index, ud);
+}
+
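+/*
+ * Arrays and maps are encoded as a sequence of blocks.  Each block
+ * starts with a long item count; a count of zero terminates the
+ * sequence, and a negative count means abs(count) items preceded by
+ * the block's size in bytes, which lets a reader skip the block
+ * without decoding it.
+ */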
+static int
+read_array(avro_reader_t reader, const avro_encoding_t * enc,
+	   avro_consumer_t *consumer, void *ud)
+{
+	int rval;
+	int64_t i;          /* index within the current block */
+	int64_t index = 0;  /* index within the entire array */
+	int64_t block_count;
+	int64_t block_size;
+
+	check_prefix(rval, enc->read_long(reader, &block_count),
+		     "Cannot read array block count: ");
+	check(rval, avro_consumer_call(consumer, array_start_block,
+				       1, block_count, ud));
+
+	while (block_count != 0) {
+		if (block_count < 0) {
+			block_count = block_count * -1;
+			check_prefix(rval, enc->read_long(reader, &block_size),
+				     "Cannot read array block size: ");
+		}
+
+		for (i = 0; i < block_count; i++, index++) {
+			avro_consumer_t  *element_consumer = NULL;
+			void  *element_ud = NULL;
+
+			check(rval,
+			      avro_consumer_call(consumer, array_element,
+					         index, &element_consumer, &element_ud,
+						 ud));
+
+			check(rval, avro_consume_binary(reader, element_consumer, element_ud));
+		}
+
+		check_prefix(rval, enc->read_long(reader, &block_count),
+			     "Cannot read array block count: ");
+		check(rval, avro_consumer_call(consumer, array_start_block,
+					       0, block_count, ud));
+	}
+
+	return 0;
+}
+
+static int
+read_map(avro_reader_t reader, const avro_encoding_t * enc,
+	 avro_consumer_t *consumer, void *ud)
+{
+	int rval;
+	int64_t i;          /* index within the current block */
+	int64_t index = 0;  /* index within the entire map */
+	int64_t block_count;
+	int64_t block_size;
+
+	check_prefix(rval, enc->read_long(reader, &block_count),
+		     "Cannot read map block count: ");
+	check(rval, avro_consumer_call(consumer, map_start_block,
+				       1, block_count, ud));
+
+	while (block_count != 0) {
+		if (block_count < 0) {
+			block_count = block_count * -1;
+			check_prefix(rval, enc->read_long(reader, &block_size),
+				     "Cannot read map block size: ");
+		}
+
+		for (i = 0; i < block_count; i++, index++) {
+			char *key;
+			int64_t key_size;
+			avro_consumer_t  *element_consumer = NULL;
+			void  *element_ud = NULL;
+
+			check_prefix(rval, enc->read_string(reader, &key, &key_size),
+				     "Cannot read map key: ");
+
+			rval = avro_consumer_call(consumer, map_element,
+						  index, key,
+						  &element_consumer, &element_ud,
+						  ud);
+			if (rval) {
+				avro_free(key, key_size);
+				return rval;
+			}
+
+			rval = avro_consume_binary(reader, element_consumer, element_ud);
+			if (rval) {
+				avro_free(key, key_size);
+				return rval;
+			}
+
+			avro_free(key, key_size);
+		}
+
+		check_prefix(rval, enc->read_long(reader, &block_count),
+			     "Cannot read map block count: ");
+		check(rval, avro_consumer_call(consumer, map_start_block,
+					       0, block_count, ud));
+	}
+
+	return 0;
+}
+
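+/*
+ * A union is encoded as a long discriminant selecting the branch (its
+ * position within the union schema), followed by the encoding of that
+ * branch's value.
+ */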
+static int
+read_union(avro_reader_t reader, const avro_encoding_t * enc,
+	   avro_consumer_t *consumer, void *ud)
+{
+	int rval;
+	int64_t discriminant;
+	avro_consumer_t  *branch_consumer = NULL;
+	void  *branch_ud = NULL;
+
+	check_prefix(rval, enc->read_long(reader, &discriminant),
+		     "Cannot read union discriminant: ");
+	check(rval, avro_consumer_call(consumer, union_branch,
+				       discriminant,
+				       &branch_consumer, &branch_ud, ud));
+	return avro_consume_binary(reader, branch_consumer, branch_ud);
+}
+
+static int
+read_record(avro_reader_t reader, const avro_encoding_t * enc,
+	    avro_consumer_t *consumer, void *ud)
+{
+	int rval;
+	size_t  num_fields;
+	unsigned int  i;
+
+	AVRO_UNUSED(enc);
+
+	check(rval, avro_consumer_call(consumer, record_start, ud));
+
+	num_fields = avro_schema_record_size(consumer->schema);
+	for (i = 0; i < num_fields; i++) {
+		avro_consumer_t  *field_consumer = NULL;
+		void  *field_ud = NULL;
+
+		check(rval, avro_consumer_call(consumer, record_field,
+					       i, &field_consumer, &field_ud,
+					       ud));
+
+		if (field_consumer) {
+			check(rval, avro_consume_binary(reader, field_consumer, field_ud));
+		} else {
+			avro_schema_t  field_schema =
+			    avro_schema_record_field_get_by_index(consumer->schema, i);
+			check(rval, avro_skip_data(reader, field_schema));
+		}
+	}
+
+	return 0;
+}
+
+int
+avro_consume_binary(avro_reader_t reader, avro_consumer_t *consumer, void *ud)
+{
+	int rval;
+	const avro_encoding_t *enc = &avro_binary_encoding;
+
+	check_param(EINVAL, reader, "reader");
+	check_param(EINVAL, consumer, "consumer");
+
+	switch (avro_typeof(consumer->schema)) {
+	case AVRO_NULL:
+		check_prefix(rval, enc->read_null(reader),
+			     "Cannot read null value: ");
+		check(rval, avro_consumer_call(consumer, null_value, ud));
+		break;
+
+	case AVRO_BOOLEAN:
+		{
+			int8_t b;
+			check_prefix(rval, enc->read_boolean(reader, &b),
+				     "Cannot read boolean value: ");
+			check(rval, avro_consumer_call(consumer, boolean_value, b, ud));
+		}
+		break;
+
+	case AVRO_STRING:
+		{
+			int64_t len;
+			char *s;
+			check_prefix(rval, enc->read_string(reader, &s, &len),
+				     "Cannot read string value: ");
+			check(rval, avro_consumer_call(consumer, string_value, s, len, ud));
+		}
+		break;
+
+	case AVRO_INT32:
+		{
+			int32_t i;
+			check_prefix(rval, enc->read_int(reader, &i),
+				    "Cannot read int value: ");
+			check(rval, avro_consumer_call(consumer, int_value, i, ud));
+		}
+		break;
+
+	case AVRO_INT64:
+		{
+			int64_t l;
+			check_prefix(rval, enc->read_long(reader, &l),
+				     "Cannot read long value: ");
+			check(rval, avro_consumer_call(consumer, long_value, l, ud));
+		}
+		break;
+
+	case AVRO_FLOAT:
+		{
+			float f;
+			check_prefix(rval, enc->read_float(reader, &f),
+				     "Cannot read float value: ");
+			check(rval, avro_consumer_call(consumer, float_value, f, ud));
+		}
+		break;
+
+	case AVRO_DOUBLE:
+		{
+			double d;
+			check_prefix(rval, enc->read_double(reader, &d),
+				     "Cannot read double value: ");
+			check(rval, avro_consumer_call(consumer, double_value, d, ud));
+		}
+		break;
+
+	case AVRO_BYTES:
+		{
+			char *bytes;
+			int64_t len;
+			check_prefix(rval, enc->read_bytes(reader, &bytes, &len),
+				     "Cannot read bytes value: ");
+			check(rval, avro_consumer_call(consumer, bytes_value, bytes, len, ud));
+		}
+		break;
+
+	case AVRO_FIXED:
+		{
+			char *bytes;
+			int64_t size =
+			    avro_schema_to_fixed(consumer->schema)->size;
+
+			bytes = (char *) avro_malloc(size);
+			if (!bytes) {
+				avro_prefix_error("Cannot allocate new fixed value");
+				return ENOMEM;
+			}
+			rval = avro_read(reader, bytes, size);
+			if (rval) {
+				avro_prefix_error("Cannot read fixed value: ");
+				avro_free(bytes, size);
+				return rval;
+			}
+
+			rval = avro_consumer_call(consumer, fixed_value, bytes, size, ud);
+			if (rval) {
+				avro_free(bytes, size);
+				return rval;
+			}
+		}
+		break;
+
+	case AVRO_ENUM:
+		check(rval, read_enum(reader, enc, consumer, ud));
+		break;
+
+	case AVRO_ARRAY:
+		check(rval, read_array(reader, enc, consumer, ud));
+		break;
+
+	case AVRO_MAP:
+		check(rval, read_map(reader, enc, consumer, ud));
+		break;
+
+	case AVRO_UNION:
+		check(rval, read_union(reader, enc, consumer, ud));
+		break;
+
+	case AVRO_RECORD:
+		check(rval, read_record(reader, enc, consumer, ud));
+		break;
+
+	case AVRO_LINK:
+		avro_set_error("Consumer can't consume a link schema directly");
+		return EINVAL;
+	}
+
+	return 0;
+}
diff --git a/lang/c/src/consumer.c b/lang/c/src/consumer.c
new file mode 100644
index 0000000..e41f7a9
--- /dev/null
+++ b/lang/c/src/consumer.c
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.	 You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include "avro/consumer.h"
+
+void avro_consumer_free(avro_consumer_t *consumer)
+{
+	consumer->free(consumer);
+}
diff --git a/lang/c/src/datafile.c b/lang/c/src/datafile.c
new file mode 100644
index 0000000..0ee9879
--- /dev/null
+++ b/lang/c/src/datafile.c
@@ -0,0 +1,745 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro_private.h"
+#include "avro/allocation.h"
+#include "avro/generic.h"
+#include "avro/errors.h"
+#include "avro/value.h"
+#include "encoding.h"
+#include "codec.h"
+#include <stdio.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <time.h>
+#include <string.h>
+
+struct avro_file_reader_t_ {
+	avro_schema_t writers_schema;
+	avro_reader_t reader;
+	avro_reader_t block_reader;
+	avro_codec_t codec;
+	char sync[16];
+	int64_t blocks_read;
+	int64_t blocks_total;
+	int64_t current_blocklen;
+	char * current_blockdata;
+};
+
+struct avro_file_writer_t_ {
+	avro_schema_t writers_schema;
+	avro_writer_t writer;
+	avro_codec_t codec;
+	char sync[16];
+	int block_count;
+	size_t block_size;
+	avro_writer_t datum_writer;
+	char* datum_buffer;
+	size_t datum_buffer_size;
+	char schema_buf[64 * 1024];
+};
+
+#define DEFAULT_BLOCK_SIZE (16 * 1024)
+
+/* Note: We should not just read /dev/random here, because it may not
+ * exist on all platforms (e.g., Win32).
+ */
+static void generate_sync(avro_file_writer_t w)
+{
+	unsigned int i;
+	srand(time(NULL));
+	for (i = 0; i < sizeof(w->sync); i++) {
+		w->sync[i] = ((double)rand() / (RAND_MAX + 1.0)) * 255;
+	}
+}
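+
+/* The sync marker only needs to make accidental collisions with data
+ * unlikely; rand() seeded with the current time suffices for that,
+ * though it is in no way cryptographically random. */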
+
+static int write_sync(avro_file_writer_t w)
+{
+	return avro_write(w->writer, w->sync, sizeof(w->sync));
+}
+
+static int write_header(avro_file_writer_t w)
+{
+	int rval;
+	uint8_t version = 1;
+	/* TODO: remove this static buffer */
+	avro_writer_t schema_writer;
+	const avro_encoding_t *enc = &avro_binary_encoding;
+	int64_t schema_len;
+
+	/* Generate random sync */
+	generate_sync(w);
+
+	check(rval, avro_write(w->writer, "Obj", 3));
+	check(rval, avro_write(w->writer, &version, 1));
+
+	check(rval, enc->write_long(w->writer, 2));
+	check(rval, enc->write_string(w->writer, "avro.codec"));
+	check(rval, enc->write_bytes(w->writer, w->codec->name, strlen(w->codec->name)));
+	check(rval, enc->write_string(w->writer, "avro.schema"));
+	schema_writer =
+	    avro_writer_memory(&w->schema_buf[0], sizeof(w->schema_buf));
+	rval = avro_schema_to_json(w->writers_schema, schema_writer);
+	if (rval) {
+		avro_writer_free(schema_writer);
+		return rval;
+	}
+	schema_len = avro_writer_tell(schema_writer);
+	avro_writer_free(schema_writer);
+	check(rval,
+	      enc->write_bytes(w->writer, w->schema_buf, schema_len));
+	check(rval, enc->write_long(w->writer, 0));
+	return write_sync(w);
+}
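+
+/* The header written above follows the object container layout: the
+ * four magic bytes "Obj\1", a metadata map holding the "avro.codec"
+ * and "avro.schema" entries, a zero long terminating the map, and the
+ * 16-byte sync marker. */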
+
+static int
+file_writer_init_fp(FILE *fp, const char *path, int should_close, const char *mode, avro_file_writer_t w)
+{
+	if (!fp) {
+		fp = fopen(path, mode);
+	}
+
+	if (!fp) {
+		avro_set_error("Cannot open file %s", path);
+		return ENOMEM;
+	}
+	w->writer = avro_writer_file_fp(fp, should_close);
+	if (!w->writer) {
+		if (should_close) {
+			fclose(fp);
+		}
+		avro_set_error("Cannot create file writer for %s", path);
+		return ENOMEM;
+	}
+	return 0;
+}
+
+/* Exclusive file creation (the "x" mode modifier, standardized in C11
+ * and supported by glibc) prevents clobbering an existing file.  Win32
+ * does not support it, so on Win32 fall back to non-exclusive writing.
+ */
+#ifdef _WIN32
+  #define EXCLUSIVE_WRITE_MODE   "wb"
+#else
+  #define EXCLUSIVE_WRITE_MODE   "wbx"
+#endif
+
+static int
+file_writer_create(FILE *fp, const char *path, int should_close, avro_schema_t schema, avro_file_writer_t w, size_t block_size)
+{
+	int rval;
+
+	w->block_count = 0;
+	rval = file_writer_init_fp(fp, path, should_close, EXCLUSIVE_WRITE_MODE, w);
+	if (rval) {
+		check(rval, file_writer_init_fp(fp, path, should_close, "wb", w));
+	}
+
+	w->datum_buffer_size = block_size;
+	w->datum_buffer = (char *) avro_malloc(w->datum_buffer_size);
+
+	if(!w->datum_buffer) {
+		avro_set_error("Could not allocate datum buffer\n");
+		avro_writer_free(w->writer);
+		return ENOMEM;
+	}
+
+	w->datum_writer =
+	    avro_writer_memory(w->datum_buffer, w->datum_buffer_size);
+	if (!w->datum_writer) {
+		avro_set_error("Cannot create datum writer for file %s", path);
+		avro_writer_free(w->writer);
+		avro_free(w->datum_buffer, w->datum_buffer_size);
+		return ENOMEM;
+	}
+
+	w->writers_schema = avro_schema_incref(schema);
+	return write_header(w);
+}
+
+int
+avro_file_writer_create(const char *path, avro_schema_t schema,
+			avro_file_writer_t * writer)
+{
+	return avro_file_writer_create_with_codec_fp(NULL, path, 1, schema, writer, "null", 0);
+}
+
+int
+avro_file_writer_create_fp(FILE *fp, const char *path, int should_close, avro_schema_t schema,
+			avro_file_writer_t * writer)
+{
+	return avro_file_writer_create_with_codec_fp(fp, path, should_close, schema, writer, "null", 0);
+}
+
+int avro_file_writer_create_with_codec(const char *path,
+			avro_schema_t schema, avro_file_writer_t * writer,
+			const char *codec, size_t block_size)
+{
+	return avro_file_writer_create_with_codec_fp(NULL, path, 1, schema, writer, codec, block_size);
+}
+
+int avro_file_writer_create_with_codec_fp(FILE *fp, const char *path, int should_close,
+			avro_schema_t schema, avro_file_writer_t * writer,
+			const char *codec, size_t block_size)
+{
+	avro_file_writer_t w;
+	int rval;
+	check_param(EINVAL, path, "path");
+	check_param(EINVAL, is_avro_schema(schema), "schema");
+	check_param(EINVAL, writer, "writer");
+	check_param(EINVAL, codec, "codec");
+
+	if (block_size == 0) {
+		block_size = DEFAULT_BLOCK_SIZE;
+	}
+
+	w = (avro_file_writer_t) avro_new(struct avro_file_writer_t_);
+	if (!w) {
+		avro_set_error("Cannot allocate new file writer");
+		return ENOMEM;
+	}
+	w->codec = (avro_codec_t) avro_new(struct avro_codec_t_);
+	if (!w->codec) {
+		avro_set_error("Cannot allocate new codec");
+		avro_freet(struct avro_file_writer_t_, w);
+		return ENOMEM;
+	}
+	rval = avro_codec(w->codec, codec);
+	if (rval) {
+		avro_codec_reset(w->codec);
+		avro_freet(struct avro_codec_t_, w->codec);
+		avro_freet(struct avro_file_writer_t_, w);
+		return rval;
+	}
+	rval = file_writer_create(fp, path, should_close, schema, w, block_size);
+	if (rval) {
+		avro_codec_reset(w->codec);
+		avro_freet(struct avro_codec_t_, w->codec);
+		avro_freet(struct avro_file_writer_t_, w);
+		return rval;
+	}
+	*writer = w;
+
+	return 0;
+}
+
+static int file_read_header(avro_reader_t reader,
+			    avro_schema_t * writers_schema, avro_codec_t codec,
+			    char *sync, int synclen)
+{
+	int rval;
+	avro_schema_t meta_schema;
+	avro_schema_t meta_values_schema;
+	avro_value_iface_t *meta_iface;
+	avro_value_t meta;
+	char magic[4];
+	avro_value_t codec_val;
+	avro_value_t schema_bytes;
+	const void *p;
+	size_t len;
+
+	check(rval, avro_read(reader, magic, sizeof(magic)));
+	if (magic[0] != 'O' || magic[1] != 'b' || magic[2] != 'j'
+	    || magic[3] != 1) {
+		avro_set_error("Incorrect Avro container file magic number");
+		return EILSEQ;
+	}
+
+	meta_values_schema = avro_schema_bytes();
+	meta_schema = avro_schema_map(meta_values_schema);
+	meta_iface = avro_generic_class_from_schema(meta_schema);
+	if (meta_iface == NULL) {
+		return EILSEQ;
+	}
+	check(rval, avro_generic_value_new(meta_iface, &meta));
+	rval = avro_value_read(reader, &meta);
+	if (rval) {
+		avro_prefix_error("Cannot read file header: ");
+		return EILSEQ;
+	}
+	avro_schema_decref(meta_schema);
+
+	rval = avro_value_get_by_name(&meta, "avro.codec", &codec_val, NULL);
+	if (rval) {
+		if (avro_codec(codec, NULL) != 0) {
+			avro_set_error("Codec not specified in header and unable to set 'null' codec");
+			avro_value_decref(&meta);
+			return EILSEQ;
+		}
+	} else {
+		const void *buf;
+		size_t size;
+		char codec_name[11];
+
+		avro_type_t type = avro_value_get_type(&codec_val);
+
+		if (type != AVRO_BYTES) {
+			avro_set_error("Value type of codec is unexpected");
+			avro_value_decref(&meta);
+			return EILSEQ;
+		}
+
+		avro_value_get_bytes(&codec_val, &buf, &size);
+		memset(codec_name, 0, sizeof(codec_name));
+		strncpy(codec_name, (const char *) buf, size < 10 ? size : 10);
+
+		if (avro_codec(codec, codec_name) != 0) {
+			avro_set_error("File header contains an unknown codec");
+			avro_value_decref(&meta);
+			return EILSEQ;
+		}
+	}
+
+	rval = avro_value_get_by_name(&meta, "avro.schema", &schema_bytes, NULL);
+	if (rval) {
+		avro_set_error("File header doesn't contain a schema");
+		avro_value_decref(&meta);
+		return EILSEQ;
+	}
+
+	avro_value_get_bytes(&schema_bytes, &p, &len);
+	rval = avro_schema_from_json_length((const char *) p, len, writers_schema);
+	if (rval) {
+		avro_prefix_error("Cannot parse file header: ");
+		avro_value_decref(&meta);
+		return rval;
+	}
+
+	avro_value_decref(&meta);
+	avro_value_iface_decref(meta_iface);
+	return avro_read(reader, sync, synclen);
+}
+
+static int
+file_writer_open(const char *path, avro_file_writer_t w, size_t block_size)
+{
+	int rval;
+	FILE *fp;
+	avro_reader_t reader;
+
+	/* Open for read AND write */
+	fp = fopen(path, "r+b");
+	if (!fp) {
+		avro_set_error("Error opening file: %s",
+			       strerror(errno));
+		return errno;
+	}
+
+	/* Don't let the reader close the underlying file handle; it is
+	 * reused for writing below (and closing it would let logrotate
+	 * whisk the file out from under us). */
+	reader = avro_reader_file_fp(fp, 0);
+	if (!reader) {
+		fclose(fp);
+		avro_set_error("Cannot create file reader for %s", path);
+		return ENOMEM;
+	}
+	rval =
+	    file_read_header(reader, &w->writers_schema, w->codec, w->sync,
+			     sizeof(w->sync));
+
+	avro_reader_free(reader);
+	if (rval) {
+		fclose(fp);
+		return rval;
+	}
+
+	w->block_count = 0;
+
+	/* Position to end of file and get ready to write */
+	fseek(fp, 0, SEEK_END);
+
+	w->writer = avro_writer_file(fp);
+	if (!w->writer) {
+		fclose(fp);
+		avro_set_error("Cannot create file writer for %s", path);
+		return ENOMEM;
+	}
+
+	if (block_size == 0) {
+		block_size = DEFAULT_BLOCK_SIZE;
+	}
+
+	w->datum_buffer_size = block_size;
+	w->datum_buffer = (char *) avro_malloc(w->datum_buffer_size);
+
+	if(!w->datum_buffer) {
+		avro_set_error("Could not allocate datum buffer\n");
+		avro_writer_free(w->writer);
+		return ENOMEM;
+	}
+
+	w->datum_writer =
+	    avro_writer_memory(w->datum_buffer, w->datum_buffer_size);
+	if (!w->datum_writer) {
+		avro_set_error("Cannot create datum writer for file %s", path);
+		avro_writer_free(w->writer);
+		avro_free(w->datum_buffer, w->datum_buffer_size);
+		return ENOMEM;
+	}
+
+	return 0;
+}
+
+int
+avro_file_writer_open_bs(const char *path, avro_file_writer_t * writer,
+			 size_t block_size)
+{
+	avro_file_writer_t w;
+	int rval;
+	check_param(EINVAL, path, "path");
+	check_param(EINVAL, writer, "writer");
+
+	w = (avro_file_writer_t) avro_new(struct avro_file_writer_t_);
+	if (!w) {
+		avro_set_error("Cannot create new file writer for %s", path);
+		return ENOMEM;
+	}
+	w->codec = (avro_codec_t) avro_new(struct avro_codec_t_);
+	if (!w->codec) {
+		avro_set_error("Cannot allocate new codec");
+		avro_freet(struct avro_file_writer_t_, w);
+		return ENOMEM;
+	}
+	avro_codec(w->codec, NULL);
+	rval = file_writer_open(path, w, block_size);
+	if (rval) {
+		avro_codec_reset(w->codec);
+		avro_freet(struct avro_codec_t_, w->codec);
+		avro_freet(struct avro_file_writer_t_, w);
+		return rval;
+	}
+
+	*writer = w;
+	return 0;
+}
+
+int
+avro_file_writer_open(const char *path, avro_file_writer_t * writer)
+{
+	return avro_file_writer_open_bs(path, writer, 0);
+}
+
+static int file_read_block_count(avro_file_reader_t r)
+{
+	int rval;
+	int64_t len;
+	const avro_encoding_t *enc = &avro_binary_encoding;
+	check_prefix(rval, enc->read_long(r->reader, &r->blocks_total),
+		     "Cannot read file block count: ");
+	check_prefix(rval, enc->read_long(r->reader, &len),
+		     "Cannot read file block size: ");
+
+	if (r->current_blockdata && len > r->current_blocklen) {
+		r->current_blockdata = (char *) avro_realloc(r->current_blockdata, r->current_blocklen, len);
+		r->current_blocklen = len;
+	} else if (!r->current_blockdata) {
+		r->current_blockdata = (char *) avro_malloc(len);
+		r->current_blocklen = len;
+	}
+
+	check_prefix(rval, avro_read(r->reader, r->current_blockdata, len),
+		     "Cannot read file block: ");
+
+	check_prefix(rval, avro_codec_decode(r->codec, r->current_blockdata, len),
+		     "Cannot decode file block: ");
+
+	avro_reader_memory_set_source(r->block_reader, (const char *) r->codec->block_data, r->codec->used_size);
+
+	r->blocks_read = 0;
+	return 0;
+}
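+
+/* Each data block is framed as a long object count, a long byte
+ * length, the (possibly compressed) block data, and the 16-byte sync
+ * marker; the function above consumes everything up to the sync,
+ * which the read functions verify once the block is exhausted. */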
+
+int avro_file_reader_fp(FILE *fp, const char *path, int should_close,
+			avro_file_reader_t * reader)
+{
+	int rval;
+	avro_file_reader_t r = (avro_file_reader_t) avro_new(struct avro_file_reader_t_);
+	if (!r) {
+		if (should_close) {
+			fclose(fp);
+		}
+		avro_set_error("Cannot allocate file reader for %s", path);
+		return ENOMEM;
+	}
+
+	r->reader = avro_reader_file_fp(fp, should_close);
+	if (!r->reader) {
+		if (should_close) {
+			fclose(fp);
+		}
+		avro_set_error("Cannot allocate reader for file %s", path);
+		avro_freet(struct avro_file_reader_t_, r);
+		return ENOMEM;
+	}
+	r->block_reader = avro_reader_memory(0, 0);
+	if (!r->block_reader) {
+		avro_set_error("Cannot allocate block reader for file %s", path);
+		avro_reader_free(r->reader);
+		avro_freet(struct avro_file_reader_t_, r);
+		return ENOMEM;
+	}
+
+	r->codec = (avro_codec_t) avro_new(struct avro_codec_t_);
+	if (!r->codec) {
+		avro_set_error("Could not allocate codec for file %s", path);
+		avro_reader_free(r->reader);
+		avro_freet(struct avro_file_reader_t_, r);
+		return ENOMEM;
+	}
+	avro_codec(r->codec, NULL);
+
+	rval = file_read_header(r->reader, &r->writers_schema, r->codec,
+				r->sync, sizeof(r->sync));
+	if (rval) {
+		avro_reader_free(r->reader);
+		avro_codec_reset(r->codec);
+		avro_freet(struct avro_codec_t_, r->codec);
+		avro_freet(struct avro_file_reader_t_, r);
+		return rval;
+	}
+
+	r->current_blockdata = NULL;
+	r->current_blocklen = 0;
+
+	rval = file_read_block_count(r);
+	if (rval) {
+		avro_reader_free(r->reader);
+		avro_codec_reset(r->codec);
+		avro_freet(struct avro_codec_t_, r->codec);
+		avro_freet(struct avro_file_reader_t_, r);
+		return rval;
+	}
+
+	*reader = r;
+	return rval;
+}
+
+int avro_file_reader(const char *path, avro_file_reader_t * reader)
+{
+	FILE *fp;
+
+	fp = fopen(path, "rb");
+	if (!fp) {
+		return errno;
+	}
+
+	return avro_file_reader_fp(fp, path, 1, reader);
+}
+
+avro_schema_t
+avro_file_reader_get_writer_schema(avro_file_reader_t r)
+{
+	check_param(NULL, r, "reader");
+	return avro_schema_incref(r->writers_schema);
+}
+
+static int file_write_block(avro_file_writer_t w)
+{
+	const avro_encoding_t *enc = &avro_binary_encoding;
+	int rval;
+
+	if (w->block_count) {
+		/* Write the block count */
+		check_prefix(rval, enc->write_long(w->writer, w->block_count),
+			     "Cannot write file block count: ");
+		/* Encode the block */
+		check_prefix(rval, avro_codec_encode(w->codec, w->datum_buffer, w->block_size),
+			     "Cannot encode file block: ");
+		/* Write the block length */
+		check_prefix(rval, enc->write_long(w->writer, w->codec->used_size),
+			     "Cannot write file block size: ");
+		/* Write the block */
+		check_prefix(rval, avro_write(w->writer, w->codec->block_data, w->codec->used_size),
+			     "Cannot write file block: ");
+		/* Write the sync marker */
+		check_prefix(rval, write_sync(w),
+			     "Cannot write sync marker: ");
+		/* Reset the datum writer */
+		avro_writer_reset(w->datum_writer);
+		w->block_count = 0;
+		w->block_size = 0;
+	}
+	return 0;
+}
+
+int avro_file_writer_append(avro_file_writer_t w, avro_datum_t datum)
+{
+	int rval;
+	check_param(EINVAL, w, "writer");
+	check_param(EINVAL, datum, "datum");
+
+	rval = avro_write_data(w->datum_writer, w->writers_schema, datum);
+	if (rval) {
+		check(rval, file_write_block(w));
+		rval =
+		    avro_write_data(w->datum_writer, w->writers_schema, datum);
+		if (rval) {
+			avro_set_error("Datum too large for file block size");
+			/* TODO: if the encoded datum is larger than our
+			   buffer, just write it as a single large datum */
+			return rval;
+		}
+	}
+	w->block_count++;
+	w->block_size = avro_writer_tell(w->datum_writer);
+	return 0;
+}
+
+int
+avro_file_writer_append_value(avro_file_writer_t w, avro_value_t *value)
+{
+	int rval;
+	check_param(EINVAL, w, "writer");
+	check_param(EINVAL, value, "value");
+
+	rval = avro_value_write(w->datum_writer, value);
+	if (rval) {
+		check(rval, file_write_block(w));
+		rval = avro_value_write(w->datum_writer, value);
+		if (rval) {
+			avro_set_error("Value too large for file block size");
+			/* TODO: if the encoded value is larger than our
+			   buffer, just write it as a single large datum */
+			return rval;
+		}
+	}
+	w->block_count++;
+	w->block_size = avro_writer_tell(w->datum_writer);
+	return 0;
+}
+
+int
+avro_file_writer_append_encoded(avro_file_writer_t w,
+				const void *buf, int64_t len)
+{
+	int rval;
+	check_param(EINVAL, w, "writer");
+
+	rval = avro_write(w->datum_writer, (void *) buf, len);
+	if (rval) {
+		check(rval, file_write_block(w));
+		rval = avro_write(w->datum_writer, (void *) buf, len);
+		if (rval) {
+			avro_set_error("Value too large for file block size");
+			/* TODO: if the encoded value is larger than our
+			   buffer, just write it as a single large datum */
+			return rval;
+		}
+	}
+	w->block_count++;
+	w->block_size = avro_writer_tell(w->datum_writer);
+	return 0;
+}
+
+int avro_file_writer_sync(avro_file_writer_t w)
+{
+	return file_write_block(w);
+}
+
+int avro_file_writer_flush(avro_file_writer_t w)
+{
+	int rval;
+	check(rval, file_write_block(w));
+	avro_writer_flush(w->writer);
+	return 0;
+}
+
+int avro_file_writer_close(avro_file_writer_t w)
+{
+	int rval;
+	check(rval, avro_file_writer_flush(w));
+	avro_schema_decref(w->writers_schema);
+	avro_writer_free(w->datum_writer);
+	avro_writer_free(w->writer);
+	avro_free(w->datum_buffer, w->datum_buffer_size);
+	avro_codec_reset(w->codec);
+	avro_freet(struct avro_codec_t_, w->codec);
+	avro_freet(struct avro_file_writer_t_, w);
+	return 0;
+}
+
+int avro_file_reader_read(avro_file_reader_t r, avro_schema_t readers_schema,
+			  avro_datum_t * datum)
+{
+	int rval;
+	char sync[16];
+
+	check_param(EINVAL, r, "reader");
+	check_param(EINVAL, datum, "datum");
+
+	check(rval,
+	      avro_read_data(r->block_reader, r->writers_schema, readers_schema,
+			     datum));
+	r->blocks_read++;
+
+	if (r->blocks_read == r->blocks_total) {
+		check(rval, avro_read(r->reader, sync, sizeof(sync)));
+		if (memcmp(r->sync, sync, sizeof(r->sync)) != 0) {
+			/* wrong sync bytes */
+			avro_set_error("Incorrect sync bytes");
+			return EILSEQ;
+		}
+		/* For now, ignore errors (e.g. EOF) */
+		file_read_block_count(r);
+	}
+	return 0;
+}
+
+int
+avro_file_reader_read_value(avro_file_reader_t r, avro_value_t *value)
+{
+	int rval;
+	char sync[16];
+
+	check_param(EINVAL, r, "reader");
+	check_param(EINVAL, value, "value");
+
+	if (r->blocks_read == r->blocks_total) {
+		check(rval, avro_read(r->reader, sync, sizeof(sync)));
+		if (memcmp(r->sync, sync, sizeof(r->sync)) != 0) {
+			/* wrong sync bytes */
+			avro_set_error("Incorrect sync bytes");
+			return EILSEQ;
+		}
+
+		/* Did we just hit the end of the file? */
+		if (avro_reader_is_eof(r->reader))
+			return EOF;
+
+		check(rval, file_read_block_count(r));
+	}
+
+	check(rval, avro_value_read(r->block_reader, value));
+	r->blocks_read++;
+
+	return 0;
+}
+
+int avro_file_reader_close(avro_file_reader_t reader)
+{
+	avro_schema_decref(reader->writers_schema);
+	avro_reader_free(reader->reader);
+	avro_reader_free(reader->block_reader);
+	avro_codec_reset(reader->codec);
+	avro_freet(struct avro_codec_t_, reader->codec);
+	if (reader->current_blockdata) {
+		avro_free(reader->current_blockdata, reader->current_blocklen);
+	}
+	avro_freet(struct avro_file_reader_t_, reader);
+	return 0;
+}
diff --git a/lang/c/src/datum.c b/lang/c/src/datum.c
new file mode 100644
index 0000000..3197e3f
--- /dev/null
+++ b/lang/c/src/datum.c
@@ -0,0 +1,1255 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro/allocation.h"
+#include "avro/basics.h"
+#include "avro/errors.h"
+#include "avro/legacy.h"
+#include "avro/refcount.h"
+#include "avro/schema.h"
+#include "avro_private.h"
+#include <stdlib.h>
+#include <string.h>
+#include <errno.h>
+#include "datum.h"
+#include "schema.h"
+#include "encoding.h"
+
+#define DEFAULT_TABLE_SIZE 32
+
+static void avro_datum_init(avro_datum_t datum, avro_type_t type)
+{
+	datum->type = type;
+	datum->class_type = AVRO_DATUM;
+	avro_refcount_set(&datum->refcount, 1);
+}
+
+static void
+avro_str_free_wrapper(void *ptr, size_t sz)
+{
+	// don't need sz, since the size is stored in the string buffer
+	AVRO_UNUSED(sz);
+	avro_str_free((char *)ptr);
+}
+
+static avro_datum_t avro_string_private(char *str, int64_t size,
+					avro_free_func_t string_free)
+{
+	struct avro_string_datum_t *datum =
+	    (struct avro_string_datum_t *) avro_new(struct avro_string_datum_t);
+	if (!datum) {
+		avro_set_error("Cannot create new string datum");
+		return NULL;
+	}
+	datum->s = str;
+	datum->size = size;
+	datum->free = string_free;
+
+	avro_datum_init(&datum->obj, AVRO_STRING);
+	return &datum->obj;
+}
+
+avro_datum_t avro_string(const char *str)
+{
+	char *p = avro_strdup(str);
+	if (!p) {
+		avro_set_error("Cannot copy string content");
+		return NULL;
+	}
+	avro_datum_t s_datum = avro_string_private(p, 0, avro_str_free_wrapper);
+	if (!s_datum) {
+		avro_str_free(p);
+	}
+
+	return s_datum;
+}
+
+avro_datum_t avro_givestring(const char *str,
+			     avro_free_func_t free)
+{
+	int64_t  sz = strlen(str)+1;
+	return avro_string_private((char *)str, sz, free);
+}
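+
+/* Ownership convention: avro_string() copies its argument, while
+ * avro_givestring() takes ownership of the caller's buffer and later
+ * releases it with the supplied free function. */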
+
+int avro_string_get(avro_datum_t datum, char **p)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_string(datum), "string datum");
+	check_param(EINVAL, p, "string buffer");
+
+	*p = avro_datum_to_string(datum)->s;
+	return 0;
+}
+
+static int avro_string_set_private(avro_datum_t datum,
+	       			   const char *p, int64_t size,
+				   avro_free_func_t string_free)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_string(datum), "string datum");
+	check_param(EINVAL, p, "string content");
+
+	struct avro_string_datum_t *string = avro_datum_to_string(datum);
+	if (string->free) {
+		string->free(string->s, string->size);
+	}
+	string->free = string_free;
+	string->s = (char *)p;
+	string->size = size;
+	return 0;
+}
+
+int avro_string_set(avro_datum_t datum, const char *p)
+{
+	char *string_copy = avro_strdup(p);
+	int rval;
+	if (!string_copy) {
+		avro_set_error("Cannot copy string content");
+		return ENOMEM;
+	}
+	rval = avro_string_set_private(datum, string_copy, 0,
+				       avro_str_free_wrapper);
+	if (rval) {
+		avro_str_free(string_copy);
+	}
+	return rval;
+}
+
+int avro_givestring_set(avro_datum_t datum, const char *p,
+			avro_free_func_t free)
+{
+	int64_t  size = strlen(p)+1;
+	return avro_string_set_private(datum, p, size, free);
+}
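+
+/*
+ * Usage sketch (illustrative): the string API copies its input by
+ * default, while the "give" variants adopt the caller's buffer together
+ * with a matching free function.
+ *
+ *	avro_datum_t name = avro_string("Alice");	// private copy
+ *	char *contents;
+ *	if (name && avro_string_get(name, &contents) == 0) {
+ *		// contents is borrowed from the datum; don't free it
+ *	}
+ *	avro_string_set(name, "Bob");	// replaces the stored copy
+ *	avro_datum_decref(name);	// releases the buffer as well
+ */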
+
+static avro_datum_t avro_bytes_private(char *bytes, int64_t size,
+				       avro_free_func_t bytes_free)
+{
+	struct avro_bytes_datum_t *datum;
+	datum = (struct avro_bytes_datum_t *) avro_new(struct avro_bytes_datum_t);
+	if (!datum) {
+		avro_set_error("Cannot create new bytes datum");
+		return NULL;
+	}
+	datum->bytes = bytes;
+	datum->size = size;
+	datum->free = bytes_free;
+
+	avro_datum_init(&datum->obj, AVRO_BYTES);
+	return &datum->obj;
+}
+
+avro_datum_t avro_bytes(const char *bytes, int64_t size)
+{
+	char *bytes_copy = (char *) avro_malloc(size);
+	if (!bytes_copy) {
+		avro_set_error("Cannot copy bytes content");
+		return NULL;
+	}
+	memcpy(bytes_copy, bytes, size);
+	avro_datum_t  result =
+		avro_bytes_private(bytes_copy, size, avro_alloc_free_func);
+	if (result == NULL) {
+		avro_free(bytes_copy, size);
+	}
+	return result;
+}
+
+avro_datum_t avro_givebytes(const char *bytes, int64_t size,
+			    avro_free_func_t free)
+{
+	return avro_bytes_private((char *)bytes, size, free);
+}
+
+static int avro_bytes_set_private(avro_datum_t datum, const char *bytes,
+				  const int64_t size,
+				  avro_free_func_t bytes_free)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_bytes(datum), "bytes datum");
+
+	struct avro_bytes_datum_t *b = avro_datum_to_bytes(datum);
+	if (b->free) {
+		b->free(b->bytes, b->size);
+	}
+
+	b->free = bytes_free;
+	b->bytes = (char *)bytes;
+	b->size = size;
+	return 0;
+}
+
+int avro_bytes_set(avro_datum_t datum, const char *bytes, const int64_t size)
+{
+	int rval;
+	char *bytes_copy = (char *) avro_malloc(size);
+	if (!bytes_copy) {
+		avro_set_error("Cannot copy bytes content");
+		return ENOMEM;
+	}
+	memcpy(bytes_copy, bytes, size);
+	rval = avro_bytes_set_private(datum, bytes_copy, size, avro_alloc_free_func);
+	if (rval) {
+		avro_free(bytes_copy, size);
+	}
+	return rval;
+}
+
+int avro_givebytes_set(avro_datum_t datum, const char *bytes,
+		       const int64_t size, avro_free_func_t free)
+{
+	return avro_bytes_set_private(datum, bytes, size, free);
+}
+
+int avro_bytes_get(avro_datum_t datum, char **bytes, int64_t * size)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_bytes(datum), "bytes datum");
+	check_param(EINVAL, bytes, "bytes");
+	check_param(EINVAL, size, "size");
+
+	*bytes = avro_datum_to_bytes(datum)->bytes;
+	*size = avro_datum_to_bytes(datum)->size;
+	return 0;
+}
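+
+/*
+ * Usage sketch (illustrative): avro_bytes() keeps a private copy of the
+ * buffer, avro_givebytes() takes ownership of it, and avro_bytes_get()
+ * returns a borrowed pointer that is only valid while the datum still
+ * owns the buffer.
+ *
+ *	char raw[] = { 1, 2, 3, 4 };
+ *	avro_datum_t blob = avro_bytes(raw, sizeof(raw));
+ *	char *p;
+ *	int64_t len;
+ *	if (blob && avro_bytes_get(blob, &p, &len) == 0) {
+ *		// p/len describe the datum's copy, not raw[]
+ *	}
+ *	avro_datum_decref(blob);
+ */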
+
+avro_datum_t avro_int32(int32_t i)
+{
+	struct avro_int32_datum_t *datum =
+	    (struct avro_int32_datum_t *) avro_new(struct avro_int32_datum_t);
+	if (!datum) {
+		avro_set_error("Cannot create new int datum");
+		return NULL;
+	}
+	datum->i32 = i;
+
+	avro_datum_init(&datum->obj, AVRO_INT32);
+	return &datum->obj;
+}
+
+int avro_int32_get(avro_datum_t datum, int32_t * i)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_int32(datum), "int datum");
+	check_param(EINVAL, i, "value pointer");
+
+	*i = avro_datum_to_int32(datum)->i32;
+	return 0;
+}
+
+int avro_int32_set(avro_datum_t datum, const int32_t i)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_int32(datum), "int datum");
+
+	avro_datum_to_int32(datum)->i32 = i;
+	return 0;
+}
+
+avro_datum_t avro_int64(int64_t l)
+{
+	struct avro_int64_datum_t *datum =
+	    (struct avro_int64_datum_t *) avro_new(struct avro_int64_datum_t);
+	if (!datum) {
+		avro_set_error("Cannot create new long datum");
+		return NULL;
+	}
+	datum->i64 = l;
+
+	avro_datum_init(&datum->obj, AVRO_INT64);
+	return &datum->obj;
+}
+
+int avro_int64_get(avro_datum_t datum, int64_t * l)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_int64(datum), "long datum");
+	check_param(EINVAL, l, "value pointer");
+
+	*l = avro_datum_to_int64(datum)->i64;
+	return 0;
+}
+
+int avro_int64_set(avro_datum_t datum, const int64_t l)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_int64(datum), "long datum");
+
+	avro_datum_to_int64(datum)->i64 = l;
+	return 0;
+}
+
+avro_datum_t avro_float(float f)
+{
+	struct avro_float_datum_t *datum =
+	    (struct avro_float_datum_t *) avro_new(struct avro_float_datum_t);
+	if (!datum) {
+		avro_set_error("Cannot create new float datum");
+		return NULL;
+	}
+	datum->f = f;
+
+	avro_datum_init(&datum->obj, AVRO_FLOAT);
+	return &datum->obj;
+}
+
+int avro_float_set(avro_datum_t datum, const float f)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_float(datum), "float datum");
+
+	avro_datum_to_float(datum)->f = f;
+	return 0;
+}
+
+int avro_float_get(avro_datum_t datum, float *f)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_float(datum), "float datum");
+	check_param(EINVAL, f, "value pointer");
+
+	*f = avro_datum_to_float(datum)->f;
+	return 0;
+}
+
+avro_datum_t avro_double(double d)
+{
+	struct avro_double_datum_t *datum =
+	    (struct avro_double_datum_t *) avro_new(struct avro_double_datum_t);
+	if (!datum) {
+		avro_set_error("Cannot create new double datum");
+		return NULL;
+	}
+	datum->d = d;
+
+	avro_datum_init(&datum->obj, AVRO_DOUBLE);
+	return &datum->obj;
+}
+
+int avro_double_set(avro_datum_t datum, const double d)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_double(datum), "double datum");
+
+	avro_datum_to_double(datum)->d = d;
+	return 0;
+}
+
+int avro_double_get(avro_datum_t datum, double *d)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_double(datum), "double datum");
+	check_param(EINVAL, d, "value pointer");
+
+	*d = avro_datum_to_double(datum)->d;
+	return 0;
+}
+
+avro_datum_t avro_boolean(int8_t i)
+{
+	struct avro_boolean_datum_t *datum =
+	    (struct avro_boolean_datum_t *) avro_new(struct avro_boolean_datum_t);
+	if (!datum) {
+		avro_set_error("Cannot create new boolean datum");
+		return NULL;
+	}
+	datum->i = i;
+	avro_datum_init(&datum->obj, AVRO_BOOLEAN);
+	return &datum->obj;
+}
+
+int avro_boolean_set(avro_datum_t datum, const int8_t i)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_boolean(datum), "boolean datum");
+
+	avro_datum_to_boolean(datum)->i = i;
+	return 0;
+}
+
+int avro_boolean_get(avro_datum_t datum, int8_t * i)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_boolean(datum), "boolean datum");
+	check_param(EINVAL, i, "value pointer");
+
+	*i = avro_datum_to_boolean(datum)->i;
+	return 0;
+}
+
+avro_datum_t avro_null(void)
+{
+	static struct avro_obj_t obj = {
+		AVRO_NULL,
+		AVRO_DATUM,
+		1
+	};
+	return avro_datum_incref(&obj);
+}
+
+avro_datum_t avro_union(avro_schema_t schema,
+			int64_t discriminant, avro_datum_t value)
+{
+	check_param(NULL, is_avro_schema(schema), "schema");
+
+	struct avro_union_datum_t *datum =
+	    (struct avro_union_datum_t *) avro_new(struct avro_union_datum_t);
+	if (!datum) {
+		avro_set_error("Cannot create new union datum");
+		return NULL;
+	}
+	datum->schema = avro_schema_incref(schema);
+	datum->discriminant = discriminant;
+	datum->value = avro_datum_incref(value);
+
+	avro_datum_init(&datum->obj, AVRO_UNION);
+	return &datum->obj;
+}
+
+int64_t avro_union_discriminant(const avro_datum_t datum)
+{
+	return avro_datum_to_union(datum)->discriminant;
+}
+
+avro_datum_t avro_union_current_branch(avro_datum_t datum)
+{
+	return avro_datum_to_union(datum)->value;
+}
+
+int avro_union_set_discriminant(avro_datum_t datum,
+				int discriminant,
+				avro_datum_t *branch)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_union(datum), "union datum");
+
+	struct avro_union_datum_t  *unionp = avro_datum_to_union(datum);
+
+	avro_schema_t  schema = unionp->schema;
+	avro_schema_t  branch_schema =
+	    avro_schema_union_branch(schema, discriminant);
+
+	if (branch_schema == NULL) {
+		// That branch doesn't exist!
+		avro_set_error("Branch %d doesn't exist", discriminant);
+		return EINVAL;
+	}
+
+	if (unionp->discriminant != discriminant) {
+		// If we're changing the branch, throw away any old
+		// branch value.
+		if (unionp->value != NULL) {
+			avro_datum_decref(unionp->value);
+			unionp->value = NULL;
+		}
+
+		unionp->discriminant = discriminant;
+	}
+
+	// Create a new branch value, if there isn't one already.
+	if (unionp->value == NULL) {
+		unionp->value = avro_datum_from_schema(branch_schema);
+	}
+
+	if (branch != NULL) {
+		*branch = unionp->value;
+	}
+
+	return 0;
+}
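+
+/*
+ * Usage sketch (illustrative; `u` is an assumed union datum whose
+ * branch 0 is an "int" schema): selecting a branch lazily creates a
+ * datum for it and hands it back as a borrowed pointer.
+ *
+ *	avro_datum_t branch;
+ *	if (avro_union_set_discriminant(u, 0, &branch) == 0) {
+ *		avro_int32_set(branch, 42);	// borrowed; no decref
+ *	}
+ */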
+
+avro_datum_t avro_record(avro_schema_t schema)
+{
+	check_param(NULL, is_avro_schema(schema), "schema");
+
+	struct avro_record_datum_t *datum =
+	    (struct avro_record_datum_t *) avro_new(struct avro_record_datum_t);
+	if (!datum) {
+		avro_set_error("Cannot create new record datum");
+		return NULL;
+	}
+	datum->field_order = st_init_numtable_with_size(DEFAULT_TABLE_SIZE);
+	if (!datum->field_order) {
+		avro_set_error("Cannot create new record datum");
+		avro_freet(struct avro_record_datum_t, datum);
+		return NULL;
+	}
+	datum->fields_byname = st_init_strtable_with_size(DEFAULT_TABLE_SIZE);
+	if (!datum->fields_byname) {
+		avro_set_error("Cannot create new record datum");
+		st_free_table(datum->field_order);
+		avro_freet(struct avro_record_datum_t, datum);
+		return NULL;
+	}
+
+	datum->schema = avro_schema_incref(schema);
+	avro_datum_init(&datum->obj, AVRO_RECORD);
+	return &datum->obj;
+}
+
+int
+avro_record_get(const avro_datum_t datum, const char *field_name,
+		avro_datum_t * field)
+{
+	union {
+		avro_datum_t field;
+		st_data_t data;
+	} val;
+	if (is_avro_datum(datum) && is_avro_record(datum) && field_name) {
+		if (st_lookup
+		    (avro_datum_to_record(datum)->fields_byname,
+		     (st_data_t) field_name, &(val.data))) {
+			*field = val.field;
+			return 0;
+		}
+	}
+	avro_set_error("No field named %s", field_name);
+	return EINVAL;
+}
+
+int
+avro_record_set(avro_datum_t datum, const char *field_name,
+		const avro_datum_t field_value)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_record(datum), "record datum");
+	check_param(EINVAL, field_name, "field_name");
+
+	char *key = (char *)field_name;
+	avro_datum_t old_field;
+
+	if (avro_record_get(datum, field_name, &old_field) == 0) {
+		/* Overriding old value */
+		avro_datum_decref(old_field);
+	} else {
+		/* Inserting new value */
+		struct avro_record_datum_t *record =
+		    avro_datum_to_record(datum);
+		key = avro_strdup(field_name);
+		if (!key) {
+			avro_set_error("Cannot copy field name");
+			return ENOMEM;
+		}
+		st_insert(record->field_order,
+			  record->field_order->num_entries,
+			  (st_data_t) key);
+	}
+	avro_datum_incref(field_value);
+	st_insert(avro_datum_to_record(datum)->fields_byname,
+		  (st_data_t) key, (st_data_t) field_value);
+	return 0;
+}
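+
+/*
+ * Usage sketch (illustrative; `person_schema` is an assumed record
+ * schema with an "age" int field): avro_record_set() takes its own
+ * reference on the value, and avro_record_get() returns a borrowed one.
+ *
+ *	avro_datum_t person = avro_record(person_schema);
+ *	avro_datum_t age = avro_int32(30);
+ *	avro_record_set(person, "age", age);
+ *	avro_datum_decref(age);		// record keeps its reference
+ *	avro_datum_t out;
+ *	avro_record_get(person, "age", &out);	// borrowed; no decref
+ *	avro_datum_decref(person);
+ */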
+
+avro_datum_t avro_enum(avro_schema_t schema, int i)
+{
+	check_param(NULL, is_avro_schema(schema), "schema");
+
+	struct avro_enum_datum_t *datum =
+	    (struct avro_enum_datum_t *) avro_new(struct avro_enum_datum_t);
+	if (!datum) {
+		avro_set_error("Cannot create new enum datum");
+		return NULL;
+	}
+	datum->schema = avro_schema_incref(schema);
+	datum->value = i;
+
+	avro_datum_init(&datum->obj, AVRO_ENUM);
+	return &datum->obj;
+}
+
+int avro_enum_get(const avro_datum_t datum)
+{
+	return avro_datum_to_enum(datum)->value;
+}
+
+const char *avro_enum_get_name(const avro_datum_t datum)
+{
+	int  value = avro_enum_get(datum);
+	avro_schema_t  schema = avro_datum_to_enum(datum)->schema;
+	return avro_schema_enum_get(schema, value);
+}
+
+int avro_enum_set(avro_datum_t datum, const int symbol_value)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_enum(datum), "enum datum");
+
+	avro_datum_to_enum(datum)->value = symbol_value;
+	return 0;
+}
+
+int avro_enum_set_name(avro_datum_t datum, const char *symbol_name)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_enum(datum), "enum datum");
+	check_param(EINVAL, symbol_name, "symbol name");
+
+	avro_schema_t  schema = avro_datum_to_enum(datum)->schema;
+	int  symbol_value = avro_schema_enum_get_by_name(schema, symbol_name);
+	if (symbol_value == -1) {
+		avro_set_error("No symbol named %s", symbol_name);
+		return EINVAL;
+	}
+	avro_datum_to_enum(datum)->value = symbol_value;
+	return 0;
+}
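+
+/*
+ * Usage sketch (illustrative; `suit_schema` is an assumed enum schema):
+ * an enum value can be set by ordinal or by symbol name, and an unknown
+ * name fails with EINVAL.
+ *
+ *	avro_datum_t card = avro_enum(suit_schema, 0);
+ *	if (avro_enum_set_name(card, "HEARTS") == 0) {
+ *		const char *sym = avro_enum_get_name(card);	// "HEARTS"
+ *	}
+ *	avro_datum_decref(card);
+ */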
+
+static avro_datum_t avro_fixed_private(avro_schema_t schema,
+				       const char *bytes, const int64_t size,
+				       avro_free_func_t fixed_free)
+{
+	check_param(NULL, is_avro_schema(schema), "schema");
+	struct avro_fixed_schema_t *fschema = avro_schema_to_fixed(schema);
+	if (size != fschema->size) {
+		if (fixed_free) {
+			/* dispose with the caller's deallocator, not avro_free */
+			fixed_free((char *) bytes, size);
+		}
+		avro_set_error("Fixed size (%zu) doesn't match schema (%zu)",
+			       (size_t) size, (size_t) fschema->size);
+		return NULL;
+	}
+
+	struct avro_fixed_datum_t *datum =
+	    (struct avro_fixed_datum_t *) avro_new(struct avro_fixed_datum_t);
+	if (!datum) {
+		if (fixed_free) {
+			fixed_free((char *) bytes, size);
+		}
+		avro_set_error("Cannot create new fixed datum");
+		return NULL;
+	}
+	datum->schema = avro_schema_incref(schema);
+	datum->size = size;
+	datum->bytes = (char *)bytes;
+	datum->free = fixed_free;
+
+	avro_datum_init(&datum->obj, AVRO_FIXED);
+	return &datum->obj;
+}
+
+avro_datum_t avro_fixed(avro_schema_t schema,
+			const char *bytes, const int64_t size)
+{
+	char *bytes_copy = (char *) avro_malloc(size);
+	if (!bytes_copy) {
+		avro_set_error("Cannot copy fixed content");
+		return NULL;
+	}
+	memcpy(bytes_copy, bytes, size);
+	return avro_fixed_private(schema, bytes_copy, size, avro_alloc_free_func);
+}
+
+avro_datum_t avro_givefixed(avro_schema_t schema,
+			    const char *bytes, const int64_t size,
+			    avro_free_func_t free)
+{
+	return avro_fixed_private(schema, bytes, size, free);
+}
+
+static int avro_fixed_set_private(avro_datum_t datum,
+				  const char *bytes, const int64_t size,
+				  avro_free_func_t fixed_free)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_fixed(datum), "fixed datum");
+
+	struct avro_fixed_datum_t *fixed = avro_datum_to_fixed(datum);
+	struct avro_fixed_schema_t *schema = avro_schema_to_fixed(fixed->schema);
+	if (size != schema->size) {
+		avro_set_error("Fixed size doesn't match schema");
+		return EINVAL;
+	}
+
+	if (fixed->free) {
+		fixed->free(fixed->bytes, fixed->size);
+	}
+
+	fixed->free = fixed_free;
+	fixed->bytes = (char *)bytes;
+	fixed->size = size;
+	return 0;
+}
+
+int avro_fixed_set(avro_datum_t datum, const char *bytes, const int64_t size)
+{
+	int rval;
+	char *bytes_copy = (char *) avro_malloc(size);
+	if (!bytes_copy) {
+		avro_set_error("Cannot copy fixed content");
+		return ENOMEM;
+	}
+	memcpy(bytes_copy, bytes, size);
+	rval = avro_fixed_set_private(datum, bytes_copy, size, avro_alloc_free_func);
+	if (rval) {
+		avro_free(bytes_copy, size);
+	}
+	return rval;
+}
+
+int avro_givefixed_set(avro_datum_t datum, const char *bytes,
+		       const int64_t size, avro_free_func_t free)
+{
+	return avro_fixed_set_private(datum, bytes, size, free);
+}
+
+int avro_fixed_get(avro_datum_t datum, char **bytes, int64_t * size)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_fixed(datum), "fixed datum");
+	check_param(EINVAL, bytes, "bytes");
+	check_param(EINVAL, size, "size");
+
+	*bytes = avro_datum_to_fixed(datum)->bytes;
+	*size = avro_datum_to_fixed(datum)->size;
+	return 0;
+}
+
+static int
+avro_init_map(struct avro_map_datum_t *datum)
+{
+	datum->map = st_init_strtable_with_size(DEFAULT_TABLE_SIZE);
+	if (!datum->map) {
+		avro_set_error("Cannot create new map datum");
+		return ENOMEM;
+	}
+	datum->indices_by_key = st_init_strtable_with_size(DEFAULT_TABLE_SIZE);
+	if (!datum->indices_by_key) {
+		avro_set_error("Cannot create new map datum");
+		st_free_table(datum->map);
+		return ENOMEM;
+	}
+	datum->keys_by_index = st_init_numtable_with_size(DEFAULT_TABLE_SIZE);
+	if (!datum->keys_by_index) {
+		avro_set_error("Cannot create new map datum");
+		st_free_table(datum->indices_by_key);
+		st_free_table(datum->map);
+		return ENOMEM;
+	}
+	return 0;
+}
+
+avro_datum_t avro_map(avro_schema_t schema)
+{
+	check_param(NULL, is_avro_schema(schema), "schema");
+
+	struct avro_map_datum_t *datum =
+	    (struct avro_map_datum_t *) avro_new(struct avro_map_datum_t);
+	if (!datum) {
+		avro_set_error("Cannot create new map datum");
+		return NULL;
+	}
+
+	if (avro_init_map(datum) != 0) {
+		avro_freet(struct avro_map_datum_t, datum);
+		return NULL;
+	}
+
+	datum->schema = avro_schema_incref(schema);
+	avro_datum_init(&datum->obj, AVRO_MAP);
+	return &datum->obj;
+}
+
+size_t
+avro_map_size(const avro_datum_t datum)
+{
+	const struct avro_map_datum_t  *map = avro_datum_to_map(datum);
+	return map->map->num_entries;
+}
+
+int
+avro_map_get(const avro_datum_t datum, const char *key, avro_datum_t * value)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_map(datum), "map datum");
+	check_param(EINVAL, key, "key");
+	check_param(EINVAL, value, "value");
+
+	union {
+		avro_datum_t datum;
+		st_data_t data;
+	} val;
+
+	struct avro_map_datum_t *map = avro_datum_to_map(datum);
+	if (st_lookup(map->map, (st_data_t) key, &(val.data))) {
+		*value = val.datum;
+		return 0;
+	}
+
+	avro_set_error("No map element named %s", key);
+	return EINVAL;
+}
+
+int avro_map_get_key(const avro_datum_t datum, int index,
+		     const char **key)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_map(datum), "map datum");
+	check_param(EINVAL, index >= 0, "index");
+	check_param(EINVAL, key, "key");
+
+	union {
+		st_data_t data;
+		char *key;
+	} val;
+
+	struct avro_map_datum_t *map = avro_datum_to_map(datum);
+	if (st_lookup(map->keys_by_index, (st_data_t) index, &val.data)) {
+		*key = val.key;
+		return 0;
+	}
+
+	avro_set_error("No map element with index %d", index);
+	return EINVAL;
+}
+
+int avro_map_get_index(const avro_datum_t datum, const char *key,
+		       int *index)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_map(datum), "map datum");
+	check_param(EINVAL, key, "key");
+	check_param(EINVAL, index, "index");
+
+	st_data_t  data;
+
+	struct avro_map_datum_t *map = avro_datum_to_map(datum);
+	if (st_lookup(map->indices_by_key, (st_data_t) key, &data)) {
+		*index = (int) data;
+		return 0;
+	}
+
+	avro_set_error("No map element with key %s", key);
+	return EINVAL;
+}
+
+int
+avro_map_set(avro_datum_t datum, const char *key,
+	     const avro_datum_t value)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	check_param(EINVAL, is_avro_map(datum), "map datum");
+	check_param(EINVAL, key, "key");
+	check_param(EINVAL, is_avro_datum(value), "value");
+
+	char *save_key = (char *)key;
+	avro_datum_t old_datum;
+
+	struct avro_map_datum_t  *map = avro_datum_to_map(datum);
+
+	if (avro_map_get(datum, key, &old_datum) == 0) {
+		/* Overwriting an old value */
+		avro_datum_decref(old_datum);
+	} else {
+		/* Inserting a new value */
+		save_key = avro_strdup(key);
+		if (!save_key) {
+			avro_set_error("Cannot copy map key");
+			return ENOMEM;
+		}
+		int  new_index = map->map->num_entries;
+		st_insert(map->indices_by_key, (st_data_t) save_key,
+			  (st_data_t) new_index);
+		st_insert(map->keys_by_index, (st_data_t) new_index,
+			  (st_data_t) save_key);
+	}
+	avro_datum_incref(value);
+	st_insert(map->map, (st_data_t) save_key, (st_data_t) value);
+	return 0;
+}
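+
+/*
+ * Usage sketch (illustrative; `map_schema` is an assumed map-of-int
+ * schema): keys are copied on first insert and also indexed by
+ * insertion order, so entries can be visited by key or by index.
+ *
+ *	avro_datum_t m = avro_map(map_schema);
+ *	avro_datum_t v = avro_int32(1);
+ *	avro_map_set(m, "one", v);
+ *	avro_datum_decref(v);		// map holds its own reference
+ *	const char *key;
+ *	avro_map_get_key(m, 0, &key);	// key == "one", borrowed
+ *	avro_datum_decref(m);
+ */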
+
+static int
+avro_init_array(struct avro_array_datum_t *datum)
+{
+	datum->els = st_init_numtable_with_size(DEFAULT_TABLE_SIZE);
+	if (!datum->els) {
+		avro_set_error("Cannot create new array datum");
+		return ENOMEM;
+	}
+	return 0;
+}
+
+avro_datum_t avro_array(avro_schema_t schema)
+{
+	check_param(NULL, is_avro_schema(schema), "schema");
+
+	struct avro_array_datum_t *datum =
+	    (struct avro_array_datum_t *) avro_new(struct avro_array_datum_t);
+	if (!datum) {
+		avro_set_error("Cannot create new array datum");
+		return NULL;
+	}
+
+	if (avro_init_array(datum) != 0) {
+		avro_freet(struct avro_array_datum_t, datum);
+		return NULL;
+	}
+
+	datum->schema = avro_schema_incref(schema);
+	avro_datum_init(&datum->obj, AVRO_ARRAY);
+	return &datum->obj;
+}
+
+int
+avro_array_get(const avro_datum_t array_datum, int64_t index, avro_datum_t * value)
+{
+	check_param(EINVAL, is_avro_datum(array_datum), "datum");
+	check_param(EINVAL, is_avro_array(array_datum), "array datum");
+	check_param(EINVAL, value, "value pointer");
+
+	union {
+		st_data_t data;
+		avro_datum_t datum;
+	} val;
+
+	const struct avro_array_datum_t *array = avro_datum_to_array(array_datum);
+	if (st_lookup(array->els, index, &val.data)) {
+		*value = val.datum;
+		return 0;
+	}
+
+	avro_set_error("No array element with index %ld", (long) index);
+	return EINVAL;
+}
+
+size_t
+avro_array_size(const avro_datum_t datum)
+{
+	const struct avro_array_datum_t  *array = avro_datum_to_array(datum);
+	return array->els->num_entries;
+}
+
+int
+avro_array_append_datum(avro_datum_t array_datum,
+			const avro_datum_t datum)
+{
+	check_param(EINVAL, is_avro_datum(array_datum), "datum");
+	check_param(EINVAL, is_avro_array(array_datum), "array datum");
+	check_param(EINVAL, is_avro_datum(datum), "element datum");
+
+	struct avro_array_datum_t *array = avro_datum_to_array(array_datum);
+	st_insert(array->els, array->els->num_entries,
+		  (st_data_t) avro_datum_incref(datum));
+	return 0;
+}
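+
+/*
+ * Usage sketch (illustrative; `array_schema` is an assumed
+ * array-of-long schema): appending takes a reference on the element,
+ * and avro_array_get() returns a borrowed pointer by index.
+ *
+ *	avro_datum_t a = avro_array(array_schema);
+ *	avro_datum_t el = avro_int64(7);
+ *	avro_array_append_datum(a, el);
+ *	avro_datum_decref(el);		// array keeps its reference
+ *	avro_datum_t out;
+ *	avro_array_get(a, 0, &out);	// borrowed; no decref
+ *	avro_datum_decref(a);
+ */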
+
+static int char_datum_free_foreach(char *key, avro_datum_t datum, void *arg)
+{
+	AVRO_UNUSED(arg);
+
+	avro_datum_decref(datum);
+	avro_str_free(key);
+	return ST_DELETE;
+}
+
+static int array_free_foreach(int i, avro_datum_t datum, void *arg)
+{
+	AVRO_UNUSED(i);
+	AVRO_UNUSED(arg);
+
+	avro_datum_decref(datum);
+	return ST_DELETE;
+}
+
+avro_schema_t avro_datum_get_schema(const avro_datum_t datum)
+{
+	check_param(NULL, is_avro_datum(datum), "datum");
+
+	switch (avro_typeof(datum)) {
+		/*
+		 * For the primitive types, which don't store an
+		 * explicit reference to their schema, we decref the
+		 * schema before returning.  This maintains the
+		 * invariant that this function doesn't add any
+		 * additional references to the schema.  The primitive
+		 * schemas won't be freed, because there's always at
+		 * least 1 reference for their initial static
+		 * initializers.
+		 */
+
+		case AVRO_STRING:
+			{
+				avro_schema_t  result = avro_schema_string();
+				avro_schema_decref(result);
+				return result;
+			}
+		case AVRO_BYTES:
+			{
+				avro_schema_t  result = avro_schema_bytes();
+				avro_schema_decref(result);
+				return result;
+			}
+		case AVRO_INT32:
+			{
+				avro_schema_t  result = avro_schema_int();
+				avro_schema_decref(result);
+				return result;
+			}
+		case AVRO_INT64:
+			{
+				avro_schema_t  result = avro_schema_long();
+				avro_schema_decref(result);
+				return result;
+			}
+		case AVRO_FLOAT:
+			{
+				avro_schema_t  result = avro_schema_float();
+				avro_schema_decref(result);
+				return result;
+			}
+		case AVRO_DOUBLE:
+			{
+				avro_schema_t  result = avro_schema_double();
+				avro_schema_decref(result);
+				return result;
+			}
+		case AVRO_BOOLEAN:
+			{
+				avro_schema_t  result = avro_schema_boolean();
+				avro_schema_decref(result);
+				return result;
+			}
+		case AVRO_NULL:
+			{
+				avro_schema_t  result = avro_schema_null();
+				avro_schema_decref(result);
+				return result;
+			}
+
+		case AVRO_RECORD:
+			return avro_datum_to_record(datum)->schema;
+		case AVRO_ENUM:
+			return avro_datum_to_enum(datum)->schema;
+		case AVRO_FIXED:
+			return avro_datum_to_fixed(datum)->schema;
+		case AVRO_MAP:
+			return avro_datum_to_map(datum)->schema;
+		case AVRO_ARRAY:
+			return avro_datum_to_array(datum)->schema;
+		case AVRO_UNION:
+			return avro_datum_to_union(datum)->schema;
+
+		default:
+			return NULL;
+	}
+}
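+
+/*
+ * Note (illustrative): because of the decref above for the primitive
+ * cases, the schema returned here is always borrowed.  A caller that
+ * wants to keep it beyond the datum's lifetime must take its own
+ * reference:
+ *
+ *	avro_schema_t s = avro_datum_get_schema(datum);	// borrowed
+ *	avro_schema_incref(s);	// only if keeping it past the datum
+ */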
+
+static void avro_datum_free(avro_datum_t datum)
+{
+	if (is_avro_datum(datum)) {
+		switch (avro_typeof(datum)) {
+		case AVRO_STRING:{
+				struct avro_string_datum_t *string;
+				string = avro_datum_to_string(datum);
+				if (string->free) {
+					string->free(string->s, string->size);
+				}
+				avro_freet(struct avro_string_datum_t, string);
+			}
+			break;
+		case AVRO_BYTES:{
+				struct avro_bytes_datum_t *bytes;
+				bytes = avro_datum_to_bytes(datum);
+				if (bytes->free) {
+					bytes->free(bytes->bytes, bytes->size);
+				}
+				avro_freet(struct avro_bytes_datum_t, bytes);
+			}
+			break;
+		case AVRO_INT32:{
+				avro_freet(struct avro_int32_datum_t, datum);
+			}
+			break;
+		case AVRO_INT64:{
+				avro_freet(struct avro_int64_datum_t, datum);
+			}
+			break;
+		case AVRO_FLOAT:{
+				avro_freet(struct avro_float_datum_t, datum);
+			}
+			break;
+		case AVRO_DOUBLE:{
+				avro_freet(struct avro_double_datum_t, datum);
+			}
+			break;
+		case AVRO_BOOLEAN:{
+				avro_freet(struct avro_boolean_datum_t, datum);
+			}
+			break;
+		case AVRO_NULL:
+			/* Nothing allocated */
+			break;
+
+		case AVRO_RECORD:{
+				struct avro_record_datum_t *record;
+				record = avro_datum_to_record(datum);
+				avro_schema_decref(record->schema);
+				st_foreach(record->fields_byname,
+					   HASH_FUNCTION_CAST char_datum_free_foreach, 0);
+				st_free_table(record->field_order);
+				st_free_table(record->fields_byname);
+				avro_freet(struct avro_record_datum_t, record);
+			}
+			break;
+		case AVRO_ENUM:{
+				struct avro_enum_datum_t *enump;
+				enump = avro_datum_to_enum(datum);
+				avro_schema_decref(enump->schema);
+				avro_freet(struct avro_enum_datum_t, enump);
+			}
+			break;
+		case AVRO_FIXED:{
+				struct avro_fixed_datum_t *fixed;
+				fixed = avro_datum_to_fixed(datum);
+				avro_schema_decref(fixed->schema);
+				if (fixed->free) {
+					fixed->free((void *)fixed->bytes,
+						    fixed->size);
+				}
+				avro_freet(struct avro_fixed_datum_t, fixed);
+			}
+			break;
+		case AVRO_MAP:{
+				struct avro_map_datum_t *map;
+				map = avro_datum_to_map(datum);
+				avro_schema_decref(map->schema);
+				st_foreach(map->map, HASH_FUNCTION_CAST char_datum_free_foreach,
+					   0);
+				st_free_table(map->map);
+				st_free_table(map->indices_by_key);
+				st_free_table(map->keys_by_index);
+				avro_freet(struct avro_map_datum_t, map);
+			}
+			break;
+		case AVRO_ARRAY:{
+				struct avro_array_datum_t *array;
+				array = avro_datum_to_array(datum);
+				avro_schema_decref(array->schema);
+				st_foreach(array->els, HASH_FUNCTION_CAST array_free_foreach, 0);
+				st_free_table(array->els);
+				avro_freet(struct avro_array_datum_t, array);
+			}
+			break;
+		case AVRO_UNION:{
+				struct avro_union_datum_t *unionp;
+				unionp = avro_datum_to_union(datum);
+				avro_schema_decref(unionp->schema);
+				avro_datum_decref(unionp->value);
+				avro_freet(struct avro_union_datum_t, unionp);
+			}
+			break;
+		case AVRO_LINK:{
+				/* TODO */
+			}
+			break;
+		}
+	}
+}
+
+static int
+datum_reset_foreach(int i, avro_datum_t datum, void *arg)
+{
+	AVRO_UNUSED(i);
+	int  rval;
+	int  *result = (int *) arg;
+
+	rval = avro_datum_reset(datum);
+	if (rval == 0) {
+		return ST_CONTINUE;
+	} else {
+		*result = rval;
+		return ST_STOP;
+	}
+}
+
+int
+avro_datum_reset(avro_datum_t datum)
+{
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+	int  rval;
+
+	switch (avro_typeof(datum)) {
+		case AVRO_ARRAY:
+		{
+			struct avro_array_datum_t *array;
+			array = avro_datum_to_array(datum);
+			st_foreach(array->els, HASH_FUNCTION_CAST array_free_foreach, 0);
+			st_free_table(array->els);
+
+			rval = avro_init_array(array);
+			if (rval != 0) {
+				avro_freet(struct avro_array_datum_t, array);
+				return rval;
+			}
+			return 0;
+		}
+
+		case AVRO_MAP:
+		{
+			struct avro_map_datum_t *map;
+			map = avro_datum_to_map(datum);
+			st_foreach(map->map, HASH_FUNCTION_CAST char_datum_free_foreach, 0);
+			st_free_table(map->map);
+			st_free_table(map->indices_by_key);
+			st_free_table(map->keys_by_index);
+
+			rval = avro_init_map(map);
+			if (rval != 0) {
+				avro_freet(struct avro_map_datum_t, map);
+				return rval;
+			}
+			return 0;
+		}
+
+		case AVRO_RECORD:
+		{
+			struct avro_record_datum_t *record;
+			record = avro_datum_to_record(datum);
+			rval = 0;
+			st_foreach(record->fields_byname,
+				   HASH_FUNCTION_CAST datum_reset_foreach, (st_data_t) &rval);
+			return rval;
+		}
+
+		case AVRO_UNION:
+		{
+			struct avro_union_datum_t *unionp;
+			unionp = avro_datum_to_union(datum);
+			return (unionp->value == NULL) ? 0 :
+			    avro_datum_reset(unionp->value);
+		}
+
+		default:
+			return 0;
+	}
+}
+
+avro_datum_t avro_datum_incref(avro_datum_t datum)
+{
+	if (datum) {
+		avro_refcount_inc(&datum->refcount);
+	}
+	return datum;
+}
+
+void avro_datum_decref(avro_datum_t datum)
+{
+	if (datum && avro_refcount_dec(&datum->refcount)) {
+		avro_datum_free(datum);
+	}
+}
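+
+/*
+ * Note (illustrative): every avro_datum_incref() must be balanced by
+ * exactly one avro_datum_decref(); the datum is freed when the count
+ * reaches zero.
+ *
+ *	avro_datum_t d = avro_int32(1);	// refcount 1
+ *	avro_datum_incref(d);		// refcount 2
+ *	avro_datum_decref(d);		// refcount 1
+ *	avro_datum_decref(d);		// freed here
+ */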
+
+void avro_datum_print(avro_datum_t value, FILE * fp)
+{
+	AVRO_UNUSED(value);
+	AVRO_UNUSED(fp);
+}
diff --git a/lang/c/src/datum.h b/lang/c/src/datum.h
new file mode 100644
index 0000000..e429989
--- /dev/null
+++ b/lang/c/src/datum.h
@@ -0,0 +1,123 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#ifndef AVRO_DATUM_H
+#define AVRO_DATUM_H
+#include <avro/platform.h>
+#include "avro/basics.h"
+#include "avro/data.h"
+#include "avro/legacy.h"
+#include "avro/schema.h"
+#include "avro_private.h"
+#include "st.h"
+
+struct avro_string_datum_t {
+	struct avro_obj_t obj;
+	char *s;
+	int64_t size;
+	avro_free_func_t  free;
+};
+
+struct avro_bytes_datum_t {
+	struct avro_obj_t obj;
+	char *bytes;
+	int64_t size;
+	avro_free_func_t  free;
+};
+
+struct avro_int32_datum_t {
+	struct avro_obj_t obj;
+	int32_t i32;
+};
+
+struct avro_int64_datum_t {
+	struct avro_obj_t obj;
+	int64_t i64;
+};
+
+struct avro_float_datum_t {
+	struct avro_obj_t obj;
+	float f;
+};
+
+struct avro_double_datum_t {
+	struct avro_obj_t obj;
+	double d;
+};
+
+struct avro_boolean_datum_t {
+	struct avro_obj_t obj;
+	int8_t i;
+};
+
+struct avro_fixed_datum_t {
+	struct avro_obj_t obj;
+	avro_schema_t schema;
+	char *bytes;
+	int64_t size;
+	avro_free_func_t  free;
+};
+
+struct avro_map_datum_t {
+	struct avro_obj_t obj;
+	avro_schema_t schema;
+	st_table *map;
+	st_table *indices_by_key;
+	st_table *keys_by_index;
+};
+
+struct avro_record_datum_t {
+	struct avro_obj_t obj;
+	avro_schema_t schema;
+	st_table *field_order;
+	st_table *fields_byname;
+};
+
+struct avro_enum_datum_t {
+	struct avro_obj_t obj;
+	avro_schema_t schema;
+	int value;
+};
+
+struct avro_array_datum_t {
+	struct avro_obj_t obj;
+	avro_schema_t schema;
+	st_table *els;
+};
+
+struct avro_union_datum_t {
+	struct avro_obj_t obj;
+	avro_schema_t schema;
+	int64_t discriminant;
+	avro_datum_t value;
+};
+
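+/*
+ * Note (illustrative): each datum struct above embeds a
+ * `struct avro_obj_t obj` header, and the macros below use
+ * container_of() to recover the full struct from a generic
+ * avro_datum_t.  Callers are expected to check the type first:
+ *
+ *	if (is_avro_string(datum)) {
+ *		struct avro_string_datum_t *sd = avro_datum_to_string(datum);
+ *	}
+ */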
+#define avro_datum_to_string(datum_)    (container_of(datum_, struct avro_string_datum_t, obj))
+#define avro_datum_to_bytes(datum_)     (container_of(datum_, struct avro_bytes_datum_t, obj))
+#define avro_datum_to_int32(datum_)     (container_of(datum_, struct avro_int32_datum_t, obj))
+#define avro_datum_to_int64(datum_)     (container_of(datum_, struct avro_int64_datum_t, obj))
+#define avro_datum_to_float(datum_)     (container_of(datum_, struct avro_float_datum_t, obj))
+#define avro_datum_to_double(datum_)    (container_of(datum_, struct avro_double_datum_t, obj))
+#define avro_datum_to_boolean(datum_)   (container_of(datum_, struct avro_boolean_datum_t, obj))
+#define avro_datum_to_fixed(datum_)     (container_of(datum_, struct avro_fixed_datum_t, obj))
+#define avro_datum_to_map(datum_)       (container_of(datum_, struct avro_map_datum_t, obj))
+#define avro_datum_to_record(datum_)    (container_of(datum_, struct avro_record_datum_t, obj))
+#define avro_datum_to_enum(datum_)      (container_of(datum_, struct avro_enum_datum_t, obj))
+#define avro_datum_to_array(datum_)     (container_of(datum_, struct avro_array_datum_t, obj))
+#define avro_datum_to_union(datum_)	(container_of(datum_, struct avro_union_datum_t, obj))
+
+#endif
diff --git a/lang/c/src/datum_equal.c b/lang/c/src/datum_equal.c
new file mode 100644
index 0000000..068dbda
--- /dev/null
+++ b/lang/c/src/datum_equal.c
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro_private.h"
+#include <string.h>
+#include "datum.h"
+
+static int
+array_equal(struct avro_array_datum_t *a, struct avro_array_datum_t *b)
+{
+	if (!avro_schema_equal(a->schema, b->schema)) {
+		return 0;
+	}
+
+	long i;
+
+	if (a->els->num_entries != b->els->num_entries) {
+		return 0;
+	}
+	for (i = 0; i < a->els->num_entries; i++) {
+		union {
+			st_data_t data;
+			avro_datum_t datum;
+		} ael, bel;
+		st_lookup(a->els, i, &ael.data);
+		st_lookup(b->els, i, &bel.data);
+		if (!avro_datum_equal(ael.datum, bel.datum)) {
+			return 0;
+		}
+	}
+	return 1;
+}
+
+struct st_equal_args {
+	int rval;
+	st_table *st;
+};
+
+static int
+st_equal_foreach(char *key, avro_datum_t datum, struct st_equal_args *args)
+{
+	union {
+		avro_datum_t datum_other;
+		st_data_t data;
+	} val;
+	if (!st_lookup(args->st, (st_data_t) key, &(val.data))) {
+		args->rval = 0;
+		return ST_STOP;
+	}
+	if (!avro_datum_equal(datum, val.datum_other)) {
+		args->rval = 0;
+		return ST_STOP;
+	}
+	return ST_CONTINUE;
+}
+
+static int map_equal(struct avro_map_datum_t *a, struct avro_map_datum_t *b)
+{
+	if (!avro_schema_equal(a->schema, b->schema)) {
+		return 0;
+	}
+
+	struct st_equal_args args = { 1, b->map };
+	if (a->map->num_entries != b->map->num_entries) {
+		return 0;
+	}
+	st_foreach(a->map, HASH_FUNCTION_CAST st_equal_foreach, (st_data_t) & args);
+	return args.rval;
+}
+
+static int record_equal(struct avro_record_datum_t *a,
+			struct avro_record_datum_t *b)
+{
+	if (!avro_schema_equal(a->schema, b->schema)) {
+		return 0;
+	}
+
+	struct st_equal_args args = { 1, b->fields_byname };
+	if (a->fields_byname->num_entries != b->fields_byname->num_entries) {
+		return 0;
+	}
+	st_foreach(a->fields_byname, HASH_FUNCTION_CAST st_equal_foreach, (st_data_t) & args);
+	return args.rval;
+}
+
+static int enum_equal(struct avro_enum_datum_t *a, struct avro_enum_datum_t *b)
+{
+	return avro_schema_equal(a->schema, b->schema) && a->value == b->value;
+}
+
+static int fixed_equal(struct avro_fixed_datum_t *a,
+		       struct avro_fixed_datum_t *b)
+{
+	if (!avro_schema_equal(a->schema, b->schema)) {
+		return 0;
+	}
+
+	return a->size == b->size && memcmp(a->bytes, b->bytes, a->size) == 0;
+}
+
+static int union_equal(struct avro_union_datum_t *a,
+		       struct avro_union_datum_t *b)
+{
+	if (!avro_schema_equal(a->schema, b->schema)) {
+		return 0;
+	}
+
+	return a->discriminant == b->discriminant && avro_datum_equal(a->value, b->value);
+}
+
+int avro_datum_equal(const avro_datum_t a, const avro_datum_t b)
+{
+	if (!(is_avro_datum(a) && is_avro_datum(b))) {
+		return 0;
+	}
+	if (avro_typeof(a) != avro_typeof(b)) {
+		return 0;
+	}
+	switch (avro_typeof(a)) {
+	case AVRO_STRING:
+		return strcmp(avro_datum_to_string(a)->s,
+			      avro_datum_to_string(b)->s) == 0;
+	case AVRO_BYTES:
+		return (avro_datum_to_bytes(a)->size ==
+			avro_datum_to_bytes(b)->size)
+		    && memcmp(avro_datum_to_bytes(a)->bytes,
+			      avro_datum_to_bytes(b)->bytes,
+			      avro_datum_to_bytes(a)->size) == 0;
+	case AVRO_INT32:
+		return avro_datum_to_int32(a)->i32 ==
+		    avro_datum_to_int32(b)->i32;
+	case AVRO_INT64:
+		return avro_datum_to_int64(a)->i64 ==
+		    avro_datum_to_int64(b)->i64;
+	case AVRO_FLOAT:
+		return avro_datum_to_float(a)->f == avro_datum_to_float(b)->f;
+	case AVRO_DOUBLE:
+		return avro_datum_to_double(a)->d == avro_datum_to_double(b)->d;
+	case AVRO_BOOLEAN:
+		return avro_datum_to_boolean(a)->i ==
+		    avro_datum_to_boolean(b)->i;
+	case AVRO_NULL:
+		return 1;
+	case AVRO_ARRAY:
+		return array_equal(avro_datum_to_array(a),
+				   avro_datum_to_array(b));
+	case AVRO_MAP:
+		return map_equal(avro_datum_to_map(a), avro_datum_to_map(b));
+
+	case AVRO_RECORD:
+		return record_equal(avro_datum_to_record(a),
+				    avro_datum_to_record(b));
+
+	case AVRO_ENUM:
+		return enum_equal(avro_datum_to_enum(a), avro_datum_to_enum(b));
+
+	case AVRO_FIXED:
+		return fixed_equal(avro_datum_to_fixed(a),
+				   avro_datum_to_fixed(b));
+
+	case AVRO_UNION:
+		return union_equal(avro_datum_to_union(a),
+				   avro_datum_to_union(b));
+
+	case AVRO_LINK:
+		/*
+		 * TODO 
+		 */
+		return 0;
+	}
+	return 0;
+}
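+
+/*
+ * Note (illustrative): equality is deep and schema-sensitive for the
+ * container types; strings compare with strcmp(), bytes and fixed with
+ * size plus memcmp(), and float/double with ==, so NaN values never
+ * compare equal, even to themselves.
+ */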
diff --git a/lang/c/src/datum_read.c b/lang/c/src/datum_read.c
new file mode 100644
index 0000000..259b542
--- /dev/null
+++ b/lang/c/src/datum_read.c
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include <stdlib.h>
+#include <errno.h>
+
+#include "avro/errors.h"
+#include "avro/io.h"
+#include "avro/legacy.h"
+#include "avro/resolver.h"
+#include "avro/schema.h"
+#include "avro/value.h"
+#include "avro_private.h"
+
+int
+avro_schema_match(avro_schema_t wschema, avro_schema_t rschema)
+{
+	check_param(0, is_avro_schema(wschema), "writer schema");
+	check_param(0, is_avro_schema(rschema), "reader schema");
+
+	avro_value_iface_t  *resolver =
+	    avro_resolved_writer_new(wschema, rschema);
+	if (resolver != NULL) {
+		avro_value_iface_decref(resolver);
+		return 1;
+	}
+
+	return 0;
+}
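+
+/*
+ * Usage sketch (illustrative): matching simply asks whether a resolver
+ * can be built from the writer schema to the reader schema.
+ *
+ *	if (avro_schema_match(wschema, rschema)) {
+ *		// data written with wschema is readable as rschema
+ *	}
+ */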
+
+int
+avro_read_data(avro_reader_t reader, avro_schema_t writers_schema,
+	       avro_schema_t readers_schema, avro_datum_t * datum)
+{
+	int rval;
+
+	check_param(EINVAL, reader, "reader");
+	check_param(EINVAL, is_avro_schema(writers_schema), "writer schema");
+	check_param(EINVAL, datum, "datum pointer");
+
+	if (!readers_schema) {
+		readers_schema = writers_schema;
+	}
+
+	avro_datum_t  result = avro_datum_from_schema(readers_schema);
+	if (!result) {
+		return EINVAL;
+	}
+
+	avro_value_t  value;
+	check(rval, avro_datum_as_value(&value, result));
+
+	avro_value_iface_t  *resolver =
+	    avro_resolved_writer_new(writers_schema, readers_schema);
+	if (!resolver) {
+		avro_value_decref(&value);
+		avro_datum_decref(result);
+		return EINVAL;
+	}
+
+	avro_value_t  resolved_value;
+	rval = avro_resolved_writer_new_value(resolver, &resolved_value);
+	if (rval) {
+		avro_value_iface_decref(resolver);
+		avro_value_decref(&value);
+		avro_datum_decref(result);
+		return rval;
+	}
+
+	avro_resolved_writer_set_dest(&resolved_value, &value);
+	rval = avro_value_read(reader, &resolved_value);
+	if (rval) {
+		avro_value_decref(&resolved_value);
+		avro_value_iface_decref(resolver);
+		avro_value_decref(&value);
+		avro_datum_decref(result);
+		return rval;
+	}
+
+	avro_value_decref(&resolved_value);
+	avro_value_iface_decref(resolver);
+	avro_value_decref(&value);
+	*datum = result;
+	return 0;
+}
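+
+/*
+ * Usage sketch (illustrative; assumes avro_reader_memory() from
+ * avro/io.h): reading a datum back from a memory buffer.  Passing NULL
+ * as the reader schema means "read with the writer's schema".
+ *
+ *	avro_reader_t r = avro_reader_memory(buf, len);
+ *	avro_datum_t d;
+ *	if (avro_read_data(r, writer_schema, NULL, &d) == 0) {
+ *		avro_datum_decref(d);
+ *	}
+ *	avro_reader_free(r);
+ */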
diff --git a/lang/c/src/datum_size.c b/lang/c/src/datum_size.c
new file mode 100644
index 0000000..22ead84
--- /dev/null
+++ b/lang/c/src/datum_size.c
@@ -0,0 +1,292 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro_private.h"
+#include "avro/errors.h"
+#include <errno.h>
+#include <assert.h>
+#include <string.h>
+#include "schema.h"
+#include "datum.h"
+#include "encoding.h"
+
+#define size_check(rval, call) { rval = call; if(rval) return rval; }
+#define size_accum(rval, size, call) { rval = call; if (rval < 0) return rval; else size += rval; }
+
+static int64_t size_datum(avro_writer_t writer, const avro_encoding_t * enc,
+			  avro_schema_t writers_schema, avro_datum_t datum);
+
+static int64_t
+size_record(avro_writer_t writer, const avro_encoding_t * enc,
+	    struct avro_record_schema_t *schema, avro_datum_t datum)
+{
+	int rval;
+	long i;
+	int64_t size;
+	avro_datum_t field_datum;
+
+	size = 0;
+	if (schema) {
+		for (i = 0; i < schema->fields->num_entries; i++) {
+			union {
+				st_data_t data;
+				struct avro_record_field_t *field;
+			} val;
+			st_lookup(schema->fields, i, &val.data);
+			size_check(rval,
+				   avro_record_get(datum, val.field->name,
+						   &field_datum));
+			size_accum(rval, size,
+				   size_datum(writer, enc, val.field->type,
+					      field_datum));
+		}
+	} else {
+		/* No schema.  Just size the record's fields in insertion order */
+		struct avro_record_datum_t *record =
+		    avro_datum_to_record(datum);
+		for (i = 0; i < record->field_order->num_entries; i++) {
+			union {
+				st_data_t data;
+				char *name;
+			} val;
+			st_lookup(record->field_order, i, &val.data);
+			size_check(rval,
+				   avro_record_get(datum, val.name,
+						   &field_datum));
+			size_accum(rval, size,
+				   size_datum(writer, enc, NULL, field_datum));
+		}
+	}
+	return size;
+}
+
+static int64_t
+size_enum(avro_writer_t writer, const avro_encoding_t * enc,
+	  struct avro_enum_schema_t *enump, struct avro_enum_datum_t *datum)
+{
+	AVRO_UNUSED(enump);
+
+	return enc->size_long(writer, datum->value);
+}
+
+struct size_map_args {
+	int rval;
+	int64_t size;
+	avro_writer_t writer;
+	const avro_encoding_t *enc;
+	avro_schema_t values_schema;
+};
+
+static int
+size_map_foreach(char *key, avro_datum_t datum, struct size_map_args *args)
+{
+	int rval = args->enc->size_string(args->writer, key);
+	if (rval < 0) {
+		args->rval = rval;
+		return ST_STOP;
+	} else {
+		args->size += rval;
+	}
+	rval = size_datum(args->writer, args->enc, args->values_schema, datum);
+	if (rval < 0) {
+		args->rval = rval;
+		return ST_STOP;
+	} else {
+		args->size += rval;
+	}
+	return ST_CONTINUE;
+}
+
+static int64_t
+size_map(avro_writer_t writer, const avro_encoding_t * enc,
+	 struct avro_map_schema_t *writers_schema,
+	 struct avro_map_datum_t *datum)
+{
+	int rval;
+	int64_t size;
+	struct size_map_args args = { 0, 0, writer, enc,
+		writers_schema ? writers_schema->values : NULL
+	};
+
+	size = 0;
+	if (datum->map->num_entries) {
+		size_accum(rval, size,
+			   enc->size_long(writer, datum->map->num_entries));
+		st_foreach(datum->map, HASH_FUNCTION_CAST size_map_foreach, (st_data_t) & args);
+		size += args.size;
+	}
+	if (!args.rval) {
+		size_accum(rval, size, enc->size_long(writer, 0));
+	}
+	return size;
+}
+
+static int64_t
+size_array(avro_writer_t writer, const avro_encoding_t * enc,
+	   struct avro_array_schema_t *schema, struct avro_array_datum_t *array)
+{
+	int rval;
+	long i;
+	int64_t size;
+
+	size = 0;
+	if (array->els->num_entries) {
+		size_accum(rval, size,
+			   enc->size_long(writer, array->els->num_entries));
+		for (i = 0; i < array->els->num_entries; i++) {
+			union {
+				st_data_t data;
+				avro_datum_t datum;
+			} val;
+			st_lookup(array->els, i, &val.data);
+			size_accum(rval, size,
+				   size_datum(writer, enc,
+					      schema ? schema->items : NULL,
+					      val.datum));
+		}
+	}
+	size_accum(rval, size, enc->size_long(writer, 0));
+	return size;
+}
+
+static int64_t
+size_union(avro_writer_t writer, const avro_encoding_t * enc,
+	   struct avro_union_schema_t *schema,
+	   struct avro_union_datum_t *unionp)
+{
+	int rval;
+	int64_t size;
+	avro_schema_t write_schema = NULL;
+
+	size = 0;
+	size_accum(rval, size, enc->size_long(writer, unionp->discriminant));
+	if (schema) {
+		write_schema =
+		    avro_schema_union_branch(&schema->obj, unionp->discriminant);
+		if (!write_schema) {
+			return -EINVAL;
+		}
+	}
+	size_accum(rval, size,
+		   size_datum(writer, enc, write_schema, unionp->value));
+	return size;
+}
+
+static int64_t size_datum(avro_writer_t writer, const avro_encoding_t * enc,
+			  avro_schema_t writers_schema, avro_datum_t datum)
+{
+	if (is_avro_schema(writers_schema) && is_avro_link(writers_schema)) {
+		return size_datum(writer, enc,
+				  (avro_schema_to_link(writers_schema))->to,
+				  datum);
+	}
+
+	switch (avro_typeof(datum)) {
+	case AVRO_NULL:
+		return enc->size_null(writer);
+
+	case AVRO_BOOLEAN:
+		return enc->size_boolean(writer,
+					 avro_datum_to_boolean(datum)->i);
+
+	case AVRO_STRING:
+		return enc->size_string(writer, avro_datum_to_string(datum)->s);
+
+	case AVRO_BYTES:
+		return enc->size_bytes(writer,
+				       avro_datum_to_bytes(datum)->bytes,
+				       avro_datum_to_bytes(datum)->size);
+
+	case AVRO_INT32:
+	case AVRO_INT64:{
+			int64_t val = avro_typeof(datum) == AVRO_INT32 ?
+			    avro_datum_to_int32(datum)->i32 :
+			    avro_datum_to_int64(datum)->i64;
+			if (is_avro_schema(writers_schema)) {
+				/* handle promotion */
+				if (is_avro_float(writers_schema)) {
+					return enc->size_float(writer,
+							       (float)val);
+				} else if (is_avro_double(writers_schema)) {
+					return enc->size_double(writer,
+								(double)val);
+				}
+			}
+			return enc->size_long(writer, val);
+		}
+
+	case AVRO_FLOAT:{
+			float val = avro_datum_to_float(datum)->f;
+			if (is_avro_schema(writers_schema)
+			    && is_avro_double(writers_schema)) {
+				/* handle promotion */
+				return enc->size_double(writer, (double)val);
+			}
+			return enc->size_float(writer, val);
+		}
+
+	case AVRO_DOUBLE:
+		return enc->size_double(writer, avro_datum_to_double(datum)->d);
+
+	case AVRO_RECORD:
+		return size_record(writer, enc,
+				   avro_schema_to_record(writers_schema),
+				   datum);
+
+	case AVRO_ENUM:
+		return size_enum(writer, enc,
+				 avro_schema_to_enum(writers_schema),
+				 avro_datum_to_enum(datum));
+
+	case AVRO_FIXED:
+		return avro_datum_to_fixed(datum)->size;
+
+	case AVRO_MAP:
+		return size_map(writer, enc,
+				avro_schema_to_map(writers_schema),
+				avro_datum_to_map(datum));
+
+	case AVRO_ARRAY:
+		return size_array(writer, enc,
+				  avro_schema_to_array(writers_schema),
+				  avro_datum_to_array(datum));
+
+	case AVRO_UNION:
+		return size_union(writer, enc,
+				  avro_schema_to_union(writers_schema),
+				  avro_datum_to_union(datum));
+
+	case AVRO_LINK:
+		break;
+	}
+
+	return 0;
+}
+
+int64_t avro_size_data(avro_writer_t writer, avro_schema_t writers_schema,
+		       avro_datum_t datum)
+{
+	check_param(-EINVAL, writer, "writer");
+	check_param(-EINVAL, is_avro_datum(datum), "datum");
+	/* Only validate datum if a writer's schema is provided */
+	if (is_avro_schema(writers_schema)
+	    && !avro_schema_datum_validate(writers_schema, datum)) {
+		avro_set_error("Datum doesn't validate against schema");
+		return -EINVAL;
+	}
+	return size_datum(writer, &avro_binary_encoding, writers_schema, datum);
+}
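+
+/*
+ * Usage sketch (illustrative): avro_size_data() computes the encoded
+ * size without writing anything, e.g. to size a buffer up front.  A
+ * negative return is -errno (-EINVAL here).
+ *
+ *	int64_t n = avro_size_data(writer, schema, datum);
+ *	if (n >= 0) {
+ *		// n bytes would be written for datum under schema
+ *	}
+ */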
diff --git a/lang/c/src/datum_skip.c b/lang/c/src/datum_skip.c
new file mode 100644
index 0000000..c73742c
--- /dev/null
+++ b/lang/c/src/datum_skip.c
@@ -0,0 +1,202 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro_private.h"
+#include "avro/errors.h"
+#include <stdlib.h>
+#include <errno.h>
+#include <string.h>
+#include "encoding.h"
+#include "schema.h"
+
+static int skip_array(avro_reader_t reader, const avro_encoding_t * enc,
+		      struct avro_array_schema_t *writers_schema)
+{
+	int rval;
+	int64_t i;
+	int64_t block_count;
+	int64_t block_size;
+
+	check_prefix(rval, enc->read_long(reader, &block_count),
+		     "Cannot read array block count: ");
+
+	while (block_count != 0) {
+		if (block_count < 0) {
+			block_count = block_count * -1;
+			check_prefix(rval, enc->read_long(reader, &block_size),
+				     "Cannot read array block size: ");
+		}
+
+		for (i = 0; i < block_count; i++) {
+			check_prefix(rval, avro_skip_data(reader, writers_schema->items),
+				     "Cannot skip array element: ");
+		}
+
+		check_prefix(rval, enc->read_long(reader, &block_count),
+			     "Cannot read array block count: ");
+	}
+	return 0;
+}
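+
+/*
+ * Note (illustrative): arrays and maps are encoded as a sequence of
+ * blocks, each starting with a signed item count.  A negative count
+ * means abs(count) items follow, preceded by the block's byte size,
+ * which would let a reader seek past the whole block; the skippers here
+ * read that size but still skip item by item.  A zero count terminates
+ * the sequence:
+ *
+ *	[-2, <byte size>, item, item] [1, item] [0]
+ */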
+
+static int skip_map(avro_reader_t reader, const avro_encoding_t * enc,
+		    struct avro_map_schema_t *writers_schema)
+{
+	int rval;
+	int64_t i, block_count;
+
+	check_prefix(rval, enc->read_long(reader, &block_count),
+		     "Cannot read map block count: ");
+	while (block_count != 0) {
+		int64_t block_size;
+		if (block_count < 0) {
+			block_count = block_count * -1;
+			check_prefix(rval, enc->read_long(reader, &block_size),
+				     "Cannot read map block size: ");
+		}
+		for (i = 0; i < block_count; i++) {
+			check_prefix(rval, enc->skip_string(reader),
+				     "Cannot skip map key: ");
+			check_prefix(rval,
+				     avro_skip_data(reader,
+						    writers_schema->values),
+				     "Cannot skip map value: ");
+		}
+		check_prefix(rval, enc->read_long(reader, &block_count),
+			     "Cannot read map block count: ");
+	}
+	return 0;
+}
+
+static int skip_union(avro_reader_t reader, const avro_encoding_t * enc,
+		      struct avro_union_schema_t *writers_schema)
+{
+	int rval;
+	int64_t index;
+	avro_schema_t branch_schema;
+
+	check_prefix(rval, enc->read_long(reader, &index),
+		     "Cannot read union discriminant: ");
+	branch_schema = avro_schema_union_branch(&writers_schema->obj, index);
+	if (!branch_schema) {
+		return EILSEQ;
+	}
+	return avro_skip_data(reader, branch_schema);
+}
+
+static int skip_record(avro_reader_t reader, const avro_encoding_t * enc,
+		       struct avro_record_schema_t *writers_schema)
+{
+	int rval;
+	long i;
+
+	AVRO_UNUSED(enc);
+
+	for (i = 0; i < writers_schema->fields->num_entries; i++) {
+		avro_schema_t  field_schema;
+
+		field_schema = avro_schema_record_field_get_by_index
+		    (&writers_schema->obj, i);
+		check_prefix(rval, avro_skip_data(reader, field_schema),
+			     "Cannot skip record field: ");
+	}
+	return 0;
+}
+
+int avro_skip_data(avro_reader_t reader, avro_schema_t writers_schema)
+{
+	check_param(EINVAL, reader, "reader");
+	check_param(EINVAL, is_avro_schema(writers_schema), "writer schema");
+
+	int rval = EINVAL;
+	const avro_encoding_t *enc = &avro_binary_encoding;
+
+	switch (avro_typeof(writers_schema)) {
+	case AVRO_NULL:
+		rval = enc->skip_null(reader);
+		break;
+
+	case AVRO_BOOLEAN:
+		rval = enc->skip_boolean(reader);
+		break;
+
+	case AVRO_STRING:
+		rval = enc->skip_string(reader);
+		break;
+
+	case AVRO_INT32:
+		rval = enc->skip_int(reader);
+		break;
+
+	case AVRO_INT64:
+		rval = enc->skip_long(reader);
+		break;
+
+	case AVRO_FLOAT:
+		rval = enc->skip_float(reader);
+		break;
+
+	case AVRO_DOUBLE:
+		rval = enc->skip_double(reader);
+		break;
+
+	case AVRO_BYTES:
+		rval = enc->skip_bytes(reader);
+		break;
+
+	case AVRO_FIXED:
+		rval =
+		    avro_skip(reader,
+			      avro_schema_to_fixed(writers_schema)->size);
+		break;
+
+	case AVRO_ENUM:
+		rval = enc->skip_long(reader);
+		break;
+
+	case AVRO_ARRAY:
+		rval =
+		    skip_array(reader, enc,
+			       avro_schema_to_array(writers_schema));
+		break;
+
+	case AVRO_MAP:
+		rval =
+		    skip_map(reader, enc, avro_schema_to_map(writers_schema));
+		break;
+
+	case AVRO_UNION:
+		rval =
+		    skip_union(reader, enc,
+			       avro_schema_to_union(writers_schema));
+		break;
+
+	case AVRO_RECORD:
+		rval =
+		    skip_record(reader, enc,
+				avro_schema_to_record(writers_schema));
+		break;
+
+	case AVRO_LINK:
+		rval =
+		    avro_skip_data(reader,
+				   (avro_schema_to_link(writers_schema))->to);
+		break;
+	}
+
+	return rval;
+}
diff --git a/lang/c/src/datum_validate.c b/lang/c/src/datum_validate.c
new file mode 100644
index 0000000..28eb289
--- /dev/null
+++ b/lang/c/src/datum_validate.c
@@ -0,0 +1,193 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro_private.h"
+#include "avro/errors.h"
+#include <limits.h>
+#include <errno.h>
+#include <string.h>
+#include "schema.h"
+#include "datum.h"
+#include "st.h"
+
+struct validate_st {
+	avro_schema_t expected_schema;
+	int rval;
+};
+
+static int
+schema_map_validate_foreach(char *key, avro_datum_t datum,
+			    struct validate_st *vst)
+{
+	AVRO_UNUSED(key);
+
+	if (!avro_schema_datum_validate(vst->expected_schema, datum)) {
+		vst->rval = 0;
+		return ST_STOP;
+	}
+	return ST_CONTINUE;
+}
+
+int
+avro_schema_datum_validate(avro_schema_t expected_schema, avro_datum_t datum)
+{
+	check_param(EINVAL, expected_schema, "expected schema");
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+
+	int rval;
+	long i;
+
+	switch (avro_typeof(expected_schema)) {
+	case AVRO_NULL:
+		return is_avro_null(datum);
+
+	case AVRO_BOOLEAN:
+		return is_avro_boolean(datum);
+
+	case AVRO_STRING:
+		return is_avro_string(datum);
+
+	case AVRO_BYTES:
+		return is_avro_bytes(datum);
+
+	case AVRO_INT32:
+		return is_avro_int32(datum)
+		    || (is_avro_int64(datum)
+			&& (INT_MIN <= avro_datum_to_int64(datum)->i64
+			    && avro_datum_to_int64(datum)->i64 <= INT_MAX));
+
+	case AVRO_INT64:
+		return is_avro_int32(datum) || is_avro_int64(datum);
+
+	case AVRO_FLOAT:
+		return is_avro_int32(datum) || is_avro_int64(datum)
+		    || is_avro_float(datum);
+
+	case AVRO_DOUBLE:
+		return is_avro_int32(datum) || is_avro_int64(datum)
+		    || is_avro_float(datum) || is_avro_double(datum);
+
+	case AVRO_FIXED:
+		return (is_avro_fixed(datum)
+			&& (avro_schema_to_fixed(expected_schema)->size ==
+			    avro_datum_to_fixed(datum)->size));
+
+	case AVRO_ENUM:
+		if (is_avro_enum(datum)) {
+			long value = avro_datum_to_enum(datum)->value;
+			long num_symbols =
+			    avro_schema_to_enum(expected_schema)->symbols->
+			    num_entries;
+			/* valid symbol indices run from 0 through
+			 * num_symbols - 1, so the upper bound is
+			 * exclusive */
+			return 0 <= value && value < num_symbols;
+		}
+		return 0;
+
+	case AVRO_ARRAY:
+		if (is_avro_array(datum)) {
+			struct avro_array_datum_t *array =
+			    avro_datum_to_array(datum);
+
+			for (i = 0; i < array->els->num_entries; i++) {
+				union {
+					st_data_t data;
+					avro_datum_t datum;
+				} val;
+				st_lookup(array->els, i, &val.data);
+				if (!avro_schema_datum_validate
+				    ((avro_schema_to_array
+				      (expected_schema))->items, val.datum)) {
+					return 0;
+				}
+			}
+			return 1;
+		}
+		return 0;
+
+	case AVRO_MAP:
+		if (is_avro_map(datum)) {
+			struct validate_st vst =
+			    { avro_schema_to_map(expected_schema)->values, 1
+			};
+			st_foreach(avro_datum_to_map(datum)->map,
+				   HASH_FUNCTION_CAST schema_map_validate_foreach,
+				   (st_data_t) & vst);
+			return vst.rval;
+		}
+		break;
+
+	case AVRO_UNION:
+		if (is_avro_union(datum)) {
+			struct avro_union_schema_t *union_schema =
+			    avro_schema_to_union(expected_schema);
+			struct avro_union_datum_t *union_datum =
+			    avro_datum_to_union(datum);
+			union {
+				st_data_t data;
+				avro_schema_t schema;
+			} val;
+
+			if (!st_lookup
+			    (union_schema->branches, union_datum->discriminant,
+			     &val.data)) {
+				return 0;
+			}
+			return avro_schema_datum_validate(val.schema,
+							  union_datum->value);
+		}
+		break;
+
+	case AVRO_RECORD:
+		if (is_avro_record(datum)) {
+			struct avro_record_schema_t *record_schema =
+			    avro_schema_to_record(expected_schema);
+			for (i = 0; i < record_schema->fields->num_entries; i++) {
+				avro_datum_t field_datum;
+				union {
+					st_data_t data;
+					struct avro_record_field_t *field;
+				} val;
+				st_lookup(record_schema->fields, i, &val.data);
+
+				rval =
+				    avro_record_get(datum, val.field->name,
+						    &field_datum);
+				if (rval) {
+					/*
+					 * TODO: check for default values 
+					 */
+					return rval;
+				}
+				if (!avro_schema_datum_validate
+				    (val.field->type, field_datum)) {
+					return 0;
+				}
+			}
+			return 1;
+		}
+		break;
+
+	case AVRO_LINK:
+		{
+			return
+			    avro_schema_datum_validate((avro_schema_to_link
+							(expected_schema))->to,
+						       datum);
+		}
+		break;
+	}
+	return 0;
+}
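+
+/*
+ * Example (a sketch): avro_schema_datum_validate() returns nonzero
+ * when the datum conforms to the schema, so a typical call site
+ * looks like:
+ *
+ *     avro_schema_t  s = avro_schema_int();
+ *     avro_datum_t  d = avro_int32(42);
+ *     if (!avro_schema_datum_validate(s, d)) {
+ *         fprintf(stderr, "datum does not match schema\n");
+ *     }
+ *     avro_datum_decref(d);
+ *     avro_schema_decref(s);
+ */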
diff --git a/lang/c/src/datum_value.c b/lang/c/src/datum_value.c
new file mode 100644
index 0000000..a03dd43
--- /dev/null
+++ b/lang/c/src/datum_value.c
@@ -0,0 +1,784 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro/platform.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro/allocation.h"
+#include "avro/basics.h"
+#include "avro/errors.h"
+#include "avro/legacy.h"
+#include "avro/refcount.h"
+#include "avro/schema.h"
+#include "avro/value.h"
+#include "avro_private.h"
+
+extern avro_value_iface_t  AVRO_DATUM_VALUE_CLASS;
+
+avro_value_iface_t *
+avro_datum_class(void)
+{
+	return &AVRO_DATUM_VALUE_CLASS;
+}
+
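+/*
+ * Wraps a legacy datum in the avro_value_t interface.  The wrapper
+ * takes its own reference on the datum, so the caller must release
+ * the wrapper with avro_value_decref() in addition to dropping any
+ * reference it holds on the datum itself.
+ */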
+int
+avro_datum_as_value(avro_value_t *value, avro_datum_t src)
+{
+	value->iface = &AVRO_DATUM_VALUE_CLASS;
+	value->self = avro_datum_incref(src);
+	return 0;
+}
+
+static int
+avro_datum_as_child_value(avro_value_t *value, avro_datum_t src)
+{
+	value->iface = &AVRO_DATUM_VALUE_CLASS;
+	value->self = src;
+	return 0;
+}
+
+static void
+avro_datum_value_incref(avro_value_t *value)
+{
+	avro_datum_t  self = (avro_datum_t) value->self;
+	avro_datum_incref(self);
+}
+
+static void
+avro_datum_value_decref(avro_value_t *value)
+{
+	avro_datum_t  self = (avro_datum_t) value->self;
+	avro_datum_decref(self);
+}
+
+static int
+avro_datum_value_reset(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+	return avro_datum_reset(self);
+}
+
+static avro_type_t
+avro_datum_value_get_type(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+#ifdef _WIN32
+#pragma message("#warning: Bug: EINVAL is not of type avro_type_t.")
+#else
+#warning "Bug: EINVAL is not of type avro_type_t."
+#endif
+        /* We shouldn't use EINVAL as the return value to
+         * check_param(), because EINVAL (= 22) is not a valid enum
+         * avro_type_t. This is a structural issue -- we would need a
+         * different interface on all the get_type functions to fix
+         * this. For now, suppressing the error by casting EINVAL to
+         * (avro_type_t) so the code compiles under C++.
+         */
+	check_param((avro_type_t) EINVAL, self, "datum instance");
+	return avro_typeof(self);
+}
+
+static avro_schema_t
+avro_datum_value_get_schema(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(NULL, self, "datum instance");
+	return avro_datum_get_schema(self);
+}
+
+
+static int
+avro_datum_value_get_boolean(const avro_value_iface_t *iface,
+			     const void *vself, int *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	int  rval;
+	int8_t  value;
+	check(rval, avro_boolean_get(self, &value));
+	*out = value;
+	return 0;
+}
+
+static int
+avro_datum_value_get_bytes(const avro_value_iface_t *iface,
+			   const void *vself, const void **buf, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	int  rval;
+	char  *bytes;
+	int64_t  sz;
+	check(rval, avro_bytes_get(self, &bytes, &sz));
+	if (buf != NULL) {
+		*buf = (const void *) bytes;
+	}
+	if (size != NULL) {
+		*size = sz;
+	}
+	return 0;
+}
+
+static int
+avro_datum_value_grab_bytes(const avro_value_iface_t *iface,
+			    const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	int  rval;
+	char  *bytes;
+	int64_t  sz;
+	check(rval, avro_bytes_get(self, &bytes, &sz));
+
+	/* nothing clever, just make a copy */
+	return avro_wrapped_buffer_new_copy(dest, bytes, sz);
+}
+
+static int
+avro_datum_value_get_double(const avro_value_iface_t *iface,
+			    const void *vself, double *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	int  rval;
+	double  value;
+	check(rval, avro_double_get(self, &value));
+	*out = value;
+	return 0;
+}
+
+static int
+avro_datum_value_get_float(const avro_value_iface_t *iface,
+			   const void *vself, float *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	int  rval;
+	float  value;
+	check(rval, avro_float_get(self, &value));
+	*out = value;
+	return 0;
+}
+
+static int
+avro_datum_value_get_int(const avro_value_iface_t *iface,
+			 const void *vself, int32_t *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	int  rval;
+	int32_t  value;
+	check(rval, avro_int32_get(self, &value));
+	*out = value;
+	return 0;
+}
+
+static int
+avro_datum_value_get_long(const avro_value_iface_t *iface,
+			  const void *vself, int64_t *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	int  rval;
+	int64_t  value;
+	check(rval, avro_int64_get(self, &value));
+	*out = value;
+	return 0;
+}
+
+static int
+avro_datum_value_get_null(const avro_value_iface_t *iface,
+			  const void *vself)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, is_avro_null(self), "datum instance");
+	return 0;
+}
+
+static int
+avro_datum_value_get_string(const avro_value_iface_t *iface,
+			    const void *vself, const char **str, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	int  rval;
+	char  *value;
+	check(rval, avro_string_get(self, &value));
+	if (str != NULL) {
+		*str = (const char *) value;
+	}
+	if (size != NULL) {
+		*size = strlen(value)+1;
+	}
+	return 0;
+}
+
+static int
+avro_datum_value_grab_string(const avro_value_iface_t *iface,
+			     const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	int  rval;
+	char  *str;
+	size_t  sz;
+	check(rval, avro_string_get(self, &str));
+	sz = strlen(str);
+
+	/* nothing clever, just make a copy */
+	/* sz doesn't contain NUL terminator */
+	return avro_wrapped_buffer_new_copy(dest, str, sz+1);
+}
+
+static int
+avro_datum_value_get_enum(const avro_value_iface_t *iface,
+			  const void *vself, int *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, is_avro_enum(self), "datum instance");
+	*out = avro_enum_get(self);
+	return 0;
+}
+
+static int
+avro_datum_value_get_fixed(const avro_value_iface_t *iface,
+			   const void *vself, const void **buf, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	int  rval;
+	char  *bytes;
+	int64_t  sz;
+	check(rval, avro_fixed_get(self, &bytes, &sz));
+	if (buf != NULL) {
+		*buf = (const void *) bytes;
+	}
+	if (size != NULL) {
+		*size = sz;
+	}
+	return 0;
+}
+
+static int
+avro_datum_value_grab_fixed(const avro_value_iface_t *iface,
+			    const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	int  rval;
+	char  *bytes;
+	int64_t  sz;
+	check(rval, avro_fixed_get(self, &bytes, &sz));
+
+	/* nothing clever, just make a copy */
+	return avro_wrapped_buffer_new_copy(dest, bytes, sz);
+}
+
+
+static int
+avro_datum_value_set_boolean(const avro_value_iface_t *iface,
+			     void *vself, int val)
+{
+	AVRO_UNUSED(iface);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+	return avro_boolean_set(self, val);
+}
+
+static int
+avro_datum_value_set_bytes(const avro_value_iface_t *iface,
+			   void *vself, void *buf, size_t size)
+{
+	AVRO_UNUSED(iface);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+	return avro_bytes_set(self, (const char *) buf, size);
+}
+
+static int
+avro_datum_value_give_bytes(const avro_value_iface_t *iface,
+			    void *vself, avro_wrapped_buffer_t *buf)
+{
+	/*
+	 * We actually can't use avro_givebytes_set, since it can't
+	 * handle the extra free_ud parameter.  Ah well, this is
+	 * deprecated, so go ahead and make a copy.
+	 */
+
+	int rval = avro_datum_value_set_bytes
+	    (iface, vself, (void *) buf->buf, buf->size);
+	avro_wrapped_buffer_free(buf);
+	return rval;
+}
+
+static int
+avro_datum_value_set_double(const avro_value_iface_t *iface,
+			    void *vself, double val)
+{
+	AVRO_UNUSED(iface);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+	return avro_double_set(self, val);
+}
+
+static int
+avro_datum_value_set_float(const avro_value_iface_t *iface,
+			   void *vself, float val)
+{
+	AVRO_UNUSED(iface);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+	return avro_float_set(self, val);
+}
+
+static int
+avro_datum_value_set_int(const avro_value_iface_t *iface,
+			 void *vself, int32_t val)
+{
+	AVRO_UNUSED(iface);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+	return avro_int32_set(self, val);
+}
+
+static int
+avro_datum_value_set_long(const avro_value_iface_t *iface,
+			  void *vself, int64_t val)
+{
+	AVRO_UNUSED(iface);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+	return avro_int64_set(self, val);
+}
+
+static int
+avro_datum_value_set_null(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, is_avro_null(self), "datum instance");
+	return 0;
+}
+
+static int
+avro_datum_value_set_string(const avro_value_iface_t *iface,
+			    void *vself, const char *str)
+{
+	AVRO_UNUSED(iface);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+	return avro_string_set(self, str);
+}
+
+static int
+avro_datum_value_set_string_len(const avro_value_iface_t *iface,
+				void *vself, const char *str, size_t size)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(size);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+	return avro_string_set(self, str);
+}
+
+static int
+avro_datum_value_give_string_len(const avro_value_iface_t *iface,
+				 void *vself, avro_wrapped_buffer_t *buf)
+{
+	/*
+	 * We actually can't use avro_givestring_set, since it can't
+	 * handle the extra free_ud parameter.  Ah well, this is
+	 * deprecated, so go ahead and make a copy.
+	 */
+
+	int rval = avro_datum_value_set_string_len
+	    (iface, vself, (char *) buf->buf, buf->size-1);
+	avro_wrapped_buffer_free(buf);
+	return rval;
+}
+
+static int
+avro_datum_value_set_enum(const avro_value_iface_t *iface,
+			  void *vself, int val)
+{
+	AVRO_UNUSED(iface);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+	return avro_enum_set(self, val);
+}
+
+static int
+avro_datum_value_set_fixed(const avro_value_iface_t *iface,
+			   void *vself, void *buf, size_t size)
+{
+	AVRO_UNUSED(iface);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+	return avro_fixed_set(self, (const char *) buf, size);
+}
+
+static int
+avro_datum_value_give_fixed(const avro_value_iface_t *iface,
+			    void *vself, avro_wrapped_buffer_t *buf)
+{
+	/*
+	 * We actually can't use avro_givefixed_set, since it can't
+	 * handle the extra free_ud parameter.  Ah well, this is
+	 * deprecated, so go ahead and make a copy.
+	 */
+
+	int rval = avro_datum_value_set_fixed
+	    (iface, vself, (void *) buf->buf, buf->size);
+	avro_wrapped_buffer_free(buf);
+	return rval;
+}
+
+
+static int
+avro_datum_value_get_size(const avro_value_iface_t *iface,
+			  const void *vself, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	if (is_avro_array(self)) {
+		*size = avro_array_size(self);
+		return 0;
+	}
+
+	if (is_avro_map(self)) {
+		*size = avro_map_size(self);
+		return 0;
+	}
+
+	if (is_avro_record(self)) {
+		avro_schema_t  schema = avro_datum_get_schema(self);
+		*size = avro_schema_record_size(schema);
+		return 0;
+	}
+
+	avro_set_error("Can only get size of array, map, or record (got type %d)", avro_typeof(self));
+	return EINVAL;
+}
+
+static int
+avro_datum_value_get_by_index(const avro_value_iface_t *iface,
+			      const void *vself, size_t index,
+			      avro_value_t *child, const char **name)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	int  rval;
+	avro_datum_t  child_datum;
+
+	if (is_avro_array(self)) {
+		check(rval, avro_array_get(self, index, &child_datum));
+		return avro_datum_as_child_value(child, child_datum);
+	}
+
+	if (is_avro_map(self)) {
+		const char  *real_key;
+		check(rval, avro_map_get_key(self, index, &real_key));
+		if (name != NULL) {
+			*name = real_key;
+		}
+		check(rval, avro_map_get(self, real_key, &child_datum));
+		return avro_datum_as_child_value(child, child_datum);
+	}
+
+	if (is_avro_record(self)) {
+		avro_schema_t  schema = avro_datum_get_schema(self);
+		const char  *field_name =
+		    avro_schema_record_field_name(schema, index);
+		if (field_name == NULL) {
+			return EINVAL;
+		}
+		if (name != NULL) {
+			*name = field_name;
+		}
+		check(rval, avro_record_get(self, field_name, &child_datum));
+		return avro_datum_as_child_value(child, child_datum);
+	}
+
+	avro_set_error("Can only get by index from array, map, or record");
+	return EINVAL;
+}
+
+static int
+avro_datum_value_get_by_name(const avro_value_iface_t *iface,
+			     const void *vself, const char *name,
+			     avro_value_t *child, size_t *index)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	int  rval;
+	avro_datum_t  child_datum;
+
+	if (is_avro_map(self)) {
+		if (index != NULL) {
+			int  real_index;
+			check(rval, avro_map_get_index(self, name, &real_index));
+			*index = real_index;
+		}
+
+		check(rval, avro_map_get(self, name, &child_datum));
+		return avro_datum_as_child_value(child, child_datum);
+	}
+
+	if (is_avro_record(self)) {
+		if (index != NULL) {
+			avro_schema_t  schema = avro_datum_get_schema(self);
+			*index = avro_schema_record_field_get_index(schema, name);
+		}
+
+		check(rval, avro_record_get(self, name, &child_datum));
+		return avro_datum_as_child_value(child, child_datum);
+	}
+
+	avro_set_error("Can only get by name from map or record");
+	return EINVAL;
+}
+
+static int
+avro_datum_value_get_discriminant(const avro_value_iface_t *iface,
+				  const void *vself, int *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	if (!is_avro_union(self)) {
+		avro_set_error("Can only get discriminant of union");
+		return EINVAL;
+	}
+
+	*out = avro_union_discriminant(self);
+	return 0;
+}
+
+static int
+avro_datum_value_get_current_branch(const avro_value_iface_t *iface,
+				    const void *vself, avro_value_t *branch)
+{
+	AVRO_UNUSED(iface);
+	const avro_datum_t  self = (const avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	if (!is_avro_union(self)) {
+		avro_set_error("Can only get current branch of union");
+		return EINVAL;
+	}
+
+	avro_datum_t  child_datum = avro_union_current_branch(self);
+	return avro_datum_as_child_value(branch, child_datum);
+}
+
+
+static int
+avro_datum_value_append(const avro_value_iface_t *iface,
+			void *vself, avro_value_t *child_out, size_t *new_index)
+{
+	AVRO_UNUSED(iface);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	if (!is_avro_array(self)) {
+		avro_set_error("Can only append to array");
+		return EINVAL;
+	}
+
+	int  rval;
+
+	avro_schema_t  array_schema = avro_datum_get_schema(self);
+	avro_schema_t  child_schema = avro_schema_array_items(array_schema);
+	avro_datum_t  child_datum = avro_datum_from_schema(child_schema);
+	if (child_datum == NULL) {
+		return ENOMEM;
+	}
+
+	rval = avro_array_append_datum(self, child_datum);
+	avro_datum_decref(child_datum);
+	if (rval != 0) {
+		return rval;
+	}
+
+	if (new_index != NULL) {
+		*new_index = avro_array_size(self) - 1;
+	}
+	return avro_datum_as_child_value(child_out, child_datum);
+}
+
+static int
+avro_datum_value_add(const avro_value_iface_t *iface,
+		     void *vself, const char *key,
+		     avro_value_t *child, size_t *index, int *is_new)
+{
+	AVRO_UNUSED(iface);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	if (!is_avro_map(self)) {
+		avro_set_error("Can only add to map");
+		return EINVAL;
+	}
+
+	int  rval;
+	avro_datum_t  child_datum;
+
+	if (avro_map_get(self, key, &child_datum) == 0) {
+		/* key already exists */
+		if (is_new != NULL) {
+			*is_new = 0;
+		}
+		if (index != NULL) {
+			int  real_index;
+			avro_map_get_index(self, key, &real_index);
+			*index = real_index;
+		}
+		return avro_datum_as_child_value(child, child_datum);
+	}
+
+	/* key is new */
+	avro_schema_t  map_schema = avro_datum_get_schema(self);
+	avro_schema_t  child_schema = avro_schema_map_values(map_schema);
+	child_datum = avro_datum_from_schema(child_schema);
+	if (child_datum == NULL) {
+		return ENOMEM;
+	}
+
+	rval = avro_map_set(self, key, child_datum);
+	avro_datum_decref(child_datum);
+	if (rval != 0) {
+		return rval;
+	}
+
+	if (is_new != NULL) {
+		*is_new = 1;
+	}
+	if (index != NULL) {
+		*index = avro_map_size(self) - 1;
+	}
+
+	return avro_datum_as_child_value(child, child_datum);
+}
+
+static int
+avro_datum_value_set_branch(const avro_value_iface_t *iface,
+			    void *vself, int discriminant,
+			    avro_value_t *branch)
+{
+	AVRO_UNUSED(iface);
+	avro_datum_t  self = (avro_datum_t) vself;
+	check_param(EINVAL, self, "datum instance");
+
+	if (!is_avro_union(self)) {
+		avro_set_error("Can only set branch of union");
+		return EINVAL;
+	}
+
+	int  rval;
+	avro_datum_t  child_datum;
+	check(rval, avro_union_set_discriminant(self, discriminant, &child_datum));
+	return avro_datum_as_child_value(branch, child_datum);
+}
+
+
+avro_value_iface_t  AVRO_DATUM_VALUE_CLASS =
+{
+	/* "class" methods */
+	NULL, /* incref */
+	NULL, /* decref */
+	/* general "instance" methods */
+	avro_datum_value_incref,
+	avro_datum_value_decref,
+	avro_datum_value_reset,
+	avro_datum_value_get_type,
+	avro_datum_value_get_schema,
+	/* primitive getters */
+	avro_datum_value_get_boolean,
+	avro_datum_value_get_bytes,
+	avro_datum_value_grab_bytes,
+	avro_datum_value_get_double,
+	avro_datum_value_get_float,
+	avro_datum_value_get_int,
+	avro_datum_value_get_long,
+	avro_datum_value_get_null,
+	avro_datum_value_get_string,
+	avro_datum_value_grab_string,
+	avro_datum_value_get_enum,
+	avro_datum_value_get_fixed,
+	avro_datum_value_grab_fixed,
+	/* primitive setters */
+	avro_datum_value_set_boolean,
+	avro_datum_value_set_bytes,
+	avro_datum_value_give_bytes,
+	avro_datum_value_set_double,
+	avro_datum_value_set_float,
+	avro_datum_value_set_int,
+	avro_datum_value_set_long,
+	avro_datum_value_set_null,
+	avro_datum_value_set_string,
+	avro_datum_value_set_string_len,
+	avro_datum_value_give_string_len,
+	avro_datum_value_set_enum,
+	avro_datum_value_set_fixed,
+	avro_datum_value_give_fixed,
+	/* compound getters */
+	avro_datum_value_get_size,
+	avro_datum_value_get_by_index,
+	avro_datum_value_get_by_name,
+	avro_datum_value_get_discriminant,
+	avro_datum_value_get_current_branch,
+	/* compound setters */
+	avro_datum_value_append,
+	avro_datum_value_add,
+	avro_datum_value_set_branch
+};
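+
+/*
+ * Example (a sketch): consuming a legacy datum through the new value
+ * interface.
+ *
+ *     avro_datum_t  d = avro_string("hello");
+ *     avro_value_t  v;
+ *     avro_datum_as_value(&v, d);
+ *     // ... read or write through v with the avro_value_* API ...
+ *     avro_value_decref(&v);
+ *     avro_datum_decref(d);
+ */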
diff --git a/lang/c/src/datum_write.c b/lang/c/src/datum_write.c
new file mode 100644
index 0000000..504be45
--- /dev/null
+++ b/lang/c/src/datum_write.c
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include <assert.h>
+#include <errno.h>
+#include <string.h>
+
+#include "avro/basics.h"
+#include "avro/errors.h"
+#include "avro/io.h"
+#include "avro/legacy.h"
+#include "avro/resolver.h"
+#include "avro/schema.h"
+#include "avro/value.h"
+#include "avro_private.h"
+
+int avro_write_data(avro_writer_t writer, avro_schema_t writers_schema,
+		    avro_datum_t datum)
+{
+	int  rval;
+
+	check_param(EINVAL, writer, "writer");
+	check_param(EINVAL, is_avro_datum(datum), "datum");
+
+	/* Only validate datum if a writer's schema is provided */
+	if (is_avro_schema(writers_schema)) {
+	    if (!avro_schema_datum_validate(writers_schema, datum)) {
+		avro_set_error("Datum doesn't validate against schema");
+		return EINVAL;
+	    }
+
+	    /*
+	     * Some confusing terminology here.  The "writers_schema"
+	     * parameter is the schema we want to use to write the data
+	     * into the "writer" buffer.  Before doing that, we need to
+	     * resolve the datum from its actual schema into this
+	     * "writer" schema.  For the purposes of that resolution,
+	     * the writer schema is the datum's actual schema, and the
+	     * reader schema is our eventual (when writing to the
+	     * buffer) "writer" schema.
+	     */
+
+	    avro_schema_t  datum_schema = avro_datum_get_schema(datum);
+	    avro_value_iface_t  *resolver =
+		avro_resolved_reader_new(datum_schema, writers_schema);
+	    if (resolver == NULL) {
+		    return EINVAL;
+	    }
+
+	    avro_value_t  value;
+	    check(rval, avro_datum_as_value(&value, datum));
+
+	    avro_value_t  resolved;
+	    rval = avro_resolved_reader_new_value(resolver, &resolved);
+	    if (rval != 0) {
+		    avro_value_decref(&value);
+		    avro_value_iface_decref(resolver);
+		    return rval;
+	    }
+
+	    avro_resolved_reader_set_source(&resolved, &value);
+	    rval = avro_value_write(writer, &resolved);
+	    avro_value_decref(&resolved);
+	    avro_value_decref(&value);
+	    avro_value_iface_decref(resolver);
+	    return rval;
+	}
+
+	/* If we're writing using the datum's actual schema, we don't
+	 * need a resolver. */
+
+	avro_value_t  value;
+	check(rval, avro_datum_as_value(&value, datum));
+	check(rval, avro_value_write(writer, &value));
+	avro_value_decref(&value);
+	return 0;
+}
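+
+/*
+ * Example (a sketch; the buffer size is arbitrary): serializing a
+ * datum into memory.  Passing NULL as the writer's schema skips
+ * validation and resolution, so the datum's own schema is used.
+ *
+ *     char  buf[64];
+ *     avro_writer_t  writer = avro_writer_memory(buf, sizeof(buf));
+ *     avro_datum_t  d = avro_int64(123);
+ *     int  rval = avro_write_data(writer, NULL, d);
+ *     avro_datum_decref(d);
+ *     avro_writer_free(writer);
+ */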
diff --git a/lang/c/src/dump.c b/lang/c/src/dump.c
new file mode 100644
index 0000000..adbbf24
--- /dev/null
+++ b/lang/c/src/dump.c
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include <ctype.h>
+#include <string.h>
+#include "avro_private.h"
+#include "dump.h"
+
+static void dump_line(FILE * out, const char *addr, const long len)
+{
+	int i;
+	fprintf(out, "|");
+	for (i = 0; i < 16; i++) {
+		if (i < len) {
+			fprintf(out, " %02X", ((uint8_t *) addr)[i]);
+		} else {
+			fprintf(out, " ..");
+		}
+		if (!((i + 1) % 8)) {
+			fprintf(out, " |");
+		}
+	}
+	fprintf(out, "\t");
+	for (i = 0; i < 16; i++) {
+		if (i < len) {
+			/* only read bytes that are inside the buffer;
+			 * the final line may be shorter than 16 bytes */
+			char c = 0x7f & ((uint8_t *) addr)[i];
+			fprintf(out, "%c", isprint(c) ? c : '.');
+		} else {
+			fprintf(out, ".");
+		}
+	}
+}
+
+void dump(FILE * out, const char *addr, const long len)
+{
+	int i;
+	for (i = 0; i < len; i += 16) {
+		dump_line(out, addr + i, (len - i) < 16 ? (len - i) : 16);
+		fprintf(out, "\n");
+	}
+	fflush(out);
+}
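+
+/*
+ * Example (a sketch; "buf" is assumed to hold at least 32 bytes):
+ * hex-dumping the start of a buffer while debugging.
+ *
+ *     dump(stderr, buf, 32);
+ */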
diff --git a/lang/c/src/dump.h b/lang/c/src/dump.h
new file mode 100644
index 0000000..396c638
--- /dev/null
+++ b/lang/c/src/dump.h
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#ifndef DUMP_H
+#define DUMP_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <stdio.h>
+
+#pragma GCC visibility push(hidden)
+void dump(FILE * out, const char *addr, const long len);
+#pragma GCC visibility pop
+
+CLOSE_EXTERN
+#endif
diff --git a/lang/c/src/encoding.h b/lang/c/src/encoding.h
new file mode 100644
index 0000000..a2732fb
--- /dev/null
+++ b/lang/c/src/encoding.h
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+#ifndef AVRO_ENCODING_H
+#define AVRO_ENCODING_H
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <avro/platform.h>
+#include "avro/io.h"
+
+/*
+ * TODO: this will need more functions when JSON encoding is added 
+ */
+struct avro_encoding_t {
+	const char *description;
+	/*
+	 * string
+	 */
+	int (*read_string) (avro_reader_t reader, char **s, int64_t *len);
+	int (*skip_string) (avro_reader_t reader);
+	int (*write_string) (avro_writer_t writer, const char *s);
+	int64_t (*size_string) (avro_writer_t writer, const char *s);
+	/*
+	 * bytes
+	 */
+	int (*read_bytes) (avro_reader_t reader, char **bytes, int64_t *len);
+	int (*skip_bytes) (avro_reader_t reader);
+	int (*write_bytes) (avro_writer_t writer,
+			    const char *bytes, const int64_t len);
+	int64_t (*size_bytes) (avro_writer_t writer,
+			       const char *bytes, const int64_t len);
+	/*
+	 * int
+	 */
+	int (*read_int) (avro_reader_t reader, int32_t *i);
+	int (*skip_int) (avro_reader_t reader);
+	int (*write_int) (avro_writer_t writer, const int32_t i);
+	int64_t (*size_int) (avro_writer_t writer, const int32_t i);
+	/*
+	 * long
+	 */
+	int (*read_long) (avro_reader_t reader, int64_t *l);
+	int (*skip_long) (avro_reader_t reader);
+	int (*write_long) (avro_writer_t writer, const int64_t l);
+	int64_t (*size_long) (avro_writer_t writer, const int64_t l);
+	/*
+	 * float
+	 */
+	int (*read_float) (avro_reader_t reader, float *f);
+	int (*skip_float) (avro_reader_t reader);
+	int (*write_float) (avro_writer_t writer, const float f);
+	int64_t (*size_float) (avro_writer_t writer, const float f);
+	/*
+	 * double
+	 */
+	int (*read_double) (avro_reader_t reader, double *d);
+	int (*skip_double) (avro_reader_t reader);
+	int (*write_double) (avro_writer_t writer, const double d);
+	int64_t (*size_double) (avro_writer_t writer, const double d);
+	/*
+	 * boolean
+	 */
+	int (*read_boolean) (avro_reader_t reader, int8_t *b);
+	int (*skip_boolean) (avro_reader_t reader);
+	int (*write_boolean) (avro_writer_t writer, const int8_t b);
+	int64_t (*size_boolean) (avro_writer_t writer, const int8_t b);
+	/*
+	 * null
+	 */
+	int (*read_null) (avro_reader_t reader);
+	int (*skip_null) (avro_reader_t reader);
+	int (*write_null) (avro_writer_t writer);
+	int64_t (*size_null) (avro_writer_t writer);
+};
+typedef struct avro_encoding_t avro_encoding_t;
+
+#define AVRO_WRITE(writer, buf, len) \
+{ int rval = avro_write( writer, buf, len ); if(rval) return rval; }
+#define AVRO_READ(reader, buf, len)  \
+{ int rval = avro_read( reader, buf, len ); if(rval) return rval; }
+#define AVRO_SKIP(reader, len) \
+{ int rval = avro_skip( reader, len); if (rval) return rval; }
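+
+/*
+ * Note that these macros expand to a statement that returns from the
+ * *enclosing* function when the underlying I/O call fails, so they
+ * may only be used inside functions that return an int error code.
+ */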
+
+extern const avro_encoding_t avro_binary_encoding;	/* in encoding_binary.c */
+CLOSE_EXTERN
+#endif
diff --git a/lang/c/src/encoding_binary.c b/lang/c/src/encoding_binary.c
new file mode 100644
index 0000000..b59c094
--- /dev/null
+++ b/lang/c/src/encoding_binary.c
@@ -0,0 +1,446 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro_private.h"
+#include "avro/allocation.h"
+#include "avro/errors.h"
+#include "encoding.h"
+#include <stdlib.h>
+#include <limits.h>
+#include <errno.h>
+#include <string.h>
+
+#define MAX_VARINT_BUF_SIZE 10
+
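+/*
+ * Avro longs are zigzag-encoded varints.  Zigzag moves the sign bit
+ * into the least-significant position (0 -> 0, -1 -> 1, 1 -> 2,
+ * -2 -> 3, ...) so that values of small magnitude need few bytes;
+ * the result is then written seven bits at a time, least-significant
+ * group first, with the high bit of each byte set when more bytes
+ * follow.  For example, 75 zigzags to 150, which is encoded as the
+ * two bytes 0x96 0x01.
+ */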
+static int read_long(avro_reader_t reader, int64_t * l)
+{
+	uint64_t value = 0;
+	uint8_t b;
+	int offset = 0;
+	do {
+		if (offset == MAX_VARINT_BUF_SIZE) {
+			/*
+			 * illegal byte sequence 
+			 */
+			avro_set_error("Varint too long");
+			return EILSEQ;
+		}
+		AVRO_READ(reader, &b, 1);
+		value |= (int64_t) (b & 0x7F) << (7 * offset);
+		++offset;
+	}
+	while (b & 0x80);
+	*l = ((value >> 1) ^ -(value & 1));
+	return 0;
+}
+
+static int skip_long(avro_reader_t reader)
+{
+	uint8_t b;
+	int offset = 0;
+	do {
+		if (offset == MAX_VARINT_BUF_SIZE) {
+			avro_set_error("Varint too long");
+			return EILSEQ;
+		}
+		AVRO_READ(reader, &b, 1);
+		++offset;
+	}
+	while (b & 0x80);
+	return 0;
+}
+
+static int write_long(avro_writer_t writer, int64_t l)
+{
+	char buf[MAX_VARINT_BUF_SIZE];
+	uint8_t bytes_written = 0;
+	uint64_t n = (l << 1) ^ (l >> 63);
+	while (n & ~0x7F) {
+		buf[bytes_written++] = (char)((((uint8_t) n) & 0x7F) | 0x80);
+		n >>= 7;
+	}
+	buf[bytes_written++] = (char)n;
+	AVRO_WRITE(writer, buf, bytes_written);
+	return 0;
+}
+
+static int64_t size_long(avro_writer_t writer, int64_t l)
+{
+	AVRO_UNUSED(writer);
+
+	int64_t len = 0;
+	uint64_t n = (l << 1) ^ (l >> 63);
+	while (n & ~0x7F) {
+		len++;
+		n >>= 7;
+	}
+	len++;
+	return len;
+}
+
+static int read_int(avro_reader_t reader, int32_t * i)
+{
+	int64_t l;
+	int rval;
+	check(rval, read_long(reader, &l));
+	if (!(INT_MIN <= l && l <= INT_MAX)) {
+		avro_set_error("Varint out of range for int type");
+		return ERANGE;
+	}
+	*i = l;
+	return 0;
+}
+
+static int skip_int(avro_reader_t reader)
+{
+	return skip_long(reader);
+}
+
+static int write_int(avro_writer_t writer, const int32_t i)
+{
+	int64_t l = i;
+	return write_long(writer, l);
+}
+
+static int64_t size_int(avro_writer_t writer, const int32_t i)
+{
+	int64_t l = i;
+	return size_long(writer, l);
+}
+
+static int read_bytes(avro_reader_t reader, char **bytes, int64_t * len)
+{
+	int rval;
+	check_prefix(rval, read_long(reader, len),
+		     "Cannot read bytes length: ");
+	*bytes = (char *) avro_malloc(*len + 1);
+	if (!*bytes) {
+		avro_set_error("Cannot allocate buffer for bytes value");
+		return ENOMEM;
+	}
+	rval = avro_read(reader, *bytes, *len);
+	if (rval) {
+		/* don't leak the buffer on a short read */
+		avro_free(*bytes, *len + 1);
+		return rval;
+	}
+	(*bytes)[*len] = '\0';
+	return 0;
+}
+
+static int skip_bytes(avro_reader_t reader)
+{
+	int64_t len = 0;
+	int rval;
+	check_prefix(rval, read_long(reader, &len),
+		     "Cannot read bytes length: ");
+	AVRO_SKIP(reader, len);
+	return 0;
+}
+
+static int
+write_bytes(avro_writer_t writer, const char *bytes, const int64_t len)
+{
+	int rval;
+	if (len < 0) {
+		avro_set_error("Invalid bytes value length");
+		return EINVAL;
+	}
+	check_prefix(rval, write_long(writer, len),
+		     "Cannot write bytes length: ");
+	AVRO_WRITE(writer, (char *)bytes, len);
+	return 0;
+}
+
+static int64_t
+size_bytes(avro_writer_t writer, const char *bytes, const int64_t len)
+{
+	AVRO_UNUSED(bytes);
+
+	return size_long(writer, len) + len;
+}
+
+static int read_string(avro_reader_t reader, char **s, int64_t *len)
+{
+	int64_t  str_len = 0;
+	int rval;
+	check_prefix(rval, read_long(reader, &str_len),
+		     "Cannot read string length: ");
+	*len = str_len + 1;
+	*s = (char *) avro_malloc(*len);
+	if (!*s) {
+		avro_set_error("Cannot allocate buffer for string value");
+		return ENOMEM;
+	}
+	(*s)[str_len] = '\0';
+	rval = avro_read(reader, *s, str_len);
+	if (rval) {
+		/* don't leak the buffer on a short read */
+		avro_free(*s, *len);
+		return rval;
+	}
+	return 0;
+}
+
+static int skip_string(avro_reader_t reader)
+{
+	return skip_bytes(reader);
+}
+
+static int write_string(avro_writer_t writer, const char *s)
+{
+	int64_t len = strlen(s);
+	return write_bytes(writer, s, len);
+}
+
+static int64_t size_string(avro_writer_t writer, const char *s)
+{
+	int64_t len = strlen(s);
+	return size_bytes(writer, s, len);
+}
+
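+/*
+ * Floats and doubles are stored as little-endian IEEE-754 bit
+ * patterns.  On little-endian hosts the bytes are copied directly;
+ * on big-endian hosts they are reassembled byte by byte.  The union
+ * converts between the floating-point value and its integer bit
+ * pattern without violating strict-aliasing rules.
+ */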
+static int read_float(avro_reader_t reader, float *f)
+{
+#if AVRO_PLATFORM_IS_BIG_ENDIAN
+	uint8_t buf[4];
+#endif
+	union {
+		float f;
+		int32_t i;
+	} v;
+#if AVRO_PLATFORM_IS_BIG_ENDIAN
+	AVRO_READ(reader, buf, 4);
+	v.i = ((int32_t) buf[0] << 0)
+	    | ((int32_t) buf[1] << 8)
+	    | ((int32_t) buf[2] << 16) | ((int32_t) buf[3] << 24);
+#else
+	AVRO_READ(reader, (void *)&v.i, 4);
+#endif
+	*f = v.f;
+	return 0;
+}
+
+static int skip_float(avro_reader_t reader)
+{
+	AVRO_SKIP(reader, 4);
+	return 0;
+}
+
+static int write_float(avro_writer_t writer, const float f)
+{
+#if AVRO_PLATFORM_IS_BIG_ENDIAN
+	uint8_t buf[4];
+#endif
+	union {
+		float f;
+		int32_t i;
+	} v;
+
+	v.f = f;
+#if AVRO_PLATFORM_IS_BIG_ENDIAN
+	buf[0] = (uint8_t) (v.i >> 0);
+	buf[1] = (uint8_t) (v.i >> 8);
+	buf[2] = (uint8_t) (v.i >> 16);
+	buf[3] = (uint8_t) (v.i >> 24);
+	AVRO_WRITE(writer, buf, 4);
+#else
+	AVRO_WRITE(writer, (void *)&v.i, 4);
+#endif
+	return 0;
+}
+
+static int64_t size_float(avro_writer_t writer, const float f)
+{
+	AVRO_UNUSED(writer);
+	AVRO_UNUSED(f);
+
+	return 4;
+}
+
+static int read_double(avro_reader_t reader, double *d)
+{
+#if AVRO_PLATFORM_IS_BIG_ENDIAN
+	uint8_t buf[8];
+#endif
+	union {
+		double d;
+		int64_t l;
+	} v;
+
+#if AVRO_PLATFORM_IS_BIG_ENDIAN
+	AVRO_READ(reader, buf, 8);
+	v.l = ((int64_t) buf[0] << 0)
+	    | ((int64_t) buf[1] << 8)
+	    | ((int64_t) buf[2] << 16)
+	    | ((int64_t) buf[3] << 24)
+	    | ((int64_t) buf[4] << 32)
+	    | ((int64_t) buf[5] << 40)
+	    | ((int64_t) buf[6] << 48) | ((int64_t) buf[7] << 56);
+#else
+	AVRO_READ(reader, (void *)&v.l, 8);
+#endif
+	*d = v.d;
+	return 0;
+}
+
+static int skip_double(avro_reader_t reader)
+{
+	AVRO_SKIP(reader, 8);
+	return 0;
+}
+
+static int write_double(avro_writer_t writer, const double d)
+{
+#if AVRO_PLATFORM_IS_BIG_ENDIAN
+	uint8_t buf[8];
+#endif
+	union {
+		double d;
+		int64_t l;
+	} v;
+
+	v.d = d;
+#if AVRO_PLATFORM_IS_BIG_ENDIAN
+	buf[0] = (uint8_t) (v.l >> 0);
+	buf[1] = (uint8_t) (v.l >> 8);
+	buf[2] = (uint8_t) (v.l >> 16);
+	buf[3] = (uint8_t) (v.l >> 24);
+	buf[4] = (uint8_t) (v.l >> 32);
+	buf[5] = (uint8_t) (v.l >> 40);
+	buf[6] = (uint8_t) (v.l >> 48);
+	buf[7] = (uint8_t) (v.l >> 56);
+	AVRO_WRITE(writer, buf, 8);
+#else
+	AVRO_WRITE(writer, (void *)&v.l, 8);
+#endif
+	return 0;
+}
+
+static int64_t size_double(avro_writer_t writer, const double d)
+{
+	AVRO_UNUSED(writer);
+	AVRO_UNUSED(d);
+
+	return 8;
+}
+
+static int read_boolean(avro_reader_t reader, int8_t * b)
+{
+	AVRO_READ(reader, b, 1);
+	return 0;
+}
+
+static int skip_boolean(avro_reader_t reader)
+{
+	AVRO_SKIP(reader, 1);
+	return 0;
+}
+
+static int write_boolean(avro_writer_t writer, const int8_t b)
+{
+	AVRO_WRITE(writer, (char *)&b, 1);
+	return 0;
+}
+
+static int64_t size_boolean(avro_writer_t writer, const int8_t b)
+{
+	AVRO_UNUSED(writer);
+	AVRO_UNUSED(b);
+
+	return 1;
+}
+
+static int read_skip_null(avro_reader_t reader)
+{
+	/*
+	 * no-op 
+	 */
+	AVRO_UNUSED(reader);
+
+	return 0;
+}
+
+static int write_null(avro_writer_t writer)
+{
+	/*
+	 * no-op 
+	 */
+	AVRO_UNUSED(writer);
+
+	return 0;
+}
+
+static int64_t size_null(avro_writer_t writer)
+{
+	AVRO_UNUSED(writer);
+
+	return 0;
+}
+
+/* Win32 doesn't support the C99 method of initializing named elements
+ * in a struct declaration. So hide the named parameters for Win32,
+ * and initialize in the order the code was written.
+ */
+const avro_encoding_t avro_binary_encoding = {
+	/* .description = */ "BINARY FORMAT",
+	/*
+	 * string 
+	 */
+	/* .read_string = */ read_string,
+	/* .skip_string = */ skip_string,
+	/* .write_string = */ write_string,
+	/* .size_string = */ size_string,
+	/*
+	 * bytes 
+	 */
+	/* .read_bytes = */ read_bytes,
+	/* .skip_bytes = */ skip_bytes,
+	/* .write_bytes = */ write_bytes,
+	/* .size_bytes = */ size_bytes,
+	/*
+	 * int 
+	 */
+	/* .read_int = */ read_int,
+	/* .skip_int = */ skip_int,
+	/* .write_int = */ write_int,
+	/* .size_int = */ size_int,
+	/*
+	 * long 
+	 */
+	/* .read_long = */ read_long,
+	/* .skip_long = */ skip_long,
+	/* .write_long = */ write_long,
+	/* .size_long = */ size_long,
+	/*
+	 * float 
+	 */
+	/* .read_float = */ read_float,
+	/* .skip_float = */ skip_float,
+	/* .write_float = */ write_float,
+	/* .size_float = */ size_float,
+	/*
+	 * double 
+	 */
+	/* .read_double = */ read_double,
+	/* .skip_double = */ skip_double,
+	/* .write_double = */ write_double,
+	/* .size_double = */ size_double,
+	/*
+	 * boolean 
+	 */
+	/* .read_boolean = */ read_boolean,
+	/* .skip_boolean = */ skip_boolean,
+	/* .write_boolean = */ write_boolean,
+	/* .size_boolean = */ size_boolean,
+	/*
+	 * null 
+	 */
+	/* .read_null = */ read_skip_null,
+	/* .skip_null = */ read_skip_null,
+	/* .write_null = */ write_null,
+	/* .size_null = */ size_null
+};
diff --git a/lang/c/src/errors.c b/lang/c/src/errors.c
new file mode 100644
index 0000000..a033be7
--- /dev/null
+++ b/lang/c/src/errors.c
@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <stdarg.h>
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+
+#include "avro/errors.h"
+
+#if defined THREADSAFE && (defined __unix__ || defined __unix)
+#include <pthread.h>
+static pthread_key_t error_data_key;
+static pthread_once_t error_data_key_once = PTHREAD_ONCE_INIT;
+
+static void make_error_data_key()
+{
+    pthread_key_create(&error_data_key, free);
+}
+#endif
+
+/* 4K should be enough, right? */
+#define AVRO_ERROR_SIZE 4096
+
+/*
+ * To support the avro_prefix_error function, we keep two string buffers
+ * around.  The AVRO_CURRENT_ERROR points at the buffer that's holding
+ * the current error message.  avro_prefix_error() writes into the other
+ * buffer, and then swaps them.
+ */
+
+struct avro_error_data_t {
+    char  AVRO_ERROR1[AVRO_ERROR_SIZE];
+    char  AVRO_ERROR2[AVRO_ERROR_SIZE];
+
+    char  *AVRO_CURRENT_ERROR;
+    char  *AVRO_OTHER_ERROR;
+};
+
+
+static struct avro_error_data_t *
+avro_get_error_data(void)
+{
+#if defined THREADSAFE && (defined __unix__ || defined __unix)
+    pthread_once(&error_data_key_once, make_error_data_key);
+
+    struct avro_error_data_t *ERROR_DATA =
+        (struct avro_error_data_t*) pthread_getspecific(error_data_key);
+
+    if (!ERROR_DATA) {
+        ERROR_DATA = (struct avro_error_data_t*) malloc(sizeof(struct avro_error_data_t));
+        pthread_setspecific(error_data_key, ERROR_DATA);
+
+        ERROR_DATA->AVRO_ERROR1[0] = '\0';
+        ERROR_DATA->AVRO_ERROR2[0] = '\0';
+        ERROR_DATA->AVRO_CURRENT_ERROR = ERROR_DATA->AVRO_ERROR1;
+        ERROR_DATA->AVRO_OTHER_ERROR = ERROR_DATA->AVRO_ERROR2;
+    }
+
+    return ERROR_DATA;
+#else
+    static struct avro_error_data_t ERROR_DATA = {
+      /* .AVRO_ERROR1 = */ {'\0'},
+      /* .AVRO_ERROR2 = */ {'\0'},
+      /* .AVRO_CURRENT_ERROR = */ ERROR_DATA.AVRO_ERROR1,
+      /* .AVRO_OTHER_ERROR = */ ERROR_DATA.AVRO_ERROR2,
+    };
+
+    return &ERROR_DATA;
+#endif
+}
+
+
+void
+avro_set_error(const char *fmt, ...)
+{
+	va_list  args;
+	va_start(args, fmt);
+	vsnprintf(avro_get_error_data()->AVRO_CURRENT_ERROR, AVRO_ERROR_SIZE, fmt, args);
+	va_end(args);
+}
+
+
+void
+avro_prefix_error(const char *fmt, ...)
+{
+    struct avro_error_data_t *ERROR_DATA = avro_get_error_data();
+
+	/*
+	 * First render the prefix into OTHER_ERROR.
+	 */
+
+	va_list  args;
+	va_start(args, fmt);
+	int  bytes_written = vsnprintf(ERROR_DATA->AVRO_OTHER_ERROR, AVRO_ERROR_SIZE, fmt, args);
+	va_end(args);
+
+	/*
+	 * Then concatenate the existing error onto the end.
+	 */
+
+	if (bytes_written < AVRO_ERROR_SIZE) {
+		strncpy(&ERROR_DATA->AVRO_OTHER_ERROR[bytes_written], ERROR_DATA->AVRO_CURRENT_ERROR,
+			AVRO_ERROR_SIZE - bytes_written);
+		ERROR_DATA->AVRO_OTHER_ERROR[AVRO_ERROR_SIZE-1] = '\0';
+	}
+
+	/*
+	 * Swap the two error pointers.
+	 */
+
+	char  *tmp;
+	tmp = ERROR_DATA->AVRO_OTHER_ERROR;
+	ERROR_DATA->AVRO_OTHER_ERROR = ERROR_DATA->AVRO_CURRENT_ERROR;
+	ERROR_DATA->AVRO_CURRENT_ERROR = tmp;
+}
+
+
+const char *avro_strerror(void)
+{
+	return avro_get_error_data()->AVRO_CURRENT_ERROR;
+}
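+
+/*
+ * Example (a sketch): the error-reporting pattern used throughout
+ * the library.  A low-level failure sets the message, and callers
+ * prepend context as the error propagates up the stack:
+ *
+ *     avro_set_error("Cannot read %s", "header");
+ *     avro_prefix_error("Cannot open file: ");
+ *     fprintf(stderr, "%s\n", avro_strerror());
+ *
+ * which prints "Cannot open file: Cannot read header".
+ */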
diff --git a/lang/c/src/generic.c b/lang/c/src/generic.c
new file mode 100644
index 0000000..ddef81a
--- /dev/null
+++ b/lang/c/src/generic.c
@@ -0,0 +1,3697 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro/platform.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro/allocation.h"
+#include "avro/data.h"
+#include "avro/errors.h"
+#include "avro/generic.h"
+#include "avro/refcount.h"
+#include "avro/schema.h"
+#include "avro/value.h"
+#include "avro_generic_internal.h"
+#include "avro_private.h"
+
+
+/*-----------------------------------------------------------------------
+ * Forward definitions
+ */
+
+typedef struct avro_generic_link_value_iface  avro_generic_link_value_iface_t;
+
+typedef struct memoize_state_t {
+	avro_memoize_t  mem;
+	avro_generic_link_value_iface_t  *links;
+} memoize_state_t;
+
+static avro_generic_value_iface_t *
+avro_generic_class_from_schema_memoized(avro_schema_t schema,
+					memoize_state_t *state);
+
+
+/*-----------------------------------------------------------------------
+ * Generic support functions
+ */
+
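+/*
+ * Generic values are allocated with a hidden reference count stored
+ * immediately before the value instance; dest->self points just past
+ * it.  avro_generic_value_incref() and avro_generic_value_decref()
+ * below step back over that header to adjust the counter, which is
+ * separate from the reference count on the interface itself.
+ */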
+int
+avro_generic_value_new(avro_value_iface_t *iface, avro_value_t *dest)
+{
+	int  rval;
+	avro_generic_value_iface_t  *giface =
+	    container_of(iface, avro_generic_value_iface_t, parent);
+	size_t  instance_size = avro_value_instance_size(giface);
+	void  *self = avro_malloc(instance_size + sizeof(volatile int));
+	if (self == NULL) {
+		avro_set_error(strerror(ENOMEM));
+		dest->iface = NULL;
+		dest->self = NULL;
+		return ENOMEM;
+	}
+
+	volatile int  *refcount = (volatile int *) self;
+	self = (char *) self + sizeof(volatile int);
+
+	*refcount = 1;
+	rval = avro_value_init(giface, self);
+	if (rval != 0) {
+		/* free the original allocation, which starts at the
+		 * hidden reference count, not at self */
+		self = (char *) self - sizeof(volatile int);
+		avro_free(self, instance_size + sizeof(volatile int));
+		dest->iface = NULL;
+		dest->self = NULL;
+		return rval;
+	}
+
+	dest->iface = avro_value_iface_incref(&giface->parent);
+	dest->self = self;
+	return 0;
+}
+
+static void
+avro_generic_value_free(const avro_value_iface_t *iface, void *self)
+{
+	if (self != NULL) {
+		const avro_generic_value_iface_t  *giface =
+		    container_of(iface, avro_generic_value_iface_t, parent);
+		size_t  instance_size = avro_value_instance_size(giface);
+		avro_value_done(giface, self);
+		self = (char *) self - sizeof(volatile int);
+		avro_free(self, instance_size + sizeof(volatile int));
+	}
+}
+
+static void
+avro_generic_value_incref(avro_value_t *value)
+{
+	/*
+	 * This only works if you pass in the top-level value.
+	 */
+
+	volatile int  *refcount = (volatile int *) ((char *) value->self - sizeof(volatile int));
+	avro_refcount_inc(refcount);
+}
+
+static void
+avro_generic_value_decref(avro_value_t *value)
+{
+	/*
+	 * This only works if you pass in the top-level value.
+	 */
+
+	volatile int  *refcount = (volatile int *) ((char *) value->self - sizeof(volatile int));
+	if (avro_refcount_dec(refcount)) {
+		avro_generic_value_free(value->iface, value->self);
+	}
+}
+
+
+/*-----------------------------------------------------------------------
+ * Recursive schemas
+ */
+
+/*
+ * Recursive schemas are handled specially; the value implementation for
+ * an AVRO_LINK schema is simply a wrapper around the value
+ * implementation for the link's target schema.  The value methods all
+ * delegate to the wrapped implementation.
+ *
+ * We don't set the target_iface pointer when the link implementation is
+ * first created, since we might not have finished creating the
+ * implementation for the target schema.  (We create the implementations
+ * for child schemas depth-first, so the target schema's implementation
+ * won't be done until all of its descendants — including the link
+ * schema — have been instantiated.)
+ *
+ * So anyway, we set the target_iface pointer to NULL at first.  And
+ * then in a fix-up stage, once all of the non-link schemas have been
+ * instantiated, we go through and set the target_iface pointers for any
+ * link schemas we encountered.
+ */
+
+struct avro_generic_link_value_iface {
+	avro_generic_value_iface_t  parent;
+
+	/** The reference count for this interface. */
+	volatile int  refcount;
+
+	/** The schema for this interface. */
+	avro_schema_t  schema;
+
+	/** The target's implementation. */
+	avro_generic_value_iface_t  *target_giface;
+
+	/**
+	 * A pointer to the “next” link interface that we've had to
+	 * create.  We use this as we're creating the overall top-level
+	 * value interface to keep track of which ones we have to fix up
+	 * afterwards.
+	 */
+	avro_generic_link_value_iface_t  *next;
+};
+
+
+static avro_value_iface_t *
+avro_generic_link_incref_iface(avro_value_iface_t *viface)
+{
+	avro_generic_link_value_iface_t  *iface =
+	    container_of(viface, avro_generic_link_value_iface_t, parent);
+	avro_refcount_inc(&iface->refcount);
+	return viface;
+}
+
+static void
+avro_generic_link_decref_iface(avro_value_iface_t *viface)
+{
+	avro_generic_link_value_iface_t  *iface =
+	    container_of(viface, avro_generic_link_value_iface_t, parent.parent);
+
+	if (avro_refcount_dec(&iface->refcount)) {
+		/* We don't keep a reference to the target
+		 * implementation, since that would give us a reference
+		 * cycle. */
+		/* We do however keep a reference to the target
+		 * schema, which we need to decrement before freeing
+		 * the link */
+		avro_schema_decref(iface->schema);
+		avro_freet(avro_generic_link_value_iface_t, iface);
+	}
+}
+
+
+static int
+avro_generic_link_reset(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_reset(self);
+}
+
+static avro_type_t
+avro_generic_link_get_type(const avro_value_iface_t *viface, const void *vself)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_type(self);
+}
+
+static avro_schema_t
+avro_generic_link_get_schema(const avro_value_iface_t *viface, const void *vself)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_schema(self);
+}
+
+static int
+avro_generic_link_get_boolean(const avro_value_iface_t *iface,
+			      const void *vself, int *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_boolean(self, out);
+}
+
+static int
+avro_generic_link_get_bytes(const avro_value_iface_t *iface,
+			    const void *vself, const void **buf, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_bytes(self, buf, size);
+}
+
+static int
+avro_generic_link_grab_bytes(const avro_value_iface_t *iface,
+			     const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_grab_bytes(self, dest);
+}
+
+static int
+avro_generic_link_get_double(const avro_value_iface_t *iface,
+			     const void *vself, double *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_double(self, out);
+}
+
+static int
+avro_generic_link_get_float(const avro_value_iface_t *iface,
+			    const void *vself, float *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_float(self, out);
+}
+
+static int
+avro_generic_link_get_int(const avro_value_iface_t *iface,
+			  const void *vself, int32_t *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_int(self, out);
+}
+
+static int
+avro_generic_link_get_long(const avro_value_iface_t *iface,
+			   const void *vself, int64_t *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_long(self, out);
+}
+
+static int
+avro_generic_link_get_null(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_null(self);
+}
+
+static int
+avro_generic_link_get_string(const avro_value_iface_t *iface,
+			     const void *vself, const char **str, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_string(self, str, size);
+}
+
+static int
+avro_generic_link_grab_string(const avro_value_iface_t *iface,
+			      const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_grab_string(self, dest);
+}
+
+static int
+avro_generic_link_get_enum(const avro_value_iface_t *iface,
+			   const void *vself, int *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_enum(self, out);
+}
+
+static int
+avro_generic_link_get_fixed(const avro_value_iface_t *iface,
+			    const void *vself, const void **buf, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_fixed(self, buf, size);
+}
+
+static int
+avro_generic_link_grab_fixed(const avro_value_iface_t *iface,
+			     const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_grab_fixed(self, dest);
+}
+
+static int
+avro_generic_link_set_boolean(const avro_value_iface_t *iface,
+			      void *vself, int val)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_set_boolean(self, val);
+}
+
+static int
+avro_generic_link_set_bytes(const avro_value_iface_t *iface,
+			    void *vself, void *buf, size_t size)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_set_bytes(self, buf, size);
+}
+
+static int
+avro_generic_link_give_bytes(const avro_value_iface_t *iface,
+			     void *vself, avro_wrapped_buffer_t *buf)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_give_bytes(self, buf);
+}
+
+static int
+avro_generic_link_set_double(const avro_value_iface_t *iface,
+			     void *vself, double val)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_set_double(self, val);
+}
+
+static int
+avro_generic_link_set_float(const avro_value_iface_t *iface,
+			    void *vself, float val)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_set_float(self, val);
+}
+
+static int
+avro_generic_link_set_int(const avro_value_iface_t *iface,
+			  void *vself, int32_t val)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_set_int(self, val);
+}
+
+static int
+avro_generic_link_set_long(const avro_value_iface_t *iface,
+			   void *vself, int64_t val)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_set_long(self, val);
+}
+
+static int
+avro_generic_link_set_null(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_set_null(self);
+}
+
+static int
+avro_generic_link_set_string(const avro_value_iface_t *iface,
+			     void *vself, const char *str)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_set_string(self, str);
+}
+
+static int
+avro_generic_link_set_string_len(const avro_value_iface_t *iface,
+				 void *vself, const char *str, size_t size)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_set_string_len(self, str, size);
+}
+
+static int
+avro_generic_link_give_string_len(const avro_value_iface_t *iface,
+				  void *vself, avro_wrapped_buffer_t *buf)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_give_string_len(self, buf);
+}
+
+static int
+avro_generic_link_set_enum(const avro_value_iface_t *iface,
+			   void *vself, int val)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_set_enum(self, val);
+}
+
+static int
+avro_generic_link_set_fixed(const avro_value_iface_t *iface,
+			    void *vself, void *buf, size_t size)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_set_fixed(self, buf, size);
+}
+
+static int
+avro_generic_link_give_fixed(const avro_value_iface_t *iface,
+			     void *vself, avro_wrapped_buffer_t *buf)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_give_fixed(self, buf);
+}
+
+static int
+avro_generic_link_get_size(const avro_value_iface_t *iface,
+			   const void *vself, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_size(self, size);
+}
+
+static int
+avro_generic_link_get_by_index(const avro_value_iface_t *iface,
+			       const void *vself, size_t index,
+			       avro_value_t *child, const char **name)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_by_index(self, index, child, name);
+}
+
+static int
+avro_generic_link_get_by_name(const avro_value_iface_t *iface,
+			      const void *vself, const char *name,
+			      avro_value_t *child, size_t *index)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_by_name(self, name, child, index);
+}
+
+static int
+avro_generic_link_get_discriminant(const avro_value_iface_t *iface,
+				   const void *vself, int *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_discriminant(self, out);
+}
+
+static int
+avro_generic_link_get_current_branch(const avro_value_iface_t *iface,
+				     const void *vself, avro_value_t *branch)
+{
+	AVRO_UNUSED(iface);
+	const avro_value_t  *self = (const avro_value_t *) vself;
+	return avro_value_get_current_branch(self, branch);
+}
+
+static int
+avro_generic_link_append(const avro_value_iface_t *iface,
+			 void *vself, avro_value_t *child_out,
+			 size_t *new_index)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_append(self, child_out, new_index);
+}
+
+static int
+avro_generic_link_add(const avro_value_iface_t *iface,
+		      void *vself, const char *key,
+		      avro_value_t *child, size_t *index, int *is_new)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_add(self, key, child, index, is_new);
+}
+
+static int
+avro_generic_link_set_branch(const avro_value_iface_t *iface,
+			     void *vself, int discriminant,
+			     avro_value_t *branch)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	return avro_value_set_branch(self, discriminant, branch);
+}
+
+static size_t
+avro_generic_link_instance_size(const avro_value_iface_t *viface)
+{
+	AVRO_UNUSED(viface);
+	return sizeof(avro_value_t);
+}
+
+static int
+avro_generic_link_init(const avro_value_iface_t *viface, void *vself)
+{
+	int  rval;
+
+	avro_generic_link_value_iface_t  *iface =
+	    container_of(viface, avro_generic_link_value_iface_t, parent.parent);
+
+	avro_value_t  *self = (avro_value_t *) vself;
+	size_t  target_instance_size =
+	    avro_value_instance_size(iface->target_giface);
+	if (target_instance_size == 0) {
+		return EINVAL;
+	}
+
+	self->iface = &iface->target_giface->parent;
+	self->self = avro_malloc(target_instance_size);
+	if (self->self == NULL) {
+		return ENOMEM;
+	}
+
+	rval = avro_value_init(iface->target_giface, self->self);
+	if (rval != 0) {
+		avro_free(self->self, target_instance_size);
+	}
+	return rval;
+}
+
+static void
+avro_generic_link_done(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_generic_value_iface_t  *target_giface =
+	    container_of(self->iface, avro_generic_value_iface_t, parent);
+	size_t  target_instance_size = avro_value_instance_size(target_giface);
+	avro_value_done(target_giface, self->self);
+	avro_free(self->self, target_instance_size);
+	self->iface = NULL;
+	self->self = NULL;
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_LINK_CLASS =
+{
+	{
+		/* "class" methods */
+		avro_generic_link_incref_iface,
+		avro_generic_link_decref_iface,
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_link_reset,
+		avro_generic_link_get_type,
+		avro_generic_link_get_schema,
+		/* primitive getters */
+		avro_generic_link_get_boolean,
+		avro_generic_link_get_bytes,
+		avro_generic_link_grab_bytes,
+		avro_generic_link_get_double,
+		avro_generic_link_get_float,
+		avro_generic_link_get_int,
+		avro_generic_link_get_long,
+		avro_generic_link_get_null,
+		avro_generic_link_get_string,
+		avro_generic_link_grab_string,
+		avro_generic_link_get_enum,
+		avro_generic_link_get_fixed,
+		avro_generic_link_grab_fixed,
+		/* primitive setters */
+		avro_generic_link_set_boolean,
+		avro_generic_link_set_bytes,
+		avro_generic_link_give_bytes,
+		avro_generic_link_set_double,
+		avro_generic_link_set_float,
+		avro_generic_link_set_int,
+		avro_generic_link_set_long,
+		avro_generic_link_set_null,
+		avro_generic_link_set_string,
+		avro_generic_link_set_string_len,
+		avro_generic_link_give_string_len,
+		avro_generic_link_set_enum,
+		avro_generic_link_set_fixed,
+		avro_generic_link_give_fixed,
+		/* compound getters */
+		avro_generic_link_get_size,
+		avro_generic_link_get_by_index,
+		avro_generic_link_get_by_name,
+		avro_generic_link_get_discriminant,
+		avro_generic_link_get_current_branch,
+		/* compound setters */
+		avro_generic_link_append,
+		avro_generic_link_add,
+		avro_generic_link_set_branch
+	},
+	avro_generic_link_instance_size,
+	avro_generic_link_init,
+	avro_generic_link_done
+};
+
+static avro_generic_link_value_iface_t *
+avro_generic_link_class(avro_schema_t schema)
+{
+	if (!is_avro_link(schema)) {
+		avro_set_error("Expected link schema");
+		return NULL;
+	}
+
+	avro_generic_link_value_iface_t  *iface =
+		(avro_generic_link_value_iface_t *) avro_new(avro_generic_link_value_iface_t);
+	if (iface == NULL) {
+		return NULL;
+	}
+
+	iface->parent = AVRO_GENERIC_LINK_CLASS;
+	iface->refcount = 1;
+	iface->schema = avro_schema_incref(schema);
+	iface->next = NULL;
+	return iface;
+}
+
+
+/*-----------------------------------------------------------------------
+ * boolean
+ */
+
+static int
+avro_generic_boolean_reset(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	int  *self = (int *) vself;
+	*self = 0;
+	return 0;
+}
+
+static avro_type_t
+avro_generic_boolean_get_type(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return AVRO_BOOLEAN;
+}
+
+static avro_schema_t
+avro_generic_boolean_get_schema(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return avro_schema_boolean();
+}
+
+static int
+avro_generic_boolean_get(const avro_value_iface_t *iface,
+			 const void *vself, int *out)
+{
+	AVRO_UNUSED(iface);
+	const int  *self = (const int *) vself;
+	*out = *self;
+	return 0;
+}
+
+static int
+avro_generic_boolean_set(const avro_value_iface_t *iface,
+			 void *vself, int val)
+{
+	AVRO_UNUSED(iface);
+	int  *self = (int *) vself;
+	*self = val;
+	return 0;
+}
+
+static size_t
+avro_generic_boolean_instance_size(const avro_value_iface_t *iface)
+{
+	AVRO_UNUSED(iface);
+	return sizeof(int);
+}
+
+static int
+avro_generic_boolean_init(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	int  *self = (int *) vself;
+	*self = 0;
+	return 0;
+}
+
+static void
+avro_generic_boolean_done(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_BOOLEAN_CLASS =
+{
+	{
+		/* "class" methods */
+		NULL, /* incref_iface */
+		NULL, /* decref_iface */
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_boolean_reset,
+		avro_generic_boolean_get_type,
+		avro_generic_boolean_get_schema,
+		/* primitive getters */
+		avro_generic_boolean_get,
+		NULL, /* get_bytes */
+		NULL, /* grab_bytes */
+		NULL, /* get_double */
+		NULL, /* get_float */
+		NULL, /* get_int */
+		NULL, /* get_long */
+		NULL, /* get_null */
+		NULL, /* get_string */
+		NULL, /* grab_string */
+		NULL, /* get_enum */
+		NULL, /* get_fixed */
+		NULL, /* grab_fixed */
+		/* primitive setters */
+		avro_generic_boolean_set,
+		NULL, /* set_bytes */
+		NULL, /* give_bytes */
+		NULL, /* set_double */
+		NULL, /* set_float */
+		NULL, /* set_int */
+		NULL, /* set_long */
+		NULL, /* set_null */
+		NULL, /* set_string */
+		NULL, /* set_string_len */
+		NULL, /* give_string_len */
+		NULL, /* set_enum */
+		NULL, /* set_fixed */
+		NULL, /* give_fixed */
+		/* compound getters */
+		NULL, /* get_size */
+		NULL, /* get_by_index */
+		NULL, /* get_by_name */
+		NULL, /* get_discriminant */
+		NULL, /* get_current_branch */
+		/* compound setters */
+		NULL, /* append */
+		NULL, /* add */
+		NULL  /* set_branch */
+	},
+	avro_generic_boolean_instance_size,
+	avro_generic_boolean_init,
+	avro_generic_boolean_done
+};
+
+avro_value_iface_t *
+avro_generic_boolean_class(void)
+{
+	return &AVRO_GENERIC_BOOLEAN_CLASS.parent;
+}
+
+int
+avro_generic_boolean_new(avro_value_t *value, int val)
+{
+	int  rval;
+	check(rval, avro_generic_value_new(&AVRO_GENERIC_BOOLEAN_CLASS.parent, value));
+	return avro_generic_boolean_set(value->iface, value->self, val);
+}
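+
+/*
+ * A minimal usage sketch (editor's illustration): create a standalone
+ * generic boolean value, read it back, and release it.  Assumes the
+ * usual <avro/generic.h> and <avro/value.h> declarations are in scope.
+ *
+ *     avro_value_t  value;
+ *     int  out;
+ *     if (avro_generic_boolean_new(&value, 1) == 0) {
+ *         avro_value_get_boolean(&value, &out);   // out == 1
+ *         avro_value_decref(&value);
+ *     }
+ */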
+
+/*-----------------------------------------------------------------------
+ * bytes
+ */
+
+static int
+avro_generic_bytes_reset(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_raw_string_t  *self = (avro_raw_string_t *) vself;
+	avro_raw_string_clear(self);
+	return 0;
+}
+
+static avro_type_t
+avro_generic_bytes_get_type(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return AVRO_BYTES;
+}
+
+static avro_schema_t
+avro_generic_bytes_get_schema(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return avro_schema_bytes();
+}
+
+static int
+avro_generic_bytes_get(const avro_value_iface_t *iface,
+		       const void *vself, const void **buf, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_raw_string_t  *self = (const avro_raw_string_t *) vself;
+	if (buf != NULL) {
+		*buf = avro_raw_string_get(self);
+	}
+	if (size != NULL) {
+		*size = avro_raw_string_length(self);
+	}
+	return 0;
+}
+
+static int
+avro_generic_bytes_grab(const avro_value_iface_t *iface,
+			const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(iface);
+	const avro_raw_string_t  *self = (const avro_raw_string_t *) vself;
+	return avro_raw_string_grab(self, dest);
+}
+
+static int
+avro_generic_bytes_set(const avro_value_iface_t *iface,
+		       void *vself, void *buf, size_t size)
+{
+	AVRO_UNUSED(iface);
+	check_param(EINVAL, buf != NULL, "bytes contents");
+	avro_raw_string_t  *self = (avro_raw_string_t *) vself;
+	avro_raw_string_set_length(self, buf, size);
+	return 0;
+}
+
+static int
+avro_generic_bytes_give(const avro_value_iface_t *iface,
+			void *vself, avro_wrapped_buffer_t *buf)
+{
+	AVRO_UNUSED(iface);
+	avro_raw_string_t  *self = (avro_raw_string_t *) vself;
+	avro_raw_string_give(self, buf);
+	return 0;
+}
+
+static size_t
+avro_generic_bytes_instance_size(const avro_value_iface_t *iface)
+{
+	AVRO_UNUSED(iface);
+	return sizeof(avro_raw_string_t);
+}
+
+static int
+avro_generic_bytes_init(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_raw_string_t  *self = (avro_raw_string_t *) vself;
+	avro_raw_string_init(self);
+	return 0;
+}
+
+static void
+avro_generic_bytes_done(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_raw_string_t  *self = (avro_raw_string_t *) vself;
+	avro_raw_string_done(self);
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_BYTES_CLASS =
+{
+	{
+		/* "class" methods */
+		NULL, /* incref_iface */
+		NULL, /* decref_iface */
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_bytes_reset,
+		avro_generic_bytes_get_type,
+		avro_generic_bytes_get_schema,
+		/* primitive getters */
+		NULL, /* get_boolean */
+		avro_generic_bytes_get,
+		avro_generic_bytes_grab,
+		NULL, /* get_double */
+		NULL, /* get_float */
+		NULL, /* get_int */
+		NULL, /* get_long */
+		NULL, /* get_null */
+		NULL, /* get_string */
+		NULL, /* grab_string */
+		NULL, /* get_enum */
+		NULL, /* get_fixed */
+		NULL, /* grab_fixed */
+		/* primitive setters */
+		NULL, /* set_boolean */
+		avro_generic_bytes_set,
+		avro_generic_bytes_give,
+		NULL, /* set_double */
+		NULL, /* set_float */
+		NULL, /* set_int */
+		NULL, /* set_long */
+		NULL, /* set_null */
+		NULL, /* set_string */
+		NULL, /* set_string_len */
+		NULL, /* give_string_len */
+		NULL, /* set_enum */
+		NULL, /* set_fixed */
+		NULL, /* give_fixed */
+		/* compound getters */
+		NULL, /* get_size */
+		NULL, /* get_by_index */
+		NULL, /* get_by_name */
+		NULL, /* get_discriminant */
+		NULL, /* get_current_branch */
+		/* compound setters */
+		NULL, /* append */
+		NULL, /* add */
+		NULL  /* set_branch */
+	},
+	avro_generic_bytes_instance_size,
+	avro_generic_bytes_init,
+	avro_generic_bytes_done
+};
+
+avro_value_iface_t *
+avro_generic_bytes_class(void)
+{
+	return &AVRO_GENERIC_BYTES_CLASS.parent;
+}
+
+int
+avro_generic_bytes_new(avro_value_t *value, void *buf, size_t size)
+{
+	int  rval;
+	check(rval, avro_generic_value_new(&AVRO_GENERIC_BYTES_CLASS.parent, value));
+	return avro_generic_bytes_set(value->iface, value->self, buf, size);
+}
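+
+/*
+ * A minimal usage sketch (editor's illustration).  Note the ownership
+ * distinction: avro_generic_bytes_set copies the buffer into the value,
+ * whereas the "give" variant hands over an avro_wrapped_buffer_t
+ * without copying.
+ *
+ *     avro_value_t  value;
+ *     uint8_t  buf[] = { 0xde, 0xad, 0xbe, 0xef };
+ *     if (avro_generic_bytes_new(&value, buf, sizeof(buf)) == 0) {
+ *         const void  *contents;
+ *         size_t  size;
+ *         avro_value_get_bytes(&value, &contents, &size);  // size == 4
+ *         avro_value_decref(&value);
+ *     }
+ */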
+
+/*-----------------------------------------------------------------------
+ * double
+ */
+
+static int
+avro_generic_double_reset(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	double  *self = (double *) vself;
+	*self = 0.0;
+	return 0;
+}
+
+static avro_type_t
+avro_generic_double_get_type(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return AVRO_DOUBLE;
+}
+
+static avro_schema_t
+avro_generic_double_get_schema(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return avro_schema_double();
+}
+
+static int
+avro_generic_double_get(const avro_value_iface_t *iface,
+			const void *vself, double *out)
+{
+	AVRO_UNUSED(iface);
+	const double  *self = (const double *) vself;
+	*out = *self;
+	return 0;
+}
+
+static int
+avro_generic_double_set(const avro_value_iface_t *iface,
+			void *vself, double val)
+{
+	AVRO_UNUSED(iface);
+	double  *self = (double *) vself;
+	*self = val;
+	return 0;
+}
+
+static size_t
+avro_generic_double_instance_size(const avro_value_iface_t *iface)
+{
+	AVRO_UNUSED(iface);
+	return sizeof(double);
+}
+
+static int
+avro_generic_double_init(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	double  *self = (double *) vself;
+	*self = 0.0;
+	return 0;
+}
+
+static void
+avro_generic_double_done(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_DOUBLE_CLASS =
+{
+	{
+		/* "class" methods */
+		NULL, /* incref_iface */
+		NULL, /* decref_iface */
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_double_reset,
+		avro_generic_double_get_type,
+		avro_generic_double_get_schema,
+		/* primitive getters */
+		NULL, /* get_boolean */
+		NULL, /* get_bytes */
+		NULL, /* grab_bytes */
+		avro_generic_double_get,
+		NULL, /* get_float */
+		NULL, /* get_int */
+		NULL, /* get_long */
+		NULL, /* get_null */
+		NULL, /* get_string */
+		NULL, /* grab_string */
+		NULL, /* get_enum */
+		NULL, /* get_fixed */
+		NULL, /* grab_fixed */
+		/* primitive setters */
+		NULL, /* set_boolean */
+		NULL, /* set_bytes */
+		NULL, /* give_bytes */
+		avro_generic_double_set,
+		NULL, /* set_float */
+		NULL, /* set_int */
+		NULL, /* set_long */
+		NULL, /* set_null */
+		NULL, /* set_string */
+		NULL, /* set_string_len */
+		NULL, /* give_string_len */
+		NULL, /* set_enum */
+		NULL, /* set_fixed */
+		NULL, /* give_fixed */
+		/* compound getters */
+		NULL, /* get_size */
+		NULL, /* get_by_index */
+		NULL, /* get_by_name */
+		NULL, /* get_discriminant */
+		NULL, /* get_current_branch */
+		/* compound setters */
+		NULL, /* append */
+		NULL, /* add */
+		NULL  /* set_branch */
+	},
+	avro_generic_double_instance_size,
+	avro_generic_double_init,
+	avro_generic_double_done
+};
+
+avro_value_iface_t *
+avro_generic_double_class(void)
+{
+	return &AVRO_GENERIC_DOUBLE_CLASS.parent;
+}
+
+int
+avro_generic_double_new(avro_value_t *value, double val)
+{
+	int  rval;
+	check(rval, avro_generic_value_new(&AVRO_GENERIC_DOUBLE_CLASS.parent, value));
+	return avro_generic_double_set(value->iface, value->self, val);
+}
+
+/*-----------------------------------------------------------------------
+ * float
+ */
+
+static int
+avro_generic_float_reset(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	float  *self = (float *) vself;
+	*self = 0.0f;
+	return 0;
+}
+
+static avro_type_t
+avro_generic_float_get_type(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return AVRO_FLOAT;
+}
+
+static avro_schema_t
+avro_generic_float_get_schema(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return avro_schema_float();
+}
+
+static int
+avro_generic_float_get(const avro_value_iface_t *iface,
+		       const void *vself, float *out)
+{
+	AVRO_UNUSED(iface);
+	const float  *self = (const float *) vself;
+	*out = *self;
+	return 0;
+}
+
+static int
+avro_generic_float_set(const avro_value_iface_t *iface,
+		       void *vself, float val)
+{
+	AVRO_UNUSED(iface);
+	float  *self = (float *) vself;
+	*self = val;
+	return 0;
+}
+
+static size_t
+avro_generic_float_instance_size(const avro_value_iface_t *iface)
+{
+	AVRO_UNUSED(iface);
+	return sizeof(float);
+}
+
+static int
+avro_generic_float_init(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	float  *self = (float *) vself;
+	*self = 0.0f;
+	return 0;
+}
+
+static void
+avro_generic_float_done(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_FLOAT_CLASS =
+{
+	{
+		/* "class" methods */
+		NULL, /* incref_iface */
+		NULL, /* decref_iface */
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_float_reset,
+		avro_generic_float_get_type,
+		avro_generic_float_get_schema,
+		/* primitive getters */
+		NULL, /* get_boolean */
+		NULL, /* get_bytes */
+		NULL, /* grab_bytes */
+		NULL, /* get_double */
+		avro_generic_float_get,
+		NULL, /* get_int */
+		NULL, /* get_long */
+		NULL, /* get_null */
+		NULL, /* get_string */
+		NULL, /* grab_string */
+		NULL, /* get_enum */
+		NULL, /* get_fixed */
+		NULL, /* grab_fixed */
+		/* primitive setters */
+		NULL, /* set_boolean */
+		NULL, /* set_bytes */
+		NULL, /* give_bytes */
+		NULL, /* set_double */
+		avro_generic_float_set,
+		NULL, /* set_int */
+		NULL, /* set_long */
+		NULL, /* set_null */
+		NULL, /* set_string */
+		NULL, /* set_string_len */
+		NULL, /* give_string_len */
+		NULL, /* set_enum */
+		NULL, /* set_fixed */
+		NULL, /* give_fixed */
+		/* compound getters */
+		NULL, /* get_size */
+		NULL, /* get_by_index */
+		NULL, /* get_by_name */
+		NULL, /* get_discriminant */
+		NULL, /* get_current_branch */
+		/* compound setters */
+		NULL, /* append */
+		NULL, /* add */
+		NULL  /* set_branch */
+	},
+	avro_generic_float_instance_size,
+	avro_generic_float_init,
+	avro_generic_float_done
+};
+
+avro_value_iface_t *
+avro_generic_float_class(void)
+{
+	return &AVRO_GENERIC_FLOAT_CLASS.parent;
+}
+
+int
+avro_generic_float_new(avro_value_t *value, float val)
+{
+	int  rval;
+	check(rval, avro_generic_value_new(&AVRO_GENERIC_FLOAT_CLASS.parent, value));
+	return avro_generic_float_set(value->iface, value->self, val);
+}
+
+/*-----------------------------------------------------------------------
+ * int
+ */
+
+static int
+avro_generic_int_reset(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	int32_t  *self = (int32_t *) vself;
+	*self = 0;
+	return 0;
+}
+
+static avro_type_t
+avro_generic_int_get_type(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return AVRO_INT32;
+}
+
+static avro_schema_t
+avro_generic_int_get_schema(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return avro_schema_int();
+}
+
+static int
+avro_generic_int_get(const avro_value_iface_t *iface,
+		     const void *vself, int32_t *out)
+{
+	AVRO_UNUSED(iface);
+	const int32_t  *self = (const int32_t *) vself;
+	*out = *self;
+	return 0;
+}
+
+static int
+avro_generic_int_set(const avro_value_iface_t *iface,
+		     void *vself, int32_t val)
+{
+	AVRO_UNUSED(iface);
+	int32_t  *self = (int32_t *) vself;
+	*self = val;
+	return 0;
+}
+
+static size_t
+avro_generic_int_instance_size(const avro_value_iface_t *iface)
+{
+	AVRO_UNUSED(iface);
+	return sizeof(int32_t);
+}
+
+static int
+avro_generic_int_init(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	int32_t  *self = (int32_t *) vself;
+	*self = 0;
+	return 0;
+}
+
+static void
+avro_generic_int_done(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_INT_CLASS =
+{
+	{
+		/* "class" methods */
+		NULL, /* incref_iface */
+		NULL, /* decref_iface */
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_int_reset,
+		avro_generic_int_get_type,
+		avro_generic_int_get_schema,
+		/* primitive getters */
+		NULL, /* get_boolean */
+		NULL, /* get_bytes */
+		NULL, /* grab_bytes */
+		NULL, /* get_double */
+		NULL, /* get_float */
+		avro_generic_int_get,
+		NULL, /* get_long */
+		NULL, /* get_null */
+		NULL, /* get_string */
+		NULL, /* grab_string */
+		NULL, /* get_enum */
+		NULL, /* get_fixed */
+		NULL, /* grab_fixed */
+		/* primitive setters */
+		NULL, /* set_boolean */
+		NULL, /* set_bytes */
+		NULL, /* give_bytes */
+		NULL, /* set_double */
+		NULL, /* set_float */
+		avro_generic_int_set,
+		NULL, /* set_long */
+		NULL, /* set_null */
+		NULL, /* set_string */
+		NULL, /* set_string_len */
+		NULL, /* give_string_len */
+		NULL, /* set_enum */
+		NULL, /* set_fixed */
+		NULL, /* give_fixed */
+		/* compound getters */
+		NULL, /* get_size */
+		NULL, /* get_by_index */
+		NULL, /* get_by_name */
+		NULL, /* get_discriminant */
+		NULL, /* get_current_branch */
+		/* compound setters */
+		NULL, /* append */
+		NULL, /* add */
+		NULL  /* set_branch */
+	},
+	avro_generic_int_instance_size,
+	avro_generic_int_init,
+	avro_generic_int_done
+};
+
+avro_value_iface_t *
+avro_generic_int_class(void)
+{
+	return &AVRO_GENERIC_INT_CLASS.parent;
+}
+
+int
+avro_generic_int_new(avro_value_t *value, int32_t val)
+{
+	int  rval;
+	check(rval, avro_generic_value_new(&AVRO_GENERIC_INT_CLASS.parent, value));
+	return avro_generic_int_set(value->iface, value->self, val);
+}
+
+/*-----------------------------------------------------------------------
+ * long
+ */
+
+static int
+avro_generic_long_reset(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	int64_t  *self = (int64_t *) vself;
+	*self = 0;
+	return 0;
+}
+
+static avro_type_t
+avro_generic_long_get_type(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return AVRO_INT64;
+}
+
+static avro_schema_t
+avro_generic_long_get_schema(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return avro_schema_long();
+}
+
+static int
+avro_generic_long_get(const avro_value_iface_t *iface,
+		      const void *vself, int64_t *out)
+{
+	AVRO_UNUSED(iface);
+	const int64_t  *self = (const int64_t *) vself;
+	*out = *self;
+	return 0;
+}
+
+static int
+avro_generic_long_set(const avro_value_iface_t *iface,
+		      void *vself, int64_t val)
+{
+	AVRO_UNUSED(iface);
+	int64_t  *self = (int64_t *) vself;
+	*self = val;
+	return 0;
+}
+
+static size_t
+avro_generic_long_instance_size(const avro_value_iface_t *iface)
+{
+	AVRO_UNUSED(iface);
+	return sizeof(int64_t);
+}
+
+static int
+avro_generic_long_init(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	int64_t  *self = (int64_t *) vself;
+	*self = 0;
+	return 0;
+}
+
+static void
+avro_generic_long_done(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_LONG_CLASS =
+{
+	{
+		/* "class" methods */
+		NULL, /* incref_iface */
+		NULL, /* decref_iface */
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_long_reset,
+		avro_generic_long_get_type,
+		avro_generic_long_get_schema,
+		/* primitive getters */
+		NULL, /* get_boolean */
+		NULL, /* get_bytes */
+		NULL, /* grab_bytes */
+		NULL, /* get_double */
+		NULL, /* get_float */
+		NULL, /* get_int */
+		avro_generic_long_get,
+		NULL, /* get_null */
+		NULL, /* get_string */
+		NULL, /* grab_string */
+		NULL, /* get_enum */
+		NULL, /* get_fixed */
+		NULL, /* grab_fixed */
+		/* primitive setters */
+		NULL, /* set_boolean */
+		NULL, /* set_bytes */
+		NULL, /* give_bytes */
+		NULL, /* set_double */
+		NULL, /* set_float */
+		NULL, /* set_int */
+		avro_generic_long_set,
+		NULL, /* set_null */
+		NULL, /* set_string */
+		NULL, /* set_string_len */
+		NULL, /* give_string_len */
+		NULL, /* set_enum */
+		NULL, /* set_fixed */
+		NULL, /* give_fixed */
+		/* compound getters */
+		NULL, /* get_size */
+		NULL, /* get_by_index */
+		NULL, /* get_by_name */
+		NULL, /* get_discriminant */
+		NULL, /* get_current_branch */
+		/* compound setters */
+		NULL, /* append */
+		NULL, /* add */
+		NULL  /* set_branch */
+	},
+	avro_generic_long_instance_size,
+	avro_generic_long_init,
+	avro_generic_long_done
+};
+
+avro_value_iface_t *
+avro_generic_long_class(void)
+{
+	return &AVRO_GENERIC_LONG_CLASS.parent;
+}
+
+int
+avro_generic_long_new(avro_value_t *value, int64_t val)
+{
+	int  rval;
+	check(rval, avro_generic_value_new(&AVRO_GENERIC_LONG_CLASS.parent, value));
+	return avro_generic_long_set(value->iface, value->self, val);
+}
+
+/*-----------------------------------------------------------------------
+ * null
+ */
+
+static int
+avro_generic_null_reset(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	int  *self = (int *) vself;
+	*self = 0;
+	return 0;
+}
+
+static avro_type_t
+avro_generic_null_get_type(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return AVRO_NULL;
+}
+
+static avro_schema_t
+avro_generic_null_get_schema(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return avro_schema_null();
+}
+
+static int
+avro_generic_null_get(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return 0;
+}
+
+static int
+avro_generic_null_set(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return 0;
+}
+
+static size_t
+avro_generic_null_instance_size(const avro_value_iface_t *iface)
+{
+	AVRO_UNUSED(iface);
+	return sizeof(int);
+}
+
+static int
+avro_generic_null_init(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	int  *self = (int *) vself;
+	*self = 0;
+	return 0;
+}
+
+static void
+avro_generic_null_done(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_NULL_CLASS =
+{
+	{
+		/* "class" methods */
+		NULL, /* incref_iface */
+		NULL, /* decref_iface */
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_null_reset,
+		avro_generic_null_get_type,
+		avro_generic_null_get_schema,
+		/* primitive getters */
+		NULL, /* get_boolean */
+		NULL, /* get_bytes */
+		NULL, /* grab_bytes */
+		NULL, /* get_double */
+		NULL, /* get_float */
+		NULL, /* get_int */
+		NULL, /* get_long */
+		avro_generic_null_get,
+		NULL, /* get_string */
+		NULL, /* grab_string */
+		NULL, /* get_enum */
+		NULL, /* get_fixed */
+		NULL, /* grab_fixed */
+		/* primitive setters */
+		NULL, /* set_boolean */
+		NULL, /* set_bytes */
+		NULL, /* give_bytes */
+		NULL, /* set_double */
+		NULL, /* set_float */
+		NULL, /* set_int */
+		NULL, /* set_long */
+		avro_generic_null_set,
+		NULL, /* set_string */
+		NULL, /* set_string_len */
+		NULL, /* give_string_len */
+		NULL, /* set_enum */
+		NULL, /* set_fixed */
+		NULL, /* give_fixed */
+		/* compound getters */
+		NULL, /* get_size */
+		NULL, /* get_by_index */
+		NULL, /* get_by_name */
+		NULL, /* get_discriminant */
+		NULL, /* get_current_branch */
+		/* compound setters */
+		NULL, /* append */
+		NULL, /* add */
+		NULL  /* set_branch */
+	},
+	avro_generic_null_instance_size,
+	avro_generic_null_init,
+	avro_generic_null_done
+};
+
+avro_value_iface_t *
+avro_generic_null_class(void)
+{
+	return &AVRO_GENERIC_NULL_CLASS.parent;
+}
+
+int
+avro_generic_null_new(avro_value_t *value)
+{
+	return avro_generic_value_new(&AVRO_GENERIC_NULL_CLASS.parent, value);
+}
+
+/*-----------------------------------------------------------------------
+ * string
+ */
+
+static int
+avro_generic_string_reset(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_raw_string_t  *self = (avro_raw_string_t *) vself;
+	avro_raw_string_clear(self);
+	return 0;
+}
+
+static avro_type_t
+avro_generic_string_get_type(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return AVRO_STRING;
+}
+
+static avro_schema_t
+avro_generic_string_get_schema(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return avro_schema_string();
+}
+
+static int
+avro_generic_string_get(const avro_value_iface_t *iface,
+			const void *vself, const char **str, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_raw_string_t  *self = (const avro_raw_string_t *) vself;
+	const char  *contents = (const char *) avro_raw_string_get(self);
+
+	if (str != NULL) {
+		/*
+		 * We can't return a NULL string; we have to return an
+		 * *empty* string.
+		 */
+
+		*str = (contents == NULL)? "": contents;
+	}
+	if (size != NULL) {
+		/* A raw_string's length includes the NUL terminator;
+		 * when the string is unset we report 1, the length of
+		 * the empty string returned above. */
+		*size = (contents == NULL)? 1: avro_raw_string_length(self);
+	}
+	return 0;
+}
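+
+/*
+ * Editor's note, as a concrete illustration of the size convention
+ * above:
+ *
+ *     avro_value_t  value;
+ *     const char  *str;
+ *     size_t  size;
+ *     if (avro_generic_string_new(&value, "abc") == 0) {
+ *         avro_value_get_string(&value, &str, &size);
+ *         // size == 4: the three characters plus the NUL terminator
+ *         avro_value_decref(&value);
+ *     }
+ */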
+
+static int
+avro_generic_string_grab(const avro_value_iface_t *iface,
+			 const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(iface);
+	const avro_raw_string_t  *self = (const avro_raw_string_t *) vself;
+	const char  *contents = (const char *) avro_raw_string_get(self);
+
+	if (contents == NULL) {
+		return avro_wrapped_buffer_new(dest, "", 1);
+	} else {
+		return avro_raw_string_grab(self, dest);
+	}
+}
+
+static int
+avro_generic_string_set(const avro_value_iface_t *iface,
+			void *vself, const char *val)
+{
+	AVRO_UNUSED(iface);
+	check_param(EINVAL, val != NULL, "string contents");
+
+	/*
+	 * This raw_string method ensures that we copy the NUL
+	 * terminator from val, and will include the NUL terminator in
+	 * the raw_string's length, which is what we want.
+	 */
+	avro_raw_string_t  *self = (avro_raw_string_t *) vself;
+	avro_raw_string_set(self, val);
+	return 0;
+}
+
+static int
+avro_generic_string_set_length(const avro_value_iface_t *iface,
+			       void *vself, const char *val, size_t size)
+{
+	AVRO_UNUSED(iface);
+	check_param(EINVAL, val != NULL, "string contents");
+	avro_raw_string_t  *self = (avro_raw_string_t *) vself;
+	avro_raw_string_set_length(self, val, size);
+	return 0;
+}
+
+static int
+avro_generic_string_give_length(const avro_value_iface_t *iface,
+				void *vself, avro_wrapped_buffer_t *buf)
+{
+	AVRO_UNUSED(iface);
+	avro_raw_string_t  *self = (avro_raw_string_t *) vself;
+	avro_raw_string_give(self, buf);
+	return 0;
+}
+
+static size_t
+avro_generic_string_instance_size(const avro_value_iface_t *iface)
+{
+	AVRO_UNUSED(iface);
+	return sizeof(avro_raw_string_t);
+}
+
+static int
+avro_generic_string_init(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_raw_string_t  *self = (avro_raw_string_t *) vself;
+	avro_raw_string_init(self);
+	return 0;
+}
+
+static void
+avro_generic_string_done(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_raw_string_t  *self = (avro_raw_string_t *) vself;
+	avro_raw_string_done(self);
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_STRING_CLASS =
+{
+	{
+		/* "class" methods */
+		NULL, /* incref_iface */
+		NULL, /* decref_iface */
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_string_reset,
+		avro_generic_string_get_type,
+		avro_generic_string_get_schema,
+		/* primitive getters */
+		NULL, /* get_boolean */
+		NULL, /* get_bytes */
+		NULL, /* grab_bytes */
+		NULL, /* get_double */
+		NULL, /* get_float */
+		NULL, /* get_int */
+		NULL, /* get_long */
+		NULL, /* get_null */
+		avro_generic_string_get,
+		avro_generic_string_grab,
+		NULL, /* get_enum */
+		NULL, /* get_fixed */
+		NULL, /* grab_fixed */
+		/* primitive setters */
+		NULL, /* set_boolean */
+		NULL, /* set_bytes */
+		NULL, /* give_bytes */
+		NULL, /* set_double */
+		NULL, /* set_float */
+		NULL, /* set_int */
+		NULL, /* set_long */
+		NULL, /* set_null */
+		avro_generic_string_set,
+		avro_generic_string_set_length,
+		avro_generic_string_give_length,
+		NULL, /* set_enum */
+		NULL, /* set_fixed */
+		NULL, /* give_fixed */
+		/* compound getters */
+		NULL, /* get_size */
+		NULL, /* get_by_index */
+		NULL, /* get_by_name */
+		NULL, /* get_discriminant */
+		NULL, /* get_current_branch */
+		/* compound setters */
+		NULL, /* append */
+		NULL, /* add */
+		NULL  /* set_branch */
+	},
+	avro_generic_string_instance_size,
+	avro_generic_string_init,
+	avro_generic_string_done
+};
+
+avro_value_iface_t *
+avro_generic_string_class(void)
+{
+	return &AVRO_GENERIC_STRING_CLASS.parent;
+}
+
+int
+avro_generic_string_new(avro_value_t *value, const char *str)
+{
+	int  rval;
+	check(rval, avro_generic_value_new(&AVRO_GENERIC_STRING_CLASS.parent, value));
+	return avro_generic_string_set(value->iface, value->self, str);
+}
+
+int
+avro_generic_string_new_length(avro_value_t *value, const char *str, size_t size)
+{
+	int  rval;
+	check(rval, avro_generic_value_new(&AVRO_GENERIC_STRING_CLASS.parent, value));
+	return avro_generic_string_set_length(value->iface, value->self, str, size);
+}
+
+
+/*-----------------------------------------------------------------------
+ * array
+ */
+
+/*
+ * For generic arrays, we need to store the value implementation for the
+ * array's elements.
+ */
+
+typedef struct avro_generic_array_value_iface {
+	avro_generic_value_iface_t  parent;
+	volatile int  refcount;
+	avro_schema_t  schema;
+	avro_generic_value_iface_t  *child_giface;
+} avro_generic_array_value_iface_t;
+
+typedef struct avro_generic_array {
+	avro_raw_array_t  array;
+} avro_generic_array_t;
+
+
+static avro_value_iface_t *
+avro_generic_array_incref_iface(avro_value_iface_t *viface)
+{
+	avro_generic_array_value_iface_t  *iface =
+	    container_of(viface, avro_generic_array_value_iface_t, parent);
+	avro_refcount_inc(&iface->refcount);
+	return viface;
+}
+
+static void
+avro_generic_array_decref_iface(avro_value_iface_t *viface)
+{
+	avro_generic_array_value_iface_t  *iface =
+	    container_of(viface, avro_generic_array_value_iface_t, parent);
+	if (avro_refcount_dec(&iface->refcount)) {
+		avro_schema_decref(iface->schema);
+		avro_value_iface_decref(&iface->child_giface->parent);
+		avro_freet(avro_generic_array_value_iface_t, iface);
+	}
+}
+
+
+static void
+avro_generic_array_free_elements(const avro_generic_value_iface_t *child_giface,
+				 avro_generic_array_t *self)
+{
+	size_t  i;
+	for (i = 0; i < avro_raw_array_size(&self->array); i++) {
+		void  *child_self = avro_raw_array_get_raw(&self->array, i);
+		avro_value_done(child_giface, child_self);
+	}
+}
+
+static int
+avro_generic_array_reset(const avro_value_iface_t *viface, void *vself)
+{
+	const avro_generic_array_value_iface_t  *iface =
+	    container_of(viface, avro_generic_array_value_iface_t, parent);
+	avro_generic_array_t  *self = (avro_generic_array_t *) vself;
+	avro_generic_array_free_elements(iface->child_giface, self);
+	avro_raw_array_clear(&self->array);
+	return 0;
+}
+
+static avro_type_t
+avro_generic_array_get_type(const avro_value_iface_t *viface, const void *vself)
+{
+	AVRO_UNUSED(viface);
+	AVRO_UNUSED(vself);
+	return AVRO_ARRAY;
+}
+
+static avro_schema_t
+avro_generic_array_get_schema(const avro_value_iface_t *viface, const void *vself)
+{
+	const avro_generic_array_value_iface_t  *iface =
+	    container_of(viface, avro_generic_array_value_iface_t, parent);
+	AVRO_UNUSED(vself);
+	return iface->schema;
+}
+
+static int
+avro_generic_array_get_size(const avro_value_iface_t *viface,
+			    const void *vself, size_t *size)
+{
+	AVRO_UNUSED(viface);
+	const avro_generic_array_t  *self = (const avro_generic_array_t *) vself;
+	if (size != NULL) {
+		*size = avro_raw_array_size(&self->array);
+	}
+	return 0;
+}
+
+static int
+avro_generic_array_get_by_index(const avro_value_iface_t *viface,
+				const void *vself, size_t index,
+				avro_value_t *child, const char **name)
+{
+	const avro_generic_array_value_iface_t  *iface =
+	    container_of(viface, avro_generic_array_value_iface_t, parent);
+	AVRO_UNUSED(name);
+	const avro_generic_array_t  *self = (const avro_generic_array_t *) vself;
+	if (index >= avro_raw_array_size(&self->array)) {
+		avro_set_error("Array index %" PRIsz " out of range", index);
+		return EINVAL;
+	}
+	child->iface = &iface->child_giface->parent;
+	child->self = avro_raw_array_get_raw(&self->array, index);
+	return 0;
+}
+
+static int
+avro_generic_array_append(const avro_value_iface_t *viface,
+			  void *vself, avro_value_t *child,
+			  size_t *new_index)
+{
+	int  rval;
+	const avro_generic_array_value_iface_t  *iface =
+	    container_of(viface, avro_generic_array_value_iface_t, parent);
+	avro_generic_array_t  *self = (avro_generic_array_t *) vself;
+	child->iface = &iface->child_giface->parent;
+	child->self = avro_raw_array_append(&self->array);
+	if (child->self == NULL) {
+		avro_set_error("Couldn't expand array");
+		return ENOMEM;
+	}
+	check(rval, avro_value_init(iface->child_giface, child->self));
+	if (new_index != NULL) {
+		*new_index = avro_raw_array_size(&self->array) - 1;
+	}
+	return 0;
+}
+
+static size_t
+avro_generic_array_instance_size(const avro_value_iface_t *viface)
+{
+	AVRO_UNUSED(viface);
+	return sizeof(avro_generic_array_t);
+}
+
+static int
+avro_generic_array_init(const avro_value_iface_t *viface, void *vself)
+{
+	const avro_generic_array_value_iface_t  *iface =
+	    container_of(viface, avro_generic_array_value_iface_t, parent);
+	avro_generic_array_t  *self = (avro_generic_array_t *) vself;
+
+	size_t  child_size = avro_value_instance_size(iface->child_giface);
+	avro_raw_array_init(&self->array, child_size);
+	return 0;
+}
+
+static void
+avro_generic_array_done(const avro_value_iface_t *viface, void *vself)
+{
+	const avro_generic_array_value_iface_t  *iface =
+	    container_of(viface, avro_generic_array_value_iface_t, parent);
+	avro_generic_array_t  *self = (avro_generic_array_t *) vself;
+	avro_generic_array_free_elements(iface->child_giface, self);
+	avro_raw_array_done(&self->array);
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_ARRAY_CLASS =
+{
+	{
+		/* "class" methods */
+		avro_generic_array_incref_iface,
+		avro_generic_array_decref_iface,
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_array_reset,
+		avro_generic_array_get_type,
+		avro_generic_array_get_schema,
+		/* primitive getters */
+		NULL, /* get_boolean */
+		NULL, /* get_bytes */
+		NULL, /* grab_bytes */
+		NULL, /* get_double */
+		NULL, /* get_float */
+		NULL, /* get_int */
+		NULL, /* get_long */
+		NULL, /* get_null */
+		NULL, /* get_string */
+		NULL, /* grab_string */
+		NULL, /* get_enum */
+		NULL, /* get_fixed */
+		NULL, /* grab_fixed */
+		/* primitive setters */
+		NULL, /* set_boolean */
+		NULL, /* set_bytes */
+		NULL, /* give_bytes */
+		NULL, /* set_double */
+		NULL, /* set_float */
+		NULL, /* set_int */
+		NULL, /* set_long */
+		NULL, /* set_null */
+		NULL, /* set_string */
+		NULL, /* set_string_len */
+		NULL, /* give_string_len */
+		NULL, /* set_enum */
+		NULL, /* set_fixed */
+		NULL, /* give_fixed */
+		/* compound getters */
+		avro_generic_array_get_size,
+		avro_generic_array_get_by_index,
+		NULL, /* get_by_name */
+		NULL, /* get_discriminant */
+		NULL, /* get_current_branch */
+		/* compound setters */
+		avro_generic_array_append,
+		NULL, /* add */
+		NULL  /* set_branch */
+	},
+	avro_generic_array_instance_size,
+	avro_generic_array_init,
+	avro_generic_array_done
+};
+
+static avro_generic_value_iface_t *
+avro_generic_array_class(avro_schema_t schema, memoize_state_t *state)
+{
+	avro_schema_t  child_schema = avro_schema_array_items(schema);
+	avro_generic_value_iface_t  *child_giface =
+	    avro_generic_class_from_schema_memoized(child_schema, state);
+	if (child_giface == NULL) {
+		return NULL;
+	}
+
+	size_t  child_size = avro_value_instance_size(child_giface);
+	if (child_size == 0) {
+		avro_set_error("Array item class must provide instance_size");
+		avro_value_iface_decref(&child_giface->parent);
+		return NULL;
+	}
+
+	avro_generic_array_value_iface_t  *iface =
+		(avro_generic_array_value_iface_t *) avro_new(avro_generic_array_value_iface_t);
+	if (iface == NULL) {
+		avro_value_iface_decref(&child_giface->parent);
+		return NULL;
+	}
+
+	/*
+	 * TODO: Maybe check that schema.items matches
+	 * child_iface.get_schema?
+	 */
+
+	iface->parent = AVRO_GENERIC_ARRAY_CLASS;
+	iface->refcount = 1;
+	iface->schema = avro_schema_incref(schema);
+	iface->child_giface = child_giface;
+	return &iface->parent;
+}
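+
+/*
+ * A minimal usage sketch (editor's illustration): build a generic value
+ * for an array-of-int schema and append one element.  Error handling is
+ * elided.
+ *
+ *     avro_schema_t  schema = avro_schema_array(avro_schema_int());
+ *     avro_value_iface_t  *iface = avro_generic_class_from_schema(schema);
+ *     avro_value_t  array, child;
+ *     size_t  new_index;
+ *     avro_generic_value_new(iface, &array);
+ *     avro_value_append(&array, &child, &new_index);   // new_index == 0
+ *     avro_value_set_int(&child, 42);
+ *     avro_value_decref(&array);
+ *     avro_value_iface_decref(iface);
+ *     avro_schema_decref(schema);
+ */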
+
+
+/*-----------------------------------------------------------------------
+ * enum
+ */
+
+typedef struct avro_generic_enum_value_iface {
+	avro_generic_value_iface_t  parent;
+	volatile int  refcount;
+	avro_schema_t  schema;
+} avro_generic_enum_value_iface_t;
+
+
+static avro_value_iface_t *
+avro_generic_enum_incref_iface(avro_value_iface_t *viface)
+{
+	avro_generic_enum_value_iface_t  *iface =
+	    container_of(viface, avro_generic_enum_value_iface_t, parent);
+	avro_refcount_inc(&iface->refcount);
+	return viface;
+}
+
+static void
+avro_generic_enum_decref_iface(avro_value_iface_t *viface)
+{
+	avro_generic_enum_value_iface_t  *iface =
+	    container_of(viface, avro_generic_enum_value_iface_t, parent);
+	if (avro_refcount_dec(&iface->refcount)) {
+		avro_schema_decref(iface->schema);
+		avro_freet(avro_generic_enum_value_iface_t, iface);
+	}
+}
+
+static int
+avro_generic_enum_reset(const avro_value_iface_t *viface, void *vself)
+{
+	AVRO_UNUSED(viface);
+	int  *self = (int *) vself;
+	*self = 0;
+	return 0;
+}
+
+static avro_type_t
+avro_generic_enum_get_type(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return AVRO_ENUM;
+}
+
+static avro_schema_t
+avro_generic_enum_get_schema(const avro_value_iface_t *viface, const void *vself)
+{
+	const avro_generic_enum_value_iface_t  *iface =
+	    container_of(viface, avro_generic_enum_value_iface_t, parent);
+	AVRO_UNUSED(vself);
+	return iface->schema;
+}
+
+static int
+avro_generic_enum_get(const avro_value_iface_t *viface,
+		      const void *vself, int *out)
+{
+	AVRO_UNUSED(viface);
+	const int  *self = (const int *) vself;
+	*out = *self;
+	return 0;
+}
+
+static int
+avro_generic_enum_set(const avro_value_iface_t *viface,
+		      void *vself, int val)
+{
+	AVRO_UNUSED(viface);
+	int  *self = (int *) vself;
+	*self = val;
+	return 0;
+}
+
+static size_t
+avro_generic_enum_instance_size(const avro_value_iface_t *viface)
+{
+	AVRO_UNUSED(viface);
+	return sizeof(int);
+}
+
+static int
+avro_generic_enum_init(const avro_value_iface_t *viface, void *vself)
+{
+	AVRO_UNUSED(viface);
+	int  *self = (int *) vself;
+	*self = 0;
+	return 0;
+}
+
+static void
+avro_generic_enum_done(const avro_value_iface_t *viface, void *vself)
+{
+	AVRO_UNUSED(viface);
+	AVRO_UNUSED(vself);
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_ENUM_CLASS =
+{
+	{
+		/* "class" methods */
+		avro_generic_enum_incref_iface,
+		avro_generic_enum_decref_iface,
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_enum_reset,
+		avro_generic_enum_get_type,
+		avro_generic_enum_get_schema,
+		/* primitive getters */
+		NULL, /* get_boolean */
+		NULL, /* get_bytes */
+		NULL, /* grab_bytes */
+		NULL, /* get_double */
+		NULL, /* get_float */
+		NULL, /* get_int */
+		NULL, /* get_long */
+		NULL, /* get_null */
+		NULL, /* get_string */
+		NULL, /* grab_string */
+		avro_generic_enum_get,
+		NULL, /* get_fixed */
+		NULL, /* grab_fixed */
+		/* primitive setters */
+		NULL, /* set_boolean */
+		NULL, /* set_bytes */
+		NULL, /* give_bytes */
+		NULL, /* set_double */
+		NULL, /* set_float */
+		NULL, /* set_int */
+		NULL, /* set_long */
+		NULL, /* set_null */
+		NULL, /* set_string */
+		NULL, /* set_string_len */
+		NULL, /* give_string_len */
+		avro_generic_enum_set,
+		NULL, /* set_fixed */
+		NULL, /* give_fixed */
+		/* compound getters */
+		NULL, /* get_size */
+		NULL, /* get_by_index */
+		NULL, /* get_by_name */
+		NULL, /* get_discriminant */
+		NULL, /* get_current_branch */
+		/* compound setters */
+		NULL, /* append */
+		NULL, /* add */
+		NULL  /* set_branch */
+	},
+	avro_generic_enum_instance_size,
+	avro_generic_enum_init,
+	avro_generic_enum_done
+};
+
+static avro_generic_value_iface_t *
+avro_generic_enum_class(avro_schema_t schema)
+{
+	avro_generic_enum_value_iface_t  *iface =
+		(avro_generic_enum_value_iface_t *) avro_new(avro_generic_enum_value_iface_t);
+	if (iface == NULL) {
+		return NULL;
+	}
+
+	iface->parent = AVRO_GENERIC_ENUM_CLASS;
+	iface->refcount = 1;
+	iface->schema = avro_schema_incref(schema);
+	return &iface->parent;
+}
+
+
+/*-----------------------------------------------------------------------
+ * fixed
+ */
+
+typedef struct avro_generic_fixed_value_iface {
+	avro_generic_value_iface_t  parent;
+	volatile int  refcount;
+	avro_schema_t  schema;
+	size_t  data_size;
+} avro_generic_fixed_value_iface_t;
+
+
+static avro_value_iface_t *
+avro_generic_fixed_incref_iface(avro_value_iface_t *viface)
+{
+	avro_generic_fixed_value_iface_t  *iface =
+	    container_of(viface, avro_generic_fixed_value_iface_t, parent);
+	avro_refcount_inc(&iface->refcount);
+	return viface;
+}
+
+static void
+avro_generic_fixed_decref_iface(avro_value_iface_t *viface)
+{
+	avro_generic_fixed_value_iface_t  *iface =
+	    container_of(viface, avro_generic_fixed_value_iface_t, parent);
+	if (avro_refcount_dec(&iface->refcount)) {
+		avro_schema_decref(iface->schema);
+		avro_freet(avro_generic_fixed_value_iface_t, iface);
+	}
+}
+
+static int
+avro_generic_fixed_reset(const avro_value_iface_t *viface, void *vself)
+{
+	const avro_generic_fixed_value_iface_t  *iface =
+	    container_of(viface, avro_generic_fixed_value_iface_t, parent);
+	memset(vself, 0, iface->data_size);
+	return 0;
+}
+
+static avro_type_t
+avro_generic_fixed_get_type(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	AVRO_UNUSED(vself);
+	return AVRO_FIXED;
+}
+
+static avro_schema_t
+avro_generic_fixed_get_schema(const avro_value_iface_t *viface, const void *vself)
+{
+	const avro_generic_fixed_value_iface_t  *iface =
+	    container_of(viface, avro_generic_fixed_value_iface_t, parent);
+	AVRO_UNUSED(vself);
+	return iface->schema;
+}
+
+static int
+avro_generic_fixed_get(const avro_value_iface_t *viface,
+		       const void *vself, const void **buf, size_t *size)
+{
+	const avro_generic_fixed_value_iface_t  *iface =
+	    container_of(viface, avro_generic_fixed_value_iface_t, parent);
+	if (buf != NULL) {
+		*buf = vself;
+	}
+	if (size != NULL) {
+		*size = iface->data_size;
+	}
+	return 0;
+}
+
+static int
+avro_generic_fixed_grab(const avro_value_iface_t *viface,
+			const void *vself, avro_wrapped_buffer_t *dest)
+{
+	const avro_generic_fixed_value_iface_t  *iface =
+	    container_of(viface, avro_generic_fixed_value_iface_t, parent);
+	return avro_wrapped_buffer_new(dest, vself, iface->data_size);
+}
+
+static int
+avro_generic_fixed_set(const avro_value_iface_t *viface,
+		       void *vself, void *buf, size_t size)
+{
+	check_param(EINVAL, buf != NULL, "fixed contents");
+	const avro_generic_fixed_value_iface_t  *iface =
+	    container_of(viface, avro_generic_fixed_value_iface_t, parent);
+	if (size != iface->data_size) {
+		avro_set_error("Invalid data size in set_fixed");
+		return EINVAL;
+	}
+	memcpy(vself, buf, size);
+	return 0;
+}
+
+static int
+avro_generic_fixed_give(const avro_value_iface_t *viface,
+			void *vself, avro_wrapped_buffer_t *buf)
+{
+	int  rval = avro_generic_fixed_set
+	    (viface, vself, (void *) buf->buf, buf->size);
+	avro_wrapped_buffer_free(buf);
+	return rval;
+}
+
+static size_t
+avro_generic_fixed_instance_size(const avro_value_iface_t *viface)
+{
+	const avro_generic_fixed_value_iface_t  *iface =
+	    container_of(viface, avro_generic_fixed_value_iface_t, parent);
+	return iface->data_size;
+}
+
+static int
+avro_generic_fixed_init(const avro_value_iface_t *viface, void *vself)
+{
+	const avro_generic_fixed_value_iface_t  *iface =
+	    container_of(viface, avro_generic_fixed_value_iface_t, parent);
+	memset(vself, 0, iface->data_size);
+	return 0;
+}
+
+static void
+avro_generic_fixed_done(const avro_value_iface_t *viface, void *vself)
+{
+	AVRO_UNUSED(viface);
+	AVRO_UNUSED(vself);
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_FIXED_CLASS =
+{
+	{
+		/* "class" methods */
+		avro_generic_fixed_incref_iface,
+		avro_generic_fixed_decref_iface,
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_fixed_reset,
+		avro_generic_fixed_get_type,
+		avro_generic_fixed_get_schema,
+		/* primitive getters */
+		NULL, /* get_boolean */
+		NULL, /* get_bytes */
+		NULL, /* grab_bytes */
+		NULL, /* get_double */
+		NULL, /* get_float */
+		NULL, /* get_int */
+		NULL, /* get_long */
+		NULL, /* get_null */
+		NULL, /* get_string */
+		NULL, /* grab_string */
+		NULL, /* get_enum */
+		avro_generic_fixed_get,
+		avro_generic_fixed_grab,
+		/* primitive setters */
+		NULL, /* set_boolean */
+		NULL, /* set_bytes */
+		NULL, /* give_bytes */
+		NULL, /* set_double */
+		NULL, /* set_float */
+		NULL, /* set_int */
+		NULL, /* set_long */
+		NULL, /* set_null */
+		NULL, /* set_string */
+		NULL, /* set_string_len */
+		NULL, /* give_string_len */
+		NULL, /* set_enum */
+		avro_generic_fixed_set,
+		avro_generic_fixed_give,
+		/* compound getters */
+		NULL, /* get_size */
+		NULL, /* get_by_index */
+		NULL, /* get_by_name */
+		NULL, /* get_discriminant */
+		NULL, /* get_current_branch */
+		/* compound setters */
+		NULL, /* append */
+		NULL, /* add */
+		NULL  /* set_branch */
+	},
+	avro_generic_fixed_instance_size,
+	avro_generic_fixed_init,
+	avro_generic_fixed_done
+};
+
+static avro_generic_value_iface_t *
+avro_generic_fixed_class(avro_schema_t schema)
+{
+	avro_generic_fixed_value_iface_t  *iface =
+		(avro_generic_fixed_value_iface_t *) avro_new(avro_generic_fixed_value_iface_t);
+	if (iface == NULL) {
+		return NULL;
+	}
+
+	iface->parent = AVRO_GENERIC_FIXED_CLASS;
+	iface->refcount = 1;
+	iface->schema = avro_schema_incref(schema);
+	iface->data_size = avro_schema_fixed_size(schema);
+	return &iface->parent;
+}
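+
+/* Illustrative usage sketch (not part of this file): with the public
+ * avro_value API, a generic fixed value created from a 4-byte fixed
+ * schema could be filled and read back roughly like this:
+ *
+ *     char  raw[4] = { 1, 2, 3, 4 };
+ *     avro_value_set_fixed(&value, raw, sizeof(raw));
+ *     const void  *buf;
+ *     size_t  size;
+ *     avro_value_get_fixed(&value, &buf, &size);
+ */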
+
+
+/*-----------------------------------------------------------------------
+ * map
+ */
+
+/*
+ * For generic maps, we need to store the value implementation for the
+ * map's elements.
+ */
+
+typedef struct avro_generic_map_value_iface {
+	avro_generic_value_iface_t  parent;
+	volatile int  refcount;
+	avro_schema_t  schema;
+	avro_generic_value_iface_t  *child_giface;
+} avro_generic_map_value_iface_t;
+
+typedef struct avro_generic_map {
+	avro_raw_map_t  map;
+} avro_generic_map_t;
+
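+/* Illustrative usage sketch (public avro_value API, not part of this
+ * file): adding an entry to a generic map value and reading it back.
+ * The "long" setter assumes a map whose values schema is "long".
+ *
+ *     avro_value_t  element;
+ *     size_t  index;
+ *     int  is_new;
+ *     avro_value_add(&map, "key", &element, &index, &is_new);
+ *     avro_value_set_long(&element, 42);
+ *     avro_value_get_by_name(&map, "key", &element, NULL);
+ */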
+
+static avro_value_iface_t *
+avro_generic_map_incref_iface(avro_value_iface_t *viface)
+{
+	avro_generic_map_value_iface_t  *iface =
+	    container_of(viface, avro_generic_map_value_iface_t, parent);
+	avro_refcount_inc(&iface->refcount);
+	return viface;
+}
+
+static void
+avro_generic_map_decref_iface(avro_value_iface_t *viface)
+{
+	avro_generic_map_value_iface_t  *iface =
+	    container_of(viface, avro_generic_map_value_iface_t, parent);
+	if (avro_refcount_dec(&iface->refcount)) {
+		avro_schema_decref(iface->schema);
+		avro_value_iface_decref(&iface->child_giface->parent);
+		avro_freet(avro_generic_map_value_iface_t, iface);
+	}
+}
+
+
+static void
+avro_generic_map_free_elements(const avro_generic_value_iface_t *child_giface,
+			       avro_generic_map_t *self)
+{
+	size_t  i;
+	for (i = 0; i < avro_raw_map_size(&self->map); i++) {
+		void  *child_self = avro_raw_map_get_raw(&self->map, i);
+		avro_value_done(child_giface, child_self);
+	}
+}
+
+static int
+avro_generic_map_reset(const avro_value_iface_t *viface, void *vself)
+{
+	const avro_generic_map_value_iface_t  *iface =
+	    container_of(viface, avro_generic_map_value_iface_t, parent);
+	avro_generic_map_t  *self = (avro_generic_map_t *) vself;
+	avro_generic_map_free_elements(iface->child_giface, self);
+	avro_raw_map_clear(&self->map);
+	return 0;
+}
+
+static avro_type_t
+avro_generic_map_get_type(const avro_value_iface_t *viface, const void *vself)
+{
+	AVRO_UNUSED(viface);
+	AVRO_UNUSED(vself);
+	return AVRO_MAP;
+}
+
+static avro_schema_t
+avro_generic_map_get_schema(const avro_value_iface_t *viface, const void *vself)
+{
+	const avro_generic_map_value_iface_t  *iface =
+	    container_of(viface, avro_generic_map_value_iface_t, parent);
+	AVRO_UNUSED(vself);
+	return iface->schema;
+}
+
+static int
+avro_generic_map_get_size(const avro_value_iface_t *viface,
+			  const void *vself, size_t *size)
+{
+	AVRO_UNUSED(viface);
+	const avro_generic_map_t  *self = (const avro_generic_map_t *) vself;
+	if (size != NULL) {
+		*size = avro_raw_map_size(&self->map);
+	}
+	return 0;
+}
+
+static int
+avro_generic_map_get_by_index(const avro_value_iface_t *viface,
+			      const void *vself, size_t index,
+			      avro_value_t *child, const char **name)
+{
+	const avro_generic_map_value_iface_t  *iface =
+	    container_of(viface, avro_generic_map_value_iface_t, parent);
+	const avro_generic_map_t  *self = (const avro_generic_map_t *) vself;
+	if (index >= avro_raw_map_size(&self->map)) {
+		avro_set_error("Map index %" PRIsz " out of range", index);
+		return EINVAL;
+	}
+	child->iface = &iface->child_giface->parent;
+	child->self = avro_raw_map_get_raw(&self->map, index);
+	if (name != NULL) {
+		*name = avro_raw_map_get_key(&self->map, index);
+	}
+	return 0;
+}
+
+static int
+avro_generic_map_get_by_name(const avro_value_iface_t *viface,
+			     const void *vself, const char *name,
+			     avro_value_t *child, size_t *index)
+{
+	const avro_generic_map_value_iface_t  *iface =
+	    container_of(viface, avro_generic_map_value_iface_t, parent);
+	const avro_generic_map_t  *self = (const avro_generic_map_t *) vself;
+	child->iface = &iface->child_giface->parent;
+	child->self = avro_raw_map_get(&self->map, name, index);
+	if (child->self == NULL) {
+		avro_set_error("No map element named %s", name);
+		return EINVAL;
+	}
+	return 0;
+}
+
+static int
+avro_generic_map_add(const avro_value_iface_t *viface,
+		     void *vself, const char *key,
+		     avro_value_t *child, size_t *index, int *is_new)
+{
+	const avro_generic_map_value_iface_t  *iface =
+	    container_of(viface, avro_generic_map_value_iface_t, parent);
+	int  rval;
+	avro_generic_map_t  *self = (avro_generic_map_t *) vself;
+	child->iface = &iface->child_giface->parent;
+	rval = avro_raw_map_get_or_create(&self->map, key,
+					  &child->self, index);
+	if (rval < 0) {
+		return -rval;
+	}
+	if (is_new != NULL) {
+		*is_new = rval;
+	}
+	if (rval) {
+		check(rval, avro_value_init(iface->child_giface, child->self));
+	}
+	return 0;
+}
+
+static size_t
+avro_generic_map_instance_size(const avro_value_iface_t *viface)
+{
+	AVRO_UNUSED(viface);
+	return sizeof(avro_generic_map_t);
+}
+
+static int
+avro_generic_map_init(const avro_value_iface_t *viface, void *vself)
+{
+	const avro_generic_map_value_iface_t  *iface =
+	    container_of(viface, avro_generic_map_value_iface_t, parent);
+	avro_generic_map_t  *self = (avro_generic_map_t *) vself;
+
+	size_t  child_size = avro_value_instance_size(iface->child_giface);
+	avro_raw_map_init(&self->map, child_size);
+	return 0;
+}
+
+static void
+avro_generic_map_done(const avro_value_iface_t *viface, void *vself)
+{
+	const avro_generic_map_value_iface_t  *iface =
+	    container_of(viface, avro_generic_map_value_iface_t, parent);
+	avro_generic_map_t  *self = (avro_generic_map_t *) vself;
+	avro_generic_map_free_elements(iface->child_giface, self);
+	avro_raw_map_done(&self->map);
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_MAP_CLASS =
+{
+	{
+		/* "class" methods */
+		avro_generic_map_incref_iface,
+		avro_generic_map_decref_iface,
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_map_reset,
+		avro_generic_map_get_type,
+		avro_generic_map_get_schema,
+		/* primitive getters */
+		NULL, /* get_boolean */
+		NULL, /* get_bytes */
+		NULL, /* grab_bytes */
+		NULL, /* get_double */
+		NULL, /* get_float */
+		NULL, /* get_int */
+		NULL, /* get_long */
+		NULL, /* get_null */
+		NULL, /* get_string */
+		NULL, /* grab_string */
+		NULL, /* get_enum */
+		NULL, /* get_fixed */
+		NULL, /* grab_fixed */
+		/* primitive setters */
+		NULL, /* set_boolean */
+		NULL, /* set_bytes */
+		NULL, /* give_bytes */
+		NULL, /* set_double */
+		NULL, /* set_float */
+		NULL, /* set_int */
+		NULL, /* set_long */
+		NULL, /* set_null */
+		NULL, /* set_string */
+		NULL, /* set_string_length */
+		NULL, /* give_string_length */
+		NULL, /* set_enum */
+		NULL, /* set_fixed */
+		NULL, /* give_fixed */
+		/* compound getters */
+		avro_generic_map_get_size,
+		avro_generic_map_get_by_index,
+		avro_generic_map_get_by_name,
+		NULL, /* get_discriminant */
+		NULL, /* get_current_branch */
+		/* compound setters */
+		NULL, /* append */
+		avro_generic_map_add,
+		NULL  /* set_branch */
+	},
+	avro_generic_map_instance_size,
+	avro_generic_map_init,
+	avro_generic_map_done
+};
+
+static avro_generic_value_iface_t *
+avro_generic_map_class(avro_schema_t schema, memoize_state_t *state)
+{
+	avro_schema_t  child_schema = avro_schema_map_values(schema);
+	avro_generic_value_iface_t  *child_giface =
+	    avro_generic_class_from_schema_memoized(child_schema, state);
+	if (child_giface == NULL) {
+		return NULL;
+	}
+
+	size_t  child_size = avro_value_instance_size(child_giface);
+	if (child_size == 0) {
+		avro_set_error("Map value class must provide instance_size");
+		avro_value_iface_decref(&child_giface->parent);
+		return NULL;
+	}
+
+	avro_generic_map_value_iface_t  *iface =
+		(avro_generic_map_value_iface_t *) avro_new(avro_generic_map_value_iface_t);
+	if (iface == NULL) {
+		avro_value_iface_decref(&child_giface->parent);
+		return NULL;
+	}
+
+	/*
+	 * TODO: Maybe check that schema.values matches
+	 * child_iface.get_schema?
+	 */
+
+	iface->parent = AVRO_GENERIC_MAP_CLASS;
+	iface->refcount = 1;
+	iface->schema = avro_schema_incref(schema);
+	iface->child_giface = child_giface;
+	return &iface->parent;
+}
+
+
+/*-----------------------------------------------------------------------
+ * record
+ */
+
+#ifndef DEBUG_FIELD_OFFSETS
+#define DEBUG_FIELD_OFFSETS 0
+#endif
+
+#if DEBUG_FIELD_OFFSETS
+#include <stdio.h>
+#endif
+
+/*
+ * For generic records, we need to store the value implementation for
+ * each field.  We also need to store an offset for each field, since
+ * we're going to store the contents of each field directly in the
+ * record, rather than via pointers.
+ */
+
+typedef struct avro_generic_record_value_iface {
+	avro_generic_value_iface_t  parent;
+	volatile int  refcount;
+	avro_schema_t  schema;
+
+	/** The total size of each value struct for this record. */
+	size_t  instance_size;
+
+	/** The number of fields in this record.  Yes, we could get this
+	 * from schema, but this is easier. */
+	size_t  field_count;
+
+	/** The offset of each field within the record struct. */
+	size_t  *field_offsets;
+
+	/** The value implementation for each field. */
+	avro_generic_value_iface_t  **field_ifaces;
+} avro_generic_record_value_iface_t;
+
+typedef struct avro_generic_record {
+	/* The rest of the struct is taken up by the inline storage
+	 * needed for each field. */
+} avro_generic_record_t;
+
+
+/** Return a pointer to the given field within a record struct. */
+#define avro_generic_record_field(iface, rec, index) \
+	(((char *) (rec)) + (iface)->field_offsets[(index)])
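+
+/* Layout sketch: for a record with fields {long a; double b;}, a single
+ * allocation holds both fields inline, and the macro above is plain
+ * pointer arithmetic over precomputed offsets:
+ *
+ *     rec + field_offsets[0]  ->  storage for "a"
+ *     rec + field_offsets[1]  ->  storage for "b"
+ *
+ * The offsets are assigned sequentially in avro_generic_record_class
+ * below, starting at sizeof(avro_generic_record_t).
+ */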
+
+
+static avro_value_iface_t *
+avro_generic_record_incref_iface(avro_value_iface_t *viface)
+{
+	avro_generic_record_value_iface_t  *iface =
+	    container_of(viface, avro_generic_record_value_iface_t, parent);
+	avro_refcount_inc(&iface->refcount);
+	return viface;
+}
+
+static void
+avro_generic_record_decref_iface(avro_value_iface_t *viface)
+{
+	avro_generic_record_value_iface_t  *iface =
+	    container_of(viface, avro_generic_record_value_iface_t, parent);
+
+	if (avro_refcount_dec(&iface->refcount)) {
+		size_t  i;
+		for (i = 0; i < iface->field_count; i++) {
+			avro_value_iface_decref(&iface->field_ifaces[i]->parent);
+		}
+
+		avro_schema_decref(iface->schema);
+		avro_free(iface->field_offsets,
+			  sizeof(size_t) * iface->field_count);
+		avro_free(iface->field_ifaces,
+			  sizeof(avro_generic_value_iface_t *) * iface->field_count);
+
+		avro_freet(avro_generic_record_value_iface_t, iface);
+	}
+}
+
+
+static int
+avro_generic_record_reset(const avro_value_iface_t *viface, void *vself)
+{
+	const avro_generic_record_value_iface_t  *iface =
+	    container_of(viface, avro_generic_record_value_iface_t, parent);
+	int  rval;
+	avro_generic_record_t  *self = (avro_generic_record_t *) vself;
+	size_t  i;
+	for (i = 0; i < iface->field_count; i++) {
+		avro_value_t  value = {
+			&iface->field_ifaces[i]->parent,
+			avro_generic_record_field(iface, self, i)
+		};
+		check(rval, avro_value_reset(&value));
+	}
+	return 0;
+}
+
+static avro_type_t
+avro_generic_record_get_type(const avro_value_iface_t *viface, const void *vself)
+{
+	AVRO_UNUSED(viface);
+	AVRO_UNUSED(vself);
+	return AVRO_RECORD;
+}
+
+static avro_schema_t
+avro_generic_record_get_schema(const avro_value_iface_t *viface, const void *vself)
+{
+	const avro_generic_record_value_iface_t  *iface =
+	    container_of(viface, avro_generic_record_value_iface_t, parent);
+	AVRO_UNUSED(vself);
+	return iface->schema;
+}
+
+static int
+avro_generic_record_get_size(const avro_value_iface_t *viface,
+			     const void *vself, size_t *size)
+{
+	const avro_generic_record_value_iface_t  *iface =
+	    container_of(viface, avro_generic_record_value_iface_t, parent);
+	AVRO_UNUSED(vself);
+	if (size != NULL) {
+		*size = iface->field_count;
+	}
+	return 0;
+}
+
+static int
+avro_generic_record_get_by_index(const avro_value_iface_t *viface,
+				 const void *vself, size_t index,
+				 avro_value_t *child, const char **name)
+{
+	const avro_generic_record_value_iface_t  *iface =
+	    container_of(viface, avro_generic_record_value_iface_t, parent);
+	const avro_generic_record_t  *self = (const avro_generic_record_t *) vself;
+	if (index >= iface->field_count) {
+		avro_set_error("Field index %" PRIsz " out of range", index);
+		return EINVAL;
+	}
+	child->iface = &iface->field_ifaces[index]->parent;
+	child->self = avro_generic_record_field(iface, self, index);
+
+	/*
+	 * Grab the field name from the schema if asked for.
+	 */
+	if (name != NULL) {
+		avro_schema_t  schema = iface->schema;
+		*name = avro_schema_record_field_name(schema, index);
+	}
+
+	return 0;
+}
+
+static int
+avro_generic_record_get_by_name(const avro_value_iface_t *viface,
+				const void *vself, const char *name,
+				avro_value_t *child, size_t *index_out)
+{
+	const avro_generic_record_value_iface_t  *iface =
+	    container_of(viface, avro_generic_record_value_iface_t, parent);
+	const avro_generic_record_t  *self = (const avro_generic_record_t *) vself;
+
+	avro_schema_t  schema = iface->schema;
+	int  index = avro_schema_record_field_get_index(schema, name);
+	if (index < 0) {
+		avro_set_error("Unknown record field %s", name);
+		return EINVAL;
+	}
+
+	child->iface = &iface->field_ifaces[index]->parent;
+	child->self = avro_generic_record_field(iface, self, index);
+	if (index_out != NULL) {
+		*index_out = index;
+	}
+	return 0;
+}
+
+static size_t
+avro_generic_record_instance_size(const avro_value_iface_t *viface)
+{
+	const avro_generic_record_value_iface_t  *iface =
+	    container_of(viface, avro_generic_record_value_iface_t, parent);
+	return iface->instance_size;
+}
+
+static int
+avro_generic_record_init(const avro_value_iface_t *viface, void *vself)
+{
+	int  rval;
+	const avro_generic_record_value_iface_t  *iface =
+	    container_of(viface, avro_generic_record_value_iface_t, parent);
+	avro_generic_record_t  *self = (avro_generic_record_t *) vself;
+
+	/* Initialize each field */
+	size_t  i;
+	for (i = 0; i < iface->field_count; i++) {
+		check(rval, avro_value_init
+		      (iface->field_ifaces[i],
+		       avro_generic_record_field(iface, self, i)));
+	}
+
+	return 0;
+}
+
+static void
+avro_generic_record_done(const avro_value_iface_t *viface, void *vself)
+{
+	const avro_generic_record_value_iface_t  *iface =
+	    container_of(viface, avro_generic_record_value_iface_t, parent);
+	avro_generic_record_t  *self = (avro_generic_record_t *) vself;
+	size_t  i;
+	for (i = 0; i < iface->field_count; i++) {
+		avro_value_done(iface->field_ifaces[i],
+				avro_generic_record_field(iface, self, i));
+	}
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_RECORD_CLASS =
+{
+	{
+		/* "class" methods */
+		avro_generic_record_incref_iface,
+		avro_generic_record_decref_iface,
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_record_reset,
+		avro_generic_record_get_type,
+		avro_generic_record_get_schema,
+		/* primitive getters */
+		NULL, /* get_boolean */
+		NULL, /* get_bytes */
+		NULL, /* grab_bytes */
+		NULL, /* get_double */
+		NULL, /* get_float */
+		NULL, /* get_int */
+		NULL, /* get_long */
+		NULL, /* get_null */
+		NULL, /* get_string */
+		NULL, /* grab_string */
+		NULL, /* get_enum */
+		NULL, /* get_fixed */
+		NULL, /* grab_fixed */
+		/* primitive setters */
+		NULL, /* set_boolean */
+		NULL, /* set_bytes */
+		NULL, /* give_bytes */
+		NULL, /* set_double */
+		NULL, /* set_float */
+		NULL, /* set_int */
+		NULL, /* set_long */
+		NULL, /* set_null */
+		NULL, /* set_string */
+		NULL, /* set_string_length */
+		NULL, /* give_string_length */
+		NULL, /* set_enum */
+		NULL, /* set_fixed */
+		NULL, /* give_fixed */
+		/* compound getters */
+		avro_generic_record_get_size,
+		avro_generic_record_get_by_index,
+		avro_generic_record_get_by_name,
+		NULL, /* get_discriminant */
+		NULL, /* get_current_branch */
+		/* compound setters */
+		NULL, /* append */
+		NULL, /* add */
+		NULL  /* set_branch */
+	},
+	avro_generic_record_instance_size,
+	avro_generic_record_init,
+	avro_generic_record_done
+};
+
+static avro_generic_value_iface_t *
+avro_generic_record_class(avro_schema_t schema, memoize_state_t *state)
+{
+	avro_generic_record_value_iface_t  *iface =
+		(avro_generic_record_value_iface_t *) avro_new(avro_generic_record_value_iface_t);
+	if (iface == NULL) {
+		return NULL;
+	}
+
+	memset(iface, 0, sizeof(avro_generic_record_value_iface_t));
+	iface->parent = AVRO_GENERIC_RECORD_CLASS;
+	iface->refcount = 1;
+	iface->schema = avro_schema_incref(schema);
+
+	iface->field_count = avro_schema_record_size(schema);
+	size_t  field_offsets_size =
+		sizeof(size_t) * iface->field_count;
+	size_t  field_ifaces_size =
+		sizeof(avro_generic_value_iface_t *) * iface->field_count;
+
+	iface->field_offsets = (size_t *) avro_malloc(field_offsets_size);
+	if (iface->field_offsets == NULL) {
+		goto error;
+	}
+
+	iface->field_ifaces = (avro_generic_value_iface_t **) avro_malloc(field_ifaces_size);
+	if (iface->field_ifaces == NULL) {
+		goto error;
+	}
+	/* Zero the array so the error path below can safely NULL-check
+	 * entries that were never assigned. */
+	memset(iface->field_ifaces, 0, field_ifaces_size);
+
+	size_t  next_offset = sizeof(avro_generic_record_t);
+#if DEBUG_FIELD_OFFSETS
+	fprintf(stderr, "  Record %s\n  Header: Offset 0, size %" PRIsz "\n",
+		avro_schema_type_name(schema),
+		sizeof(avro_generic_record_t));
+#endif
+	size_t  i;
+	for (i = 0; i < iface->field_count; i++) {
+#if DEBUG_FIELD_OFFSETS
+		fprintf(stderr, "  Field %" PRIsz ":\n", i);
+#endif
+		avro_schema_t  field_schema =
+		    avro_schema_record_field_get_by_index(schema, i);
+#if DEBUG_FIELD_OFFSETS
+		fprintf(stderr, "    Schema %s\n",
+			avro_schema_type_name(field_schema));
+#endif
+
+		iface->field_offsets[i] = next_offset;
+
+		iface->field_ifaces[i] =
+		    avro_generic_class_from_schema_memoized(field_schema, state);
+		if (iface->field_ifaces[i] == NULL) {
+			goto error;
+		}
+
+		size_t  field_size =
+		    avro_value_instance_size(iface->field_ifaces[i]);
+		if (field_size == 0) {
+			avro_set_error("Record field class must provide instance_size");
+			goto error;
+		}
+
+#if DEBUG_FIELD_OFFSETS
+		fprintf(stderr, "    Offset %" PRIsz ", size %" PRIsz "\n",
+			next_offset, field_size);
+#endif
+		next_offset += field_size;
+	}
+
+	iface->instance_size = next_offset;
+#if DEBUG_FIELD_OFFSETS
+	fprintf(stderr, "  TOTAL SIZE: %" PRIsz "\n", next_offset);
+#endif
+
+	return &iface->parent;
+
+error:
+	avro_schema_decref(iface->schema);
+	if (iface->field_offsets != NULL) {
+		avro_free(iface->field_offsets, field_offsets_size);
+	}
+	if (iface->field_ifaces != NULL) {
+		for (i = 0; i < iface->field_count; i++) {
+			if (iface->field_ifaces[i] != NULL) {
+				avro_value_iface_decref(&iface->field_ifaces[i]->parent);
+			}
+		}
+		avro_free(iface->field_ifaces, field_ifaces_size);
+	}
+	avro_freet(avro_generic_record_value_iface_t, iface);
+	return NULL;
+}
+
+
+/*-----------------------------------------------------------------------
+ * union
+ */
+
+#ifndef DEBUG_BRANCHES_OFFSETS
+#define DEBUG_BRANCHES_OFFSETS 0
+#endif
+
+#if DEBUG_BRANCHES_OFFSETS
+#include <stdio.h>
+#endif
+
+/*
+ * For generic unions, we need to store the value implementation for
+ * each branch, just like for generic records.  However, for unions, we
+ * can only have one branch active at a time, so we can reuse the space
+ * in the union struct, just like is done with C unions.
+ */
+
+typedef struct avro_generic_union_value_iface {
+	avro_generic_value_iface_t  parent;
+	volatile int  refcount;
+	avro_schema_t  schema;
+
+	/** The total size of each value struct for this union. */
+	size_t  instance_size;
+
+	/** The number of branches in this union.  Yes, we could get
+	 * this from schema, but this is easier. */
+	size_t  branch_count;
+
+	/** The value implementation for each branch. */
+	avro_generic_value_iface_t  **branch_ifaces;
+} avro_generic_union_value_iface_t;
+
+typedef struct avro_generic_union {
+	/** The currently active branch of the union.  -1 if no branch
+	 * is selected. */
+	int  discriminant;
+
+	/* The rest of the struct is taken up by the inline storage
+	 * needed for the active branch. */
+} avro_generic_union_t;
+
+
+/** Return the child interface for the active branch. */
+#define avro_generic_union_branch_giface(iface, _union) \
+	((iface)->branch_ifaces[(_union)->discriminant])
+#define avro_generic_union_branch_iface(iface, _union) \
+	(&(avro_generic_union_branch_giface((iface), (_union)))->parent)
+
+/** Return a pointer to the active branch within a union struct. */
+#define avro_generic_union_branch(_union) \
+	(((char *) (_union)) + sizeof(avro_generic_union_t))
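+
+/* Illustrative usage sketch (public avro_value API, not part of this
+ * file): selecting branch 1 of a ["null", "long"] union and setting it.
+ *
+ *     avro_value_t  branch;
+ *     avro_value_set_branch(&union_val, 1, &branch);
+ *     avro_value_set_long(&branch, 42);
+ */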
+
+
+static avro_value_iface_t *
+avro_generic_union_incref_iface(avro_value_iface_t *viface)
+{
+	avro_generic_union_value_iface_t  *iface =
+	    container_of(viface, avro_generic_union_value_iface_t, parent);
+	avro_refcount_inc(&iface->refcount);
+	return viface;
+}
+
+static void
+avro_generic_union_decref_iface(avro_value_iface_t *viface)
+{
+	avro_generic_union_value_iface_t  *iface =
+	    container_of(viface, avro_generic_union_value_iface_t, parent);
+
+	if (avro_refcount_dec(&iface->refcount)) {
+		size_t  i;
+		for (i = 0; i < iface->branch_count; i++) {
+			avro_value_iface_decref(&iface->branch_ifaces[i]->parent);
+		}
+
+		avro_schema_decref(iface->schema);
+		avro_free(iface->branch_ifaces,
+			  sizeof(avro_generic_value_iface_t *) * iface->branch_count);
+
+		avro_freet(avro_generic_union_value_iface_t, iface);
+	}
+}
+
+
+static int
+avro_generic_union_reset(const avro_value_iface_t *viface, void *vself)
+{
+	const avro_generic_union_value_iface_t  *iface =
+	    container_of(viface, avro_generic_union_value_iface_t, parent);
+	avro_generic_union_t  *self = (avro_generic_union_t *) vself;
+	/* Keep the same branch selected, for the common case that we're
+	 * about to reuse it. */
+	if (self->discriminant >= 0) {
+#if DEBUG_BRANCHES
+		fprintf(stderr, "Resetting branch %d\n",
+			self->discriminant);
+#endif
+		avro_value_t  value = {
+			avro_generic_union_branch_iface(iface, self),
+			avro_generic_union_branch(self)
+		};
+		return avro_value_reset(&value);
+	}
+	return 0;
+}
+
+static avro_type_t
+avro_generic_union_get_type(const avro_value_iface_t *viface, const void *vself)
+{
+	AVRO_UNUSED(viface);
+	AVRO_UNUSED(vself);
+	return AVRO_UNION;
+}
+
+static avro_schema_t
+avro_generic_union_get_schema(const avro_value_iface_t *viface, const void *vself)
+{
+	const avro_generic_union_value_iface_t  *iface =
+	    container_of(viface, avro_generic_union_value_iface_t, parent);
+	AVRO_UNUSED(vself);
+	return iface->schema;
+}
+
+static int
+avro_generic_union_get_discriminant(const avro_value_iface_t *viface,
+				    const void *vself, int *out)
+{
+	AVRO_UNUSED(viface);
+	const avro_generic_union_t  *self = (const avro_generic_union_t *) vself;
+	*out = self->discriminant;
+	return 0;
+}
+
+static int
+avro_generic_union_get_current_branch(const avro_value_iface_t *viface,
+				      const void *vself, avro_value_t *branch)
+{
+	const avro_generic_union_value_iface_t  *iface =
+	    container_of(viface, avro_generic_union_value_iface_t, parent);
+	const avro_generic_union_t  *self = (const avro_generic_union_t *) vself;
+	if (self->discriminant < 0) {
+		avro_set_error("Union has no selected branch");
+		return EINVAL;
+	}
+	branch->iface = avro_generic_union_branch_iface(iface, self);
+	branch->self = avro_generic_union_branch(self);
+	return 0;
+}
+
+static int
+avro_generic_union_set_branch(const avro_value_iface_t *viface,
+			      void *vself, int discriminant,
+			      avro_value_t *branch)
+{
+	const avro_generic_union_value_iface_t  *iface =
+	    container_of(viface, avro_generic_union_value_iface_t, parent);
+	int  rval;
+	avro_generic_union_t  *self = (avro_generic_union_t *) vself;
+
+#if DEBUG_BRANCHES
+	fprintf(stderr, "Selecting branch %d (was %d)\n",
+		discriminant, self->discriminant);
+#endif
+
+	/*
+	 * If the new desired branch is different from the currently
+	 * active one, then finalize the old branch and initialize the
+	 * new one.
+	 */
+	if (self->discriminant != discriminant) {
+		if (self->discriminant >= 0) {
+#if DEBUG_BRANCHES
+			fprintf(stderr, "Finalizing branch %d\n",
+				self->discriminant);
+#endif
+			avro_value_done
+			    (avro_generic_union_branch_giface(iface, self),
+			     avro_generic_union_branch(self));
+		}
+		self->discriminant = discriminant;
+		if (discriminant >= 0) {
+#if DEBUG_BRANCHES
+			fprintf(stderr, "Initializing branch %d\n",
+				self->discriminant);
+#endif
+			check(rval, avro_value_init
+			      (avro_generic_union_branch_giface(iface, self),
+			       avro_generic_union_branch(self)));
+		}
+	}
+
+	if (branch != NULL) {
+		branch->iface = avro_generic_union_branch_iface(iface, self);
+		branch->self = avro_generic_union_branch(self);
+	}
+
+	return 0;
+}
+
+static size_t
+avro_generic_union_instance_size(const avro_value_iface_t *viface)
+{
+	const avro_generic_union_value_iface_t  *iface =
+	    container_of(viface, avro_generic_union_value_iface_t, parent);
+	return iface->instance_size;
+}
+
+static int
+avro_generic_union_init(const avro_value_iface_t *viface, void *vself)
+{
+	AVRO_UNUSED(viface);
+	avro_generic_union_t  *self = (avro_generic_union_t *) vself;
+	self->discriminant = -1;
+	return 0;
+}
+
+static void
+avro_generic_union_done(const avro_value_iface_t *viface, void *vself)
+{
+	const avro_generic_union_value_iface_t  *iface =
+	    container_of(viface, avro_generic_union_value_iface_t, parent);
+	avro_generic_union_t  *self = (avro_generic_union_t *) vself;
+	if (self->discriminant >= 0) {
+#if DEBUG_BRANCHES
+		fprintf(stderr, "Finalizing branch %d\n",
+			self->discriminant);
+#endif
+		avro_value_done
+		    (avro_generic_union_branch_giface(iface, self),
+		     avro_generic_union_branch(self));
+		self->discriminant = -1;
+	}
+}
+
+static avro_generic_value_iface_t  AVRO_GENERIC_UNION_CLASS =
+{
+	{
+		/* "class" methods */
+		avro_generic_union_incref_iface,
+		avro_generic_union_decref_iface,
+		/* general "instance" methods */
+		avro_generic_value_incref,
+		avro_generic_value_decref,
+		avro_generic_union_reset,
+		avro_generic_union_get_type,
+		avro_generic_union_get_schema,
+		/* primitive getters */
+		NULL, /* get_boolean */
+		NULL, /* get_bytes */
+		NULL, /* grab_bytes */
+		NULL, /* get_double */
+		NULL, /* get_float */
+		NULL, /* get_int */
+		NULL, /* get_long */
+		NULL, /* get_null */
+		NULL, /* get_string */
+		NULL, /* grab_string */
+		NULL, /* get_enum */
+		NULL, /* get_fixed */
+		NULL, /* grab_fixed */
+		/* primitive setters */
+		NULL, /* set_boolean */
+		NULL, /* set_bytes */
+		NULL, /* give_bytes */
+		NULL, /* set_double */
+		NULL, /* set_float */
+		NULL, /* set_int */
+		NULL, /* set_long */
+		NULL, /* set_null */
+		NULL, /* set_string */
+		NULL, /* set_string_length */
+		NULL, /* give_string_length */
+		NULL, /* set_enum */
+		NULL, /* set_fixed */
+		NULL, /* give_fixed */
+		/* compound getters */
+		NULL, /* get_size */
+		NULL, /* get_by_index */
+		NULL, /* get_by_name */
+		avro_generic_union_get_discriminant,
+		avro_generic_union_get_current_branch,
+		/* compound setters */
+		NULL, /* append */
+		NULL, /* add */
+		avro_generic_union_set_branch
+	},
+	avro_generic_union_instance_size,
+	avro_generic_union_init,
+	avro_generic_union_done
+};
+
+static avro_generic_value_iface_t *
+avro_generic_union_class(avro_schema_t schema, memoize_state_t *state)
+{
+	avro_generic_union_value_iface_t  *iface =
+		(avro_generic_union_value_iface_t *) avro_new(avro_generic_union_value_iface_t);
+	if (iface == NULL) {
+		return NULL;
+	}
+
+	memset(iface, 0, sizeof(avro_generic_union_value_iface_t));
+	iface->parent = AVRO_GENERIC_UNION_CLASS;
+	iface->refcount = 1;
+	iface->schema = avro_schema_incref(schema);
+
+	iface->branch_count = avro_schema_union_size(schema);
+	size_t  branch_ifaces_size =
+		sizeof(avro_generic_value_iface_t *) * iface->branch_count;
+
+	iface->branch_ifaces = (avro_generic_value_iface_t **) avro_malloc(branch_ifaces_size);
+	if (iface->branch_ifaces == NULL) {
+		goto error;
+	}
+	/* Zero the array so the error path below can safely NULL-check
+	 * entries that were never assigned. */
+	memset(iface->branch_ifaces, 0, branch_ifaces_size);
+
+	size_t  max_branch_size = 0;
+	size_t  i;
+	for (i = 0; i < iface->branch_count; i++) {
+		avro_schema_t  branch_schema =
+		    avro_schema_union_branch(schema, i);
+
+		iface->branch_ifaces[i] =
+		    avro_generic_class_from_schema_memoized(branch_schema, state);
+		if (iface->branch_ifaces[i] == NULL) {
+			goto error;
+		}
+
+		size_t  branch_size =
+		    avro_value_instance_size(iface->branch_ifaces[i]);
+		if (branch_size == 0) {
+			avro_set_error("Union branch class must provide instance_size");
+			goto error;
+		}
+
+#if DEBUG_BRANCHES
+		fprintf(stderr, "Branch %" PRIsz ", size %" PRIsz "\n",
+			i, branch_size);
+#endif
+
+		if (branch_size > max_branch_size) {
+			max_branch_size = branch_size;
+		}
+	}
+
+	iface->instance_size =
+		sizeof(avro_generic_union_t) + max_branch_size;
+#if DEBUG_BRANCHES
+	fprintf(stderr, "MAX BRANCH SIZE: %" PRIsz "\n", max_branch_size);
+#endif
+
+	return &iface->parent;
+
+error:
+	avro_schema_decref(iface->schema);
+	if (iface->branch_ifaces != NULL) {
+		for (i = 0; i < iface->branch_count; i++) {
+			if (iface->branch_ifaces[i] != NULL) {
+				avro_value_iface_decref(&iface->branch_ifaces[i]->parent);
+			}
+		}
+		avro_free(iface->branch_ifaces, branch_ifaces_size);
+	}
+	avro_freet(avro_generic_union_value_iface_t, iface);
+	return NULL;
+}
+
+
+/*-----------------------------------------------------------------------
+ * Schema type dispatcher
+ */
+
+static avro_generic_value_iface_t *
+avro_generic_class_from_schema_memoized(avro_schema_t schema,
+					memoize_state_t *state)
+{
+	/*
+	 * If we've already instantiated a value class for this schema,
+	 * just return it.
+	 */
+
+	avro_generic_value_iface_t  *result = NULL;
+	if (avro_memoize_get(&state->mem, schema, NULL, (void **) &result)) {
+		avro_value_iface_incref(&result->parent);
+		return result;
+	}
+
+	/*
+	 * Otherwise instantiate the value class based on the schema
+	 * type.
+	 */
+
+	switch (schema->type) {
+	case AVRO_BOOLEAN:
+		result = &AVRO_GENERIC_BOOLEAN_CLASS;
+		break;
+	case AVRO_BYTES:
+		result = &AVRO_GENERIC_BYTES_CLASS;
+		break;
+	case AVRO_DOUBLE:
+		result = &AVRO_GENERIC_DOUBLE_CLASS;
+		break;
+	case AVRO_FLOAT:
+		result = &AVRO_GENERIC_FLOAT_CLASS;
+		break;
+	case AVRO_INT32:
+		result = &AVRO_GENERIC_INT_CLASS;
+		break;
+	case AVRO_INT64:
+		result = &AVRO_GENERIC_LONG_CLASS;
+		break;
+	case AVRO_NULL:
+		result = &AVRO_GENERIC_NULL_CLASS;
+		break;
+	case AVRO_STRING:
+		result = &AVRO_GENERIC_STRING_CLASS;
+		break;
+
+	case AVRO_ARRAY:
+		result = avro_generic_array_class(schema, state);
+		break;
+	case AVRO_ENUM:
+		result = avro_generic_enum_class(schema);
+		break;
+	case AVRO_FIXED:
+		result = avro_generic_fixed_class(schema);
+		break;
+	case AVRO_MAP:
+		result = avro_generic_map_class(schema, state);
+		break;
+	case AVRO_RECORD:
+		result = avro_generic_record_class(schema, state);
+		break;
+	case AVRO_UNION:
+		result = avro_generic_union_class(schema, state);
+		break;
+
+	case AVRO_LINK:
+		{
+			avro_generic_link_value_iface_t  *lresult =
+			    avro_generic_link_class(schema);
+			if (lresult == NULL) {
+				return NULL;
+			}
+			lresult->next = state->links;
+			state->links = lresult;
+			result = &lresult->parent;
+			break;
+		}
+
+	default:
+		avro_set_error("Unknown schema type");
+		return NULL;
+	}
+
+	/*
+	 * Add the new value implementation to the memoized state before
+	 * we return.
+	 */
+
+	avro_memoize_set(&state->mem, schema, NULL, result);
+	return result;
+}
+
+avro_value_iface_t *
+avro_generic_class_from_schema(avro_schema_t schema)
+{
+	/*
+	 * Create a state to keep track of the value implementations
+	 * that we create for each subschema.
+	 */
+
+	memoize_state_t  state;
+	avro_memoize_init(&state.mem);
+	state.links = NULL;
+
+	/*
+	 * Create the value implementations.
+	 */
+
+	avro_generic_value_iface_t  *result =
+	    avro_generic_class_from_schema_memoized(schema, &state);
+	if (result == NULL) {
+		avro_memoize_done(&state.mem);
+		return NULL;
+	}
+
+	/*
+	 * Fix up any link schemas so that their value implementations
+	 * point to their target schemas' implementations.
+	 */
+
+	while (state.links != NULL) {
+		avro_generic_link_value_iface_t  *link_iface = state.links;
+		avro_schema_t  target_schema =
+		    avro_schema_link_target(link_iface->schema);
+
+		avro_generic_value_iface_t  *target_iface = NULL;
+		if (!avro_memoize_get(&state.mem, target_schema, NULL,
+				      (void **) &target_iface)) {
+			avro_set_error("Never created a value implementation for %s",
+				       avro_schema_type_name(target_schema));
+			avro_memoize_done(&state.mem);
+			return NULL;
+		}
+
+		/* We don't keep a reference to the target
+		 * implementation, since that would give us a reference
+		 * cycle. */
+		link_iface->target_giface = target_iface;
+		state.links = link_iface->next;
+		link_iface->next = NULL;
+	}
+
+	/*
+	 * And now we can return.
+	 */
+
+	avro_memoize_done(&state.mem);
+	return &result->parent;
+}
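+
+/* Illustrative end-to-end sketch (not part of this file): create a
+ * generic value class for a schema, instantiate a value, and release
+ * both.  avro_generic_value_new is declared in avro/generic.h.
+ *
+ *     avro_value_iface_t  *iface = avro_generic_class_from_schema(schema);
+ *     avro_value_t  value;
+ *     avro_generic_value_new(iface, &value);
+ *     ... use value ...
+ *     avro_value_decref(&value);
+ *     avro_value_iface_decref(iface);
+ */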
diff --git a/lang/c/src/io.c b/lang/c/src/io.c
new file mode 100644
index 0000000..96ec25e
--- /dev/null
+++ b/lang/c/src/io.c
@@ -0,0 +1,447 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro/allocation.h"
+#include "avro/refcount.h"
+#include "avro/errors.h"
+#include "avro/io.h"
+#include "avro_private.h"
+#include <stdio.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <string.h>
+#include "dump.h"
+
+enum avro_io_type_t {
+	AVRO_FILE_IO,
+	AVRO_MEMORY_IO
+};
+typedef enum avro_io_type_t avro_io_type_t;
+
+struct avro_reader_t_ {
+	avro_io_type_t type;
+	volatile int  refcount;
+};
+
+struct avro_writer_t_ {
+	avro_io_type_t type;
+	volatile int  refcount;
+};
+
+struct _avro_reader_file_t {
+	struct avro_reader_t_ reader;
+	FILE *fp;
+	int should_close;
+	char *cur;
+	char *end;
+	char buffer[4096];
+};
+
+struct _avro_writer_file_t {
+	struct avro_writer_t_ writer;
+	FILE *fp;
+	int should_close;
+};
+
+struct _avro_reader_memory_t {
+	struct avro_reader_t_ reader;
+	const char *buf;
+	int64_t len;
+	int64_t read;
+};
+
+struct _avro_writer_memory_t {
+	struct avro_writer_t_ writer;
+	const char *buf;
+	int64_t len;
+	int64_t written;
+};
+
+#define avro_io_typeof(obj)      ((obj)->type)
+#define is_memory_io(obj)        (obj && avro_io_typeof(obj) == AVRO_MEMORY_IO)
+#define is_file_io(obj)          (obj && avro_io_typeof(obj) == AVRO_FILE_IO)
+
+#define avro_reader_to_memory(reader_)  container_of(reader_, struct _avro_reader_memory_t, reader)
+#define avro_reader_to_file(reader_)    container_of(reader_, struct _avro_reader_file_t, reader)
+#define avro_writer_to_memory(writer_)  container_of(writer_, struct _avro_writer_memory_t, writer)
+#define avro_writer_to_file(writer_)    container_of(writer_, struct _avro_writer_file_t, writer)
+
+static void reader_init(avro_reader_t reader, avro_io_type_t type)
+{
+	reader->type = type;
+	avro_refcount_set(&reader->refcount, 1);
+}
+
+static void writer_init(avro_writer_t writer, avro_io_type_t type)
+{
+	writer->type = type;
+	avro_refcount_set(&writer->refcount, 1);
+}
+
+avro_reader_t avro_reader_file_fp(FILE * fp, int should_close)
+{
+	struct _avro_reader_file_t *file_reader =
+	    (struct _avro_reader_file_t *) avro_new(struct _avro_reader_file_t);
+	if (!file_reader) {
+		avro_set_error("Cannot allocate new file reader");
+		return NULL;
+	}
+	memset(file_reader, 0, sizeof(struct _avro_reader_file_t));
+	file_reader->fp = fp;
+	file_reader->should_close = should_close;
+	reader_init(&file_reader->reader, AVRO_FILE_IO);
+	return &file_reader->reader;
+}
+
+avro_reader_t avro_reader_file(FILE * fp)
+{
+	return avro_reader_file_fp(fp, 1);
+}
+
+avro_writer_t avro_writer_file_fp(FILE * fp, int should_close)
+{
+	struct _avro_writer_file_t *file_writer =
+	    (struct _avro_writer_file_t *) avro_new(struct _avro_writer_file_t);
+	if (!file_writer) {
+		avro_set_error("Cannot allocate new file writer");
+		return NULL;
+	}
+	file_writer->fp = fp;
+	file_writer->should_close = should_close;
+	writer_init(&file_writer->writer, AVRO_FILE_IO);
+	return &file_writer->writer;
+}
+
+avro_writer_t avro_writer_file(FILE * fp)
+{
+	return avro_writer_file_fp(fp, 1);
+}
+
+avro_reader_t avro_reader_memory(const char *buf, int64_t len)
+{
+	struct _avro_reader_memory_t *mem_reader =
+	    (struct _avro_reader_memory_t *) avro_new(struct _avro_reader_memory_t);
+	if (!mem_reader) {
+		avro_set_error("Cannot allocate new memory reader");
+		return NULL;
+	}
+	mem_reader->buf = buf;
+	mem_reader->len = len;
+	mem_reader->read = 0;
+	reader_init(&mem_reader->reader, AVRO_MEMORY_IO);
+	return &mem_reader->reader;
+}
+
+void
+avro_reader_memory_set_source(avro_reader_t reader, const char *buf, int64_t len)
+{
+	if (is_memory_io(reader)) {
+		struct _avro_reader_memory_t *mem_reader = avro_reader_to_memory(reader);
+		mem_reader->buf = buf;
+		mem_reader->len = len;
+		mem_reader->read = 0;
+	}
+}
+
+avro_writer_t avro_writer_memory(const char *buf, int64_t len)
+{
+	struct _avro_writer_memory_t *mem_writer =
+	    (struct _avro_writer_memory_t *) avro_new(struct _avro_writer_memory_t);
+	if (!mem_writer) {
+		avro_set_error("Cannot allocate new memory writer");
+		return NULL;
+	}
+	mem_writer->buf = buf;
+	mem_writer->len = len;
+	mem_writer->written = 0;
+	writer_init(&mem_writer->writer, AVRO_MEMORY_IO);
+	return &mem_writer->writer;
+}
+
+void
+avro_writer_memory_set_dest(avro_writer_t writer, const char *buf, int64_t len)
+{
+	if (is_memory_io(writer)) {
+		struct _avro_writer_memory_t *mem_writer = avro_writer_to_memory(writer);
+		mem_writer->buf = buf;
+		mem_writer->len = len;
+		mem_writer->written = 0;
+	}
+}
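+
+/* Illustrative usage sketch (not part of this file): round-tripping a
+ * few bytes through the memory reader and writer defined above.
+ *
+ *     char  buf[64];
+ *     avro_writer_t  w = avro_writer_memory(buf, sizeof(buf));
+ *     avro_write(w, (void *) "abc", 3);
+ *     avro_reader_t  r = avro_reader_memory(buf, avro_writer_tell(w));
+ *     char  out[3];
+ *     avro_read(r, out, sizeof(out));
+ *     avro_reader_free(r);
+ *     avro_writer_free(w);
+ */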
+
+static int
+avro_read_memory(struct _avro_reader_memory_t *reader, void *buf, int64_t len)
+{
+	if (len > 0) {
+		if ((reader->len - reader->read) < len) {
+			avro_prefix_error("Cannot read %" PRIsz " bytes from memory buffer",
+					  (size_t) len);
+			return ENOSPC;
+		}
+		memcpy(buf, reader->buf + reader->read, len);
+		reader->read += len;
+	}
+	return 0;
+}
+
+#define bytes_available(reader) (reader->end - reader->cur)
+#define buffer_reset(reader) {reader->cur = reader->end = reader->buffer;}
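+
+/* avro_read_file (below) distinguishes three cases: a request larger
+ * than the internal 4096-byte buffer drains the buffer and then reads
+ * the remainder straight from the file; a request already satisfiable
+ * from the buffer is a plain memcpy; otherwise the buffer is drained,
+ * refilled with one fread, and the copy completes from the refill. */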
+
+static int
+avro_read_file(struct _avro_reader_file_t *reader, void *buf, int64_t len)
+{
+	int64_t needed = len;
+	char *p = (char *) buf;
+	int rval;
+
+	if (len == 0) {
+		return 0;
+	}
+
+	if (needed > (int64_t) sizeof(reader->buffer)) {
+		if (bytes_available(reader) > 0) {
+			memcpy(p, reader->cur, bytes_available(reader));
+			p += bytes_available(reader);
+			needed -= bytes_available(reader);
+			buffer_reset(reader);
+		}
+		rval = fread(p, 1, needed, reader->fp);
+		if (rval != needed) {
+			avro_set_error("Cannot read %" PRIsz " bytes from file",
+				       (size_t) needed);
+			return EILSEQ;
+		}
+		return 0;
+	} else if (needed <= bytes_available(reader)) {
+		memcpy(p, reader->cur, needed);
+		reader->cur += needed;
+		return 0;
+	} else {
+		memcpy(p, reader->cur, bytes_available(reader));
+		p += bytes_available(reader);
+		needed -= bytes_available(reader);
+
+		rval =
+		    fread(reader->buffer, 1, sizeof(reader->buffer),
+			  reader->fp);
+		if (rval == 0) {
+			avro_set_error("Cannot read %" PRIsz " bytes from file",
+				       (size_t) needed);
+			return EILSEQ;
+		}
+		reader->cur = reader->buffer;
+		reader->end = reader->cur + rval;
+
+		if (bytes_available(reader) < needed) {
+			avro_set_error("Cannot read %" PRIsz " bytes from file",
+				       (size_t) needed);
+			return EILSEQ;
+		}
+		memcpy(p, reader->cur, needed);
+		reader->cur += needed;
+		return 0;
+	}
+	avro_set_error("Cannot read %" PRIsz " bytes from file",
+		       (size_t) needed);
+	return EILSEQ;
+}
+
+int avro_read(avro_reader_t reader, void *buf, int64_t len)
+{
+	if (buf && len >= 0) {
+		if (is_memory_io(reader)) {
+			return avro_read_memory(avro_reader_to_memory(reader),
+						buf, len);
+		} else if (is_file_io(reader)) {
+			return avro_read_file(avro_reader_to_file(reader), buf,
+					      len);
+		}
+	}
+	return EINVAL;
+}
+
+static int avro_skip_memory(struct _avro_reader_memory_t *reader, int64_t len)
+{
+	if (len > 0) {
+		if ((reader->len - reader->read) < len) {
+			avro_set_error("Cannot skip %" PRIsz " bytes in memory buffer",
+				       (size_t) len);
+			return ENOSPC;
+		}
+		reader->read += len;
+	}
+	return 0;
+}
+
+static int avro_skip_file(struct _avro_reader_file_t *reader, int64_t len)
+{
+	int rval;
+	int64_t needed = len;
+
+	if (len == 0) {
+		return 0;
+	}
+	if (needed <= bytes_available(reader)) {
+		reader->cur += needed;
+	} else {
+		needed -= bytes_available(reader);
+		buffer_reset(reader);
+		rval = fseek(reader->fp, needed, SEEK_CUR);
+		if (rval < 0) {
+			avro_set_error("Cannot skip %" PRIsz " bytes in file",
+				       (size_t) len);
+			return rval;
+		}
+	}
+	return 0;
+}
+
+int avro_skip(avro_reader_t reader, int64_t len)
+{
+	if (len >= 0) {
+		if (is_memory_io(reader)) {
+			return avro_skip_memory(avro_reader_to_memory(reader),
+						len);
+		} else if (is_file_io(reader)) {
+			return avro_skip_file(avro_reader_to_file(reader), len);
+		}
+	}
+	return 0;
+}
+
+static int
+avro_write_memory(struct _avro_writer_memory_t *writer, void *buf, int64_t len)
+{
+	if (len) {
+		if ((writer->len - writer->written) < len) {
+			avro_set_error("Cannot write %" PRIsz " bytes in memory buffer",
+				       (size_t) len);
+			return ENOSPC;
+		}
+		memcpy((void *)(writer->buf + writer->written), buf, len);
+		writer->written += len;
+	}
+	return 0;
+}
+
+static int
+avro_write_file(struct _avro_writer_file_t *writer, void *buf, int64_t len)
+{
+	int rval;
+	if (len > 0) {
+		rval = fwrite(buf, len, 1, writer->fp);
+		if (rval == 0) {
+			return feof(writer->fp) ? EOF : 0;
+		}
+	}
+	return 0;
+}
+
+int avro_write(avro_writer_t writer, void *buf, int64_t len)
+{
+	if (buf && len >= 0) {
+		if (is_memory_io(writer)) {
+			return avro_write_memory(avro_writer_to_memory(writer),
+						 buf, len);
+		} else if (is_file_io(writer)) {
+			return avro_write_file(avro_writer_to_file(writer), buf,
+					       len);
+		}
+	}
+	return EINVAL;
+}
+
+void
+avro_reader_reset(avro_reader_t reader)
+{
+	if (is_memory_io(reader)) {
+		avro_reader_to_memory(reader)->read = 0;
+	}
+}
+
+void avro_writer_reset(avro_writer_t writer)
+{
+	if (is_memory_io(writer)) {
+		avro_writer_to_memory(writer)->written = 0;
+	}
+}
+
+int64_t avro_writer_tell(avro_writer_t writer)
+{
+	if (is_memory_io(writer)) {
+		return avro_writer_to_memory(writer)->written;
+	}
+	return EINVAL;
+}
+
+void avro_writer_flush(avro_writer_t writer)
+{
+	if (is_file_io(writer)) {
+		fflush(avro_writer_to_file(writer)->fp);
+	}
+}
+
+void avro_writer_dump(avro_writer_t writer, FILE * fp)
+{
+	if (is_memory_io(writer)) {
+		dump(fp, (char *)avro_writer_to_memory(writer)->buf,
+		     avro_writer_to_memory(writer)->written);
+	}
+}
+
+void avro_reader_dump(avro_reader_t reader, FILE * fp)
+{
+	if (is_memory_io(reader)) {
+		dump(fp, (char *)avro_reader_to_memory(reader)->buf,
+		     avro_reader_to_memory(reader)->read);
+	}
+}
+
+void avro_reader_free(avro_reader_t reader)
+{
+	if (is_memory_io(reader)) {
+		avro_freet(struct _avro_reader_memory_t, reader);
+	} else if (is_file_io(reader)) {
+		if (avro_reader_to_file(reader)->should_close) {
+			fclose(avro_reader_to_file(reader)->fp);
+		}
+		avro_freet(struct _avro_reader_file_t, reader);
+	}
+}
+
+void avro_writer_free(avro_writer_t writer)
+{
+	if (is_memory_io(writer)) {
+		avro_freet(struct _avro_writer_memory_t, writer);
+	} else if (is_file_io(writer)) {
+		if (avro_writer_to_file(writer)->should_close) {
+			fclose(avro_writer_to_file(writer)->fp);
+		}
+		avro_freet(struct _avro_writer_file_t, writer);
+	}
+}
+
+int avro_reader_is_eof(avro_reader_t reader)
+{
+	if (is_file_io(reader)) {
+		struct _avro_reader_file_t *file = avro_reader_to_file(reader);
+		if (feof(file->fp)) {
+			return file->cur == file->end;
+		}
+	}
+	return 0;
+}
diff --git a/lang/c/src/map.c b/lang/c/src/map.c
new file mode 100644
index 0000000..f63a83a
--- /dev/null
+++ b/lang/c/src/map.c
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <errno.h>
+#include <string.h>
+
+#include "avro/data.h"
+#include "avro/allocation.h"
+#include "avro/errors.h"
+#include "st.h"
+
+
+#define raw_entry_size(element_size) \
+	(sizeof(avro_raw_map_entry_t) + element_size)
+
+void avro_raw_map_init(avro_raw_map_t *map, size_t element_size)
+{
+	memset(map, 0, sizeof(avro_raw_map_t));
+	avro_raw_array_init(&map->elements, raw_entry_size(element_size));
+	map->indices_by_key = st_init_strtable();
+}
+
+
+static void
+avro_raw_map_free_keys(avro_raw_map_t *map)
+{
+	unsigned int  i;
+	for (i = 0; i < avro_raw_map_size(map); i++) {
+		void  *ventry =
+		    ((char *) map->elements.data + map->elements.element_size * i);
+		avro_raw_map_entry_t  *entry = (avro_raw_map_entry_t *) ventry;
+		avro_str_free((char *) entry->key);
+	}
+}
+
+
+void avro_raw_map_done(avro_raw_map_t *map)
+{
+	avro_raw_map_free_keys(map);
+	avro_raw_array_done(&map->elements);
+	st_free_table((st_table *) map->indices_by_key);
+	memset(map, 0, sizeof(avro_raw_map_t));
+}
+
+
+void avro_raw_map_clear(avro_raw_map_t *map)
+{
+	avro_raw_map_free_keys(map);
+	avro_raw_array_clear(&map->elements);
+	st_free_table((st_table *) map->indices_by_key);
+	map->indices_by_key = st_init_strtable();
+}
+
+
+int
+avro_raw_map_ensure_size(avro_raw_map_t *map, size_t desired_count)
+{
+	return avro_raw_array_ensure_size(&map->elements, desired_count);
+}
+
+
+void *avro_raw_map_get(const avro_raw_map_t *map, const char *key,
+		       size_t *index)
+{
+	st_data_t  data;
+	if (st_lookup((st_table *) map->indices_by_key, (st_data_t) key, &data)) {
+		unsigned int  i = (unsigned int) data;
+		if (index) {
+			*index = i;
+		}
+		void  *raw_entry =
+		    ((char *) map->elements.data + map->elements.element_size * i);
+		return (char *) raw_entry + sizeof(avro_raw_map_entry_t);
+	} else {
+		return NULL;
+	}
+}
+
+
+int avro_raw_map_get_or_create(avro_raw_map_t *map, const char *key,
+			       void **element, size_t *index)
+{
+	st_data_t  data;
+	void  *el;
+	unsigned int  i;
+	int  is_new;
+
+	if (st_lookup((st_table *) map->indices_by_key, (st_data_t) key, &data)) {
+		i = (unsigned int) data;
+		void  *raw_entry =
+		    ((char *) map->elements.data + map->elements.element_size * i);
+		el = (char *) raw_entry + sizeof(avro_raw_map_entry_t);
+		is_new = 0;
+	} else {
+		i = map->elements.element_count;
+		avro_raw_map_entry_t  *raw_entry =
+		    (avro_raw_map_entry_t *) avro_raw_array_append(&map->elements);
+		if (!raw_entry) {
+			return -ENOMEM;
+		}
+		raw_entry->key = avro_strdup(key);
+		if (!raw_entry->key) {
+			return -ENOMEM;
+		}
+		st_insert((st_table *) map->indices_by_key,
+			  (st_data_t) raw_entry->key, (st_data_t) i);
+		el = ((char *) raw_entry) + sizeof(avro_raw_map_entry_t);
+		is_new = 1;
+	}
+
+	if (element) {
+		*element = el;
+	}
+	if (index) {
+		*index = i;
+	}
+	return is_new;
+}
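+
+/* Illustrative usage sketch (not part of this file): a raw map of int
+ * elements keyed by string, using the functions defined above.
+ *
+ *     avro_raw_map_t  map;
+ *     avro_raw_map_init(&map, sizeof(int));
+ *     void  *el;
+ *     size_t  index;
+ *     int  is_new = avro_raw_map_get_or_create(&map, "answer", &el, &index);
+ *     if (is_new > 0) { *((int *) el) = 42; }
+ *     avro_raw_map_done(&map);
+ */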
diff --git a/lang/c/src/memoize.c b/lang/c/src/memoize.c
new file mode 100644
index 0000000..e958df0
--- /dev/null
+++ b/lang/c/src/memoize.c
@@ -0,0 +1,165 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <errno.h>
+#include <string.h>
+
+#include "avro/data.h"
+#include "avro/allocation.h"
+#include "avro/errors.h"
+#include "avro_private.h"
+#include "st.h"
+
+
+typedef struct avro_memoize_key {
+	void  *key1;
+	void  *key2;
+} avro_memoize_key_t;
+
+
+static int
+avro_memoize_key_cmp(avro_memoize_key_t *a, avro_memoize_key_t *b)
+{
+	/*
+	 * This isn't a proper cmp operation, since it always returns 1
+	 * if the keys are different.  But that's okay for the hash
+	 * table implementation we're using.
+	 */
+
+	return (a->key1 != b->key1) || (a->key2 != b->key2);
+}
+
+
+static int
+avro_memoize_key_hash(avro_memoize_key_t *a)
+{
+	return ((uintptr_t) a->key1) ^ ((uintptr_t) a->key2);
+}
+
+
+static struct st_hash_type  avro_memoize_hash_type = {
+	HASH_FUNCTION_CAST avro_memoize_key_cmp,
+	HASH_FUNCTION_CAST avro_memoize_key_hash
+};
+
+
+void
+avro_memoize_init(avro_memoize_t *mem)
+{
+	memset(mem, 0, sizeof(avro_memoize_t));
+	mem->cache = st_init_table(&avro_memoize_hash_type);
+}
+
+
+static int
+avro_memoize_free_key(avro_memoize_key_t *key, void *result, void *dummy)
+{
+	AVRO_UNUSED(result);
+	AVRO_UNUSED(dummy);
+	avro_freet(avro_memoize_key_t, key);
+	return ST_CONTINUE;
+}
+
+
+void
+avro_memoize_done(avro_memoize_t *mem)
+{
+	st_foreach((st_table *) mem->cache, HASH_FUNCTION_CAST avro_memoize_free_key, 0);
+	st_free_table((st_table *) mem->cache);
+	memset(mem, 0, sizeof(avro_memoize_t));
+}
+
+
+int
+avro_memoize_get(avro_memoize_t *mem,
+		 void *key1, void *key2,
+		 void **result)
+{
+	avro_memoize_key_t  key;
+	key.key1 = key1;
+	key.key2 = key2;
+
+	union {
+		st_data_t  data;
+		void  *value;
+	} val;
+
+	if (st_lookup((st_table *) mem->cache, (st_data_t) &key, &val.data)) {
+		if (result) {
+			*result = val.value;
+		}
+		return 1;
+	} else {
+		return 0;
+	}
+}
+
+
+void
+avro_memoize_set(avro_memoize_t *mem,
+		 void *key1, void *key2,
+		 void *result)
+{
+	/*
+	 * First see if there's already a cached value for this key.  If
+	 * so, we don't want to allocate a new avro_memoize_key_t
+	 * instance.
+	 */
+
+	avro_memoize_key_t  key;
+	key.key1 = key1;
+	key.key2 = key2;
+
+	union {
+		st_data_t  data;
+		void  *value;
+	} val;
+
+	if (st_lookup((st_table *) mem->cache, (st_data_t) &key, &val.data)) {
+		st_insert((st_table *) mem->cache, (st_data_t) &key, (st_data_t) result);
+		return;
+	}
+
+	/*
+	 * If it's a new key pair, then we do need to allocate.
+	 */
+
+	avro_memoize_key_t  *real_key = (avro_memoize_key_t *) avro_new(avro_memoize_key_t);
+	real_key->key1 = key1;
+	real_key->key2 = key2;
+
+	st_insert((st_table *) mem->cache, (st_data_t) real_key, (st_data_t) result);
+}
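+
+/* Illustrative usage sketch (not part of this file): memoizing a value
+ * keyed by a schema pointer, as the generic and resolver code does.
+ *
+ *     avro_memoize_t  mem;
+ *     avro_memoize_init(&mem);
+ *     avro_memoize_set(&mem, schema, NULL, iface);
+ *     void  *cached = NULL;
+ *     if (avro_memoize_get(&mem, schema, NULL, &cached)) {
+ *         ... cached == iface ...
+ *     }
+ *     avro_memoize_done(&mem);
+ */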
+
+
+void
+avro_memoize_delete(avro_memoize_t *mem, void *key1, void *key2)
+{
+	avro_memoize_key_t  key;
+	key.key1 = key1;
+	key.key2 = key2;
+
+	union {
+		st_data_t  data;
+		avro_memoize_key_t  *key;
+	} real_key;
+
+	real_key.key = &key;
+	if (st_delete((st_table *) mem->cache, &real_key.data, NULL)) {
+		avro_freet(avro_memoize_key_t, real_key.key);
+	}
+}
diff --git a/lang/c/src/resolved-reader.c b/lang/c/src/resolved-reader.c
new file mode 100644
index 0000000..f7e2281
--- /dev/null
+++ b/lang/c/src/resolved-reader.c
@@ -0,0 +1,3377 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro/platform.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro_private.h"
+#include "avro/allocation.h"
+#include "avro/basics.h"
+#include "avro/data.h"
+#include "avro/errors.h"
+#include "avro/refcount.h"
+#include "avro/resolver.h"
+#include "avro/schema.h"
+#include "avro/value.h"
+#include "st.h"
+
+#ifndef AVRO_RESOLVER_DEBUG
+#define AVRO_RESOLVER_DEBUG 0
+#endif
+
+#if AVRO_RESOLVER_DEBUG
+#include <stdio.h>
+#define DEBUG(...) \
+	do { \
+		fprintf(stderr, __VA_ARGS__); \
+		fprintf(stderr, "\n"); \
+	} while (0)
+#else
+#define DEBUG(...)  /* don't print messages */
+#endif
+
+
+typedef struct avro_resolved_reader  avro_resolved_reader_t;
+
+struct avro_resolved_reader {
+	avro_value_iface_t  parent;
+
+	/** The reference count for this interface. */
+	volatile int  refcount;
+
+	/** The writer schema. */
+	avro_schema_t  wschema;
+
+	/** The reader schema. */
+	avro_schema_t  rschema;
+
+	/* The size of the value instances for this resolver. */
+	size_t  instance_size;
+
+	/* A function to calculate the instance size once the overall
+	 * top-level resolver (and all of its children) has been
+	 * constructed. */
+	void
+	(*calculate_size)(avro_resolved_reader_t *iface);
+
+	/* A free function for this resolver */
+	void
+	(*free_iface)(avro_resolved_reader_t *iface, st_table *freeing);
+
+	/* An initialization function for instances of this resolver. */
+	int
+	(*init)(const avro_resolved_reader_t *iface, void *self);
+
+	/* A finalization function for instances of this resolver. */
+	void
+	(*done)(const avro_resolved_reader_t *iface, void *self);
+
+	/* Clear out any existing wrappers, if any */
+	int
+	(*reset_wrappers)(const avro_resolved_reader_t *iface, void *self);
+};
+
+#define avro_resolved_reader_calculate_size(iface) \
+	do { \
+		if ((iface)->calculate_size != NULL) { \
+			(iface)->calculate_size((iface)); \
+		} \
+	} while (0)
+#define avro_resolved_reader_init(iface, self) \
+	((iface)->init == NULL? 0: (iface)->init((iface), (self)))
+#define avro_resolved_reader_done(iface, self) \
+	((iface)->done == NULL? (void) 0: (iface)->done((iface), (self)))
+#define avro_resolved_reader_reset_wrappers(iface, self) \
+	((iface)->reset_wrappers == NULL? 0: \
+	 (iface)->reset_wrappers((iface), (self)))
+
+
+/*
+ * We assume that each instance type in this file contains an
+ * avro_value_t as its first element, which is the current wrapped
+ * value.
+ */
+
+void
+avro_resolved_reader_set_source(avro_value_t *resolved,
+				avro_value_t *dest)
+{
+	avro_value_t  *self = (avro_value_t *) resolved->self;
+	if (self->self != NULL) {
+		avro_value_decref(self);
+	}
+	avro_value_copy_ref(self, dest);
+}
+
+void
+avro_resolved_reader_clear_source(avro_value_t *resolved)
+{
+	avro_value_t  *self = (avro_value_t *) resolved->self;
+	if (self->self != NULL) {
+		avro_value_decref(self);
+	}
+	self->iface = NULL;
+	self->self = NULL;
+}
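+
+/*
+ * Usage sketch (illustrative): avro_resolved_reader_new(), the public
+ * constructor declared in avro/resolver.h, builds one of these
+ * interfaces from a writer/reader schema pair.  A caller then wraps a
+ * writer-schema value and reads it through the reader schema:
+ *
+ *     avro_value_iface_t  *resolver =
+ *         avro_resolved_reader_new(wschema, rschema);
+ *     avro_value_t  resolved;
+ *     avro_resolved_reader_new_value(resolver, &resolved);
+ *     avro_resolved_reader_set_source(&resolved, &writer_value);
+ *     ... read reader-schema data out of `resolved` ...
+ *     avro_value_decref(&resolved);
+ *     avro_value_iface_decref(resolver);
+ */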
+
+int
+avro_resolved_reader_new_value(avro_value_iface_t *viface,
+			       avro_value_t *value)
+{
+	int  rval;
+	avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	void  *self = avro_malloc(iface->instance_size + sizeof(volatile int));
+	if (self == NULL) {
+		value->iface = NULL;
+		value->self = NULL;
+		return ENOMEM;
+	}
+
+	memset(self, 0, iface->instance_size + sizeof(volatile int));
+	volatile int  *refcount = (volatile int *) self;
+	self = (char *) self + sizeof(volatile int);
+
+	rval = avro_resolved_reader_init(iface, self);
+	if (rval != 0) {
+		avro_free(self, iface->instance_size + sizeof(volatile int));
+		value->iface = NULL;
+		value->self = NULL;
+		return rval;
+	}
+
+	*refcount = 1;
+	value->iface = avro_value_iface_incref(viface);
+	value->self = self;
+	return 0;
+}
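+
+/*
+ * Layout of the allocation made above: the reference count lives in a
+ * hidden header just before the pointer stored in value->self, which
+ * is why the free/incref/decref functions below step back by
+ * sizeof(volatile int) to find it.
+ *
+ *     [ volatile int refcount | instance (instance_size bytes) ]
+ *     ^ allocation start        ^ value->self
+ */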
+
+static void
+avro_resolved_reader_free_value(const avro_value_iface_t *viface, void *vself)
+{
+	avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+
+	avro_resolved_reader_done(iface, vself);
+	if (self->self != NULL) {
+		avro_value_decref(self);
+	}
+
+	vself = (char *) vself - sizeof(volatile int);
+	avro_free(vself, iface->instance_size + sizeof(volatile int));
+}
+
+static void
+avro_resolved_reader_incref(avro_value_t *value)
+{
+	/*
+	 * This only works if you pass in the top-level value.
+	 */
+
+	volatile int  *refcount = (volatile int *) ((char *) value->self - sizeof(volatile int));
+	avro_refcount_inc(refcount);
+}
+
+static void
+avro_resolved_reader_decref(avro_value_t *value)
+{
+	/*
+	 * This only works if you pass in the top-level value.
+	 */
+
+	volatile int  *refcount = (volatile int *) ((char *) value->self - sizeof(volatile int));
+	if (avro_refcount_dec(refcount)) {
+		avro_resolved_reader_free_value(value->iface, value->self);
+	}
+}
+
+
+static avro_value_iface_t *
+avro_resolved_reader_incref_iface(avro_value_iface_t *viface)
+{
+	avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	avro_refcount_inc(&iface->refcount);
+	return viface;
+}
+
+static void
+free_resolver(avro_resolved_reader_t *iface, st_table *freeing)
+{
+	/* First check if we've already started freeing this resolver. */
+	if (st_lookup(freeing, (st_data_t) iface, NULL)) {
+		DEBUG("Already freed %p", iface);
+		return;
+	}
+
+	/* Otherwise add this resolver to the freeing set, then free it. */
+	st_insert(freeing, (st_data_t) iface, (st_data_t) NULL);
+	DEBUG("Freeing resolver %p (%s->%s)", iface,
+	      avro_schema_type_name(iface->wschema),
+	      avro_schema_type_name(iface->rschema));
+
+	iface->free_iface(iface, freeing);
+}
+
+static void
+avro_resolved_reader_calculate_size_(avro_resolved_reader_t *iface)
+{
+	/* Only calculate the size of each resolver once */
+	iface->calculate_size = NULL;
+
+	DEBUG("Calculating size for %s->%s",
+	      avro_schema_type_name((iface)->wschema),
+	      avro_schema_type_name((iface)->rschema));
+	iface->instance_size = sizeof(avro_value_t);
+}
+
+static void
+avro_resolved_reader_free_iface(avro_resolved_reader_t *iface, st_table *freeing)
+{
+	AVRO_UNUSED(freeing);
+	avro_schema_decref(iface->wschema);
+	avro_schema_decref(iface->rschema);
+	avro_freet(avro_resolved_reader_t, iface);
+}
+
+static void
+avro_resolved_reader_decref_iface(avro_value_iface_t *viface)
+{
+	avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	DEBUG("Decref resolver %p (before=%d)", iface, iface->refcount);
+	if (avro_refcount_dec(&iface->refcount)) {
+		st_table  *freeing = st_init_numtable();
+		free_resolver(iface, freeing);
+		st_free_table(freeing);
+	}
+}
+
+static int
+avro_resolved_reader_reset(const avro_value_iface_t *viface, void *vself)
+{
+	/*
+	 * To reset a wrapped value, we first clear out any wrappers,
+	 * and then have the wrapped value reset itself.
+	 */
+
+	int  rval;
+	avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	check(rval, avro_resolved_reader_reset_wrappers(iface, vself));
+	return avro_value_reset(self);
+}
+
+static avro_type_t
+avro_resolved_reader_get_type(const avro_value_iface_t *viface, const void *vself)
+{
+	AVRO_UNUSED(vself);
+	const avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	return avro_typeof(iface->rschema);
+}
+
+static avro_schema_t
+avro_resolved_reader_get_schema(const avro_value_iface_t *viface, const void *vself)
+{
+	AVRO_UNUSED(vself);
+	avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	return iface->rschema;
+}
+
+
+static avro_resolved_reader_t *
+avro_resolved_reader_create(avro_schema_t wschema, avro_schema_t rschema)
+{
+	avro_resolved_reader_t  *self = (avro_resolved_reader_t  *) avro_new(avro_resolved_reader_t);
+	memset(self, 0, sizeof(avro_resolved_reader_t));
+
+	self->parent.incref_iface = avro_resolved_reader_incref_iface;
+	self->parent.decref_iface = avro_resolved_reader_decref_iface;
+	self->parent.incref = avro_resolved_reader_incref;
+	self->parent.decref = avro_resolved_reader_decref;
+	self->parent.reset = avro_resolved_reader_reset;
+	self->parent.get_type = avro_resolved_reader_get_type;
+	self->parent.get_schema = avro_resolved_reader_get_schema;
+
+	self->refcount = 1;
+	self->wschema = avro_schema_incref(wschema);
+	self->rschema = avro_schema_incref(rschema);
+	self->calculate_size = avro_resolved_reader_calculate_size_;
+	self->free_iface = avro_resolved_reader_free_iface;
+	self->reset_wrappers = NULL;
+	return self;
+}
+
+
+/*-----------------------------------------------------------------------
+ * Memoized resolvers
+ */
+
+typedef struct avro_resolved_link_reader  avro_resolved_link_reader_t;
+
+typedef struct memoize_state_t {
+	avro_memoize_t  mem;
+	avro_resolved_link_reader_t  *links;
+} memoize_state_t;
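+
+/*
+ * Sketch of the memoization protocol (illustrative; the real
+ * implementation is avro_resolved_reader_new_memoized, declared just
+ * below): look up the (wschema, rschema) pair first, and register each
+ * resolver in the table *before* resolving its children, so that
+ * recursive links find the half-built resolver instead of recursing
+ * forever.
+ *
+ *     avro_resolved_reader_t  *self = NULL;
+ *     if (avro_memoize_get(&state->mem, wschema, rschema, (void **) &self)) {
+ *         avro_value_iface_incref(&self->parent);
+ *         return self;
+ *     }
+ */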
+
+static avro_resolved_reader_t *
+avro_resolved_reader_new_memoized(memoize_state_t *state,
+				  avro_schema_t wschema, avro_schema_t rschema);
+
+
+/*-----------------------------------------------------------------------
+ * Recursive schemas
+ */
+
+/*
+ * Recursive schemas are handled specially; the value implementation for
+ * an AVRO_LINK schema is simply a wrapper around the value
+ * implementation for the link's target schema.  The value methods all
+ * delegate to the wrapped implementation.
+ *
+ * A complication here is that a link might refer to either the writer
+ * schema or the reader schema.  This only matters for a couple of
+ * methods, so instead of keeping a boolean flag in the value
+ * interface, we just slot in separate method implementations as
+ * appropriate.
+ */
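+
+/*
+ * For example (illustrative; any self-referential schema behaves the
+ * same way), the "next" field of this record is parsed as an AVRO_LINK
+ * back to the enclosing record:
+ *
+ *     {"type": "record", "name": "list", "fields": [
+ *         {"name": "head", "type": "int"},
+ *         {"name": "next", "type": ["null", "list"]}]}
+ */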
+
+struct avro_resolved_link_reader {
+	avro_resolved_reader_t  parent;
+
+	/**
+	 * A pointer to the “next” link resolver that we've had to
+	 * create.  We use this as we're creating the overall top-level
+	 * resolver to keep track of which ones we have to fix up
+	 * afterwards.
+	 */
+	avro_resolved_link_reader_t  *next;
+
+	/** The target's implementation. */
+	avro_resolved_reader_t  *target_resolver;
+};
+
+typedef struct avro_resolved_link_value {
+	avro_value_t  wrapped;
+	avro_value_t  target;
+} avro_resolved_link_value_t;
+
+static void
+avro_resolved_wlink_reader_calculate_size(avro_resolved_reader_t *iface)
+{
+	/* Only calculate the size of each resolver once */
+	iface->calculate_size = NULL;
+
+	DEBUG("Calculating size for [%s]->%s",
+	      avro_schema_type_name((iface)->wschema),
+	      avro_schema_type_name((iface)->rschema));
+	iface->instance_size = sizeof(avro_resolved_link_value_t);
+}
+
+static void
+avro_resolved_rlink_reader_calculate_size(avro_resolved_reader_t *iface)
+{
+	/* Only calculate the size of each resolver once */
+	iface->calculate_size = NULL;
+
+	DEBUG("Calculating size for %s->[%s]",
+	      avro_schema_type_name((iface)->wschema),
+	      avro_schema_type_name((iface)->rschema));
+	iface->instance_size = sizeof(avro_resolved_link_value_t);
+}
+
+static void
+avro_resolved_link_reader_free_iface(avro_resolved_reader_t *iface, st_table *freeing)
+{
+	avro_resolved_link_reader_t  *liface =
+	    container_of(iface, avro_resolved_link_reader_t, parent);
+	if (liface->target_resolver != NULL) {
+		free_resolver(liface->target_resolver, freeing);
+	}
+	avro_schema_decref(iface->wschema);
+	avro_schema_decref(iface->rschema);
+	avro_freet(avro_resolved_link_reader_t, iface);
+}
+
+static int
+avro_resolved_link_reader_init(const avro_resolved_reader_t *iface, void *vself)
+{
+	int  rval;
+	const avro_resolved_link_reader_t  *liface =
+	    container_of(iface, avro_resolved_link_reader_t, parent);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	size_t  target_instance_size = liface->target_resolver->instance_size;
+
+	self->target.iface = &liface->target_resolver->parent;
+	self->target.self = avro_malloc(target_instance_size);
+	if (self->target.self == NULL) {
+		return ENOMEM;
+	}
+	DEBUG("Allocated <%p:%" PRIsz "> for link", self->target.self, target_instance_size);
+
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+
+	rval = avro_resolved_reader_init(liface->target_resolver, self->target.self);
+	if (rval != 0) {
+		avro_free(self->target.self, target_instance_size);
+	}
+	return rval;
+}
+
+static void
+avro_resolved_link_reader_done(const avro_resolved_reader_t *iface, void *vself)
+{
+	const avro_resolved_link_reader_t  *liface =
+	    container_of(iface, avro_resolved_link_reader_t, parent);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	size_t  target_instance_size = liface->target_resolver->instance_size;
+	DEBUG("Freeing <%p:%" PRIsz "> for link", self->target.self, target_instance_size);
+	avro_resolved_reader_done(liface->target_resolver, self->target.self);
+	avro_free(self->target.self, target_instance_size);
+	self->target.iface = NULL;
+	self->target.self = NULL;
+}
+
+static int
+avro_resolved_link_reader_reset(const avro_resolved_reader_t *iface, void *vself)
+{
+	const avro_resolved_link_reader_t  *liface =
+	    container_of(iface, avro_resolved_link_reader_t, parent);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	return avro_resolved_reader_reset_wrappers
+	    (liface->target_resolver, self->target.self);
+}
+
+static avro_type_t
+avro_resolved_link_reader_get_type(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_type(&self->target);
+}
+
+static avro_schema_t
+avro_resolved_link_reader_get_schema(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_schema(&self->target);
+}
+
+static int
+avro_resolved_link_reader_get_boolean(const avro_value_iface_t *iface,
+				      const void *vself, int *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_boolean(&self->target, out);
+}
+
+static int
+avro_resolved_link_reader_get_bytes(const avro_value_iface_t *iface,
+				    const void *vself, const void **buf, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_bytes(&self->target, buf, size);
+}
+
+static int
+avro_resolved_link_reader_grab_bytes(const avro_value_iface_t *iface,
+				     const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_grab_bytes(&self->target, dest);
+}
+
+static int
+avro_resolved_link_reader_get_double(const avro_value_iface_t *iface,
+				     const void *vself, double *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_double(&self->target, out);
+}
+
+static int
+avro_resolved_link_reader_get_float(const avro_value_iface_t *iface,
+				    const void *vself, float *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_float(&self->target, out);
+}
+
+static int
+avro_resolved_link_reader_get_int(const avro_value_iface_t *iface,
+				  const void *vself, int32_t *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_int(&self->target, out);
+}
+
+static int
+avro_resolved_link_reader_get_long(const avro_value_iface_t *iface,
+				   const void *vself, int64_t *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_long(&self->target, out);
+}
+
+static int
+avro_resolved_link_reader_get_null(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_null(&self->target);
+}
+
+static int
+avro_resolved_link_reader_get_string(const avro_value_iface_t *iface,
+				     const void *vself, const char **str, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_string(&self->target, str, size);
+}
+
+static int
+avro_resolved_link_reader_grab_string(const avro_value_iface_t *iface,
+				      const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_grab_string(&self->target, dest);
+}
+
+static int
+avro_resolved_link_reader_get_enum(const avro_value_iface_t *iface,
+				   const void *vself, int *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_enum(&self->target, out);
+}
+
+static int
+avro_resolved_link_reader_get_fixed(const avro_value_iface_t *iface,
+				    const void *vself, const void **buf, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_fixed(&self->target, buf, size);
+}
+
+static int
+avro_resolved_link_reader_grab_fixed(const avro_value_iface_t *iface,
+				     const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_grab_fixed(&self->target, dest);
+}
+
+static int
+avro_resolved_link_reader_set_boolean(const avro_value_iface_t *iface,
+				      void *vself, int val)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_boolean(&self->target, val);
+}
+
+static int
+avro_resolved_link_reader_set_bytes(const avro_value_iface_t *iface,
+				    void *vself, void *buf, size_t size)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_bytes(&self->target, buf, size);
+}
+
+static int
+avro_resolved_link_reader_give_bytes(const avro_value_iface_t *iface,
+				     void *vself, avro_wrapped_buffer_t *buf)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_give_bytes(&self->target, buf);
+}
+
+static int
+avro_resolved_link_reader_set_double(const avro_value_iface_t *iface,
+				     void *vself, double val)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_double(&self->target, val);
+}
+
+static int
+avro_resolved_link_reader_set_float(const avro_value_iface_t *iface,
+				    void *vself, float val)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_float(&self->target, val);
+}
+
+static int
+avro_resolved_link_reader_set_int(const avro_value_iface_t *iface,
+				  void *vself, int32_t val)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_int(&self->target, val);
+}
+
+static int
+avro_resolved_link_reader_set_long(const avro_value_iface_t *iface,
+				   void *vself, int64_t val)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_long(&self->target, val);
+}
+
+static int
+avro_resolved_link_reader_set_null(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_null(&self->target);
+}
+
+static int
+avro_resolved_link_reader_set_string(const avro_value_iface_t *iface,
+				     void *vself, const char *str)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_string(&self->target, str);
+}
+
+static int
+avro_resolved_link_reader_set_string_len(const avro_value_iface_t *iface,
+					 void *vself, const char *str, size_t size)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_string_len(&self->target, str, size);
+}
+
+static int
+avro_resolved_link_reader_give_string_len(const avro_value_iface_t *iface,
+					  void *vself, avro_wrapped_buffer_t *buf)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_give_string_len(&self->target, buf);
+}
+
+static int
+avro_resolved_link_reader_set_enum(const avro_value_iface_t *iface,
+				   void *vself, int val)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_enum(&self->target, val);
+}
+
+static int
+avro_resolved_link_reader_set_fixed(const avro_value_iface_t *iface,
+				    void *vself, void *buf, size_t size)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_fixed(&self->target, buf, size);
+}
+
+static int
+avro_resolved_link_reader_give_fixed(const avro_value_iface_t *iface,
+				     void *vself, avro_wrapped_buffer_t *buf)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_give_fixed(&self->target, buf);
+}
+
+static int
+avro_resolved_link_reader_get_size(const avro_value_iface_t *iface,
+				   const void *vself, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_size(&self->target, size);
+}
+
+static int
+avro_resolved_link_reader_get_by_index(const avro_value_iface_t *iface,
+				       const void *vself, size_t index,
+				       avro_value_t *child, const char **name)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_by_index(&self->target, index, child, name);
+}
+
+static int
+avro_resolved_link_reader_get_by_name(const avro_value_iface_t *iface,
+				      const void *vself, const char *name,
+				      avro_value_t *child, size_t *index)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_by_name(&self->target, name, child, index);
+}
+
+static int
+avro_resolved_link_reader_get_discriminant(const avro_value_iface_t *iface,
+					   const void *vself, int *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_discriminant(&self->target, out);
+}
+
+static int
+avro_resolved_link_reader_get_current_branch(const avro_value_iface_t *iface,
+					     const void *vself, avro_value_t *branch)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_current_branch(&self->target, branch);
+}
+
+static int
+avro_resolved_link_reader_append(const avro_value_iface_t *iface,
+				 void *vself, avro_value_t *child_out,
+				 size_t *new_index)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_append(&self->target, child_out, new_index);
+}
+
+static int
+avro_resolved_link_reader_add(const avro_value_iface_t *iface,
+			      void *vself, const char *key,
+			      avro_value_t *child, size_t *index, int *is_new)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_add(&self->target, key, child, index, is_new);
+}
+
+static int
+avro_resolved_link_reader_set_branch(const avro_value_iface_t *iface,
+				     void *vself, int discriminant,
+				     avro_value_t *branch)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_branch(&self->target, discriminant, branch);
+}
+
+static avro_resolved_link_reader_t *
+avro_resolved_link_reader_create(avro_schema_t wschema, avro_schema_t rschema)
+{
+	avro_resolved_reader_t  *self = (avro_resolved_reader_t  *) avro_new(avro_resolved_link_reader_t);
+	memset(self, 0, sizeof(avro_resolved_link_reader_t));
+
+	self->parent.incref_iface = avro_resolved_reader_incref_iface;
+	self->parent.decref_iface = avro_resolved_reader_decref_iface;
+	self->parent.incref = avro_resolved_reader_incref;
+	self->parent.decref = avro_resolved_reader_decref;
+	self->parent.reset = avro_resolved_reader_reset;
+	self->parent.get_type = avro_resolved_link_reader_get_type;
+	self->parent.get_schema = avro_resolved_link_reader_get_schema;
+
+	self->refcount = 1;
+	self->wschema = avro_schema_incref(wschema);
+	self->rschema = avro_schema_incref(rschema);
+	self->free_iface = avro_resolved_link_reader_free_iface;
+	self->init = avro_resolved_link_reader_init;
+	self->done = avro_resolved_link_reader_done;
+	self->reset_wrappers = avro_resolved_link_reader_reset;
+
+	self->parent.get_boolean = avro_resolved_link_reader_get_boolean;
+	self->parent.get_bytes = avro_resolved_link_reader_get_bytes;
+	self->parent.grab_bytes = avro_resolved_link_reader_grab_bytes;
+	self->parent.get_double = avro_resolved_link_reader_get_double;
+	self->parent.get_float = avro_resolved_link_reader_get_float;
+	self->parent.get_int = avro_resolved_link_reader_get_int;
+	self->parent.get_long = avro_resolved_link_reader_get_long;
+	self->parent.get_null = avro_resolved_link_reader_get_null;
+	self->parent.get_string = avro_resolved_link_reader_get_string;
+	self->parent.grab_string = avro_resolved_link_reader_grab_string;
+	self->parent.get_enum = avro_resolved_link_reader_get_enum;
+	self->parent.get_fixed = avro_resolved_link_reader_get_fixed;
+	self->parent.grab_fixed = avro_resolved_link_reader_grab_fixed;
+
+	self->parent.set_boolean = avro_resolved_link_reader_set_boolean;
+	self->parent.set_bytes = avro_resolved_link_reader_set_bytes;
+	self->parent.give_bytes = avro_resolved_link_reader_give_bytes;
+	self->parent.set_double = avro_resolved_link_reader_set_double;
+	self->parent.set_float = avro_resolved_link_reader_set_float;
+	self->parent.set_int = avro_resolved_link_reader_set_int;
+	self->parent.set_long = avro_resolved_link_reader_set_long;
+	self->parent.set_null = avro_resolved_link_reader_set_null;
+	self->parent.set_string = avro_resolved_link_reader_set_string;
+	self->parent.set_string_len = avro_resolved_link_reader_set_string_len;
+	self->parent.give_string_len = avro_resolved_link_reader_give_string_len;
+	self->parent.set_enum = avro_resolved_link_reader_set_enum;
+	self->parent.set_fixed = avro_resolved_link_reader_set_fixed;
+	self->parent.give_fixed = avro_resolved_link_reader_give_fixed;
+
+	self->parent.get_size = avro_resolved_link_reader_get_size;
+	self->parent.get_by_index = avro_resolved_link_reader_get_by_index;
+	self->parent.get_by_name = avro_resolved_link_reader_get_by_name;
+	self->parent.get_discriminant = avro_resolved_link_reader_get_discriminant;
+	self->parent.get_current_branch = avro_resolved_link_reader_get_current_branch;
+
+	self->parent.append = avro_resolved_link_reader_append;
+	self->parent.add = avro_resolved_link_reader_add;
+	self->parent.set_branch = avro_resolved_link_reader_set_branch;
+
+	return container_of(self, avro_resolved_link_reader_t, parent);
+}
+
+static avro_resolved_reader_t *
+try_wlink(memoize_state_t *state,
+	  avro_schema_t wschema, avro_schema_t rschema)
+{
+	/*
+	 * For link schemas, we create a special value implementation
+	 * that allocates space for its wrapped value at runtime.  This
+	 * lets us handle recursive types without having to instantiate
+	 * an infinite-size value.
+	 */
+
+	avro_schema_t  wtarget = avro_schema_link_target(wschema);
+	avro_resolved_link_reader_t  *lself =
+	    avro_resolved_link_reader_create(wtarget, rschema);
+	avro_memoize_set(&state->mem, wschema, rschema, lself);
+
+	avro_resolved_reader_t  *target_resolver =
+	    avro_resolved_reader_new_memoized(state, wtarget, rschema);
+	if (target_resolver == NULL) {
+		avro_memoize_delete(&state->mem, wschema, rschema);
+		avro_value_iface_decref(&lself->parent.parent);
+		avro_prefix_error("Link target isn't compatible: ");
+		DEBUG("%s", avro_strerror());
+		return NULL;
+	}
+
+	lself->parent.calculate_size = avro_resolved_wlink_reader_calculate_size;
+	lself->target_resolver = target_resolver;
+	lself->next = state->links;
+	state->links = lself;
+
+	return &lself->parent;
+}
+
+static avro_resolved_reader_t *
+try_rlink(memoize_state_t *state,
+	  avro_schema_t wschema, avro_schema_t rschema)
+{
+	/*
+	 * For link schemas, we create a special value implementation
+	 * that allocates space for its wrapped value at runtime.  This
+	 * lets us handle recursive types without having to instantiate
+	 * an infinite-size value.
+	 */
+
+	avro_schema_t  rtarget = avro_schema_link_target(rschema);
+	avro_resolved_link_reader_t  *lself =
+	    avro_resolved_link_reader_create(wschema, rtarget);
+	avro_memoize_set(&state->mem, wschema, rschema, lself);
+
+	avro_resolved_reader_t  *target_resolver =
+	    avro_resolved_reader_new_memoized(state, wschema, rtarget);
+	if (target_resolver == NULL) {
+		avro_memoize_delete(&state->mem, wschema, rschema);
+		avro_value_iface_decref(&lself->parent.parent);
+		avro_prefix_error("Link target isn't compatible: ");
+		DEBUG("%s", avro_strerror());
+		return NULL;
+	}
+
+	lself->parent.calculate_size = avro_resolved_rlink_reader_calculate_size;
+	lself->target_resolver = target_resolver;
+	lself->next = state->links;
+	state->links = lself;
+
+	return &lself->parent;
+}
+
+
+/*-----------------------------------------------------------------------
+ * boolean
+ */
+
+static int
+avro_resolved_reader_get_boolean(const avro_value_iface_t *viface,
+				 const void *vself, int *val)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Getting boolean from %p", src->self);
+	return avro_value_get_boolean(src, val);
+}
+
+static avro_resolved_reader_t *
+try_boolean(memoize_state_t *state,
+	    avro_schema_t wschema, avro_schema_t rschema)
+{
+	if (is_avro_boolean(wschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_boolean = avro_resolved_reader_get_boolean;
+		return self;
+	}
+	avro_set_error("Writer %s not compatible with reader boolean",
+		       avro_schema_type_name(wschema));
+	return NULL;
+}
+
+
+/*-----------------------------------------------------------------------
+ * bytes
+ */
+
+static int
+avro_resolved_reader_get_bytes(const avro_value_iface_t *viface,
+			       const void *vself, const void **buf, size_t *size)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Getting bytes from %p", src->self);
+	return avro_value_get_bytes(src, buf, size);
+}
+
+static int
+avro_resolved_reader_grab_bytes(const avro_value_iface_t *viface,
+				const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Grabbing bytes from %p", src->self);
+	return avro_value_grab_bytes(src, dest);
+}
+
+static avro_resolved_reader_t *
+try_bytes(memoize_state_t *state,
+	  avro_schema_t wschema, avro_schema_t rschema)
+{
+	if (is_avro_bytes(wschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_bytes = avro_resolved_reader_get_bytes;
+		self->parent.grab_bytes = avro_resolved_reader_grab_bytes;
+		return self;
+	}
+	avro_set_error("Writer %s not compatible with reader bytes",
+		       avro_schema_type_name(wschema));
+	return NULL;
+}
+
+
+/*-----------------------------------------------------------------------
+ * double
+ */
+
+static int
+avro_resolved_reader_get_double(const avro_value_iface_t *viface,
+				const void *vself, double *val)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Getting double from %p", src->self);
+	return avro_value_get_double(src, val);
+}
+
+static int
+avro_resolved_reader_get_double_float(const avro_value_iface_t *viface,
+				      const void *vself, double *val)
+{
+	int  rval;
+	float  real_val;
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Promoting double from float %p", src->self);
+	check(rval, avro_value_get_float(src, &real_val));
+	*val = real_val;
+	return 0;
+}
+
+static int
+avro_resolved_reader_get_double_int(const avro_value_iface_t *viface,
+				    const void *vself, double *val)
+{
+	int  rval;
+	int32_t  real_val;
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Promoting double from int %p", src->self);
+	check(rval, avro_value_get_int(src, &real_val));
+	*val = real_val;
+	return 0;
+}
+
+static int
+avro_resolved_reader_get_double_long(const avro_value_iface_t *viface,
+				     const void *vself, double *val)
+{
+	int  rval;
+	int64_t  real_val;
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Promoting double from long %p", src->self);
+	check(rval, avro_value_get_long(src, &real_val));
+	*val = (double) real_val;
+	return 0;
+}
+
+static avro_resolved_reader_t *
+try_double(memoize_state_t *state,
+	   avro_schema_t wschema, avro_schema_t rschema)
+{
+	if (is_avro_double(wschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_double = avro_resolved_reader_get_double;
+		return self;
+	}
+
+	else if (is_avro_float(wschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_double = avro_resolved_reader_get_double_float;
+		return self;
+	}
+
+	else if (is_avro_int32(wschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_double = avro_resolved_reader_get_double_int;
+		return self;
+	}
+
+	else if (is_avro_int64(wschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_double = avro_resolved_reader_get_double_long;
+		return self;
+	}
+
+	avro_set_error("Writer %s not compatible with reader double",
+		       avro_schema_type_name(wschema));
+	return NULL;
+}
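+
+/*
+ * Worked example (illustrative): with an "int" writer schema and a
+ * "double" reader schema, try_double() above installs
+ * avro_resolved_reader_get_double_int, so
+ *
+ *     double  d;
+ *     avro_value_get_double(&resolved, &d);
+ *
+ * reads the writer's 32-bit value (say 42) and yields d == 42.0.
+ */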
+
+
+/*-----------------------------------------------------------------------
+ * float
+ */
+
+static int
+avro_resolved_reader_get_float(const avro_value_iface_t *viface,
+			       const void *vself, float *val)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Getting float from %p", src->self);
+	return avro_value_get_float(src, val);
+}
+
+static int
+avro_resolved_reader_get_float_int(const avro_value_iface_t *viface,
+				   const void *vself, float *val)
+{
+	int  rval;
+	int32_t  real_val;
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Promoting float from int %p", src->self);
+	check(rval, avro_value_get_int(src, &real_val));
+	*val = (float) real_val;
+	return 0;
+}
+
+static int
+avro_resolved_reader_get_float_long(const avro_value_iface_t *viface,
+				    const void *vself, float *val)
+{
+	int  rval;
+	int64_t  real_val;
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Promoting float from long %p", src->self);
+	check(rval, avro_value_get_long(src, &real_val));
+	*val = (float) real_val;
+	return 0;
+}
+
+static avro_resolved_reader_t *
+try_float(memoize_state_t *state,
+	  avro_schema_t wschema, avro_schema_t rschema)
+{
+	if (is_avro_float(wschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_float = avro_resolved_reader_get_float;
+		return self;
+	}
+
+	else if (is_avro_int32(wschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_float = avro_resolved_reader_get_float_int;
+		return self;
+	}
+
+	else if (is_avro_int64(wschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_float = avro_resolved_reader_get_float_long;
+		return self;
+	}
+
+	avro_set_error("Writer %s not compatible with reader float",
+		       avro_schema_type_name(wschema));
+	return NULL;
+}
+
+
+/*-----------------------------------------------------------------------
+ * int
+ */
+
+static int
+avro_resolved_reader_get_int(const avro_value_iface_t *viface,
+			     const void *vself, int32_t *val)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Getting int from %p", src->self);
+	return avro_value_get_int(src, val);
+}
+
+static avro_resolved_reader_t *
+try_int(memoize_state_t *state,
+	avro_schema_t wschema, avro_schema_t rschema)
+{
+	if (is_avro_int32(wschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_int = avro_resolved_reader_get_int;
+		return self;
+	}
+	avro_set_error("Writer %s not compatible with reader int",
+		       avro_schema_type_name(wschema));
+	return NULL;
+}
+
+
+/*-----------------------------------------------------------------------
+ * long
+ */
+
+static int
+avro_resolved_reader_get_long(const avro_value_iface_t *viface,
+			      const void *vself, int64_t *val)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Getting long from %p", src->self);
+	return avro_value_get_long(src, val);
+}
+
+static int
+avro_resolved_reader_get_long_int(const avro_value_iface_t *viface,
+				  const void *vself, int64_t *val)
+{
+	int  rval;
+	int32_t  real_val;
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Promoting long from int %p", src->self);
+	check(rval, avro_value_get_int(src, &real_val));
+	*val = real_val;
+	return 0;
+}
+
+static avro_resolved_reader_t *
+try_long(memoize_state_t *state,
+	 avro_schema_t wschema, avro_schema_t rschema)
+{
+	if (is_avro_int64(wschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_long = avro_resolved_reader_get_long;
+		return self;
+	}
+
+	else if (is_avro_int32(wschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_long = avro_resolved_reader_get_long_int;
+		return self;
+	}
+
+	avro_set_error("Writer %s not compatible with reader long",
+		       avro_schema_type_name(wschema));
+	return NULL;
+}
+
+
+/*-----------------------------------------------------------------------
+ * null
+ */
+
+static int
+avro_resolved_reader_get_null(const avro_value_iface_t *viface,
+			      const void *vself)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Getting null from %p", src->self);
+	return avro_value_get_null(src);
+}
+
+static avro_resolved_reader_t *
+try_null(memoize_state_t *state,
+	 avro_schema_t wschema, avro_schema_t rschema)
+{
+	if (is_avro_null(wschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_null = avro_resolved_reader_get_null;
+		return self;
+	}
+	avro_set_error("Writer %s not compatible with reader null",
+		       avro_schema_type_name(wschema));
+	return NULL;
+}
+
+
+/*-----------------------------------------------------------------------
+ * string
+ */
+
+static int
+avro_resolved_reader_get_string(const avro_value_iface_t *viface,
+				const void *vself, const char **str, size_t *size)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Getting string from %p", src->self);
+	return avro_value_get_string(src, str, size);
+}
+
+static int
+avro_resolved_reader_grab_string(const avro_value_iface_t *viface,
+				 const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Grabbing string from %p", src->self);
+	return avro_value_grab_string(src, dest);
+}
+
+static avro_resolved_reader_t *
+try_string(memoize_state_t *state,
+	   avro_schema_t wschema, avro_schema_t rschema)
+{
+	if (is_avro_string(wschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_string = avro_resolved_reader_get_string;
+		self->parent.grab_string = avro_resolved_reader_grab_string;
+		return self;
+	}
+	avro_set_error("Writer %s not compatible with reader string",
+		       avro_schema_type_name(wschema));
+	return NULL;
+}
+
+
+/*-----------------------------------------------------------------------
+ * array
+ */
+
+typedef struct avro_resolved_array_reader {
+	avro_resolved_reader_t  parent;
+	avro_resolved_reader_t  *child_resolver;
+} avro_resolved_array_reader_t;
+
+typedef struct avro_resolved_array_value {
+	avro_value_t  wrapped;
+	avro_raw_array_t  children;
+} avro_resolved_array_value_t;
+
+static void
+avro_resolved_array_reader_calculate_size(avro_resolved_reader_t *iface)
+{
+	avro_resolved_array_reader_t  *aiface =
+	    container_of(iface, avro_resolved_array_reader_t, parent);
+
+	/* Only calculate the size of each resolver once */
+	iface->calculate_size = NULL;
+
+	DEBUG("Calculating size for %s->%s",
+	      avro_schema_type_name((iface)->wschema),
+	      avro_schema_type_name((iface)->rschema));
+	iface->instance_size = sizeof(avro_resolved_array_value_t);
+
+	avro_resolved_reader_calculate_size(aiface->child_resolver);
+}
+
+static void
+avro_resolved_array_reader_free_iface(avro_resolved_reader_t *iface, st_table *freeing)
+{
+	avro_resolved_array_reader_t  *aiface =
+	    container_of(iface, avro_resolved_array_reader_t, parent);
+	free_resolver(aiface->child_resolver, freeing);
+	avro_schema_decref(iface->wschema);
+	avro_schema_decref(iface->rschema);
+	avro_freet(avro_resolved_array_reader_t, iface);
+}
+
+static int
+avro_resolved_array_reader_init(const avro_resolved_reader_t *iface, void *vself)
+{
+	const avro_resolved_array_reader_t  *aiface =
+	    container_of(iface, avro_resolved_array_reader_t, parent);
+	avro_resolved_array_value_t  *self = (avro_resolved_array_value_t *) vself;
+	size_t  child_instance_size = aiface->child_resolver->instance_size;
+	DEBUG("Initializing child array (child_size=%" PRIsz ")", child_instance_size);
+	avro_raw_array_init(&self->children, child_instance_size);
+	return 0;
+}
+
+static void
+avro_resolved_array_reader_free_elements(const avro_resolved_reader_t *child_iface,
+					 avro_resolved_array_value_t *self)
+{
+	size_t  i;
+	for (i = 0; i < avro_raw_array_size(&self->children); i++) {
+		void  *child_self = avro_raw_array_get_raw(&self->children, i);
+		avro_resolved_reader_done(child_iface, child_self);
+	}
+}
+
+static void
+avro_resolved_array_reader_done(const avro_resolved_reader_t *iface, void *vself)
+{
+	const avro_resolved_array_reader_t  *aiface =
+	    container_of(iface, avro_resolved_array_reader_t, parent);
+	avro_resolved_array_value_t  *self = (avro_resolved_array_value_t *) vself;
+	avro_resolved_array_reader_free_elements(aiface->child_resolver, self);
+	avro_raw_array_done(&self->children);
+}
+
+static int
+avro_resolved_array_reader_reset(const avro_resolved_reader_t *iface, void *vself)
+{
+	const avro_resolved_array_reader_t  *aiface =
+	    container_of(iface, avro_resolved_array_reader_t, parent);
+	avro_resolved_array_value_t  *self = (avro_resolved_array_value_t *) vself;
+
+	/* Clear out our cache of wrapped children */
+	avro_resolved_array_reader_free_elements(aiface->child_resolver, self);
+	avro_raw_array_clear(&self->children);
+	return 0;
+}
+
+static int
+avro_resolved_array_reader_get_size(const avro_value_iface_t *viface,
+				    const void *vself, size_t *size)
+{
+	AVRO_UNUSED(viface);
+	const avro_resolved_array_value_t  *self = (const avro_resolved_array_value_t *) vself;
+	return avro_value_get_size(&self->wrapped, size);
+}
+
+static int
+avro_resolved_array_reader_get_by_index(const avro_value_iface_t *viface,
+					const void *vself, size_t index,
+					avro_value_t *child, const char **name)
+{
+	int  rval;
+	size_t  old_size;
+	size_t  new_size;
+	const avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	const avro_resolved_array_reader_t  *aiface =
+	    container_of(iface, avro_resolved_array_reader_t, parent);
+	avro_resolved_array_value_t  *self = (avro_resolved_array_value_t *) vself;
+
+	/*
+	 * Ensure that our child wrapper array is big enough to hold
+	 * this many elements.
+	 */
+	new_size = index + 1;
+	check(rval, avro_raw_array_ensure_size0(&self->children, new_size));
+	old_size = avro_raw_array_size(&self->children);
+	if (old_size <= index) {
+		size_t  i;
+		for (i = old_size; i < new_size; i++) {
+			check(rval, avro_resolved_reader_init
+			      (aiface->child_resolver,
+			       avro_raw_array_get_raw(&self->children, i)));
+		}
+		avro_raw_array_size(&self->children) = index+1;
+	}
+
+	child->iface = &aiface->child_resolver->parent;
+	child->self = avro_raw_array_get_raw(&self->children, index);
+
+	DEBUG("Getting element %" PRIsz " from array %p", index, self->wrapped.self);
+	return avro_value_get_by_index(&self->wrapped, index, (avro_value_t *) child->self, name);
+}
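+
+/*
+ * Note on the cache above: the children array only holds wrapper state
+ * for elements that have actually been requested; slots are
+ * initialized lazily on first access and torn down again by the
+ * done/reset functions above.
+ */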
+
+static avro_resolved_array_reader_t *
+avro_resolved_array_reader_create(avro_schema_t wschema, avro_schema_t rschema)
+{
+	avro_resolved_reader_t  *self = (avro_resolved_reader_t  *) avro_new(avro_resolved_array_reader_t);
+	memset(self, 0, sizeof(avro_resolved_array_reader_t));
+
+	self->parent.incref_iface = avro_resolved_reader_incref_iface;
+	self->parent.decref_iface = avro_resolved_reader_decref_iface;
+	self->parent.incref = avro_resolved_reader_incref;
+	self->parent.decref = avro_resolved_reader_decref;
+	self->parent.reset = avro_resolved_reader_reset;
+	self->parent.get_type = avro_resolved_reader_get_type;
+	self->parent.get_schema = avro_resolved_reader_get_schema;
+	self->parent.get_size = avro_resolved_array_reader_get_size;
+	self->parent.get_by_index = avro_resolved_array_reader_get_by_index;
+
+	self->refcount = 1;
+	self->wschema = avro_schema_incref(wschema);
+	self->rschema = avro_schema_incref(rschema);
+	self->calculate_size = avro_resolved_array_reader_calculate_size;
+	self->free_iface = avro_resolved_array_reader_free_iface;
+	self->init = avro_resolved_array_reader_init;
+	self->done = avro_resolved_array_reader_done;
+	self->reset_wrappers = avro_resolved_array_reader_reset;
+	return container_of(self, avro_resolved_array_reader_t, parent);
+}
+
+static avro_resolved_reader_t *
+try_array(memoize_state_t *state,
+	  avro_schema_t wschema, avro_schema_t rschema)
+{
+	/*
+	 * First verify that the writer is an array.
+	 */
+
+	if (!is_avro_array(wschema)) {
+		return NULL;
+	}
+
+	/*
+	 * Array schemas have to have compatible element schemas to be
+	 * compatible themselves.  Try to create a resolver to check
+	 * the compatibility.
+	 */
+
+	avro_resolved_array_reader_t  *aself =
+	    avro_resolved_array_reader_create(wschema, rschema);
+	avro_memoize_set(&state->mem, wschema, rschema, aself);
+
+	avro_schema_t  witems = avro_schema_array_items(wschema);
+	avro_schema_t  ritems = avro_schema_array_items(rschema);
+
+	avro_resolved_reader_t  *item_resolver =
+	    avro_resolved_reader_new_memoized(state, witems, ritems);
+	if (item_resolver == NULL) {
+		avro_memoize_delete(&state->mem, wschema, rschema);
+		avro_value_iface_decref(&aself->parent.parent);
+		avro_prefix_error("Array values aren't compatible: ");
+		return NULL;
+	}
+
+	/*
+	 * The two schemas are compatible.  Store the item schema's
+	 * resolver into the child_resolver field.
+	 */
+
+	aself->child_resolver = item_resolver;
+	return &aself->parent;
+}
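+
+/*
+ * Example (illustrative): these two schemas resolve successfully, with
+ * the int->double promotion resolver from try_double() stored as the
+ * child_resolver:
+ *
+ *     writer: {"type": "array", "items": "int"}
+ *     reader: {"type": "array", "items": "double"}
+ */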
+
+
+/*-----------------------------------------------------------------------
+ * enum
+ */
+
+static int
+avro_resolved_reader_get_enum(const avro_value_iface_t *viface,
+			      const void *vself, int *val)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Getting enum from %p", src->self);
+	return avro_value_get_enum(src, val);
+}
+
+static avro_resolved_reader_t *
+try_enum(memoize_state_t *state,
+	 avro_schema_t wschema, avro_schema_t rschema)
+{
+	/*
+	 * Enum schemas have to have the same name — but not the same
+	 * list of symbols — to be compatible.
+	 */
+
+	if (is_avro_enum(wschema)) {
+		const char  *wname = avro_schema_name(wschema);
+		const char  *rname = avro_schema_name(rschema);
+
+		if (strcmp(wname, rname) == 0) {
+			avro_resolved_reader_t  *self =
+			    avro_resolved_reader_create(wschema, rschema);
+			avro_memoize_set(&state->mem, wschema, rschema, self);
+			self->parent.get_enum = avro_resolved_reader_get_enum;
+			return self;
+		}
+	}
+	avro_set_error("Writer %s not compatible with reader %s",
+		       avro_schema_type_name(wschema),
+		       avro_schema_type_name(rschema));
+	return NULL;
+}
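+
+/*
+ * Example (illustrative): a writer enum named "Suit" resolves against
+ * a reader enum named "Suit" even if the two declare different symbol
+ * lists; only the names are compared here, so the symbol lists are not
+ * checked by this resolver.
+ */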
+
+
+/*-----------------------------------------------------------------------
+ * fixed
+ */
+
+static int
+avro_resolved_reader_get_fixed(const avro_value_iface_t *viface,
+			       const void *vself, const void **buf, size_t *size)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Getting fixed from %p", vself);
+	return avro_value_get_fixed(src, buf, size);
+}
+
+static int
+avro_resolved_reader_grab_fixed(const avro_value_iface_t *viface,
+				const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	DEBUG("Grabbing fixed from %p", vself);
+	return avro_value_grab_fixed(src, dest);
+}
+
+static avro_resolved_reader_t *
+try_fixed(memoize_state_t *state,
+	  avro_schema_t wschema, avro_schema_t rschema)
+{
+	/*
+	 * Fixed schemas need the same name and size to be compatible.
+	 */
+
+	if (avro_schema_equal(wschema, rschema)) {
+		avro_resolved_reader_t  *self =
+		    avro_resolved_reader_create(wschema, rschema);
+		avro_memoize_set(&state->mem, wschema, rschema, self);
+		self->parent.get_fixed = avro_resolved_reader_get_fixed;
+		self->parent.grab_fixed = avro_resolved_reader_grab_fixed;
+		return self;
+	}
+	avro_set_error("Writer %s not compatible with reader %s",
+		       avro_schema_type_name(wschema),
+		       avro_schema_type_name(rschema));
+	return NULL;
+}
+
+
+/*-----------------------------------------------------------------------
+ * map
+ */
+
+typedef struct avro_resolved_map_reader {
+	avro_resolved_reader_t  parent;
+	avro_resolved_reader_t  *child_resolver;
+} avro_resolved_map_reader_t;
+
+typedef struct avro_resolved_map_value {
+	avro_value_t  wrapped;
+	avro_raw_array_t  children;
+} avro_resolved_map_value_t;
+
+static void
+avro_resolved_map_reader_calculate_size(avro_resolved_reader_t *iface)
+{
+	avro_resolved_map_reader_t  *miface =
+	    container_of(iface, avro_resolved_map_reader_t, parent);
+
+	/* Only calculate the size of each resolver once */
+	iface->calculate_size = NULL;
+
+	DEBUG("Calculating size for %s->%s",
+	      avro_schema_type_name((iface)->wschema),
+	      avro_schema_type_name((iface)->rschema));
+	iface->instance_size = sizeof(avro_resolved_map_value_t);
+
+	avro_resolved_reader_calculate_size(miface->child_resolver);
+}
+
+static void
+avro_resolved_map_reader_free_iface(avro_resolved_reader_t *iface, st_table *freeing)
+{
+	avro_resolved_map_reader_t  *miface =
+	    container_of(iface, avro_resolved_map_reader_t, parent);
+	free_resolver(miface->child_resolver, freeing);
+	avro_schema_decref(iface->wschema);
+	avro_schema_decref(iface->rschema);
+	avro_freet(avro_resolved_map_reader_t, iface);
+}
+
+static int
+avro_resolved_map_reader_init(const avro_resolved_reader_t *iface, void *vself)
+{
+	const avro_resolved_map_reader_t  *miface =
+	    container_of(iface, avro_resolved_map_reader_t, parent);
+	avro_resolved_map_value_t  *self = (avro_resolved_map_value_t *) vself;
+	size_t  child_instance_size = miface->child_resolver->instance_size;
+	DEBUG("Initializing child array for map (child_size=%" PRIsz ")", child_instance_size);
+	avro_raw_array_init(&self->children, child_instance_size);
+	return 0;
+}
+
+static void
+avro_resolved_map_reader_free_elements(const avro_resolved_reader_t *child_iface,
+				       avro_resolved_map_value_t *self)
+{
+	size_t  i;
+	for (i = 0; i < avro_raw_array_size(&self->children); i++) {
+		void  *child_self = avro_raw_array_get_raw(&self->children, i);
+		avro_resolved_reader_done(child_iface, child_self);
+	}
+}
+
+static void
+avro_resolved_map_reader_done(const avro_resolved_reader_t *iface, void *vself)
+{
+	const avro_resolved_map_reader_t  *miface =
+	    container_of(iface, avro_resolved_map_reader_t, parent);
+	avro_resolved_map_value_t  *self = (avro_resolved_map_value_t *) vself;
+	avro_resolved_map_reader_free_elements(miface->child_resolver, self);
+	avro_raw_array_done(&self->children);
+}
+
+static int
+avro_resolved_map_reader_reset(const avro_resolved_reader_t *iface, void *vself)
+{
+	const avro_resolved_map_reader_t  *miface =
+	    container_of(iface, avro_resolved_map_reader_t, parent);
+	avro_resolved_map_value_t  *self = (avro_resolved_map_value_t *) vself;
+
+	/* Clear out our cache of wrapped children */
+	avro_resolved_map_reader_free_elements(miface->child_resolver, self);
+	return 0;
+}
+
+static int
+avro_resolved_map_reader_get_size(const avro_value_iface_t *viface,
+				  const void *vself, size_t *size)
+{
+	AVRO_UNUSED(viface);
+	const avro_value_t  *src = (const avro_value_t *) vself;
+	return avro_value_get_size(src, size);
+}
+
+static int
+avro_resolved_map_reader_get_by_index(const avro_value_iface_t *viface,
+				      const void *vself, size_t index,
+				      avro_value_t *child, const char **name)
+{
+	int  rval;
+	const avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	const avro_resolved_map_reader_t  *miface =
+	    container_of(iface, avro_resolved_map_reader_t, parent);
+	avro_resolved_map_value_t  *self = (avro_resolved_map_value_t *) vself;
+
+	/*
+	 * Ensure that our child wrapper array is big enough to hold
+	 * this many elements.
+	 */
+	check(rval, avro_raw_array_ensure_size0(&self->children, index+1));
+	if (avro_raw_array_size(&self->children) <= index) {
+		avro_raw_array_size(&self->children) = index+1;
+	}
+
+	child->iface = &miface->child_resolver->parent;
+	child->self = avro_raw_array_get_raw(&self->children, index);
+
+	DEBUG("Getting element %" PRIsz " from map %p", index, self->wrapped.self);
+	return avro_value_get_by_index(&self->wrapped, index, (avro_value_t *) child->self, name);
+}
+
+static int
+avro_resolved_map_reader_get_by_name(const avro_value_iface_t *viface,
+				     const void *vself, const char *name,
+				     avro_value_t *child, size_t *index)
+{
+	int  rval;
+	const avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	const avro_resolved_map_reader_t  *miface =
+	    container_of(iface, avro_resolved_map_reader_t, parent);
+	avro_resolved_map_value_t  *self = (avro_resolved_map_value_t *) vself;
+
+	/*
+	 * This is a bit convoluted.  We need to stash the wrapped child
+	 * value somewhere in our children array.  But we don't know
+	 * where to put it until the wrapped map tells us what its index
+	 * is.
+	 */
+
+	avro_value_t  real_child;
+	size_t  real_index;
+
+	DEBUG("Getting element %s from map %p", name, self->wrapped.self);
+	check(rval, avro_value_get_by_name
+	      (&self->wrapped, name, &real_child, &real_index));
+
+	/*
+	 * Ensure that our child wrapper array is big enough to hold
+	 * this many elements.
+	 */
+	check(rval, avro_raw_array_ensure_size0(&self->children, real_index+1));
+	if (avro_raw_array_size(&self->children) <= real_index) {
+		avro_raw_array_size(&self->children) = real_index+1;
+	}
+
+	child->iface = &miface->child_resolver->parent;
+	child->self = avro_raw_array_get_raw(&self->children, real_index);
+	avro_value_t  *child_vself = (avro_value_t *) child->self;
+	*child_vself = real_child;
+
+	if (index != NULL) {
+		*index = real_index;
+	}
+	return 0;
+}
+
+static avro_resolved_map_reader_t *
+avro_resolved_map_reader_create(avro_schema_t wschema, avro_schema_t rschema)
+{
+	avro_resolved_reader_t  *self = (avro_resolved_reader_t *) avro_new(avro_resolved_map_reader_t);
+	memset(self, 0, sizeof(avro_resolved_map_reader_t));
+
+	self->parent.incref_iface = avro_resolved_reader_incref_iface;
+	self->parent.decref_iface = avro_resolved_reader_decref_iface;
+	self->parent.incref = avro_resolved_reader_incref;
+	self->parent.decref = avro_resolved_reader_decref;
+	self->parent.reset = avro_resolved_reader_reset;
+	self->parent.get_type = avro_resolved_reader_get_type;
+	self->parent.get_schema = avro_resolved_reader_get_schema;
+	self->parent.get_size = avro_resolved_map_reader_get_size;
+	self->parent.get_by_index = avro_resolved_map_reader_get_by_index;
+	self->parent.get_by_name = avro_resolved_map_reader_get_by_name;
+
+	self->refcount = 1;
+	self->wschema = avro_schema_incref(wschema);
+	self->rschema = avro_schema_incref(rschema);
+	self->calculate_size = avro_resolved_map_reader_calculate_size;
+	self->free_iface = avro_resolved_map_reader_free_iface;
+	self->init = avro_resolved_map_reader_init;
+	self->done = avro_resolved_map_reader_done;
+	self->reset_wrappers = avro_resolved_map_reader_reset;
+	return container_of(self, avro_resolved_map_reader_t, parent);
+}
+
+static avro_resolved_reader_t *
+try_map(memoize_state_t *state,
+	avro_schema_t wschema, avro_schema_t rschema)
+{
+	/*
+	 * First verify that the writer is a map.
+	 */
+
+	if (!is_avro_map(wschema)) {
+		return NULL;
+	}
+
+	/*
+	 * Map schemas are compatible only if their value schemas are
+	 * compatible.  Try to create a resolver to check the
+	 * compatibility.
+	 */
+
+	avro_resolved_map_reader_t  *mself =
+	    avro_resolved_map_reader_create(wschema, rschema);
+	avro_memoize_set(&state->mem, wschema, rschema, mself);
+
+	avro_schema_t  witems = avro_schema_map_values(wschema);
+	avro_schema_t  ritems = avro_schema_map_values(rschema);
+
+	avro_resolved_reader_t  *item_resolver =
+	    avro_resolved_reader_new_memoized(state, witems, ritems);
+	if (item_resolver == NULL) {
+		avro_memoize_delete(&state->mem, wschema, rschema);
+		avro_value_iface_decref(&mself->parent.parent);
+		avro_prefix_error("Map values aren't compatible: ");
+		return NULL;
+	}
+
+	/*
+	 * The two schemas are compatible.  Store the item schema's
+	 * resolver into the child_resolver field.
+	 */
+
+	mself->child_resolver = item_resolver;
+	return &mself->parent;
+}
+
+
+/*-----------------------------------------------------------------------
+ * record
+ */
+
+typedef struct avro_resolved_record_reader {
+	avro_resolved_reader_t  parent;
+	size_t  field_count;
+	size_t  *field_offsets;
+	avro_resolved_reader_t  **field_resolvers;
+	size_t  *index_mapping;
+} avro_resolved_record_reader_t;
+
+typedef struct avro_resolved_record_value {
+	avro_value_t  wrapped;
+	/* The rest of the struct is taken up by the inline storage
+	 * needed for each field. */
+} avro_resolved_record_value_t;
+
+/** Return a pointer to the given field within a record struct. */
+#define avro_resolved_record_field(iface, rec, index) \
+	(((char *) (rec)) + (iface)->field_offsets[(index)])
+
+
+static void
+avro_resolved_record_reader_calculate_size(avro_resolved_reader_t *iface)
+{
+	avro_resolved_record_reader_t  *riface =
+	    container_of(iface, avro_resolved_record_reader_t, parent);
+
+	/* Only calculate the size for any resolver once */
+	iface->calculate_size = NULL;
+
+	DEBUG("Calculating size for %s->%s",
+	      avro_schema_type_name((iface)->wschema),
+	      avro_schema_type_name((iface)->rschema));
+
+	/*
+	 * Once we've figured out which reader fields we actually need,
+	 * calculate an offset for each one.
+	 */
+
+	size_t  ri;
+	size_t  next_offset = sizeof(avro_resolved_record_value_t);
+	for (ri = 0; ri < riface->field_count; ri++) {
+		riface->field_offsets[ri] = next_offset;
+		if (riface->field_resolvers[ri] != NULL) {
+			avro_resolved_reader_calculate_size
+			    (riface->field_resolvers[ri]);
+			size_t  field_size =
+			    riface->field_resolvers[ri]->instance_size;
+			DEBUG("Field %" PRIsz " has size %" PRIsz, ri, field_size);
+			next_offset += field_size;
+		} else {
+			DEBUG("Field %" PRIsz " is being skipped", ri);
+		}
+	}
+
+	DEBUG("Record has size %" PRIsz, next_offset);
+	iface->instance_size = next_offset;
+}
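+
+/*
+ * An illustrative (hypothetical) layout: for a reader record with
+ * three fields whose resolvers report instance sizes 16, 0 (skipped,
+ * resolver NULL) and 24, the loop above produces, with
+ * H = sizeof(avro_resolved_record_value_t):
+ *
+ *   field_offsets[] = { H, H+16, H+16 }
+ *   iface->instance_size = H + 40
+ *
+ * A skipped field keeps an offset but consumes no storage.
+ */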
+
+
+static void
+avro_resolved_record_reader_free_iface(avro_resolved_reader_t *iface, st_table *freeing)
+{
+	avro_resolved_record_reader_t  *riface =
+	    container_of(iface, avro_resolved_record_reader_t, parent);
+
+	if (riface->field_offsets != NULL) {
+		avro_free(riface->field_offsets,
+			  riface->field_count * sizeof(size_t));
+	}
+
+	if (riface->field_resolvers != NULL) {
+		size_t  i;
+		for (i = 0; i < riface->field_count; i++) {
+			if (riface->field_resolvers[i] != NULL) {
+				DEBUG("Freeing field %" PRIsz " %p", i,
+				      riface->field_resolvers[i]);
+				free_resolver(riface->field_resolvers[i], freeing);
+			}
+		}
+		avro_free(riface->field_resolvers,
+			  riface->field_count * sizeof(avro_resolved_reader_t *));
+	}
+
+	if (riface->index_mapping != NULL) {
+		avro_free(riface->index_mapping,
+			  riface->field_count * sizeof(size_t));
+	}
+
+	avro_schema_decref(iface->wschema);
+	avro_schema_decref(iface->rschema);
+	avro_freet(avro_resolved_record_reader_t, iface);
+}
+
+static int
+avro_resolved_record_reader_init(const avro_resolved_reader_t *iface, void *vself)
+{
+	int  rval;
+	const avro_resolved_record_reader_t  *riface =
+	    container_of(iface, avro_resolved_record_reader_t, parent);
+	avro_resolved_record_value_t  *self = (avro_resolved_record_value_t *) vself;
+
+	/* Initialize each field */
+	size_t  i;
+	for (i = 0; i < riface->field_count; i++) {
+		if (riface->field_resolvers[i] != NULL) {
+			check(rval, avro_resolved_reader_init
+			      (riface->field_resolvers[i],
+			       avro_resolved_record_field(riface, self, i)));
+		}
+	}
+
+	return 0;
+}
+
+static void
+avro_resolved_record_reader_done(const avro_resolved_reader_t *iface, void *vself)
+{
+	const avro_resolved_record_reader_t  *riface =
+	    container_of(iface, avro_resolved_record_reader_t, parent);
+	avro_resolved_record_value_t  *self = (avro_resolved_record_value_t  *) vself;
+
+	/* Finalize each field */
+	size_t  i;
+	for (i = 0; i < riface->field_count; i++) {
+		if (riface->field_resolvers[i] != NULL) {
+			avro_resolved_reader_done
+			    (riface->field_resolvers[i],
+			     avro_resolved_record_field(riface, self, i));
+		}
+	}
+}
+
+static int
+avro_resolved_record_reader_reset(const avro_resolved_reader_t *iface, void *vself)
+{
+	int  rval;
+	const avro_resolved_record_reader_t  *riface =
+	    container_of(iface, avro_resolved_record_reader_t, parent);
+	avro_resolved_record_value_t  *self = (avro_resolved_record_value_t *) vself;
+
+	/* Reset each field */
+	size_t  i;
+	for (i = 0; i < riface->field_count; i++) {
+		if (riface->field_resolvers[i] != NULL) {
+			check(rval, avro_resolved_reader_reset_wrappers
+			      (riface->field_resolvers[i],
+			       avro_resolved_record_field(riface, self, i)));
+		}
+	}
+
+	return 0;
+}
+
+static int
+avro_resolved_record_reader_get_size(const avro_value_iface_t *viface,
+				     const void *vself, size_t *size)
+{
+	AVRO_UNUSED(vself);
+	const avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	const avro_resolved_record_reader_t  *riface =
+	    container_of(iface, avro_resolved_record_reader_t, parent);
+	*size = riface->field_count;
+	return 0;
+}
+
+static int
+avro_resolved_record_reader_get_by_index(const avro_value_iface_t *viface,
+					 const void *vself, size_t index,
+					 avro_value_t *child, const char **name)
+{
+	const avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	const avro_resolved_record_reader_t  *riface =
+	    container_of(iface, avro_resolved_record_reader_t, parent);
+	const avro_resolved_record_value_t  *self = (avro_resolved_record_value_t *) vself;
+
+	DEBUG("Getting reader field %" PRIsz " from record %p", index, self->wrapped.self);
+	if (riface->field_resolvers[index] == NULL) {
+		/*
+		 * TODO: Return the default value if the writer record
+		 * doesn't contain this field.
+		 */
+		DEBUG("Writer doesn't have field");
+		avro_set_error("NIY: Default values");
+		return EINVAL;
+	}
+
+	size_t  writer_index = riface->index_mapping[index];
+	DEBUG("  Writer field is %" PRIsz, writer_index);
+	child->iface = &riface->field_resolvers[index]->parent;
+	child->self = avro_resolved_record_field(riface, self, index);
+	return avro_value_get_by_index(&self->wrapped, writer_index, (avro_value_t *) child->self, name);
+}
+
+static int
+avro_resolved_record_reader_get_by_name(const avro_value_iface_t *viface,
+					const void *vself, const char *name,
+					avro_value_t *child, size_t *index)
+{
+	const avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+
+	int  ri = avro_schema_record_field_get_index(iface->rschema, name);
+	if (ri == -1) {
+		avro_set_error("Record doesn't have field named %s", name);
+		return EINVAL;
+	}
+
+	DEBUG("Reader field %s is at index %d", name, ri);
+	if (index != NULL) {
+		*index = ri;
+	}
+	return avro_resolved_record_reader_get_by_index(viface, vself, ri, child, NULL);
+}
+
+static avro_resolved_record_reader_t *
+avro_resolved_record_reader_create(avro_schema_t wschema, avro_schema_t rschema)
+{
+	avro_resolved_reader_t  *self = (avro_resolved_reader_t *) avro_new(avro_resolved_record_reader_t);
+	memset(self, 0, sizeof(avro_resolved_record_reader_t));
+
+	self->parent.incref_iface = avro_resolved_reader_incref_iface;
+	self->parent.decref_iface = avro_resolved_reader_decref_iface;
+	self->parent.incref = avro_resolved_reader_incref;
+	self->parent.decref = avro_resolved_reader_decref;
+	self->parent.reset = avro_resolved_reader_reset;
+	self->parent.get_type = avro_resolved_reader_get_type;
+	self->parent.get_schema = avro_resolved_reader_get_schema;
+	self->parent.get_size = avro_resolved_record_reader_get_size;
+	self->parent.get_by_index = avro_resolved_record_reader_get_by_index;
+	self->parent.get_by_name = avro_resolved_record_reader_get_by_name;
+
+	self->refcount = 1;
+	self->wschema = avro_schema_incref(wschema);
+	self->rschema = avro_schema_incref(rschema);
+	self->calculate_size = avro_resolved_record_reader_calculate_size;
+	self->free_iface = avro_resolved_record_reader_free_iface;
+	self->init = avro_resolved_record_reader_init;
+	self->done = avro_resolved_record_reader_done;
+	self->reset_wrappers = avro_resolved_record_reader_reset;
+	return container_of(self, avro_resolved_record_reader_t, parent);
+}
+
+static avro_resolved_reader_t *
+try_record(memoize_state_t *state,
+	   avro_schema_t wschema, avro_schema_t rschema)
+{
+	/*
+	 * First verify that the writer is also a record, and has the
+	 * same name as the reader.
+	 */
+
+	if (!is_avro_record(wschema)) {
+		return NULL;
+	}
+
+	const char  *wname = avro_schema_name(wschema);
+	const char  *rname = avro_schema_name(rschema);
+
+	if (strcmp(wname, rname) != 0) {
+		return NULL;
+	}
+
+	/*
+	 * Categorize the fields in the record schemas.  Fields that are
+	 * only in the writer are ignored.  Fields that are only in the
+	 * reader raise a schema mismatch error, unless the field has a
+	 * default value.  Fields that are in both are resolved
+	 * recursively.
+	 *
+	 * The field_resolvers array will contain an avro_value_iface_t
+	 * for each field in the reader schema.  To build this array, we
+	 * loop through the fields of the reader schema.  If that field
+	 * is also in the writer schema, we resolve them recursively,
+	 * and store the resolver into the array.  If the field isn't in
+	 * the writer schema, we raise an error.  (TODO: Eventually,
+	 * we'll handle default values here.)  Since the array is indexed
+	 * by reader field, a NULL entry would represent a reader field
+	 * that's missing from the writer and needs its default value;
+	 * until defaults are supported, we error out instead.  Writer
+	 * fields that don't appear in the reader schema are simply
+	 * skipped, and aren't accessible in the resolved reader.
+	 */
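+
+	/*
+	 * As a concrete (hypothetical) example: resolving a writer
+	 * record with fields {a: int, b: string, c: long} against a
+	 * reader record with fields {c: long, a: int} yields
+	 * field_resolvers = { <long resolver>, <int resolver> } and
+	 * index_mapping = { 2, 0 }; writer field "b" is skipped.
+	 */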
+
+	avro_resolved_record_reader_t  *rself =
+	    avro_resolved_record_reader_create(wschema, rschema);
+	avro_memoize_set(&state->mem, wschema, rschema, rself);
+
+	size_t  rfields = avro_schema_record_size(rschema);
+
+	DEBUG("Checking reader record schema %s", wname);
+
+	avro_resolved_reader_t  **field_resolvers =
+	    (avro_resolved_reader_t **) avro_calloc(rfields, sizeof(avro_resolved_reader_t *));
+	size_t  *field_offsets = (size_t *) avro_calloc(rfields, sizeof(size_t));
+	size_t  *index_mapping = (size_t *) avro_calloc(rfields, sizeof(size_t));
+
+	size_t  ri;
+	for (ri = 0; ri < rfields; ri++) {
+		avro_schema_t  rfield =
+		    avro_schema_record_field_get_by_index(rschema, ri);
+		const char  *field_name =
+		    avro_schema_record_field_name(rschema, ri);
+
+		DEBUG("Resolving reader record field %" PRIsz " (%s)", ri, field_name);
+
+		/*
+		 * See if this field is also in the writer schema.
+		 */
+
+		int  wi = avro_schema_record_field_get_index(wschema, field_name);
+
+		if (wi == -1) {
+			/*
+			 * This field isn't in the writer schema —
+			 * that's an error!  TODO: Handle default
+			 * values!
+			 */
+
+			DEBUG("Field %s isn't in writer", field_name);
+			avro_set_error("Reader field %s doesn't appear in writer",
+				       field_name);
+			goto error;
+		}
+
+		/*
+		 * Try to recursively resolve the schemas for this
+		 * field.  If they're not compatible, that's an error.
+		 */
+
+		avro_schema_t  wfield =
+		    avro_schema_record_field_get_by_index(wschema, wi);
+		avro_resolved_reader_t  *field_resolver =
+		    avro_resolved_reader_new_memoized(state, wfield, rfield);
+
+		if (field_resolver == NULL) {
+			avro_prefix_error("Field %s isn't compatible: ", field_name);
+			goto error;
+		}
+
+		/*
+		 * Save the details for this field.
+		 */
+
+		DEBUG("Found match for field %s (%" PRIsz " in reader, %d in writer)",
+		      field_name, ri, wi);
+		field_resolvers[ri] = field_resolver;
+		index_mapping[ri] = wi;
+	}
+
+	/*
+	 * We might not have found matches for all of the writer fields,
+	 * but that's okay — any extras will be ignored.
+	 */
+
+	rself->field_count = rfields;
+	rself->field_offsets = field_offsets;
+	rself->field_resolvers = field_resolvers;
+	rself->index_mapping = index_mapping;
+	return &rself->parent;
+
+error:
+	/*
+	 * Clean up any resolver we might have already created.
+	 */
+
+	avro_memoize_delete(&state->mem, wschema, rschema);
+	avro_value_iface_decref(&rself->parent.parent);
+
+	{
+		unsigned int  i;
+		for (i = 0; i < rfields; i++) {
+			if (field_resolvers[i]) {
+				avro_value_iface_decref(&field_resolvers[i]->parent);
+			}
+		}
+	}
+
+	avro_free(field_resolvers, rfields * sizeof(avro_resolved_reader_t *));
+	avro_free(field_offsets, rfields * sizeof(size_t));
+	avro_free(index_mapping, rfields * sizeof(size_t));
+	return NULL;
+}
+
+
+/*-----------------------------------------------------------------------
+ * writer union
+ */
+
+/*
+ * For writer unions, we maintain a list of resolvers for each branch of
+ * the union.  When we encounter a writer value, we see which branch it
+ * is, and choose a reader resolver based on that.
+ */
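+
+/*
+ * For example (a hypothetical pairing): resolving writer union
+ * ["null", "int"] against reader schema "int" yields
+ * branch_resolvers = { NULL, <int resolver> }.  A writer value whose
+ * discriminant is 0 (null) then fails to resolve, while one whose
+ * discriminant is 1 (int) reads through the int resolver.
+ */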
+
+typedef struct avro_resolved_wunion_reader {
+	avro_resolved_reader_t  parent;
+
+	/* The number of branches in the writer union */
+	size_t  branch_count;
+
+	/* A child resolver for each branch of the writer union.  If any
+	 * of these are NULL, then we don't have anything on the reader
+	 * side that's compatible with that writer branch. */
+	avro_resolved_reader_t  **branch_resolvers;
+
+} avro_resolved_wunion_reader_t;
+
+typedef struct avro_resolved_wunion_value {
+	avro_value_t  wrapped;
+
+	/** The currently active branch of the union.  -1 if no branch
+	 * is selected. */
+	int  discriminant;
+
+	/* The rest of the struct is taken up by the inline storage
+	 * needed for the active branch. */
+} avro_resolved_wunion_value_t;
+
+/** Return a pointer to the active branch within a union struct. */
+#define avro_resolved_wunion_branch(_wunion) \
+	(((char *) (_wunion)) + sizeof(avro_resolved_wunion_value_t))
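+
+/*
+ * Instances are laid out as
+ *
+ *   [ avro_resolved_wunion_value_t ][ max_branch_size bytes ]
+ *
+ * (see calculate_size below), so a single allocation can hold
+ * whichever branch the writer happens to select; the macro above
+ * returns a pointer to that trailing branch storage.
+ */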
+
+
+static void
+avro_resolved_wunion_reader_calculate_size(avro_resolved_reader_t *iface)
+{
+	avro_resolved_wunion_reader_t  *uiface =
+	    container_of(iface, avro_resolved_wunion_reader_t, parent);
+
+	/* Only calculate the size for any resolver once */
+	iface->calculate_size = NULL;
+
+	DEBUG("Calculating size for %s->%s",
+	      avro_schema_type_name((iface)->wschema),
+	      avro_schema_type_name((iface)->rschema));
+
+	size_t  i;
+	size_t  max_branch_size = 0;
+	for (i = 0; i < uiface->branch_count; i++) {
+		if (uiface->branch_resolvers[i] == NULL) {
+			DEBUG("No match for writer union branch %" PRIsz, i);
+		} else {
+			avro_resolved_reader_calculate_size
+			    (uiface->branch_resolvers[i]);
+			size_t  branch_size =
+			    uiface->branch_resolvers[i]->instance_size;
+			DEBUG("Writer branch %" PRIsz " has size %" PRIsz, i, branch_size);
+			if (branch_size > max_branch_size) {
+				max_branch_size = branch_size;
+			}
+		}
+	}
+
+	DEBUG("Maximum branch size is %" PRIsz, max_branch_size);
+	iface->instance_size =
+	    sizeof(avro_resolved_wunion_value_t) + max_branch_size;
+	DEBUG("Total union size is %" PRIsz, iface->instance_size);
+}
+
+
+static void
+avro_resolved_wunion_reader_free_iface(avro_resolved_reader_t *iface, st_table *freeing)
+{
+	avro_resolved_wunion_reader_t  *uiface =
+	    container_of(iface, avro_resolved_wunion_reader_t, parent);
+
+	if (uiface->branch_resolvers != NULL) {
+		size_t  i;
+		for (i = 0; i < uiface->branch_count; i++) {
+			if (uiface->branch_resolvers[i] != NULL) {
+				free_resolver(uiface->branch_resolvers[i], freeing);
+			}
+		}
+		avro_free(uiface->branch_resolvers,
+			  uiface->branch_count * sizeof(avro_resolved_reader_t *));
+	}
+
+	avro_schema_decref(iface->wschema);
+	avro_schema_decref(iface->rschema);
+	avro_freet(avro_resolved_wunion_reader_t, iface);
+}
+
+static int
+avro_resolved_wunion_reader_init(const avro_resolved_reader_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_wunion_value_t  *self = (avro_resolved_wunion_value_t *) vself;
+	self->discriminant = -1;
+	return 0;
+}
+
+static void
+avro_resolved_wunion_reader_done(const avro_resolved_reader_t *iface, void *vself)
+{
+	const avro_resolved_wunion_reader_t  *uiface =
+	    container_of(iface, avro_resolved_wunion_reader_t, parent);
+	avro_resolved_wunion_value_t  *self = (avro_resolved_wunion_value_t *) vself;
+	if (self->discriminant >= 0) {
+		avro_resolved_reader_done
+		    (uiface->branch_resolvers[self->discriminant],
+		     avro_resolved_wunion_branch(self));
+		self->discriminant = -1;
+	}
+}
+
+static int
+avro_resolved_wunion_reader_reset(const avro_resolved_reader_t *iface, void *vself)
+{
+	const avro_resolved_wunion_reader_t  *uiface =
+	    container_of(iface, avro_resolved_wunion_reader_t, parent);
+	avro_resolved_wunion_value_t  *self = (avro_resolved_wunion_value_t *) vself;
+
+	/* Keep the same branch selected, for the common case that we're
+	 * about to reuse it. */
+	if (self->discriminant >= 0) {
+		return avro_resolved_reader_reset_wrappers
+		    (uiface->branch_resolvers[self->discriminant],
+		     avro_resolved_wunion_branch(self));
+	}
+
+	return 0;
+}
+
+static int
+avro_resolved_wunion_get_real_src(const avro_value_iface_t *viface,
+				  const void *vself, avro_value_t *real_src)
+{
+	int  rval;
+	const avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	const avro_resolved_wunion_reader_t  *uiface =
+	    container_of(iface, avro_resolved_wunion_reader_t, parent);
+	avro_resolved_wunion_value_t  *self = (avro_resolved_wunion_value_t *) vself;
+	int  writer_disc;
+	check(rval, avro_value_get_discriminant(&self->wrapped, &writer_disc));
+	DEBUG("Writer is branch %d", writer_disc);
+
+	if (uiface->branch_resolvers[writer_disc] == NULL) {
+		avro_set_error("Reader isn't compatible with writer branch %d",
+			       writer_disc);
+		return EINVAL;
+	}
+
+	if (self->discriminant == writer_disc) {
+		DEBUG("Writer branch %d already selected", writer_disc);
+	} else {
+		if (self->discriminant >= 0) {
+			DEBUG("Finalizing old writer branch %d", self->discriminant);
+			avro_resolved_reader_done
+			    (uiface->branch_resolvers[self->discriminant],
+			     avro_resolved_wunion_branch(self));
+		}
+		DEBUG("Initializing writer branch %d", writer_disc);
+		check(rval, avro_resolved_reader_init
+		      (uiface->branch_resolvers[writer_disc],
+		       avro_resolved_wunion_branch(self)));
+		self->discriminant = writer_disc;
+	}
+
+	real_src->iface = &uiface->branch_resolvers[writer_disc]->parent;
+	real_src->self = avro_resolved_wunion_branch(self);
+	return avro_value_get_current_branch(&self->wrapped, (avro_value_t *) real_src->self);
+}
+
+static int
+avro_resolved_wunion_reader_get_boolean(const avro_value_iface_t *viface,
+					const void *vself, int *out)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_boolean(&src, out);
+}
+
+static int
+avro_resolved_wunion_reader_get_bytes(const avro_value_iface_t *viface,
+				      const void *vself, const void **buf, size_t *size)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_bytes(&src, buf, size);
+}
+
+static int
+avro_resolved_wunion_reader_grab_bytes(const avro_value_iface_t *viface,
+				       const void *vself, avro_wrapped_buffer_t *dest)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_grab_bytes(&src, dest);
+}
+
+static int
+avro_resolved_wunion_reader_get_double(const avro_value_iface_t *viface,
+				       const void *vself, double *out)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_double(&src, out);
+}
+
+static int
+avro_resolved_wunion_reader_get_float(const avro_value_iface_t *viface,
+				      const void *vself, float *out)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_float(&src, out);
+}
+
+static int
+avro_resolved_wunion_reader_get_int(const avro_value_iface_t *viface,
+				    const void *vself, int32_t *out)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_int(&src, out);
+}
+
+static int
+avro_resolved_wunion_reader_get_long(const avro_value_iface_t *viface,
+				     const void *vself, int64_t *out)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_long(&src, out);
+}
+
+static int
+avro_resolved_wunion_reader_get_null(const avro_value_iface_t *viface,
+				     const void *vself)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_null(&src);
+}
+
+static int
+avro_resolved_wunion_reader_get_string(const avro_value_iface_t *viface,
+				       const void *vself, const char **str, size_t *size)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_string(&src, str, size);
+}
+
+static int
+avro_resolved_wunion_reader_grab_string(const avro_value_iface_t *viface,
+					const void *vself, avro_wrapped_buffer_t *dest)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_grab_string(&src, dest);
+}
+
+static int
+avro_resolved_wunion_reader_get_enum(const avro_value_iface_t *viface,
+				     const void *vself, int *out)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_enum(&src, out);
+}
+
+static int
+avro_resolved_wunion_reader_get_fixed(const avro_value_iface_t *viface,
+				      const void *vself, const void **buf, size_t *size)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_fixed(&src, buf, size);
+}
+
+static int
+avro_resolved_wunion_reader_grab_fixed(const avro_value_iface_t *viface,
+				       const void *vself, avro_wrapped_buffer_t *dest)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_grab_fixed(&src, dest);
+}
+
+static int
+avro_resolved_wunion_reader_set_boolean(const avro_value_iface_t *viface,
+					void *vself, int val)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_set_boolean(&src, val);
+}
+
+static int
+avro_resolved_wunion_reader_set_bytes(const avro_value_iface_t *viface,
+				      void *vself, void *buf, size_t size)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_set_bytes(&src, buf, size);
+}
+
+static int
+avro_resolved_wunion_reader_give_bytes(const avro_value_iface_t *viface,
+				       void *vself, avro_wrapped_buffer_t *buf)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_give_bytes(&src, buf);
+}
+
+static int
+avro_resolved_wunion_reader_set_double(const avro_value_iface_t *viface,
+				       void *vself, double val)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_set_double(&src, val);
+}
+
+static int
+avro_resolved_wunion_reader_set_float(const avro_value_iface_t *viface,
+				      void *vself, float val)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_set_float(&src, val);
+}
+
+static int
+avro_resolved_wunion_reader_set_int(const avro_value_iface_t *viface,
+				    void *vself, int32_t val)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_set_int(&src, val);
+}
+
+static int
+avro_resolved_wunion_reader_set_long(const avro_value_iface_t *viface,
+				     void *vself, int64_t val)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_set_long(&src, val);
+}
+
+static int
+avro_resolved_wunion_reader_set_null(const avro_value_iface_t *viface,
+				     void *vself)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_set_null(&src);
+}
+
+static int
+avro_resolved_wunion_reader_set_string(const avro_value_iface_t *viface,
+				       void *vself, const char *str)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_set_string(&src, str);
+}
+
+static int
+avro_resolved_wunion_reader_set_string_len(const avro_value_iface_t *viface,
+					   void *vself, const char *str, size_t size)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_set_string_len(&src, str, size);
+}
+
+static int
+avro_resolved_wunion_reader_give_string_len(const avro_value_iface_t *viface,
+					    void *vself, avro_wrapped_buffer_t *buf)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_give_string_len(&src, buf);
+}
+
+static int
+avro_resolved_wunion_reader_set_enum(const avro_value_iface_t *viface,
+				     void *vself, int val)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_set_enum(&src, val);
+}
+
+static int
+avro_resolved_wunion_reader_set_fixed(const avro_value_iface_t *viface,
+				      void *vself, void *buf, size_t size)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_set_fixed(&src, buf, size);
+}
+
+static int
+avro_resolved_wunion_reader_give_fixed(const avro_value_iface_t *viface,
+				       void *vself, avro_wrapped_buffer_t *dest)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_give_fixed(&src, dest);
+}
+
+static int
+avro_resolved_wunion_reader_get_size(const avro_value_iface_t *viface,
+				     const void *vself, size_t *size)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_size(&src, size);
+}
+
+static int
+avro_resolved_wunion_reader_get_by_index(const avro_value_iface_t *viface,
+					 const void *vself, size_t index,
+					 avro_value_t *child, const char **name)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_by_index(&src, index, child, name);
+}
+
+static int
+avro_resolved_wunion_reader_get_by_name(const avro_value_iface_t *viface,
+					const void *vself, const char *name,
+					avro_value_t *child, size_t *index)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_by_name(&src, name, child, index);
+}
+
+static int
+avro_resolved_wunion_reader_get_discriminant(const avro_value_iface_t *viface,
+					     const void *vself, int *out)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_discriminant(&src, out);
+}
+
+static int
+avro_resolved_wunion_reader_get_current_branch(const avro_value_iface_t *viface,
+					       const void *vself, avro_value_t *branch)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_get_current_branch(&src, branch);
+}
+
+static int
+avro_resolved_wunion_reader_append(const avro_value_iface_t *viface,
+				   void *vself, avro_value_t *child_out,
+				   size_t *new_index)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_append(&src, child_out, new_index);
+}
+
+static int
+avro_resolved_wunion_reader_add(const avro_value_iface_t *viface,
+				void *vself, const char *key,
+				avro_value_t *child, size_t *index, int *is_new)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_add(&src, key, child, index, is_new);
+}
+
+static int
+avro_resolved_wunion_reader_set_branch(const avro_value_iface_t *viface,
+				       void *vself, int discriminant,
+				       avro_value_t *branch)
+{
+	int  rval;
+	avro_value_t  src;
+	check(rval, avro_resolved_wunion_get_real_src(viface, vself, &src));
+	return avro_value_set_branch(&src, discriminant, branch);
+}
+
+static avro_resolved_wunion_reader_t *
+avro_resolved_wunion_reader_create(avro_schema_t wschema, avro_schema_t rschema)
+{
+	avro_resolved_reader_t  *self = (avro_resolved_reader_t *) avro_new(avro_resolved_wunion_reader_t);
+	memset(self, 0, sizeof(avro_resolved_wunion_reader_t));
+
+	self->parent.incref_iface = avro_resolved_reader_incref_iface;
+	self->parent.decref_iface = avro_resolved_reader_decref_iface;
+	self->parent.incref = avro_resolved_reader_incref;
+	self->parent.decref = avro_resolved_reader_decref;
+	self->parent.reset = avro_resolved_reader_reset;
+	self->parent.get_type = avro_resolved_reader_get_type;
+	self->parent.get_schema = avro_resolved_reader_get_schema;
+
+	self->parent.get_boolean = avro_resolved_wunion_reader_get_boolean;
+	self->parent.grab_bytes = avro_resolved_wunion_reader_grab_bytes;
+	self->parent.get_bytes = avro_resolved_wunion_reader_get_bytes;
+	self->parent.get_double = avro_resolved_wunion_reader_get_double;
+	self->parent.get_float = avro_resolved_wunion_reader_get_float;
+	self->parent.get_int = avro_resolved_wunion_reader_get_int;
+	self->parent.get_long = avro_resolved_wunion_reader_get_long;
+	self->parent.get_null = avro_resolved_wunion_reader_get_null;
+	self->parent.get_string = avro_resolved_wunion_reader_get_string;
+	self->parent.grab_string = avro_resolved_wunion_reader_grab_string;
+	self->parent.get_enum = avro_resolved_wunion_reader_get_enum;
+	self->parent.get_fixed = avro_resolved_wunion_reader_get_fixed;
+	self->parent.grab_fixed = avro_resolved_wunion_reader_grab_fixed;
+
+	self->parent.set_boolean = avro_resolved_wunion_reader_set_boolean;
+	self->parent.set_bytes = avro_resolved_wunion_reader_set_bytes;
+	self->parent.give_bytes = avro_resolved_wunion_reader_give_bytes;
+	self->parent.set_double = avro_resolved_wunion_reader_set_double;
+	self->parent.set_float = avro_resolved_wunion_reader_set_float;
+	self->parent.set_int = avro_resolved_wunion_reader_set_int;
+	self->parent.set_long = avro_resolved_wunion_reader_set_long;
+	self->parent.set_null = avro_resolved_wunion_reader_set_null;
+	self->parent.set_string = avro_resolved_wunion_reader_set_string;
+	self->parent.set_string_len = avro_resolved_wunion_reader_set_string_len;
+	self->parent.give_string_len = avro_resolved_wunion_reader_give_string_len;
+	self->parent.set_enum = avro_resolved_wunion_reader_set_enum;
+	self->parent.set_fixed = avro_resolved_wunion_reader_set_fixed;
+	self->parent.give_fixed = avro_resolved_wunion_reader_give_fixed;
+
+	self->parent.get_size = avro_resolved_wunion_reader_get_size;
+	self->parent.get_by_index = avro_resolved_wunion_reader_get_by_index;
+	self->parent.get_by_name = avro_resolved_wunion_reader_get_by_name;
+	self->parent.get_discriminant = avro_resolved_wunion_reader_get_discriminant;
+	self->parent.get_current_branch = avro_resolved_wunion_reader_get_current_branch;
+
+	self->parent.append = avro_resolved_wunion_reader_append;
+	self->parent.add = avro_resolved_wunion_reader_add;
+	self->parent.set_branch = avro_resolved_wunion_reader_set_branch;
+
+	self->refcount = 1;
+	self->wschema = avro_schema_incref(wschema);
+	self->rschema = avro_schema_incref(rschema);
+	self->calculate_size = avro_resolved_wunion_reader_calculate_size;
+	self->free_iface = avro_resolved_wunion_reader_free_iface;
+	self->init = avro_resolved_wunion_reader_init;
+	self->done = avro_resolved_wunion_reader_done;
+	self->reset_wrappers = avro_resolved_wunion_reader_reset;
+	return container_of(self, avro_resolved_wunion_reader_t, parent);
+}
+
+static avro_resolved_reader_t *
+try_writer_union(memoize_state_t *state,
+		 avro_schema_t wschema, avro_schema_t rschema)
+{
+	/*
+	 * For a writer union, we check each branch of the union in turn
+	 * against the reader schema.  For each one that is compatible,
+	 * we save the child resolver that can be used to process a
+	 * writer value of that branch.
+	 */
+
+	size_t  branch_count = avro_schema_union_size(wschema);
+	DEBUG("Checking %" PRIsz "-branch writer union schema", branch_count);
+
+	avro_resolved_wunion_reader_t  *uself =
+	    avro_resolved_wunion_reader_create(wschema, rschema);
+	avro_memoize_set(&state->mem, wschema, rschema, uself);
+
+	avro_resolved_reader_t  **branch_resolvers =
+	    (avro_resolved_reader_t **) avro_calloc(branch_count, sizeof(avro_resolved_reader_t *));
+	int  some_branch_compatible = 0;
+
+	size_t  i;
+	for (i = 0; i < branch_count; i++) {
+		avro_schema_t  branch_schema =
+		    avro_schema_union_branch(wschema, i);
+
+		DEBUG("Resolving writer union branch %" PRIsz " (%s)", i,
+		      avro_schema_type_name(branch_schema));
+
+		/*
+		 * Try to recursively resolve this branch of the writer
+		 * union against the reader schema.  Don't raise
+		 * an error if this fails — we just need one of
+		 * the writer branches to be compatible.
+		 */
+
+		branch_resolvers[i] =
+		    avro_resolved_reader_new_memoized(state, branch_schema, rschema);
+		if (branch_resolvers[i] == NULL) {
+			DEBUG("No match for writer union branch %" PRIsz, i);
+		} else {
+			DEBUG("Found match for writer union branch %" PRIsz, i);
+			some_branch_compatible = 1;
+		}
+	}
+
+	/*
+	 * If we didn't find a match, that's an error.
+	 */
+
+	if (!some_branch_compatible) {
+		DEBUG("No writer union branches match");
+		avro_set_error("No branches in the writer are compatible "
+			       "with reader schema %s",
+			       avro_schema_type_name(rschema));
+		goto error;
+	}
+
+	uself->branch_count = branch_count;
+	uself->branch_resolvers = branch_resolvers;
+	return &uself->parent;
+
+error:
+	/*
+	 * Clean up any resolver we might have already created.
+	 */
+
+	avro_memoize_delete(&state->mem, wschema, rschema);
+	avro_value_iface_decref(&uself->parent.parent);
+
+	{
+		unsigned int  i;
+		for (i = 0; i < branch_count; i++) {
+			if (branch_resolvers[i]) {
+				avro_value_iface_decref(&branch_resolvers[i]->parent);
+			}
+		}
+	}
+
+	avro_free(branch_resolvers, branch_count * sizeof(avro_resolved_reader_t *));
+	return NULL;
+}
+
+
+/*-----------------------------------------------------------------------
+ * reader union
+ */
+
+/*
+ * For reader unions, we only resolve them against writers which aren't
+ * unions.  (We'll have already broken any writer union apart into its
+ * separate branches.)  We just have to record which branch of the
+ * reader union the writer schema is compatible with.
+ */
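+
+/*
+ * For example (a hypothetical pairing): resolving writer schema "int"
+ * against reader union ["null", "int"] selects active_branch = 1,
+ * with branch_resolver handling the int-to-int case; the null branch
+ * is never consulted again.
+ */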
+
+typedef struct avro_resolved_runion_reader {
+	avro_resolved_reader_t  parent;
+
+	/* The reader union branch that's compatible with the writer
+	 * schema. */
+	size_t  active_branch;
+
+	/* A child resolver for the reader branch. */
+	avro_resolved_reader_t  *branch_resolver;
+} avro_resolved_runion_reader_t;
+
+
+static void
+avro_resolved_runion_reader_calculate_size(avro_resolved_reader_t *iface)
+{
+	avro_resolved_runion_reader_t  *uiface =
+	    container_of(iface, avro_resolved_runion_reader_t, parent);
+
+	/* Only calculate the size for any resolver once */
+	iface->calculate_size = NULL;
+
+	DEBUG("Calculating size for %s->%s",
+	      avro_schema_type_name((iface)->wschema),
+	      avro_schema_type_name((iface)->rschema));
+
+	avro_resolved_reader_calculate_size(uiface->branch_resolver);
+	iface->instance_size = uiface->branch_resolver->instance_size;
+}
+
+
+static void
+avro_resolved_runion_reader_free_iface(avro_resolved_reader_t *iface, st_table *freeing)
+{
+	avro_resolved_runion_reader_t  *uiface =
+	    container_of(iface, avro_resolved_runion_reader_t, parent);
+
+	if (uiface->branch_resolver != NULL) {
+		free_resolver(uiface->branch_resolver, freeing);
+	}
+
+	avro_schema_decref(iface->wschema);
+	avro_schema_decref(iface->rschema);
+	avro_freet(avro_resolved_runion_reader_t, iface);
+}
+
+static int
+avro_resolved_runion_reader_init(const avro_resolved_reader_t *iface, void *vself)
+{
+	avro_resolved_runion_reader_t  *uiface =
+	    container_of(iface, avro_resolved_runion_reader_t, parent);
+	return avro_resolved_reader_init(uiface->branch_resolver, vself);
+}
+
+static void
+avro_resolved_runion_reader_done(const avro_resolved_reader_t *iface, void *vself)
+{
+	avro_resolved_runion_reader_t  *uiface =
+	    container_of(iface, avro_resolved_runion_reader_t, parent);
+	avro_resolved_reader_done(uiface->branch_resolver, vself);
+}
+
+static int
+avro_resolved_runion_reader_reset(const avro_resolved_reader_t *iface, void *vself)
+{
+	avro_resolved_runion_reader_t  *uiface =
+	    container_of(iface, avro_resolved_runion_reader_t, parent);
+	return avro_resolved_reader_reset_wrappers(uiface->branch_resolver, vself);
+}
+
+static int
+avro_resolved_runion_reader_get_discriminant(const avro_value_iface_t *viface,
+					     const void *vself, int *out)
+{
+	AVRO_UNUSED(vself);
+	const avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	const avro_resolved_runion_reader_t  *uiface =
+	    container_of(iface, avro_resolved_runion_reader_t, parent);
+	DEBUG("Reader union is branch %" PRIsz, uiface->active_branch);
+	*out = uiface->active_branch;
+	return 0;
+}
+
+static int
+avro_resolved_runion_reader_get_current_branch(const avro_value_iface_t *viface,
+					       const void *vself, avro_value_t *branch)
+{
+	const avro_resolved_reader_t  *iface =
+	    container_of(viface, avro_resolved_reader_t, parent);
+	const avro_resolved_runion_reader_t  *uiface =
+	    container_of(iface, avro_resolved_runion_reader_t, parent);
+	DEBUG("Getting reader branch %" PRIsz " for union %p", uiface->active_branch, vself);
+	branch->iface = &uiface->branch_resolver->parent;
+	branch->self = (void *) vself;
+	return 0;
+}
+
+static avro_resolved_runion_reader_t *
+avro_resolved_runion_reader_create(avro_schema_t wschema, avro_schema_t rschema)
+{
+	avro_resolved_reader_t  *self = (avro_resolved_reader_t *) avro_new(avro_resolved_runion_reader_t);
+	memset(self, 0, sizeof(avro_resolved_runion_reader_t));
+
+	self->parent.incref_iface = avro_resolved_reader_incref_iface;
+	self->parent.decref_iface = avro_resolved_reader_decref_iface;
+	self->parent.incref = avro_resolved_reader_incref;
+	self->parent.decref = avro_resolved_reader_decref;
+	self->parent.reset = avro_resolved_reader_reset;
+	self->parent.get_type = avro_resolved_reader_get_type;
+	self->parent.get_schema = avro_resolved_reader_get_schema;
+	self->parent.get_discriminant = avro_resolved_runion_reader_get_discriminant;
+	self->parent.get_current_branch = avro_resolved_runion_reader_get_current_branch;
+
+	self->refcount = 1;
+	self->wschema = avro_schema_incref(wschema);
+	self->rschema = avro_schema_incref(rschema);
+	self->calculate_size = avro_resolved_runion_reader_calculate_size;
+	self->free_iface = avro_resolved_runion_reader_free_iface;
+	self->init = avro_resolved_runion_reader_init;
+	self->done = avro_resolved_runion_reader_done;
+	self->reset_wrappers = avro_resolved_runion_reader_reset;
+	return container_of(self, avro_resolved_runion_reader_t, parent);
+}
+
+static avro_resolved_reader_t *
+try_reader_union(memoize_state_t *state,
+		 avro_schema_t wschema, avro_schema_t rschema)
+{
+	/*
+	 * For a reader union, we have to identify which branch
+	 * corresponds to the writer schema.  (The writer won't be a
+	 * union, since we'll have already broken it into its branches.)
+	 */
+
+	size_t  branch_count = avro_schema_union_size(rschema);
+	DEBUG("Checking %" PRIsz "-branch reader union schema", branch_count);
+
+	avro_resolved_runion_reader_t  *uself =
+	    avro_resolved_runion_reader_create(wschema, rschema);
+	avro_memoize_set(&state->mem, wschema, rschema, uself);
+
+	size_t  i;
+	for (i = 0; i < branch_count; i++) {
+		avro_schema_t  branch_schema =
+		    avro_schema_union_branch(rschema, i);
+
+		DEBUG("Resolving reader union branch %" PRIsz " (%s)", i,
+		      avro_schema_type_name(branch_schema));
+
+		/*
+		 * Try to recursively resolve this branch of the reader
+		 * union against the writer schema.  Don't raise
+		 * an error if this fails — we just need one of
+		 * the reader branches to be compatible.
+		 */
+
+		uself->branch_resolver =
+		    avro_resolved_reader_new_memoized(state, wschema, branch_schema);
+		if (uself->branch_resolver == NULL) {
+			DEBUG("No match for reader union branch %" PRIsz, i);
+		} else {
+			DEBUG("Found match for reader union branch %" PRIsz, i);
+			uself->active_branch = i;
+			return &uself->parent;
+		}
+	}
+
+	/*
+	 * If we didn't find a match, that's an error.
+	 */
+
+	DEBUG("No reader union branches match");
+	avro_set_error("No branches in the reader are compatible "
+		       "with writer schema %s",
+		       avro_schema_type_name(wschema));
+	goto error;
+
+error:
+	/*
+	 * Clean up any resolver we might have already created.
+	 */
+
+	avro_memoize_delete(&state->mem, wschema, rschema);
+	avro_value_iface_decref(&uself->parent.parent);
+	return NULL;
+}
+
+
+/*-----------------------------------------------------------------------
+ * Schema type dispatcher
+ */
+
+static avro_resolved_reader_t *
+avro_resolved_reader_new_memoized(memoize_state_t *state,
+				  avro_schema_t wschema, avro_schema_t rschema)
+{
+	check_param(NULL, is_avro_schema(wschema), "writer schema");
+	check_param(NULL, is_avro_schema(rschema), "reader schema");
+
+	/*
+	 * First see if we've already matched these two schemas.  If so,
+	 * just return that resolver.
+	 */
+
+	avro_resolved_reader_t  *saved = NULL;
+	if (avro_memoize_get(&state->mem, wschema, rschema, (void **) &saved)) {
+		DEBUG("Already resolved %s%s%s->%s%s%s",
+		      is_avro_link(wschema)? "[": "",
+		      avro_schema_type_name(wschema),
+		      is_avro_link(wschema)? "]": "",
+		      is_avro_link(rschema)? "[": "",
+		      avro_schema_type_name(rschema),
+		      is_avro_link(rschema)? "]": "");
+		return saved;
+	} else {
+		DEBUG("Resolving %s%s%s->%s%s%s",
+		      is_avro_link(wschema)? "[": "",
+		      avro_schema_type_name(wschema),
+		      is_avro_link(wschema)? "]": "",
+		      is_avro_link(rschema)? "[": "",
+		      avro_schema_type_name(rschema),
+		      is_avro_link(rschema)? "]": "");
+	}
+
+	/*
+	 * Otherwise we have some work to do.  First check if the writer
+	 * schema is a union.  If so, break it apart.
+	 */
+
+	if (is_avro_union(wschema)) {
+		return try_writer_union(state, wschema, rschema);
+	}
+
+	else if (is_avro_link(wschema)) {
+		return try_wlink(state, wschema, rschema);
+	}
+
+	/*
+	 * If the writer isn't a union, then choose a resolver based on
+	 * the reader schema.
+	 */
+
+	switch (avro_typeof(rschema))
+	{
+		case AVRO_BOOLEAN:
+			return try_boolean(state, wschema, rschema);
+
+		case AVRO_BYTES:
+			return try_bytes(state, wschema, rschema);
+
+		case AVRO_DOUBLE:
+			return try_double(state, wschema, rschema);
+
+		case AVRO_FLOAT:
+			return try_float(state, wschema, rschema);
+
+		case AVRO_INT32:
+			return try_int(state, wschema, rschema);
+
+		case AVRO_INT64:
+			return try_long(state, wschema, rschema);
+
+		case AVRO_NULL:
+			return try_null(state, wschema, rschema);
+
+		case AVRO_STRING:
+			return try_string(state, wschema, rschema);
+
+		case AVRO_ARRAY:
+			return try_array(state, wschema, rschema);
+
+		case AVRO_ENUM:
+			return try_enum(state, wschema, rschema);
+
+		case AVRO_FIXED:
+			return try_fixed(state, wschema, rschema);
+
+		case AVRO_MAP:
+			return try_map(state, wschema, rschema);
+
+		case AVRO_RECORD:
+			return try_record(state, wschema, rschema);
+
+		case AVRO_UNION:
+			return try_reader_union(state, wschema, rschema);
+
+		case AVRO_LINK:
+			return try_rlink(state, wschema, rschema);
+
+		default:
+			avro_set_error("Unknown reader schema type");
+			return NULL;
+	}
+
+	return NULL;
+}
+
+
+avro_value_iface_t *
+avro_resolved_reader_new(avro_schema_t wschema, avro_schema_t rschema)
+{
+	/*
+	 * Create a state to keep track of the value implementations
+	 * that we create for each subschema.
+	 */
+
+	memoize_state_t  state;
+	avro_memoize_init(&state.mem);
+	state.links = NULL;
+
+	/*
+	 * Create the value implementations.
+	 */
+
+	avro_resolved_reader_t  *result =
+	    avro_resolved_reader_new_memoized(&state, wschema, rschema);
+	if (result == NULL) {
+		avro_memoize_done(&state.mem);
+		return NULL;
+	}
+
+	/*
+	 * Fix up any link schemas so that their value implementations
+	 * point to their target schemas' implementations.
+	 */
+
+	avro_resolved_reader_calculate_size(result);
+	while (state.links != NULL) {
+		avro_resolved_link_reader_t  *liface = state.links;
+		avro_resolved_reader_calculate_size(liface->target_resolver);
+		state.links = liface->next;
+		liface->next = NULL;
+	}
+
+	/*
+	 * And now we can return.
+	 */
+
+	avro_memoize_done(&state.mem);
+	return &result->parent;
+}
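+
+/*
+ * A minimal usage sketch (assuming `wschema` and `rschema` are valid
+ * schemas and `real` already wraps the writer's data; the functions
+ * used here are declared in avro/resolver.h and avro/value.h):
+ *
+ *   avro_value_iface_t  *resolver =
+ *       avro_resolved_reader_new(wschema, rschema);
+ *   avro_value_t  resolved;
+ *   avro_resolved_reader_new_value(resolver, &resolved);
+ *   avro_resolved_reader_set_source(&resolved, &real);
+ *   ... read from `resolved` with the avro_value_get_* API;
+ *       values are translated to the reader schema on the fly ...
+ *   avro_value_decref(&resolved);
+ *   avro_value_iface_decref(resolver);
+ */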
diff --git a/lang/c/src/resolved-writer.c b/lang/c/src/resolved-writer.c
new file mode 100644
index 0000000..96439e6
--- /dev/null
+++ b/lang/c/src/resolved-writer.c
@@ -0,0 +1,2911 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro/platform.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro_private.h"
+#include "avro/allocation.h"
+#include "avro/basics.h"
+#include "avro/data.h"
+#include "avro/errors.h"
+#include "avro/refcount.h"
+#include "avro/resolver.h"
+#include "avro/schema.h"
+#include "avro/value.h"
+#include "st.h"
+
+#ifndef AVRO_RESOLVER_DEBUG
+#define AVRO_RESOLVER_DEBUG 0
+#endif
+
+#if AVRO_RESOLVER_DEBUG
+#include <stdio.h>
+#define DEBUG(...) \
+	do { \
+		fprintf(stderr, __VA_ARGS__); \
+		fprintf(stderr, "\n"); \
+	} while (0)
+#else
+#define DEBUG(...)  /* don't print messages */
+#endif
+
+
+typedef struct avro_resolved_writer  avro_resolved_writer_t;
+
+struct avro_resolved_writer {
+	avro_value_iface_t  parent;
+
+	/** The reference count for this interface. */
+	volatile int  refcount;
+
+	/** The writer schema. */
+	avro_schema_t  wschema;
+
+	/** The reader schema. */
+	avro_schema_t  rschema;
+
+	/* If the reader schema is a union, but the writer schema is
+	 * not, this field indicates which branch of the reader union
+	 * should be selected. */
+	int  reader_union_branch;
+
+	/* The size of the value instances for this resolver. */
+	size_t  instance_size;
+
+	/* A function to calculate the instance size once the overall
+	 * top-level resolver (and all of its children) have been
+	 * constructed. */
+	void
+	(*calculate_size)(avro_resolved_writer_t *iface);
+
+	/* A free function for this resolver interface */
+	void
+	(*free_iface)(avro_resolved_writer_t *iface, st_table *freeing);
+
+	/* An initialization function for instances of this resolver. */
+	int
+	(*init)(const avro_resolved_writer_t *iface, void *self);
+
+	/* A finalization function for instances of this resolver. */
+	void
+	(*done)(const avro_resolved_writer_t *iface, void *self);
+
+	/* Clear out any existing wrappers, if any */
+	int
+	(*reset_wrappers)(const avro_resolved_writer_t *iface, void *self);
+};
+
+#define avro_resolved_writer_calculate_size(iface) \
+	do { \
+		if ((iface)->calculate_size != NULL) { \
+			(iface)->calculate_size((iface)); \
+		} \
+	} while (0)
+#define avro_resolved_writer_init(iface, self) \
+	((iface)->init == NULL? 0: (iface)->init((iface), (self)))
+#define avro_resolved_writer_done(iface, self) \
+	((iface)->done == NULL? (void) 0: (iface)->done((iface), (self)))
+#define avro_resolved_writer_reset_wrappers(iface, self) \
+	((iface)->reset_wrappers == NULL? 0: \
+	 (iface)->reset_wrappers((iface), (self)))
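+
+/*
+ * These wrappers make each per-resolver hook optional: a resolver
+ * that doesn't need, say, an init step leaves the corresponding
+ * pointer NULL, and the macro turns the call into a no-op.
+ */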
+
+
+/*
+ * We assume that each instance type in this file contains an
+ * avro_value_t as its first element, which is the current wrapped
+ * value.
+ */
+
+void
+avro_resolved_writer_set_dest(avro_value_t *resolved,
+			      avro_value_t *dest)
+{
+	avro_value_t  *self = (avro_value_t *) resolved->self;
+	if (self->self != NULL) {
+		avro_value_decref(self);
+	}
+	avro_value_copy_ref(self, dest);
+}
+
+void
+avro_resolved_writer_clear_dest(avro_value_t *resolved)
+{
+	avro_value_t  *self = (avro_value_t *) resolved->self;
+	if (self->self != NULL) {
+		avro_value_decref(self);
+	}
+	self->iface = NULL;
+	self->self = NULL;
+}
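+
+/*
+ * A minimal usage sketch (assuming `wschema` and `rschema` are valid
+ * schemas and `sink` is a value of the reader schema that the data
+ * should land in; the functions used here are declared in
+ * avro/resolver.h and avro/value.h):
+ *
+ *   avro_value_iface_t  *resolver =
+ *       avro_resolved_writer_new(wschema, rschema);
+ *   avro_value_t  shim;
+ *   avro_resolved_writer_new_value(resolver, &shim);
+ *   avro_resolved_writer_set_dest(&shim, &sink);
+ *   ... write into `shim` with the avro_value_set_* API; the data
+ *       is resolved and stored into `sink` ...
+ *   avro_resolved_writer_clear_dest(&shim);
+ *   avro_value_decref(&shim);
+ *   avro_value_iface_decref(resolver);
+ */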
+
+int
+avro_resolved_writer_new_value(avro_value_iface_t *viface,
+			       avro_value_t *value)
+{
+	int  rval;
+	avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	void  *self = avro_malloc(iface->instance_size + sizeof(volatile int));
+	if (self == NULL) {
+		value->iface = NULL;
+		value->self = NULL;
+		return ENOMEM;
+	}
+
+	memset(self, 0, iface->instance_size + sizeof(volatile int));
+	volatile int  *refcount = (volatile int *) self;
+	self = (char *) self + sizeof(volatile int);
+
+	rval = avro_resolved_writer_init(iface, self);
+	if (rval != 0) {
+		avro_free(self, iface->instance_size + sizeof(volatile int));
+		value->iface = NULL;
+		value->self = NULL;
+		return rval;
+	}
+
+	*refcount = 1;
+	value->iface = avro_value_iface_incref(viface);
+	value->self = self;
+	return 0;
+}
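+
+/*
+ * Each instance allocated above is laid out as
+ *
+ *   [ volatile int refcount ][ instance_size bytes of resolver state ]
+ *
+ * with value->self pointing just past the hidden refcount; that's why
+ * the incref/decref and free functions below step back by
+ * sizeof(volatile int) to find it.
+ */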
+
+static void
+avro_resolved_writer_free_value(const avro_value_iface_t *viface, void *vself)
+{
+	avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+
+	avro_resolved_writer_done(iface, vself);
+	if (self->self != NULL) {
+		avro_value_decref(self);
+	}
+
+	vself = (char *) vself - sizeof(volatile int);
+	avro_free(vself, iface->instance_size + sizeof(volatile int));
+}
+
+static void
+avro_resolved_writer_incref(avro_value_t *value)
+{
+	/*
+	 * This only works if you pass in the top-level value.
+	 */
+
+	volatile int  *refcount = (volatile int *) ((char *) value->self - sizeof(volatile int));
+	avro_refcount_inc(refcount);
+}
+
+static void
+avro_resolved_writer_decref(avro_value_t *value)
+{
+	/*
+	 * This only works if you pass in the top-level value.
+	 */
+
+	volatile int  *refcount = (volatile int *) ((char *) value->self - sizeof(volatile int));
+	if (avro_refcount_dec(refcount)) {
+		avro_resolved_writer_free_value(value->iface, value->self);
+	}
+}
+
+
+static avro_value_iface_t *
+avro_resolved_writer_incref_iface(avro_value_iface_t *viface)
+{
+	avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_refcount_inc(&iface->refcount);
+	return viface;
+}
+
+static void
+free_resolver(avro_resolved_writer_t *iface, st_table *freeing)
+{
+	/* First check if we've already started freeing this resolver. */
+	if (st_lookup(freeing, (st_data_t) iface, NULL)) {
+		DEBUG("Already freed %p", iface);
+		return;
+	}
+
+	/* Otherwise add this resolver to the freeing set, then free it. */
+	st_insert(freeing, (st_data_t) iface, (st_data_t) NULL);
+	DEBUG("Freeing resolver %p (%s->%s)", iface,
+	      avro_schema_type_name(iface->wschema),
+	      avro_schema_type_name(iface->rschema));
+
+	iface->free_iface(iface, freeing);
+}
+
+static void
+avro_resolved_writer_calculate_size_(avro_resolved_writer_t *iface)
+{
+	/* Only calculate the size for any resolver once */
+	iface->calculate_size = NULL;
+
+	DEBUG("Calculating size for %s->%s",
+	      avro_schema_type_name((iface)->wschema),
+	      avro_schema_type_name((iface)->rschema));
+	iface->instance_size = sizeof(avro_value_t);
+}
+
+static void
+avro_resolved_writer_free_iface(avro_resolved_writer_t *iface, st_table *freeing)
+{
+	AVRO_UNUSED(freeing);
+	avro_schema_decref(iface->wschema);
+	avro_schema_decref(iface->rschema);
+	avro_freet(avro_resolved_writer_t, iface);
+}
+
+static void
+avro_resolved_writer_decref_iface(avro_value_iface_t *viface)
+{
+	avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	DEBUG("Decref resolver %p (before=%d)", iface, iface->refcount);
+	if (avro_refcount_dec(&iface->refcount)) {
+		st_table  *freeing = st_init_numtable();
+		free_resolver(iface, freeing);
+		st_free_table(freeing);
+	}
+}
+
+
+static int
+avro_resolved_writer_reset(const avro_value_iface_t *viface, void *vself)
+{
+	/*
+	 * To reset a wrapped value, we first clear out any wrappers,
+	 * and then have the wrapped value reset itself.
+	 */
+
+	int  rval;
+	avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	check(rval, avro_resolved_writer_reset_wrappers(iface, vself));
+	return avro_value_reset(self);
+}
+
+static avro_type_t
+avro_resolved_writer_get_type(const avro_value_iface_t *viface, const void *vself)
+{
+	AVRO_UNUSED(vself);
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	return avro_typeof(iface->wschema);
+}
+
+static avro_schema_t
+avro_resolved_writer_get_schema(const avro_value_iface_t *viface, const void *vself)
+{
+	AVRO_UNUSED(vself);
+	avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	return iface->wschema;
+}
+
+
+static avro_resolved_writer_t *
+avro_resolved_writer_create(avro_schema_t wschema, avro_schema_t rschema)
+{
+	avro_resolved_writer_t  *self = (avro_resolved_writer_t *) avro_new(avro_resolved_writer_t);
+	memset(self, 0, sizeof(avro_resolved_writer_t));
+
+	self->parent.incref_iface = avro_resolved_writer_incref_iface;
+	self->parent.decref_iface = avro_resolved_writer_decref_iface;
+	self->parent.incref = avro_resolved_writer_incref;
+	self->parent.decref = avro_resolved_writer_decref;
+	self->parent.reset = avro_resolved_writer_reset;
+	self->parent.get_type = avro_resolved_writer_get_type;
+	self->parent.get_schema = avro_resolved_writer_get_schema;
+
+	self->refcount = 1;
+	self->wschema = avro_schema_incref(wschema);
+	self->rschema = avro_schema_incref(rschema);
+	self->reader_union_branch = -1;
+	self->calculate_size = avro_resolved_writer_calculate_size_;
+	self->free_iface = avro_resolved_writer_free_iface;
+	self->reset_wrappers = NULL;
+	return self;
+}
+
+static inline int
+avro_resolved_writer_get_real_dest(const avro_resolved_writer_t *iface,
+				   const avro_value_t *dest, avro_value_t *real_dest)
+{
+	if (iface->reader_union_branch < 0) {
+		/*
+		 * The reader schema isn't a union, so use the dest
+		 * field as-is.
+		 */
+
+		*real_dest = *dest;
+		return 0;
+	}
+
+	DEBUG("Retrieving union branch %d for %s value",
+	      iface->reader_union_branch,
+	      avro_schema_type_name(iface->wschema));
+
+	return avro_value_set_branch(dest, iface->reader_union_branch, real_dest);
+}
+
+
+#define skip_links(schema)					\
+	while (is_avro_link(schema)) {				\
+		schema = avro_schema_link_target(schema);	\
+	}
+
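+/*
+ * For example, if schema is a link whose target is itself a link to a
+ * record, skip_links() leaves schema pointing at the record, the first
+ * non-link node in the chain.
+ */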
+
+/*-----------------------------------------------------------------------
+ * Memoized resolvers
+ */
+
+typedef struct avro_resolved_link_writer  avro_resolved_link_writer_t;
+
+typedef struct memoize_state_t {
+	avro_memoize_t  mem;
+	avro_resolved_link_writer_t  *links;
+} memoize_state_t;
+
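+/*
+ * The memoize table maps each (writer schema, reader schema) pair to
+ * the resolver already built for it, so that repeated and recursive
+ * schema pairs share a single resolver instance instead of triggering
+ * unbounded recursion.
+ */
+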
+static avro_resolved_writer_t *
+avro_resolved_writer_new_memoized(memoize_state_t *state,
+				  avro_schema_t wschema, avro_schema_t rschema);
+
+
+/*-----------------------------------------------------------------------
+ * Reader unions
+ */
+
+/*
+ * For each Avro type, we have to check whether the reader schema on its
+ * own is compatible, and also whether the reader is a union that
+ * contains a compatible type.  The macros in this section help us
+ * perform both of these checks with less code.
+ */
+
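+/*
+ * For example, writing with writer schema "int" against reader schema
+ * ["null", "int"] succeeds by selecting reader union branch 1; the
+ * resolver records that choice in reader_union_branch and later routes
+ * every stored value through that branch.
+ */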
+
+/**
+ * A helper macro that handles the case where neither the writer nor
+ * the reader is a union.  Uses @ref check_func to see if the two
+ * schemas are compatible.
+ */
+
+#define check_non_union(saved, wschema, rschema, check_func) \
+do {								\
+	avro_resolved_writer_t  *self = NULL;			\
+	int  rc = check_func(saved, &self, wschema, rschema,	\
+			     rschema);				\
+	if (self) {						\
+		DEBUG("Non-union schemas %s (writer) "		\
+		      "and %s (reader) match",			\
+		      avro_schema_type_name(wschema),		\
+		      avro_schema_type_name(rschema));		\
+								\
+		self->reader_union_branch = -1;			\
+		return self;					\
+	}							\
+								\
+	if (rc) {						\
+		return NULL;					\
+	}							\
+} while (0)
+
+
+/**
+ * A helper macro that handles the case where the reader is a union and
+ * the writer is not.  Checks each branch of the reader union schema,
+ * looking for the first branch that is compatible with the writer
+ * schema.  The @ref check_func argument should be a function that can
+ * check the compatibility of each branch schema.
+ */
+
+#define check_reader_union(saved, wschema, rschema, check_func)		\
+do {									\
+	if (!is_avro_union(rschema)) {					\
+		break;							\
+	}								\
+									\
+	DEBUG("Checking reader union schema");				\
+	size_t  num_branches = avro_schema_union_size(rschema);		\
+	unsigned int  i;						\
+									\
+	for (i = 0; i < num_branches; i++) {				\
+		avro_schema_t  branch_schema =				\
+		    avro_schema_union_branch(rschema, i);		\
+		skip_links(branch_schema);				\
+									\
+		DEBUG("Trying branch %u %s%s%s->%s", i, \
+		      is_avro_link(wschema)? "[": "", \
+		      avro_schema_type_name(wschema), \
+		      is_avro_link(wschema)? "]": "", \
+		      avro_schema_type_name(branch_schema)); \
+									\
+		avro_resolved_writer_t  *self = NULL;			\
+		int  rc = check_func(saved, &self,			\
+				     wschema, branch_schema, rschema);	\
+		if (self) {						\
+			DEBUG("Reader union branch %u (%s) "		\
+			      "and writer %s match",			\
+			      i, avro_schema_type_name(branch_schema),	\
+			      avro_schema_type_name(wschema));		\
+			self->reader_union_branch = i;			\
+			return self;					\
+		} else {						\
+			DEBUG("Reader union branch %u (%s) "		\
+			      "doesn't match",				\
+			      i, avro_schema_type_name(branch_schema));	\
+		}							\
+									\
+		if (rc) {						\
+			return NULL;					\
+		}							\
+	}								\
+									\
+	DEBUG("No reader union branches match");			\
+} while (0)
+
+/**
+ * A helper macro that wraps together check_non_union and
+ * check_reader_union for a simple (non-union) writer schema type.
+ */
+
+#define check_simple_writer(saved, wschema, rschema, type_name)		\
+do {									\
+	check_non_union(saved, wschema, rschema, try_##type_name);	\
+	check_reader_union(saved, wschema, rschema, try_##type_name);	\
+	DEBUG("Writer %s doesn't match reader %s",			\
+	      avro_schema_type_name(wschema),				\
+	      avro_schema_type_name(rschema));				\
+	avro_set_error("Cannot store " #type_name " into %s",		\
+		       avro_schema_type_name(rschema));			\
+	return NULL;							\
+} while (0)
+
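+/*
+ * For example, check_simple_writer(saved, wschema, rschema, boolean)
+ * expands to a call to try_boolean() against the reader schema itself,
+ * then against each branch of a reader union, and finally fails with
+ * an error if neither check produced a resolver.
+ */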
+
+/*-----------------------------------------------------------------------
+ * Recursive schemas
+ */
+
+/*
+ * Recursive schemas are handled specially; the value implementation for
+ * an AVRO_LINK schema is simply a wrapper around the value
+ * implementation for the link's target schema.  The value methods all
+ * delegate to the wrapped implementation.
+ */
+
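+/*
+ * A typical recursive schema (shown here only for illustration) is a
+ * cons list:
+ *
+ *     {"type": "record", "name": "list", "fields": [
+ *         {"name": "head", "type": "int"},
+ *         {"name": "tail", "type": ["null", "list"]}]}
+ *
+ * The inner "list" reference is an AVRO_LINK node that points back at
+ * the enclosing record schema.
+ */
+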
+struct avro_resolved_link_writer {
+	avro_resolved_writer_t  parent;
+
+	/**
+	 * A pointer to the “next” link resolver that we've had to
+	 * create.  While building the overall top-level resolver, we
+	 * use this to keep track of which link resolvers we still have
+	 * to fix up afterwards.
+	 */
+	avro_resolved_link_writer_t  *next;
+
+	/** The target's implementation. */
+	avro_resolved_writer_t  *target_resolver;
+};
+
+typedef struct avro_resolved_link_value {
+	avro_value_t  wrapped;
+	avro_value_t  target;
+} avro_resolved_link_value_t;
+
+static void
+avro_resolved_link_writer_calculate_size(avro_resolved_writer_t *iface)
+{
+	/* Only calculate the size for any resolver once */
+	iface->calculate_size = NULL;
+
+	DEBUG("Calculating size for [%s]->%s",
+	      avro_schema_type_name((iface)->wschema),
+	      avro_schema_type_name((iface)->rschema));
+	iface->instance_size = sizeof(avro_resolved_link_value_t);
+}
+
+static void
+avro_resolved_link_writer_free_iface(avro_resolved_writer_t *iface, st_table *freeing)
+{
+	avro_resolved_link_writer_t  *liface =
+	    container_of(iface, avro_resolved_link_writer_t, parent);
+	if (liface->target_resolver != NULL) {
+		free_resolver(liface->target_resolver, freeing);
+	}
+	avro_schema_decref(iface->wschema);
+	avro_schema_decref(iface->rschema);
+	avro_freet(avro_resolved_link_writer_t, iface);
+}
+
+static int
+avro_resolved_link_writer_init(const avro_resolved_writer_t *iface, void *vself)
+{
+	int  rval;
+	const avro_resolved_link_writer_t  *liface =
+	    container_of(iface, avro_resolved_link_writer_t, parent);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	size_t  target_instance_size = liface->target_resolver->instance_size;
+
+	self->target.iface = &liface->target_resolver->parent;
+	self->target.self = avro_malloc(target_instance_size);
+	if (self->target.self == NULL) {
+		return ENOMEM;
+	}
+	DEBUG("Allocated <%p:%" PRIsz "> for link", self->target.self, target_instance_size);
+
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+
+	rval = avro_resolved_writer_init(liface->target_resolver, self->target.self);
+	if (rval != 0) {
+		avro_free(self->target.self, target_instance_size);
+	}
+	return rval;
+}
+
+static void
+avro_resolved_link_writer_done(const avro_resolved_writer_t *iface, void *vself)
+{
+	const avro_resolved_link_writer_t  *liface =
+	    container_of(iface, avro_resolved_link_writer_t, parent);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	size_t  target_instance_size = liface->target_resolver->instance_size;
+	DEBUG("Freeing <%p:%" PRIsz "> for link", self->target.self, target_instance_size);
+	avro_resolved_writer_done(liface->target_resolver, self->target.self);
+	avro_free(self->target.self, target_instance_size);
+	self->target.iface = NULL;
+	self->target.self = NULL;
+}
+
+static int
+avro_resolved_link_writer_reset(const avro_resolved_writer_t *iface, void *vself)
+{
+	const avro_resolved_link_writer_t  *liface =
+	    container_of(iface, avro_resolved_link_writer_t, parent);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	return avro_resolved_writer_reset_wrappers
+	    (liface->target_resolver, self->target.self);
+}
+
+static avro_type_t
+avro_resolved_link_writer_get_type(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_type(&self->target);
+}
+
+static avro_schema_t
+avro_resolved_link_writer_get_schema(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_schema(&self->target);
+}
+
+static int
+avro_resolved_link_writer_get_boolean(const avro_value_iface_t *iface,
+				      const void *vself, int *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_boolean(&self->target, out);
+}
+
+static int
+avro_resolved_link_writer_get_bytes(const avro_value_iface_t *iface,
+				    const void *vself, const void **buf, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_bytes(&self->target, buf, size);
+}
+
+static int
+avro_resolved_link_writer_grab_bytes(const avro_value_iface_t *iface,
+				     const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_grab_bytes(&self->target, dest);
+}
+
+static int
+avro_resolved_link_writer_get_double(const avro_value_iface_t *iface,
+				     const void *vself, double *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_double(&self->target, out);
+}
+
+static int
+avro_resolved_link_writer_get_float(const avro_value_iface_t *iface,
+				    const void *vself, float *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_float(&self->target, out);
+}
+
+static int
+avro_resolved_link_writer_get_int(const avro_value_iface_t *iface,
+				  const void *vself, int32_t *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_int(&self->target, out);
+}
+
+static int
+avro_resolved_link_writer_get_long(const avro_value_iface_t *iface,
+				   const void *vself, int64_t *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_long(&self->target, out);
+}
+
+static int
+avro_resolved_link_writer_get_null(const avro_value_iface_t *iface, const void *vself)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_null(&self->target);
+}
+
+static int
+avro_resolved_link_writer_get_string(const avro_value_iface_t *iface,
+				     const void *vself, const char **str, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_string(&self->target, str, size);
+}
+
+static int
+avro_resolved_link_writer_grab_string(const avro_value_iface_t *iface,
+				      const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_grab_string(&self->target, dest);
+}
+
+static int
+avro_resolved_link_writer_get_enum(const avro_value_iface_t *iface,
+				   const void *vself, int *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_enum(&self->target, out);
+}
+
+static int
+avro_resolved_link_writer_get_fixed(const avro_value_iface_t *iface,
+				    const void *vself, const void **buf, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_fixed(&self->target, buf, size);
+}
+
+static int
+avro_resolved_link_writer_grab_fixed(const avro_value_iface_t *iface,
+				     const void *vself, avro_wrapped_buffer_t *dest)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_grab_fixed(&self->target, dest);
+}
+
+static int
+avro_resolved_link_writer_set_boolean(const avro_value_iface_t *iface,
+				      void *vself, int val)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_boolean(&self->target, val);
+}
+
+static int
+avro_resolved_link_writer_set_bytes(const avro_value_iface_t *iface,
+				    void *vself, void *buf, size_t size)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_bytes(&self->target, buf, size);
+}
+
+static int
+avro_resolved_link_writer_give_bytes(const avro_value_iface_t *iface,
+				     void *vself, avro_wrapped_buffer_t *buf)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_give_bytes(&self->target, buf);
+}
+
+static int
+avro_resolved_link_writer_set_double(const avro_value_iface_t *iface,
+				     void *vself, double val)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_double(&self->target, val);
+}
+
+static int
+avro_resolved_link_writer_set_float(const avro_value_iface_t *iface,
+				    void *vself, float val)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_float(&self->target, val);
+}
+
+static int
+avro_resolved_link_writer_set_int(const avro_value_iface_t *iface,
+				  void *vself, int32_t val)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_int(&self->target, val);
+}
+
+static int
+avro_resolved_link_writer_set_long(const avro_value_iface_t *iface,
+				   void *vself, int64_t val)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_long(&self->target, val);
+}
+
+static int
+avro_resolved_link_writer_set_null(const avro_value_iface_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_null(&self->target);
+}
+
+static int
+avro_resolved_link_writer_set_string(const avro_value_iface_t *iface,
+				     void *vself, const char *str)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_string(&self->target, str);
+}
+
+static int
+avro_resolved_link_writer_set_string_len(const avro_value_iface_t *iface,
+					 void *vself, const char *str, size_t size)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_string_len(&self->target, str, size);
+}
+
+static int
+avro_resolved_link_writer_give_string_len(const avro_value_iface_t *iface,
+					  void *vself, avro_wrapped_buffer_t *buf)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_give_string_len(&self->target, buf);
+}
+
+static int
+avro_resolved_link_writer_set_enum(const avro_value_iface_t *iface,
+				   void *vself, int val)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_enum(&self->target, val);
+}
+
+static int
+avro_resolved_link_writer_set_fixed(const avro_value_iface_t *iface,
+				    void *vself, void *buf, size_t size)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_fixed(&self->target, buf, size);
+}
+
+static int
+avro_resolved_link_writer_give_fixed(const avro_value_iface_t *iface,
+				     void *vself, avro_wrapped_buffer_t *buf)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_give_fixed(&self->target, buf);
+}
+
+static int
+avro_resolved_link_writer_get_size(const avro_value_iface_t *iface,
+				   const void *vself, size_t *size)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_size(&self->target, size);
+}
+
+static int
+avro_resolved_link_writer_get_by_index(const avro_value_iface_t *iface,
+				       const void *vself, size_t index,
+				       avro_value_t *child, const char **name)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_by_index(&self->target, index, child, name);
+}
+
+static int
+avro_resolved_link_writer_get_by_name(const avro_value_iface_t *iface,
+				      const void *vself, const char *name,
+				      avro_value_t *child, size_t *index)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_by_name(&self->target, name, child, index);
+}
+
+static int
+avro_resolved_link_writer_get_discriminant(const avro_value_iface_t *iface,
+					   const void *vself, int *out)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_discriminant(&self->target, out);
+}
+
+static int
+avro_resolved_link_writer_get_current_branch(const avro_value_iface_t *iface,
+					     const void *vself, avro_value_t *branch)
+{
+	AVRO_UNUSED(iface);
+	const avro_resolved_link_value_t  *self = (const avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_get_current_branch(&self->target, branch);
+}
+
+static int
+avro_resolved_link_writer_append(const avro_value_iface_t *iface,
+				 void *vself, avro_value_t *child_out,
+				 size_t *new_index)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_append(&self->target, child_out, new_index);
+}
+
+static int
+avro_resolved_link_writer_add(const avro_value_iface_t *iface,
+			      void *vself, const char *key,
+			      avro_value_t *child, size_t *index, int *is_new)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_add(&self->target, key, child, index, is_new);
+}
+
+static int
+avro_resolved_link_writer_set_branch(const avro_value_iface_t *iface,
+				     void *vself, int discriminant,
+				     avro_value_t *branch)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_link_value_t  *self = (avro_resolved_link_value_t *) vself;
+	avro_value_t  *target_vself = (avro_value_t *) self->target.self;
+	*target_vself = self->wrapped;
+	return avro_value_set_branch(&self->target, discriminant, branch);
+}
+
+static avro_resolved_link_writer_t *
+avro_resolved_link_writer_create(avro_schema_t wschema, avro_schema_t rschema)
+{
+	avro_resolved_writer_t  *self = (avro_resolved_writer_t *) avro_new(avro_resolved_link_writer_t);
+	memset(self, 0, sizeof(avro_resolved_link_writer_t));
+
+	self->parent.incref_iface = avro_resolved_writer_incref_iface;
+	self->parent.decref_iface = avro_resolved_writer_decref_iface;
+	self->parent.incref = avro_resolved_writer_incref;
+	self->parent.decref = avro_resolved_writer_decref;
+	self->parent.reset = avro_resolved_writer_reset;
+	self->parent.get_type = avro_resolved_link_writer_get_type;
+	self->parent.get_schema = avro_resolved_link_writer_get_schema;
+
+	self->refcount = 1;
+	self->wschema = avro_schema_incref(wschema);
+	self->rschema = avro_schema_incref(rschema);
+	self->reader_union_branch = -1;
+	self->calculate_size = avro_resolved_link_writer_calculate_size;
+	self->free_iface = avro_resolved_link_writer_free_iface;
+	self->init = avro_resolved_link_writer_init;
+	self->done = avro_resolved_link_writer_done;
+	self->reset_wrappers = avro_resolved_link_writer_reset;
+
+	self->parent.get_boolean = avro_resolved_link_writer_get_boolean;
+	self->parent.get_bytes = avro_resolved_link_writer_get_bytes;
+	self->parent.grab_bytes = avro_resolved_link_writer_grab_bytes;
+	self->parent.get_double = avro_resolved_link_writer_get_double;
+	self->parent.get_float = avro_resolved_link_writer_get_float;
+	self->parent.get_int = avro_resolved_link_writer_get_int;
+	self->parent.get_long = avro_resolved_link_writer_get_long;
+	self->parent.get_null = avro_resolved_link_writer_get_null;
+	self->parent.get_string = avro_resolved_link_writer_get_string;
+	self->parent.grab_string = avro_resolved_link_writer_grab_string;
+	self->parent.get_enum = avro_resolved_link_writer_get_enum;
+	self->parent.get_fixed = avro_resolved_link_writer_get_fixed;
+	self->parent.grab_fixed = avro_resolved_link_writer_grab_fixed;
+
+	self->parent.set_boolean = avro_resolved_link_writer_set_boolean;
+	self->parent.set_bytes = avro_resolved_link_writer_set_bytes;
+	self->parent.give_bytes = avro_resolved_link_writer_give_bytes;
+	self->parent.set_double = avro_resolved_link_writer_set_double;
+	self->parent.set_float = avro_resolved_link_writer_set_float;
+	self->parent.set_int = avro_resolved_link_writer_set_int;
+	self->parent.set_long = avro_resolved_link_writer_set_long;
+	self->parent.set_null = avro_resolved_link_writer_set_null;
+	self->parent.set_string = avro_resolved_link_writer_set_string;
+	self->parent.set_string_len = avro_resolved_link_writer_set_string_len;
+	self->parent.give_string_len = avro_resolved_link_writer_give_string_len;
+	self->parent.set_enum = avro_resolved_link_writer_set_enum;
+	self->parent.set_fixed = avro_resolved_link_writer_set_fixed;
+	self->parent.give_fixed = avro_resolved_link_writer_give_fixed;
+
+	self->parent.get_size = avro_resolved_link_writer_get_size;
+	self->parent.get_by_index = avro_resolved_link_writer_get_by_index;
+	self->parent.get_by_name = avro_resolved_link_writer_get_by_name;
+	self->parent.get_discriminant = avro_resolved_link_writer_get_discriminant;
+	self->parent.get_current_branch = avro_resolved_link_writer_get_current_branch;
+
+	self->parent.append = avro_resolved_link_writer_append;
+	self->parent.add = avro_resolved_link_writer_add;
+	self->parent.set_branch = avro_resolved_link_writer_set_branch;
+
+	return container_of(self, avro_resolved_link_writer_t, parent);
+}
+
+static int
+try_link(memoize_state_t *state, avro_resolved_writer_t **self,
+	 avro_schema_t wschema, avro_schema_t rschema,
+	 avro_schema_t root_rschema)
+{
+	AVRO_UNUSED(rschema);
+
+	/*
+	 * For link schemas, we create a special value implementation
+	 * that allocates space for its wrapped value at runtime.  This
+	 * lets us handle recursive types without having to instantiate
+	 * an infinitely large value.
+	 */
+
+	avro_schema_t  wtarget = avro_schema_link_target(wschema);
+	avro_resolved_link_writer_t  *lself =
+	    avro_resolved_link_writer_create(wtarget, root_rschema);
+	avro_memoize_set(&state->mem, wschema, root_rschema, lself);
+
+	avro_resolved_writer_t  *target_resolver =
+	    avro_resolved_writer_new_memoized(state, wtarget, rschema);
+	if (target_resolver == NULL) {
+		avro_memoize_delete(&state->mem, wschema, root_rschema);
+		avro_value_iface_decref(&lself->parent.parent);
+		avro_prefix_error("Link target isn't compatible: ");
+		DEBUG("%s", avro_strerror());
+		return EINVAL;
+	}
+
+	lself->target_resolver = target_resolver;
+	lself->next = state->links;
+	state->links = lself;
+
+	*self = &lself->parent;
+	return 0;
+}
+
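+/*
+ * Note that try_link() pushes each new link resolver onto state->links;
+ * the code that builds the overall top-level resolver (not shown here)
+ * walks that list afterwards to perform any remaining fix-ups on the
+ * links.
+ */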
+
+/*-----------------------------------------------------------------------
+ * boolean
+ */
+
+static int
+avro_resolved_writer_set_boolean(const avro_value_iface_t *viface,
+				 void *vself, int val)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Storing %s into %p", val? "TRUE": "FALSE", dest.self);
+	return avro_value_set_boolean(&dest, val);
+}
+
+static int
+try_boolean(memoize_state_t *state, avro_resolved_writer_t **self,
+	    avro_schema_t wschema, avro_schema_t rschema,
+	    avro_schema_t root_rschema)
+{
+	if (is_avro_boolean(rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_boolean = avro_resolved_writer_set_boolean;
+	}
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * bytes
+ */
+
+static int
+avro_resolved_writer_set_bytes(const avro_value_iface_t *viface,
+			       void *vself, void *buf, size_t size)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Storing <%p:%" PRIsz "> into %p", buf, size, dest.self);
+	return avro_value_set_bytes(&dest, buf, size);
+}
+
+static int
+avro_resolved_writer_give_bytes(const avro_value_iface_t *viface,
+				void *vself, avro_wrapped_buffer_t *buf)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Storing [%p] into %p", buf, dest.self);
+	return avro_value_give_bytes(&dest, buf);
+}
+
+static int
+try_bytes(memoize_state_t *state, avro_resolved_writer_t **self,
+	  avro_schema_t wschema, avro_schema_t rschema,
+	  avro_schema_t root_rschema)
+{
+	if (is_avro_bytes(rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_bytes = avro_resolved_writer_set_bytes;
+		(*self)->parent.give_bytes = avro_resolved_writer_give_bytes;
+	}
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * double
+ */
+
+static int
+avro_resolved_writer_set_double(const avro_value_iface_t *viface,
+				void *vself, double val)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Storing %le into %p", val, dest.self);
+	return avro_value_set_double(&dest, val);
+}
+
+static int
+try_double(memoize_state_t *state, avro_resolved_writer_t **self,
+	   avro_schema_t wschema, avro_schema_t rschema,
+	   avro_schema_t root_rschema)
+{
+	if (is_avro_double(rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_double = avro_resolved_writer_set_double;
+	}
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * float
+ */
+
+static int
+avro_resolved_writer_set_float(const avro_value_iface_t *viface,
+			       void *vself, float val)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Storing %e into %p", val, dest.self);
+	return avro_value_set_float(&dest, val);
+}
+
+static int
+avro_resolved_writer_set_float_double(const avro_value_iface_t *viface,
+				      void *vself, float val)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Promoting float %e into double %p", val, dest.self);
+	return avro_value_set_double(&dest, val);
+}
+
+static int
+try_float(memoize_state_t *state, avro_resolved_writer_t **self,
+	  avro_schema_t wschema, avro_schema_t rschema,
+	  avro_schema_t root_rschema)
+{
+	if (is_avro_float(rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_float = avro_resolved_writer_set_float;
+	}
+
+	else if (is_avro_double(rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_float = avro_resolved_writer_set_float_double;
+	}
+
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * int
+ */
+
+static int
+avro_resolved_writer_set_int(const avro_value_iface_t *viface,
+			     void *vself, int32_t val)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Storing %" PRId32 " into %p", val, dest.self);
+	return avro_value_set_int(&dest, val);
+}
+
+static int
+avro_resolved_writer_set_int_double(const avro_value_iface_t *viface,
+				    void *vself, int32_t val)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Promoting int %" PRId32 " into double %p", val, dest.self);
+	return avro_value_set_double(&dest, val);
+}
+
+static int
+avro_resolved_writer_set_int_float(const avro_value_iface_t *viface,
+				   void *vself, int32_t val)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Promoting int %" PRId32 " into float %p", val, dest.self);
+	return avro_value_set_float(&dest, (float) val);
+}
+
+static int
+avro_resolved_writer_set_int_long(const avro_value_iface_t *viface,
+				  void *vself, int32_t val)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Promoting int %" PRId32 " into long %p", val, dest.self);
+	return avro_value_set_long(&dest, val);
+}
+
+static int
+try_int(memoize_state_t *state, avro_resolved_writer_t **self,
+	avro_schema_t wschema, avro_schema_t rschema,
+	avro_schema_t root_rschema)
+{
+	if (is_avro_int32(rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_int = avro_resolved_writer_set_int;
+	}
+
+	else if (is_avro_int64(rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_int = avro_resolved_writer_set_int_long;
+	}
+
+	else if (is_avro_double(rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_int = avro_resolved_writer_set_int_double;
+	}
+
+	else if (is_avro_float(rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_int = avro_resolved_writer_set_int_float;
+	}
+
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * long
+ */
+
+static int
+avro_resolved_writer_set_long(const avro_value_iface_t *viface,
+			      void *vself, int64_t val)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Storing %" PRId64 " into %p", val, dest.self);
+	return avro_value_set_long(&dest, val);
+}
+
+static int
+avro_resolved_writer_set_long_double(const avro_value_iface_t *viface,
+				     void *vself, int64_t val)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Promoting long %" PRId64 " into double %p", val, dest.self);
+	return avro_value_set_double(&dest, (double) val);
+}
+
+static int
+avro_resolved_writer_set_long_float(const avro_value_iface_t *viface,
+				    void *vself, int64_t val)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Promoting long %" PRId64 " into float %p", val, dest.self);
+	return avro_value_set_float(&dest, (float) val);
+}
+
+static int
+try_long(memoize_state_t *state, avro_resolved_writer_t **self,
+	 avro_schema_t wschema, avro_schema_t rschema,
+	 avro_schema_t root_rschema)
+{
+	if (is_avro_int64(rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_long = avro_resolved_writer_set_long;
+	}
+
+	else if (is_avro_double(rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_long = avro_resolved_writer_set_long_double;
+	}
+
+	else if (is_avro_float(rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_long = avro_resolved_writer_set_long_float;
+	}
+
+	return 0;
+}
+
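+/*
+ * Taken together, try_int, try_long, and try_float implement the Avro
+ * promotion rules: int is promotable to long, float, or double; long
+ * is promotable to float or double; and float is promotable to double.
+ */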
+
+/*-----------------------------------------------------------------------
+ * null
+ */
+
+static int
+avro_resolved_writer_set_null(const avro_value_iface_t *viface,
+			      void *vself)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Storing NULL into %p", dest.self);
+	return avro_value_set_null(&dest);
+}
+
+static int
+try_null(memoize_state_t *state, avro_resolved_writer_t **self,
+	 avro_schema_t wschema, avro_schema_t rschema,
+	 avro_schema_t root_rschema)
+{
+	if (is_avro_null(rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_null = avro_resolved_writer_set_null;
+	}
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * string
+ */
+
+static int
+avro_resolved_writer_set_string(const avro_value_iface_t *viface,
+				void *vself, const char *str)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Storing \"%s\" into %p", str, dest.self);
+	return avro_value_set_string(&dest, str);
+}
+
+static int
+avro_resolved_writer_set_string_len(const avro_value_iface_t *viface,
+				    void *vself, const char *str, size_t size)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Storing <%p:%" PRIsz "> into %p", str, size, dest.self);
+	return avro_value_set_string_len(&dest, str, size);
+}
+
+static int
+avro_resolved_writer_give_string_len(const avro_value_iface_t *viface,
+				     void *vself, avro_wrapped_buffer_t *buf)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Storing [%p] into %p", buf, dest.self);
+	return avro_value_give_string_len(&dest, buf);
+}
+
+static int
+try_string(memoize_state_t *state, avro_resolved_writer_t **self,
+	   avro_schema_t wschema, avro_schema_t rschema,
+	   avro_schema_t root_rschema)
+{
+	if (is_avro_string(rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_string = avro_resolved_writer_set_string;
+		(*self)->parent.set_string_len = avro_resolved_writer_set_string_len;
+		(*self)->parent.give_string_len = avro_resolved_writer_give_string_len;
+	}
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * array
+ */
+
+typedef struct avro_resolved_array_writer {
+	avro_resolved_writer_t  parent;
+	avro_resolved_writer_t  *child_resolver;
+} avro_resolved_array_writer_t;
+
+typedef struct avro_resolved_array_value {
+	avro_value_t  wrapped;
+	avro_raw_array_t  children;
+} avro_resolved_array_value_t;
+
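+/*
+ * The children array caches one wrapped child value per appended
+ * element, so that each child handed back to the caller remains valid
+ * for the lifetime of the array value.
+ */
+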
+static void
+avro_resolved_array_writer_calculate_size(avro_resolved_writer_t *iface)
+{
+	avro_resolved_array_writer_t  *aiface =
+	    container_of(iface, avro_resolved_array_writer_t, parent);
+
+	/* Only calculate the size for any resolver once */
+	iface->calculate_size = NULL;
+
+	DEBUG("Calculating size for %s->%s",
+	      avro_schema_type_name((iface)->wschema),
+	      avro_schema_type_name((iface)->rschema));
+	iface->instance_size = sizeof(avro_resolved_array_value_t);
+
+	avro_resolved_writer_calculate_size(aiface->child_resolver);
+}
+
+static void
+avro_resolved_array_writer_free_iface(avro_resolved_writer_t *iface, st_table *freeing)
+{
+	avro_resolved_array_writer_t  *aiface =
+	    container_of(iface, avro_resolved_array_writer_t, parent);
+	free_resolver(aiface->child_resolver, freeing);
+	avro_schema_decref(iface->wschema);
+	avro_schema_decref(iface->rschema);
+	avro_freet(avro_resolved_array_writer_t, iface);
+}
+
+static int
+avro_resolved_array_writer_init(const avro_resolved_writer_t *iface, void *vself)
+{
+	const avro_resolved_array_writer_t  *aiface =
+	    container_of(iface, avro_resolved_array_writer_t, parent);
+	avro_resolved_array_value_t  *self = (avro_resolved_array_value_t *) vself;
+	size_t  child_instance_size = aiface->child_resolver->instance_size;
+	DEBUG("Initializing child array (child_size=%" PRIsz ")", child_instance_size);
+	avro_raw_array_init(&self->children, child_instance_size);
+	return 0;
+}
+
+static void
+avro_resolved_array_writer_free_elements(const avro_resolved_writer_t *child_iface,
+					 avro_resolved_array_value_t *self)
+{
+	size_t  i;
+	for (i = 0; i < avro_raw_array_size(&self->children); i++) {
+		void  *child_self = avro_raw_array_get_raw(&self->children, i);
+		avro_resolved_writer_done(child_iface, child_self);
+	}
+}
+
+static void
+avro_resolved_array_writer_done(const avro_resolved_writer_t *iface, void *vself)
+{
+	const avro_resolved_array_writer_t  *aiface =
+	    container_of(iface, avro_resolved_array_writer_t, parent);
+	avro_resolved_array_value_t  *self = (avro_resolved_array_value_t *) vself;
+	avro_resolved_array_writer_free_elements(aiface->child_resolver, self);
+	avro_raw_array_done(&self->children);
+}
+
+static int
+avro_resolved_array_writer_reset(const avro_resolved_writer_t *iface, void *vself)
+{
+	const avro_resolved_array_writer_t  *aiface =
+	    container_of(iface, avro_resolved_array_writer_t, parent);
+	avro_resolved_array_value_t  *self = (avro_resolved_array_value_t *) vself;
+
+	/* Clear out our cache of wrapped children */
+	avro_resolved_array_writer_free_elements(aiface->child_resolver, self);
+	avro_raw_array_clear(&self->children);
+	return 0;
+}
+
+static int
+avro_resolved_array_writer_get_size(const avro_value_iface_t *viface,
+				    const void *vself, size_t *size)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	const avro_resolved_array_value_t  *self = (const avro_resolved_array_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, &self->wrapped, &dest));
+	return avro_value_get_size(&dest, size);
+}
+
+static int
+avro_resolved_array_writer_append(const avro_value_iface_t *viface,
+				  void *vself, avro_value_t *child_out,
+				  size_t *new_index)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	const avro_resolved_array_writer_t  *aiface =
+	    container_of(iface, avro_resolved_array_writer_t, parent);
+	avro_resolved_array_value_t  *self = (avro_resolved_array_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, &self->wrapped, &dest));
+
+	child_out->iface = &aiface->child_resolver->parent;
+	child_out->self = avro_raw_array_append(&self->children);
+	if (child_out->self == NULL) {
+		avro_set_error("Couldn't expand array");
+		return ENOMEM;
+	}
+
+	DEBUG("Appending to array %p", dest.self);
+	check(rval, avro_value_append(&dest, (avro_value_t *) child_out->self, new_index));
+	return avro_resolved_writer_init(aiface->child_resolver, child_out->self);
+}
+
+static avro_resolved_array_writer_t *
+avro_resolved_array_writer_create(avro_schema_t wschema, avro_schema_t rschema)
+{
+	avro_resolved_writer_t  *self = (avro_resolved_writer_t *) avro_new(avro_resolved_array_writer_t);
+	memset(self, 0, sizeof(avro_resolved_array_writer_t));
+
+	self->parent.incref_iface = avro_resolved_writer_incref_iface;
+	self->parent.decref_iface = avro_resolved_writer_decref_iface;
+	self->parent.incref = avro_resolved_writer_incref;
+	self->parent.decref = avro_resolved_writer_decref;
+	self->parent.reset = avro_resolved_writer_reset;
+	self->parent.get_type = avro_resolved_writer_get_type;
+	self->parent.get_schema = avro_resolved_writer_get_schema;
+	self->parent.get_size = avro_resolved_array_writer_get_size;
+	self->parent.append = avro_resolved_array_writer_append;
+
+	self->refcount = 1;
+	self->wschema = avro_schema_incref(wschema);
+	self->rschema = avro_schema_incref(rschema);
+	self->reader_union_branch = -1;
+	self->calculate_size = avro_resolved_array_writer_calculate_size;
+	self->free_iface = avro_resolved_array_writer_free_iface;
+	self->init = avro_resolved_array_writer_init;
+	self->done = avro_resolved_array_writer_done;
+	self->reset_wrappers = avro_resolved_array_writer_reset;
+	return container_of(self, avro_resolved_array_writer_t, parent);
+}
+
+static int
+try_array(memoize_state_t *state, avro_resolved_writer_t **self,
+	  avro_schema_t wschema, avro_schema_t rschema,
+	  avro_schema_t root_rschema)
+{
+	/*
+	 * First verify that the reader is an array.
+	 */
+
+	if (!is_avro_array(rschema)) {
+		return 0;
+	}
+
+	/*
+	 * Array schemas have to have compatible element schemas to be
+	 * compatible themselves.  Try to create a resolver to check
+	 * the compatibility.
+	 */
+
+	avro_resolved_array_writer_t  *aself =
+	    avro_resolved_array_writer_create(wschema, root_rschema);
+	avro_memoize_set(&state->mem, wschema, root_rschema, aself);
+
+	avro_schema_t  witems = avro_schema_array_items(wschema);
+	avro_schema_t  ritems = avro_schema_array_items(rschema);
+
+	avro_resolved_writer_t  *item_resolver =
+	    avro_resolved_writer_new_memoized(state, witems, ritems);
+	if (item_resolver == NULL) {
+		avro_memoize_delete(&state->mem, wschema, root_rschema);
+		avro_value_iface_decref(&aself->parent.parent);
+		avro_prefix_error("Array values aren't compatible: ");
+		return EINVAL;
+	}
+
+	/*
+	 * The two schemas are compatible.  Store the item schema's
+	 * resolver into the child_resolver field.
+	 */
+
+	aself->child_resolver = item_resolver;
+	*self = &aself->parent;
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * enum
+ */
+
+static int
+avro_resolved_writer_set_enum(const avro_value_iface_t *viface,
+			      void *vself, int val)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Storing %d into %p", val, dest.self);
+	return avro_value_set_enum(&dest, val);
+}
+
+static int
+try_enum(memoize_state_t *state, avro_resolved_writer_t **self,
+	 avro_schema_t wschema, avro_schema_t rschema,
+	 avro_schema_t root_rschema)
+{
+	/*
+	 * Enum schemas have to have the same name (but not the same
+	 * list of symbols) to be compatible.
+	 */
+
+	if (is_avro_enum(rschema)) {
+		const char  *wname = avro_schema_name(wschema);
+		const char  *rname = avro_schema_name(rschema);
+
+		if (strcmp(wname, rname) == 0) {
+			*self = avro_resolved_writer_create(wschema, root_rschema);
+			avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+			(*self)->parent.set_enum = avro_resolved_writer_set_enum;
+		}
+	}
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * fixed
+ */
+
+static int
+avro_resolved_writer_set_fixed(const avro_value_iface_t *viface,
+			       void *vself, void *buf, size_t size)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Storing <%p:%" PRIsz "> into (fixed) %p", buf, size, dest.self);
+	return avro_value_set_fixed(&dest, buf, size);
+}
+
+static int
+avro_resolved_writer_give_fixed(const avro_value_iface_t *viface,
+				void *vself, avro_wrapped_buffer_t *buf)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	avro_value_t  *self = (avro_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, self, &dest));
+	DEBUG("Storing [%p] into (fixed) %p", buf, dest.self);
+	return avro_value_give_fixed(&dest, buf);
+}
+
+static int
+try_fixed(memoize_state_t *state, avro_resolved_writer_t **self,
+	  avro_schema_t wschema, avro_schema_t rschema,
+	  avro_schema_t root_rschema)
+{
+	/*
+	 * Fixed schemas need the same name and size to be compatible.
+	 */
+
+	if (avro_schema_equal(wschema, rschema)) {
+		*self = avro_resolved_writer_create(wschema, root_rschema);
+		avro_memoize_set(&state->mem, wschema, root_rschema, *self);
+		(*self)->parent.set_fixed = avro_resolved_writer_set_fixed;
+		(*self)->parent.give_fixed = avro_resolved_writer_give_fixed;
+	}
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * map
+ */
+
+typedef struct avro_resolved_map_writer {
+	avro_resolved_writer_t  parent;
+	avro_resolved_writer_t  *child_resolver;
+} avro_resolved_map_writer_t;
+
+typedef struct avro_resolved_map_value {
+	avro_value_t  wrapped;
+	avro_raw_array_t  children;
+} avro_resolved_map_value_t;
+
+static void
+avro_resolved_map_writer_calculate_size(avro_resolved_writer_t *iface)
+{
+	avro_resolved_map_writer_t  *miface =
+	    container_of(iface, avro_resolved_map_writer_t, parent);
+
+	/* Only calculate the size for any resolver once */
+	iface->calculate_size = NULL;
+
+	DEBUG("Calculating size for %s->%s",
+	      avro_schema_type_name((iface)->wschema),
+	      avro_schema_type_name((iface)->rschema));
+	iface->instance_size = sizeof(avro_resolved_map_value_t);
+
+	avro_resolved_writer_calculate_size(miface->child_resolver);
+}
+
+static void
+avro_resolved_map_writer_free_iface(avro_resolved_writer_t *iface, st_table *freeing)
+{
+	avro_resolved_map_writer_t  *miface =
+	    container_of(iface, avro_resolved_map_writer_t, parent);
+	free_resolver(miface->child_resolver, freeing);
+	avro_schema_decref(iface->wschema);
+	avro_schema_decref(iface->rschema);
+	avro_freet(avro_resolved_map_writer_t, iface);
+}
+
+static int
+avro_resolved_map_writer_init(const avro_resolved_writer_t *iface, void *vself)
+{
+	const avro_resolved_map_writer_t  *miface =
+	    container_of(iface, avro_resolved_map_writer_t, parent);
+	avro_resolved_map_value_t  *self = (avro_resolved_map_value_t *) vself;
+	size_t  child_instance_size = miface->child_resolver->instance_size;
+	DEBUG("Initializing child array for map (child_size=%" PRIsz ")", child_instance_size);
+	avro_raw_array_init(&self->children, child_instance_size);
+	return 0;
+}
+
+static void
+avro_resolved_map_writer_free_elements(const avro_resolved_writer_t *child_iface,
+				       avro_resolved_map_value_t *self)
+{
+	size_t  i;
+	for (i = 0; i < avro_raw_array_size(&self->children); i++) {
+		void  *child_self = avro_raw_array_get_raw(&self->children, i);
+		avro_resolved_writer_done(child_iface, child_self);
+	}
+}
+
+static void
+avro_resolved_map_writer_done(const avro_resolved_writer_t *iface, void *vself)
+{
+	const avro_resolved_map_writer_t  *miface =
+	    container_of(iface, avro_resolved_map_writer_t, parent);
+	avro_resolved_map_value_t  *self = (avro_resolved_map_value_t *) vself;
+	avro_resolved_map_writer_free_elements(miface->child_resolver, self);
+	avro_raw_array_done(&self->children);
+}
+
+static int
+avro_resolved_map_writer_reset(const avro_resolved_writer_t *iface, void *vself)
+{
+	const avro_resolved_map_writer_t  *miface =
+	    container_of(iface, avro_resolved_map_writer_t, parent);
+	avro_resolved_map_value_t  *self = (avro_resolved_map_value_t *) vself;
+
+	/* Clear out our cache of wrapped children */
+	avro_resolved_map_writer_free_elements(miface->child_resolver, self);
+	return 0;
+}
+
+static int
+avro_resolved_map_writer_get_size(const avro_value_iface_t *viface,
+				  const void *vself, size_t *size)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	const avro_resolved_map_value_t  *self = (const avro_resolved_map_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, &self->wrapped, &dest));
+	return avro_value_get_size(&dest, size);
+}
+
+static int
+avro_resolved_map_writer_add(const avro_value_iface_t *viface,
+			     void *vself, const char *key,
+			     avro_value_t *child, size_t *index, int *is_new)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	const avro_resolved_map_writer_t  *miface =
+	    container_of(iface, avro_resolved_map_writer_t, parent);
+	avro_resolved_map_value_t  *self = (avro_resolved_map_value_t *) vself;
+	avro_value_t  dest;
+	check(rval, avro_resolved_writer_get_real_dest(iface, &self->wrapped, &dest));
+
+	/*
+	 * This is a bit convoluted.  We need to stash the wrapped child
+	 * value somewhere in our children array.  But we don't know
+	 * where to put it until the wrapped map tells us whether this
+	 * is a new value, and if not, which index the value should go
+	 * in.
+	 */
+
+	avro_value_t  real_child;
+	size_t  real_index;
+	int  real_is_new;
+
+	DEBUG("Adding %s to map %p", key, dest.self);
+	check(rval, avro_value_add(&dest, key, &real_child, &real_index, &real_is_new));
+
+	child->iface = &miface->child_resolver->parent;
+	if (real_is_new) {
+		child->self = avro_raw_array_append(&self->children);
+		DEBUG("Element is new (child resolver=%p)", child->self);
+		if (child->self == NULL) {
+			avro_set_error("Couldn't expand map");
+			return ENOMEM;
+		}
+		check(rval, avro_resolved_writer_init
+		      (miface->child_resolver, child->self));
+	} else {
+		child->self = avro_raw_array_get_raw(&self->children, real_index);
+		DEBUG("Element is old (child resolver=%p)", child->self);
+	}
+	avro_value_t  *child_vself = (avro_value_t *) child->self;
+	*child_vself = real_child;
+
+	if (index != NULL) {
+		*index = real_index;
+	}
+	if (is_new != NULL) {
+		*is_new = real_is_new;
+	}
+	return 0;
+}
+
+static avro_resolved_map_writer_t *
+avro_resolved_map_writer_create(avro_schema_t wschema, avro_schema_t rschema)
+{
+	avro_resolved_writer_t  *self = (avro_resolved_writer_t *) avro_new(avro_resolved_map_writer_t);
+	memset(self, 0, sizeof(avro_resolved_map_writer_t));
+
+	self->parent.incref_iface = avro_resolved_writer_incref_iface;
+	self->parent.decref_iface = avro_resolved_writer_decref_iface;
+	self->parent.incref = avro_resolved_writer_incref;
+	self->parent.decref = avro_resolved_writer_decref;
+	self->parent.reset = avro_resolved_writer_reset;
+	self->parent.get_type = avro_resolved_writer_get_type;
+	self->parent.get_schema = avro_resolved_writer_get_schema;
+	self->parent.get_size = avro_resolved_map_writer_get_size;
+	self->parent.add = avro_resolved_map_writer_add;
+
+	self->refcount = 1;
+	self->wschema = avro_schema_incref(wschema);
+	self->rschema = avro_schema_incref(rschema);
+	self->reader_union_branch = -1;
+	self->calculate_size = avro_resolved_map_writer_calculate_size;
+	self->free_iface = avro_resolved_map_writer_free_iface;
+	self->init = avro_resolved_map_writer_init;
+	self->done = avro_resolved_map_writer_done;
+	self->reset_wrappers = avro_resolved_map_writer_reset;
+	return container_of(self, avro_resolved_map_writer_t, parent);
+}
+
+static int
+try_map(memoize_state_t *state, avro_resolved_writer_t **self,
+	avro_schema_t wschema, avro_schema_t rschema,
+	avro_schema_t root_rschema)
+{
+	/*
+	 * First verify that the reader is a map.
+	 */
+
+	if (!is_avro_map(rschema)) {
+		return 0;
+	}
+
+	/*
+	 * Map schemas have to have compatible value schemas to be
+	 * compatible themselves.  Try to create a resolver to check
+	 * the compatibility.
+	 */
+
+	avro_resolved_map_writer_t  *mself =
+	    avro_resolved_map_writer_create(wschema, root_rschema);
+	avro_memoize_set(&state->mem, wschema, root_rschema, mself);
+
+	avro_schema_t  witems = avro_schema_map_values(wschema);
+	avro_schema_t  ritems = avro_schema_map_values(rschema);
+
+	avro_resolved_writer_t  *item_resolver =
+	    avro_resolved_writer_new_memoized(state, witems, ritems);
+	if (item_resolver == NULL) {
+		avro_memoize_delete(&state->mem, wschema, root_rschema);
+		avro_value_iface_decref(&mself->parent.parent);
+		avro_prefix_error("Map values aren't compatible: ");
+		return EINVAL;
+	}
+
+	/*
+	 * The two schemas are compatible.  Store the item schema's
+	 * resolver into the child_resolver field.
+	 */
+
+	mself->child_resolver = item_resolver;
+	*self = &mself->parent;
+	return 0;
+}
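+
+/* A quick sketch of what the above permits (hypothetical schemas, not
+ * part of this file): a writer {"type":"map","values":"int"} resolves
+ * against a reader {"type":"map","values":"long"}, because the value
+ * schemas resolve recursively (int promotes to long); a reader
+ * {"type":"map","values":"string"} fails, and the error is prefixed
+ * with "Map values aren't compatible: ". */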
+
+
+/*-----------------------------------------------------------------------
+ * record
+ */
+
+typedef struct avro_resolved_record_writer {
+	avro_resolved_writer_t  parent;
+	size_t  field_count;
+	size_t  *field_offsets;
+	avro_resolved_writer_t  **field_resolvers;
+	size_t  *index_mapping;
+} avro_resolved_record_writer_t;
+
+typedef struct avro_resolved_record_value {
+	avro_value_t  wrapped;
+	/* The rest of the struct is taken up by the inline storage
+	 * needed for each field. */
+} avro_resolved_record_value_t;
+
+/** Return a pointer to the given field within a record struct. */
+#define avro_resolved_record_field(iface, rec, index) \
+	(((char *) (rec)) + (iface)->field_offsets[(index)])
+
+
+static void
+avro_resolved_record_writer_calculate_size(avro_resolved_writer_t *iface)
+{
+	avro_resolved_record_writer_t  *riface =
+	    container_of(iface, avro_resolved_record_writer_t, parent);
+
+	/* Only calculate the size for any resolver once */
+	iface->calculate_size = NULL;
+
+	DEBUG("Calculating size for %s->%s",
+	      avro_schema_type_name((iface)->wschema),
+	      avro_schema_type_name((iface)->rschema));
+
+	/*
+	 * Once we've figured out which writer fields we actually need,
+	 * calculate an offset for each one.
+	 */
+
+	size_t  wi;
+	size_t  next_offset = sizeof(avro_resolved_record_value_t);
+	for (wi = 0; wi < riface->field_count; wi++) {
+		riface->field_offsets[wi] = next_offset;
+		if (riface->field_resolvers[wi] != NULL) {
+			avro_resolved_writer_calculate_size
+			    (riface->field_resolvers[wi]);
+			size_t  field_size =
+			    riface->field_resolvers[wi]->instance_size;
+			DEBUG("Field %" PRIsz " has size %" PRIsz, wi, field_size);
+			next_offset += field_size;
+		} else {
+			DEBUG("Field %" PRIsz " is being skipped", wi);
+		}
+	}
+
+	DEBUG("Record has size %" PRIsz, next_offset);
+	iface->instance_size = next_offset;
+}
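+
+/* A worked instance of the layout above (hypothetical sizes): with the
+ * avro_resolved_record_value_t header of size H and writer fields of
+ * sizes {24, skipped, 40}, the offsets come out as {H, H+24, H+24}
+ * (a skipped field is assigned an offset but contributes no storage)
+ * and instance_size ends up at H + 64. */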
+
+static void
+avro_resolved_record_writer_free_iface(avro_resolved_writer_t *iface, st_table *freeing)
+{
+	avro_resolved_record_writer_t  *riface =
+	    container_of(iface, avro_resolved_record_writer_t, parent);
+
+	if (riface->field_offsets != NULL) {
+		avro_free(riface->field_offsets,
+			  riface->field_count * sizeof(size_t));
+	}
+
+	if (riface->field_resolvers != NULL) {
+		size_t  i;
+		for (i = 0; i < riface->field_count; i++) {
+			if (riface->field_resolvers[i] != NULL) {
+				DEBUG("Freeing field %" PRIsz " %p", i,
+				      riface->field_resolvers[i]);
+				free_resolver(riface->field_resolvers[i], freeing);
+			}
+		}
+		avro_free(riface->field_resolvers,
+			  riface->field_count * sizeof(avro_resolved_writer_t *));
+	}
+
+	if (riface->index_mapping != NULL) {
+		avro_free(riface->index_mapping,
+			  riface->field_count * sizeof(size_t));
+	}
+
+	avro_schema_decref(iface->wschema);
+	avro_schema_decref(iface->rschema);
+	avro_freet(avro_resolved_record_writer_t, iface);
+}
+
+static int
+avro_resolved_record_writer_init(const avro_resolved_writer_t *iface, void *vself)
+{
+	int  rval;
+	const avro_resolved_record_writer_t  *riface =
+	    container_of(iface, avro_resolved_record_writer_t, parent);
+	avro_resolved_record_value_t  *self = (avro_resolved_record_value_t *) vself;
+
+	/* Initialize each field */
+	size_t  i;
+	for (i = 0; i < riface->field_count; i++) {
+		if (riface->field_resolvers[i] != NULL) {
+			check(rval, avro_resolved_writer_init
+			      (riface->field_resolvers[i],
+			       avro_resolved_record_field(riface, self, i)));
+		}
+	}
+
+	return 0;
+}
+
+static void
+avro_resolved_record_writer_done(const avro_resolved_writer_t *iface, void *vself)
+{
+	const avro_resolved_record_writer_t  *riface =
+	    container_of(iface, avro_resolved_record_writer_t, parent);
+	avro_resolved_record_value_t  *self = (avro_resolved_record_value_t *) vself;
+
+	/* Finalize each field */
+	size_t  i;
+	for (i = 0; i < riface->field_count; i++) {
+		if (riface->field_resolvers[i] != NULL) {
+			avro_resolved_writer_done
+			    (riface->field_resolvers[i],
+			     avro_resolved_record_field(riface, self, i));
+		}
+	}
+}
+
+static int
+avro_resolved_record_writer_reset(const avro_resolved_writer_t *iface, void *vself)
+{
+	int  rval;
+	const avro_resolved_record_writer_t  *riface =
+	    container_of(iface, avro_resolved_record_writer_t, parent);
+	avro_resolved_record_value_t  *self = (avro_resolved_record_value_t *) vself;
+
+	/* Reset each field */
+	size_t  i;
+	for (i = 0; i < riface->field_count; i++) {
+		if (riface->field_resolvers[i] != NULL) {
+			check(rval, avro_resolved_writer_reset_wrappers
+			      (riface->field_resolvers[i],
+			       avro_resolved_record_field(riface, self, i)));
+		}
+	}
+
+	return 0;
+}
+
+static int
+avro_resolved_record_writer_get_size(const avro_value_iface_t *viface,
+				     const void *vself, size_t *size)
+{
+	AVRO_UNUSED(vself);
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	const avro_resolved_record_writer_t  *riface =
+	    container_of(iface, avro_resolved_record_writer_t, parent);
+	*size = riface->field_count;
+	return 0;
+}
+
+static int
+avro_resolved_record_writer_get_by_index(const avro_value_iface_t *viface,
+					 const void *vself, size_t index,
+					 avro_value_t *child, const char **name)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	const avro_resolved_record_writer_t  *riface =
+	    container_of(iface, avro_resolved_record_writer_t, parent);
+	const avro_resolved_record_value_t  *self = (const avro_resolved_record_value_t *) vself;
+	avro_value_t  dest;
+
+	DEBUG("Getting writer field %" PRIsz " from record %p", index, self);
+	if (riface->field_resolvers[index] == NULL) {
+		DEBUG("Reader doesn't have field, skipping");
+		child->iface = NULL;
+		child->self = NULL;
+		return 0;
+	}
+
+	check(rval, avro_resolved_writer_get_real_dest(iface, &self->wrapped, &dest));
+	size_t  reader_index = riface->index_mapping[index];
+	DEBUG("  Reader field is %" PRIsz, reader_index);
+	child->iface = &riface->field_resolvers[index]->parent;
+	child->self = avro_resolved_record_field(riface, self, index);
+
+	return avro_value_get_by_index(&dest, reader_index, (avro_value_t *) child->self, name);
+}
+
+static int
+avro_resolved_record_writer_get_by_name(const avro_value_iface_t *viface,
+					const void *vself, const char *name,
+					avro_value_t *child, size_t *index)
+{
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+
+	int  wi = avro_schema_record_field_get_index(iface->wschema, name);
+	if (wi == -1) {
+		avro_set_error("Record doesn't have field named %s", name);
+		return EINVAL;
+	}
+
+	DEBUG("Writer field %s is at index %d", name, wi);
+	if (index != NULL) {
+		*index = wi;
+	}
+	return avro_resolved_record_writer_get_by_index(viface, vself, wi, child, NULL);
+}
+
+static avro_resolved_record_writer_t *
+avro_resolved_record_writer_create(avro_schema_t wschema, avro_schema_t rschema)
+{
+	avro_resolved_writer_t  *self = (avro_resolved_writer_t *) avro_new(avro_resolved_record_writer_t);
+	memset(self, 0, sizeof(avro_resolved_record_writer_t));
+
+	self->parent.incref_iface = avro_resolved_writer_incref_iface;
+	self->parent.decref_iface = avro_resolved_writer_decref_iface;
+	self->parent.incref = avro_resolved_writer_incref;
+	self->parent.decref = avro_resolved_writer_decref;
+	self->parent.reset = avro_resolved_writer_reset;
+	self->parent.get_type = avro_resolved_writer_get_type;
+	self->parent.get_schema = avro_resolved_writer_get_schema;
+	self->parent.get_size = avro_resolved_record_writer_get_size;
+	self->parent.get_by_index = avro_resolved_record_writer_get_by_index;
+	self->parent.get_by_name = avro_resolved_record_writer_get_by_name;
+
+	self->refcount = 1;
+	self->wschema = avro_schema_incref(wschema);
+	self->rschema = avro_schema_incref(rschema);
+	self->reader_union_branch = -1;
+	self->calculate_size = avro_resolved_record_writer_calculate_size;
+	self->free_iface = avro_resolved_record_writer_free_iface;
+	self->init = avro_resolved_record_writer_init;
+	self->done = avro_resolved_record_writer_done;
+	self->reset_wrappers = avro_resolved_record_writer_reset;
+	return container_of(self, avro_resolved_record_writer_t, parent);
+}
+
+static int
+try_record(memoize_state_t *state, avro_resolved_writer_t **self,
+	   avro_schema_t wschema, avro_schema_t rschema,
+	   avro_schema_t root_rschema)
+{
+	/*
+	 * First verify that the reader is also a record, and has the
+	 * same name as the writer.
+	 */
+
+	if (!is_avro_record(rschema)) {
+		return 0;
+	}
+
+	const char  *wname = avro_schema_name(wschema);
+	const char  *rname = avro_schema_name(rschema);
+
+	if (strcmp(wname, rname) != 0) {
+		return 0;
+	}
+
+	/*
+	 * Categorize the fields in the record schemas.  Fields that are
+	 * only in the writer are ignored.  Fields that are only in the
+	 * reader raise a schema mismatch error, unless the field has a
+	 * default value.  Fields that are in both are resolved
+	 * recursively.
+	 *
+	 * The field_resolvers array will contain a resolver (an
+	 * avro_resolved_writer_t) for each field in the writer
+	 * schema.  To build this array, we
+	 * loop through the fields of the reader schema.  If that field
+	 * is also in the writer schema, we resolve them recursively,
+	 * and store the resolver into the array.  If the field isn't in
+	 * the writer schema, we raise an error.  (TODO: Eventually,
+	 * we'll handle default values here.)  After this loop finishes,
+	 * any NULLs in the field_resolvers array will represent fields
+	 * in the writer but not the reader; these fields will be
+	 * skipped when processing the input.
+	 */
+
+	avro_resolved_record_writer_t  *rself =
+	    avro_resolved_record_writer_create(wschema, root_rschema);
+	avro_memoize_set(&state->mem, wschema, root_rschema, rself);
+
+	size_t  wfields = avro_schema_record_size(wschema);
+	size_t  rfields = avro_schema_record_size(rschema);
+
+	DEBUG("Checking writer record schema %s", wname);
+
+	avro_resolved_writer_t  **field_resolvers =
+	    (avro_resolved_writer_t **) avro_calloc(wfields, sizeof(avro_resolved_writer_t *));
+	size_t  *field_offsets = (size_t *) avro_calloc(wfields, sizeof(size_t));
+	size_t  *index_mapping = (size_t *) avro_calloc(wfields, sizeof(size_t));
+
+	size_t  ri;
+	for (ri = 0; ri < rfields; ri++) {
+		avro_schema_t  rfield =
+		    avro_schema_record_field_get_by_index(rschema, ri);
+		const char  *field_name =
+		    avro_schema_record_field_name(rschema, ri);
+
+		DEBUG("Resolving reader record field %" PRIsz " (%s)", ri, field_name);
+
+		/*
+		 * See if this field is also in the writer schema.
+		 */
+
+		int  wi = avro_schema_record_field_get_index(wschema, field_name);
+
+		if (wi == -1) {
+			/*
+			 * This field isn't in the writer schema.
+			 * Depending on the compile-time flag below, we
+			 * either let it default to zero or treat it as
+			 * an error.  TODO: Handle real default values!
+			 */
+
+			DEBUG("Field %s isn't in writer", field_name);
+
+			/* Allow missing fields in the writer. They
+			 * will default to zero. So skip over the
+			 * missing field, and continue building the
+			 * resolver. Note also that all missing values
+			 * are zero because avro_generic_value_new()
+			 * initializes all values of the reader to 0
+			 * on creation. This is a work-around because
+			 * default values are not implemented yet.
+			 */
+			#ifdef AVRO_ALLOW_MISSING_FIELDS_IN_RESOLVED_WRITER
+			continue;
+			#else
+			avro_set_error("Reader field %s doesn't appear in writer",
+				       field_name);
+			goto error;
+			#endif
+		}
+
+		/*
+		 * Try to recursively resolve the schemas for this
+		 * field.  If they're not compatible, that's an error.
+		 */
+
+		avro_schema_t  wfield =
+		    avro_schema_record_field_get_by_index(wschema, wi);
+		avro_resolved_writer_t  *field_resolver =
+		    avro_resolved_writer_new_memoized(state, wfield, rfield);
+
+		if (field_resolver == NULL) {
+			avro_prefix_error("Field %s isn't compatible: ", field_name);
+			goto error;
+		}
+
+		/*
+		 * Save the details for this field.
+		 */
+
+		DEBUG("Found match for field %s (%" PRIsz " in reader, %d in writer)",
+		      field_name, ri, wi);
+		field_resolvers[wi] = field_resolver;
+		index_mapping[wi] = ri;
+	}
+
+	/*
+	 * We might not have found matches for all of the writer fields,
+	 * but that's okay — any extras will be ignored.
+	 */
+
+	rself->field_count = wfields;
+	rself->field_offsets = field_offsets;
+	rself->field_resolvers = field_resolvers;
+	rself->index_mapping = index_mapping;
+	*self = &rself->parent;
+	return 0;
+
+error:
+	/*
+	 * Clean up any resolver we might have already created.
+	 */
+
+	avro_memoize_delete(&state->mem, wschema, root_rschema);
+	avro_value_iface_decref(&rself->parent.parent);
+
+	{
+		unsigned int  i;
+		for (i = 0; i < wfields; i++) {
+			if (field_resolvers[i]) {
+				avro_value_iface_decref(&field_resolvers[i]->parent);
+			}
+		}
+	}
+
+	avro_free(field_resolvers, wfields * sizeof(avro_resolved_writer_t *));
+	avro_free(field_offsets, wfields * sizeof(size_t));
+	avro_free(index_mapping, wfields * sizeof(size_t));
+	return EINVAL;
+}
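+
+/* A concrete sketch of the mapping built above (hypothetical schemas,
+ * not part of this file): writer record fields {a, b, c} against
+ * reader fields {c, a} yields field_resolvers = {a-resolver, NULL,
+ * c-resolver} and index_mapping = {1, unused, 0}.  Writer field "b"
+ * has no reader counterpart, so its NULL resolver marks it to be read
+ * and discarded, while index_mapping routes writer "a" to reader slot
+ * 1 and writer "c" to reader slot 0. */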
+
+
+/*-----------------------------------------------------------------------
+ * union
+ */
+
+typedef struct avro_resolved_union_writer {
+	avro_resolved_writer_t  parent;
+	size_t  branch_count;
+	avro_resolved_writer_t  **branch_resolvers;
+} avro_resolved_union_writer_t;
+
+typedef struct avro_resolved_union_value {
+	avro_value_t  wrapped;
+
+	/** The currently active branch of the union.  -1 if no branch
+	 * is selected. */
+	int  discriminant;
+
+	/* The rest of the struct is taken up by the inline storage
+	 * needed for the active branch. */
+} avro_resolved_union_value_t;
+
+/** Return a pointer to the active branch within a union struct. */
+#define avro_resolved_union_branch(_union) \
+	(((char *) (_union)) + sizeof(avro_resolved_union_value_t))
+
+
+static void
+avro_resolved_union_writer_calculate_size(avro_resolved_writer_t *iface)
+{
+	avro_resolved_union_writer_t  *uiface =
+	    container_of(iface, avro_resolved_union_writer_t, parent);
+
+	/* Only calculate the size for any resolver once */
+	iface->calculate_size = NULL;
+
+	DEBUG("Calculating size for %s->%s",
+	      avro_schema_type_name((iface)->wschema),
+	      avro_schema_type_name((iface)->rschema));
+
+	size_t  i;
+	size_t  max_branch_size = 0;
+	for (i = 0; i < uiface->branch_count; i++) {
+		if (uiface->branch_resolvers[i] == NULL) {
+			DEBUG("No match for writer union branch %" PRIsz, i);
+		} else {
+			avro_resolved_writer_calculate_size
+			    (uiface->branch_resolvers[i]);
+			size_t  branch_size =
+			    uiface->branch_resolvers[i]->instance_size;
+			DEBUG("Writer branch %" PRIsz " has size %" PRIsz, i, branch_size);
+			if (branch_size > max_branch_size) {
+				max_branch_size = branch_size;
+			}
+		}
+	}
+
+	DEBUG("Maximum branch size is %" PRIsz, max_branch_size);
+	iface->instance_size =
+	    sizeof(avro_resolved_union_value_t) + max_branch_size;
+	DEBUG("Total union size is %" PRIsz, iface->instance_size);
+}
+
+static void
+avro_resolved_union_writer_free_iface(avro_resolved_writer_t *iface, st_table *freeing)
+{
+	avro_resolved_union_writer_t  *uiface =
+	    container_of(iface, avro_resolved_union_writer_t, parent);
+
+	if (uiface->branch_resolvers != NULL) {
+		size_t  i;
+		for (i = 0; i < uiface->branch_count; i++) {
+			if (uiface->branch_resolvers[i] != NULL) {
+				free_resolver(uiface->branch_resolvers[i], freeing);
+			}
+		}
+		avro_free(uiface->branch_resolvers,
+			  uiface->branch_count * sizeof(avro_resolved_writer_t *));
+	}
+
+	avro_schema_decref(iface->wschema);
+	avro_schema_decref(iface->rschema);
+	avro_freet(avro_resolved_union_writer_t, iface);
+}
+
+static int
+avro_resolved_union_writer_init(const avro_resolved_writer_t *iface, void *vself)
+{
+	AVRO_UNUSED(iface);
+	avro_resolved_union_value_t  *self = (avro_resolved_union_value_t *) vself;
+	self->discriminant = -1;
+	return 0;
+}
+
+static void
+avro_resolved_union_writer_done(const avro_resolved_writer_t *iface, void *vself)
+{
+	const avro_resolved_union_writer_t  *uiface =
+	    container_of(iface, avro_resolved_union_writer_t, parent);
+	avro_resolved_union_value_t  *self = (avro_resolved_union_value_t *) vself;
+	if (self->discriminant >= 0) {
+		avro_resolved_writer_done
+		    (uiface->branch_resolvers[self->discriminant],
+		     avro_resolved_union_branch(self));
+		self->discriminant = -1;
+	}
+}
+
+static int
+avro_resolved_union_writer_reset(const avro_resolved_writer_t *iface, void *vself)
+{
+	const avro_resolved_union_writer_t  *uiface =
+	    container_of(iface, avro_resolved_union_writer_t, parent);
+	avro_resolved_union_value_t  *self = (avro_resolved_union_value_t *) vself;
+
+	/* Keep the same branch selected, for the common case that we're
+	 * about to reuse it. */
+	if (self->discriminant >= 0) {
+		return avro_resolved_writer_reset_wrappers
+		    (uiface->branch_resolvers[self->discriminant],
+		     avro_resolved_union_branch(self));
+	}
+
+	return 0;
+}
+
+static int
+avro_resolved_union_writer_set_branch(const avro_value_iface_t *viface,
+				      void *vself, int discriminant,
+				      avro_value_t *branch)
+{
+	int  rval;
+	const avro_resolved_writer_t  *iface =
+	    container_of(viface, avro_resolved_writer_t, parent);
+	const avro_resolved_union_writer_t  *uiface =
+	    container_of(iface, avro_resolved_union_writer_t, parent);
+	avro_resolved_union_value_t  *self = (avro_resolved_union_value_t *) vself;
+
+	DEBUG("Getting writer branch %d from union %p", discriminant, vself);
+	avro_resolved_writer_t  *branch_resolver =
+	    uiface->branch_resolvers[discriminant];
+	if (branch_resolver == NULL) {
+		DEBUG("Reader doesn't have branch, skipping");
+		avro_set_error("Writer union branch %d is incompatible "
+			       "with reader schema \"%s\"",
+			       discriminant, avro_schema_type_name(iface->rschema));
+		return EINVAL;
+	}
+
+	if (self->discriminant == discriminant) {
+		DEBUG("Writer branch %d already selected", discriminant);
+	} else {
+		if (self->discriminant >= 0) {
+			DEBUG("Finalizing old writer branch %d", self->discriminant);
+			avro_resolved_writer_done
+			    (uiface->branch_resolvers[self->discriminant],
+			     avro_resolved_union_branch(self));
+		}
+		DEBUG("Initializing writer branch %d", discriminant);
+		check(rval, avro_resolved_writer_init
+		      (uiface->branch_resolvers[discriminant],
+		       avro_resolved_union_branch(self)));
+		self->discriminant = discriminant;
+	}
+
+	branch->iface = &branch_resolver->parent;
+	branch->self = avro_resolved_union_branch(self);
+	avro_value_t  *branch_vself = (avro_value_t *) branch->self;
+	*branch_vself = self->wrapped;
+	return 0;
+}
+
+static avro_resolved_union_writer_t *
+avro_resolved_union_writer_create(avro_schema_t wschema, avro_schema_t rschema)
+{
+	avro_resolved_writer_t  *self = (avro_resolved_writer_t *) avro_new(avro_resolved_union_writer_t);
+	memset(self, 0, sizeof(avro_resolved_union_writer_t));
+
+	self->parent.incref_iface = avro_resolved_writer_incref_iface;
+	self->parent.decref_iface = avro_resolved_writer_decref_iface;
+	self->parent.incref = avro_resolved_writer_incref;
+	self->parent.decref = avro_resolved_writer_decref;
+	self->parent.reset = avro_resolved_writer_reset;
+	self->parent.get_type = avro_resolved_writer_get_type;
+	self->parent.get_schema = avro_resolved_writer_get_schema;
+	self->parent.set_branch = avro_resolved_union_writer_set_branch;
+
+	self->refcount = 1;
+	self->wschema = avro_schema_incref(wschema);
+	self->rschema = avro_schema_incref(rschema);
+	self->reader_union_branch = -1;
+	self->calculate_size = avro_resolved_union_writer_calculate_size;
+	self->free_iface = avro_resolved_union_writer_free_iface;
+	self->init = avro_resolved_union_writer_init;
+	self->done = avro_resolved_union_writer_done;
+	self->reset_wrappers = avro_resolved_union_writer_reset;
+	return container_of(self, avro_resolved_union_writer_t, parent);
+}
+
+static avro_resolved_writer_t *
+try_union(memoize_state_t *state,
+	  avro_schema_t wschema, avro_schema_t rschema)
+{
+	/*
+	 * For a writer union, we recursively try to resolve each branch
+	 * against the reader schema.  This will work correctly whether
+	 * or not the reader is also a union — if the reader is a union,
+	 * then we'll resolve each (non-union) writer branch against the
+	 * reader union, which will be checked in our calls to
+	 * check_simple_writer below.  The net result is that we might
+	 * end up trying every combination of writer and reader
+	 * branches, when looking for compatible schemas.
+	 *
+	 * Regardless of what the reader schema is, for each writer
+	 * branch, we stash away the recursive resolver into the
+	 * branch_resolvers array.  A NULL entry in this array means
+	 * that that branch isn't compatible with the reader.  This
+	 * isn't an immediate schema resolution error, since we allow
+	 * incompatible branches in the writer union as long as that branch
+	 * never appears in the actual data.  We only return an error if
+	 * there are *no* branches that are compatible.
+	 */
+
+	size_t  branch_count = avro_schema_union_size(wschema);
+	DEBUG("Checking %" PRIsz "-branch writer union schema", branch_count);
+
+	avro_resolved_union_writer_t  *uself =
+	    avro_resolved_union_writer_create(wschema, rschema);
+	avro_memoize_set(&state->mem, wschema, rschema, uself);
+
+	avro_resolved_writer_t  **branch_resolvers =
+	    (avro_resolved_writer_t **) avro_calloc(branch_count, sizeof(avro_resolved_writer_t *));
+	int  some_branch_compatible = 0;
+
+	size_t  i;
+	for (i = 0; i < branch_count; i++) {
+		avro_schema_t  branch_schema =
+		    avro_schema_union_branch(wschema, i);
+
+		DEBUG("Resolving writer union branch %" PRIsz " (%s)", i,
+		      avro_schema_type_name(branch_schema));
+
+		/*
+		 * Try to recursively resolve this branch of the writer
+		 * union.  Don't raise an error if this fails — it's
+		 * okay for some of the branches to not be compatible
+		 * with the reader, as long as those branches never
+		 * appear in the input.
+		 */
+
+		branch_resolvers[i] =
+		    avro_resolved_writer_new_memoized(state, branch_schema, rschema);
+		if (branch_resolvers[i] == NULL) {
+			DEBUG("No match for writer union branch %" PRIsz, i);
+		} else {
+			DEBUG("Found match for writer union branch %" PRIsz, i);
+			some_branch_compatible = 1;
+		}
+	}
+
+	/*
+	 * As long as there's at least one branch that's compatible with
+	 * the reader, then we consider this schema resolution a
+	 * success.
+	 */
+
+	if (!some_branch_compatible) {
+		DEBUG("No writer union branches match");
+		avro_set_error("No branches in the writer are compatible "
+			       "with reader schema %s",
+			       avro_schema_type_name(rschema));
+		goto error;
+	}
+
+	uself->branch_count = branch_count;
+	uself->branch_resolvers = branch_resolvers;
+	return &uself->parent;
+
+error:
+	/*
+	 * Clean up any resolver we might have already created.
+	 */
+
+	avro_memoize_delete(&state->mem, wschema, rschema);
+	avro_value_iface_decref(&uself->parent.parent);
+
+	{
+		unsigned int  i;
+		for (i = 0; i < branch_count; i++) {
+			if (branch_resolvers[i]) {
+				avro_value_iface_decref(&branch_resolvers[i]->parent);
+			}
+		}
+	}
+
+	avro_free(branch_resolvers, branch_count * sizeof(avro_resolved_writer_t *));
+	return NULL;
+}
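+
+/* For example (hypothetical schemas, not part of this file): resolving
+ * a writer union ["int","string"] against a reader "string" leaves
+ * branch_resolvers[0] NULL and branch_resolvers[1] set.  Resolution
+ * succeeds, but if the data ever selects the int branch,
+ * avro_resolved_union_writer_set_branch() reports the incompatibility
+ * at that point. */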
+
+
+/*-----------------------------------------------------------------------
+ * Schema type dispatcher
+ */
+
+static avro_resolved_writer_t *
+avro_resolved_writer_new_memoized(memoize_state_t *state,
+				  avro_schema_t wschema, avro_schema_t rschema)
+{
+	check_param(NULL, is_avro_schema(wschema), "writer schema");
+	check_param(NULL, is_avro_schema(rschema), "reader schema");
+
+	skip_links(rschema);
+
+	/*
+	 * First see if we've already matched these two schemas.  If so,
+	 * just return that resolver.
+	 */
+
+	avro_resolved_writer_t  *saved = NULL;
+	if (avro_memoize_get(&state->mem, wschema, rschema, (void **) &saved)) {
+		DEBUG("Already resolved %s%s%s->%s",
+		      is_avro_link(wschema)? "[": "",
+		      avro_schema_type_name(wschema),
+		      is_avro_link(wschema)? "]": "",
+		      avro_schema_type_name(rschema));
+		avro_value_iface_incref(&saved->parent);
+		return saved;
+	} else {
+		DEBUG("Resolving %s%s%s->%s",
+		      is_avro_link(wschema)? "[": "",
+		      avro_schema_type_name(wschema),
+		      is_avro_link(wschema)? "]": "",
+		      avro_schema_type_name(rschema));
+	}
+
+	/*
+	 * Otherwise we have some work to do.
+	 */
+
+	switch (avro_typeof(wschema))
+	{
+		case AVRO_BOOLEAN:
+			check_simple_writer(state, wschema, rschema, boolean);
+			return NULL;
+
+		case AVRO_BYTES:
+			check_simple_writer(state, wschema, rschema, bytes);
+			return NULL;
+
+		case AVRO_DOUBLE:
+			check_simple_writer(state, wschema, rschema, double);
+			return NULL;
+
+		case AVRO_FLOAT:
+			check_simple_writer(state, wschema, rschema, float);
+			return NULL;
+
+		case AVRO_INT32:
+			check_simple_writer(state, wschema, rschema, int);
+			return NULL;
+
+		case AVRO_INT64:
+			check_simple_writer(state, wschema, rschema, long);
+			return NULL;
+
+		case AVRO_NULL:
+			check_simple_writer(state, wschema, rschema, null);
+			return NULL;
+
+		case AVRO_STRING:
+			check_simple_writer(state, wschema, rschema, string);
+			return NULL;
+
+		case AVRO_ARRAY:
+			check_simple_writer(state, wschema, rschema, array);
+			return NULL;
+
+		case AVRO_ENUM:
+			check_simple_writer(state, wschema, rschema, enum);
+			return NULL;
+
+		case AVRO_FIXED:
+			check_simple_writer(state, wschema, rschema, fixed);
+			return NULL;
+
+		case AVRO_MAP:
+			check_simple_writer(state, wschema, rschema, map);
+			return NULL;
+
+		case AVRO_RECORD:
+			check_simple_writer(state, wschema, rschema, record);
+			return NULL;
+
+		case AVRO_UNION:
+			return try_union(state, wschema, rschema);
+
+		case AVRO_LINK:
+			check_simple_writer(state, wschema, rschema, link);
+			return NULL;
+
+		default:
+			avro_set_error("Unknown schema type");
+			return NULL;
+	}
+
+	return NULL;
+}
+
+
+avro_value_iface_t *
+avro_resolved_writer_new(avro_schema_t wschema, avro_schema_t rschema)
+{
+	/*
+	 * Create a state to keep track of the value implementations
+	 * that we create for each subschema.
+	 */
+
+	memoize_state_t  state;
+	avro_memoize_init(&state.mem);
+	state.links = NULL;
+
+	/*
+	 * Create the value implementations.
+	 */
+
+	avro_resolved_writer_t  *result =
+	    avro_resolved_writer_new_memoized(&state, wschema, rschema);
+	if (result == NULL) {
+		avro_memoize_done(&state.mem);
+		return NULL;
+	}
+
+	/*
+	 * Fix up any link schemas so that their value implementations
+	 * point to their target schemas' implementations.
+	 */
+
+	avro_resolved_writer_calculate_size(result);
+	while (state.links != NULL) {
+		avro_resolved_link_writer_t  *liface = state.links;
+		avro_resolved_writer_calculate_size(liface->target_resolver);
+		state.links = liface->next;
+		liface->next = NULL;
+	}
+
+	/*
+	 * And now we can return.
+	 */
+
+	avro_memoize_done(&state.mem);
+	return &result->parent;
+}
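+
+/* A minimal usage sketch for this entry point (not part of the
+ * upstream file; it assumes the public helpers declared in
+ * avro/resolver.h, avro/generic.h and avro/schema.h):
+ *
+ *     avro_schema_t  w = avro_schema_int();
+ *     avro_schema_t  r = avro_schema_long();
+ *     avro_value_iface_t  *resolver = avro_resolved_writer_new(w, r);
+ *     avro_value_iface_t  *giface = avro_generic_class_from_schema(r);
+ *     avro_value_t  dest, wrapped;
+ *     avro_generic_value_new(giface, &dest);
+ *     avro_resolved_writer_new_value(resolver, &wrapped);
+ *     avro_resolved_writer_set_dest(&wrapped, &dest);
+ *     avro_value_set_int(&wrapped, 42);   // lands in dest as an int64
+ *
+ * Writing through the wrapped value stores into dest, applying the
+ * int-to-long promotion chosen during resolution. */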
diff --git a/lang/c/src/resolver.c b/lang/c/src/resolver.c
new file mode 100644
index 0000000..7d9e7f3
--- /dev/null
+++ b/lang/c/src/resolver.c
@@ -0,0 +1,1338 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro/platform.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro/allocation.h"
+#include "avro/consumer.h"
+#include "avro/data.h"
+#include "avro/errors.h"
+#include "avro/legacy.h"
+#include "avro/schema.h"
+#include "avro_private.h"
+#include "st.h"
+
+
+#if !defined(DEBUG_RESOLVER)
+#define DEBUG_RESOLVER 0
+#endif
+
+#if DEBUG_RESOLVER
+#include <stdio.h>
+#define debug(...) { fprintf(stderr, __VA_ARGS__); fprintf(stderr, "\n"); }
+#else
+#define debug(...) /* no debug output */
+#endif
+
+
+typedef struct avro_resolver_t  avro_resolver_t;
+
+struct avro_resolver_t {
+	avro_consumer_t  parent;
+
+	/* The reader schema for this resolver. */
+	avro_schema_t  rschema;
+
+	/* An array of any child resolvers needed for the subschemas of
+	 * wschema */
+	avro_consumer_t  **child_resolvers;
+
+	/* If the reader and writer schemas are records, this field
+	 * contains a mapping from writer field indices to reader field
+	 * indices. */
+	int  *index_mapping;
+
+	/* The number of elements in the child_resolvers and
+	 * index_mapping arrays. */
+	size_t  num_children;
+
+	/* If the reader schema is a union, but the writer schema is
+	 * not, this field indicates which branch of the reader union
+	 * should be selected. */
+	int  reader_union_branch;
+};
+
+
+/**
+ * Frees a resolver object, while ensuring that each resolver in a
+ * graph of resolvers is freed only once.
+ */
+
+static void
+avro_resolver_free_cycles(avro_consumer_t *consumer, st_table *freeing)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+
+	/*
+	 * First check if we've already started freeing this resolver.
+	 */
+
+	if (st_lookup(freeing, (st_data_t) resolver, NULL)) {
+		return;
+	}
+
+	/*
+	 * Otherwise add this resolver to the freeing set, and then
+	 * actually free the thing.
+	 */
+
+	st_insert(freeing, (st_data_t) resolver, (st_data_t) NULL);
+
+	avro_schema_decref(resolver->parent.schema);
+	avro_schema_decref(resolver->rschema);
+	if (resolver->child_resolvers) {
+		unsigned int  i;
+		for (i = 0; i < resolver->num_children; i++) {
+			avro_consumer_t  *child = resolver->child_resolvers[i];
+			if (child) {
+				avro_resolver_free_cycles(child, freeing);
+			}
+		}
+		avro_free(resolver->child_resolvers,
+			  sizeof(avro_resolver_t *) * resolver->num_children);
+	}
+	if (resolver->index_mapping) {
+		avro_free(resolver->index_mapping,
+			  sizeof(int) * resolver->num_children);
+	}
+	avro_freet(avro_resolver_t, resolver);
+}
+
+
+static void
+avro_resolver_free(avro_consumer_t *consumer)
+{
+	st_table  *freeing = st_init_numtable();
+	avro_resolver_free_cycles(consumer, freeing);
+	st_free_table(freeing);
+}
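+
+/* The two-phase free above matters because resolvers can form cycles:
+ * a recursive schema (for example, a record that refers to itself
+ * through an AVRO_LINK) produces a resolver graph that points back
+ * into itself.  The "freeing" set records every resolver we have
+ * started to free, so each node is released exactly once. */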
+
+/**
+ * Create a new avro_resolver_t instance.  You must fill in the callback
+ * pointers that are appropriate for the writer schema after this
+ * function returns.
+ */
+
+static avro_resolver_t *
+avro_resolver_create(avro_schema_t wschema,
+		     avro_schema_t rschema)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) avro_new(avro_resolver_t);
+	memset(resolver, 0, sizeof(avro_resolver_t));
+
+	resolver->parent.free = avro_resolver_free;
+	resolver->parent.schema = avro_schema_incref(wschema);
+	resolver->rschema = avro_schema_incref(rschema);
+	resolver->reader_union_branch = -1;
+	return resolver;
+}
+
+
+static avro_datum_t
+avro_resolver_get_real_dest(avro_resolver_t *resolver, avro_datum_t dest)
+{
+	if (resolver->reader_union_branch < 0) {
+		/*
+		 * The reader schema isn't a union, so use the dest
+		 * field as-is.
+		 */
+
+		return dest;
+	}
+
+	debug("Retrieving union branch %d for %s value",
+	      resolver->reader_union_branch,
+	      avro_schema_type_name(resolver->parent.schema));
+
+	avro_datum_t  branch = NULL;
+	avro_union_set_discriminant
+	    (dest, resolver->reader_union_branch, &branch);
+	return branch;
+}
+
+
+#define skip_links(schema)					\
+	while (is_avro_link(schema)) {				\
+		schema = avro_schema_link_target(schema);	\
+	}
+
+
+/*-----------------------------------------------------------------------
+ * Memoized resolvers
+ */
+
+static avro_consumer_t *
+avro_resolver_new_memoized(avro_memoize_t *mem,
+			   avro_schema_t wschema, avro_schema_t rschema);
+
+
+/*-----------------------------------------------------------------------
+ * Reader unions
+ */
+
+/*
+ * For each Avro type, we have to check whether the reader schema on its
+ * own is compatible, and whether the reader is a union that contains a
+ * compatible type.  The macros in this section help us perform both of
+ * these checks with less code.
+ */
+
+
+/**
+ * A helper macro that handles the case where neither writer nor reader
+ * are unions.  Uses @ref check_func to see if the two schemas are
+ * compatible.
+ */
+
+#define check_non_union(saved, wschema, rschema, check_func)	\
+do {								\
+	avro_resolver_t  *self = NULL;				\
+	int  rc = check_func(saved, &self, wschema, rschema,	\
+			     rschema);				\
+	if (self) {						\
+		debug("Non-union schemas %s (writer) "		\
+		      "and %s (reader) match",			\
+		      avro_schema_type_name(wschema),		\
+		      avro_schema_type_name(rschema));		\
+								\
+		self->reader_union_branch = -1;			\
+		return &self->parent;				\
+        }							\
+								\
+        if (rc) {						\
+		return NULL;					\
+	}							\
+} while (0)
+
+
+/**
+ * Helper macro that handles the case where the reader is a union, and
+ * the writer is not.  Checks each branch of the reader union schema,
+ * looking for the first branch that is compatible with the writer
+ * schema.  The @ref check_func argument should be a function that can
+ * check the compatibility of each branch schema.
+ */
+
+#define check_reader_union(saved, wschema, rschema, check_func)		\
+do {									\
+	if (!is_avro_union(rschema)) {					\
+		break;							\
+	}								\
+									\
+	debug("Checking reader union schema");				\
+	size_t  num_branches = avro_schema_union_size(rschema);		\
+	unsigned int  i;						\
+									\
+	for (i = 0; i < num_branches; i++) {				\
+		avro_schema_t  branch_schema =				\
+		    avro_schema_union_branch(rschema, i);		\
+		skip_links(branch_schema);				\
+		avro_resolver_t  *self = NULL;				\
+		int  rc = check_func(saved, &self,			\
+				     wschema, branch_schema,		\
+				     rschema);				\
+		if (self) {						\
+			debug("Reader union branch %d (%s) "		\
+			      "and writer %s match",			\
+			      i, avro_schema_type_name(branch_schema),	\
+			      avro_schema_type_name(wschema));		\
+			self->reader_union_branch = i;			\
+			return &self->parent;				\
+		} else {						\
+			debug("Reader union branch %d (%s) "		\
+			      "doesn't match",				\
+			      i, avro_schema_type_name(branch_schema));	\
+		}							\
+									\
+		if (rc) {						\
+			return NULL;					\
+		}							\
+	}								\
+									\
+	debug("No reader union branches match");			\
+} while (0)
+
+/**
+ * A helper macro that wraps together check_non_union and
+ * check_reader_union for a simple (non-union) writer schema type.
+ */
+
+#define check_simple_writer(saved, wschema, rschema, type_name)		\
+do {									\
+	check_non_union(saved, wschema, rschema, try_##type_name);	\
+	check_reader_union(saved, wschema, rschema, try_##type_name);	\
+	debug("Writer %s doesn't match reader %s",			\
+	      avro_schema_type_name(wschema),				\
+	      avro_schema_type_name(rschema));				\
+	avro_set_error("Cannot store " #type_name " into %s",		\
+		       avro_schema_type_name(rschema));			\
+	return NULL;							\
+} while (0)
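+
+/* So a call like check_simple_writer(mem, wschema, rschema, boolean)
+ * expands to: first try_boolean() against the reader schema itself,
+ * then try_boolean() against each branch of a reader union, and
+ * finally a "Cannot store boolean into ..." error if nothing
+ * matched. */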
+
+
+/*-----------------------------------------------------------------------
+ * primitives
+ */
+
+static int
+avro_resolver_boolean_value(avro_consumer_t *consumer, int value,
+			    void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Storing %s into %p", value? "TRUE": "FALSE", dest);
+	return avro_boolean_set(dest, value);
+}
+
+static int
+try_boolean(avro_memoize_t *mem, avro_resolver_t **resolver,
+	    avro_schema_t wschema, avro_schema_t rschema,
+	    avro_schema_t root_rschema)
+{
+	if (is_avro_boolean(rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.boolean_value = avro_resolver_boolean_value;
+	}
+	return 0;
+}
+
+
+static void
+free_bytes(void *ptr, size_t sz)
+{
+	/*
+	 * The binary encoder class allocates bytes values with an extra
+	 * byte, so that they're NUL terminated.
+	 */
+	avro_free(ptr, sz+1);
+}
+
+static int
+avro_resolver_bytes_value(avro_consumer_t *consumer,
+			  const void *value, size_t value_len,
+			  void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Storing %" PRIsz " bytes into %p", value_len, dest);
+	return avro_givebytes_set(dest, (const char *) value, value_len, free_bytes);
+}
+
+static int
+try_bytes(avro_memoize_t *mem, avro_resolver_t **resolver,
+	  avro_schema_t wschema, avro_schema_t rschema,
+	  avro_schema_t root_rschema)
+{
+	if (is_avro_bytes(rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.bytes_value = avro_resolver_bytes_value;
+	}
+	return 0;
+}
+
+
+static int
+avro_resolver_double_value(avro_consumer_t *consumer, double value,
+			   void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Storing %le into %p", value, dest);
+	return avro_double_set(dest, value);
+}
+
+static int
+try_double(avro_memoize_t *mem, avro_resolver_t **resolver,
+	   avro_schema_t wschema, avro_schema_t rschema,
+	   avro_schema_t root_rschema)
+{
+	if (is_avro_double(rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.double_value = avro_resolver_double_value;
+	}
+	return 0;
+}
+
+
+static int
+avro_resolver_float_value(avro_consumer_t *consumer, float value,
+			  void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Storing %e into %p", value, dest);
+	return avro_float_set(dest, value);
+}
+
+static int
+avro_resolver_float_double_value(avro_consumer_t *consumer, float value,
+				 void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Storing %e into %p (promoting float to double)", value, dest);
+	return avro_double_set(dest, value);
+}
+
+static int
+try_float(avro_memoize_t *mem, avro_resolver_t **resolver,
+	  avro_schema_t wschema, avro_schema_t rschema,
+	  avro_schema_t root_rschema)
+{
+	if (is_avro_float(rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.float_value = avro_resolver_float_value;
+	}
+	else if (is_avro_double(rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.float_value = avro_resolver_float_double_value;
+	}
+	return 0;
+}
+
+
+static int
+avro_resolver_int_value(avro_consumer_t *consumer, int32_t value,
+			void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Storing %" PRId32 " into %p", value, dest);
+	return avro_int32_set(dest, value);
+}
+
+static int
+avro_resolver_int_long_value(avro_consumer_t *consumer, int32_t value,
+			     void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Storing %" PRId32 " into %p (promoting int to long)", value, dest);
+	return avro_int64_set(dest, value);
+}
+
+static int
+avro_resolver_int_double_value(avro_consumer_t *consumer, int32_t value,
+			       void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Storing %" PRId32 " into %p (promoting int to double)", value, dest);
+	return avro_double_set(dest, value);
+}
+
+static int
+avro_resolver_int_float_value(avro_consumer_t *consumer, int32_t value,
+			      void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Storing %" PRId32 " into %p (promoting int to float)", value, dest);
+	return avro_float_set(dest, (const float) value);
+}
+
+static int
+try_int(avro_memoize_t *mem, avro_resolver_t **resolver,
+	avro_schema_t wschema, avro_schema_t rschema,
+	avro_schema_t root_rschema)
+{
+	if (is_avro_int32(rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.int_value = avro_resolver_int_value;
+	}
+	else if (is_avro_int64(rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.int_value = avro_resolver_int_long_value;
+	}
+	else if (is_avro_double(rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.int_value = avro_resolver_int_double_value;
+	}
+	else if (is_avro_float(rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.int_value = avro_resolver_int_float_value;
+	}
+	return 0;
+}
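+
+/* The four branches above implement the Avro promotion rules for an
+ * int writer: an int reader stores directly, while long, double and
+ * float readers each get a promoting callback.  For example, a writer
+ * int of 42 read with a reader schema of "double" arrives via
+ * avro_resolver_int_double_value() and is stored with
+ * avro_double_set(). */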
+
+
+static int
+avro_resolver_long_value(avro_consumer_t *consumer, int64_t value,
+			 void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Storing %" PRId64 " into %p", value, dest);
+	return avro_int64_set(dest, value);
+}
+
+static int
+avro_resolver_long_float_value(avro_consumer_t *consumer, int64_t value,
+			       void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Storing %" PRId64 " into %p (promoting long to float)", value, dest);
+	return avro_float_set(dest, (const float) value);
+}
+
+static int
+avro_resolver_long_double_value(avro_consumer_t *consumer, int64_t value,
+				void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Storing %" PRId64 " into %p (promoting long to double)", value, dest);
+	return avro_double_set(dest, (const double) value);
+}
+
+static int
+try_long(avro_memoize_t *mem, avro_resolver_t **resolver,
+	 avro_schema_t wschema, avro_schema_t rschema,
+	 avro_schema_t root_rschema)
+{
+	if (is_avro_int64(rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.long_value = avro_resolver_long_value;
+	}
+	else if (is_avro_double(rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.long_value = avro_resolver_long_double_value;
+	}
+	else if (is_avro_float(rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.long_value = avro_resolver_long_float_value;
+	}
+	return 0;
+}
+
+
+static int
+avro_resolver_null_value(avro_consumer_t *consumer, void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+
+	AVRO_UNUSED(dest);
+	debug("Storing null into %p", dest);
+	return 0;
+}
+
+static int
+try_null(avro_memoize_t *mem, avro_resolver_t **resolver,
+	 avro_schema_t wschema, avro_schema_t rschema,
+	 avro_schema_t root_rschema)
+{
+	if (is_avro_null(rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.null_value = avro_resolver_null_value;
+	}
+	return 0;
+}
+
+
+static int
+avro_resolver_string_value(avro_consumer_t *consumer,
+			   const void *value, size_t value_len,
+			   void *user_data)
+{
+	AVRO_UNUSED(value_len);
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Storing \"%s\" into %p", (const char *) value, dest);
+	return avro_givestring_set(dest, (const char *) value, avro_alloc_free_func);
+}
+
+static int
+try_string(avro_memoize_t *mem, avro_resolver_t **resolver,
+	   avro_schema_t wschema, avro_schema_t rschema,
+	   avro_schema_t root_rschema)
+{
+	if (is_avro_string(rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.string_value = avro_resolver_string_value;
+	}
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * arrays
+ */
+
+static int
+avro_resolver_array_start_block(avro_consumer_t *consumer,
+				int is_first_block,
+				unsigned int block_count,
+				void *user_data)
+{
+	if (is_first_block) {
+		avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+		avro_datum_t  ud_dest = (avro_datum_t) user_data;
+		avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+		AVRO_UNUSED(dest);
+
+		debug("Starting array %p", dest);
+	}
+
+	AVRO_UNUSED(block_count);
+	return 0;
+}
+
+static int
+avro_resolver_array_element(avro_consumer_t *consumer,
+			    unsigned int index,
+			    avro_consumer_t **element_consumer,
+			    void **element_user_data,
+			    void *user_data)
+{
+	AVRO_UNUSED(index);
+
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Adding element to array %p", dest);
+
+	/*
+	 * Allocate a new element datum and add it to the array.
+	 */
+
+	avro_schema_t  array_schema = avro_datum_get_schema(dest);
+	avro_schema_t  item_schema = avro_schema_array_items(array_schema);
+	avro_datum_t  element = avro_datum_from_schema(item_schema);
+	avro_array_append_datum(dest, element);
+	avro_datum_decref(element);
+
+	/*
+	 * Return the consumer that we allocated to process the array's
+	 * children.
+	 */
+
+	*element_consumer = resolver->child_resolvers[0];
+	*element_user_data = element;
+	return 0;
+}
+
+static int
+try_array(avro_memoize_t *mem, avro_resolver_t **resolver,
+	  avro_schema_t wschema, avro_schema_t rschema,
+	  avro_schema_t root_rschema)
+{
+	/*
+	 * First verify that the reader is an array.
+	 */
+
+	if (!is_avro_array(rschema)) {
+		return 0;
+	}
+
+	/*
+	 * Array schemas have to have compatible element schemas to be
+	 * compatible themselves.  Try to create an avro_resolver_t to
+	 * check the compatibility.
+	 */
+
+	*resolver = avro_resolver_create(wschema, root_rschema);
+	avro_memoize_set(mem, wschema, root_rschema, *resolver);
+
+	avro_schema_t  witems = avro_schema_array_items(wschema);
+	avro_schema_t  ritems = avro_schema_array_items(rschema);
+
+	avro_consumer_t  *item_consumer =
+	    avro_resolver_new_memoized(mem, witems, ritems);
+	if (!item_consumer) {
+		avro_memoize_delete(mem, wschema, root_rschema);
+		avro_consumer_free(&(*resolver)->parent);
+		avro_prefix_error("Array values aren't compatible: ");
+		return EINVAL;
+	}
+
+	/*
+	 * The two schemas are compatible.  Store the item schema's
+	 * resolver into the child_resolvers field.
+	 */
+
+	(*resolver)->num_children = 1;
+	(*resolver)->child_resolvers = (avro_consumer_t **) avro_calloc(1, sizeof(avro_consumer_t *));
+	(*resolver)->child_resolvers[0] = item_consumer;
+	(*resolver)->parent.array_start_block = avro_resolver_array_start_block;
+	(*resolver)->parent.array_element = avro_resolver_array_element;
+
+	return 0;
+}
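+
+/* During decoding, the consumer callbacks above fire in sequence: the
+ * first block triggers avro_resolver_array_start_block(), and each
+ * element triggers avro_resolver_array_element(), which appends a
+ * fresh datum to the destination array and hands back
+ * child_resolvers[0] to consume the element's contents. */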
+
+
+/*-----------------------------------------------------------------------
+ * enums
+ */
+
+static int
+avro_resolver_enum_value(avro_consumer_t *consumer, int value,
+			 void *user_data)
+{
+	AVRO_UNUSED(value);
+
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+
+	const char  *symbol_name = avro_schema_enum_get(resolver->parent.schema, value);
+	debug("Storing symbol %s into %p", symbol_name, dest);
+	return avro_enum_set_name(dest, symbol_name);
+}
+
+static int
+try_enum(avro_memoize_t *mem, avro_resolver_t **resolver,
+	 avro_schema_t wschema, avro_schema_t rschema,
+	 avro_schema_t root_rschema)
+{
+	/*
+	 * Enum schemas have to have the same name — but not the same
+	 * list of symbols — to be compatible.
+	 */
+
+	if (is_avro_enum(rschema)) {
+		const char  *wname = avro_schema_name(wschema);
+		const char  *rname = avro_schema_name(rschema);
+
+		if (!strcmp(wname, rname)) {
+			*resolver = avro_resolver_create(wschema, root_rschema);
+			avro_memoize_set(mem, wschema, root_rschema, *resolver);
+			(*resolver)->parent.enum_value = avro_resolver_enum_value;
+		}
+	}
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * fixed
+ */
+
+static int
+avro_resolver_fixed_value(avro_consumer_t *consumer,
+			  const void *value, size_t value_len,
+			  void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Storing (fixed) %" PRIsz " bytes into %p", value_len, dest);
+	return avro_givefixed_set(dest, (const char *) value, value_len, avro_alloc_free_func);
+}
+
+static int
+try_fixed(avro_memoize_t *mem, avro_resolver_t **resolver,
+	  avro_schema_t wschema, avro_schema_t rschema,
+	  avro_schema_t root_rschema)
+{
+	/*
+	 * Fixed schemas need the same name and size to be compatible.
+	 */
+
+	if (avro_schema_equal(wschema, rschema)) {
+		*resolver = avro_resolver_create(wschema, root_rschema);
+		avro_memoize_set(mem, wschema, root_rschema, *resolver);
+		(*resolver)->parent.fixed_value = avro_resolver_fixed_value;
+	}
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * maps
+ */
+
+static int
+avro_resolver_map_start_block(avro_consumer_t *consumer,
+			      int is_first_block,
+			      unsigned int block_count,
+			      void *user_data)
+{
+	if (is_first_block) {
+		avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+		avro_datum_t  ud_dest = (avro_datum_t) user_data;
+		avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+		AVRO_UNUSED(dest);
+
+		debug("Starting map %p", dest);
+	}
+
+	AVRO_UNUSED(block_count);
+	return 0;
+}
+
+static int
+avro_resolver_map_element(avro_consumer_t *consumer,
+			  unsigned int index,
+			  const char *key,
+			  avro_consumer_t **value_consumer,
+			  void **value_user_data,
+			  void *user_data)
+{
+	AVRO_UNUSED(index);
+
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	debug("Adding element to map %p", dest);
+
+	/*
+	 * Allocate a new element datum and add it to the map.
+	 */
+
+	avro_schema_t  map_schema = avro_datum_get_schema(dest);
+	avro_schema_t  value_schema = avro_schema_map_values(map_schema);
+	avro_datum_t  value = avro_datum_from_schema(value_schema);
+	avro_map_set(dest, key, value);
+	avro_datum_decref(value);
+
+	/*
+	 * Return the consumer that we allocated to process the map's
+	 * children.
+	 */
+
+	*value_consumer = resolver->child_resolvers[0];
+	*value_user_data = value;
+	return 0;
+}
+
+static int
+try_map(avro_memoize_t *mem, avro_resolver_t **resolver,
+	avro_schema_t wschema, avro_schema_t rschema,
+	avro_schema_t root_rschema)
+{
+	/*
+	 * First verify that the reader is a map.
+	 */
+
+	if (!is_avro_map(rschema)) {
+		return 0;
+	}
+
+	/*
+	 * Map schemas have to have compatible value schemas to be
+	 * compatible themselves.  Try to create an avro_resolver_t to
+	 * check the compatibility.
+	 */
+
+	*resolver = avro_resolver_create(wschema, root_rschema);
+	avro_memoize_set(mem, wschema, root_rschema, *resolver);
+
+	avro_schema_t  wvalues = avro_schema_map_values(wschema);
+	avro_schema_t  rvalues = avro_schema_map_values(rschema);
+
+	avro_consumer_t  *value_consumer =
+	    avro_resolver_new_memoized(mem, wvalues, rvalues);
+	if (!value_consumer) {
+		avro_memoize_delete(mem, wschema, root_rschema);
+		avro_consumer_free(&(*resolver)->parent);
+		avro_prefix_error("Map values aren't compatible: ");
+		return EINVAL;
+	}
+
+	/*
+	 * The two schemas are compatible, so finish filling in the
+	 * resolver for the map.  Store the value schema's
+	 * resolver into the child_resolvers field.
+	 */
+
+	(*resolver)->num_children = 1;
+	(*resolver)->child_resolvers = (avro_consumer_t **) avro_calloc(1, sizeof(avro_consumer_t *));
+	(*resolver)->child_resolvers[0] = value_consumer;
+	(*resolver)->parent.map_start_block = avro_resolver_map_start_block;
+	(*resolver)->parent.map_element = avro_resolver_map_element;
+
+	return 0;
+}
+
+
+/*-----------------------------------------------------------------------
+ * records
+ */
+
+static int
+avro_resolver_record_start(avro_consumer_t *consumer,
+			   void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+	AVRO_UNUSED(dest);
+
+	debug("Starting record at %p", dest);
+
+	/*
+	 * TODO: Eventually, we'll fill in default values for the extra
+	 * reader fields here.
+	 */
+
+	return 0;
+}
+
+static int
+avro_resolver_record_field(avro_consumer_t *consumer,
+			   unsigned int index,
+			   avro_consumer_t **field_consumer,
+			   void **field_user_data,
+			   void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+	avro_datum_t  ud_dest = (avro_datum_t) user_data;
+	avro_datum_t  dest = avro_resolver_get_real_dest(resolver, ud_dest);
+
+	const char  *field_name =
+	    avro_schema_record_field_name(consumer->schema, index);
+
+	/*
+	 * Grab the resolver for this field of the writer record.  If
+	 * it's NULL, then this field doesn't exist in the reader
+	 * record and should be skipped.
+	 */
+
+	debug("Retrieving resolver for writer field %i (%s)",
+	      index, field_name);
+
+	if (!resolver->child_resolvers[index]) {
+		debug("Reader doesn't have field %s, skipping", field_name);
+		return 0;
+	}
+
+	/*
+	 * TODO: Once we can retrieve record fields by index (quickly),
+	 * use the index_mapping.
+	 */
+
+	avro_datum_t  field = NULL;
+	avro_record_get(dest, field_name, &field);
+
+	*field_consumer = resolver->child_resolvers[index];
+	*field_user_data = field;
+	return 0;
+}
+
+static int
+try_record(avro_memoize_t *mem, avro_resolver_t **resolver,
+	   avro_schema_t wschema, avro_schema_t rschema,
+	   avro_schema_t root_rschema)
+{
+	/*
+	 * First verify that the reader is also a record, and has the
+	 * same name as the writer.
+	 */
+
+	if (!is_avro_record(rschema)) {
+		return 0;
+	}
+
+	const char  *wname = avro_schema_name(wschema);
+	const char  *rname = avro_schema_name(rschema);
+
+	if (strcmp(wname, rname)) {
+		return 0;
+	}
+
+	/*
+	 * Categorize the fields in the record schemas.  Fields that are
+	 * only in the writer are ignored.  Fields that are only in the
+	 * reader raise a schema mismatch error, unless the field has a
+	 * default value.  Fields that are in both are resolved
+	 * recursively.
+	 *
+	 * The child_resolver array will contain an avro_resolver_t for
+	 * each field in the writer schema.  To build this array, we
+	 * loop through the fields of the reader schema.  If that field
+	 * is also in the writer schema, we resolve them recursively,
+	 * and store the resolver into the array.  If the field isn't in
+	 * the writer schema, we raise an error.  (TODO: Eventually,
+	 * we'll handle default values here.)  After this loop finishes,
+	 * any NULLs in the child_resolver array will represent fields
+	 * in the writer but not the reader; these fields will be
+	 * skipped when processing the input.
+	 */
+
+	*resolver = avro_resolver_create(wschema, root_rschema);
+	avro_memoize_set(mem, wschema, root_rschema, *resolver);
+
+	size_t  wfields = avro_schema_record_size(wschema);
+	size_t  rfields = avro_schema_record_size(rschema);
+
+	debug("Checking writer record schema %s", wname);
+
+	avro_consumer_t  **child_resolvers =
+	    (avro_consumer_t **) avro_calloc(wfields, sizeof(avro_consumer_t *));
+	int  *index_mapping = (int *) avro_calloc(wfields, sizeof(int));
+
+	unsigned int  ri;
+	for (ri = 0; ri < rfields; ri++) {
+		avro_schema_t  rfield =
+		    avro_schema_record_field_get_by_index(rschema, ri);
+		const char  *field_name =
+		    avro_schema_record_field_name(rschema, ri);
+
+		debug("Resolving reader record field %u (%s)", ri, field_name);
+
+		/*
+		 * See if this field is also in the writer schema.
+		 */
+
+		int  wi = avro_schema_record_field_get_index(wschema, field_name);
+
+		if (wi == -1) {
+			/*
+			 * This field isn't in the writer schema —
+			 * that's an error!  TODO: Handle default
+			 * values!
+			 */
+
+			debug("Field %s isn't in writer", field_name);
+			avro_set_error("Reader field %s doesn't appear in writer",
+				       field_name);
+			goto error;
+		}
+
+		/*
+		 * Try to recursively resolve the schemas for this
+		 * field.  If they're not compatible, that's an error.
+		 */
+
+		avro_schema_t  wfield =
+		    avro_schema_record_field_get_by_index(wschema, wi);
+		avro_consumer_t  *field_resolver =
+		    avro_resolver_new_memoized(mem, wfield, rfield);
+
+		if (!field_resolver) {
+			avro_prefix_error("Field %s isn't compatible: ", field_name);
+			goto error;
+		}
+
+		/*
+		 * Save the details for this field.
+		 */
+
+		debug("Found match for field %s (%u in reader, %d in writer)",
+		      field_name, ri, wi);
+		child_resolvers[wi] = field_resolver;
+		index_mapping[wi] = ri;
+	}
+
+	/*
+	 * We might not have found matches for all of the writer fields,
+	 * but that's okay — any extras will be ignored.
+	 */
+
+	(*resolver)->num_children = wfields;
+	(*resolver)->child_resolvers = child_resolvers;
+	(*resolver)->index_mapping = index_mapping;
+	(*resolver)->parent.record_start = avro_resolver_record_start;
+	(*resolver)->parent.record_field = avro_resolver_record_field;
+	return 0;
+
+error:
+	/*
+	 * Clean up any consumer we might have already created.
+	 */
+
+	avro_memoize_delete(mem, wschema, root_rschema);
+	avro_consumer_free(&(*resolver)->parent);
+
+	{
+		unsigned int  i;
+		for (i = 0; i < wfields; i++) {
+			if (child_resolvers[i]) {
+				avro_consumer_free(child_resolvers[i]);
+			}
+		}
+	}
+
+	avro_free(child_resolvers, wfields * sizeof(avro_consumer_t *));
+	avro_free(index_mapping, wfields * sizeof(int));
+	return EINVAL;
+}
+
+
+/*-----------------------------------------------------------------------
+ * union
+ */
+
+static int
+avro_resolver_union_branch(avro_consumer_t *consumer,
+			   unsigned int discriminant,
+			   avro_consumer_t **branch_consumer,
+			   void **branch_user_data,
+			   void *user_data)
+{
+	avro_resolver_t  *resolver = (avro_resolver_t *) consumer;
+
+	/*
+	 * Grab the resolver for this branch of the writer union.  If
+	 * it's NULL, then this branch is incompatible with the reader.
+	 */
+
+	debug("Retrieving resolver for writer branch %u", discriminant);
+
+	if (!resolver->child_resolvers[discriminant]) {
+		avro_set_error("Writer union branch %u is incompatible "
+			       "with reader schema \"%s\"",
+			       discriminant, avro_schema_type_name(resolver->rschema));
+		return EINVAL;
+	}
+
+	/*
+	 * Return the branch's resolver.
+	 */
+
+	*branch_consumer = resolver->child_resolvers[discriminant];
+	*branch_user_data = user_data;
+	return 0;
+}
+
+static avro_consumer_t *
+try_union(avro_memoize_t *mem, avro_schema_t wschema, avro_schema_t rschema)
+{
+	/*
+	 * For a writer union, we recursively try to resolve each branch
+	 * against the reader schema.  This will work correctly whether
+	 * or not the reader is also a union — if the reader is a union,
+	 * then we'll resolve each (non-union) writer branch against the
+	 * reader union, which will be checked in our calls to
+	 * check_simple_writer below.  The net result is that we might
+	 * end up trying every combination of writer and reader
+	 * branches, when looking for compatible schemas.
+	 *
+	 * Regardless of what the reader schema is, for each writer
+	 * branch, we stash away the recursive avro_resolver_t into the
+	 * child_resolvers array.  A NULL entry in this array means that
+	 * that branch isn't compatible with the reader.  This isn't an
+	 * immediate schema resolution error, since we allow
+	 * incompatible branches in the union type as long as that branch
+	 * never appears in the actual data.  We only return an error if
+	 * there are *no* branches that are compatible.
+	 */
+
+	size_t  num_branches = avro_schema_union_size(wschema);
+	debug("Checking %" PRIsz "-branch writer union schema", num_branches);
+
+	avro_resolver_t  *resolver = avro_resolver_create(wschema, rschema);
+	avro_memoize_set(mem, wschema, rschema, resolver);
+
+	avro_consumer_t  **child_resolvers =
+	    (avro_consumer_t **) avro_calloc(num_branches, sizeof(avro_consumer_t *));
+	int  some_branch_compatible = 0;
+
+	unsigned int  i;
+	for (i = 0; i < num_branches; i++) {
+		avro_schema_t  branch_schema =
+		    avro_schema_union_branch(wschema, i);
+
+		debug("Resolving writer union branch %u (%s)",
+		      i, avro_schema_type_name(branch_schema));
+
+		/*
+		 * Try to recursively resolve this branch of the writer
+		 * union.  Don't raise an error if this fails — it's
+		 * okay for some of the branches to not be compatible
+		 * with the reader, as long as those branches never
+		 * appear in the input.
+		 */
+
+		child_resolvers[i] =
+		    avro_resolver_new_memoized(mem, branch_schema, rschema);
+		if (child_resolvers[i]) {
+			debug("Found match for writer union branch %u", i);
+			some_branch_compatible = 1;
+		} else {
+			debug("No match for writer union branch %u", i);
+		}
+	}
+
+	/*
+	 * As long as there's at least one branch that's compatible with
+	 * the reader, we consider this schema resolution a
+	 * success.
+	 */
+
+	if (!some_branch_compatible) {
+		debug("No writer union branches match");
+		avro_set_error("No branches in the writer are compatible "
+			       "with reader schema %s",
+			       avro_schema_type_name(rschema));
+		goto error;
+	}
+
+	resolver->num_children = num_branches;
+	resolver->child_resolvers = child_resolvers;
+	resolver->parent.union_branch = avro_resolver_union_branch;
+	return &resolver->parent;
+
+error:
+	/*
+	 * Clean up any consumer we might have already created.
+	 */
+
+	avro_memoize_delete(mem, wschema, rschema);
+	avro_consumer_free(&resolver->parent);
+
+	for (i = 0; i < num_branches; i++) {
+		if (child_resolvers[i]) {
+			avro_consumer_free(child_resolvers[i]);
+		}
+	}
+
+	avro_free(child_resolvers, num_branches * sizeof(avro_consumer_t *));
+	return NULL;
+}
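+
+/*
+ * Worked example (editorial note, not in the original source): given a
+ * writer union ["null", "string"] and a reader schema of plain
+ * "string", the loop above resolves each writer branch on its own.
+ * The "null" branch finds no match, so its child_resolvers entry stays
+ * NULL; the "string" branch matches the reader.  Resolution succeeds
+ * because one branch is compatible, and an error is only raised later,
+ * in avro_resolver_union_branch, if a "null" value actually shows up
+ * in the data.
+ */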
+
+
+/*-----------------------------------------------------------------------
+ * schema type dispatcher
+ */
+
+static avro_consumer_t *
+avro_resolver_new_memoized(avro_memoize_t *mem,
+			   avro_schema_t wschema, avro_schema_t rschema)
+{
+	check_param(NULL, is_avro_schema(wschema), "writer schema");
+	check_param(NULL, is_avro_schema(rschema), "reader schema");
+
+	skip_links(wschema);
+	skip_links(rschema);
+
+	/*
+	 * First see if we've already matched these two schemas.  If so,
+	 * just return that resolver.
+	 */
+
+	avro_resolver_t  *saved = NULL;
+	if (avro_memoize_get(mem, wschema, rschema, (void **) &saved)) {
+		debug("Already resolved %s and %s",
+		      avro_schema_type_name(wschema),
+		      avro_schema_type_name(rschema));
+		return &saved->parent;
+	}
+
+	/*
+	 * Otherwise we have some work to do.
+	 */
+
+	switch (avro_typeof(wschema))
+	{
+		case AVRO_BOOLEAN:
+			check_simple_writer(mem, wschema, rschema, boolean);
+			return NULL;
+
+		case AVRO_BYTES:
+			check_simple_writer(mem, wschema, rschema, bytes);
+			return NULL;
+
+		case AVRO_DOUBLE:
+			check_simple_writer(mem, wschema, rschema, double);
+			return NULL;
+
+		case AVRO_FLOAT:
+			check_simple_writer(mem, wschema, rschema, float);
+			return NULL;
+
+		case AVRO_INT32:
+			check_simple_writer(mem, wschema, rschema, int);
+			return NULL;
+
+		case AVRO_INT64:
+			check_simple_writer(mem, wschema, rschema, long);
+			return NULL;
+
+		case AVRO_NULL:
+			check_simple_writer(mem, wschema, rschema, null);
+			return NULL;
+
+		case AVRO_STRING:
+			check_simple_writer(mem, wschema, rschema, string);
+			return NULL;
+
+		case AVRO_ARRAY:
+			check_simple_writer(mem, wschema, rschema, array);
+			return NULL;
+
+		case AVRO_ENUM:
+			check_simple_writer(mem, wschema, rschema, enum);
+			return NULL;
+
+		case AVRO_FIXED:
+			check_simple_writer(mem, wschema, rschema, fixed);
+			return NULL;
+
+		case AVRO_MAP:
+			check_simple_writer(mem, wschema, rschema, map);
+			return NULL;
+
+		case AVRO_RECORD:
+			check_simple_writer(mem, wschema, rschema, record);
+			return NULL;
+
+		case AVRO_UNION:
+			return try_union(mem, wschema, rschema);
+
+		default:
+			avro_set_error("Unknown schema type");
+			return NULL;
+	}
+
+	return NULL;
+}
+
+
+avro_consumer_t *
+avro_resolver_new(avro_schema_t wschema, avro_schema_t rschema)
+{
+	avro_memoize_t  mem;
+	avro_memoize_init(&mem);
+	avro_consumer_t  *result =
+	    avro_resolver_new_memoized(&mem, wschema, rschema);
+	avro_memoize_done(&mem);
+	return result;
+}
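+
+/*
+ * Minimal usage sketch (editorial note, not in the original source).
+ * The resolver is returned as an avro_consumer_t, so it is released
+ * with avro_consumer_free():
+ *
+ *     avro_consumer_t  *resolver =
+ *         avro_resolver_new(writer_schema, reader_schema);
+ *     if (resolver == NULL) {
+ *         // schemas are incompatible; details via avro_strerror()
+ *     } else {
+ *         // ... feed writer data through the consumer API ...
+ *         avro_consumer_free(resolver);
+ *     }
+ */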
diff --git a/lang/c/src/schema.c b/lang/c/src/schema.c
new file mode 100644
index 0000000..7ed6125
--- /dev/null
+++ b/lang/c/src/schema.c
@@ -0,0 +1,1702 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro/allocation.h"
+#include "avro/refcount.h"
+#include "avro/errors.h"
+#include "avro/io.h"
+#include "avro/legacy.h"
+#include "avro/schema.h"
+#include "avro_private.h"
+#include <avro/platform.h>
+#include <stdlib.h>
+#include <string.h>
+#include <errno.h>
+#include <ctype.h>
+
+#include "jansson.h"
+#include "st.h"
+#include "schema.h"
+
+#define DEFAULT_TABLE_SIZE 32
+
+/* forward declaration */
+static int
+avro_schema_to_json2(const avro_schema_t schema, avro_writer_t out,
+		     const char *parent_namespace);
+
+static void avro_schema_init(avro_schema_t schema, avro_type_t type)
+{
+	schema->type = type;
+	schema->class_type = AVRO_SCHEMA;
+	avro_refcount_set(&schema->refcount, 1);
+}
+
+static int is_avro_id(const char *name)
+{
+	size_t i, len;
+	if (name) {
+		len = strlen(name);
+		if (len < 1) {
+			return 0;
+		}
+		/*
+		 * starts with [A-Za-z_], subsequent [A-Za-z0-9_]
+		 */
+		for (i = 0; i < len; i++) {
+			if (!(isalpha(name[i])
+			      || name[i] == '_' || (i && isdigit(name[i])))) {
+				return 0;
+			}
+		}
+		return 1;
+	}
+	return 0;
+}
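+
+/*
+ * For example (editorial note): is_avro_id("foo_1") and
+ * is_avro_id("_tmp") return 1, while is_avro_id("1foo"),
+ * is_avro_id("") and is_avro_id(NULL) all return 0.
+ */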
+
+static int record_free_foreach(int i, struct avro_record_field_t *field,
+			       void *arg)
+{
+	AVRO_UNUSED(i);
+	AVRO_UNUSED(arg);
+
+	avro_str_free(field->name);
+	avro_schema_decref(field->type);
+	avro_freet(struct avro_record_field_t, field);
+	return ST_DELETE;
+}
+
+static int enum_free_foreach(int i, char *sym, void *arg)
+{
+	AVRO_UNUSED(i);
+	AVRO_UNUSED(arg);
+
+	avro_str_free(sym);
+	return ST_DELETE;
+}
+
+static int union_free_foreach(int i, avro_schema_t schema, void *arg)
+{
+	AVRO_UNUSED(i);
+	AVRO_UNUSED(arg);
+
+	avro_schema_decref(schema);
+	return ST_DELETE;
+}
+
+static void avro_schema_free(avro_schema_t schema)
+{
+	if (is_avro_schema(schema)) {
+		switch (avro_typeof(schema)) {
+		case AVRO_STRING:
+		case AVRO_BYTES:
+		case AVRO_INT32:
+		case AVRO_INT64:
+		case AVRO_FLOAT:
+		case AVRO_DOUBLE:
+		case AVRO_BOOLEAN:
+		case AVRO_NULL:
+			/* no memory allocated for primitives */
+			return;
+
+		case AVRO_RECORD:{
+				struct avro_record_schema_t *record;
+				record = avro_schema_to_record(schema);
+				avro_str_free(record->name);
+				if (record->space) {
+					avro_str_free(record->space);
+				}
+				st_foreach(record->fields, HASH_FUNCTION_CAST record_free_foreach,
+					   0);
+				st_free_table(record->fields_byname);
+				st_free_table(record->fields);
+				avro_freet(struct avro_record_schema_t, record);
+			}
+			break;
+
+		case AVRO_ENUM:{
+				struct avro_enum_schema_t *enump;
+				enump = avro_schema_to_enum(schema);
+				avro_str_free(enump->name);
+				st_foreach(enump->symbols, HASH_FUNCTION_CAST enum_free_foreach,
+					   0);
+				st_free_table(enump->symbols);
+				st_free_table(enump->symbols_byname);
+				avro_freet(struct avro_enum_schema_t, enump);
+			}
+			break;
+
+		case AVRO_FIXED:{
+				struct avro_fixed_schema_t *fixed;
+				fixed = avro_schema_to_fixed(schema);
+				avro_str_free((char *) fixed->name);
+				avro_freet(struct avro_fixed_schema_t, fixed);
+			}
+			break;
+
+		case AVRO_MAP:{
+				struct avro_map_schema_t *map;
+				map = avro_schema_to_map(schema);
+				avro_schema_decref(map->values);
+				avro_freet(struct avro_map_schema_t, map);
+			}
+			break;
+
+		case AVRO_ARRAY:{
+				struct avro_array_schema_t *array;
+				array = avro_schema_to_array(schema);
+				avro_schema_decref(array->items);
+				avro_freet(struct avro_array_schema_t, array);
+			}
+			break;
+		case AVRO_UNION:{
+				struct avro_union_schema_t *unionp;
+				unionp = avro_schema_to_union(schema);
+				st_foreach(unionp->branches, HASH_FUNCTION_CAST union_free_foreach,
+					   0);
+				st_free_table(unionp->branches);
+				st_free_table(unionp->branches_byname);
+				avro_freet(struct avro_union_schema_t, unionp);
+			}
+			break;
+
+		case AVRO_LINK:{
+				struct avro_link_schema_t *link;
+				link = avro_schema_to_link(schema);
+				avro_schema_decref(link->to);
+				avro_freet(struct avro_link_schema_t, link);
+			}
+			break;
+		}
+	}
+}
+
+avro_schema_t avro_schema_incref(avro_schema_t schema)
+{
+	if (schema) {
+		avro_refcount_inc(&schema->refcount);
+	}
+	return schema;
+}
+
+int
+avro_schema_decref(avro_schema_t schema)
+{
+	if (schema && avro_refcount_dec(&schema->refcount)) {
+		avro_schema_free(schema);
+		return 0;
+	}
+	return 1;
+}
+
+avro_schema_t avro_schema_string(void)
+{
+	static struct avro_obj_t obj = {
+		AVRO_STRING,
+		AVRO_SCHEMA,
+		1
+	};
+	return avro_schema_incref(&obj);
+}
+
+avro_schema_t avro_schema_bytes(void)
+{
+	static struct avro_obj_t obj = {
+		AVRO_BYTES,
+		AVRO_SCHEMA,
+		1
+	};
+	return avro_schema_incref(&obj);
+}
+
+avro_schema_t avro_schema_int(void)
+{
+	static struct avro_obj_t obj = {
+		AVRO_INT32,
+		AVRO_SCHEMA,
+		1
+	};
+	return avro_schema_incref(&obj);
+}
+
+avro_schema_t avro_schema_long(void)
+{
+	static struct avro_obj_t obj = {
+		AVRO_INT64,
+		AVRO_SCHEMA,
+		1
+	};
+	return avro_schema_incref(&obj);
+}
+
+avro_schema_t avro_schema_float(void)
+{
+	static struct avro_obj_t obj = {
+		AVRO_FLOAT,
+		AVRO_SCHEMA,
+		1
+	};
+	return avro_schema_incref(&obj);
+}
+
+avro_schema_t avro_schema_double(void)
+{
+	static struct avro_obj_t obj = {
+		AVRO_DOUBLE,
+		AVRO_SCHEMA,
+		1
+	};
+	return avro_schema_incref(&obj);
+}
+
+avro_schema_t avro_schema_boolean(void)
+{
+	static struct avro_obj_t obj = {
+		AVRO_BOOLEAN,
+		AVRO_SCHEMA,
+		1
+	};
+	return avro_schema_incref(&obj);
+}
+
+avro_schema_t avro_schema_null(void)
+{
+	static struct avro_obj_t obj = {
+		AVRO_NULL,
+		AVRO_SCHEMA,
+		1
+	};
+	return avro_schema_incref(&obj);
+}
+
+avro_schema_t avro_schema_fixed(const char *name, const int64_t size)
+{
+	if (!is_avro_id(name)) {
+		avro_set_error("Invalid Avro identifier");
+		return NULL;
+	}
+
+	struct avro_fixed_schema_t *fixed =
+	    (struct avro_fixed_schema_t *) avro_new(struct avro_fixed_schema_t);
+	if (!fixed) {
+		avro_set_error("Cannot allocate new fixed schema");
+		return NULL;
+	}
+	fixed->name = avro_strdup(name);
+	fixed->size = size;
+	avro_schema_init(&fixed->obj, AVRO_FIXED);
+	return &fixed->obj;
+}
+
+int64_t avro_schema_fixed_size(const avro_schema_t fixed)
+{
+	return avro_schema_to_fixed(fixed)->size;
+}
+
+avro_schema_t avro_schema_union(void)
+{
+	struct avro_union_schema_t *schema =
+	    (struct avro_union_schema_t *) avro_new(struct avro_union_schema_t);
+	if (!schema) {
+		avro_set_error("Cannot allocate new union schema");
+		return NULL;
+	}
+	schema->branches = st_init_numtable_with_size(DEFAULT_TABLE_SIZE);
+	if (!schema->branches) {
+		avro_set_error("Cannot allocate new union schema");
+		avro_freet(struct avro_union_schema_t, schema);
+		return NULL;
+	}
+	schema->branches_byname =
+	    st_init_strtable_with_size(DEFAULT_TABLE_SIZE);
+	if (!schema->branches_byname) {
+		avro_set_error("Cannot allocate new union schema");
+		st_free_table(schema->branches);
+		avro_freet(struct avro_union_schema_t, schema);
+		return NULL;
+	}
+
+	avro_schema_init(&schema->obj, AVRO_UNION);
+	return &schema->obj;
+}
+
+int
+avro_schema_union_append(const avro_schema_t union_schema,
+			 const avro_schema_t schema)
+{
+	check_param(EINVAL, is_avro_schema(union_schema), "union schema");
+	check_param(EINVAL, is_avro_union(union_schema), "union schema");
+	check_param(EINVAL, is_avro_schema(schema), "schema");
+
+	struct avro_union_schema_t *unionp = avro_schema_to_union(union_schema);
+	int  new_index = unionp->branches->num_entries;
+	st_insert(unionp->branches, new_index, (st_data_t) schema);
+	const char *name = avro_schema_type_name(schema);
+	st_insert(unionp->branches_byname, (st_data_t) name,
+		  (st_data_t) new_index);
+	avro_schema_incref(schema);
+	return 0;
+}
+
+size_t avro_schema_union_size(const avro_schema_t union_schema)
+{
+	check_param(EINVAL, is_avro_schema(union_schema), "union schema");
+	check_param(EINVAL, is_avro_union(union_schema), "union schema");
+	struct avro_union_schema_t *unionp = avro_schema_to_union(union_schema);
+	return unionp->branches->num_entries;
+}
+
+avro_schema_t avro_schema_union_branch(avro_schema_t unionp,
+				       int branch_index)
+{
+	union {
+		st_data_t data;
+		avro_schema_t schema;
+	} val;
+	if (st_lookup(avro_schema_to_union(unionp)->branches,
+		      branch_index, &val.data)) {
+		return val.schema;
+	} else {
+		avro_set_error("No union branch for discriminant %d",
+			       branch_index);
+		return NULL;
+	}
+}
+
+avro_schema_t avro_schema_union_branch_by_name
+(avro_schema_t unionp, int *branch_index, const char *name)
+{
+	union {
+		st_data_t data;
+		int  branch_index;
+	} val;
+
+	if (!st_lookup(avro_schema_to_union(unionp)->branches_byname,
+		       (st_data_t) name, &val.data)) {
+		avro_set_error("No union branch named %s", name);
+		return NULL;
+	}
+
+	if (branch_index != NULL) {
+		*branch_index = val.branch_index;
+	}
+	return avro_schema_union_branch(unionp, val.branch_index);
+}
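+
+/*
+ * Usage sketch (editorial note, not in the original source):
+ *
+ *     avro_schema_t  u = avro_schema_union();
+ *     avro_schema_union_append(u, avro_schema_null());
+ *     avro_schema_union_append(u, avro_schema_string());
+ *
+ *     int  branch;
+ *     avro_schema_t  s =
+ *         avro_schema_union_branch_by_name(u, &branch, "string");
+ *     // now branch == 1 and s is the "string" branch schema
+ *     avro_schema_decref(u);
+ */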
+
+avro_schema_t avro_schema_array(const avro_schema_t items)
+{
+	struct avro_array_schema_t *array =
+	    (struct avro_array_schema_t *) avro_new(struct avro_array_schema_t);
+	if (!array) {
+		avro_set_error("Cannot allocate new array schema");
+		return NULL;
+	}
+	array->items = avro_schema_incref(items);
+	avro_schema_init(&array->obj, AVRO_ARRAY);
+	return &array->obj;
+}
+
+avro_schema_t avro_schema_array_items(avro_schema_t array)
+{
+	return avro_schema_to_array(array)->items;
+}
+
+avro_schema_t avro_schema_map(const avro_schema_t values)
+{
+	struct avro_map_schema_t *map =
+	    (struct avro_map_schema_t *) avro_new(struct avro_map_schema_t);
+	if (!map) {
+		avro_set_error("Cannot allocate new map schema");
+		return NULL;
+	}
+	map->values = avro_schema_incref(values);
+	avro_schema_init(&map->obj, AVRO_MAP);
+	return &map->obj;
+}
+
+avro_schema_t avro_schema_map_values(avro_schema_t map)
+{
+	return avro_schema_to_map(map)->values;
+}
+
+avro_schema_t avro_schema_enum(const char *name)
+{
+	if (!is_avro_id(name)) {
+		avro_set_error("Invalid Avro identifier");
+		return NULL;
+	}
+
+	struct avro_enum_schema_t *enump = (struct avro_enum_schema_t *) avro_new(struct avro_enum_schema_t);
+	if (!enump) {
+		avro_set_error("Cannot allocate new enum schema");
+		return NULL;
+	}
+	enump->name = avro_strdup(name);
+	if (!enump->name) {
+		avro_set_error("Cannot allocate new enum schema");
+		avro_freet(struct avro_enum_schema_t, enump);
+		return NULL;
+	}
+	enump->symbols = st_init_numtable_with_size(DEFAULT_TABLE_SIZE);
+	if (!enump->symbols) {
+		avro_set_error("Cannot allocate new enum schema");
+		avro_str_free(enump->name);
+		avro_freet(struct avro_enum_schema_t, enump);
+		return NULL;
+	}
+	enump->symbols_byname = st_init_strtable_with_size(DEFAULT_TABLE_SIZE);
+	if (!enump->symbols_byname) {
+		avro_set_error("Cannot allocate new enum schema");
+		st_free_table(enump->symbols);
+		avro_str_free(enump->name);
+		avro_freet(struct avro_enum_schema_t, enump);
+		return NULL;
+	}
+	avro_schema_init(&enump->obj, AVRO_ENUM);
+	return &enump->obj;
+}
+
+const char *avro_schema_enum_get(const avro_schema_t enump,
+				 int index)
+{
+	union {
+		st_data_t data;
+		char *sym;
+	} val;
+	st_lookup(avro_schema_to_enum(enump)->symbols, index, &val.data);
+	return val.sym;
+}
+
+int avro_schema_enum_get_by_name(const avro_schema_t enump,
+				 const char *symbol_name)
+{
+	union {
+		st_data_t data;
+		long idx;
+	} val;
+
+	if (st_lookup(avro_schema_to_enum(enump)->symbols_byname,
+		      (st_data_t) symbol_name, &val.data)) {
+		return val.idx;
+	} else {
+		avro_set_error("No enum symbol named %s", symbol_name);
+		return -1;
+	}
+}
+
+int
+avro_schema_enum_symbol_append(const avro_schema_t enum_schema,
+			       const char *symbol)
+{
+	check_param(EINVAL, is_avro_schema(enum_schema), "enum schema");
+	check_param(EINVAL, is_avro_enum(enum_schema), "enum schema");
+	check_param(EINVAL, symbol, "symbol");
+
+	char *sym;
+	long idx;
+	struct avro_enum_schema_t *enump = avro_schema_to_enum(enum_schema);
+	sym = avro_strdup(symbol);
+	if (!sym) {
+		avro_set_error("Cannot create copy of symbol name");
+		return ENOMEM;
+	}
+	idx = enump->symbols->num_entries;
+	st_insert(enump->symbols, (st_data_t) idx, (st_data_t) sym);
+	st_insert(enump->symbols_byname, (st_data_t) sym, (st_data_t) idx);
+	return 0;
+}
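+
+/*
+ * Usage sketch (editorial note, not in the original source):
+ *
+ *     avro_schema_t  suit = avro_schema_enum("Suit");
+ *     avro_schema_enum_symbol_append(suit, "SPADES");
+ *     avro_schema_enum_symbol_append(suit, "HEARTS");
+ *     // avro_schema_enum_get(suit, 0) returns "SPADES";
+ *     // avro_schema_enum_get_by_name(suit, "HEARTS") returns 1
+ *     avro_schema_decref(suit);
+ */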
+
+int
+avro_schema_record_field_append(const avro_schema_t record_schema,
+				const char *field_name,
+				const avro_schema_t field_schema)
+{
+	check_param(EINVAL, is_avro_schema(record_schema), "record schema");
+	check_param(EINVAL, is_avro_record(record_schema), "record schema");
+	check_param(EINVAL, field_name, "field name");
+	check_param(EINVAL, is_avro_schema(field_schema), "field schema");
+
+	if (!is_avro_id(field_name)) {
+		avro_set_error("Invalid Avro identifier");
+		return EINVAL;
+	}
+
+	if (record_schema == field_schema) {
+		avro_set_error("Cannot create a circular schema");
+		return EINVAL;
+	}
+
+	struct avro_record_schema_t *record = avro_schema_to_record(record_schema);
+	struct avro_record_field_t *new_field = (struct avro_record_field_t *) avro_new(struct avro_record_field_t);
+	if (!new_field) {
+		avro_set_error("Cannot allocate new record field");
+		return ENOMEM;
+	}
+	new_field->index = record->fields->num_entries;
+	new_field->name = avro_strdup(field_name);
+	new_field->type = avro_schema_incref(field_schema);
+	st_insert(record->fields, record->fields->num_entries,
+		  (st_data_t) new_field);
+	st_insert(record->fields_byname, (st_data_t) new_field->name,
+		  (st_data_t) new_field);
+	return 0;
+}
+
+avro_schema_t avro_schema_record(const char *name, const char *space)
+{
+	if (!is_avro_id(name)) {
+		avro_set_error("Invalid Avro identifier");
+		return NULL;
+	}
+
+	struct avro_record_schema_t *record = (struct avro_record_schema_t *) avro_new(struct avro_record_schema_t);
+	if (!record) {
+		avro_set_error("Cannot allocate new record schema");
+		return NULL;
+	}
+	record->name = avro_strdup(name);
+	if (!record->name) {
+		avro_set_error("Cannot allocate new record schema");
+		avro_freet(struct avro_record_schema_t, record);
+		return NULL;
+	}
+	record->space = space ? avro_strdup(space) : NULL;
+	if (space && !record->space) {
+		avro_set_error("Cannot allocate new record schema");
+		avro_str_free(record->name);
+		avro_freet(struct avro_record_schema_t, record);
+		return NULL;
+	}
+	record->fields = st_init_numtable_with_size(DEFAULT_TABLE_SIZE);
+	if (!record->fields) {
+		avro_set_error("Cannot allocate new record schema");
+		if (record->space) {
+			avro_str_free(record->space);
+		}
+		avro_str_free(record->name);
+		avro_freet(struct avro_record_schema_t, record);
+		return NULL;
+	}
+	record->fields_byname = st_init_strtable_with_size(DEFAULT_TABLE_SIZE);
+	if (!record->fields_byname) {
+		avro_set_error("Cannot allocate new record schema");
+		st_free_table(record->fields);
+		if (record->space) {
+			avro_str_free(record->space);
+		}
+		avro_str_free(record->name);
+		avro_freet(struct avro_record_schema_t, record);
+		return NULL;
+	}
+
+	avro_schema_init(&record->obj, AVRO_RECORD);
+	return &record->obj;
+}
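+
+/*
+ * Usage sketch (editorial note, not in the original source):
+ *
+ *     avro_schema_t  person = avro_schema_record("Person", "com.example");
+ *     avro_schema_t  field = avro_schema_string();
+ *     avro_schema_record_field_append(person, "name", field);
+ *     avro_schema_decref(field);   // the record holds its own reference
+ *     avro_schema_decref(person);
+ */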
+
+size_t avro_schema_record_size(const avro_schema_t record)
+{
+	return avro_schema_to_record(record)->fields->num_entries;
+}
+
+avro_schema_t avro_schema_record_field_get(const avro_schema_t
+					   record, const char *field_name)
+{
+	union {
+		st_data_t data;
+		struct avro_record_field_t *field;
+	} val;
+	st_lookup(avro_schema_to_record(record)->fields_byname,
+		  (st_data_t) field_name, &val.data);
+	return val.field->type;
+}
+
+int avro_schema_record_field_get_index(const avro_schema_t schema,
+				       const char *field_name)
+{
+	union {
+		st_data_t data;
+		struct avro_record_field_t *field;
+	} val;
+	if (st_lookup(avro_schema_to_record(schema)->fields_byname,
+		      (st_data_t) field_name, &val.data)) {
+		return val.field->index;
+	}
+
+	avro_set_error("No field named %s in record", field_name);
+	return -1;
+}
+
+const char *avro_schema_record_field_name(const avro_schema_t schema, int index)
+{
+	union {
+		st_data_t data;
+		struct avro_record_field_t *field;
+	} val;
+	st_lookup(avro_schema_to_record(schema)->fields, index, &val.data);
+	return val.field->name;
+}
+
+avro_schema_t avro_schema_record_field_get_by_index
+(const avro_schema_t record, int index)
+{
+	union {
+		st_data_t data;
+		struct avro_record_field_t *field;
+	} val;
+	st_lookup(avro_schema_to_record(record)->fields, index, &val.data);
+	return val.field->type;
+}
+
+avro_schema_t avro_schema_link(avro_schema_t to)
+{
+	if (!is_avro_named_type(to)) {
+		avro_set_error("Can only link to named types");
+		return NULL;
+	}
+
+	struct avro_link_schema_t *link = (struct avro_link_schema_t *) avro_new(struct avro_link_schema_t);
+	if (!link) {
+		avro_set_error("Cannot allocate new link schema");
+		return NULL;
+	}
+	link->to = avro_schema_incref(to);
+	avro_schema_init(&link->obj, AVRO_LINK);
+	return &link->obj;
+}
+
+avro_schema_t avro_schema_link_target(avro_schema_t schema)
+{
+	check_param(NULL, is_avro_schema(schema), "schema");
+	check_param(NULL, is_avro_link(schema), "schema");
+
+	struct avro_link_schema_t *link = avro_schema_to_link(schema);
+	return link->to;
+}
+
+static const char *
+qualify_name(const char *name, const char *namespace)
+{
+	char *full_name;
+	if (namespace != NULL && strchr(name, '.') == NULL) {
+		full_name = avro_str_alloc(strlen(name) + strlen(namespace) + 2);
+		sprintf(full_name, "%s.%s", namespace, name);
+	} else {
+		full_name = avro_strdup(name);
+	}
+	return full_name;
+}
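+
+/*
+ * For example (editorial note): qualify_name("Person", "com.example")
+ * returns "com.example.Person", while a name that already contains a
+ * dot, or a NULL namespace, yields a plain copy of the name.  The
+ * caller owns the returned string and frees it with avro_str_free().
+ */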
+
+static int
+save_named_schemas(const char *name, const char *namespace, avro_schema_t schema, st_table *st)
+{
+	const char *full_name = qualify_name(name, namespace);
+	int rval = st_insert(st, (st_data_t) full_name, (st_data_t) schema);
+	return rval;
+}
+
+static avro_schema_t
+find_named_schemas(const char *name, const char *namespace, st_table *st)
+{
+	union {
+		avro_schema_t schema;
+		st_data_t data;
+	} val;
+	const char *full_name = qualify_name(name, namespace);
+	int rval = st_lookup(st, (st_data_t) full_name, &(val.data));
+	avro_str_free((char *)full_name);
+	if (rval) {
+		return val.schema;
+	}
+	avro_set_error("No schema type named %s", name);
+	return NULL;
+}
+
+static int
+avro_type_from_json_t(json_t *json, avro_type_t *type,
+		      st_table *named_schemas, avro_schema_t *named_type,
+		      const char *namespace)
+{
+	json_t *json_type;
+	const char *type_str;
+
+	if (json_is_array(json)) {
+		*type = AVRO_UNION;
+		return 0;
+	} else if (json_is_object(json)) {
+		json_type = json_object_get(json, "type");
+	} else {
+		json_type = json;
+	}
+	if (!json_is_string(json_type)) {
+		avro_set_error("\"type\" field must be a string");
+		return EINVAL;
+	}
+	type_str = json_string_value(json_type);
+	if (!type_str) {
+		avro_set_error("\"type\" field must be a string");
+		return EINVAL;
+	}
+	/*
+	 * TODO: gperf/re2c this 
+	 */
+	if (strcmp(type_str, "string") == 0) {
+		*type = AVRO_STRING;
+	} else if (strcmp(type_str, "bytes") == 0) {
+		*type = AVRO_BYTES;
+	} else if (strcmp(type_str, "int") == 0) {
+		*type = AVRO_INT32;
+	} else if (strcmp(type_str, "long") == 0) {
+		*type = AVRO_INT64;
+	} else if (strcmp(type_str, "float") == 0) {
+		*type = AVRO_FLOAT;
+	} else if (strcmp(type_str, "double") == 0) {
+		*type = AVRO_DOUBLE;
+	} else if (strcmp(type_str, "boolean") == 0) {
+		*type = AVRO_BOOLEAN;
+	} else if (strcmp(type_str, "null") == 0) {
+		*type = AVRO_NULL;
+	} else if (strcmp(type_str, "record") == 0) {
+		*type = AVRO_RECORD;
+	} else if (strcmp(type_str, "enum") == 0) {
+		*type = AVRO_ENUM;
+	} else if (strcmp(type_str, "array") == 0) {
+		*type = AVRO_ARRAY;
+	} else if (strcmp(type_str, "map") == 0) {
+		*type = AVRO_MAP;
+	} else if (strcmp(type_str, "fixed") == 0) {
+		*type = AVRO_FIXED;
+	} else if ((*named_type = find_named_schemas(type_str, namespace, named_schemas))) {
+		*type = AVRO_LINK;
+	} else {
+		avro_set_error("Unknown Avro \"type\": %s", type_str);
+		return EINVAL;
+	}
+	return 0;
+}
+
+static int
+avro_schema_from_json_t(json_t *json, avro_schema_t *schema,
+			st_table *named_schemas, const char *parent_namespace)
+{
+#ifdef _WIN32
+ #pragma message("#warning: Bug: '0' is not of type avro_type_t.")
+#else
+ #warning "Bug: '0' is not of type avro_type_t."
+#endif
+  /* We should really have an "AVRO_INVALID" type in
+   * avro_type_t. Suppress warning below in which we set type to 0.
+   */
+	avro_type_t type = (avro_type_t) 0;
+	unsigned int i;
+	avro_schema_t named_type = NULL;
+
+	if (avro_type_from_json_t(json, &type, named_schemas, &named_type, parent_namespace)) {
+		return EINVAL;
+	}
+
+	switch (type) {
+	case AVRO_LINK:
+		*schema = avro_schema_link(named_type);
+		break;
+
+	case AVRO_STRING:
+		*schema = avro_schema_string();
+		break;
+
+	case AVRO_BYTES:
+		*schema = avro_schema_bytes();
+		break;
+
+	case AVRO_INT32:
+		*schema = avro_schema_int();
+		break;
+
+	case AVRO_INT64:
+		*schema = avro_schema_long();
+		break;
+
+	case AVRO_FLOAT:
+		*schema = avro_schema_float();
+		break;
+
+	case AVRO_DOUBLE:
+		*schema = avro_schema_double();
+		break;
+
+	case AVRO_BOOLEAN:
+		*schema = avro_schema_boolean();
+		break;
+
+	case AVRO_NULL:
+		*schema = avro_schema_null();
+		break;
+
+	case AVRO_RECORD:
+		{
+			json_t *json_name = json_object_get(json, "name");
+			json_t *json_namespace =
+			    json_object_get(json, "namespace");
+			json_t *json_fields = json_object_get(json, "fields");
+			unsigned int num_fields;
+			const char *record_name;
+			const char *record_namespace;
+
+			if (!json_is_string(json_name)) {
+				avro_set_error("Record type must have a \"name\"");
+				return EINVAL;
+			}
+			if (!json_is_array(json_fields)) {
+				avro_set_error("Record type must have \"fields\"");
+				return EINVAL;
+			}
+			num_fields = json_array_size(json_fields);
+			if (num_fields == 0) {
+				avro_set_error("Record type must have at least one field");
+				return EINVAL;
+			}
+			record_name = json_string_value(json_name);
+			if (!record_name) {
+				avro_set_error("Record type must have a \"name\"");
+				return EINVAL;
+			}
+			if (json_is_string(json_namespace)) {
+				record_namespace =
+				    json_string_value(json_namespace);
+			} else {
+				record_namespace = parent_namespace;
+			}
+			*schema =
+			    avro_schema_record(record_name, record_namespace);
+			if (save_named_schemas(record_name, record_namespace, *schema, named_schemas)) {
+				avro_set_error("Cannot save record schema");
+				return ENOMEM;
+			}
+			for (i = 0; i < num_fields; i++) {
+				json_t *json_field =
+				    json_array_get(json_fields, i);
+				json_t *json_field_name;
+				json_t *json_field_type;
+				avro_schema_t json_field_type_schema;
+				int field_rval;
+
+				if (!json_is_object(json_field)) {
+					avro_set_error("Record field %d must be an object", i);
+					avro_schema_decref(*schema);
+					return EINVAL;
+				}
+				json_field_name =
+				    json_object_get(json_field, "name");
+				if (!json_field_name) {
+					avro_set_error("Record field %d must have a \"name\"", i);
+					avro_schema_decref(*schema);
+					return EINVAL;
+				}
+				json_field_type =
+				    json_object_get(json_field, "type");
+				if (!json_field_type) {
+					avro_set_error("Record field %d must have a \"type\"", i);
+					avro_schema_decref(*schema);
+					return EINVAL;
+				}
+				field_rval =
+				    avro_schema_from_json_t(json_field_type,
+							    &json_field_type_schema,
+							    named_schemas, record_namespace);
+				if (field_rval) {
+					avro_schema_decref(*schema);
+					return field_rval;
+				}
+				field_rval =
+				    avro_schema_record_field_append(*schema,
+								    json_string_value
+								    (json_field_name),
+								    json_field_type_schema);
+				avro_schema_decref(json_field_type_schema);
+				if (field_rval != 0) {
+					avro_schema_decref(*schema);
+					return field_rval;
+				}
+			}
+		}
+		break;
+
+	case AVRO_ENUM:
+		{
+			json_t *json_name = json_object_get(json, "name");
+			json_t *json_symbols = json_object_get(json, "symbols");
+			const char *name;
+			unsigned int num_symbols;
+
+			if (!json_is_string(json_name)) {
+				avro_set_error("Enum type must have a \"name\"");
+				return EINVAL;
+			}
+			if (!json_is_array(json_symbols)) {
+				avro_set_error("Enum type must have \"symbols\"");
+				return EINVAL;
+			}
+
+			name = json_string_value(json_name);
+			if (!name) {
+				avro_set_error("Enum type must have a \"name\"");
+				return EINVAL;
+			}
+			num_symbols = json_array_size(json_symbols);
+			if (num_symbols == 0) {
+				avro_set_error("Enum type must have at least one symbol");
+				return EINVAL;
+			}
+			*schema = avro_schema_enum(name);
+			if (save_named_schemas(name, parent_namespace, *schema, named_schemas)) {
+				avro_set_error("Cannot save enum schema");
+				return ENOMEM;
+			}
+			for (i = 0; i < num_symbols; i++) {
+				int enum_rval;
+				json_t *json_symbol =
+				    json_array_get(json_symbols, i);
+				const char *symbol;
+				if (!json_is_string(json_symbol)) {
+					avro_set_error("Enum symbol %d must be a string", i);
+					avro_schema_decref(*schema);
+					return EINVAL;
+				}
+				symbol = json_string_value(json_symbol);
+				enum_rval =
+				    avro_schema_enum_symbol_append(*schema,
+								   symbol);
+				if (enum_rval != 0) {
+					avro_schema_decref(*schema);
+					return enum_rval;
+				}
+			}
+		}
+		break;
+
+	case AVRO_ARRAY:
+		{
+			int items_rval;
+			json_t *json_items = json_object_get(json, "items");
+			avro_schema_t items_schema;
+			if (!json_items) {
+				avro_set_error("Array type must have \"items\"");
+				return EINVAL;
+			}
+			items_rval =
+			    avro_schema_from_json_t(json_items, &items_schema,
+						    named_schemas, parent_namespace);
+			if (items_rval) {
+				return items_rval;
+			}
+			*schema = avro_schema_array(items_schema);
+			avro_schema_decref(items_schema);
+		}
+		break;
+
+	case AVRO_MAP:
+		{
+			int values_rval;
+			json_t *json_values = json_object_get(json, "values");
+			avro_schema_t values_schema;
+
+			if (!json_values) {
+				avro_set_error("Map type must have \"values\"");
+				return EINVAL;
+			}
+			values_rval =
+			    avro_schema_from_json_t(json_values, &values_schema,
+						    named_schemas, parent_namespace);
+			if (values_rval) {
+				return values_rval;
+			}
+			*schema = avro_schema_map(values_schema);
+			avro_schema_decref(values_schema);
+		}
+		break;
+
+	case AVRO_UNION:
+		{
+			unsigned int num_schemas = json_array_size(json);
+			avro_schema_t s;
+			if (num_schemas == 0) {
+				avro_set_error("Union type must have at least one branch");
+				return EINVAL;
+			}
+			*schema = avro_schema_union();
+			for (i = 0; i < num_schemas; i++) {
+				int schema_rval;
+				json_t *schema_json = json_array_get(json, i);
+				if (!schema_json) {
+					avro_set_error("Cannot retrieve branch JSON");
+					avro_schema_decref(*schema);
+					return EINVAL;
+				}
+				schema_rval =
+				    avro_schema_from_json_t(schema_json, &s,
+							    named_schemas, parent_namespace);
+				if (schema_rval != 0) {
+					avro_schema_decref(*schema);
+					return schema_rval;
+				}
+				schema_rval =
+				    avro_schema_union_append(*schema, s);
+				avro_schema_decref(s);
+				if (schema_rval != 0) {
+					avro_schema_decref(*schema);
+					return schema_rval;
+				}
+			}
+		}
+		break;
+
+	case AVRO_FIXED:
+		{
+			json_t *json_size = json_object_get(json, "size");
+			json_t *json_name = json_object_get(json, "name");
+			json_int_t size;
+			const char *name;
+			if (!json_is_integer(json_size)) {
+				avro_set_error("Fixed type must have a \"size\"");
+				return EINVAL;
+			}
+			if (!json_is_string(json_name)) {
+				avro_set_error("Fixed type must have a \"name\"");
+				return EINVAL;
+			}
+			size = json_integer_value(json_size);
+			name = json_string_value(json_name);
+			*schema = avro_schema_fixed(name, (int64_t) size);
+			if (save_named_schemas(name, parent_namespace, *schema, named_schemas)) {
+				avro_set_error("Cannot save fixed schema");
+				return ENOMEM;
+			}
+		}
+		break;
+
+	default:
+		avro_set_error("Unknown schema type");
+		return EINVAL;
+	}
+	return 0;
+}
+
+static int named_schema_free_foreach(char *full_name, st_data_t value, st_data_t arg)
+{
+	AVRO_UNUSED(value);
+	AVRO_UNUSED(arg);
+
+	avro_str_free(full_name);
+	return ST_DELETE;
+}
+
+static int
+avro_schema_from_json_root(json_t *root, avro_schema_t *schema)
+{
+	int  rval;
+	st_table *named_schemas;
+
+	named_schemas = st_init_strtable_with_size(DEFAULT_TABLE_SIZE);
+	if (!named_schemas) {
+		avro_set_error("Cannot allocate named schema map");
+		json_decref(root);
+		return ENOMEM;
+	}
+
+	/* json_dumpf(root, stderr, 0); */
+	rval = avro_schema_from_json_t(root, schema, named_schemas, NULL);
+	json_decref(root);
+	st_foreach(named_schemas, HASH_FUNCTION_CAST named_schema_free_foreach, 0);
+	st_free_table(named_schemas);
+	return rval;
+}
+
+int
+avro_schema_from_json(const char *jsontext, const int32_t len,
+		      avro_schema_t *schema, avro_schema_error_t *e)
+{
+	check_param(EINVAL, jsontext, "JSON text");
+	check_param(EINVAL, schema, "schema pointer");
+
+	json_t  *root;
+	json_error_t  json_error;
+
+	AVRO_UNUSED(len);
+	AVRO_UNUSED(e);
+
+	root = json_loads(jsontext, 0, &json_error);
+	if (!root) {
+		avro_set_error("Error parsing JSON: %s", json_error.text);
+		return EINVAL;
+	}
+
+	return avro_schema_from_json_root(root, schema);
+}
+
+int
+avro_schema_from_json_length(const char *jsontext, size_t length,
+			     avro_schema_t *schema)
+{
+	check_param(EINVAL, jsontext, "JSON text");
+	check_param(EINVAL, schema, "schema pointer");
+
+	json_t  *root;
+	json_error_t  json_error;
+
+	root = json_loadb(jsontext, length, 0, &json_error);
+	if (!root) {
+		avro_set_error("Error parsing JSON: %s", json_error.text);
+		return EINVAL;
+	}
+
+	return avro_schema_from_json_root(root, schema);
+}
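+
+/*
+ * Usage sketch (editorial note, not in the original source):
+ *
+ *     const char  *json =
+ *         "{\"type\":\"record\",\"name\":\"Point\",\"fields\":["
+ *         "{\"name\":\"x\",\"type\":\"int\"},"
+ *         "{\"name\":\"y\",\"type\":\"int\"}]}";
+ *     avro_schema_t  schema;
+ *     if (avro_schema_from_json_length(json, strlen(json), &schema) == 0) {
+ *         // ... use the schema ...
+ *         avro_schema_decref(schema);
+ *     }
+ */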
+
+avro_schema_t avro_schema_copy(avro_schema_t schema)
+{
+	long i;
+	avro_schema_t new_schema = NULL;
+	if (!schema) {
+		return NULL;
+	}
+	switch (avro_typeof(schema)) {
+	case AVRO_STRING:
+	case AVRO_BYTES:
+	case AVRO_INT32:
+	case AVRO_INT64:
+	case AVRO_FLOAT:
+	case AVRO_DOUBLE:
+	case AVRO_BOOLEAN:
+	case AVRO_NULL:
+		/*
+		 * No need to copy primitives since they're static 
+		 */
+		new_schema = schema;
+		break;
+
+	case AVRO_RECORD:
+		{
+			struct avro_record_schema_t *record_schema =
+			    avro_schema_to_record(schema);
+			new_schema =
+			    avro_schema_record(record_schema->name,
+					       record_schema->space);
+			for (i = 0; i < record_schema->fields->num_entries; i++) {
+				union {
+					st_data_t data;
+					struct avro_record_field_t *field;
+				} val;
+				st_lookup(record_schema->fields, i, &val.data);
+				avro_schema_t type_copy =
+				    avro_schema_copy(val.field->type);
+				avro_schema_record_field_append(new_schema,
+								val.field->name,
+								type_copy);
+			}
+		}
+		break;
+
+	case AVRO_ENUM:
+		{
+			struct avro_enum_schema_t *enum_schema =
+			    avro_schema_to_enum(schema);
+			new_schema = avro_schema_enum(enum_schema->name);
+			for (i = 0; i < enum_schema->symbols->num_entries; i++) {
+				union {
+					st_data_t data;
+					char *sym;
+				} val;
+				st_lookup(enum_schema->symbols, i, &val.data);
+				avro_schema_enum_symbol_append(new_schema,
+							       val.sym);
+			}
+		}
+		break;
+
+	case AVRO_FIXED:
+		{
+			struct avro_fixed_schema_t *fixed_schema =
+			    avro_schema_to_fixed(schema);
+			new_schema =
+			    avro_schema_fixed(fixed_schema->name,
+					      fixed_schema->size);
+		}
+		break;
+
+	case AVRO_MAP:
+		{
+			struct avro_map_schema_t *map_schema =
+			    avro_schema_to_map(schema);
+			avro_schema_t values_copy =
+			    avro_schema_copy(map_schema->values);
+			if (!values_copy) {
+				return NULL;
+			}
+			new_schema = avro_schema_map(values_copy);
+		}
+		break;
+
+	case AVRO_ARRAY:
+		{
+			struct avro_array_schema_t *array_schema =
+			    avro_schema_to_array(schema);
+			avro_schema_t items_copy =
+			    avro_schema_copy(array_schema->items);
+			if (!items_copy) {
+				return NULL;
+			}
+			new_schema = avro_schema_array(items_copy);
+		}
+		break;
+
+	case AVRO_UNION:
+		{
+			struct avro_union_schema_t *union_schema =
+			    avro_schema_to_union(schema);
+
+			new_schema = avro_schema_union();
+			for (i = 0; i < union_schema->branches->num_entries;
+			     i++) {
+				avro_schema_t schema_copy;
+				union {
+					st_data_t data;
+					avro_schema_t schema;
+				} val;
+				st_lookup(union_schema->branches, i, &val.data);
+				schema_copy = avro_schema_copy(val.schema);
+				if (avro_schema_union_append
+				    (new_schema, schema_copy)) {
+					avro_schema_decref(new_schema);
+					return NULL;
+				}
+			}
+		}
+		break;
+
+	case AVRO_LINK:
+		{
+			struct avro_link_schema_t *link_schema =
+			    avro_schema_to_link(schema);
+			/*
+			 * TODO: use an avro_schema_copy of the link target
+			 * instead of pointing to the same reference
+			 */
+			avro_schema_incref(link_schema->to);
+			new_schema = avro_schema_link(link_schema->to);
+		}
+		break;
+
+	default:
+		return NULL;
+	}
+	return new_schema;
+}
+
+avro_schema_t avro_schema_get_subschema(const avro_schema_t schema,
+					const char *name)
+{
+	if (is_avro_record(schema)) {
+		const struct avro_record_schema_t *rschema =
+		    avro_schema_to_record(schema);
+		union {
+			st_data_t data;
+			struct avro_record_field_t *field;
+		} field;
+
+		if (st_lookup(rschema->fields_byname,
+			      (st_data_t) name, &field.data)) {
+			return field.field->type;
+		}
+
+		avro_set_error("No record field named %s", name);
+		return NULL;
+	} else if (is_avro_union(schema)) {
+		const struct avro_union_schema_t *uschema =
+		    avro_schema_to_union(schema);
+		long i;
+
+		for (i = 0; i < uschema->branches->num_entries; i++) {
+			union {
+				st_data_t data;
+				avro_schema_t schema;
+			} val;
+			st_lookup(uschema->branches, i, &val.data);
+			if (strcmp(avro_schema_type_name(val.schema),
+				   name) == 0) {
+				return val.schema;
+			}
+		}
+
+		avro_set_error("No union branch named %s", name);
+		return NULL;
+	} else if (is_avro_array(schema)) {
+		if (strcmp(name, "[]") == 0) {
+			const struct avro_array_schema_t *aschema =
+			    avro_schema_to_array(schema);
+			return aschema->items;
+		}
+
+		avro_set_error("Array subschema must be called \"[]\"");
+		return NULL;
+	} else if (is_avro_map(schema)) {
+		if (strcmp(name, "{}") == 0) {
+			const struct avro_map_schema_t *mschema =
+			    avro_schema_to_map(schema);
+			return mschema->values;
+		}
+
+		avro_set_error("Map subschema must be called \"{}\"");
+		return NULL;
+	}
+
+	avro_set_error("Can only retrieve subschemas from record, union, array, or map");
+	return NULL;
+}
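+
+/*
+ * For example (editorial note): on a record schema,
+ * avro_schema_get_subschema(schema, "name") returns the schema of the
+ * field called "name"; for arrays and maps the only valid names are
+ * the literal strings "[]" and "{}".  The returned schema is borrowed,
+ * not a fresh reference.
+ */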
+
+const char *avro_schema_name(const avro_schema_t schema)
+{
+	if (is_avro_record(schema)) {
+		return (avro_schema_to_record(schema))->name;
+	} else if (is_avro_enum(schema)) {
+		return (avro_schema_to_enum(schema))->name;
+	} else if (is_avro_fixed(schema)) {
+		return (avro_schema_to_fixed(schema))->name;
+	}
+	avro_set_error("Schema has no name");
+	return NULL;
+}
+
+const char *avro_schema_type_name(const avro_schema_t schema)
+{
+	if (is_avro_record(schema)) {
+		return (avro_schema_to_record(schema))->name;
+	} else if (is_avro_enum(schema)) {
+		return (avro_schema_to_enum(schema))->name;
+	} else if (is_avro_fixed(schema)) {
+		return (avro_schema_to_fixed(schema))->name;
+	} else if (is_avro_union(schema)) {
+		return "union";
+	} else if (is_avro_array(schema)) {
+		return "array";
+	} else if (is_avro_map(schema)) {
+		return "map";
+	} else if (is_avro_int32(schema)) {
+		return "int";
+	} else if (is_avro_int64(schema)) {
+		return "long";
+	} else if (is_avro_float(schema)) {
+		return "float";
+	} else if (is_avro_double(schema)) {
+		return "double";
+	} else if (is_avro_boolean(schema)) {
+		return "boolean";
+	} else if (is_avro_null(schema)) {
+		return "null";
+	} else if (is_avro_string(schema)) {
+		return "string";
+	} else if (is_avro_bytes(schema)) {
+		return "bytes";
+	} else if (is_avro_link(schema)) {
+		avro_schema_t  target = avro_schema_link_target(schema);
+		return avro_schema_type_name(target);
+	}
+	avro_set_error("Unknown schema type");
+	return NULL;
+}
+
+avro_datum_t avro_datum_from_schema(const avro_schema_t schema)
+{
+	check_param(NULL, is_avro_schema(schema), "schema");
+
+	switch (avro_typeof(schema)) {
+		case AVRO_STRING:
+			return avro_givestring("", NULL);
+
+		case AVRO_BYTES:
+			return avro_givebytes("", 0, NULL);
+
+		case AVRO_INT32:
+			return avro_int32(0);
+
+		case AVRO_INT64:
+			return avro_int64(0);
+
+		case AVRO_FLOAT:
+			return avro_float(0);
+
+		case AVRO_DOUBLE:
+			return avro_double(0);
+
+		case AVRO_BOOLEAN:
+			return avro_boolean(0);
+
+		case AVRO_NULL:
+			return avro_null();
+
+		case AVRO_RECORD:
+			{
+				const struct avro_record_schema_t *record_schema =
+				    avro_schema_to_record(schema);
+
+				avro_datum_t  rec = avro_record(schema);
+
+				int  i;
+				for (i = 0; i < record_schema->fields->num_entries; i++) {
+					union {
+						st_data_t data;
+						struct avro_record_field_t *field;
+					} val;
+					st_lookup(record_schema->fields, i, &val.data);
+
+					avro_datum_t  field =
+					    avro_datum_from_schema(val.field->type);
+					avro_record_set(rec, val.field->name, field);
+					avro_datum_decref(field);
+				}
+
+				return rec;
+			}
+
+		case AVRO_ENUM:
+			return avro_enum(schema, 0);
+
+		case AVRO_FIXED:
+			{
+				const struct avro_fixed_schema_t *fixed_schema =
+				    avro_schema_to_fixed(schema);
+				return avro_givefixed(schema, NULL, fixed_schema->size, NULL);
+			}
+
+		case AVRO_MAP:
+			return avro_map(schema);
+
+		case AVRO_ARRAY:
+			return avro_array(schema);
+
+		case AVRO_UNION:
+			return avro_union(schema, -1, NULL);
+
+		case AVRO_LINK:
+			{
+				const struct avro_link_schema_t *link_schema =
+				    avro_schema_to_link(schema);
+				return avro_datum_from_schema(link_schema->to);
+			}
+
+		default:
+			avro_set_error("Unknown schema type");
+			return NULL;
+	}
+}
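+
+/*
+ * Usage sketch (editorial note, not in the original source): build a
+ * datum pre-populated with zero/empty defaults for a schema:
+ *
+ *     avro_datum_t  datum = avro_datum_from_schema(schema);
+ *     if (datum != NULL) {
+ *         // ... fill in real values, then ...
+ *         avro_datum_decref(datum);
+ *     }
+ */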
+
+/* simple helper for writing strings */
+static int avro_write_str(avro_writer_t out, const char *str)
+{
+	return avro_write(out, (char *)str, strlen(str));
+}
+
+static int write_field(avro_writer_t out, const struct avro_record_field_t *field,
+		       const char *parent_namespace)
+{
+	int rval;
+	check(rval, avro_write_str(out, "{\"name\":\""));
+	check(rval, avro_write_str(out, field->name));
+	check(rval, avro_write_str(out, "\",\"type\":"));
+	check(rval, avro_schema_to_json2(field->type, out, parent_namespace));
+	return avro_write_str(out, "}");
+}
+
+static int write_record(avro_writer_t out, const struct avro_record_schema_t *record,
+			const char *parent_namespace)
+{
+	int rval;
+	long i;
+
+	check(rval, avro_write_str(out, "{\"type\":\"record\",\"name\":\""));
+	check(rval, avro_write_str(out, record->name));
+	check(rval, avro_write_str(out, "\","));
+	if (nullstrcmp(record->space, parent_namespace)) {
+		check(rval, avro_write_str(out, "\"namespace\":\""));
+		check(rval, avro_write_str(out, record->space));
+		check(rval, avro_write_str(out, "\","));
+	}
+	check(rval, avro_write_str(out, "\"fields\":["));
+	for (i = 0; i < record->fields->num_entries; i++) {
+		union {
+			st_data_t data;
+			struct avro_record_field_t *field;
+		} val;
+		st_lookup(record->fields, i, &val.data);
+		if (i) {
+			check(rval, avro_write_str(out, ","));
+		}
+		check(rval, write_field(out, val.field, record->space));
+	}
+	return avro_write_str(out, "]}");
+}
+
+static int write_enum(avro_writer_t out, const struct avro_enum_schema_t *enump)
+{
+	int rval;
+	long i;
+	check(rval, avro_write_str(out, "{\"type\":\"enum\",\"name\":\""));
+	check(rval, avro_write_str(out, enump->name));
+	check(rval, avro_write_str(out, "\",\"symbols\":["));
+
+	for (i = 0; i < enump->symbols->num_entries; i++) {
+		union {
+			st_data_t data;
+			char *sym;
+		} val;
+		st_lookup(enump->symbols, i, &val.data);
+		if (i) {
+			check(rval, avro_write_str(out, ","));
+		}
+		check(rval, avro_write_str(out, "\""));
+		check(rval, avro_write_str(out, val.sym));
+		check(rval, avro_write_str(out, "\""));
+	}
+	return avro_write_str(out, "]}");
+}
+
+static int write_fixed(avro_writer_t out, const struct avro_fixed_schema_t *fixed)
+{
+	int rval;
+	char size[32];	/* large enough for any int64 in decimal */
+	check(rval, avro_write_str(out, "{\"type\":\"fixed\",\"name\":\""));
+	check(rval, avro_write_str(out, fixed->name));
+	check(rval, avro_write_str(out, "\",\"size\":"));
+	snprintf(size, sizeof(size), "%" PRId64, fixed->size);
+	check(rval, avro_write_str(out, size));
+	return avro_write_str(out, "}");
+}
+
+static int write_map(avro_writer_t out, const struct avro_map_schema_t *map,
+		     const char *parent_namespace)
+{
+	int rval;
+	check(rval, avro_write_str(out, "{\"type\":\"map\",\"values\":"));
+	check(rval, avro_schema_to_json2(map->values, out, parent_namespace));
+	return avro_write_str(out, "}");
+}
+
+static int write_array(avro_writer_t out, const struct avro_array_schema_t *array,
+		       const char *parent_namespace)
+{
+	int rval;
+	check(rval, avro_write_str(out, "{\"type\":\"array\",\"items\":"));
+	check(rval, avro_schema_to_json2(array->items, out, parent_namespace));
+	return avro_write_str(out, "}");
+}
+
+static int write_union(avro_writer_t out, const struct avro_union_schema_t *unionp,
+		       const char *parent_namespace)
+{
+	int rval;
+	long i;
+	check(rval, avro_write_str(out, "["));
+
+	for (i = 0; i < unionp->branches->num_entries; i++) {
+		union {
+			st_data_t data;
+			avro_schema_t schema;
+		} val;
+		st_lookup(unionp->branches, i, &val.data);
+		if (i) {
+			check(rval, avro_write_str(out, ","));
+		}
+		check(rval, avro_schema_to_json2(val.schema, out, parent_namespace));
+	}
+	return avro_write_str(out, "]");
+}
+
+static int write_link(avro_writer_t out, const struct avro_link_schema_t *link,
+		      const char *parent_namespace)
+{
+	int rval;
+	check(rval, avro_write_str(out, "\""));
+	if (is_avro_record(link->to)) {
+		const char *namespace = avro_schema_to_record(link->to)->space;
+		if (nullstrcmp(namespace, parent_namespace)) {
+			check(rval, avro_write_str(out, namespace));
+			check(rval, avro_write_str(out, "."));
+		}
+	}
+	check(rval, avro_write_str(out, avro_schema_name(link->to)));
+	return avro_write_str(out, "\"");
+}
+
+static int
+avro_schema_to_json2(const avro_schema_t schema, avro_writer_t out,
+		     const char *parent_namespace)
+{
+	check_param(EINVAL, is_avro_schema(schema), "schema");
+	check_param(EINVAL, out, "writer");
+
+	int rval;
+
+	if (is_avro_primitive(schema)) {
+		check(rval, avro_write_str(out, "{\"type\":\""));
+	}
+
+	switch (avro_typeof(schema)) {
+	case AVRO_STRING:
+		check(rval, avro_write_str(out, "string"));
+		break;
+	case AVRO_BYTES:
+		check(rval, avro_write_str(out, "bytes"));
+		break;
+	case AVRO_INT32:
+		check(rval, avro_write_str(out, "int"));
+		break;
+	case AVRO_INT64:
+		check(rval, avro_write_str(out, "long"));
+		break;
+	case AVRO_FLOAT:
+		check(rval, avro_write_str(out, "float"));
+		break;
+	case AVRO_DOUBLE:
+		check(rval, avro_write_str(out, "double"));
+		break;
+	case AVRO_BOOLEAN:
+		check(rval, avro_write_str(out, "boolean"));
+		break;
+	case AVRO_NULL:
+		check(rval, avro_write_str(out, "null"));
+		break;
+	case AVRO_RECORD:
+		return write_record(out, avro_schema_to_record(schema), parent_namespace);
+	case AVRO_ENUM:
+		return write_enum(out, avro_schema_to_enum(schema));
+	case AVRO_FIXED:
+		return write_fixed(out, avro_schema_to_fixed(schema));
+	case AVRO_MAP:
+		return write_map(out, avro_schema_to_map(schema), parent_namespace);
+	case AVRO_ARRAY:
+		return write_array(out, avro_schema_to_array(schema), parent_namespace);
+	case AVRO_UNION:
+		return write_union(out, avro_schema_to_union(schema), parent_namespace);
+	case AVRO_LINK:
+		return write_link(out, avro_schema_to_link(schema), parent_namespace);
+	}
+
+	if (is_avro_primitive(schema)) {
+		return avro_write_str(out, "\"}");
+	}
+	avro_set_error("Unknown schema type");
+	return EINVAL;
+}
+
+int avro_schema_to_json(const avro_schema_t schema, avro_writer_t out)
+{
+	return avro_schema_to_json2(schema, out, NULL);
+}
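+
+/*
+ * Usage sketch (not compiled in): avro_schema_to_json writes to any
+ * avro_writer_t, so a fixed memory writer (avro_writer_memory from
+ * avro/io.h) is enough to obtain the JSON text of a schema.
+ */
+#if 0
+static void example_schema_to_json(avro_schema_t schema)
+{
+	char  buf[4096];
+	avro_writer_t  out = avro_writer_memory(buf, sizeof(buf));
+	if (avro_schema_to_json(schema, out) == 0) {
+		/* buf holds e.g. {"type":"string"}; the output is not
+		 * NUL-terminated, so use avro_writer_tell(out) for its
+		 * length. */
+	}
+	avro_writer_free(out);
+}
+#endif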
diff --git a/lang/c/src/schema.h b/lang/c/src/schema.h
new file mode 100644
index 0000000..efeeac3
--- /dev/null
+++ b/lang/c/src/schema.h
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+#ifndef AVRO_SCHEMA_PRIV_H
+#define AVRO_SCHEMA_PRIV_H
+
+#include <avro/platform.h>
+#include "avro/basics.h"
+#include "avro/schema.h"
+#include "avro_private.h"
+#include "st.h"
+
+struct avro_record_field_t {
+	int index;
+	char *name;
+	avro_schema_t type;
+	/*
+	 * TODO: default values 
+	 */
+};
+
+struct avro_record_schema_t {
+	struct avro_obj_t obj;
+	char *name;
+	char *space;
+	st_table *fields;
+	st_table *fields_byname;
+};
+
+struct avro_enum_schema_t {
+	struct avro_obj_t obj;
+	char *name;
+	st_table *symbols;
+	st_table *symbols_byname;
+};
+
+struct avro_array_schema_t {
+	struct avro_obj_t obj;
+	avro_schema_t items;
+};
+
+struct avro_map_schema_t {
+	struct avro_obj_t obj;
+	avro_schema_t values;
+};
+
+struct avro_union_schema_t {
+	struct avro_obj_t obj;
+	st_table *branches;
+	st_table *branches_byname;
+};
+
+struct avro_fixed_schema_t {
+	struct avro_obj_t obj;
+	const char *name;
+	int64_t size;
+};
+
+struct avro_link_schema_t {
+	struct avro_obj_t obj;
+	avro_schema_t to;
+};
+
+#define avro_schema_to_record(schema_)  (container_of(schema_, struct avro_record_schema_t, obj))
+#define avro_schema_to_enum(schema_)    (container_of(schema_, struct avro_enum_schema_t, obj))
+#define avro_schema_to_array(schema_)   (container_of(schema_, struct avro_array_schema_t, obj))
+#define avro_schema_to_map(schema_)     (container_of(schema_, struct avro_map_schema_t, obj))
+#define avro_schema_to_union(schema_)   (container_of(schema_, struct avro_union_schema_t, obj))
+#define avro_schema_to_fixed(schema_)   (container_of(schema_, struct avro_fixed_schema_t, obj))
+#define avro_schema_to_link(schema_)    (container_of(schema_, struct avro_link_schema_t, obj))
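+
+/*
+ * These downcasts use the container_of pattern: given the embedded obj
+ * member, they recover the enclosing schema struct.  For example, once
+ * is_avro_record(schema) holds:
+ *
+ *	struct avro_record_schema_t *rec = avro_schema_to_record(schema);
+ *	// rec->name, rec->space and rec->fields are now accessible
+ */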
+
+#endif
diff --git a/lang/c/src/schema_equal.c b/lang/c/src/schema_equal.c
new file mode 100644
index 0000000..287d218
--- /dev/null
+++ b/lang/c/src/schema_equal.c
@@ -0,0 +1,194 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro_private.h"
+#include "schema.h"
+#include <string.h>
+
+static int
+schema_record_equal(struct avro_record_schema_t *a,
+		    struct avro_record_schema_t *b)
+{
+	long i;
+	if (strcmp(a->name, b->name)) {
+		/*
+		 * They have different names 
+		 */
+		return 0;
+	}
+	if (nullstrcmp(a->space, b->space)) {
+		return 0;
+	}
+	for (i = 0; i < a->fields->num_entries; i++) {
+		union {
+			st_data_t data;
+			struct avro_record_field_t *f;
+		} fa, fb;
+		st_lookup(a->fields, i, &fa.data);
+		if (!st_lookup(b->fields, i, &fb.data)) {
+			return 0;
+		}
+		if (strcmp(fa.f->name, fb.f->name)) {
+			/*
+			 * They have fields with different names 
+			 */
+			return 0;
+		}
+		if (!avro_schema_equal(fa.f->type, fb.f->type)) {
+			/*
+			 * They have fields with different schemas 
+			 */
+			return 0;
+		}
+	}
+	return 1;
+}
+
+static int
+schema_enum_equal(struct avro_enum_schema_t *a, struct avro_enum_schema_t *b)
+{
+	long i;
+	if (strcmp(a->name, b->name)) {
+		/*
+		 * They have different names 
+		 */
+		return 0;
+	}
+	for (i = 0; i < a->symbols->num_entries; i++) {
+		union {
+			st_data_t data;
+			char *sym;
+		} sa, sb;
+		st_lookup(a->symbols, i, &sa.data);
+		if (!st_lookup(b->symbols, i, &sb.data)) {
+			return 0;
+		}
+		if (strcmp(sa.sym, sb.sym) != 0) {
+			/*
+			 * They have different symbol names 
+			 */
+			return 0;
+		}
+	}
+	return 1;
+}
+
+static int
+schema_fixed_equal(struct avro_fixed_schema_t *a, struct avro_fixed_schema_t *b)
+{
+	if (strcmp(a->name, b->name)) {
+		/*
+		 * They have different names 
+		 */
+		return 0;
+	}
+	return (a->size == b->size);
+}
+
+static int
+schema_map_equal(struct avro_map_schema_t *a, struct avro_map_schema_t *b)
+{
+	return avro_schema_equal(a->values, b->values);
+}
+
+static int
+schema_array_equal(struct avro_array_schema_t *a, struct avro_array_schema_t *b)
+{
+	return avro_schema_equal(a->items, b->items);
+}
+
+static int
+schema_union_equal(struct avro_union_schema_t *a, struct avro_union_schema_t *b)
+{
+	long i;
+	for (i = 0; i < a->branches->num_entries; i++) {
+		union {
+			st_data_t data;
+			avro_schema_t schema;
+		} ab, bb;
+		st_lookup(a->branches, i, &ab.data);
+		if (!st_lookup(b->branches, i, &bb.data)) {
+			return 0;
+		}
+		if (!avro_schema_equal(ab.schema, bb.schema)) {
+			/*
+			 * They don't have the same schema types 
+			 */
+			return 0;
+		}
+	}
+	return 1;
+}
+
+static int
+schema_link_equal(struct avro_link_schema_t *a, struct avro_link_schema_t *b)
+{
+	/*
+	 * NOTE: links can only be used for named types. They are used in
+	 * recursive schemas so we just check the name of the schema pointed
+	 * to instead of a deep check.  Otherwise, we recurse forever... 
+	 */
+	if (is_avro_record(a->to)) {
+		if (!is_avro_record(b->to)) {
+			return 0;
+		}
+		if (nullstrcmp(avro_schema_to_record(a->to)->space,
+			       avro_schema_to_record(b->to)->space)) {
+			return 0;
+		}
+	}
+	return (strcmp(avro_schema_name(a->to), avro_schema_name(b->to)) == 0);
+}
+
+int avro_schema_equal(avro_schema_t a, avro_schema_t b)
+{
+	if (!a || !b) {
+		/*
+		 * This is an error; return false rather than segfault.
+		 */
+		return 0;
+	} else if (a == b) {
+		/*
+		 * an object is equal to itself 
+		 */
+		return 1;
+	} else if (avro_typeof(a) != avro_typeof(b)) {
+		return 0;
+	} else if (is_avro_record(a)) {
+		return schema_record_equal(avro_schema_to_record(a),
+					   avro_schema_to_record(b));
+	} else if (is_avro_enum(a)) {
+		return schema_enum_equal(avro_schema_to_enum(a),
+					 avro_schema_to_enum(b));
+	} else if (is_avro_fixed(a)) {
+		return schema_fixed_equal(avro_schema_to_fixed(a),
+					  avro_schema_to_fixed(b));
+	} else if (is_avro_map(a)) {
+		return schema_map_equal(avro_schema_to_map(a),
+					avro_schema_to_map(b));
+	} else if (is_avro_array(a)) {
+		return schema_array_equal(avro_schema_to_array(a),
+					  avro_schema_to_array(b));
+	} else if (is_avro_union(a)) {
+		return schema_union_equal(avro_schema_to_union(a),
+					  avro_schema_to_union(b));
+	} else if (is_avro_link(a)) {
+		return schema_link_equal(avro_schema_to_link(a),
+					 avro_schema_to_link(b));
+	}
+	return 1;
+}
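+
+/*
+ * Usage sketch (not compiled in): equality is structural, not pointer
+ * identity, so two independently built schemas compare equal.
+ */
+#if 0
+static void example_schema_equal(void)
+{
+	avro_schema_t  a = avro_schema_int();
+	avro_schema_t  b = avro_schema_int();
+	/* a != b as pointers, yet avro_schema_equal(a, b) returns 1 */
+	avro_schema_decref(a);
+	avro_schema_decref(b);
+}
+#endif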
diff --git a/lang/c/src/schema_specific.c b/lang/c/src/schema_specific.c
new file mode 100644
index 0000000..4ae3e31
--- /dev/null
+++ b/lang/c/src/schema_specific.c
@@ -0,0 +1,232 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro_private.h"
+#include <stdio.h>
+#include <errno.h>
+#include <string.h>
+
+#include "schema.h"
+
+enum specific_state {
+	START_STATE,
+};
+typedef enum specific_state specific_state;
+
+struct specific_ctx {
+	FILE *header;
+	FILE *source;
+	int depth;
+	specific_state state;
+	const char *name;	/* name of the member currently being emitted */
+};
+typedef struct specific_ctx specific_ctx;
+
+static void indent(specific_ctx * ctx, FILE * fp)
+{
+	int i;
+	for (i = 0; i < ctx->depth; i++) {
+		fprintf(fp, "   ");
+	}
+}
+
+static int avro_schema_to_source(avro_schema_t schema, specific_ctx * ctx)
+{
+	AVRO_UNUSED(ctx);
+
+	switch (schema->type) {
+	default:
+		return 0;
+	}
+	return EINVAL;
+}
+
+static int avro_schema_to_header(avro_schema_t schema, specific_ctx * ctx)
+{
+	size_t i;
+	FILE *fp = ctx->header;
+
+	indent(ctx, fp);
+	ctx->depth++;
+
+	if (is_avro_primitive(schema) && !ctx->name) {
+		return 0;
+	}
+
+	switch (schema->type) {
+	case AVRO_STRING:
+		fprintf(fp, "char *%s;\n", ctx->name);
+		break;
+
+	case AVRO_BYTES:
+		fprintf(fp, "struct %s { size_t %s_len; char *%s_val } %s;\n",
+			ctx->name, ctx->name, ctx->name, ctx->name);
+		break;
+
+	case AVRO_INT32:
+		fprintf(fp, "int %s;\n", ctx->name);
+		break;
+
+	case AVRO_INT64:
+		fprintf(fp, "long %s;\n", ctx->name);
+		break;
+
+	case AVRO_FLOAT:
+		fprintf(fp, "float %s;\n", ctx->name);
+		break;
+
+	case AVRO_DOUBLE:
+		fprintf(fp, "double %s;\n", ctx->name);
+		break;
+
+	case AVRO_BOOLEAN:
+		fprintf(fp, "int %s; /* boolean */\n", ctx->name);
+		break;
+
+	case AVRO_NULL:
+		break;
+
+	case AVRO_RECORD:
+		{
+			struct avro_record_schema_t *record_schema =
+			    avro_schema_to_record(schema);
+			fprintf(fp, "struct %s {\n", record_schema->name);
+			for (i = 0; i < (size_t) record_schema->fields->num_entries; i++) {
+				union {
+					st_data_t data;
+					struct avro_record_field_t *field;
+				} val;
+				st_lookup(record_schema->fields, i, &val.data);
+				ctx->name = val.field->name;
+				avro_schema_to_header(val.field->type, ctx);
+				ctx->name = NULL;
+			}
+			fprintf(fp, "};\n");
+			fprintf(fp, "typedef struct %s %s;\n\n",
+				record_schema->name, record_schema->name);
+		}
+		break;
+
+	case AVRO_ENUM:
+		{
+			struct avro_enum_schema_t *enum_schema =
+			    avro_schema_to_enum(schema);
+			fprintf(fp, "enum %s {\n", enum_schema->name);
+			ctx->depth++;
+			for (i = 0; i < (size_t) enum_schema->symbols->num_entries; i++) {
+				union {
+					st_data_t data;
+					char *sym;
+				} val;
+				st_lookup(enum_schema->symbols, i, &val.data);
+				indent(ctx, fp);
+				fprintf(fp, "%s = %ld,\n", val.sym, (long) i);
+			}
+			ctx->depth--;
+			fprintf(fp, "};\n");
+			fprintf(fp, "typedef enum %s %s;\n\n",
+				enum_schema->name, enum_schema->name);
+		}
+		break;
+
+	case AVRO_FIXED:
+		{
+			struct avro_fixed_schema_t *fixed_schema =
+			    avro_schema_to_fixed(schema);
+			fprintf(fp, "char %s[%" PRId64 "];\n", fixed_schema->name,
+				fixed_schema->size);
+		}
+		break;
+
+	case AVRO_MAP:
+		{
+
+		}
+		break;
+
+	case AVRO_ARRAY:
+		{
+			struct avro_array_schema_t *array_schema =
+			    avro_schema_to_array(schema);
+			if (!ctx->name) {
+				break;
+			}
+			fprintf(fp, "struct { size_t %s_len; ", ctx->name);
+			if (is_avro_named_type(array_schema->items)) {
+				fprintf(fp, "%s",
+					avro_schema_name(array_schema->items));
+			} else if (is_avro_link(array_schema->items)) {
+				struct avro_link_schema_t *link_schema =
+				    avro_schema_to_link(array_schema->items);
+				fprintf(fp, "struct %s",
+					avro_schema_name(link_schema->to));
+			} else {
+				avro_schema_to_header(array_schema->items, ctx);
+			}
+			fprintf(fp, " *%s_val;} %s;\n", ctx->name, ctx->name);
+		}
+		break;
+	case AVRO_UNION:
+		{
+			struct avro_union_schema_t *union_schema =
+			    avro_schema_to_union(schema);
+			if (!ctx->name) {
+				break;
+			}
+			fprintf(fp, "union {\n");
+			for (i = 0; i < (size_t) union_schema->branches->num_entries; i++) {
+				union {
+					st_data_t data;
+					avro_schema_t schema;
+				} val;
+				st_lookup(union_schema->branches, i, &val.data);
+				avro_schema_to_header(val.schema, ctx);
+			}
+			fprintf(fp, "} %s_u;\n", ctx->name);
+		}
+		break;
+	case AVRO_LINK:
+		break;
+	default:
+		return EINVAL;
+	}
+
+	ctx->depth--;
+	return 0;
+}
+
+int avro_schema_to_specific(avro_schema_t schema, const char *prefix)
+{
+	specific_ctx ctx;
+	char buf[1024];
+	int rval;
+
+	if (!schema) {
+		return EINVAL;
+	}
+
+	memset(&ctx, 0, sizeof(ctx));
+	snprintf(buf, sizeof(buf), "%s_avro.h", prefix);
+	ctx.header = fopen(buf, "w");
+	if (!ctx.header) {
+		return errno;
+	}
+	snprintf(buf, sizeof(buf), "%s_avro.c", prefix);
+	ctx.source = fopen(buf, "w");
+	if (!ctx.source) {
+		fclose(ctx.header);
+		return errno;
+	}
+
+	rval = avro_schema_to_header(schema, &ctx);
+	if (rval) {
+		goto out;
+	}
+
+	rval = avro_schema_to_source(schema, &ctx);
+
+      out:
+	fclose(ctx.header);
+	fclose(ctx.source);
+	return rval;
+}
diff --git a/lang/c/src/st.c b/lang/c/src/st.c
new file mode 100644
index 0000000..2757828
--- /dev/null
+++ b/lang/c/src/st.c
@@ -0,0 +1,543 @@
+/*
+ * This is a public domain general purpose hash table package written by
+ * Peter Moore @ UCB. 
+ */
+
+/*
+ * static char sccsid[] = "@(#) st.c 5.1 89/12/14 Crucible"; 
+ */
+
+#include "avro_private.h"
+#include "avro/allocation.h"
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "st.h"
+
+typedef struct st_table_entry st_table_entry;
+
+struct st_table_entry {
+	unsigned int hash;
+	st_data_t key;
+	st_data_t record;
+	st_table_entry *next;
+};
+
+#define ST_DEFAULT_MAX_DENSITY 5
+#define ST_DEFAULT_INIT_TABLE_SIZE 11
+
+/*
+ * ST_DEFAULT_MAX_DENSITY is the largest average number of items per bin
+ * we allow before increasing the number of bins.
+ *
+ * ST_DEFAULT_INIT_TABLE_SIZE is the number of bins allocated initially.
+ */
+static int numcmp(long, long);
+static int numhash(long);
+static struct st_hash_type type_numhash = {
+	HASH_FUNCTION_CAST numcmp,
+	HASH_FUNCTION_CAST numhash
+};
+
+/*
+ * extern int strcmp(const char *, const char *); 
+ */
+static int strhash(const char *);
+static struct st_hash_type type_strhash = {
+	HASH_FUNCTION_CAST strcmp,
+	HASH_FUNCTION_CAST strhash
+};
+
+static void rehash(st_table *);
+
+#ifdef RUBY
+#define malloc xmalloc
+#define calloc xcalloc
+#endif
+
+#define Calloc(n,s) (char*)avro_calloc((n),(s))
+
+#define free_bins(tbl)  \
+	avro_free(tbl->bins, tbl->num_bins * sizeof(st_table_entry *))
+
+#define EQUAL(table,x,y) ((x)==(y) || (*table->type->compare)((x),(y)) == 0)
+
+#define do_hash(key,table) (unsigned int)(*(table)->type->hash)((key))
+#define do_hash_bin(key,table) (do_hash(key, table)%(table)->num_bins)
+
+/*
+ * MINSIZE is the minimum size of a dictionary.
+ */
+
+#define MINSIZE 8
+
+/*
+ * Table of prime numbers 2^n+a, 2<=n<=30. 
+ */
+static long primes[] = {
+	8 + 3,
+	16 + 3,
+	32 + 5,
+	64 + 3,
+	128 + 3,
+	256 + 27,
+	512 + 9,
+	1024 + 9,
+	2048 + 5,
+	4096 + 3,
+	8192 + 27,
+	16384 + 43,
+	32768 + 3,
+	65536 + 45,
+	131072 + 29,
+	262144 + 3,
+	524288 + 21,
+	1048576 + 7,
+	2097152 + 17,
+	4194304 + 15,
+	8388608 + 9,
+	16777216 + 43,
+	33554432 + 35,
+	67108864 + 15,
+	134217728 + 29,
+	268435456 + 3,
+	536870912 + 11,
+	1073741824 + 85,
+	0
+};
+
+static int new_size(int size)
+{
+	unsigned int i;
+
+#if 0
+	for (i = 3; i < 31; i++) {
+		if ((1 << i) > size)
+			return 1 << i;
+	}
+	return -1;
+#else
+	int newsize;
+
+	for (i = 0, newsize = MINSIZE;
+	     i < sizeof(primes) / sizeof(primes[0]); i++, newsize <<= 1) {
+		if (newsize > size)
+			return primes[i];
+	}
+	/*
+	 * Ran out of primes
+	 */
+	return -1;		/* should raise exception */
+#endif
+}
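+
+/*
+ * Worked example: new_size(100) doubles newsize through 8, 16, 32, 64,
+ * 128; the first value greater than 100 is reached at i == 4, so the
+ * function returns primes[4] == 131, the table prime just above 2^7.
+ */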
+
+#ifdef HASH_LOG
+static int collision = 0;
+static int init_st = 0;
+
+static void stat_col()
+{
+	FILE *f = fopen("/tmp/col", "w");
+	fprintf(f, "collision: %d\n", collision);
+	fclose(f);
+}
+#endif
+
+st_table *st_init_table_with_size(struct st_hash_type *type, int size)
+{
+	st_table *tbl;
+
+#ifdef HASH_LOG
+	if (init_st == 0) {
+		init_st = 1;
+		atexit(stat_col);
+	}
+#endif
+
+	size = new_size(size);	/* round up to prime number */
+
+	tbl = (st_table *) avro_new(st_table);
+	tbl->type = type;
+	tbl->num_entries = 0;
+	tbl->num_bins = size;
+	tbl->bins = (st_table_entry **) Calloc(size, sizeof(st_table_entry *));
+
+	return tbl;
+}
+
+st_table *st_init_table(struct st_hash_type *type)
+{
+	return st_init_table_with_size(type, 0);
+}
+
+st_table *st_init_numtable(void)
+{
+	return st_init_table(&type_numhash);
+}
+
+st_table *st_init_numtable_with_size(int size)
+{
+	return st_init_table_with_size(&type_numhash, size);
+}
+
+st_table *st_init_strtable(void)
+{
+	return st_init_table(&type_strhash);
+}
+
+st_table *st_init_strtable_with_size(int size)
+{
+	return st_init_table_with_size(&type_strhash, size);
+}
+
+void st_free_table(st_table *table)
+{
+	register st_table_entry *ptr, *next;
+	int i;
+
+	for (i = 0; i < table->num_bins; i++) {
+		ptr = table->bins[i];
+		while (ptr != 0) {
+			next = ptr->next;
+			avro_freet(st_table_entry, ptr);
+			ptr = next;
+		}
+	}
+	free_bins(table);
+	avro_freet(st_table, table);
+}
+
+#define PTR_NOT_EQUAL(table, ptr, hash_val, key) \
+((ptr) != 0 && (ptr->hash != (hash_val) || !EQUAL((table), (key), (ptr)->key)))
+
+#ifdef HASH_LOG
+#define COLLISION collision++
+#else
+#define COLLISION
+#endif
+
+#define FIND_ENTRY(table, ptr, hash_val, bin_pos) do {\
+    bin_pos = hash_val%(table)->num_bins;\
+    ptr = (table)->bins[bin_pos];\
+    if (PTR_NOT_EQUAL(table, ptr, hash_val, key)) {\
+	COLLISION;\
+	while (PTR_NOT_EQUAL(table, ptr->next, hash_val, key)) {\
+	    ptr = ptr->next;\
+	}\
+	ptr = ptr->next;\
+    }\
+} while (0)
+
+int st_lookup(st_table *table, register st_data_t key, st_data_t *value)
+{
+	unsigned int hash_val, bin_pos;
+	register st_table_entry *ptr;
+
+	hash_val = do_hash(key, table);
+	FIND_ENTRY(table, ptr, hash_val, bin_pos);
+
+	if (ptr == 0) {
+		return 0;
+	} else {
+		if (value != 0)
+			*value = ptr->record;
+		return 1;
+	}
+}
+
+#define ADD_DIRECT(table, key, value, hash_val, bin_pos)\
+do {\
+    st_table_entry *entry;\
+    if (table->num_entries/(table->num_bins) > ST_DEFAULT_MAX_DENSITY) {\
+	rehash(table);\
+        bin_pos = hash_val % table->num_bins;\
+    }\
+    \
+    entry = (st_table_entry *) avro_new(st_table_entry);\
+    \
+    entry->hash = hash_val;\
+    entry->key = key;\
+    entry->record = value;\
+    entry->next = table->bins[bin_pos];\
+    table->bins[bin_pos] = entry;\
+    table->num_entries++;\
+} while (0)
+
+int st_insert(register st_table *table, register st_data_t key, st_data_t value)
+{
+	unsigned int hash_val, bin_pos;
+	register st_table_entry *ptr;
+
+	hash_val = do_hash(key, table);
+	FIND_ENTRY(table, ptr, hash_val, bin_pos);
+
+	if (ptr == 0) {
+		ADD_DIRECT(table, key, value, hash_val, bin_pos);
+		return 0;
+	} else {
+		ptr->record = value;
+		return 1;
+	}
+}
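+
+/*
+ * Usage sketch (not compiled in): the numeric flavour keys entries by
+ * integer; st_insert returns 1 when it overwrote an existing key.
+ */
+#if 0
+static void example_numtable(void)
+{
+	st_table  *t = st_init_numtable();
+	st_data_t  v;
+	st_insert(t, (st_data_t) 42, (st_data_t) 1);
+	if (st_lookup(t, (st_data_t) 42, &v)) {
+		/* v == 1 */
+	}
+	st_free_table(t);
+}
+#endif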
+
+void st_add_direct(st_table *table, st_data_t key, st_data_t value)
+{
+	unsigned int hash_val, bin_pos;
+
+	hash_val = do_hash(key, table);
+	bin_pos = hash_val % table->num_bins;
+	ADD_DIRECT(table, key, value, hash_val, bin_pos);
+}
+
+static void rehash(register st_table *table)
+{
+	register st_table_entry *ptr, *next, **new_bins;
+	int i, old_num_bins = table->num_bins, new_num_bins;
+	unsigned int hash_val;
+
+	new_num_bins = new_size(old_num_bins + 1);
+	new_bins =
+	    (st_table_entry **) Calloc(new_num_bins, sizeof(st_table_entry *));
+
+	for (i = 0; i < old_num_bins; i++) {
+		ptr = table->bins[i];
+		while (ptr != 0) {
+			next = ptr->next;
+			hash_val = ptr->hash % new_num_bins;
+			ptr->next = new_bins[hash_val];
+			new_bins[hash_val] = ptr;
+			ptr = next;
+		}
+	}
+	free_bins(table);
+	table->num_bins = new_num_bins;
+	table->bins = new_bins;
+}
+
+st_table *st_copy(st_table *old_table)
+{
+	st_table *new_table;
+	st_table_entry *ptr, *entry;
+	int i, num_bins = old_table->num_bins;
+
+	new_table = (st_table *) avro_new(st_table);
+	if (new_table == 0) {
+		return 0;
+	}
+
+	*new_table = *old_table;
+	new_table->bins = (st_table_entry **)
+	    Calloc((unsigned)num_bins, sizeof(st_table_entry *));
+
+	if (new_table->bins == 0) {
+		avro_freet(st_table, new_table);
+		return 0;
+	}
+
+	for (i = 0; i < num_bins; i++) {
+		new_table->bins[i] = 0;
+		ptr = old_table->bins[i];
+		while (ptr != 0) {
+			entry = (st_table_entry *) avro_new(st_table_entry);
+			if (entry == 0) {
+				free_bins(new_table);
+				avro_freet(st_table, new_table);
+				return 0;
+			}
+			*entry = *ptr;
+			entry->next = new_table->bins[i];
+			new_table->bins[i] = entry;
+			ptr = ptr->next;
+		}
+	}
+	return new_table;
+}
+
+int st_delete(register st_table *table, register st_data_t *key, st_data_t *value)
+{
+	unsigned int hash_val;
+	st_table_entry *tmp;
+	register st_table_entry *ptr;
+
+	hash_val = do_hash_bin(*key, table);
+	ptr = table->bins[hash_val];
+
+	if (ptr == 0) {
+		if (value != 0)
+			*value = 0;
+		return 0;
+	}
+
+	if (EQUAL(table, *key, ptr->key)) {
+		table->bins[hash_val] = ptr->next;
+		table->num_entries--;
+		if (value != 0)
+			*value = ptr->record;
+		*key = ptr->key;
+		avro_freet(st_table_entry, ptr);
+		return 1;
+	}
+
+	for (; ptr->next != 0; ptr = ptr->next) {
+		if (EQUAL(table, ptr->next->key, *key)) {
+			tmp = ptr->next;
+			ptr->next = ptr->next->next;
+			table->num_entries--;
+			if (value != 0)
+				*value = tmp->record;
+			*key = tmp->key;
+			avro_freet(st_table_entry, tmp);
+			return 1;
+		}
+	}
+
+	return 0;
+}
+
+int st_delete_safe(register st_table *table, register st_data_t *key, st_data_t *value, st_data_t never)
+{
+	unsigned int hash_val;
+	register st_table_entry *ptr;
+
+	hash_val = do_hash_bin(*key, table);
+	ptr = table->bins[hash_val];
+
+	if (ptr == 0) {
+		if (value != 0)
+			*value = 0;
+		return 0;
+	}
+
+	for (; ptr != 0; ptr = ptr->next) {
+		if ((ptr->key != never) && EQUAL(table, ptr->key, *key)) {
+			table->num_entries--;
+			*key = ptr->key;
+			if (value != 0)
+				*value = ptr->record;
+			ptr->key = ptr->record = never;
+			return 1;
+		}
+	}
+
+	return 0;
+}
+
+static int delete_never(st_data_t key, st_data_t value, st_data_t never)
+{
+	AVRO_UNUSED(key);
+
+	if (value == never)
+		return ST_DELETE;
+	return ST_CONTINUE;
+}
+
+void st_cleanup_safe(st_table *table, st_data_t never)
+{
+	int num_entries = table->num_entries;
+
+	st_foreach(table, HASH_FUNCTION_CAST delete_never, never);
+	table->num_entries = num_entries;
+}
+
+int st_foreach(st_table *table, int (*func) (ANYARGS), st_data_t arg)
+{
+	st_table_entry *ptr, *last, *tmp;
+	enum st_retval retval;
+	int i;
+
+	for (i = 0; i < table->num_bins; i++) {
+		last = 0;
+		for (ptr = table->bins[i]; ptr != 0;) {
+			retval = (enum st_retval) (*func) (ptr->key, ptr->record, arg);
+			switch (retval) {
+			case ST_CHECK:	/* check if hash is modified during
+					 * iteration */
+				tmp = 0;
+				if (i < table->num_bins) {
+					for (tmp = table->bins[i]; tmp;
+					     tmp = tmp->next) {
+						if (tmp == ptr)
+							break;
+					}
+				}
+				if (!tmp) {
+					/*
+					 * call func with error notice 
+					 */
+					return 1;
+				}
+				/*
+				 * fall through 
+				 */
+			case ST_CONTINUE:
+				last = ptr;
+				ptr = ptr->next;
+				break;
+			case ST_STOP:
+				return 0;
+			case ST_DELETE:
+				tmp = ptr;
+				if (last == 0) {
+					table->bins[i] = ptr->next;
+				} else {
+					last->next = ptr->next;
+				}
+				ptr = ptr->next;
+				avro_freet(st_table_entry, tmp);
+				table->num_entries--;
+			}
+		}
+	}
+	return 0;
+}
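+
+/*
+ * Callback sketch (not compiled in): the return value drives the walk --
+ * ST_CONTINUE visits the next entry, ST_DELETE unlinks the current one,
+ * ST_STOP ends the iteration.  Invoke it as
+ * st_foreach(t, HASH_FUNCTION_CAST drop_odd_keys, 0).
+ */
+#if 0
+static int drop_odd_keys(st_data_t key, st_data_t value, st_data_t arg)
+{
+	AVRO_UNUSED(value);
+	AVRO_UNUSED(arg);
+	return (key & 1)? ST_DELETE: ST_CONTINUE;
+}
+#endif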
+
+static int strhash(register const char *string)
+{
+	register int c;
+
+#ifdef HASH_ELFHASH
+	register unsigned int h = 0, g;
+
+	while ((c = *string++) != '\0') {
+		h = (h << 4) + c;
+		if (g = h & 0xF0000000)
+			h ^= g >> 24;
+		h &= ~g;
+	}
+	return h;
+#elif defined(HASH_PERL)
+	register int val = 0;
+
+	while ((c = *string++) != '\0') {
+		val += c;
+		val += (val << 10);
+		val ^= (val >> 6);
+	}
+	val += (val << 3);
+	val ^= (val >> 11);
+
+	return val + (val << 15);
+#else
+	register int val = 0;
+
+	while ((c = *string++) != '\0') {
+		val = val * 997 + c;
+	}
+
+	return val + (val >> 5);
+#endif
+}
+
+static int numcmp(long x, long y)
+{
+	return x != y;
+}
+
+static int numhash(long n)
+{
+	return n;
+}
diff --git a/lang/c/src/st.h b/lang/c/src/st.h
new file mode 100644
index 0000000..cf8a224
--- /dev/null
+++ b/lang/c/src/st.h
@@ -0,0 +1,87 @@
+/*
+ * This is a public domain general purpose hash table package written by
+ * Peter Moore @ UCB. 
+ */
+
+/*
+ * @(#) st.h 5.1 89/12/14 
+ */
+
+#ifndef ST_INCLUDED
+#define ST_INCLUDED
+#ifdef __cplusplus
+extern "C" {
+#define CLOSE_EXTERN }
+#else
+#define CLOSE_EXTERN
+#endif
+
+#include <avro/platform.h>		/* for uintptr_t */
+
+#pragma GCC visibility push(hidden)
+
+#ifndef ANYARGS
+ #ifdef __cplusplus
+   #define ANYARGS ...
+ #else
+   #define ANYARGS
+ #endif
+#endif
+
+#ifdef _WIN32
+  #define HASH_FUNCTION_CAST (int (__cdecl *)(ANYARGS))
+#else
+  #define HASH_FUNCTION_CAST
+#endif
+
+typedef uintptr_t st_data_t;
+typedef struct st_table st_table;
+
+struct st_hash_type {
+  int (*compare) (ANYARGS);
+  int (*hash) (ANYARGS);
+};
+
+struct st_table {
+	struct st_hash_type *type;
+	int num_bins;
+	int num_entries;
+	struct st_table_entry **bins;
+};
+
+#define st_is_member(table,key) st_lookup(table,key,(st_data_t *)0)
+
+enum st_retval { ST_CONTINUE, ST_STOP, ST_DELETE, ST_CHECK };
+
+#ifndef _
+# define _(args) args
+#endif
+
+st_table *st_init_table _((struct st_hash_type *));
+st_table *st_init_table_with_size _((struct st_hash_type *, int));
+st_table *st_init_numtable _((void));
+st_table *st_init_numtable_with_size _((int));
+st_table *st_init_strtable _((void));
+st_table *st_init_strtable_with_size _((int));
+int st_delete _((st_table *, st_data_t *, st_data_t *));
+int st_delete_safe _((st_table *, st_data_t *, st_data_t *, st_data_t));
+int st_insert _((st_table *, st_data_t, st_data_t));
+int st_lookup _((st_table *, st_data_t, st_data_t *));
+int st_foreach _((st_table *, int (*)(ANYARGS), st_data_t));
+void st_add_direct _((st_table *, st_data_t, st_data_t));
+void st_free_table _((st_table *));
+void st_cleanup_safe _((st_table *, st_data_t));
+st_table *st_copy _((st_table *));
+
+#define ST_NUMCMP	((int (*)()) 0)
+#define ST_NUMHASH	((int (*)()) -2)
+
+#define st_numcmp	ST_NUMCMP
+#define st_numhash	ST_NUMHASH
+
+int st_strhash();
+
+#pragma GCC visibility pop
+
+CLOSE_EXTERN
+#endif				/* ST_INCLUDED */
diff --git a/lang/c/src/string.c b/lang/c/src/string.c
new file mode 100644
index 0000000..7327722
--- /dev/null
+++ b/lang/c/src/string.c
@@ -0,0 +1,304 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <stddef.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro_private.h"
+#include "avro/data.h"
+#include "avro/allocation.h"
+#include "avro/errors.h"
+
+#ifndef AVRO_STRING_DEBUG
+#define AVRO_STRING_DEBUG 0
+#endif
+
+#if AVRO_STRING_DEBUG
+#include <stdio.h>
+#define DEBUG(...) \
+	do { \
+		fprintf(stderr, __VA_ARGS__); \
+		fprintf(stderr, "\n"); \
+	} while (0)
+#else
+#define DEBUG(...)  /* don't print messages */
+#endif
+
+
+/*
+ * A resizable wrapped buffer implementation.  This implementation makes
+ * actual copies in its copy method; if we wanted a zero-copy solution
+ * here, then we'd have to keep track of all copies of the buffer, so
+ * that we can update pointers whenever the buffer is resized (since
+ * this might change the location of the memory region).
+ */
+
+struct avro_wrapped_resizable {
+	size_t  buf_size;
+};
+
+#define avro_wrapped_resizable_size(sz) \
+	(sizeof(struct avro_wrapped_resizable) + (sz))
+
+static void
+avro_wrapped_resizable_free(avro_wrapped_buffer_t *self)
+{
+	DEBUG("--- Freeing resizable <%p:%" PRIsz "> (%p)", self->buf, self->size, self->user_data);
+	struct avro_wrapped_resizable  *resizable = (struct avro_wrapped_resizable *) self->user_data;
+	avro_free(resizable, avro_wrapped_resizable_size(resizable->buf_size));
+}
+
+static int
+avro_wrapped_resizable_resize(avro_wrapped_buffer_t *self, size_t desired)
+{
+	struct avro_wrapped_resizable  *resizable = (struct avro_wrapped_resizable *) self->user_data;
+
+	/*
+	 * If we've already allocated enough memory for the desired
+	 * size, there's nothing to do.
+	 */
+
+	if (resizable->buf_size >= desired) {
+		return 0;
+	}
+
+	size_t  new_buf_size = resizable->buf_size * 2;
+	if (desired > new_buf_size) {
+		new_buf_size = desired;
+	}
+
+	DEBUG("--- Resizing <%p:%" PRIsz "> (%p) -> %" PRIsz,
+	      self->buf, self->size, self->user_data, new_buf_size);
+
+	struct avro_wrapped_resizable  *new_resizable =
+	    (struct avro_wrapped_resizable *) avro_realloc(resizable,
+		         avro_wrapped_resizable_size(resizable->buf_size),
+			 avro_wrapped_resizable_size(new_buf_size));
+	if (new_resizable == NULL) {
+		return ENOMEM;
+	}
+	DEBUG("--- New buffer <%p:%" PRIsz ">", new_buf, new_buf_size);
+
+	new_resizable->buf_size = new_buf_size;
+
+	char  *old_buf = (char *) resizable;
+	char  *new_buf = (char *) new_resizable;
+
+	ptrdiff_t  offset = (char *) self->buf - old_buf;
+	DEBUG("--- Old data pointer is %p", self->buf);
+	self->buf = new_buf + offset;
+	self->user_data = new_resizable;
+	DEBUG("--- New data pointer is %p", self->buf);
+	return 0;
+}
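+
+/*
+ * Growth policy example: a 16-byte buffer asked to hold 24 bytes doubles
+ * to 32; asked to hold 100 bytes, doubling (to 32) is not enough, so it
+ * grows straight to 100.  Doubling keeps repeated appends amortized
+ * linear.
+ */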
+
+static int
+avro_wrapped_resizable_new(avro_wrapped_buffer_t *dest, size_t buf_size)
+{
+	size_t  allocated_size = avro_wrapped_resizable_size(buf_size);
+	struct avro_wrapped_resizable  *resizable =
+	    (struct avro_wrapped_resizable *) avro_malloc(allocated_size);
+	if (resizable == NULL) {
+		return ENOMEM;
+	}
+
+	resizable->buf_size = buf_size;
+
+	dest->buf = ((char *) resizable) + sizeof(struct avro_wrapped_resizable);
+	DEBUG("--- Creating resizable <%p:%" PRIsz "> (%p)", dest->buf, buf_size, resizable);
+	dest->size = buf_size;
+	dest->user_data = resizable;
+	dest->free = avro_wrapped_resizable_free;
+	dest->copy = NULL;
+	dest->slice = NULL;
+	return 0;
+}
+
+#define is_resizable(buf) \
+	((buf).free == avro_wrapped_resizable_free)
+
+
+
+void
+avro_raw_string_init(avro_raw_string_t *str)
+{
+	memset(str, 0, sizeof(avro_raw_string_t));
+}
+
+
+void
+avro_raw_string_clear(avro_raw_string_t *str)
+{
+	/*
+	 * If the string's buffer is one that we control, then we don't
+	 * free it; that lets us reuse the storage on the next call to
+	 * avro_raw_string_set[_length].
+	 */
+
+	if (is_resizable(str->wrapped)) {
+		DEBUG("--- Clearing resizable buffer");
+		str->wrapped.size = 0;
+	} else {
+		DEBUG("--- Freeing wrapped buffer");
+		avro_wrapped_buffer_free(&str->wrapped);
+		avro_raw_string_init(str);
+	}
+}
+
+
+void
+avro_raw_string_done(avro_raw_string_t *str)
+{
+	avro_wrapped_buffer_free(&str->wrapped);
+	avro_raw_string_init(str);
+}
+
+
+/**
+ * Makes sure that the string's buffer is one that we allocated
+ * ourselves, and that the buffer is big enough to hold a string of the
+ * given length.
+ */
+
+static int
+avro_raw_string_ensure_buf(avro_raw_string_t *str, size_t length)
+{
+	int  rval;
+
+	DEBUG("--- Ensuring resizable buffer of size %" PRIsz, length);
+	if (is_resizable(str->wrapped)) {
+		/*
+		 * If we've already got a resizable buffer, just have it
+		 * resize itself.
+		 */
+
+		return avro_wrapped_resizable_resize(&str->wrapped, length);
+	} else {
+		/*
+		 * Stash a copy of the old wrapped buffer, and then
+		 * create a new resizable buffer to store our content
+		 * in.
+		 */
+
+		avro_wrapped_buffer_t  orig = str->wrapped;
+		check(rval, avro_wrapped_resizable_new(&str->wrapped, length));
+
+		/*
+		 * If there was any content in the old wrapped buffer,
+		 * copy it into the new resizable one.
+		 */
+
+		if (orig.size > 0) {
+			size_t  to_copy =
+			    (orig.size < length)? orig.size: length;
+			memcpy((void *) str->wrapped.buf, orig.buf, to_copy);
+		}
+		avro_wrapped_buffer_free(&orig);
+
+		return 0;
+	}
+}
+
+
+void
+avro_raw_string_set_length(avro_raw_string_t *str,
+			   const void *src, size_t length)
+{
+	avro_raw_string_ensure_buf(str, length+1);
+	memcpy((void *) str->wrapped.buf, src, length);
+	((char *) str->wrapped.buf)[length] = '\0';
+	str->wrapped.size = length;
+}
+
+
+void avro_raw_string_append_length(avro_raw_string_t *str,
+				   const void *src,
+				   size_t length)
+{
+	if (avro_raw_string_length(str) == 0) {
+		avro_raw_string_set_length(str, src, length);
+		return;
+	}
+
+	avro_raw_string_ensure_buf(str, str->wrapped.size + length);
+	memcpy((char *) str->wrapped.buf + str->wrapped.size, src, length);
+	str->wrapped.size += length;
+}
+
+
+void
+avro_raw_string_set(avro_raw_string_t *str, const char *src)
+{
+	size_t  length = strlen(src);
+	avro_raw_string_ensure_buf(str, length+1);
+	memcpy((void *) str->wrapped.buf, src, length+1);
+	str->wrapped.size = length+1;
+}
+
+
+void
+avro_raw_string_append(avro_raw_string_t *str, const char *src)
+{
+	if (avro_raw_string_length(str) == 0) {
+		avro_raw_string_set(str, src);
+		return;
+	}
+
+	/* Assume that str->wrapped.size includes a NUL terminator */
+	size_t  length = strlen(src);
+	avro_raw_string_ensure_buf(str, str->wrapped.size + length);
+	memcpy((char *) str->wrapped.buf + str->wrapped.size - 1, src, length+1);
+	str->wrapped.size += length;
+}
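+
+/*
+ * Usage sketch (not compiled in), assuming the avro/data.h accessor
+ * macros avro_raw_string_get and avro_raw_string_length.
+ */
+#if 0
+static void example_raw_string(void)
+{
+	avro_raw_string_t  s;
+	avro_raw_string_init(&s);
+	avro_raw_string_set(&s, "hello");
+	avro_raw_string_append(&s, ", world");
+	/* avro_raw_string_get(&s) now reads "hello, world" */
+	avro_raw_string_done(&s);
+}
+#endif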
+
+
+void
+avro_raw_string_give(avro_raw_string_t *str,
+		     avro_wrapped_buffer_t *src)
+{
+	DEBUG("--- Giving control of <%p:%" PRIsz "> (%p) to string",
+	      src->buf, src->size, src);
+	avro_wrapped_buffer_free(&str->wrapped);
+	avro_wrapped_buffer_move(&str->wrapped, src);
+}
+
+int
+avro_raw_string_grab(const avro_raw_string_t *str,
+		     avro_wrapped_buffer_t *dest)
+{
+	return avro_wrapped_buffer_copy(dest, &str->wrapped, 0, str->wrapped.size);
+}
+
+
+int
+avro_raw_string_equals(const avro_raw_string_t *str1,
+		       const avro_raw_string_t *str2)
+{
+	if (str1 == str2) {
+		return 1;
+	}
+
+	if (!str1 || !str2) {
+		return 0;
+	}
+
+	if (str1->wrapped.size != str2->wrapped.size) {
+		return 0;
+	}
+
+	return (memcmp(str1->wrapped.buf, str2->wrapped.buf,
+		       str1->wrapped.size) == 0);
+}
diff --git a/lang/c/src/value-hash.c b/lang/c/src/value-hash.c
new file mode 100644
index 0000000..3af4779
--- /dev/null
+++ b/lang/c/src/value-hash.c
@@ -0,0 +1,294 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro/platform.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro/allocation.h"
+#include "avro/data.h"
+#include "avro/errors.h"
+#include "avro/value.h"
+#include "avro_private.h"
+
+#define check_return(retval, call) \
+	do { \
+		int  rval = call; \
+		if (rval != 0) { return (retval); } \
+	} while (0)
+
+/*
+ * We currently use MurmurHash3 [1], which is public domain, as our hash
+ * implementation.
+ *
+ * [1] http://code.google.com/p/smhasher/
+ */
+
+/* Our seed is the MurmurHash3 of the string "avro.value" */
+#define SEED  0xaf4c78df
+
+#define ROTL32(a,b) (((a) << ((b) & 0x1f)) | ((a) >> (32 - ((b) & 0x1f))))
+
+static inline uint32_t
+fmix(uint32_t h)
+{
+	h ^= h >> 16;
+	h *= 0x85ebca6b;
+	h ^= h >> 13;
+	h *= 0xc2b2ae35;
+	h ^= h >> 16;
+	return h;
+}
+
+static const uint32_t  c1 = 0xcc9e2d51;
+static const uint32_t  c2 = 0x1b873593;
+
+static inline uint32_t
+add_hash(uint32_t start, uint32_t current)
+{
+	current *= c1;
+	current = ROTL32(current, 15);
+	current *= c2;
+
+	start ^= current;
+	start = ROTL32(start, 13);
+	start = start * 5 + 0xe6546b64;
+
+	return start;
+}
+
+static inline uint32_t
+hash_buffer(uint32_t start, const void *src, size_t len)
+{
+	const uint8_t  *data = (const uint8_t *) src;
+	const int  nblocks = len / 4;
+
+	uint32_t  h1 = start;
+
+	//----------
+	// body
+
+	const uint32_t  *blocks = (const uint32_t *) (data + nblocks*4);
+	int  i;
+
+	for (i = -nblocks; i != 0; i++) {
+		uint32_t  k1 = blocks[i];
+
+		k1 *= c1;
+		k1 = ROTL32(k1,15);
+		k1 *= c2;
+
+		h1 ^= k1;
+		h1 = ROTL32(h1,13);
+		h1 = h1*5+0xe6546b64;
+	}
+
+	//----------
+	// tail
+
+	const uint8_t  *tail = (const uint8_t *) (data + nblocks*4);
+
+	uint32_t  k1 = 0;
+
+	switch (len & 3)
+	{
+		case 3: k1 ^= tail[2] << 16;
+		case 2: k1 ^= tail[1] << 8;
+		case 1: k1 ^= tail[0];
+			k1 *= c1; k1 = ROTL32(k1,15); k1 *= c2; h1 ^= k1;
+	};
+
+	//----------
+	// finalization
+
+	h1 ^= len;
+	return h1;
+}
+
+static uint32_t
+avro_value_hash_fast(avro_value_t *value, uint32_t start)
+{
+	avro_type_t  type = avro_value_get_type(value);
+
+	switch (type) {
+		case AVRO_BOOLEAN:
+		{
+			int  v;
+			check_return(0, avro_value_get_boolean(value, &v));
+			return add_hash(start, v);
+		}
+
+		case AVRO_BYTES:
+		{
+			const void  *buf;
+			size_t  size;
+			check_return(0, avro_value_get_bytes(value, &buf, &size));
+			return hash_buffer(start, buf, size);
+		}
+
+		case AVRO_DOUBLE:
+		{
+			union {
+				double  d;
+				uint32_t  u32[2];
+			} v;
+			check_return(0, avro_value_get_double(value, &v.d));
+			return add_hash(add_hash(start, v.u32[0]), v.u32[1]);
+		}
+
+		case AVRO_FLOAT:
+		{
+			union {
+				float  f;
+				uint32_t  u32;
+			} v;
+			check_return(0, avro_value_get_float(value, &v.f));
+			return add_hash(start, v.u32);
+		}
+
+		case AVRO_INT32:
+		{
+			int32_t  v;
+			check_return(0, avro_value_get_int(value, &v));
+			return add_hash(start, v);
+		}
+
+		case AVRO_INT64:
+		{
+			union {
+				int64_t  u64;
+				uint32_t  u32[2];
+			} v;
+			check_return(0, avro_value_get_long(value, &v.u64));
+			return add_hash(add_hash(start, v.u32[0]), v.u32[1]);
+		}
+
+		case AVRO_NULL:
+		{
+			check_return(0, avro_value_get_null(value));
+			return add_hash(start, 0);
+		}
+
+		case AVRO_STRING:
+		{
+			const char  *buf;
+			size_t  size;
+			check_return(0, avro_value_get_string(value, &buf, &size));
+			return hash_buffer(start, buf, size);
+		}
+
+		case AVRO_ARRAY:
+		{
+			size_t  count;
+			size_t  i;
+			check_return(0, avro_value_get_size(value, &count));
+
+			for (i = 0; i < count; i++) {
+				avro_value_t  child;
+				check_return(0, avro_value_get_by_index
+					     (value, i, &child, NULL));
+				start = avro_value_hash_fast(&child, start);
+			}
+
+			start ^= count;
+			return start;
+		}
+
+		case AVRO_ENUM:
+		{
+			int  v;
+			check_return(0, avro_value_get_enum(value, &v));
+			return add_hash(start, v);
+		}
+
+		case AVRO_FIXED:
+		{
+			const void  *buf;
+			size_t  size;
+			check_return(0, avro_value_get_fixed(value, &buf, &size));
+			return hash_buffer(start, buf, size);
+		}
+
+		case AVRO_MAP:
+		{
+			size_t  count;
+			size_t  i;
+			check_return(0, avro_value_get_size(value, &count));
+
+			/*
+			 * The hash for a map must be built up without
+			 * taking into account the order of the elements
+			 */
+			uint32_t  map_hash = 0;
+			for (i = 0; i < count; i++) {
+				avro_value_t  child;
+				const char  *key;
+				check_return(0, avro_value_get_by_index
+					     (value, i, &child, &key));
+
+				uint32_t  element = SEED;
+				element = hash_buffer(element, key, strlen(key));
+				element = avro_value_hash_fast(&child, element);
+				element = fmix(element);
+
+				map_hash ^= element;
+			}
+			map_hash ^= count;
+
+			return add_hash(start, map_hash);
+		}
+
+		case AVRO_RECORD:
+		{
+			size_t  count;
+			size_t  i;
+			check_return(0, avro_value_get_size(value, &count));
+
+			for (i = 0; i < count; i++) {
+				avro_value_t  child;
+				check_return(0, avro_value_get_by_index
+					     (value, i, &child, NULL));
+				start = avro_value_hash_fast(&child, start);
+			}
+
+			start ^= count;
+			return start;
+		}
+
+		case AVRO_UNION:
+		{
+			int  disc;
+			avro_value_t  branch;
+			check_return(0, avro_value_get_discriminant(value, &disc));
+			check_return(0, avro_value_get_current_branch(value, &branch));
+
+			start = add_hash(start, disc);
+			start = avro_value_hash_fast(&branch, start);
+			return start;
+		}
+
+		default:
+			return 0;
+	}
+}
+
+uint32_t
+avro_value_hash(avro_value_t *value)
+{
+	uint32_t  hash = avro_value_hash_fast(value, SEED);
+	return (hash == 0)? hash: fmix(hash);
+}
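+
+/*
+ * Usage sketch (not compiled in), assuming the generic value API from
+ * avro/generic.h: equal values of the same schema hash identically, so
+ * avro_value_hash can key a hash table of values.
+ */
+#if 0
+static uint32_t example_hash_int(avro_schema_t schema, int32_t i)
+{
+	avro_value_iface_t  *iface = avro_generic_class_from_schema(schema);
+	avro_value_t  value;
+	avro_generic_value_new(iface, &value);
+	avro_value_set_int(&value, i);
+	uint32_t  hash = avro_value_hash(&value);
+	avro_value_decref(&value);
+	avro_value_iface_decref(iface);
+	return hash;
+}
+#endif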
diff --git a/lang/c/src/value-json.c b/lang/c/src/value-json.c
new file mode 100644
index 0000000..49757ba
--- /dev/null
+++ b/lang/c/src/value-json.c
@@ -0,0 +1,417 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <errno.h>
+#include <avro/platform.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro/allocation.h"
+#include "avro/errors.h"
+#include "avro/legacy.h"
+#include "avro/schema.h"
+#include "avro/value.h"
+#include "avro_private.h"
+#include "jansson.h"
+
+/*
+ * Converts a binary buffer into a NUL-terminated JSON UTF-8 string.
+ * Avro bytes and fixed values are encoded in JSON as a string, and JSON
+ * strings must be in UTF-8.  For these Avro types, the JSON string is
+ * restricted to the characters U+0000..U+00FF, which corresponds to the
+ * ISO-8859-1 character set.  This function performs this conversion.
+ * The resulting string must be freed using avro_free when you're done
+ * with it.
+ */
+
+static int
+encode_utf8_bytes(const void *src, size_t src_len,
+		  void **dest, size_t *dest_len)
+{
+	check_param(EINVAL, src, "source");
+	check_param(EINVAL, dest, "dest");
+	check_param(EINVAL, dest_len, "dest_len");
+
+	// First, determine the size of the resulting UTF-8 buffer.
+	// Bytes in the range 0x00..0x7f will take up one byte; bytes in
+	// the range 0x80..0xff will take up two.
+	const uint8_t  *src8 = (const uint8_t *) src;
+
+	size_t  utf8_len = src_len + 1;  // +1 for NUL terminator
+	size_t  i;
+	for (i = 0; i < src_len; i++) {
+		if (src8[i] & 0x80) {
+			utf8_len++;
+		}
+	}
+
+	// Allocate a new buffer for the UTF-8 string and fill it in.
+	uint8_t  *dest8 = (uint8_t *) avro_malloc(utf8_len);
+	if (dest8 == NULL) {
+		avro_set_error("Cannot allocate JSON bytes buffer");
+		return ENOMEM;
+	}
+
+	uint8_t  *curr = dest8;
+	for (i = 0; i < src_len; i++) {
+		if (src8[i] & 0x80) {
+			*curr++ = (0xc0 | (src8[i] >> 6));
+			*curr++ = (0x80 | (src8[i] & 0x3f));
+		} else {
+			*curr++ = src8[i];
+		}
+	}
+
+	*curr = '\0';
+
+	// And we're good.
+	*dest = dest8;
+	*dest_len = utf8_len;
+	return 0;
+}
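+
+/*
+ * Example of the mapping: the byte 0xE9 becomes the two-byte UTF-8
+ * sequence 0xC3 0xA9 (U+00E9), while 0x41 ('A') passes through
+ * unchanged, so the 2-byte input {0x41, 0xE9} yields the 4-byte
+ * buffer "A\xC3\xA9" plus NUL.
+ */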
+
+#define return_json(type, exp)						\
+	{								\
+		json_t  *result = exp;					\
+		if (result == NULL) {					\
+			avro_set_error("Cannot allocate JSON " type);	\
+		}							\
+		return result;						\
+	}
+
+#define check_return(retval, call) \
+	do { \
+		int  __rc; \
+		__rc = call; \
+		if (__rc != 0) { \
+			return retval; \
+		} \
+	} while (0)
+
+static json_t *
+avro_value_to_json_t(const avro_value_t *value)
+{
+	switch (avro_value_get_type(value)) {
+		case AVRO_BOOLEAN:
+		{
+			int  val;
+			check_return(NULL, avro_value_get_boolean(value, &val));
+			return_json("boolean",
+				    val?  json_true(): json_false());
+		}
+
+		case AVRO_BYTES:
+		{
+			const void  *val;
+			size_t  size;
+			void  *encoded = NULL;
+			size_t  encoded_size = 0;
+
+			check_return(NULL, avro_value_get_bytes(value, &val, &size));
+
+			if (encode_utf8_bytes(val, size, &encoded, &encoded_size)) {
+				return NULL;
+			}
+
+			json_t  *result = json_string_nocheck((const char *) encoded);
+			avro_free(encoded, encoded_size);
+			if (result == NULL) {
+				avro_set_error("Cannot allocate JSON bytes");
+			}
+			return result;
+		}
+
+		case AVRO_DOUBLE:
+		{
+			double  val;
+			check_return(NULL, avro_value_get_double(value, &val));
+			return_json("double", json_real(val));
+		}
+
+		case AVRO_FLOAT:
+		{
+			float  val;
+			check_return(NULL, avro_value_get_float(value, &val));
+			return_json("float", json_real(val));
+		}
+
+		case AVRO_INT32:
+		{
+			int32_t  val;
+			check_return(NULL, avro_value_get_int(value, &val));
+			return_json("int", json_integer(val));
+		}
+
+		case AVRO_INT64:
+		{
+			int64_t  val;
+			check_return(NULL, avro_value_get_long(value, &val));
+			return_json("long", json_integer(val));
+		}
+
+		case AVRO_NULL:
+		{
+			check_return(NULL, avro_value_get_null(value));
+			return_json("null", json_null());
+		}
+
+		case AVRO_STRING:
+		{
+			const char  *val;
+			size_t  size;
+			check_return(NULL, avro_value_get_string(value, &val, &size));
+			return_json("string", json_string(val));
+		}
+
+		case AVRO_ARRAY:
+		{
+			int  rc;
+			size_t  element_count, i;
+			json_t  *result = json_array();
+			if (result == NULL) {
+				avro_set_error("Cannot allocate JSON array");
+				return NULL;
+			}
+
+			rc = avro_value_get_size(value, &element_count);
+			if (rc != 0) {
+				json_decref(result);
+				return NULL;
+			}
+
+			for (i = 0; i < element_count; i++) {
+				avro_value_t  element;
+				rc = avro_value_get_by_index(value, i, &element, NULL);
+				if (rc != 0) {
+					json_decref(result);
+					return NULL;
+				}
+
+				json_t  *element_json = avro_value_to_json_t(&element);
+				if (element_json == NULL) {
+					json_decref(result);
+					return NULL;
+				}
+
+				if (json_array_append_new(result, element_json)) {
+					avro_set_error("Cannot append element to array");
+					json_decref(result);
+					return NULL;
+				}
+			}
+
+			return result;
+		}
+
+		case AVRO_ENUM:
+		{
+			avro_schema_t  enum_schema;
+			int  symbol_value;
+			const char  *symbol_name;
+
+			check_return(NULL, avro_value_get_enum(value, &symbol_value));
+			enum_schema = avro_value_get_schema(value);
+			symbol_name = avro_schema_enum_get(enum_schema, symbol_value);
+			return_json("enum", json_string(symbol_name));
+		}
+
+		case AVRO_FIXED:
+		{
+			const void  *val;
+			size_t  size;
+			void  *encoded = NULL;
+			size_t  encoded_size = 0;
+
+			check_return(NULL, avro_value_get_fixed(value, &val, &size));
+
+			if (encode_utf8_bytes(val, size, &encoded, &encoded_size)) {
+				return NULL;
+			}
+
+			json_t  *result = json_string_nocheck((const char *) encoded);
+			avro_free(encoded, encoded_size);
+			if (result == NULL) {
+				avro_set_error("Cannot allocate JSON fixed");
+			}
+			return result;
+		}
+
+		case AVRO_MAP:
+		{
+			int  rc;
+			size_t  element_count, i;
+			json_t  *result = json_object();
+			if (result == NULL) {
+				avro_set_error("Cannot allocate JSON map");
+				return NULL;
+			}
+
+			rc = avro_value_get_size(value, &element_count);
+			if (rc != 0) {
+				json_decref(result);
+				return NULL;
+			}
+
+			for (i = 0; i < element_count; i++) {
+				const char  *key;
+				avro_value_t  element;
+
+				rc = avro_value_get_by_index(value, i, &element, &key);
+				if (rc != 0) {
+					json_decref(result);
+					return NULL;
+				}
+
+				json_t  *element_json = avro_value_to_json_t(&element);
+				if (element_json == NULL) {
+					json_decref(result);
+					return NULL;
+				}
+
+				if (json_object_set_new(result, key, element_json)) {
+					avro_set_error("Cannot append element to map");
+					json_decref(result);
+					return NULL;
+				}
+			}
+
+			return result;
+		}
+
+		case AVRO_RECORD:
+		{
+			int  rc;
+			size_t  field_count, i;
+			json_t  *result = json_object();
+			if (result == NULL) {
+				avro_set_error("Cannot allocate new JSON record");
+				return NULL;
+			}
+
+			rc = avro_value_get_size(value, &field_count);
+			if (rc != 0) {
+				json_decref(result);
+				return NULL;
+			}
+
+			for (i = 0; i < field_count; i++) {
+				const char  *field_name;
+				avro_value_t  field;
+
+				rc = avro_value_get_by_index(value, i, &field, &field_name);
+				if (rc != 0) {
+					json_decref(result);
+					return NULL;
+				}
+
+				json_t  *field_json = avro_value_to_json_t(&field);
+				if (field_json == NULL) {
+					json_decref(result);
+					return NULL;
+				}
+
+				if (json_object_set_new(result, field_name, field_json)) {
+					avro_set_error("Cannot append field to record");
+					json_decref(result);
+					return NULL;
+				}
+			}
+
+			return result;
+		}
+
+		case AVRO_UNION:
+		{
+			int  disc;
+			avro_value_t  branch;
+			avro_schema_t  union_schema;
+			avro_schema_t  branch_schema;
+			const char  *branch_name;
+
+			check_return(NULL, avro_value_get_current_branch(value, &branch));
+
+			if (avro_value_get_type(&branch) == AVRO_NULL) {
+				return_json("null", json_null());
+			}
+
+			check_return(NULL, avro_value_get_discriminant(value, &disc));
+			union_schema = avro_value_get_schema(value);
+			branch_schema =
+			    avro_schema_union_branch(union_schema, disc);
+			branch_name = avro_schema_type_name(branch_schema);
+
+			json_t  *result = json_object();
+			if (result == NULL) {
+				avro_set_error("Cannot allocate JSON union");
+				return NULL;
+			}
+
+			json_t  *branch_json = avro_value_to_json_t(&branch);
+			if (branch_json == NULL) {
+				json_decref(result);
+				return NULL;
+			}
+
+			if (json_object_set_new(result, branch_name, branch_json)) {
+				avro_set_error("Cannot append branch to union");
+				json_decref(result);
+				return NULL;
+			}
+
+			return result;
+		}
+
+		default:
+			return NULL;
+	}
+}
+
+int
+avro_value_to_json(const avro_value_t *value,
+		   int one_line, char **json_str)
+{
+	check_param(EINVAL, value, "value");
+	check_param(EINVAL, json_str, "string buffer");
+
+	json_t  *json = avro_value_to_json_t(value);
+	if (json == NULL) {
+		return ENOMEM;
+	}
+
+	/*
+	 * Jansson will only encode an object or array as the root
+	 * element.
+	 */
+
+	*json_str = json_dumps
+	    (json,
+	     JSON_ENCODE_ANY |
+	     JSON_INDENT(one_line? 0: 2) |
+	     JSON_ENSURE_ASCII |
+	     JSON_PRESERVE_ORDER);
+	json_decref(json);
+	return 0;
+}
+
+int
+avro_datum_to_json(const avro_datum_t datum,
+		   int one_line, char **json_str)
+{
+	avro_value_t  value;
+	avro_datum_as_value(&value, datum);
+	int  rval = avro_value_to_json(&value, one_line, json_str);
+	avro_value_decref(&value);
+	return rval;
+}
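
Taken together, these two entry points give a complete value-to-JSON path. A minimal usage sketch, assuming only the public avro.h entry points shown in this patch and Jansson's default malloc allocator for the returned string (error handling abbreviated):

#include <stdio.h>
#include <stdlib.h>
#include <avro.h>

int main(void)
{
	avro_schema_t  schema;
	avro_value_iface_t  *iface;
	avro_value_t  value;
	char  *json = NULL;

	/* A trivial "int" schema and a generic value bound to it. */
	if (avro_schema_from_json_literal("\"int\"", &schema)) {
		fprintf(stderr, "%s\n", avro_strerror());
		return EXIT_FAILURE;
	}
	iface = avro_generic_class_from_schema(schema);
	avro_generic_value_new(iface, &value);
	avro_value_set_int(&value, 42);

	/* one_line != 0 selects JSON_INDENT(0), i.e. no pretty-printing. */
	if (avro_value_to_json(&value, 1, &json) == 0) {
		printf("%s\n", json);
		free(json);	/* json_dumps output; assumes the default allocator */
	}

	avro_value_decref(&value);
	avro_value_iface_decref(iface);
	avro_schema_decref(schema);
	return EXIT_SUCCESS;
}
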
diff --git a/lang/c/src/value-read.c b/lang/c/src/value-read.c
new file mode 100644
index 0000000..6e0cc6f
--- /dev/null
+++ b/lang/c/src/value-read.c
@@ -0,0 +1,400 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro/platform.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro/allocation.h"
+#include "avro/basics.h"
+#include "avro/data.h"
+#include "avro/io.h"
+#include "avro/value.h"
+#include "avro_private.h"
+#include "encoding.h"
+
+
+/*
+ * Forward declaration; this is basically the same as avro_value_read,
+ * but it doesn't reset dest first, since dest will have already been
+ * reset in avro_value_read itself.
+ */
+
+static int
+read_value(avro_reader_t reader, avro_value_t *dest);
+
+
+static int
+read_array_value(avro_reader_t reader, avro_value_t *dest)
+{
+	int  rval;
+	size_t  i;          /* index within the current block */
+	size_t  index = 0;  /* index within the entire array */
+	int64_t  block_count;
+	int64_t  block_size;
+
+	check_prefix(rval, avro_binary_encoding.
+		     read_long(reader, &block_count),
+		     "Cannot read array block count: ");
+
+	while (block_count != 0) {
+		if (block_count < 0) {
+			block_count = block_count * -1;
+			check_prefix(rval, avro_binary_encoding.
+				     read_long(reader, &block_size),
+				     "Cannot read array block size: ");
+		}
+
+		for (i = 0; i < (size_t) block_count; i++, index++) {
+			avro_value_t  child;
+
+			check(rval, avro_value_append(dest, &child, NULL));
+			check(rval, read_value(reader, &child));
+		}
+
+		check_prefix(rval, avro_binary_encoding.
+			     read_long(reader, &block_count),
+			     "Cannot read array block count: ");
+	}
+
+	return 0;
+}
+
+
+static int
+read_map_value(avro_reader_t reader, avro_value_t *dest)
+{
+	int  rval;
+	size_t  i;          /* index within the current block */
+	size_t  index = 0;  /* index within the entire map */
+	int64_t  block_count;
+	int64_t  block_size;
+
+	check_prefix(rval, avro_binary_encoding.read_long(reader, &block_count),
+		     "Cannot read map block count: ");
+
+	while (block_count != 0) {
+		if (block_count < 0) {
+			block_count = block_count * -1;
+			check_prefix(rval, avro_binary_encoding.
+				     read_long(reader, &block_size),
+				     "Cannot read map block size: ");
+		}
+
+		for (i = 0; i < (size_t) block_count; i++, index++) {
+			char *key;
+			int64_t key_size;
+			avro_value_t  child;
+
+			check_prefix(rval, avro_binary_encoding.
+				     read_string(reader, &key, &key_size),
+				     "Cannot read map key: ");
+
+			rval = avro_value_add(dest, key, &child, NULL, NULL);
+			if (rval) {
+				avro_free(key, key_size);
+				return rval;
+			}
+
+			rval = read_value(reader, &child);
+			if (rval) {
+				avro_free(key, key_size);
+				return rval;
+			}
+
+			avro_free(key, key_size);
+		}
+
+		check_prefix(rval, avro_binary_encoding.
+			     read_long(reader, &block_count),
+			     "Cannot read map block count: ");
+	}
+
+	return 0;
+}
+
+
+static int
+read_record_value(avro_reader_t reader, avro_value_t *dest)
+{
+	int  rval;
+	size_t  field_count;
+	size_t  i;
+
+	avro_schema_t  record_schema = avro_value_get_schema(dest);
+
+	check(rval, avro_value_get_size(dest, &field_count));
+	for (i = 0; i < field_count; i++) {
+		avro_value_t  field;
+
+		check(rval, avro_value_get_by_index(dest, i, &field, NULL));
+		if (field.iface != NULL) {
+			check(rval, read_value(reader, &field));
+		} else {
+			avro_schema_t  field_schema =
+			    avro_schema_record_field_get_by_index(record_schema, i);
+			check(rval, avro_skip_data(reader, field_schema));
+		}
+	}
+
+	return 0;
+}
+
+
+static int
+read_union_value(avro_reader_t reader, avro_value_t *dest)
+{
+	int rval;
+	int64_t discriminant;
+	avro_schema_t  union_schema;
+	int64_t  branch_count;
+	avro_value_t  branch;
+
+	check_prefix(rval, avro_binary_encoding.
+		     read_long(reader, &discriminant),
+		     "Cannot read union discriminant: ");
+
+	union_schema = avro_value_get_schema(dest);
+	branch_count = avro_schema_union_size(union_schema);
+
+	if (discriminant < 0 || discriminant >= branch_count) {
+		avro_set_error("Invalid union discriminant value: (%" PRId64 ")",
+			       discriminant);
+		return 1;
+	}
+
+	check(rval, avro_value_set_branch(dest, discriminant, &branch));
+	check(rval, read_value(reader, &branch));
+	return 0;
+}
+
+
+/*
+ * A wrapped buffer implementation that takes control of a buffer
+ * allocated using avro_malloc.
+ */
+
+struct avro_wrapped_alloc {
+	const void  *original;
+	size_t  allocated_size;
+};
+
+static void
+avro_wrapped_alloc_free(avro_wrapped_buffer_t *self)
+{
+	struct avro_wrapped_alloc  *alloc = (struct avro_wrapped_alloc *) self->user_data;
+	avro_free((void *) alloc->original, alloc->allocated_size);
+	avro_freet(struct avro_wrapped_alloc, alloc);
+}
+
+static int
+avro_wrapped_alloc_new(avro_wrapped_buffer_t *dest,
+		       const void *buf, size_t length)
+{
+	struct avro_wrapped_alloc  *alloc = (struct avro_wrapped_alloc *) avro_new(struct avro_wrapped_alloc);
+	if (alloc == NULL) {
+		return ENOMEM;
+	}
+
+	dest->buf = buf;
+	dest->size = length;
+	dest->user_data = alloc;
+	dest->free = avro_wrapped_alloc_free;
+	dest->copy = NULL;
+	dest->slice = NULL;
+
+	alloc->original = buf;
+	alloc->allocated_size = length;
+	return 0;
+}
+
+
+static int
+read_value(avro_reader_t reader, avro_value_t *dest)
+{
+	int  rval;
+
+	switch (avro_value_get_type(dest)) {
+		case AVRO_BOOLEAN:
+		{
+			int8_t  val;
+			check_prefix(rval, avro_binary_encoding.
+				     read_boolean(reader, &val),
+				     "Cannot read boolean value: ");
+			return avro_value_set_boolean(dest, val);
+		}
+
+		case AVRO_BYTES:
+		{
+			char  *bytes;
+			int64_t  len;
+			check_prefix(rval, avro_binary_encoding.
+				     read_bytes(reader, &bytes, &len),
+				     "Cannot read bytes value: ");
+
+			/*
+			 * read_bytes allocates an extra byte to always
+			 * ensure that the data is NUL terminated, but
+			 * that byte isn't included in the length.  We
+			 * include that extra byte in the allocated
+			 * size, but not in the length of the buffer.
+			 */
+
+			avro_wrapped_buffer_t  buf;
+			rval = avro_wrapped_alloc_new(&buf, bytes, len+1);
+			if (rval != 0) {
+				avro_free(bytes, len+1);
+				return rval;
+			}
+			buf.size--;
+			return avro_value_give_bytes(dest, &buf);
+		}
+
+		case AVRO_DOUBLE:
+		{
+			double  val;
+			check_prefix(rval, avro_binary_encoding.
+				     read_double(reader, &val),
+				     "Cannot read double value: ");
+			return avro_value_set_double(dest, val);
+		}
+
+		case AVRO_FLOAT:
+		{
+			float  val;
+			check_prefix(rval, avro_binary_encoding.
+				     read_float(reader, &val),
+				     "Cannot read float value: ");
+			return avro_value_set_float(dest, val);
+		}
+
+		case AVRO_INT32:
+		{
+			int32_t  val;
+			check_prefix(rval, avro_binary_encoding.
+				     read_int(reader, &val),
+				     "Cannot read int value: ");
+			return avro_value_set_int(dest, val);
+		}
+
+		case AVRO_INT64:
+		{
+			int64_t  val;
+			check_prefix(rval, avro_binary_encoding.
+				     read_long(reader, &val),
+				     "Cannot read long value: ");
+			return avro_value_set_long(dest, val);
+		}
+
+		case AVRO_NULL:
+		{
+			check_prefix(rval, avro_binary_encoding.
+				     read_null(reader),
+				     "Cannot read null value: ");
+			return avro_value_set_null(dest);
+		}
+
+		case AVRO_STRING:
+		{
+			char  *str;
+			int64_t  size;
+
+			/*
+			 * read_string returns a size that includes the
+			 * NUL terminator, and the free function will be
+			 * called with a size that also includes the NUL
+			 */
+
+			check_prefix(rval, avro_binary_encoding.
+				     read_string(reader, &str, &size),
+				     "Cannot read string value: ");
+
+			avro_wrapped_buffer_t  buf;
+			rval = avro_wrapped_alloc_new(&buf, str, size);
+			if (rval != 0) {
+				avro_free(str, size);
+				return rval;
+			}
+			return avro_value_give_string_len(dest, &buf);
+		}
+
+		case AVRO_ARRAY:
+			return read_array_value(reader, dest);
+
+		case AVRO_ENUM:
+		{
+			int64_t  val;
+			check_prefix(rval, avro_binary_encoding.
+				     read_long(reader, &val),
+				     "Cannot read enum value: ");
+			return avro_value_set_enum(dest, val);
+		}
+
+		case AVRO_FIXED:
+		{
+			avro_schema_t  schema = avro_value_get_schema(dest);
+			char *bytes;
+			int64_t size = avro_schema_fixed_size(schema);
+
+			bytes = (char *) avro_malloc(size);
+			if (!bytes) {
+				avro_prefix_error("Cannot allocate new fixed value");
+				return ENOMEM;
+			}
+			rval = avro_read(reader, bytes, size);
+			if (rval) {
+				avro_prefix_error("Cannot read fixed value: ");
+				avro_free(bytes, size);
+				return rval;
+			}
+
+			avro_wrapped_buffer_t  buf;
+			rval = avro_wrapped_alloc_new(&buf, bytes, size);
+			if (rval != 0) {
+				avro_free(bytes, size);
+				return rval;
+			}
+
+			return avro_value_give_fixed(dest, &buf);
+		}
+
+		case AVRO_MAP:
+			return read_map_value(reader, dest);
+
+		case AVRO_RECORD:
+			return read_record_value(reader, dest);
+
+		case AVRO_UNION:
+			return read_union_value(reader, dest);
+
+		default:
+		{
+			avro_set_error("Unknown schema type");
+			return EINVAL;
+		}
+	}
+
+	return 0;
+}
+
+int
+avro_value_read(avro_reader_t reader, avro_value_t *dest)
+{
+	int  rval;
+	check(rval, avro_value_reset(dest));
+	return read_value(reader, dest);
+}
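
Since read_value dispatches on whatever schema is already attached to dest, decoding a binary buffer reduces to pairing a memory reader with a freshly built generic value. A sketch under the same public-API assumptions; decode_datum is an illustrative helper name, and NULL checks on the two constructors are elided:

/* Decode one binary-encoded datum from an in-memory buffer. */
static int
decode_datum(const char *buf, int64_t len,
	     avro_schema_t schema, avro_value_t *dest)
{
	avro_value_iface_t  *iface = avro_generic_class_from_schema(schema);
	avro_reader_t  reader = avro_reader_memory(buf, len);
	int  rval;

	avro_generic_value_new(iface, dest);
	rval = avro_value_read(reader, dest);	/* resets dest, then fills it */

	avro_reader_free(reader);
	avro_value_iface_decref(iface);
	return rval;
}
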
diff --git a/lang/c/src/value-sizeof.c b/lang/c/src/value-sizeof.c
new file mode 100644
index 0000000..2583041
--- /dev/null
+++ b/lang/c/src/value-sizeof.c
@@ -0,0 +1,230 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro/platform.h>
+#include <stdlib.h>
+
+#include "avro/basics.h"
+#include "avro/io.h"
+#include "avro/value.h"
+#include "avro_private.h"
+#include "encoding.h"
+
+
+/*
+ * Forward declaration; this is basically the same as avro_value_sizeof,
+ * but it doesn't initialize size first, since size will have already
+ * been initialized in avro_value_sizeof itself.
+ */
+
+static int
+sizeof_value(avro_value_t *src, size_t *size);
+
+
+static int
+sizeof_array_value(avro_value_t *src, size_t *size)
+{
+	int  rval;
+	size_t  element_count;
+	check(rval, avro_value_get_size(src, &element_count));
+
+	if (element_count > 0) {
+		*size += avro_binary_encoding.size_long(NULL, element_count);
+
+		size_t  i;
+		for (i = 0; i < element_count; i++) {
+			avro_value_t  child;
+			check(rval, avro_value_get_by_index(src, i, &child, NULL));
+			check(rval, sizeof_value(&child, size));
+		}
+	}
+
+	*size += avro_binary_encoding.size_long(NULL, 0);
+	return 0;
+}
+
+
+static int
+sizeof_map_value(avro_value_t *src, size_t *size)
+{
+	int  rval;
+	size_t  element_count;
+	check(rval, avro_value_get_size(src, &element_count));
+
+	if (element_count > 0) {
+		*size += avro_binary_encoding.size_long(NULL, element_count);
+
+		size_t  i;
+		for (i = 0; i < element_count; i++) {
+			avro_value_t  child;
+			const char  *key;
+			check(rval, avro_value_get_by_index(src, i, &child, &key));
+			*size += avro_binary_encoding.size_string(NULL, key);
+			check(rval, sizeof_value(&child, size));
+		}
+	}
+
+	*size += avro_binary_encoding.size_long(NULL, 0);
+	return 0;
+}
+
+static int
+sizeof_record_value(avro_value_t *src, size_t *size)
+{
+	int  rval;
+	size_t  field_count;
+	check(rval, avro_value_get_size(src, &field_count));
+
+	size_t  i;
+	for (i = 0; i < field_count; i++) {
+		avro_value_t  field;
+		check(rval, avro_value_get_by_index(src, i, &field, NULL));
+		check(rval, sizeof_value(&field, size));
+	}
+
+	return 0;
+}
+
+static int
+sizeof_union_value(avro_value_t *src, size_t *size)
+{
+	int  rval;
+	int  discriminant;
+	avro_value_t  branch;
+
+	check(rval, avro_value_get_discriminant(src, &discriminant));
+	check(rval, avro_value_get_current_branch(src, &branch));
+	*size += avro_binary_encoding.size_long(NULL, discriminant);
+	return sizeof_value(&branch, size);
+}
+
+static int
+sizeof_value(avro_value_t *src, size_t *size)
+{
+	int  rval;
+
+	switch (avro_value_get_type(src)) {
+		case AVRO_BOOLEAN:
+		{
+			int  val;
+			check(rval, avro_value_get_boolean(src, &val));
+			*size += avro_binary_encoding.size_boolean(NULL, val);
+			return 0;
+		}
+
+		case AVRO_BYTES:
+		{
+			const void  *buf;
+			size_t  sz;
+			check(rval, avro_value_get_bytes(src, &buf, &sz));
+			*size += avro_binary_encoding.size_bytes(NULL, (const char *) buf, sz);
+			return 0;
+		}
+
+		case AVRO_DOUBLE:
+		{
+			double  val;
+			check(rval, avro_value_get_double(src, &val));
+			*size += avro_binary_encoding.size_double(NULL, val);
+			return 0;
+		}
+
+		case AVRO_FLOAT:
+		{
+			float  val;
+			check(rval, avro_value_get_float(src, &val));
+			*size += avro_binary_encoding.size_float(NULL, val);
+			return 0;
+		}
+
+		case AVRO_INT32:
+		{
+			int32_t  val;
+			check(rval, avro_value_get_int(src, &val));
+			*size += avro_binary_encoding.size_long(NULL, val);
+			return 0;
+		}
+
+		case AVRO_INT64:
+		{
+			int64_t  val;
+			check(rval, avro_value_get_long(src, &val));
+			*size += avro_binary_encoding.size_long(NULL, val);
+			return 0;
+		}
+
+		case AVRO_NULL:
+		{
+			check(rval, avro_value_get_null(src));
+			*size += avro_binary_encoding.size_null(NULL);
+			return 0;
+		}
+
+		case AVRO_STRING:
+		{
+			const char  *str;
+			size_t  sz;
+			check(rval, avro_value_get_string(src, &str, &sz));
+			*size += avro_binary_encoding.size_bytes(NULL, str, sz-1);
+			return 0;
+		}
+
+		case AVRO_ARRAY:
+			return sizeof_array_value(src, size);
+
+		case AVRO_ENUM:
+		{
+			int  val;
+			check(rval, avro_value_get_enum(src, &val));
+			*size += avro_binary_encoding.size_long(NULL, val);
+			return 0;
+		}
+
+		case AVRO_FIXED:
+		{
+			size_t  sz;
+			check(rval, avro_value_get_fixed(src, NULL, &sz));
+			*size += sz;
+			return 0;
+		}
+
+		case AVRO_MAP:
+			return sizeof_map_value(src, size);
+
+		case AVRO_RECORD:
+			return sizeof_record_value(src, size);
+
+		case AVRO_UNION:
+			return sizeof_union_value(src, size);
+
+		default:
+		{
+			avro_set_error("Unknown schema type");
+			return EINVAL;
+		}
+	}
+
+	return 0;
+}
+
+int
+avro_value_sizeof(avro_value_t *src, size_t *size)
+{
+	check_param(EINVAL, size, "size pointer");
+	*size = 0;
+	return sizeof_value(src, size);
+}
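
avro_value_sizeof mirrors the binary encoder exactly, block framing included: a three-element array of doubles, for example, sizes to size_long(3) + 3*8 + size_long(0) = 1 + 24 + 1 = 26 bytes. That makes it the natural way to allocate an exact-size buffer before encoding. A sketch (encode_exact is an illustrative name; assumes <stdlib.h>, <errno.h> and the avro_writer_memory API):

/* Encode `value` into a freshly malloc'ed, exactly-sized buffer. */
static int
encode_exact(avro_value_t *value, char **out, size_t *out_len)
{
	size_t  size;
	int  rval = avro_value_sizeof(value, &size);
	if (rval != 0) {
		return rval;
	}

	*out = (char *) malloc(size);
	if (*out == NULL) {
		return ENOMEM;
	}

	avro_writer_t  writer = avro_writer_memory(*out, size);
	rval = avro_value_write(writer, value);	/* see value-write.c below */
	avro_writer_free(writer);

	if (rval != 0) {
		free(*out);
		return rval;
	}
	*out_len = size;
	return 0;
}
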
diff --git a/lang/c/src/value-write.c b/lang/c/src/value-write.c
new file mode 100644
index 0000000..947ce80
--- /dev/null
+++ b/lang/c/src/value-write.c
@@ -0,0 +1,209 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro/platform.h>
+#include <stdlib.h>
+
+#include "avro/basics.h"
+#include "avro/io.h"
+#include "avro/value.h"
+#include "avro_private.h"
+#include "encoding.h"
+
+
+static int
+write_array_value(avro_writer_t writer, avro_value_t *src)
+{
+	int  rval;
+	size_t  element_count;
+	check(rval, avro_value_get_size(src, &element_count));
+
+	if (element_count > 0) {
+		check_prefix(rval, avro_binary_encoding.write_long
+			     (writer, element_count),
+			     "Cannot write array block count: ");
+
+		size_t  i;
+		for (i = 0; i < element_count; i++) {
+			avro_value_t  child;
+			check(rval, avro_value_get_by_index(src, i, &child, NULL));
+			check(rval, avro_value_write(writer, &child));
+		}
+	}
+
+	check_prefix(rval, avro_binary_encoding.write_long(writer, 0),
+		     "Cannot write array block count: ");
+	return 0;
+}
+
+
+static int
+write_map_value(avro_writer_t writer, avro_value_t *src)
+{
+	int  rval;
+	size_t  element_count;
+	check(rval, avro_value_get_size(src, &element_count));
+
+	if (element_count > 0) {
+		check_prefix(rval, avro_binary_encoding.write_long
+			     (writer, element_count),
+			     "Cannot write map block count: ");
+
+		size_t  i;
+		for (i = 0; i < element_count; i++) {
+			avro_value_t  child;
+			const char  *key;
+			check(rval, avro_value_get_by_index(src, i, &child, &key));
+			check(rval, avro_binary_encoding.write_string(writer, key));
+			check(rval, avro_value_write(writer, &child));
+		}
+	}
+
+	check_prefix(rval, avro_binary_encoding.write_long(writer, 0),
+		     "Cannot write map block count: ");
+	return 0;
+}
+
+static int
+write_record_value(avro_writer_t writer, avro_value_t *src)
+{
+	int  rval;
+	size_t  field_count;
+	check(rval, avro_value_get_size(src, &field_count));
+
+	size_t  i;
+	for (i = 0; i < field_count; i++) {
+		avro_value_t  field;
+		check(rval, avro_value_get_by_index(src, i, &field, NULL));
+		check(rval, avro_value_write(writer, &field));
+	}
+
+	return 0;
+}
+
+static int
+write_union_value(avro_writer_t writer, avro_value_t *src)
+{
+	int  rval;
+	int  discriminant;
+	avro_value_t  branch;
+
+	check(rval, avro_value_get_discriminant(src, &discriminant));
+	check(rval, avro_value_get_current_branch(src, &branch));
+	check(rval, avro_binary_encoding.write_long(writer, discriminant));
+	return avro_value_write(writer, &branch);
+}
+
+int
+avro_value_write(avro_writer_t writer, avro_value_t *src)
+{
+	int  rval;
+
+	switch (avro_value_get_type(src)) {
+		case AVRO_BOOLEAN:
+		{
+			int  val;
+			check(rval, avro_value_get_boolean(src, &val));
+			return avro_binary_encoding.write_boolean(writer, val);
+		}
+
+		case AVRO_BYTES:
+		{
+			const void  *buf;
+			size_t  size;
+			check(rval, avro_value_get_bytes(src, &buf, &size));
+			return avro_binary_encoding.write_bytes(writer, (const char *) buf, size);
+		}
+
+		case AVRO_DOUBLE:
+		{
+			double  val;
+			check(rval, avro_value_get_double(src, &val));
+			return avro_binary_encoding.write_double(writer, val);
+		}
+
+		case AVRO_FLOAT:
+		{
+			float  val;
+			check(rval, avro_value_get_float(src, &val));
+			return avro_binary_encoding.write_float(writer, val);
+		}
+
+		case AVRO_INT32:
+		{
+			int32_t  val;
+			check(rval, avro_value_get_int(src, &val));
+			return avro_binary_encoding.write_long(writer, val);
+		}
+
+		case AVRO_INT64:
+		{
+			int64_t  val;
+			check(rval, avro_value_get_long(src, &val));
+			return avro_binary_encoding.write_long(writer, val);
+		}
+
+		case AVRO_NULL:
+		{
+			check(rval, avro_value_get_null(src));
+			return avro_binary_encoding.write_null(writer);
+		}
+
+		case AVRO_STRING:
+		{
+			const char  *str;
+			size_t  size;
+			check(rval, avro_value_get_string(src, &str, &size));
+			return avro_binary_encoding.write_bytes(writer, str, size-1);
+		}
+
+		case AVRO_ARRAY:
+			return write_array_value(writer, src);
+
+		case AVRO_ENUM:
+		{
+			int  val;
+			check(rval, avro_value_get_enum(src, &val));
+			return avro_binary_encoding.write_long(writer, val);
+		}
+
+		case AVRO_FIXED:
+		{
+			const void  *buf;
+			size_t  size;
+			check(rval, avro_value_get_fixed(src, &buf, &size));
+			return avro_write(writer, (void *) buf, size);
+		}
+
+		case AVRO_MAP:
+			return write_map_value(writer, src);
+
+		case AVRO_RECORD:
+			return write_record_value(writer, src);
+
+		case AVRO_UNION:
+			return write_union_value(writer, src);
+
+		default:
+		{
+			avro_set_error("Unknown schema type");
+			return EINVAL;
+		}
+	}
+
+	return 0;
+}
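
Every case above either fails or writes through the supplied avro_writer_t, so after a successful call the writer's position is exactly the encoded size. A small sketch that encodes into a stack buffer and reports the byte count (dump_encoded_size is an illustrative name; assumes <stdio.h> and <inttypes.h>):

/* Encode `value` into a fixed buffer and report the bytes written. */
static int
dump_encoded_size(avro_value_t *value)
{
	char  buf[4096];
	avro_writer_t  writer = avro_writer_memory(buf, sizeof(buf));
	int  rval = avro_value_write(writer, value);

	if (rval == 0) {
		printf("encoded %" PRId64 " bytes\n", avro_writer_tell(writer));
	}
	avro_writer_free(writer);
	return rval;
}
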
diff --git a/lang/c/src/value.c b/lang/c/src/value.c
new file mode 100644
index 0000000..18fb3af
--- /dev/null
+++ b/lang/c/src/value.c
@@ -0,0 +1,692 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <errno.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro/allocation.h"
+#include "avro/data.h"
+#include "avro/errors.h"
+#include "avro/value.h"
+#include "avro_private.h"
+
+
+#define check_return(retval, call) \
+	do { \
+		int  rval = call; \
+		if (rval != 0) { return (retval); } \
+	} while (0)
+
+
+void
+avro_value_incref(avro_value_t *value)
+{
+	value->iface->incref(value);
+}
+
+void
+avro_value_decref(avro_value_t *value)
+{
+	value->iface->decref(value);
+	avro_value_iface_decref(value->iface);
+	value->iface = NULL;
+	value->self = NULL;
+}
+
+void
+avro_value_copy_ref(avro_value_t *dest, const avro_value_t *src)
+{
+	dest->iface = src->iface;
+	dest->self = src->self;
+	avro_value_iface_incref(dest->iface);
+	dest->iface->incref(dest);
+}
+
+void
+avro_value_move_ref(avro_value_t *dest, avro_value_t *src)
+{
+	dest->iface = src->iface;
+	dest->self = src->self;
+	src->iface = NULL;
+	src->self = NULL;
+}
+
+
+int
+avro_value_equal_fast(avro_value_t *val1, avro_value_t *val2)
+{
+	avro_type_t  type1 = avro_value_get_type(val1);
+	avro_type_t  type2 = avro_value_get_type(val2);
+	if (type1 != type2) {
+		return 0;
+	}
+
+	switch (type1) {
+		case AVRO_BOOLEAN:
+		{
+			int  v1;
+			int  v2;
+			check_return(0, avro_value_get_boolean(val1, &v1));
+			check_return(0, avro_value_get_boolean(val2, &v2));
+			return (v1 == v2);
+		}
+
+		case AVRO_BYTES:
+		{
+			const void  *buf1;
+			const void  *buf2;
+			size_t  size1;
+			size_t  size2;
+			check_return(0, avro_value_get_bytes(val1, &buf1, &size1));
+			check_return(0, avro_value_get_bytes(val2, &buf2, &size2));
+			if (size1 != size2) {
+				return 0;
+			}
+			return (memcmp(buf1, buf2, size1) == 0);
+		}
+
+		case AVRO_DOUBLE:
+		{
+			double  v1;
+			double  v2;
+			check_return(0, avro_value_get_double(val1, &v1));
+			check_return(0, avro_value_get_double(val2, &v2));
+			return (v1 == v2);
+		}
+
+		case AVRO_FLOAT:
+		{
+			float  v1;
+			float  v2;
+			check_return(0, avro_value_get_float(val1, &v1));
+			check_return(0, avro_value_get_float(val2, &v2));
+			return (v1 == v2);
+		}
+
+		case AVRO_INT32:
+		{
+			int32_t  v1;
+			int32_t  v2;
+			check_return(0, avro_value_get_int(val1, &v1));
+			check_return(0, avro_value_get_int(val2, &v2));
+			return (v1 == v2);
+		}
+
+		case AVRO_INT64:
+		{
+			int64_t  v1;
+			int64_t  v2;
+			check_return(0, avro_value_get_long(val1, &v1));
+			check_return(0, avro_value_get_long(val2, &v2));
+			return (v1 == v2);
+		}
+
+		case AVRO_NULL:
+		{
+			check_return(0, avro_value_get_null(val1));
+			check_return(0, avro_value_get_null(val2));
+			return 1;
+		}
+
+		case AVRO_STRING:
+		{
+			const char  *buf1;
+			const char  *buf2;
+			size_t  size1;
+			size_t  size2;
+			check_return(0, avro_value_get_string(val1, &buf1, &size1));
+			check_return(0, avro_value_get_string(val2, &buf2, &size2));
+			if (size1 != size2) {
+				return 0;
+			}
+			return (memcmp(buf1, buf2, size1) == 0);
+		}
+
+		case AVRO_ARRAY:
+		{
+			size_t  count1;
+			size_t  count2;
+			check_return(0, avro_value_get_size(val1, &count1));
+			check_return(0, avro_value_get_size(val2, &count2));
+			if (count1 != count2) {
+				return 0;
+			}
+
+			size_t  i;
+			for (i = 0; i < count1; i++) {
+				avro_value_t  child1;
+				avro_value_t  child2;
+				check_return(0, avro_value_get_by_index
+					     (val1, i, &child1, NULL));
+				check_return(0, avro_value_get_by_index
+					     (val2, i, &child2, NULL));
+				if (!avro_value_equal_fast(&child1, &child2)) {
+					return 0;
+				}
+			}
+
+			return 1;
+		}
+
+		case AVRO_ENUM:
+		{
+			int  v1;
+			int  v2;
+			check_return(0, avro_value_get_enum(val1, &v1));
+			check_return(0, avro_value_get_enum(val2, &v2));
+			return (v1 == v2);
+		}
+
+		case AVRO_FIXED:
+		{
+			const void  *buf1;
+			const void  *buf2;
+			size_t  size1;
+			size_t  size2;
+			check_return(0, avro_value_get_fixed(val1, &buf1, &size1));
+			check_return(0, avro_value_get_fixed(val2, &buf2, &size2));
+			if (size1 != size2) {
+				return 0;
+			}
+			return (memcmp(buf1, buf2, size1) == 0);
+		}
+
+		case AVRO_MAP:
+		{
+			size_t  count1;
+			size_t  count2;
+			check_return(0, avro_value_get_size(val1, &count1));
+			check_return(0, avro_value_get_size(val2, &count2));
+			if (count1 != count2) {
+				return 0;
+			}
+
+			size_t  i;
+			for (i = 0; i < count1; i++) {
+				avro_value_t  child1;
+				avro_value_t  child2;
+				const char  *key1;
+				check_return(0, avro_value_get_by_index
+					     (val1, i, &child1, &key1));
+				check_return(0, avro_value_get_by_name
+					     (val2, key1, &child2, NULL));
+				if (!avro_value_equal_fast(&child1, &child2)) {
+					return 0;
+				}
+			}
+
+			return 1;
+		}
+
+		case AVRO_RECORD:
+		{
+			size_t  count1;
+			check_return(0, avro_value_get_size(val1, &count1));
+
+			size_t  i;
+			for (i = 0; i < count1; i++) {
+				avro_value_t  child1;
+				avro_value_t  child2;
+				check_return(0, avro_value_get_by_index
+					     (val1, i, &child1, NULL));
+				check_return(0, avro_value_get_by_index
+					     (val2, i, &child2, NULL));
+				if (!avro_value_equal_fast(&child1, &child2)) {
+					return 0;
+				}
+			}
+
+			return 1;
+		}
+
+		case AVRO_UNION:
+		{
+			int  disc1;
+			int  disc2;
+			check_return(0, avro_value_get_discriminant(val1, &disc1));
+			check_return(0, avro_value_get_discriminant(val2, &disc2));
+			if (disc1 != disc2) {
+				return 0;
+			}
+
+			avro_value_t  branch1;
+			avro_value_t  branch2;
+			check_return(0, avro_value_get_current_branch(val1, &branch1));
+			check_return(0, avro_value_get_current_branch(val2, &branch2));
+			return avro_value_equal_fast(&branch1, &branch2);
+		}
+
+		default:
+			return 0;
+	}
+}
+
+int
+avro_value_equal(avro_value_t *val1, avro_value_t *val2)
+{
+	avro_schema_t  schema1 = avro_value_get_schema(val1);
+	avro_schema_t  schema2 = avro_value_get_schema(val2);
+	if (!avro_schema_equal(schema1, schema2)) {
+		return 0;
+	}
+
+	return avro_value_equal_fast(val1, val2);
+}
+
+
+#define cmp(v1, v2) \
+	(((v1) == (v2))? 0: \
+	 ((v1) <  (v2))? -1: 1)
+int
+avro_value_cmp_fast(avro_value_t *val1, avro_value_t *val2)
+{
+	avro_type_t  type1 = avro_value_get_type(val1);
+	avro_type_t  type2 = avro_value_get_type(val2);
+	if (type1 != type2) {
+		return -1;
+	}
+
+	switch (type1) {
+		case AVRO_BOOLEAN:
+		{
+			int  v1;
+			int  v2;
+			check_return(0, avro_value_get_boolean(val1, &v1));
+			check_return(0, avro_value_get_boolean(val2, &v2));
+			return cmp(!!v1, !!v2);
+		}
+
+		case AVRO_BYTES:
+		{
+			const void  *buf1;
+			const void  *buf2;
+			size_t  size1;
+			size_t  size2;
+			size_t  min_size;
+			int  result;
+
+			check_return(0, avro_value_get_bytes(val1, &buf1, &size1));
+			check_return(0, avro_value_get_bytes(val2, &buf2, &size2));
+
+			min_size = (size1 < size2)? size1: size2;
+			result = memcmp(buf1, buf2, min_size);
+			if (result != 0) {
+				return result;
+			} else {
+				return cmp(size1, size2);
+			}
+		}
+
+		case AVRO_DOUBLE:
+		{
+			double  v1;
+			double  v2;
+			check_return(0, avro_value_get_double(val1, &v1));
+			check_return(0, avro_value_get_double(val2, &v2));
+			return cmp(v1, v2);
+		}
+
+		case AVRO_FLOAT:
+		{
+			float  v1;
+			float  v2;
+			check_return(0, avro_value_get_float(val1, &v1));
+			check_return(0, avro_value_get_float(val2, &v2));
+			return cmp(v1, v2);
+		}
+
+		case AVRO_INT32:
+		{
+			int32_t  v1;
+			int32_t  v2;
+			check_return(0, avro_value_get_int(val1, &v1));
+			check_return(0, avro_value_get_int(val2, &v2));
+			return cmp(v1, v2);
+		}
+
+		case AVRO_INT64:
+		{
+			int64_t  v1;
+			int64_t  v2;
+			check_return(0, avro_value_get_long(val1, &v1));
+			check_return(0, avro_value_get_long(val2, &v2));
+			return cmp(v1, v2);
+		}
+
+		case AVRO_NULL:
+		{
+			check_return(0, avro_value_get_null(val1));
+			check_return(0, avro_value_get_null(val2));
+			return 0;
+		}
+
+		case AVRO_STRING:
+		{
+			const char  *buf1;
+			const char  *buf2;
+			size_t  size1;
+			size_t  size2;
+			size_t  min_size;
+			int  result;
+			check_return(0, avro_value_get_string(val1, &buf1, &size1));
+			check_return(0, avro_value_get_string(val2, &buf2, &size2));
+
+			min_size = (size1 < size2)? size1: size2;
+			result = memcmp(buf1, buf2, min_size);
+			if (result != 0) {
+				return result;
+			} else {
+				return cmp(size1, size2);
+			}
+		}
+
+		case AVRO_ARRAY:
+		{
+			size_t  count1;
+			size_t  count2;
+			size_t  min_count;
+			size_t  i;
+			check_return(0, avro_value_get_size(val1, &count1));
+			check_return(0, avro_value_get_size(val2, &count2));
+
+			min_count = (count1 < count2)? count1: count2;
+			for (i = 0; i < min_count; i++) {
+				avro_value_t  child1;
+				avro_value_t  child2;
+				int  result;
+				check_return(0, avro_value_get_by_index
+					     (val1, i, &child1, NULL));
+				check_return(0, avro_value_get_by_index
+					     (val2, i, &child2, NULL));
+				result = avro_value_cmp_fast(&child1, &child2);
+				if (result != 0) {
+					return result;
+				}
+			}
+
+			return cmp(count1, count2);
+		}
+
+		case AVRO_ENUM:
+		{
+			int  v1;
+			int  v2;
+			check_return(0, avro_value_get_enum(val1, &v1));
+			check_return(0, avro_value_get_enum(val2, &v2));
+			return cmp(v1, v2);
+		}
+
+		case AVRO_FIXED:
+		{
+			const void  *buf1;
+			const void  *buf2;
+			size_t  size1;
+			size_t  size2;
+			check_return(0, avro_value_get_fixed(val1, &buf1, &size1));
+			check_return(0, avro_value_get_fixed(val2, &buf2, &size2));
+			if (size1 != size2) {
+				return -1;
+			}
+			return memcmp(buf1, buf2, size1);
+		}
+
+		case AVRO_MAP:
+		{
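+			/* Avro defines no sort order for maps, so
+			 * arbitrarily report val1 as smaller. */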
+			return -1;
+		}
+
+		case AVRO_RECORD:
+		{
+			size_t  count1;
+			check_return(0, avro_value_get_size(val1, &count1));
+
+			size_t  i;
+			for (i = 0; i < count1; i++) {
+				avro_value_t  child1;
+				avro_value_t  child2;
+				int  result;
+
+				check_return(0, avro_value_get_by_index
+					     (val1, i, &child1, NULL));
+				check_return(0, avro_value_get_by_index
+					     (val2, i, &child2, NULL));
+				result = avro_value_cmp_fast(&child1, &child2);
+				if (result != 0) {
+					return result;
+				}
+			}
+
+			return 0;
+		}
+
+		case AVRO_UNION:
+		{
+			int  disc1;
+			int  disc2;
+			check_return(0, avro_value_get_discriminant(val1, &disc1));
+			check_return(0, avro_value_get_discriminant(val2, &disc2));
+
+			if (disc1 == disc2) {
+				avro_value_t  branch1;
+				avro_value_t  branch2;
+				check_return(0, avro_value_get_current_branch(val1, &branch1));
+				check_return(0, avro_value_get_current_branch(val2, &branch2));
+				return avro_value_cmp_fast(&branch1, &branch2);
+			} else {
+				return cmp(disc1, disc2);
+			}
+		}
+
+		default:
+			return 0;
+	}
+}
+
+int
+avro_value_cmp(avro_value_t *val1, avro_value_t *val2)
+{
+	avro_schema_t  schema1 = avro_value_get_schema(val1);
+	avro_schema_t  schema2 = avro_value_get_schema(val2);
+	if (!avro_schema_equal(schema1, schema2)) {
+		return 0;
+	}
+
+	return avro_value_cmp_fast(val1, val2);
+}
+
+
+int
+avro_value_copy_fast(avro_value_t *dest, const avro_value_t *src)
+{
+	avro_type_t  dest_type = avro_value_get_type(dest);
+	avro_type_t  src_type = avro_value_get_type(src);
+	if (dest_type != src_type) {
+		return 0;
+	}
+
+	int  rval;
+	check(rval, avro_value_reset(dest));
+
+	switch (dest_type) {
+		case AVRO_BOOLEAN:
+		{
+			int  val;
+			check(rval, avro_value_get_boolean(src, &val));
+			return avro_value_set_boolean(dest, val);
+		}
+
+		case AVRO_BYTES:
+		{
+			avro_wrapped_buffer_t  val;
+			check(rval, avro_value_grab_bytes(src, &val));
+			return avro_value_give_bytes(dest, &val);
+		}
+
+		case AVRO_DOUBLE:
+		{
+			double  val;
+			check(rval, avro_value_get_double(src, &val));
+			return avro_value_set_double(dest, val);
+		}
+
+		case AVRO_FLOAT:
+		{
+			float  val;
+			check(rval, avro_value_get_float(src, &val));
+			return avro_value_set_float(dest, val);
+		}
+
+		case AVRO_INT32:
+		{
+			int32_t  val;
+			check(rval, avro_value_get_int(src, &val));
+			return avro_value_set_int(dest, val);
+		}
+
+		case AVRO_INT64:
+		{
+			int64_t  val;
+			check(rval, avro_value_get_long(src, &val));
+			return avro_value_set_long(dest, val);
+		}
+
+		case AVRO_NULL:
+		{
+			check(rval, avro_value_get_null(src));
+			return avro_value_set_null(dest);
+		}
+
+		case AVRO_STRING:
+		{
+			avro_wrapped_buffer_t  val;
+			check(rval, avro_value_grab_string(src, &val));
+			return avro_value_give_string_len(dest, &val);
+		}
+
+		case AVRO_ARRAY:
+		{
+			size_t  count;
+			check(rval, avro_value_get_size(src, &count));
+
+			size_t  i;
+			for (i = 0; i < count; i++) {
+				avro_value_t  src_child;
+				avro_value_t  dest_child;
+
+				check(rval, avro_value_get_by_index
+				      (src, i, &src_child, NULL));
+				check(rval, avro_value_append
+				      (dest, &dest_child, NULL));
+				check(rval, avro_value_copy_fast
+				      (&dest_child, &src_child));
+			}
+
+			return 0;
+		}
+
+		case AVRO_ENUM:
+		{
+			int  val;
+			check(rval, avro_value_get_enum(src, &val));
+			return avro_value_set_enum(dest, val);
+		}
+
+		case AVRO_FIXED:
+		{
+			avro_wrapped_buffer_t  val;
+			check(rval, avro_value_grab_fixed(src, &val));
+			return avro_value_give_fixed(dest, &val);
+		}
+
+		case AVRO_MAP:
+		{
+			size_t  count;
+			check(rval, avro_value_get_size(src, &count));
+
+			size_t  i;
+			for (i = 0; i < count; i++) {
+				avro_value_t  src_child;
+				avro_value_t  dest_child;
+				const char  *key;
+
+				check(rval, avro_value_get_by_index
+				      (src, i, &src_child, &key));
+				check(rval, avro_value_add
+				      (dest, key, &dest_child, NULL, NULL));
+				check(rval, avro_value_copy_fast
+				      (&dest_child, &src_child));
+			}
+
+			return 0;
+		}
+
+		case AVRO_RECORD:
+		{
+			size_t  count;
+			check(rval, avro_value_get_size(src, &count));
+
+			size_t  i;
+			for (i = 0; i < count; i++) {
+				avro_value_t  src_child;
+				avro_value_t  dest_child;
+
+				check(rval, avro_value_get_by_index
+				      (src, i, &src_child, NULL));
+				check(rval, avro_value_get_by_index
+				      (dest, i, &dest_child, NULL));
+				check(rval, avro_value_copy_fast
+				      (&dest_child, &src_child));
+			}
+
+			return 0;
+		}
+
+		case AVRO_UNION:
+		{
+			int  disc;
+			check(rval, avro_value_get_discriminant(src, &disc));
+
+			avro_value_t  src_branch;
+			avro_value_t  dest_branch;
+
+			check(rval, avro_value_get_current_branch(src, &src_branch));
+			check(rval, avro_value_set_branch(dest, disc, &dest_branch));
+
+			return avro_value_copy_fast(&dest_branch, &src_branch);
+		}
+
+		default:
+			return 0;
+	}
+}
+
+
+int
+avro_value_copy(avro_value_t *dest, const avro_value_t *src)
+{
+	avro_schema_t  dest_schema = avro_value_get_schema(dest);
+	avro_schema_t  src_schema = avro_value_get_schema(src);
+	if (!avro_schema_equal(dest_schema, src_schema)) {
+		avro_set_error("Schemas don't match");
+		return EINVAL;
+	}
+
+	return avro_value_copy_fast(dest, src);
+}
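
avro_value_copy is the schema-checked wrapper over avro_value_copy_fast, and pairs naturally with avro_value_equal for verification. A sketch that deep-copies a value into a fresh generic instance of the same schema (clone_matches is an illustrative name; error checks elided):

/* Deep-copy `src` into a new generic value and verify the clone. */
static int
clone_matches(avro_value_t *src)
{
	avro_schema_t  schema = avro_value_get_schema(src);
	avro_value_iface_t  *iface = avro_generic_class_from_schema(schema);
	avro_value_t  copy;
	int  ok;

	avro_generic_value_new(iface, &copy);
	avro_value_copy(&copy, src);	/* schemas match by construction */
	ok = avro_value_equal(&copy, src);

	avro_value_decref(&copy);
	avro_value_iface_decref(iface);
	return ok;
}
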
diff --git a/lang/c/src/wrapped-buffer.c b/lang/c/src/wrapped-buffer.c
new file mode 100644
index 0000000..a798f7a
--- /dev/null
+++ b/lang/c/src/wrapped-buffer.c
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include <errno.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro_private.h"
+#include "avro/allocation.h"
+#include "avro/data.h"
+#include "avro/refcount.h"
+
+struct avro_wrapped_copy {
+	volatile int  refcount;
+	size_t  allocated_size;
+};
+
+static void
+avro_wrapped_copy_free(avro_wrapped_buffer_t *self)
+{
+	struct avro_wrapped_copy  *copy = (struct avro_wrapped_copy *) self->user_data;
+	if (avro_refcount_dec(&copy->refcount)) {
+		avro_free(copy, copy->allocated_size);
+	}
+}
+
+static int
+avro_wrapped_copy_copy(avro_wrapped_buffer_t *dest,
+		       const avro_wrapped_buffer_t *src,
+		       size_t offset, size_t length)
+{
+	struct avro_wrapped_copy  *copy = (struct avro_wrapped_copy *) src->user_data;
+	avro_refcount_inc(&copy->refcount);
+	dest->buf = (char *) src->buf + offset;
+	dest->size = length;
+	dest->user_data = copy;
+	dest->free = avro_wrapped_copy_free;
+	dest->copy = avro_wrapped_copy_copy;
+	dest->slice = NULL;
+	return 0;
+}
+
+int
+avro_wrapped_buffer_new_copy(avro_wrapped_buffer_t *dest,
+			     const void *buf, size_t length)
+{
+	size_t  allocated_size = sizeof(struct avro_wrapped_copy) + length;
+	struct avro_wrapped_copy  *copy = (struct avro_wrapped_copy *) avro_malloc(allocated_size);
+	if (copy == NULL) {
+		return ENOMEM;
+	}
+
+	dest->buf = ((char *) copy) + sizeof(struct avro_wrapped_copy);
+	dest->size = length;
+	dest->user_data = copy;
+	dest->free = avro_wrapped_copy_free;
+	dest->copy = avro_wrapped_copy_copy;
+	dest->slice = NULL;
+
+	avro_refcount_set(&copy->refcount, 1);
+	copy->allocated_size = allocated_size;
+	memcpy((void *) dest->buf, buf, length);
+	return 0;
+}
+
+int
+avro_wrapped_buffer_new(avro_wrapped_buffer_t *dest,
+			const void *buf, size_t length)
+{
+	dest->buf = buf;
+	dest->size = length;
+	dest->user_data = NULL;
+	dest->free = NULL;
+	dest->copy = NULL;
+	dest->slice = NULL;
+	return 0;
+}
+
+
+void
+avro_wrapped_buffer_move(avro_wrapped_buffer_t *dest,
+			 avro_wrapped_buffer_t *src)
+{
+	memcpy(dest, src, sizeof(avro_wrapped_buffer_t));
+	memset(src, 0, sizeof(avro_wrapped_buffer_t));
+}
+
+int
+avro_wrapped_buffer_copy(avro_wrapped_buffer_t *dest,
+			 const avro_wrapped_buffer_t *src,
+			 size_t offset, size_t length)
+{
+	if (offset > src->size) {
+		avro_set_error("Invalid offset when slicing buffer");
+		return EINVAL;
+	}
+
+	if ((offset+length) > src->size) {
+		avro_set_error("Invalid length when slicing buffer");
+		return EINVAL;
+	}
+
+	if (src->copy == NULL) {
+		return avro_wrapped_buffer_new_copy(dest, (char *) src->buf + offset, length);
+	} else {
+		return src->copy(dest, src, offset, length);
+	}
+}
+
+int
+avro_wrapped_buffer_slice(avro_wrapped_buffer_t *self,
+			  size_t offset, size_t length)
+{
+	if (offset > self->size) {
+		avro_set_error("Invalid offset when slicing buffer");
+		return EINVAL;
+	}
+
+	if ((offset+length) > self->size) {
+		avro_set_error("Invalid length when slicing buffer");
+		return EINVAL;
+	}
+
+	if (self->slice == NULL) {
+		self->buf  = (char *) self->buf + offset;
+		self->size = length;
+		return 0;
+	} else {
+		return self->slice(self, offset, length);
+	}
+}
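
The defaults above make the wrapped-buffer API safe for zero-copy use: a wrapper with no callbacks owns nothing, slicing merely narrows the window in place, and avro_wrapped_buffer_new_copy allocates a single refcounted block so that copies of the copy share storage. A short sketch of the zero-copy path (illustrative only):

static void
wrap_literal_example(void)
{
	avro_wrapped_buffer_t  buf;

	avro_wrapped_buffer_new(&buf, "hello, world", 12);	/* owns nothing */
	avro_wrapped_buffer_slice(&buf, 7, 5);	/* buf now views "world" */

	/* buf.free is NULL for an unowned wrap, so there is nothing
	 * to release when we are done with the view. */
}
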
diff --git a/lang/c/tests/.gitignore b/lang/c/tests/.gitignore
new file mode 100644
index 0000000..534eb06
--- /dev/null
+++ b/lang/c/tests/.gitignore
@@ -0,0 +1,9 @@
+generate_interop_data
+performance
+test_avro_data
+test_avro_schema
+test_avro_schema_names
+test_avro_values
+test_cpp
+test_data_structures
+test_interop_data
diff --git a/lang/c/tests/CMakeLists.txt b/lang/c/tests/CMakeLists.txt
new file mode 100644
index 0000000..0f2c993
--- /dev/null
+++ b/lang/c/tests/CMakeLists.txt
@@ -0,0 +1,64 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+macro(add_avro_executable name)
+    set(source "${ARGV1}")
+    if (NOT source)
+        set(source "${name}.c")
+    endif (NOT source)
+    add_executable(${name} ${source})
+    target_link_libraries(${name} avro-static)
+endmacro(add_avro_executable)
+
+macro(add_avro_test name)
+    add_avro_executable(${name} ${ARGN})
+    if (WIN32)
+        set(exec_name ${CMAKE_CURRENT_BINARY_DIR}/Debug/${name}.exe)
+    else (WIN32)
+        set(exec_name ${CMAKE_CURRENT_BINARY_DIR}/${name})
+    endif (WIN32)
+
+    add_test(${name}
+        ${CMAKE_COMMAND} -E chdir ${AvroC_SOURCE_DIR}/tests
+        ${exec_name}
+    )
+endmacro(add_avro_test)
+
+add_avro_executable(generate_interop_data)
+add_avro_executable(performance)
+add_avro_executable(test_interop_data)
+
+add_avro_test(test_data_structures)
+add_avro_test(test_avro_schema)
+add_avro_test(test_avro_schema_names)
+add_avro_test(test_avro_values)
+add_avro_test(test_avro_968)
+add_avro_test(test_avro_984)
+add_avro_test(test_avro_1034)
+add_avro_test(test_avro_1084)
+add_avro_test(test_avro_1087)
+add_avro_test(test_avro_1165)
+add_avro_test(test_avro_1237)
+add_avro_test(test_avro_1238)
+add_avro_test(test_avro_1279)
+add_avro_test(test_avro_1405)
+add_avro_test(test_avro_data)
+add_avro_test(test_refcount)
+add_avro_test(test_cpp test_cpp.cpp)
+add_avro_test(test_avro_1379)
diff --git a/lang/c/tests/avro-1237-bad-union-discriminant.avro b/lang/c/tests/avro-1237-bad-union-discriminant.avro
new file mode 100644
index 0000000..6dc539e
Binary files /dev/null and b/lang/c/tests/avro-1237-bad-union-discriminant.avro differ
diff --git a/lang/c/tests/avro-1237-good.avro b/lang/c/tests/avro-1237-good.avro
new file mode 100644
index 0000000..336dc28
Binary files /dev/null and b/lang/c/tests/avro-1237-good.avro differ
diff --git a/lang/c/tests/avro-1238-good.avro b/lang/c/tests/avro-1238-good.avro
new file mode 100644
index 0000000..336dc28
Binary files /dev/null and b/lang/c/tests/avro-1238-good.avro differ
diff --git a/lang/c/tests/avro-1238-truncated.avro b/lang/c/tests/avro-1238-truncated.avro
new file mode 100644
index 0000000..f48d54d
Binary files /dev/null and b/lang/c/tests/avro-1238-truncated.avro differ
diff --git a/lang/c/tests/avro-1279-codec.avro b/lang/c/tests/avro-1279-codec.avro
new file mode 100644
index 0000000..dd24230
Binary files /dev/null and b/lang/c/tests/avro-1279-codec.avro differ
diff --git a/lang/c/tests/avro-1279-no-codec.avro b/lang/c/tests/avro-1279-no-codec.avro
new file mode 100644
index 0000000..4099de5
Binary files /dev/null and b/lang/c/tests/avro-1279-no-codec.avro differ
diff --git a/lang/c/tests/generate_interop_data.c b/lang/c/tests/generate_interop_data.c
new file mode 100644
index 0000000..b6a6c9b
--- /dev/null
+++ b/lang/c/tests/generate_interop_data.c
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro.h"
+#include "avro_private.h"
+#include <stdio.h>
+#include <stdlib.h>
+
+int main(int argc, char *argv[])
+{
+	int rval;
+	avro_file_writer_t file_writer;
+	avro_file_reader_t file_reader;
+	char outpath[128];
+	FILE *fp;
+	char jsontext[16 * 1024];
+	avro_schema_t schema;
+	avro_schema_error_t schema_error;
+	avro_datum_t interop;
+	avro_datum_t array_datum;
+	avro_datum_t node_datum;
+	avro_datum_t union_datum;
+	avro_datum_t out_datum;
+	enum Kind {
+		KIND_A,
+		KIND_B,
+		KIND_C
+	};
+
+	if (argc != 3) {
+		exit(EXIT_FAILURE);
+	}
+	snprintf(outpath, sizeof(outpath), "%s/c.avro", argv[2]);
+	fprintf(stderr, "Writing to %s\n", outpath);
+
+	fp = fopen(argv[1], "r");
+	if (fp == NULL) {
+		fprintf(stderr, "Cannot open schema file %s\n", argv[1]);
+		exit(EXIT_FAILURE);
+	}
+	rval = fread(jsontext, 1, sizeof(jsontext) - 1, fp);
+	fclose(fp);
+	jsontext[rval] = '\0';
+
+	check(rval,
+	      avro_schema_from_json(jsontext, rval, &schema, &schema_error));
+	check(rval, avro_file_writer_create(outpath, schema, &file_writer));
+
+	/* TODO: create a method for generating random data from schema */
+	interop = avro_record(schema);
+	avro_record_set(interop, "intField", avro_int32(42));
+	avro_record_set(interop, "longField", avro_int64(4242));
+	avro_record_set(interop, "stringField",
+			avro_givestring("Follow your bliss.", NULL));
+	avro_record_set(interop, "boolField", avro_boolean(1));
+	avro_record_set(interop, "floatField", avro_float(3.14159265));
+	avro_record_set(interop, "doubleField", avro_double(2.71828183));
+	avro_record_set(interop, "bytesField", avro_bytes("abcd", 4));
+	avro_record_set(interop, "nullField", avro_null());
+
+	avro_schema_t  array_schema = avro_schema_get_subschema(schema, "arrayField");
+	array_datum = avro_array(array_schema);
+	avro_array_append_datum(array_datum, avro_double(1.0));
+	avro_array_append_datum(array_datum, avro_double(2.0));
+	avro_array_append_datum(array_datum, avro_double(3.0));
+	avro_record_set(interop, "arrayField", array_datum);
+
+	avro_schema_t  map_schema = avro_schema_get_subschema(schema, "mapField");
+	avro_record_set(interop, "mapField", avro_map(map_schema));
+
+	avro_schema_t  union_schema = avro_schema_get_subschema(schema, "unionField");
+	union_datum = avro_union(union_schema, 1, avro_double(1.61803399));
+	avro_record_set(interop, "unionField", union_datum);
+
+	avro_schema_t  enum_schema = avro_schema_get_subschema(schema, "enumField");
+	avro_record_set(interop, "enumField", avro_enum(enum_schema, KIND_A));
+
+	avro_schema_t  fixed_schema = avro_schema_get_subschema(schema, "fixedField");
+	avro_record_set(interop, "fixedField",
+			avro_fixed(fixed_schema, "1234567890123456", 16));
+
+	avro_schema_t  node_schema = avro_schema_get_subschema(schema, "recordField");
+	node_datum = avro_record(node_schema);
+	avro_record_set(node_datum, "label",
+			avro_givestring("If you label me, you negate me.", NULL));
+	avro_schema_t  children_schema = avro_schema_get_subschema(node_schema, "children");
+	avro_record_set(node_datum, "children", avro_array(children_schema));
+	avro_record_set(interop, "recordField", node_datum);
+
+	rval = avro_file_writer_append(file_writer, interop);
+	if (rval) {
+		fprintf(stderr, "Unable to append data to interop file!\n");
+		exit(EXIT_FAILURE);
+	} else {
+		fprintf(stderr, "Successfully appended datum to file\n");
+	}
+
+	check(rval, avro_file_writer_close(file_writer));
+	fprintf(stderr, "Closed writer.\n");
+
+	check(rval, avro_file_reader(outpath, &file_reader));
+	fprintf(stderr, "Re-reading datum to verify\n");
+	check(rval, avro_file_reader_read(file_reader, NULL, &out_datum));
+	fprintf(stderr, "Verifying datum...");
+	if (!avro_datum_equal(interop, out_datum)) {
+		fprintf(stderr, "fail!\n");
+		exit(EXIT_FAILURE);
+	}
+	fprintf(stderr, "ok\n");
+	check(rval, avro_file_reader_close(file_reader));
+	fprintf(stderr, "Closed reader.\n");
+	return 0;
+}
diff --git a/lang/c/tests/msdirent.h b/lang/c/tests/msdirent.h
new file mode 100644
index 0000000..445d040
--- /dev/null
+++ b/lang/c/tests/msdirent.h
@@ -0,0 +1,372 @@
+/*****************************************************************************
+ * dirent.h - dirent API for Microsoft Visual Studio
+ *
+ * Copyright (C) 2006 Toni Ronkko
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining
+ * a copy of this software and associated documentation files (the
+ * ``Software''), to deal in the Software without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Software, and to
+ * permit persons to whom the Software is furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included
+ * in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL TONI RONKKO BE LIABLE FOR ANY CLAIM, DAMAGES OR
+ * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ * OTHER DEALINGS IN THE SOFTWARE.
+ *
+ * Mar 15, 2011, Toni Ronkko
+ * Defined FILE_ATTRIBUTE_DEVICE for MSVC 6.0.
+ *
+ * Aug 11, 2010, Toni Ronkko
+ * Added d_type and d_namlen fields to dirent structure.  The former is
+ * especially useful for determining whether a directory entry represents a
+ * file or a directory.  For more information, see
+ * http://www.delorie.com/gnu/docs/glibc/libc_270.html
+ *
+ * Aug 11, 2010, Toni Ronkko
+ * Improved conformance to the standards.  For example, errno is now set
+ * properly on failure and assert() is never used.  Thanks to Peter Brockam
+ * for suggestions.
+ *
+ * Aug 11, 2010, Toni Ronkko
+ * Fixed a bug in rewinddir(): when using relative directory names, change
+ * of working directory no longer causes rewinddir() to fail.
+ *
+ * Dec 15, 2009, John Cunningham
+ * Added rewinddir member function
+ *
+ * Jan 18, 2008, Toni Ronkko
+ * Using FindFirstFileA and WIN32_FIND_DATAA to avoid converting string
+ * between multi-byte and unicode representations.  This makes the
+ * code simpler and also allows the code to be compiled under MingW.  Thanks
+ * to Azriel Fasten for the suggestion.
+ *
+ * Mar 4, 2007, Toni Ronkko
+ * Bug fix: due to the strncpy_s() function this file only compiled in
+ * Visual Studio 2005.  Using the new string functions only when the
+ * compiler version allows.
+ *
+ * Nov  2, 2006, Toni Ronkko
+ * Major update: removed support for Watcom C, MS-DOS and Turbo C to
+ * simplify the file, updated the code to compile cleanly on Visual
+ * Studio 2005 with both unicode and multi-byte character strings,
+ * removed rewinddir() as it had a bug.
+ *
+ * Aug 20, 2006, Toni Ronkko
+ * Removed all remarks about MSVC 1.0, which is antiquated now.  Simplified
+ * comments by removing SGML tags.
+ *
+ * May 14 2002, Toni Ronkko
+ * Embedded the function definitions directly to the header so that no
+ * source modules need to be included in the Visual Studio project.  Removed
+ * all the dependencies to other projects so that this very header can be
+ * used independently.
+ *
+ * May 28 1998, Toni Ronkko
+ * First version.
+ *****************************************************************************/
+#ifndef DIRENT_H
+#define DIRENT_H
+
+#define WIN32_LEAN_AND_MEAN
+#include <windows.h>
+#include <string.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <errno.h>
+
+/* Entries missing from MSVC 6.0 */
+#if !defined(FILE_ATTRIBUTE_DEVICE)
+# define FILE_ATTRIBUTE_DEVICE 0x40
+#endif
+
+/* File type and permission flags for stat() */
+#if defined(_MSC_VER)  &&  !defined(S_IREAD)
+# define S_IFMT   _S_IFMT                      /* file type mask */
+# define S_IFDIR  _S_IFDIR                     /* directory */
+# define S_IFCHR  _S_IFCHR                     /* character device */
+# define S_IFFIFO _S_IFFIFO                    /* pipe */
+# define S_IFREG  _S_IFREG                     /* regular file */
+# define S_IREAD  _S_IREAD                     /* read permission */
+# define S_IWRITE _S_IWRITE                    /* write permission */
+# define S_IEXEC  _S_IEXEC                     /* execute permission */
+#endif
+#define S_IFBLK   0                            /* block device */
+#define S_IFLNK   0                            /* link */
+#define S_IFSOCK  0                            /* socket */
+
+#if defined(_MSC_VER)
+# define S_IRUSR  S_IREAD                      /* read, user */
+# define S_IWUSR  S_IWRITE                     /* write, user */
+# define S_IXUSR  0                            /* execute, user */
+# define S_IRGRP  0                            /* read, group */
+# define S_IWGRP  0                            /* write, group */
+# define S_IXGRP  0                            /* execute, group */
+# define S_IROTH  0                            /* read, others */
+# define S_IWOTH  0                            /* write, others */
+# define S_IXOTH  0                            /* execute, others */
+#endif
+
+/* Indicates that d_type field is available in dirent structure */
+#define _DIRENT_HAVE_D_TYPE
+
+/* File type flags for d_type */
+#define DT_UNKNOWN  0
+#define DT_REG      S_IFREG
+#define DT_DIR      S_IFDIR
+#define DT_FIFO     S_IFFIFO
+#define DT_SOCK     S_IFSOCK
+#define DT_CHR      S_IFCHR
+#define DT_BLK      S_IFBLK
+
+/* Macros for converting between st_mode and d_type */
+#define IFTODT(mode) ((mode) & S_IFMT)
+#define DTTOIF(type) (type)
+
+/*
+ * File type macros.  Note that block devices, sockets and links cannot be
+ * distinguished on Windows and the macros S_ISBLK, S_ISSOCK and S_ISLNK are
+ * only defined for compatibility.  These macros should always return false
+ * on Windows.
+ */
+#define	S_ISFIFO(mode) (((mode) & S_IFMT) == S_IFFIFO)
+#define	S_ISDIR(mode)  (((mode) & S_IFMT) == S_IFDIR)
+#define	S_ISREG(mode)  (((mode) & S_IFMT) == S_IFREG)
+#define	S_ISLNK(mode)  (((mode) & S_IFMT) == S_IFLNK)
+#define	S_ISSOCK(mode) (((mode) & S_IFMT) == S_IFSOCK)
+#define	S_ISCHR(mode)  (((mode) & S_IFMT) == S_IFCHR)
+#define	S_ISBLK(mode)  (((mode) & S_IFMT) == S_IFBLK)
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+typedef struct dirent
+{
+   char d_name[MAX_PATH + 1];                  /* File name */
+   size_t d_namlen;                            /* Length of name without \0 */
+   int d_type;                                 /* File type */
+} dirent;
+
+
+typedef struct DIR
+{
+   dirent           curentry;                  /* Current directory entry */
+   WIN32_FIND_DATAA find_data;                 /* Private file data */
+   int              cached;                    /* True if data is valid */
+   HANDLE           search_handle;             /* Win32 search handle */
+   char             patt[MAX_PATH + 3];        /* Initial directory name */
+} DIR;
+
+
+/* Forward declarations */
+static DIR *opendir(const char *dirname);
+static struct dirent *readdir(DIR *dirp);
+static int closedir(DIR *dirp);
+static void rewinddir(DIR* dirp);
+
+
+/* Use the new safe string functions introduced in Visual Studio 2005 */
+#if defined(_MSC_VER) && _MSC_VER >= 1400
+# define DIRENT_STRNCPY(dest,src,size) strncpy_s((dest),(size),(src),_TRUNCATE)
+#else
+# define DIRENT_STRNCPY(dest,src,size) strncpy((dest),(src),(size))
+#endif
+
+/* Set errno variable */
+#if defined(_MSC_VER)
+#define DIRENT_SET_ERRNO(x) _set_errno (x)
+#else
+#define DIRENT_SET_ERRNO(x) (errno = (x))
+#endif
+
+
+/*****************************************************************************
+ * Open directory stream DIRNAME for read and return a pointer to the
+ * internal working area that is used to retrieve individual directory
+ * entries.
+ */
+static DIR *opendir(const char *dirname)
+{
+   DIR *dirp;
+
+   /* ensure that the resulting search pattern will be a valid file name */
+   if (dirname == NULL) {
+      DIRENT_SET_ERRNO (ENOENT);
+      return NULL;
+   }
+   if (strlen (dirname) + 3 >= MAX_PATH) {
+      DIRENT_SET_ERRNO (ENAMETOOLONG);
+      return NULL;
+   }
+
+   /* construct new DIR structure */
+   dirp = (DIR*) malloc (sizeof (struct DIR));
+   if (dirp != NULL) {
+      int error;
+
+      /*
+       * Convert relative directory name to an absolute one.  This
+       * allows rewinddir() to function correctly when the current working
+       * directory is changed between opendir() and rewinddir().
+       */
+      if (GetFullPathNameA (dirname, MAX_PATH, dirp->patt, NULL)) {
+         char *p;
+
+         /* append the search pattern "\\*\0" to the directory name */
+         p = strchr (dirp->patt, '\0');
+         if (dirp->patt < p  &&  *(p-1) != '\\'  &&  *(p-1) != ':') {
+           *p++ = '\\';
+         }
+         *p++ = '*';
+         *p = '\0';
+
+         /* open directory stream and retrieve the first entry */
+         dirp->search_handle = FindFirstFileA (dirp->patt, &dirp->find_data);
+         if (dirp->search_handle != INVALID_HANDLE_VALUE) {
+            /* a directory entry is now waiting in memory */
+            dirp->cached = 1;
+            error = 0;
+         } else {
+            /* search pattern is not a directory name? */
+            DIRENT_SET_ERRNO (ENOENT);
+            error = 1;
+         }
+      } else {
+         /* buffer too small */
+         DIRENT_SET_ERRNO (ENOMEM);
+         error = 1;
+      }
+
+      if (error) {
+         free (dirp);
+         dirp = NULL;
+      }
+   }
+
+   return dirp;
+}
+
+
+/*****************************************************************************
+ * Read a directory entry and return a pointer to a dirent structure
+ * containing the name of the entry in the d_name field.  Entries returned
+ * by this function include regular files, sub-directories and the
+ * pseudo-directories "." and ".."; volume labels, hidden files and system
+ * files may also be returned.
+ */
+static struct dirent *readdir(DIR *dirp)
+{
+   DWORD attr;
+   if (dirp == NULL) {
+      /* directory stream did not open */
+      DIRENT_SET_ERRNO (EBADF);
+      return NULL;
+   }
+
+   /* get next directory entry */
+   if (dirp->cached != 0) {
+      /* a valid directory entry already in memory */
+      dirp->cached = 0;
+   } else {
+      /* get the next directory entry from stream */
+      if (dirp->search_handle == INVALID_HANDLE_VALUE) {
+         return NULL;
+      }
+      if (FindNextFileA (dirp->search_handle, &dirp->find_data) == FALSE) {
+         /* the last entry has been processed or an error occurred */
+         FindClose (dirp->search_handle);
+         dirp->search_handle = INVALID_HANDLE_VALUE;
+         return NULL;
+      }
+   }
+
+   /* copy as a multibyte character string */
+   DIRENT_STRNCPY ( dirp->curentry.d_name,
+             dirp->find_data.cFileName,
+             sizeof(dirp->curentry.d_name) );
+   dirp->curentry.d_name[MAX_PATH] = '\0';
+
+   /* compute the length of name */
+   dirp->curentry.d_namlen = strlen (dirp->curentry.d_name);
+
+   /* determine file type */
+   attr = dirp->find_data.dwFileAttributes;
+   if ((attr & FILE_ATTRIBUTE_DEVICE) != 0) {
+      dirp->curentry.d_type = DT_CHR;
+   } else if ((attr & FILE_ATTRIBUTE_DIRECTORY) != 0) {
+      dirp->curentry.d_type = DT_DIR;
+   } else {
+      dirp->curentry.d_type = DT_REG;
+   }
+   return &dirp->curentry;
+}
+
+
+/*****************************************************************************
+ * Close directory stream opened by opendir() function.  Close of the
+ * directory stream invalidates the DIR structure as well as any previously
+ * read directory entry.
+ */
+static int closedir(DIR *dirp)
+{
+   if (dirp == NULL) {
+      /* invalid directory stream */
+      DIRENT_SET_ERRNO (EBADF);
+      return -1;
+   }
+
+   /* release search handle */
+   if (dirp->search_handle != INVALID_HANDLE_VALUE) {
+      FindClose (dirp->search_handle);
+      dirp->search_handle = INVALID_HANDLE_VALUE;
+   }
+
+   /* release directory structure */
+   free (dirp);
+   return 0;
+}
+
+
+/*****************************************************************************
+ * Resets the position of the directory stream to which dirp refers to the
+ * beginning of the directory.  It also causes the directory stream to refer
+ * to the current state of the corresponding directory, as a call to opendir()
+ * would have done.  If dirp does not refer to a directory stream, the effect
+ * is undefined.
+ */
+static void rewinddir(DIR* dirp)
+{
+   if (dirp != NULL) {
+      /* release search handle */
+      if (dirp->search_handle != INVALID_HANDLE_VALUE) {
+         FindClose (dirp->search_handle);
+      }
+
+      /* open new search handle and retrieve the first entry */
+      dirp->search_handle = FindFirstFileA (dirp->patt, &dirp->find_data);
+      if (dirp->search_handle != INVALID_HANDLE_VALUE) {
+         /* a directory entry is now waiting in memory */
+         dirp->cached = 1;
+      } else {
+         /* failed to re-open directory: no directory entry in memory */
+         dirp->cached = 0;
+      }
+   }
+}
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /*DIRENT_H*/
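
For context, the shim above is consumed exactly like POSIX dirent. A minimal
sketch of a consumer, assuming the header is saved as "dirent.h" and compiled
on Windows (a hypothetical list.c, not part of this commit):

#include <stdio.h>
#include "dirent.h"

int main(void)
{
    struct dirent *ent;
    DIR *dir = opendir(".");
    if (dir == NULL) {
        perror("opendir");    /* the shim sets errno on failure */
        return 1;
    }
    while ((ent = readdir(dir)) != NULL) {
        /* under this shim d_type is one of DT_DIR, DT_CHR or DT_REG */
        printf("%c %s\n", ent->d_type == DT_DIR ? 'd' : '-', ent->d_name);
    }
    closedir(dir);
    return 0;
}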
diff --git a/lang/c/tests/performance.c b/lang/c/tests/performance.c
new file mode 100644
index 0000000..c3196fe
--- /dev/null
+++ b/lang/c/tests/performance.c
@@ -0,0 +1,848 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <limits.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+#include "avro.h"
+#include "avro_private.h"
+
+
+/* The following definitions can be used as bitflags. They can also be
+ * passed in as the resolution_type flag to the helper functions.
+ */
+#define USE_MATCHED_SCHEMAS (0x00)
+#define USE_RESOLVED_READER (0x01)
+#define USE_RESOLVED_WRITER (0x02)
+#define USE_BOTH_RESOLVED   (0x03)
+
+
+/*
+ * A series of performance tests.
+ */
+
+typedef void
+(*test_func_t)(unsigned long);
+
+
+void init_rand(void)
+{
+	srand(time(NULL));
+}
+
+double rand_number(double from, double to)
+{
+	double range = to - from;
+	return from + ((double)rand() / (RAND_MAX + 1.0)) * range;
+}
+
+int64_t rand_int64(void)
+{
+	return (int64_t) rand_number(LONG_MIN, LONG_MAX);
+}
+
+int32_t rand_int32(void)
+{
+	return (int32_t) rand_number(INT_MIN, INT_MAX);
+}
+
+
+/**
+ * Tests the single-threaded performance of our reference counting
+ * mechanism.  We create a single datum, and then increment and
+ * decrement its reference count many times.
+ */
+
+static void
+test_refcount(unsigned long num_tests)
+{
+	unsigned long  i;
+
+	avro_datum_t  datum = avro_int32(42);
+	for (i = 0; i < num_tests; i++) {
+		avro_datum_incref(datum);
+		avro_datum_decref(datum);
+	}
+	avro_datum_decref(datum);
+}
+
+
+/**
+ * Tests the performance of serializing and deserializing a somewhat
+ * complex record type using the legacy datum API.
+ */
+
+static void
+test_nested_record_datum(unsigned long num_tests)
+{
+	static const char  *schema_json =
+		"{"
+		"  \"type\": \"record\","
+		"  \"name\": \"test\","
+		"  \"fields\": ["
+		"    { \"name\": \"i\", \"type\": \"int\" },"
+		"    { \"name\": \"l\", \"type\": \"long\" },"
+		"    { \"name\": \"s\", \"type\": \"string\" },"
+		"    {"
+		"      \"name\": \"subrec\","
+		"      \"type\": {"
+		"        \"type\": \"record\","
+		"        \"name\": \"sub\","
+		"        \"fields\": ["
+		"          { \"name\": \"f\", \"type\": \"float\" },"
+		"          { \"name\": \"d\", \"type\": \"double\" }"
+		"        ]"
+		"      }"
+		"    }"
+		"  ]"
+		"}";
+
+	static const char *strings[] = {
+		"Four score and seven years ago",
		"our fathers brought forth on this continent",
+		"a new nation", "conceived in Liberty",
+		"and dedicated to the proposition that all men are created equal."
+	};
+	static const unsigned int  NUM_STRINGS =
+	  sizeof(strings) / sizeof(strings[0]);
+
+	int  rc;
+	static char  buf[4096];
+	avro_reader_t  reader = avro_reader_memory(buf, sizeof(buf));
+	avro_writer_t  writer = avro_writer_memory(buf, sizeof(buf));
+
+	avro_schema_t  schema = NULL;
+	avro_schema_error_t  error = NULL;
+	avro_schema_from_json(schema_json, strlen(schema_json),
+						  &schema, &error);
+
+	unsigned long  i;
+
+	avro_datum_t  in = avro_datum_from_schema(schema);
+
+	for (i = 0; i < num_tests; i++) {
+		avro_record_set_field_value(rc, in, int32, "i", rand_int32());
+		avro_record_set_field_value(rc, in, int64, "l", rand_int64());
+		avro_record_set_field_value(rc, in, givestring, "s",
+									strings[i % NUM_STRINGS], NULL);
+
+		avro_datum_t  subrec = NULL;
+		avro_record_get(in, "subrec", &subrec);
		avro_record_set_field_value(rc, subrec, float, "f", rand_number(-1e10, 1e10));
		avro_record_set_field_value(rc, subrec, double, "d", rand_number(-1e10, 1e10));
+
+		avro_writer_reset(writer);
+		avro_write_data(writer, schema, in);
+
+		avro_datum_t  out = NULL;
+
+		avro_reader_reset(reader);
+		avro_read_data(reader, schema, schema, &out);
+
+		avro_datum_equal(in, out);
+		avro_datum_decref(out);
+	}
+
+	avro_datum_decref(in);
+	avro_schema_decref(schema);
+	avro_writer_free(writer);
+	avro_reader_free(reader);
+}
+
+
+/**
+ * Tests the performance of serializing and deserializing a somewhat
+ * complex record type using the new value API, retrieving record fields
+ * by index.
+ */
+
+static void
+test_nested_record_value_by_index(unsigned long num_tests)
+{
+	static const char  *schema_json =
+		"{"
+		"  \"type\": \"record\","
+		"  \"name\": \"test\","
+		"  \"fields\": ["
+		"    { \"name\": \"i\", \"type\": \"int\" },"
+		"    { \"name\": \"l\", \"type\": \"long\" },"
+		"    { \"name\": \"s\", \"type\": \"string\" },"
+		"    {"
+		"      \"name\": \"subrec\","
+		"      \"type\": {"
+		"        \"type\": \"record\","
+		"        \"name\": \"sub\","
+		"        \"fields\": ["
+		"          { \"name\": \"f\", \"type\": \"float\" },"
+		"          { \"name\": \"d\", \"type\": \"double\" }"
+		"        ]"
+		"      }"
+		"    }"
+		"  ]"
+		"}";
+
+	static char *strings[] = {
+		"Four score and seven years ago",
		"our fathers brought forth on this continent",
+		"a new nation", "conceived in Liberty",
+		"and dedicated to the proposition that all men are created equal."
+	};
+	static const unsigned int  NUM_STRINGS =
+	  sizeof(strings) / sizeof(strings[0]);
+
+	static char  buf[4096];
+	avro_reader_t  reader = avro_reader_memory(buf, sizeof(buf));
+	avro_writer_t  writer = avro_writer_memory(buf, sizeof(buf));
+
+	avro_schema_t  schema = NULL;
+	avro_schema_error_t  error = NULL;
+	avro_schema_from_json(schema_json, strlen(schema_json),
+						  &schema, &error);
+
+	unsigned long  i;
+
+	avro_value_iface_t  *iface = avro_generic_class_from_schema(schema);
+
+	avro_value_t  val;
+	avro_generic_value_new(iface, &val);
+
+	avro_value_t  out;
+	avro_generic_value_new(iface, &out);
+
+	for (i = 0; i < num_tests; i++) {
+		avro_value_t  field;
+
+		avro_value_get_by_index(&val, 0, &field, NULL);
+		avro_value_set_int(&field, rand_int32());
+
+		avro_value_get_by_index(&val, 1, &field, NULL);
+		avro_value_set_long(&field, rand_int64());
+
+		avro_wrapped_buffer_t  wbuf;
+		avro_wrapped_buffer_new_string(&wbuf, strings[i % NUM_STRINGS]);
+		avro_value_get_by_index(&val, 2, &field, NULL);
+		avro_value_give_string_len(&field, &wbuf);
+
+		avro_value_t  subrec;
+		avro_value_get_by_index(&val, 3, &subrec, NULL);
+
+		avro_value_get_by_index(&subrec, 0, &field, NULL);
+		avro_value_set_float(&field, rand_number(-1e10, 1e10));
+
+		avro_value_get_by_index(&subrec, 1, &field, NULL);
+		avro_value_set_double(&field, rand_number(-1e10, 1e10));
+
+		avro_writer_reset(writer);
+		avro_value_write(writer, &val);
+
+		avro_reader_reset(reader);
+		avro_value_read(reader, &out);
+
+		if (! avro_value_equal_fast(&val, &out) ) {
+			printf("Broken\n");
+			exit (1);
+		}
+	}
+
+	avro_value_decref(&val);
+	avro_value_decref(&out);
+	avro_value_iface_decref(iface);
+	avro_schema_decref(schema);
+	avro_writer_free(writer);
+	avro_reader_free(reader);
+}
+
+
+
+/**
+ * Tests the performance of serializing and deserializing a somewhat
+ * complex record type using the new value API, retrieving record fields
+ * by name.
+ */
+
+static void
+test_nested_record_value_by_name(unsigned long num_tests)
+{
+	static const char  *schema_json =
+		"{"
+		"  \"type\": \"record\","
+		"  \"name\": \"test\","
+		"  \"fields\": ["
+		"    { \"name\": \"i\", \"type\": \"int\" },"
+		"    { \"name\": \"l\", \"type\": \"long\" },"
+		"    { \"name\": \"s\", \"type\": \"string\" },"
+		"    {"
+		"      \"name\": \"subrec\","
+		"      \"type\": {"
+		"        \"type\": \"record\","
+		"        \"name\": \"sub\","
+		"        \"fields\": ["
+		"          { \"name\": \"f\", \"type\": \"float\" },"
+		"          { \"name\": \"d\", \"type\": \"double\" }"
+		"        ]"
+		"      }"
+		"    }"
+		"  ]"
+		"}";
+
+	static char *strings[] = {
+		"Four score and seven years ago",
		"our fathers brought forth on this continent",
+		"a new nation", "conceived in Liberty",
+		"and dedicated to the proposition that all men are created equal."
+	};
+	static const unsigned int  NUM_STRINGS =
+	  sizeof(strings) / sizeof(strings[0]);
+
+	static char  buf[4096];
+	avro_reader_t  reader = avro_reader_memory(buf, sizeof(buf));
+	avro_writer_t  writer = avro_writer_memory(buf, sizeof(buf));
+
+	avro_schema_t  schema = NULL;
+	avro_schema_error_t  error = NULL;
+	avro_schema_from_json(schema_json, strlen(schema_json),
+						  &schema, &error);
+
+	unsigned long  i;
+
+	avro_value_iface_t  *iface = avro_generic_class_from_schema(schema);
+
+	avro_value_t  val;
+	avro_generic_value_new(iface, &val);
+
+	avro_value_t  out;
+	avro_generic_value_new(iface, &out);
+
+	for (i = 0; i < num_tests; i++) {
+		avro_value_t  field;
+
+		avro_value_get_by_name(&val, "i", &field, NULL);
+		avro_value_set_int(&field, rand_int32());
+
+		avro_value_get_by_name(&val, "l", &field, NULL);
+		avro_value_set_long(&field, rand_int64());
+
+		avro_wrapped_buffer_t  wbuf;
+		avro_wrapped_buffer_new_string(&wbuf, strings[i % NUM_STRINGS]);
+		avro_value_get_by_name(&val, "s", &field, NULL);
+		avro_value_give_string_len(&field, &wbuf);
+
+		avro_value_t  subrec;
+		avro_value_get_by_name(&val, "subrec", &subrec, NULL);
+
+		avro_value_get_by_name(&subrec, "f", &field, NULL);
+		avro_value_set_float(&field, rand_number(-1e10, 1e10));
+
+		avro_value_get_by_name(&subrec, "d", &field, NULL);
+		avro_value_set_double(&field, rand_number(-1e10, 1e10));
+
+		avro_writer_reset(writer);
+		avro_value_write(writer, &val);
+
+		avro_reader_reset(reader);
+		avro_value_read(reader, &out);
+
+		if (! avro_value_equal_fast(&val, &out) ) {
+			printf("Broken\n");
+			exit (1);
+		}
+	}
+
+	avro_value_decref(&val);
+	avro_value_decref(&out);
+	avro_value_iface_decref(iface);
+	avro_schema_decref(schema);
+	avro_writer_free(writer);
+	avro_reader_free(reader);
+}
+
+
+
+/**
+ * Helper function to test the performance of serializing and
+ * deserializing a given avro value, using the provided function to
+ * populate the value via the new value API.  Allows testing with
+ * matching schemas or with schema resolution.
+ */
+
+static void
+test_generic_helper( unsigned long num_tests,
+					 int resolution_type,
+					 const char *schema_json,
+					 void (*populate_value_func)(avro_value_t *,
+												 unsigned long)
+					 )
+{
+	static char  buf[4096];
+
+	avro_reader_t  reader = avro_reader_memory(buf, sizeof(buf));
+	avro_writer_t  writer = avro_writer_memory(buf, sizeof(buf));
+
+	avro_schema_t  schema = NULL;
+	avro_schema_error_t  error = NULL;
+	avro_schema_from_json(schema_json, strlen(schema_json),
+						  &schema, &error);
+
+	unsigned long  i;
+
+	avro_value_iface_t  *writer_iface = avro_generic_class_from_schema(schema);
+	avro_value_iface_t  *reader_iface = avro_generic_class_from_schema(schema);
+
+	avro_value_t  val;
+	avro_generic_value_new(writer_iface, &val);
+
+	avro_value_t  out;
+	avro_generic_value_new(reader_iface, &out);
+
+	/* Use resolved reader to resolve schemas while writing data to memory */
+	avro_value_iface_t *resolved_reader_iface = NULL;
+	avro_value_t resolved_reader_value;
+	if ( resolution_type & USE_RESOLVED_READER ) {
+		resolved_reader_iface = avro_resolved_reader_new( schema, schema );
+		avro_resolved_reader_new_value( resolved_reader_iface,
+										&resolved_reader_value );
		avro_resolved_reader_set_source( &resolved_reader_value, &val );
+	}
+
+	/* Use resolved writer to resolve schemas while reading data from memory */
+	avro_value_iface_t *resolved_writer_iface = NULL;
+	avro_value_t resolved_writer_value;
+	if ( resolution_type & USE_RESOLVED_WRITER ) {
+		resolved_writer_iface = avro_resolved_writer_new( schema, schema );
+		avro_resolved_writer_new_value( resolved_writer_iface,
+										&resolved_writer_value );
		avro_resolved_writer_set_dest( &resolved_writer_value, &out );
+	}
+
+	/* Set up pointers */
+	avro_value_t *p_value_to_write_to_memory = NULL;
+	avro_value_t *p_value_to_read_from_memory = NULL;
+
+	if ( resolution_type == USE_MATCHED_SCHEMAS ) {
+		p_value_to_write_to_memory = &val;
+		p_value_to_read_from_memory = &out;
+	}
+	else if ( resolution_type == USE_RESOLVED_READER ) {
+		p_value_to_write_to_memory = &resolved_reader_value;
+		p_value_to_read_from_memory = &out;
+	}
+	else if ( resolution_type == USE_RESOLVED_WRITER ) {
+		p_value_to_write_to_memory = &val;
+		p_value_to_read_from_memory = &resolved_writer_value;
+	}
+	else if ( resolution_type == USE_BOTH_RESOLVED ) {
+		p_value_to_write_to_memory = &resolved_reader_value;
+		p_value_to_read_from_memory = &resolved_writer_value;
+	}
+
+	/* Perform the tests */
+	for (i = 0; i < num_tests; i++) {
+
+		avro_value_reset(&val);
+
+		/* Execute the function to populate the Avro Value */
+		(*populate_value_func)(&val, i);
+
+		avro_writer_reset(writer);
+		avro_value_write(writer, p_value_to_write_to_memory);
+
+		avro_reader_reset(reader);
+		avro_value_read(reader, p_value_to_read_from_memory);
+
+		if (! avro_value_equal_fast(&val, &out) ) {
+			printf("Broken\n");
+			exit (1);
+		}
+	}
+
+	avro_value_decref(&val);
+	avro_value_decref(&out);
+	if ( resolution_type & USE_RESOLVED_READER ) {
+		avro_value_decref(&resolved_reader_value);
+		avro_value_iface_decref(resolved_reader_iface);
+	}
+	if ( resolution_type & USE_RESOLVED_WRITER ) {
+		avro_value_decref(&resolved_writer_value);
+		avro_value_iface_decref(resolved_writer_iface);
+	}
+	avro_value_iface_decref(writer_iface);
+	avro_value_iface_decref(reader_iface);
+	avro_schema_decref(schema);
+	avro_writer_free(writer);
+	avro_reader_free(reader);
+}
+
+
+
+
+/**
+ * Helper function to populate a somewhat complex record type using
+ * the new value API, retrieving record fields by index.
+ */
+
+static const char  *complex_record_schema_json =
+		"{"
+		"  \"type\": \"record\","
+		"  \"name\": \"test\","
+		"  \"fields\": ["
+		"    { \"name\": \"i\", \"type\": \"int\" },"
+		"    { \"name\": \"l\", \"type\": \"long\" },"
+		"    { \"name\": \"s\", \"type\": \"string\" },"
+		"    {"
+		"      \"name\": \"subrec\","
+		"      \"type\": {"
+		"        \"type\": \"record\","
+		"        \"name\": \"sub\","
+		"        \"fields\": ["
+		"          { \"name\": \"f\", \"type\": \"float\" },"
+		"          { \"name\": \"d\", \"type\": \"double\" }"
+		"        ]"
+		"      }"
+		"    }"
+		"  ]"
+		"}";
+
+
+
+static void
+populate_complex_record(avro_value_t *p_val, unsigned long i)
+{
+	static char *strings[] = {
+		"Four score and seven years ago",
		"our fathers brought forth on this continent",
+		"a new nation", "conceived in Liberty",
+		"and dedicated to the proposition that all men are created equal."
+	};
+	static const unsigned int  NUM_STRINGS =
+	  sizeof(strings) / sizeof(strings[0]);
+
+	avro_value_t  field;
+
+	avro_value_get_by_index(p_val, 0, &field, NULL);
+	avro_value_set_int(&field, rand_int32());
+
+	avro_value_get_by_index(p_val, 1, &field, NULL);
+	avro_value_set_long(&field, rand_int64());
+
+	avro_wrapped_buffer_t  wbuf;
+	avro_wrapped_buffer_new_string(&wbuf, strings[i % NUM_STRINGS]);
+	avro_value_get_by_index(p_val, 2, &field, NULL);
+	avro_value_give_string_len(&field, &wbuf);
+
+	avro_value_t  subrec;
+	avro_value_get_by_index(p_val, 3, &subrec, NULL);
+
+	avro_value_get_by_index(&subrec, 0, &field, NULL);
+	avro_value_set_float(&field, rand_number(-1e10, 1e10));
+
+	avro_value_get_by_index(&subrec, 1, &field, NULL);
+	avro_value_set_double(&field, rand_number(-1e10, 1e10));
+
+}
+
+
+
+/**
+ * Tests the performance of serializing and deserializing a somewhat
+ * complex record type using the new value API, retrieving record
+ * fields by index.  The functionality is almost identical to
+ * test_nested_record_value_by_index(); however, there may be some
+ * overhead from using function calls instead of inline code, and
+ * from running some additional "if" statements.
+ */
+
+static void
+test_nested_record_value_by_index_matched_schemas(unsigned long num_tests)
+{
+	test_generic_helper(num_tests,
+						USE_MATCHED_SCHEMAS,
+						complex_record_schema_json,
+						populate_complex_record);
+}
+
+
+/**
+ * Tests the performance of serializing and deserializing a somewhat
+ * complex record type using the new value API, retrieving record
+ * fields by index. Uses a resolved_writer to resolve between two
+ * (identical) schemas when reading the array.
+ */
+
+static void
+test_nested_record_value_by_index_resolved_writer(unsigned long num_tests)
+{
+	test_generic_helper(num_tests,
+						USE_RESOLVED_WRITER,
+						complex_record_schema_json,
+						populate_complex_record);
+}
+
+
+
+/**
+ * Tests the performance of serializing and deserializing a somewhat
+ * complex record type using the new value API, retrieving record
+ * fields by index. Uses a resolved_reader to resolve between two
+ * (identical) schemas when writing the array.
+ */
+
+static void
+test_nested_record_value_by_index_resolved_reader(unsigned long num_tests)
+{
+	test_generic_helper(num_tests,
+						USE_RESOLVED_READER,
+						complex_record_schema_json,
+						populate_complex_record);
+}
+
+
+
+/**
+ * Helper function to test the performance of serializing and
+ * deserializing a simple array using the new value API. Allows
+ * testing using matching schemas or using schema resolution.
+ */
+
+static const char  *simple_array_schema_json =
+  "{\"name\": \"a\", \"type\": \"array\", \"items\":\"long\"}";
+
+static void
+populate_simple_array(avro_value_t *p_val, unsigned long i)
+{
+	const size_t array_length = 21;
+	avro_value_t  field;
+	size_t idx;
+	size_t dummy_index;
+	(void) i;
+
+	for ( idx = 0; idx < array_length; idx++ ) {
+		avro_value_append(p_val, &field, &dummy_index);
+		avro_value_set_long(&field, rand_int64());
+	}
+}
+
+
+
+/**
+ * Tests the performance of serializing and deserializing a simple
+ * array using the new value API.
+ */
+
+static void
+test_simple_array(unsigned long num_tests)
+{
+	test_generic_helper(num_tests,
+						USE_MATCHED_SCHEMAS,
+						simple_array_schema_json,
+						populate_simple_array);
+}
+
+
+
+/**
+ * Tests the performance of serializing and deserializing a simple
+ * array using the new value API, using a resolved writer to resolve
+ * between (identical) reader and writer schemas, when reading the
+ * array.
+ */
+static void
+test_simple_array_resolved_writer(unsigned long num_tests)
+{
+	test_generic_helper(num_tests,
+						USE_RESOLVED_WRITER,
+						simple_array_schema_json,
+						populate_simple_array);
+}
+
+
+
+/**
+ * Tests the performance of serializing and deserializing a simple
+ * array using the new value API, using a resolved reader to resolve
+ * between (identical) reader and writer schemas, when writing the
+ * array.
+ */
+
+static void
+test_simple_array_resolved_reader(unsigned long num_tests)
+{
+	test_generic_helper(num_tests,
+						USE_RESOLVED_READER,
+						simple_array_schema_json,
+						populate_simple_array);
+}
+
+
+
+
+/**
+ * Helper function to test the performance of serializing and
+ * deserializing a nested array using the new value API. Allows
+ * testing using matching schemas or using schema resolution.
+ */
+
+static const char  *nested_array_schema_json =
+  "{\"type\":\"array\", \"items\": {\"type\": \"array\", \"items\": \"long\"}}";
+
+
+static void
+populate_nested_array(avro_value_t *p_val, unsigned long i)
+{
+
+	const size_t array_length = 7;
+	const size_t subarray_length = 3;
+	avro_value_t  subarray;
+	avro_value_t  field;
+	size_t idx;
+	size_t jdx;
+	size_t dummy_index;
+	(void) i;
+
+	for ( idx = 0; idx < array_length; idx++ ) {
+		avro_value_append(p_val, &subarray, &dummy_index);
+		for ( jdx = 0; jdx < subarray_length; jdx ++ ) {
+		  avro_value_append(&subarray, &field, &dummy_index);
+		  avro_value_set_long(&field, rand_int64());
+		}
+	}
+}
+
+
+/**
+ * Tests the performance of serializing and deserializing a nested
+ * array using the new value API.
+ */
+
+static void
+test_nested_array(unsigned long num_tests)
+{
+	test_generic_helper(num_tests,
+						USE_MATCHED_SCHEMAS,
+						nested_array_schema_json,
+						populate_nested_array);
+}
+
+
+/**
+ * Tests the performance of serializing and deserializing a nested
+ * array using the new value API, using a resolved writer to resolve
+ * between (identical) reader and writer schemas, when reading the
+ * array.
+ */
+
+static void
+test_nested_array_resolved_writer(unsigned long num_tests)
+{
+	test_generic_helper(num_tests,
+						USE_RESOLVED_WRITER,
+						nested_array_schema_json,
+						populate_nested_array);
+}
+
+
+/**
+ * Tests the performance of serializing and deserializing a nested
+ * array using the new value API, using a resolved reader to resolve
+ * between (identical) reader and writer schemas, when writing the
+ * array.
+ */
+
+static void
+test_nested_array_resolved_reader(unsigned long num_tests)
+{
+	test_generic_helper(num_tests,
+						USE_RESOLVED_READER,
+						nested_array_schema_json,
+						populate_nested_array);
+}
+
+
+
+/**
+ * Test harness
+ */
+
+#define NUM_RUNS  3
+
+int
+main(int argc, char **argv)
+{
+	AVRO_UNUSED(argc);
+	AVRO_UNUSED(argv);
+
+	init_rand();
+
+	unsigned int  i;
+	struct avro_tests {
+		const char  *name;
+		unsigned long  num_tests;
+		test_func_t  func;
+	} tests[] = {
+		{ "refcount", 100000000,
+		  test_refcount },
+		{ "nested record (legacy)", 100000,
+		  test_nested_record_datum },
+		{ "nested record (value by index)", 1000000,
+		  test_nested_record_value_by_index },
+		{ "nested record (value by name)", 1000000,
+		  test_nested_record_value_by_name },
+		{ "nested record (value by index) matched schemas", 1000000,
+		  test_nested_record_value_by_index_matched_schemas },
+		{ "nested record (value by index) resolved writer", 1000000,
+		  test_nested_record_value_by_index_resolved_writer },
+		{ "nested record (value by index) resolved reader", 1000000,
+		  test_nested_record_value_by_index_resolved_reader },
+		{ "simple array matched schemas", 250000,
+		  test_simple_array },
+		{ "simple array resolved writer", 250000,
+		  test_simple_array_resolved_writer },
+		{ "simple array resolved reader", 250000,
+		  test_simple_array_resolved_reader },
+		{ "nested array matched schemas", 250000,
+		  test_nested_array },
+		{ "nested array resolved writer", 250000,
+		  test_nested_array_resolved_writer },
+		{ "nested array resolved reader", 250000,
+		  test_nested_array_resolved_reader },
+	};
+
+	for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++) {
+		fprintf(stderr, "**** Running %s ****\n  %lu tests per run\n",
+			tests[i].name, tests[i].num_tests);
+		unsigned int  run;
+
+		double  sum = 0.0;
+
+		for (run = 1; run <= NUM_RUNS; run++) {
+			fprintf(stderr, "  Run %u\n", run);
+
+			clock_t  before = clock();
+			tests[i].func(tests[i].num_tests);
+			clock_t  after = clock();
+			double  secs = ((double) after-before) / CLOCKS_PER_SEC;
+			sum += secs;
+		}
+
+		fprintf(stderr, "  Average time: %.03lfs\n", sum / NUM_RUNS);
+		fprintf(stderr, "  Tests/sec:    %.0lf\n",
+			tests[i].num_tests / (sum / NUM_RUNS));
+	}
+
+	return EXIT_SUCCESS;
+}
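
Stripped of the timing harness, the schema variations and the resolution
paths, every test above performs the same in-memory round trip. A distilled
sketch of that pattern, using the primitive "long" schema for brevity (not
part of the commit):

#include <stdio.h>
#include <stdlib.h>
#include <avro.h>

int main(void)
{
	static char buf[64];
	avro_writer_t writer = avro_writer_memory(buf, sizeof(buf));
	avro_reader_t reader = avro_reader_memory(buf, sizeof(buf));

	avro_schema_t schema = avro_schema_long();
	avro_value_iface_t *iface = avro_generic_class_from_schema(schema);

	avro_value_t val, out;
	avro_generic_value_new(iface, &val);
	avro_generic_value_new(iface, &out);

	avro_value_set_long(&val, 42);
	avro_value_write(writer, &val);     /* serialize into buf */
	avro_value_read(reader, &out);      /* deserialize from buf */

	if (!avro_value_equal(&val, &out)) {
		fprintf(stderr, "round trip broken\n");
		exit(EXIT_FAILURE);
	}

	avro_value_decref(&val);
	avro_value_decref(&out);
	avro_value_iface_decref(iface);
	avro_schema_decref(schema);
	avro_writer_free(writer);
	avro_reader_free(reader);
	return EXIT_SUCCESS;
}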
diff --git a/lang/c/tests/schema_tests/fail/enum_nonarray_symbols b/lang/c/tests/schema_tests/fail/enum_nonarray_symbols
new file mode 100644
index 0000000..f4dae95
--- /dev/null
+++ b/lang/c/tests/schema_tests/fail/enum_nonarray_symbols
@@ -0,0 +1,3 @@
+{"type": "enum",
+ "name": "Status",
+ "symbols": "Normal Caution Critical"}
diff --git a/lang/c/tests/schema_tests/fail/enum_nonstring_name b/lang/c/tests/schema_tests/fail/enum_nonstring_name
new file mode 100644
index 0000000..baa13d9
--- /dev/null
+++ b/lang/c/tests/schema_tests/fail/enum_nonstring_name
@@ -0,0 +1,3 @@
+{"type": "enum",
+ "name": [ 0, 1, 1, 2, 3, 5, 8 ],
+ "symbols": ["Golden", "Mean"]}
diff --git a/lang/c/tests/schema_tests/fail/enum_without_name b/lang/c/tests/schema_tests/fail/enum_without_name
new file mode 100644
index 0000000..57f6853
--- /dev/null
+++ b/lang/c/tests/schema_tests/fail/enum_without_name
@@ -0,0 +1,3 @@
+{"type": "enum"
+  "symbols" : ["I", "will", "fail", "no", "name"]
+}
diff --git a/lang/c/tests/schema_tests/fail/fixed_without_name b/lang/c/tests/schema_tests/fail/fixed_without_name
new file mode 100644
index 0000000..fbf96ab
--- /dev/null
+++ b/lang/c/tests/schema_tests/fail/fixed_without_name
@@ -0,0 +1,2 @@
+{"type": "fixed",
+ "size": 314}
diff --git a/lang/c/tests/schema_tests/fail/fixed_without_size b/lang/c/tests/schema_tests/fail/fixed_without_size
new file mode 100644
index 0000000..15c5e78
--- /dev/null
+++ b/lang/c/tests/schema_tests/fail/fixed_without_size
@@ -0,0 +1,2 @@
+{"type": "fixed",
+ "name": "Missing size"}
diff --git a/lang/c/tests/schema_tests/fail/illegal_type b/lang/c/tests/schema_tests/fail/illegal_type
new file mode 100644
index 0000000..e65c046
--- /dev/null
+++ b/lang/c/tests/schema_tests/fail/illegal_type
@@ -0,0 +1 @@
+{"type":"panther"}
diff --git a/lang/c/tests/schema_tests/fail/invalid_avro_id b/lang/c/tests/schema_tests/fail/invalid_avro_id
new file mode 100644
index 0000000..c684e7d
--- /dev/null
+++ b/lang/c/tests/schema_tests/fail/invalid_avro_id
@@ -0,0 +1,3 @@
+{ "name" : "2d2",
+  "type": "enum",
+  "symbols" : [ "c3po" ] }
diff --git a/lang/c/tests/schema_tests/fail/record_with_field_missing_name b/lang/c/tests/schema_tests/fail/record_with_field_missing_name
new file mode 100644
index 0000000..ba62d52
--- /dev/null
+++ b/lang/c/tests/schema_tests/fail/record_with_field_missing_name
@@ -0,0 +1,5 @@
+{"type": "record",
+ "name": "Address",
+ "fields": [
+     {"type": "string"},
+     {"type": "string", "name": "City"}]}
diff --git a/lang/c/tests/schema_tests/fail/record_with_field_missing_type b/lang/c/tests/schema_tests/fail/record_with_field_missing_type
new file mode 100644
index 0000000..b449f3b
--- /dev/null
+++ b/lang/c/tests/schema_tests/fail/record_with_field_missing_type
@@ -0,0 +1,5 @@
+{"type": "record",
+ "name": "Event",
+ "fields": [ 
+    { "name": "Sponsor"},
+    { "name": "City", "type": "string"}]}
diff --git a/lang/c/tests/schema_tests/fail/record_with_invalid_reference b/lang/c/tests/schema_tests/fail/record_with_invalid_reference
new file mode 100644
index 0000000..49b3590
--- /dev/null
+++ b/lang/c/tests/schema_tests/fail/record_with_invalid_reference
@@ -0,0 +1,7 @@
+{ "type": "record",
+  "name": "recursive",
+  "fields": [
+    { "name": "label", "type": "string" },
+    { "name": "children", "type": {"type": "array", "items": "foobar"} }
+  ]
+}
diff --git a/lang/c/tests/schema_tests/fail/record_with_nonarray_fields b/lang/c/tests/schema_tests/fail/record_with_nonarray_fields
new file mode 100644
index 0000000..b81fbe3
--- /dev/null
+++ b/lang/c/tests/schema_tests/fail/record_with_nonarray_fields
@@ -0,0 +1,3 @@
+{ "type": "record",
+  "fields": "His vision, from the constantly passing bars,"
+  "name", "Rainer" }
diff --git a/lang/c/tests/schema_tests/fail/record_with_nonstring_name b/lang/c/tests/schema_tests/fail/record_with_nonstring_name
new file mode 100644
index 0000000..0ded9c5
--- /dev/null
+++ b/lang/c/tests/schema_tests/fail/record_with_nonstring_name
@@ -0,0 +1,3 @@
+{"name": ["Tom", "Jerry"],
+ "type": "record",
+ "fields": [ {"name": "name", "type": "string"} ]}
diff --git a/lang/c/tests/schema_tests/pass/array b/lang/c/tests/schema_tests/pass/array
new file mode 100644
index 0000000..d695049
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/array
@@ -0,0 +1 @@
+{"type": "array", "items": "long"}
diff --git a/lang/c/tests/schema_tests/pass/boolean_full b/lang/c/tests/schema_tests/pass/boolean_full
new file mode 100644
index 0000000..69d3579
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/boolean_full
@@ -0,0 +1 @@
+{"type":"boolean"}
diff --git a/lang/c/tests/schema_tests/pass/bytes_full b/lang/c/tests/schema_tests/pass/bytes_full
new file mode 100644
index 0000000..3b91ef0
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/bytes_full
@@ -0,0 +1 @@
+{"type":"bytes"}
diff --git a/lang/c/tests/schema_tests/pass/double_full b/lang/c/tests/schema_tests/pass/double_full
new file mode 100644
index 0000000..dbd22f7
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/double_full
@@ -0,0 +1 @@
+{"type":"double"}
diff --git a/lang/c/tests/schema_tests/pass/enum b/lang/c/tests/schema_tests/pass/enum
new file mode 100644
index 0000000..749b0a3
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/enum
@@ -0,0 +1,4 @@
+{ "type": "enum",
+  "name": "three_stooges",
+  "symbols" : [ "Moe", "Larry", "Curly" ]
+}
diff --git a/lang/c/tests/schema_tests/pass/extra_attributes b/lang/c/tests/schema_tests/pass/extra_attributes
new file mode 100644
index 0000000..49885b9
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/extra_attributes
@@ -0,0 +1 @@
+{"type":"string", "ignored": "value"}
diff --git a/lang/c/tests/schema_tests/pass/fixed b/lang/c/tests/schema_tests/pass/fixed
new file mode 100644
index 0000000..0449ebc
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/fixed
@@ -0,0 +1 @@
+{"type": "fixed", "size": 16, "name": "md5"}
diff --git a/lang/c/tests/schema_tests/pass/float_full b/lang/c/tests/schema_tests/pass/float_full
new file mode 100644
index 0000000..fbd1164
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/float_full
@@ -0,0 +1 @@
+{"type":"float"}
diff --git a/lang/c/tests/schema_tests/pass/int_full b/lang/c/tests/schema_tests/pass/int_full
new file mode 100644
index 0000000..92b134d
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/int_full
@@ -0,0 +1 @@
+{"type":"int"}
diff --git a/lang/c/tests/schema_tests/pass/interop.avsc b/lang/c/tests/schema_tests/pass/interop.avsc
new file mode 100644
index 0000000..8cfbba2
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/interop.avsc
@@ -0,0 +1,28 @@
+{"type": "record", "name":"Interop", "namespace": "org.apache.avro",
+  "fields": [
+      {"name": "intField", "type": "int"},
+      {"name": "longField", "type": "long"},
+      {"name": "stringField", "type": "string"},
+      {"name": "boolField", "type": "boolean"},
+      {"name": "floatField", "type": "float"},
+      {"name": "doubleField", "type": "double"},
+      {"name": "bytesField", "type": "bytes"},
+      {"name": "nullField", "type": "null"},
+      {"name": "arrayField", "type": {"type": "array", "items": "double"}},
+      {"name": "mapField", "type":
+       {"type": "map", "values":
+        {"type": "record", "name": "Foo",
+         "fields": [{"name": "label", "type": "string"}]}}},
+      {"name": "unionField", "type":
+       ["boolean", "double", {"type": "array", "items": "bytes"}]},
+      {"name": "enumField", "type":
+       {"type": "enum", "name": "Kind", "symbols": ["A","B","C"]}},
+      {"name": "fixedField", "type":
+       {"type": "fixed", "name": "MD5", "size": 16}},
+      {"name": "recordField", "type":
+       {"type": "record", "name": "Node",
+        "fields": [
+            {"name": "label", "type": "string"},
+            {"name": "children", "type": {"type": "array", "items": "Node"}}]}}
+  ]
+}
diff --git a/lang/c/tests/schema_tests/pass/long_full b/lang/c/tests/schema_tests/pass/long_full
new file mode 100644
index 0000000..ccfd917
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/long_full
@@ -0,0 +1 @@
+{"type":"long"}
diff --git a/lang/c/tests/schema_tests/pass/map b/lang/c/tests/schema_tests/pass/map
new file mode 100644
index 0000000..436d961
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/map
@@ -0,0 +1 @@
+{"type" : "map", "values": "long"}
diff --git a/lang/c/tests/schema_tests/pass/namespace_recursive b/lang/c/tests/schema_tests/pass/namespace_recursive
new file mode 100644
index 0000000..3c2d0eb
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/namespace_recursive
@@ -0,0 +1,28 @@
+{ "type": "record",
+  "name": "Container",
+  "namespace": "namespace1",
+  "fields": [
+    { "name": "contained",
+      "type": { "type": "record",
+                "name": "MutuallyRecursive",
+                "fields": [
+                    { "name": "label", "type": "string" },
+                    { "name": "children",
+                        "type": {"type": "array", "items":
+                            {"type": "record",
+                                "name": "MutuallyRecursive",
+                                "namespace": "namespace2",
+                                "fields": [
+                                    { "name": "value", "type": "int" },
+                                    { "name": "children", "type": {"type": "array", "items": "namespace1.MutuallyRecursive" }},
+                                    { "name": "morechildren", "type": {"type": "array", "items": "MutuallyRecursive" }}
+                                ]
+                            }
+                        }
+                    },
+                    { "name": "anotherchild", "type": "namespace2.MutuallyRecursive"}
+                ]
+      }
+    }
+  ]
+}
diff --git a/lang/c/tests/schema_tests/pass/namespace_simple b/lang/c/tests/schema_tests/pass/namespace_simple
new file mode 100644
index 0000000..f5a117f
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/namespace_simple
@@ -0,0 +1,5 @@
+{"type": "record", "namespace": "x", "name": "Y", "fields": [
+  {"name": "e", "type": {"type": "record", "name": "Z", "fields": [
+    {"name": "f", "type": "x.Z"}
+  ]}}
+]}
diff --git a/lang/c/tests/schema_tests/pass/null_full b/lang/c/tests/schema_tests/pass/null_full
new file mode 100644
index 0000000..cae8767
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/null_full
@@ -0,0 +1 @@
+{"type":"null"}
diff --git a/lang/c/tests/schema_tests/pass/record b/lang/c/tests/schema_tests/pass/record
new file mode 100644
index 0000000..43ac456
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/record
@@ -0,0 +1,5 @@
+{"name": "person",
+ "type": "record",
+ "fields": [ {"name": "height", "type": "long"},
+             {"name": "weight", "type": "long"},
+             {"name": "name", "type": "string"}]}
diff --git a/lang/c/tests/schema_tests/pass/record_fields_with_defaults b/lang/c/tests/schema_tests/pass/record_fields_with_defaults
new file mode 100644
index 0000000..545ccbb
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/record_fields_with_defaults
@@ -0,0 +1,6 @@
+{"name": "person",
+ "type": "record",
+ "fields": [ {"name": "height", "type": "long"},
+             {"name": "weight", "type": "long"},
+             {"name": "name", "type": "string"},
+             {"name": "hacker", "type": "boolean", "default": false}]}
diff --git a/lang/c/tests/schema_tests/pass/recursive_record b/lang/c/tests/schema_tests/pass/recursive_record
new file mode 100644
index 0000000..0967bb4
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/recursive_record
@@ -0,0 +1,7 @@
+{ "type": "record",
+  "name": "recursive",
+  "fields": [
+    { "name": "label", "type": "string" },
+    { "name": "children", "type": {"type": "array", "items": "recursive"} }
+  ]
+}
diff --git a/lang/c/tests/schema_tests/pass/string_extra_attributes b/lang/c/tests/schema_tests/pass/string_extra_attributes
new file mode 100644
index 0000000..49885b9
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/string_extra_attributes
@@ -0,0 +1 @@
+{"type":"string", "ignored": "value"}
diff --git a/lang/c/tests/schema_tests/pass/string_full b/lang/c/tests/schema_tests/pass/string_full
new file mode 100644
index 0000000..5566b9f
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/string_full
@@ -0,0 +1 @@
+{"type": "string"}
diff --git a/lang/c/tests/schema_tests/pass/union b/lang/c/tests/schema_tests/pass/union
new file mode 100644
index 0000000..ef2b6ec
--- /dev/null
+++ b/lang/c/tests/schema_tests/pass/union
@@ -0,0 +1 @@
+["string", "long", "null"]
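
The fixtures above carry no harness of their own: each file is a bare JSON
document, and the pass/fail directory encodes whether the schema parser must
accept or reject it. A sketch of that contract (the check() helper is
hypothetical, not the actual test driver):

#include <stdio.h>
#include <string.h>
#include <avro.h>

/* Returns 0 if the JSON text parses as a valid Avro schema. */
static int check(const char *json)
{
	avro_schema_t schema = NULL;
	avro_schema_error_t error = NULL;
	int rc = avro_schema_from_json(json, strlen(json), &schema, &error);
	if (rc == 0) {
		avro_schema_decref(schema);
	}
	return rc;
}

int main(void)
{
	/* a "pass" fixture must parse... */
	if (check("{\"type\": \"array\", \"items\": \"long\"}") != 0)
		printf("unexpected failure\n");
	/* ...and a "fail" fixture must be rejected */
	if (check("{\"type\": \"panther\"}") == 0)
		printf("unexpected success\n");
	return 0;
}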
diff --git a/lang/c/tests/test_avro_1034.c b/lang/c/tests/test_avro_1034.c
new file mode 100644
index 0000000..036337a
--- /dev/null
+++ b/lang/c/tests/test_avro_1034.c
@@ -0,0 +1,394 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+/* Test code for JIRA Issue AVRO-1034.
+ *
+ * AVRO-1034: Resolved reader does not initialize children of arrays,
+ * resulting in seg faults
+ *
+ * This program tests schema resolution for nested arrays. For the
+ * purposes of this test, there are two schemas "old" and "new" which
+ * are created by reading the same JSON schema.
+ *
+ * The test creates and populates a nested array avro value, and
+ * serializes it to memory. The raw memory is written to a file. Note
+ * that the schema is not written to the file. The nested array is
+ * also printed to the screen.
+ *
+ * An identical nested array avro value is then created. A
+ * resolved_reader_class and a corresponding resolved_record instance
+ * is created (using identical "writer" and "reader" schemas for
+ * simplicity), and an attempt is made to "read" the resolved avro
+ * value. 
+ * 
+ * Once the resolved value has been read, the source value (nested)
+ * and the resolved value (resolved_record) are both reset using
+ * avro_value_reset(). Then the source value (nested) is populated
+ * with another (larger) nested array. Then an attempt is made to read
+ * the resolved avro value again.
+ * 
+ * This second attempt to read the resolved value results in a
+ * segmentation fault under Linux when using the patch in
+ * https://issues.apache.org/jira/secure/attachment/12516487/0001-AVRO-1034.-C-Resolved-reader-initializes-child-array.patch.
+ *
+ * However, the program does not seg fault when using the patch in
+ * https://issues.apache.org/jira/secure/attachment/12515544/AVRO-1034.patch
+ *
+ * AVRO-C was compiled with CMAKE_INSTALL_PREFIX=avrolib
+ * The static library (libavro.a) was copied into a subdirectory of avrolib/lib/static
+ *
+ * This file was compiled under Linux using:
+ *   gcc -g avro-1034-test-2.c -o test2 -I../../build/avrolib/include -L../../build/avrolib/lib/static -lavro
+ *
+ */
+
+
+// Encode the following JSON string in NESTED_ARRAY
+// {"type":"array", "items": {"type": "array", "items": "long"}}
+//
+#define NESTED_ARRAY \
+  "{\"type\":\"array\", \"items\": {\"type\": \"array\", \"items\": \"long\"}}"
+
+avro_schema_t schema_old = NULL;
+avro_schema_t schema_new = NULL;
+
+/* Parse schema into a schema data structure */
+void init_schema(void)
+{
+  avro_schema_error_t error;
+  if (avro_schema_from_json(NESTED_ARRAY, sizeof(NESTED_ARRAY),
+                            &schema_old, &error)) {
+    printf( "Unable to parse old schema\n");
+    exit(EXIT_FAILURE);
+  }
+
+  if (avro_schema_from_json(NESTED_ARRAY, sizeof(NESTED_ARRAY),
+                            &schema_new, &error)) {
+    printf( "Unable to parse new schema\n");
+    exit(EXIT_FAILURE);
+  }
+}
+
+#define try(call, msg) \
+	do { \
+		if (call) { \
+			printf( msg ":\n  %s\n", avro_strerror()); \
+			exit (EXIT_FAILURE);                       \
+		} \
+	} while (0)
+
+
+/* The input avro_value_t p_array should contain a nested array.
+ * Print the fields of this nested array to the screen.
+ */
+int print_array_fields ( avro_value_t *p_array )
+{
+  size_t idx;
+  size_t length;
+  avro_type_t val_type;
+
+  val_type = avro_value_get_type( p_array );
+  printf( "Main array type = %d\n", val_type );
+
+  try( avro_value_get_size( p_array, &length ),
+       "Couldn't get array size" );
+  printf( "Main array length = %d\n", (int) length );
+
+  for ( idx = 0; idx < length; idx ++ )
+  {
+    avro_value_t subarray;
+    size_t sublength;
+    size_t jdx;
+    const char *unused;
+
+    try ( avro_value_get_by_index( p_array, idx, &subarray, &unused ),
+          "Couldn't get subarray" );
+
+    val_type = avro_value_get_type( &subarray );
+    printf( "Subarray type = %d\n", val_type );
+
+    try( avro_value_get_size( &subarray, &sublength ),
+         "Couldn't get subarray size" );
+    printf( "Subarray length = %d\n", (int) sublength );
+
+    for ( jdx = 0; jdx < sublength; jdx++ )
+    {
+      avro_value_t element;
+      int64_t val;
+
+      try ( avro_value_get_by_index( &subarray, jdx, &element, &unused  ),
+            "Couldn't get subarray element" );
+
+      val_type = avro_value_get_type( &element );
+
+      try ( avro_value_get_long( &element, &val ),
+            "Couldn't get subarray element value" );
+
+      printf( "nested_array[%d][%d]: type = %d value = %lld\n",
+              (int) idx, (int) jdx, (int) val_type, (long long) val );
+
+    }
+  }
+
+  return 0;
+}
+
+
+/* The input avro_value_t p_subarray should contain an array of long
+ * integers. Add "elements" number of long integers to this array. Set
+ * the values to be distinct based on the iteration parameter.
+ */
+int add_subarray( avro_value_t *p_subarray,
+                  size_t elements,
+                  int32_t iteration )
+{
+  avro_value_t element;
+  size_t index;
+  size_t idx;
+
+  for ( idx = 0; idx < elements; idx ++ )
+  {
+    // Append avro array element to subarray
+    try ( avro_value_append( p_subarray, &element, &index ),
+          "Error appending element in subarray" );
+
+    try ( avro_value_set_long( &element, (iteration+1)*100 + (iteration+1) ),
+          "Error setting subarray element" );
+  }
+
+  return 0;
+}
+
+int populate_array( avro_value_t *p_array, int32_t elements )
+{
+  int32_t idx;
+  fprintf( stderr, "Elements = %d\n", elements);
+  for ( idx = 0; idx < elements; idx ++ )
+  {
+    avro_value_t subarray;
+    size_t index;
+
+    // Append avro array element for top level array
+    try ( avro_value_append( p_array, &subarray, &index ),
+          "Error appending subarray" );
+
+    // Populate array element with subarray of length 2
+#define SUBARRAY_LENGTH (2)
+    try ( add_subarray( &subarray, SUBARRAY_LENGTH, idx ),
+          "Error populating subarray" );
+  }
+  return 0;
+}
+
+
+/* Create a nested array using the schema NESTED_ARRAY. Populate its
+ * elements with unique values. Serialize the nested array to the
+ * memory buffer in avro_writer_t. The number of elements in the first
+ * dimension of the nested array is "elements". The number of elements
+ * in the second dimension of the nested array is hardcoded to 2.
+ */
+int add_array( avro_writer_t writer,
+               int32_t elements,
+               int use_resolving_writer )
+{
+  avro_schema_t chosen_schema;
+  avro_value_iface_t *nested_array_class;
+  avro_value_t nested;
+
+  // Select (hardcode) schema to use
+  chosen_schema = schema_old;
+
+  // Create avro class and value
+  nested_array_class = avro_generic_class_from_schema( chosen_schema );
+  try ( avro_generic_value_new( nested_array_class, &nested ),
+        "Error creating instance of record" );
+
+  try ( populate_array( &nested, elements ),
+        "Error populating array" );
+
+  if ( use_resolving_writer )
+  {
+    // Resolve schema differences
+    avro_value_iface_t *resolved_reader_class;
+    avro_value_iface_t *writer_class;
+    avro_value_t resolved_record;
+
+    // Note - we will read values from the reader of "schema to write
+    // to file" and we will copy them into a writer of the same
+    // schema.
+    resolved_reader_class = avro_resolved_reader_new( schema_old,// schema populated above
+                                                      schema_new // schema_to_write_to_file
+                                                    );
+    if ( resolved_reader_class == NULL )
+    {
+      printf( "Failed avro_resolved_reader_new()\n");
+      exit( EXIT_FAILURE );
+    }
+
+    try ( avro_resolved_reader_new_value( resolved_reader_class, &resolved_record ),
+          "Failed avro_resolved_reader_new_value" );
+
+    // Map the resolved reader to the record you want to get data from
+    avro_resolved_reader_set_source( &resolved_record, &nested );
+
+    // Now the resolved_record is mapped to read data from record. Now
+    // we need to copy the data from resolved_record into a
+    // writer_record, which is an instance of the same schema as
+    // resolved_record.
+
+    // Create a writer of the schema you want to write using
+    writer_class = avro_generic_class_from_schema( schema_new );
+    if ( writer_class == NULL )
+    {
+      printf( "Failed avro_generic_class_from_schema()\n");
+      exit( EXIT_FAILURE );
+    }
+
+    try ( avro_value_write( writer, &resolved_record ),
+          "Unable to write record into memory using writer_record" );
+
+    print_array_fields( &resolved_record );
+
+    avro_value_reset( &nested );
+
+    // Question: Is it permissible to call avro_value_reset() on a
+    // resolved_record? Change the #if 1 to #if 0 to disable the
+    // avro_value_reset() and prevent the segmentation fault.
+   #if 1
+    avro_value_reset( &resolved_record );
+   #endif
+
+    try ( populate_array( &nested, 2*elements ),
+          "Error populating array" );
+
+    try ( avro_value_write( writer, &resolved_record ),
+          "Unable to write record into memory using writer_record" );
+    
+    print_array_fields( &resolved_record );
+
+    avro_value_decref( &resolved_record );
+    avro_value_iface_decref( writer_class );
+  }
+  else
+  {
+    // Write the value to memory
+    try ( avro_value_write( writer, &nested ),
+          "Unable to write nested into memory" );
+
+    print_array_fields( &nested );
+  }
+
+
+  // Release the record
+  avro_value_decref( &nested );
+  avro_value_iface_decref( nested_array_class );
+
+  return 0;
+}
+
+/* Create a raw binary file containing a serialized version of a
+ * nested array. This file will later be read by
+ * read_nested_array_file().
+ */
+int write_nested_array_file ( int64_t buf_len,
+                              const char *raw_binary_file_name,
+                              int use_resolving_writer )
+{
+  char *buf;
+  avro_writer_t nested_writer;
+  FILE *fid = NULL;
+
+  fprintf( stdout, "Create %s\n", raw_binary_file_name );
+
+  // Allocate a buffer
+  buf = (char *) malloc( buf_len * sizeof( char ) );
+  if ( buf == NULL )
+  {
+    printf( "There was an error creating the nested buffer %s.\n", raw_binary_file_name);
+    exit(EXIT_FAILURE);
+  }
+
+  /* Create a new memory writer */
+  nested_writer = avro_writer_memory( buf, buf_len );
+  if ( nested_writer == NULL )
+  {
+    printf( "There was an error creating the buffer for writing %s.\n", raw_binary_file_name);
+    exit(EXIT_FAILURE);
+  }
+
+  /* Add an array containing 4 subarrays */
+  printf( "before avro_writer_tell %d\n", (int) avro_writer_tell( nested_writer ) );
+#define ARRAY_LENGTH (4)
+  add_array( nested_writer, ARRAY_LENGTH, use_resolving_writer );
+  printf( "after avro_writer_tell %d\n", (int) avro_writer_tell( nested_writer ) );
+
+  /* Serialize the nested array */
+  printf( "Serialize the data to a file\n");
+
+  /* Delete the nested array if it exists, and create a new one */
+  remove(raw_binary_file_name);
+  fid = fopen( raw_binary_file_name, "w+");
+  if ( fid == NULL )
+  {
+    printf( "There was an error creating the file %s.\n", raw_binary_file_name);
+    exit(EXIT_FAILURE);
+  }
+  fwrite( buf, 1, avro_writer_tell( nested_writer ), fid );
+  fclose(fid);
+  avro_writer_free( nested_writer );
+  free(buf);
+  return 0;
+}
+
+
+/* Top level function to implement a test for the JIRA issue
+ * AVRO-1034. See detailed documentation at the top of this file.
+ */
+int main(void)
+{
+  const char *raw_binary_file_name = "nested_array.bin";
+  const char *raw_binary_file_name_resolved = "nested_array_resolved.bin";
+  int64_t buf_len = 2048;
+  int use_resolving_writer;
+
+  /* Initialize the schema structure from JSON */
+  init_schema();
+
+  printf( "Write the serialized nested array to %s\n", raw_binary_file_name );
+  use_resolving_writer = 0;
+  write_nested_array_file( buf_len, raw_binary_file_name, use_resolving_writer );
+
+  printf( "\nWrite the serialized nested array after schema resolution to %s\n",
+          raw_binary_file_name_resolved );
+  use_resolving_writer = 1;
+  write_nested_array_file( buf_len, raw_binary_file_name_resolved, use_resolving_writer );
+
+  // Close out schemas
+  avro_schema_decref(schema_old);
+  avro_schema_decref(schema_new);
+
+  // Remove the binary files
+  remove(raw_binary_file_name);
+  remove(raw_binary_file_name_resolved);
+
+  printf("\n");
+  return 0;
+}
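
The resolved-reader plumbing exercised above reduces to a three-step chain:
build a resolver from the writer and reader schemas, wrap it in a value, and
point that value at a source. A distilled sketch using identical schemas and
a primitive "long" in place of the nested array (not part of the commit):

#include <stdio.h>
#include <stdlib.h>
#include <avro.h>

int main(void)
{
	avro_schema_t schema = avro_schema_long();

	/* source value, built from the writer schema */
	avro_value_iface_t *src_class = avro_generic_class_from_schema(schema);
	avro_value_t src;
	avro_generic_value_new(src_class, &src);
	avro_value_set_long(&src, 7);

	/* 1. build a resolver between writer and reader schemas */
	avro_value_iface_t *resolved_class =
	    avro_resolved_reader_new(schema, schema);
	if (resolved_class == NULL) {
		fprintf(stderr, "resolution failed: %s\n", avro_strerror());
		exit(EXIT_FAILURE);
	}

	/* 2. wrap the resolver in a value and 3. point it at the source */
	avro_value_t resolved;
	avro_resolved_reader_new_value(resolved_class, &resolved);
	avro_resolved_reader_set_source(&resolved, &src);

	/* reads through the resolved value now see src via the reader schema */
	int64_t v;
	avro_value_get_long(&resolved, &v);
	printf("resolved value = %lld\n", (long long) v);

	avro_value_decref(&resolved);
	avro_value_iface_decref(resolved_class);
	avro_value_decref(&src);
	avro_value_iface_decref(src_class);
	avro_schema_decref(schema);
	return EXIT_SUCCESS;
}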
diff --git a/lang/c/tests/test_avro_1084.c b/lang/c/tests/test_avro_1084.c
new file mode 100644
index 0000000..eab6f9a
--- /dev/null
+++ b/lang/c/tests/test_avro_1084.c
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro.h>
+#include <stdio.h>
+#include <stdlib.h>
+
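+/* Test for JIRA issue AVRO-1084. As the refcount comments below note,
+ * avro_file_writer_create() does not take its own reference on the
+ * schema, so dropping the caller's only reference leaves the writer
+ * holding a dangling schema, and the following avro_record() call
+ * crashes.
+ */
+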
+const char  PERSON_SCHEMA[] =
+"{\"type\":\"record\",\
+  \"name\":\"Person\",\
+  \"fields\":[\
+     {\"name\": \"ID\", \"type\": \"long\"}]}";
+
+const char *dbname = "test.db";
+avro_schema_t schema;
+
+int main()
+{
+	avro_file_writer_t writer;
+
+	// refcount == 1
+	if (avro_schema_from_json_literal (PERSON_SCHEMA, &schema))
+	{
+		printf ("Unable to parse schema\n");
+		return EXIT_FAILURE;
+	}
+
+	// BUG: refcount == 1
+	if (avro_file_writer_create ("test.db", schema, &writer))
+	{
+		printf ("There was an error creating db: %s\n", avro_strerror());
+		return EXIT_FAILURE;
+	}
+
+	// this is "unusual" behaviour
+	// refcount == 0
+	avro_schema_decref (schema);
+
+	// crash
+	avro_datum_t main_datum = avro_record(schema);		
+	avro_datum_t id_datum = avro_int32(1);
+	
+	if (avro_record_set (main_datum, "ID", id_datum))
+	{
+		printf ("Unable to create datum");
+		return EXIT_FAILURE;
+	}
+
+	avro_file_writer_append (writer, main_datum);
+	avro_file_writer_flush (writer);
+	avro_file_writer_close (writer);
+	remove (dbname);
+
+	avro_datum_decref (id_datum);
+	avro_datum_decref (main_datum);
+
+	return EXIT_SUCCESS;
+}
+
diff --git a/lang/c/tests/test_avro_1087.c b/lang/c/tests/test_avro_1087.c
new file mode 100644
index 0000000..c6aa807
--- /dev/null
+++ b/lang/c/tests/test_avro_1087.c
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+const char  PERSON_SCHEMA[] =
+"{\"type\":\"record\",\
+  \"name\":\"Person\",\
+  \"fields\":[\
+     {\"name\": \"ID\", \"type\": \"int\"}]}";
+
+const char *dbname = "test.db";
+avro_schema_t schema;
+
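+/* Test for JIRA issue AVRO-1087: create a file containing a single
+ * record, then re-open it with avro_file_writer_open() and append a
+ * second record, exercising the append-to-existing-file path.
+ */
+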
+void add_record (avro_file_writer_t writer)
+{
+	avro_datum_t main_datum = avro_record(schema);		
+	avro_datum_t id_datum = avro_int32(1);
+	
+	if (avro_record_set (main_datum, "ID", id_datum))
+	{
+		printf ("Unable to create datum");
+		exit (EXIT_FAILURE);
+	}
+
+	avro_file_writer_append (writer, main_datum);
+
+	avro_datum_decref (id_datum);
+	avro_datum_decref (main_datum);
+}
+
+void create_database()
+{
+	avro_file_writer_t writer;
+
+	if (avro_schema_from_json_literal (PERSON_SCHEMA, &schema))
+	{
+		printf ("Unable to parse schema\n");
+		exit (EXIT_FAILURE);
+	}
+
+	if (avro_file_writer_create ("test.db", schema, &writer))
+	{
+		printf ("There was an error creating db: %s\n", avro_strerror());
+		exit (EXIT_FAILURE);
+	}
+
+	add_record (writer);
+
+	avro_file_writer_flush (writer);
+	avro_file_writer_close (writer);
+}
+
+
+int main()
+{
+	avro_file_writer_t writer;
+
+	create_database();
+
+	avro_file_writer_open (dbname, &writer);
+	add_record (writer);
+	
+	avro_file_writer_flush (writer);
+	avro_file_writer_close (writer);
+
+	remove (dbname);
+
+	return EXIT_SUCCESS;
+}
diff --git a/lang/c/tests/test_avro_1165.c b/lang/c/tests/test_avro_1165.c
new file mode 100644
index 0000000..677b8a4
--- /dev/null
+++ b/lang/c/tests/test_avro_1165.c
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <avro.h>
+
+/* To validate AVRO-1165, run this test program through valgrind
+ * before and after applying the AVRO-1165.patch. Before the patch
+ * valgrind will show memory leaks, and after the patch it will not.
+ * The specific valgrind command line to use from the
+ * avro-trunk/lang/c/tests directory is:
+ *    valgrind -v --track-origins=yes --leak-check=full
+ *          --show-reachable=yes ../build/tests/test_avro_1165
+ */
+
+int main(int argc, char **argv)
+{
+	const char  *json =
+		"{"
+		"  \"name\": \"repeated_subrecord_array\","
+		"  \"type\": \"record\","
+		"  \"fields\": ["
+		"    { \"name\": \"subrecord_one\","
+		"      \"type\": {"
+		"                  \"name\": \"SubrecordType\","
+		"                  \"type\": \"record\","
+		"                  \"fields\": ["
+		"                    { \"name\": \"x\", \"type\": \"int\" },"
+		"                    { \"name\": \"y\", \"type\": \"int\" }"
+		"                  ]"
+		"                }"
+		"    },"
+		"    { \"name\": \"subrecord_two\", \"type\": \"SubrecordType\" },"
+		"    { \"name\": \"subrecord_array\", \"type\": {"
+		"                                                 \"type\":\"array\","
+		"                                                 \"items\": \"SubrecordType\""
+		"                                               }"
+		"    }"
+		"  ]"
+		"}";
+
+	int rval;
+	avro_schema_t schema = NULL;
+	avro_schema_error_t error;
+	avro_value_iface_t *p_reader_class;
+
+	(void) argc;
+	(void) argv;
+
+	rval = avro_schema_from_json(json, strlen(json), &schema, &error);
+	if ( rval )
+	{
+		printf("Failed to read schema from JSON.\n");
+		return 1;
+	}
+	else
+	{
+		printf("Successfully read schema from JSON.\n");
+	}
+
+	p_reader_class = avro_generic_class_from_schema(schema);
+
+	avro_value_iface_decref(p_reader_class);
+
+	avro_schema_decref(schema);
+	return 0;
+}
diff --git a/lang/c/tests/test_avro_1237.c b/lang/c/tests/test_avro_1237.c
new file mode 100644
index 0000000..6cb8bd8
--- /dev/null
+++ b/lang/c/tests/test_avro_1237.c
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define check_exit(call) \
+	do { \
+		int  __rc = call; \
+		if (__rc != 0) { \
+			fprintf(stderr, "Unexpected error:\n  %s\n  %s\n", \
+				avro_strerror(), #call); \
+			exit(EXIT_FAILURE); \
+		} \
+	} while (0)
+
+#define expect_error(call) \
+	do { \
+		int  __rc = call; \
+		if (__rc == 0) { \
+			fprintf(stderr, "Expected an error:\n  %s\n", #call); \
+			exit(EXIT_FAILURE); \
+		} \
+	} while (0)
+
+#define check_expected_value(actual, expected) \
+	do { \
+		if (!avro_value_equal_fast((actual), (expected))) { \
+			char  *actual_json; \
+			char  *expected_json; \
+			avro_value_to_json((actual), 1, &actual_json); \
+			avro_value_to_json((expected), 1, &expected_json); \
+			fprintf(stderr, "Expected %s\nGot      %s\n", \
+				expected_json, actual_json); \
+			free(actual_json); \
+			free(expected_json); \
+			exit(EXIT_FAILURE); \
+		} \
+	} while (0)
+
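+/* AVRO-1237: a union discriminant read from a file must be validated
+ * against the union schema. The test reads a well-formed file first,
+ * then expects avro_file_reader_read_value() to report an error on the
+ * file whose encoded union discriminant is out of range.
+ */
+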
+int main(void)
+{
+	avro_schema_t  schema;
+	avro_file_reader_t  reader;
+	avro_value_iface_t  *iface;
+	avro_value_t  actual;
+	avro_value_t  expected;
+	avro_value_t  branch;
+
+	schema = avro_schema_union();
+	avro_schema_union_append(schema, avro_schema_null());
+	avro_schema_union_append(schema, avro_schema_int());
+
+	iface = avro_generic_class_from_schema(schema);
+	avro_generic_value_new(iface, &actual);
+	avro_generic_value_new(iface, &expected);
+
+
+	/* First read the contents of the good file. */
+
+	check_exit(avro_file_reader("avro-1237-good.avro", &reader));
+
+	check_exit(avro_file_reader_read_value(reader, &actual));
+	check_exit(avro_value_set_branch(&expected, 0, &branch));
+	check_exit(avro_value_set_null(&branch));
+	check_expected_value(&actual, &expected);
+
+	check_exit(avro_file_reader_read_value(reader, &actual));
+	check_exit(avro_value_set_branch(&expected, 1, &branch));
+	check_exit(avro_value_set_int(&branch, 100));
+	check_expected_value(&actual, &expected);
+
+	check_exit(avro_file_reader_close(reader));
+
+
+	/* Then read from the malformed file. */
+
+	check_exit(avro_file_reader
+		   ("avro-1237-bad-union-discriminant.avro", &reader));
+
+	check_exit(avro_file_reader_read_value(reader, &actual));
+	check_exit(avro_value_set_branch(&expected, 0, &branch));
+	check_exit(avro_value_set_null(&branch));
+	check_expected_value(&actual, &expected);
+
+	expect_error(avro_file_reader_read_value(reader, &actual));
+
+	check_exit(avro_file_reader_close(reader));
+
+
+	/* Clean up and exit */
+	avro_value_decref(&actual);
+	avro_value_decref(&expected);
+	avro_value_iface_decref(iface);
+	avro_schema_decref(schema);
+	exit(EXIT_SUCCESS);
+}
diff --git a/lang/c/tests/test_avro_1238.c b/lang/c/tests/test_avro_1238.c
new file mode 100644
index 0000000..50f95e5
--- /dev/null
+++ b/lang/c/tests/test_avro_1238.c
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define check_exit(call) \
+	do { \
+		int  __rc = call; \
+		if (__rc != 0) { \
+			fprintf(stderr, "Unexpected error:\n  %s\n  %s\n", \
+				avro_strerror(), #call); \
+			exit(EXIT_FAILURE); \
+		} \
+	} while (0)
+
+#define expect_eof(call) \
+	do { \
+		int  __rc = call; \
+		if (__rc != EOF) { \
+			fprintf(stderr, "Expected EOF:\n  %s\n", #call); \
+			exit(EXIT_FAILURE); \
+		} \
+	} while (0)
+
+#define expect_error(call) \
+	do { \
+		int  __rc = call; \
+		if (__rc == 0) { \
+			fprintf(stderr, "Expected an error:\n  %s\n", #call); \
+			exit(EXIT_FAILURE); \
+		} \
+	} while (0)
+
+#define check_expected_value(actual, expected) \
+	do { \
+		if (!avro_value_equal_fast((actual), (expected))) { \
+			char  *actual_json; \
+			char  *expected_json; \
+			avro_value_to_json((actual), 1, &actual_json); \
+			avro_value_to_json((expected), 1, &expected_json); \
+			fprintf(stderr, "Expected %s\nGot      %s\n", \
+				expected_json, actual_json); \
+			free(actual_json); \
+			free(expected_json); \
+			exit(EXIT_FAILURE); \
+		} \
+	} while (0)
+
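+/* AVRO-1238: reading past the last value of a complete file must
+ * return EOF, while reading from a truncated file must report an
+ * error rather than succeeding with partial data.
+ */
+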
+int main(void)
+{
+	avro_schema_t  schema;
+	avro_file_reader_t  reader;
+	avro_value_iface_t  *iface;
+	avro_value_t  actual;
+	avro_value_t  expected;
+	avro_value_t  branch;
+
+	schema = avro_schema_union();
+	avro_schema_union_append(schema, avro_schema_null());
+	avro_schema_union_append(schema, avro_schema_int());
+
+	iface = avro_generic_class_from_schema(schema);
+	avro_generic_value_new(iface, &actual);
+	avro_generic_value_new(iface, &expected);
+
+
+	/* First read the contents of the good file. */
+
+	check_exit(avro_file_reader("avro-1238-good.avro", &reader));
+
+	check_exit(avro_file_reader_read_value(reader, &actual));
+	check_exit(avro_value_set_branch(&expected, 0, &branch));
+	check_exit(avro_value_set_null(&branch));
+	check_expected_value(&actual, &expected);
+
+	check_exit(avro_file_reader_read_value(reader, &actual));
+	check_exit(avro_value_set_branch(&expected, 1, &branch));
+	check_exit(avro_value_set_int(&branch, 100));
+	check_expected_value(&actual, &expected);
+
+	expect_eof(avro_file_reader_read_value(reader, &actual));
+	check_exit(avro_file_reader_close(reader));
+
+
+	/* Then read from the truncated file. */
+
+	check_exit(avro_file_reader("avro-1238-truncated.avro", &reader));
+
+	check_exit(avro_file_reader_read_value(reader, &actual));
+	check_exit(avro_value_set_branch(&expected, 0, &branch));
+	check_exit(avro_value_set_null(&branch));
+	check_expected_value(&actual, &expected);
+
+	check_exit(avro_file_reader_read_value(reader, &actual));
+	check_exit(avro_value_set_branch(&expected, 1, &branch));
+	check_exit(avro_value_set_int(&branch, 100));
+	check_expected_value(&actual, &expected);
+
+	expect_error(avro_file_reader_read_value(reader, &actual));
+	check_exit(avro_file_reader_close(reader));
+
+
+	/* Clean up and exit */
+	avro_value_decref(&actual);
+	avro_value_decref(&expected);
+	avro_value_iface_decref(iface);
+	avro_schema_decref(schema);
+	exit(EXIT_SUCCESS);
+}
diff --git a/lang/c/tests/test_avro_1279.c b/lang/c/tests/test_avro_1279.c
new file mode 100644
index 0000000..f0dfc7f
--- /dev/null
+++ b/lang/c/tests/test_avro_1279.c
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define check_exit(call) \
+	do { \
+		int  __rc = call; \
+		if (__rc != 0) { \
+			fprintf(stderr, "Unexpected error:\n  %s\n  %s\n", \
+				avro_strerror(), #call); \
+			exit(EXIT_FAILURE); \
+		} \
+	} while (0)
+
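+/* AVRO-1279: a data file must be readable both when its header names
+ * a codec explicitly and when the codec metadata entry is absent.
+ */
+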
+int main(void)
+{
+	avro_file_reader_t  reader;
+
+	/* First open the file with the explicit codec. */
+	check_exit(avro_file_reader("avro-1279-codec.avro", &reader));
+	check_exit(avro_file_reader_close(reader));
+
+
+	/* Then the file with no codec. */
+	check_exit(avro_file_reader("avro-1279-no-codec.avro", &reader));
+	check_exit(avro_file_reader_close(reader));
+
+	/* Clean up and exit */
+	exit(EXIT_SUCCESS);
+}
diff --git a/lang/c/tests/test_avro_1379.c b/lang/c/tests/test_avro_1379.c
new file mode 100644
index 0000000..738a0bd
--- /dev/null
+++ b/lang/c/tests/test_avro_1379.c
@@ -0,0 +1,123 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include "avro.h"
+#include "avro_private.h"
+#include <stdio.h>
+#include <stdlib.h>
+
+static const char  *schema_json =
+	"{"
+	"  \"type\": \"record\","
+	"  \"name\": \"test\","
+	"  \"fields\": ["
+	"    { \"name\": \"i\", \"type\": \"int\" },"
+	"    { \"name\": \"l\", \"type\": \"long\" },"
+	"    { \"name\": \"s\", \"type\": \"string\" },"
+	"    {"
+	"      \"name\": \"subrec\","
+	"      \"type\": {"
+	"        \"type\": \"record\","
+	"        \"name\": \"sub\","
+	"        \"fields\": ["
+	"          { \"name\": \"f\", \"type\": \"float\" },"
+	"          { \"name\": \"d\", \"type\": \"double\" }"
+	"        ]"
+	"      }"
+	"    }"
+	"  ]"
+	"}";
+
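+/* AVRO-1379: encode a value into a plain memory buffer with
+ * avro_value_write(), append the raw bytes to a data file with
+ * avro_file_writer_append_encoded(), and verify that reading the file
+ * back yields a value equal to the original. The check() helper used
+ * in main() is provided by avro_private.h.
+ */
+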
+static void
+populate_complex_record(avro_value_t *p_val)
+{
+	avro_value_t  field;
+
+	avro_value_get_by_index(p_val, 0, &field, NULL);
+	avro_value_set_int(&field, 42);
+
+	avro_value_get_by_index(p_val, 1, &field, NULL);
+	avro_value_set_long(&field, 4242);
+
+	avro_wrapped_buffer_t  wbuf;
+	avro_wrapped_buffer_new_string(&wbuf, "Follow your bliss.");
+	avro_value_get_by_index(p_val, 2, &field, NULL);
+	avro_value_give_string_len(&field, &wbuf);
+
+	avro_value_t  subrec;
+	avro_value_get_by_index(p_val, 3, &subrec, NULL);
+
+	avro_value_get_by_index(&subrec, 0, &field, NULL);
+	avro_value_set_float(&field, 3.14159265);
+
+	avro_value_get_by_index(&subrec, 1, &field, NULL);
+	avro_value_set_double(&field, 2.71828183);
+}
+
+int main(void)
+{
+	int rval = 0;
+	size_t len;
+	static char  buf[4096];
+	avro_writer_t  writer;
+	avro_file_writer_t file_writer;
+	avro_file_reader_t file_reader;
+	const char *outpath = "test-1379.avro";
+
+	avro_schema_t  schema = NULL;
+	avro_schema_error_t  error = NULL;
+	check(rval, avro_schema_from_json(schema_json, strlen(schema_json), &schema, &error));
+
+	avro_value_iface_t  *iface = avro_generic_class_from_schema(schema);
+
+	avro_value_t  val;
+	avro_generic_value_new(iface, &val);
+
+	avro_value_t  out;
+	avro_generic_value_new(iface, &out);
+
+	/* create the val */
+	avro_value_reset(&val);
+	populate_complex_record(&val);
+
+	/* create the writers */
+	writer = avro_writer_memory(buf, sizeof(buf));
+	check(rval, avro_file_writer_create(outpath, schema, &file_writer));
+
+	fprintf(stderr, "Writing to buffer\n");
+	check(rval, avro_value_write(writer, &val));
+
+	fprintf(stderr, "Writing buffer to %s "
+		"using avro_file_writer_append_encoded()\n", outpath);
+	len = avro_writer_tell(writer);
+	check(rval, avro_file_writer_append_encoded(file_writer, buf, len));
+	check(rval, avro_file_writer_close(file_writer));
+
+	check(rval, avro_file_reader(outpath, &file_reader));
+	fprintf(stderr, "Re-reading value to verify\n");
+	check(rval, avro_file_reader_read_value(file_reader, &out));
+	fprintf(stderr, "Verifying value...");
+	if (!avro_value_equal(&val, &out)) {
+		fprintf(stderr, "fail!\n");
+		exit(EXIT_FAILURE);
+	}
+	fprintf(stderr, "ok\n");
+	check(rval, avro_file_reader_close(file_reader));
+	remove(outpath);
+
+	exit(EXIT_SUCCESS);
+}
diff --git a/lang/c/tests/test_avro_1405.c b/lang/c/tests/test_avro_1405.c
new file mode 100644
index 0000000..b7c1747
--- /dev/null
+++ b/lang/c/tests/test_avro_1405.c
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <stdio.h>
+#include "avro.h"
+
+#define NUM_RECORDS 10
+
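+/* AVRO-1405: write NUM_RECORDS records, closing and re-opening the
+ * file writer after every append so the file accumulates multiple
+ * blocks, then read everything back and verify that exactly
+ * NUM_RECORDS records come out before EOF.
+ */
+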
+const char  PERSON_SCHEMA[] =
+	"{"
+	"    \"type\":\"record\","
+	"    \"name\":\"Person\","
+	"    \"fields\": ["
+	"        {\"name\": \"ID\", \"type\": \"long\"},"
+	"        {\"name\": \"First\", \"type\": \"string\"},"
+	"        {\"name\": \"Last\", \"type\": \"string\"},"
+	"        {\"name\": \"Phone\", \"type\": \"string\"},"
+	"        {\"name\": \"Age\", \"type\": \"int\"}"
+	"    ]"
+	"}";
+
+const char *file = "avro_file.dat";
+
+void print_avro_value(avro_value_t *value) {
+	char *json;
+	if (!avro_value_to_json(value, 1, &json)) {
+		printf("%s\n", json);
+		free(json);
+	}
+}
+
+int read_data() {
+	int rval;
+	int records_read = 0;
+
+	avro_file_reader_t reader;
+	avro_value_iface_t *iface;
+	avro_value_t value;
+
+	avro_file_reader(file, &reader);
+	avro_schema_t schema = avro_file_reader_get_writer_schema(reader);
+
+	iface = avro_generic_class_from_schema(schema);
+	avro_generic_value_new(iface, &value);
+
+	printf("\nReading...\n");
+	while ((rval = avro_file_reader_read_value(reader, &value)) == 0) {
+		char  *json;
+
+		if (avro_value_to_json(&value, 1, &json)) {
+			printf("Error converting value to JSON: %s\n",avro_strerror());
+		} else {
+			printf("%s\n", json);
+			free(json);
+			records_read++;
+		}
+
+		avro_value_reset(&value);
+	}
+
+	avro_value_decref(&value);
+	avro_value_iface_decref(iface);
+	avro_schema_decref(schema);
+	avro_file_reader_close(reader);
+
+	if (rval != EOF || records_read != NUM_RECORDS) {
+		fprintf(stderr, "Error: %s\n", avro_strerror());
+		return EXIT_FAILURE;
+	}
+
+	return EXIT_SUCCESS;
+}
+
+int write_data() {
+	int  i;
+	avro_schema_t schema;
+	avro_schema_error_t error;
+	avro_file_writer_t writer;
+	avro_value_iface_t *iface;
+	avro_value_t value;
+
+	if (avro_schema_from_json(PERSON_SCHEMA, 0, &schema, &error)) {
+		printf ("Unable to parse schema\n");
+		return EXIT_FAILURE;
+	}
+
+	iface = avro_generic_class_from_schema(schema);
+	avro_generic_value_new(iface, &value);
+
+	if (avro_file_writer_create(file, schema, &writer)) {
+		printf ("There was an error creating file: %s\n", avro_strerror());
+		return EXIT_FAILURE;
+	}
+
+	printf("\nWriting...\n");
+	for (i = 0; i < NUM_RECORDS; i++) {
+		avro_value_t  field;
+		avro_value_get_by_name(&value, "ID", &field, NULL);
+		avro_value_set_long(&field, (int64_t) i);
+
+		avro_value_get_by_name(&value, "Age", &field, NULL);
+		avro_value_set_int(&field, i);
+
+		avro_value_get_by_name(&value, "First", &field, NULL);
+		avro_value_set_string(&field, "Firstname");
+
+		avro_value_get_by_name(&value, "Last", &field, NULL);
+		avro_value_set_string(&field, "Lastname");
+
+
+		avro_value_get_by_name(&value, "Phone", &field, NULL);
+		avro_value_set_string(&field, "1234567");
+
+		print_avro_value(&value);
+
+		avro_file_writer_append_value(writer, &value);
+
+		// Writing multiple blocks
+		avro_file_writer_close(writer);
+		avro_file_writer_open(file, &writer);
+
+		avro_value_reset(&value);
+	}
+
+	avro_file_writer_close(writer);
+	avro_value_iface_decref(iface);
+	avro_value_decref(&value);
+	avro_schema_decref(schema);
+
+	return EXIT_SUCCESS;
+}
+
+
+int main()
+{
+	int read_data_result;
+
+	if (write_data()) {
+		return EXIT_FAILURE;
+	}
+
+	read_data_result = read_data();
+	remove(file);
+
+	return read_data_result;
+}
diff --git a/lang/c/tests/test_avro_968.c b/lang/c/tests/test_avro_968.c
new file mode 100644
index 0000000..fe0e70a
--- /dev/null
+++ b/lang/c/tests/test_avro_968.c
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include <stdio.h>
+
+#include "avro.h"
+#include "avro_private.h"
+
+#define try(call, msg) \
+	do { \
+		if (call) { \
+			fprintf(stderr, msg ":\n  %s\n", avro_strerror()); \
+			return EXIT_FAILURE; \
+		} \
+	} while (0)
+
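+/* AVRO-968: equality and ordering of generic string values. Two
+ * distinct strings must compare unequal under avro_value_equal() and
+ * avro_value_equal_fast(), and "test string a" must order before
+ * "test string b" under avro_value_cmp() and avro_value_cmp_fast().
+ */
+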
+int
+main(int argc, char **argv)
+{
+	AVRO_UNUSED(argc);
+	AVRO_UNUSED(argv);
+
+	avro_value_t  v1;
+	avro_value_t  v2;
+
+	try(avro_generic_string_new(&v1, "test string a"),
+	    "Cannot create string value");
+	try(avro_generic_string_new(&v2, "test string b"),
+	    "Cannot create string value");
+
+	if (avro_value_equal(&v1, &v2)) {
+		fprintf(stderr, "Unexpected avro_value_equal\n");
+		return EXIT_FAILURE;
+	}
+
+	if (avro_value_equal_fast(&v1, &v2)) {
+		fprintf(stderr, "Unexpected avro_value_equal_fast\n");
+		return EXIT_FAILURE;
+	}
+
+	if (avro_value_cmp(&v1, &v2) >= 0) {
+		fprintf(stderr, "Unexpected avro_value_cmp\n");
+		return EXIT_FAILURE;
+	}
+
+	if (avro_value_cmp_fast(&v1, &v2) >= 0) {
+		fprintf(stderr, "Unexpected avro_value_cmp_fast\n");
+		return EXIT_FAILURE;
+	}
+
+	avro_value_decref(&v1);
+	avro_value_decref(&v2);
+	return 0;
+}
diff --git a/lang/c/tests/test_avro_984.c b/lang/c/tests/test_avro_984.c
new file mode 100644
index 0000000..a5e338e
--- /dev/null
+++ b/lang/c/tests/test_avro_984.c
@@ -0,0 +1,464 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <avro.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+
+/* Test code for JIRA Issue AVRO-984. 
+ * 
+ * AVRO-984: Avro-C schema resolution fails on nested array
+ * 
+ * This program tests schema resolution for nested arrays. For the
+ * purposes of this test, there are two schemas "old" and "new" which
+ * are created by reading the same JSON schema.
+ * 
+ * The test creates and populates a nested array, and serializes it to
+ * memory. The raw memory is written to a file, primarily to decouple
+ * writing and reading. Note that the schema is not written to the
+ * file. The nested array is also printed to the screen. 
+ * 
+ * The binary file is then read using two separate readers -- the
+ * matched reader and the resolved reader.
+ * 
+ * In the matched reader case, the "old" and "new" schemas are known
+ * to match, and therefore no schema resolution is done. The binary
+ * buffer is deserialized into an avro value and the nested array
+ * encoded in the avro value is printed to the screen. 
+ * 
+ * In the resolved reader case, the "old" and "new" schemas are not
+ * known to match, and therefore schema resolution is performed. (Note
+ * that the schemas *do* match, but we perform schema resolution
+ * anyway, to test the resolution process). The schema resolution
+ * appears to succeed. However, once the code tries to perform an
+ * "avro_value_read()" the code fails to read the nested array into
+ * the avro value.
+ * 
+ * Additionally, valgrind indicates that conditional jumps are being
+ * performed based on uninitialized values.
+ * 
+ * AVRO-C was compiled with CMAKE_INSTALL_PREFIX=avrolib
+ * The static library (libavro.a) was copied into a subdirectory of avrolib/lib/static
+ * 
+ * This file was compiled under Linux using:
+ *   gcc -g avro-984-test.c -o avro984 -I../../build/avrolib/include -L../../build/avrolib/lib/static -lavro
+ * 
+ * The code was tested with valgrind using the command:
+ *   valgrind -v --leak-check=full --track-origins=yes ./avro984
+ * 
+ */
+
+
+// Encode the following json string in NESTED_ARRAY
+// {"type":"array", "items": {"type": "array", "items": "long"}}
+// 
+#define NESTED_ARRAY \
+  "{\"type\":\"array\", \"items\": {\"type\": \"array\", \"items\": \"long\"}}"
+
+avro_schema_t schema_old = NULL;
+avro_schema_t schema_new = NULL;
+
+/* Parse schema into a schema data structure */
+void init_schema(void)
+{
+  avro_schema_error_t error;
+  if (avro_schema_from_json(NESTED_ARRAY, sizeof(NESTED_ARRAY),
+                            &schema_old, &error)) {
+    printf( "Unable to parse old schema\n");
+    exit(EXIT_FAILURE);
+  }
+
+  if (avro_schema_from_json(NESTED_ARRAY, sizeof(NESTED_ARRAY),
+                            &schema_new, &error)) {
+    printf( "Unable to parse new schema\n");
+    exit(EXIT_FAILURE);
+  }
+}
+
+#define try(call, msg) \
+	do { \
+		if (call) { \
+			printf( msg ":\n  %s\n", avro_strerror()); \
+			exit (EXIT_FAILURE);                       \
+		} \
+	} while (0)
+
+
+/* The input avro_value_t p_array should contain a nested array.
+ * Print the fields of this nested array to the screen.
+ */
+int print_array_fields ( avro_value_t *p_array )
+{
+  size_t idx;
+  size_t length;
+  avro_type_t val_type;
+
+  val_type = avro_value_get_type( p_array );
+  printf( "Main array type = %d\n", val_type );
+
+  try( avro_value_get_size( p_array, &length ),
+       "Couldn't get array size" );
+  printf( "Main array length = %d\n", (int) length );
+  
+  for ( idx = 0; idx < length; idx ++ )
+  {
+    avro_value_t subarray;
+    size_t sublength;
+    size_t jdx;
+    const char *unused;
+    
+    try ( avro_value_get_by_index( p_array, idx, &subarray, &unused ),
+          "Couldn't get subarray" );
+
+    val_type = avro_value_get_type( &subarray );
+    printf( "Subarray type = %d\n", val_type );
+
+    try( avro_value_get_size( &subarray, &sublength ),
+         "Couldn't get subarray size" );
+    printf( "Subarray length = %d\n", (int) sublength );
+
+    for ( jdx = 0; jdx < sublength; jdx++ )
+    {
+      avro_value_t element;
+      int64_t val;
+
+      try ( avro_value_get_by_index( &subarray, jdx, &element, &unused  ),
+            "Couldn't get subarray element" );
+
+      val_type = avro_value_get_type( &element );
+
+      try ( avro_value_get_long( &element, &val ),
+            "Couldn't get subarray element value" );
+
+      printf( "nested_array[%d][%d]: type = %d value = %lld\n", 
+              (int) idx, (int) jdx, (int) val_type, (long long) val );
+
+    }
+  }
+
+  return 0;
+}
+
+
+/* The input avro_value_t p_subarray should contain an array of long
+ * integers. Add "elements" number of long integers to this array. Set
+ * the values to be distinct based on the iteration parameter.
+ */
+int add_subarray( avro_value_t *p_subarray,
+                  int32_t elements, 
+                  int32_t iteration )
+{
+  avro_value_t element;
+  size_t index;
+  size_t idx;
+
+  for ( idx = 0; idx < (size_t) elements; idx ++ )
+  {
+    // Append avro array element to subarray
+    try ( avro_value_append( p_subarray, &element, &index ),
+          "Error appending element in subarray" );
+
+    try ( avro_value_set_long( &element, (iteration+1)*100 + (iteration+1) ),
+          "Error setting subarray element" );
+  }
+
+  return 0;
+}
+
+
+/* Create a nested array using the schema NESTED_ARRAY. Populate its
+ * elements with unique values. Serialize the nested array to the
+ * memory buffer in avro_writer_t. The number of elements in the first
+ * dimension of the nested array is "elements". The number of elements
+ * in the second dimension of the nested array is hardcoded to 2.
+ */
+int add_array( avro_writer_t writer, 
+               int32_t elements )
+{
+  avro_schema_t chosen_schema;
+  avro_value_iface_t *nested_array_class;
+  avro_value_t nested;
+  int32_t idx;
+
+  // Select (hardcode) schema to use
+  chosen_schema = schema_old;
+
+  // Create avro class and value
+  nested_array_class = avro_generic_class_from_schema( chosen_schema );
+  try ( avro_generic_value_new( nested_array_class, &nested ), 
+        "Error creating instance of record" );
+
+  for ( idx = 0; idx < elements; idx ++ )
+  {
+    avro_value_t subarray;
+    size_t index;
+
+    // Append avro array element for top level array
+    try ( avro_value_append( &nested, &subarray, &index ),
+          "Error appending subarray" );
+
+    // Populate array element with subarray of length 2
+#define SUBARRAY_LENGTH (2)
+    try ( add_subarray( &subarray, SUBARRAY_LENGTH, idx ),
+          "Error populating subarray" );
+  }
+
+  // Write the value to memory
+  try ( avro_value_write( writer, &nested ),
+        "Unable to write nested into memory" );
+
+  print_array_fields( &nested );
+
+  // Release the record
+  avro_value_decref( &nested );
+  avro_value_iface_decref( nested_array_class );
+
+  return 0;
+}
+
+/* Create a raw binary file containing a serialized version of a
+ * nested array. This file will later be read by
+ * read_nested_array_file().
+ */
+int write_nested_array_file ( int64_t buf_len, const char *raw_binary_file_name )
+{
+  char *buf;
+  avro_writer_t nested_writer;
+  FILE *fid = NULL;
+
+  fprintf( stdout, "Create %s\n", raw_binary_file_name );
+
+  // Allocate a buffer
+  buf = (char *) malloc( buf_len * sizeof( char ) );
+  if ( buf == NULL )
+  {
+    printf( "There was an error creating the nested buffer %s.\n", raw_binary_file_name);
+    exit(EXIT_FAILURE);
+  }
+
+  /* Create a new memory writer */
+  nested_writer = avro_writer_memory( buf, buf_len );
+  if ( nested_writer == NULL )
+  {
+    printf( "There was an error creating the buffer for writing %s.\n", raw_binary_file_name);
+    exit(EXIT_FAILURE);
+  }
+
+  /* Add an array containing 4 subarrays */
+  printf( "before avro_writer_tell %d\n", (int) avro_writer_tell( nested_writer ) );
+#define ARRAY_LENGTH (4)
+  add_array( nested_writer, ARRAY_LENGTH );
+  printf( "after avro_writer_tell %d\n", (int) avro_writer_tell( nested_writer ) );
+
+  /* Serialize the nested array */
+  printf( "Serialize the data to a file\n");
+
+  /* Delete the nested array if it exists, and create a new one */
+  remove(raw_binary_file_name);
+  fid = fopen( raw_binary_file_name, "w+");
+  if ( fid == NULL )
+  {
+    printf( "There was an error creating the file %s.\n", raw_binary_file_name);
+    exit(EXIT_FAILURE);
+  }
+  fwrite( buf, 1, avro_writer_tell( nested_writer ), fid );
+  fclose(fid);
+  avro_writer_free( nested_writer );
+  free(buf);
+  return 0;
+}
+
+
+/* Read the raw binary file containing a serialized version of a
+ * nested array, written by write_nested_array_file()
+ */
+int read_nested_array_file ( int64_t buf_len, 
+                             const char *raw_binary_file_name, 
+                             avro_schema_t writer_schema,
+                             avro_schema_t reader_schema,
+                             int use_resolving_reader
+                           )
+{
+
+  char *buf;
+  FILE *fid = NULL;
+  avro_reader_t nested_reader;
+  int64_t file_len;
+
+  // For Matched Reader and Resolving Reader
+  avro_value_iface_t *reader_class;
+  avro_value_t nested;
+  
+  // For Resolving Reader
+  avro_value_iface_t *resolver;
+  avro_value_t resolved_value;
+
+  fprintf( stdout, "Use %s reader\n", use_resolving_reader ? "Resolving":"Matched" );
+
+  // Allocate a buffer
+  buf = (char *) calloc( buf_len, sizeof( char ) );
+  if ( buf == NULL )
+  {
+    printf( "There was an error creating the buffer for reading %s.\n", raw_binary_file_name);
+    exit(EXIT_FAILURE);
+  }
+  // Start with a garbage buffer
+  memset(buf, 0xff, buf_len );
+
+  // Read the file into the buffer
+  fid = fopen( raw_binary_file_name, "r" );
+  if ( fid == NULL )
+  {
+    printf( "There was an error reading the file %s.\n", raw_binary_file_name);
+    exit(EXIT_FAILURE);
+  }
+  file_len = fread( buf, 1, buf_len, fid );
+  printf( "Read %d bytes\n", (int) file_len );
+  fclose(fid);
+
+  if ( use_resolving_reader )
+  {
+    // Resolving Reader
+
+    /* First resolve the writer and reader schemas */
+    resolver = avro_resolved_writer_new( writer_schema, reader_schema );
+    if ( !resolver )
+    {
+      printf( "Could not create resolver\n");
+      free(buf);
+      exit(EXIT_FAILURE);
+    }
+
+    /* Create a value that the resolver can write into. This is just
+     * an interface value, that is not directly read from.
+     */
+    if ( avro_resolved_writer_new_value( resolver, &resolved_value ) )
+    {
+      avro_value_iface_decref( resolver );
+      free(buf);      
+      exit(EXIT_FAILURE);
+    }
+
+    /* Then create the value with the reader schema, that we are going
+     * to use to read from.
+     */
+    reader_class = avro_generic_class_from_schema(reader_schema);
+    try ( avro_generic_value_new( reader_class, &nested ),
+          "Error creating instance of nested array" );
+
+    // When we read the memory using the resolved writer, we want to
+    // populate the instance of the value with the reader schema. This
+    // is done by set_dest.
+    avro_resolved_writer_set_dest(&resolved_value, &nested);
+
+    // Create a memory reader
+    nested_reader = avro_reader_memory( buf, buf_len );
+
+    if ( avro_value_read( nested_reader, &resolved_value ) )
+    {
+      printf( "Avro value read failed\n" );
+
+      avro_value_decref( &nested );
+      avro_value_iface_decref( reader_class );
+      avro_value_iface_decref( resolver );
+      avro_value_decref( &resolved_value );
+
+      exit(EXIT_FAILURE);
+    }
+  }
+  else
+  {
+    // Matched Reader
+    reader_class = avro_generic_class_from_schema(reader_schema);
+
+    try ( avro_generic_value_new( reader_class, &nested ),
+          "Error creating instance of nested array" );
+
+    // Send the memory in the buffer into the reader
+    nested_reader = avro_reader_memory( buf, buf_len );
+
+    try ( avro_value_read( nested_reader, &nested ),
+          "Could not read value from memory" );
+  }
+
+
+  /* Now the resolved record has been read into "nested" which is
+   * a value of type reader_class
+   */
+  print_array_fields( &nested );
+
+  if ( use_resolving_reader )
+  {
+    // Resolving Reader
+    avro_value_decref( &nested );
+    avro_value_iface_decref( reader_class );
+    avro_value_iface_decref( resolver );
+    avro_value_decref( &resolved_value );
+  }
+  else
+  {
+    // Matched Reader
+    avro_value_decref( &nested );    
+    avro_value_iface_decref( reader_class );
+  }
+
+  fprintf( stdout, "Done.\n\n");
+  avro_reader_free( nested_reader );
+  free(buf);
+  return 0;
+}
+
+
+/* Top level function to implement a test for the JIRA issue
+ * AVRO-984. See detailed documentation at the top of this file.
+ */
+int main(void)
+{
+  const char *raw_binary_file_name = "nested_array.bin";
+  int64_t buf_len = 2048;
+  int use_resolving_reader;
+
+  /* Initialize the schema structure from JSON */
+  init_schema();
+
+  printf( "Write the serialized nested array to %s\n", raw_binary_file_name );
+
+  write_nested_array_file( buf_len, raw_binary_file_name );
+
+  printf("\nNow read all the array back out\n\n");
+
+  for ( use_resolving_reader = 0; use_resolving_reader < 2; use_resolving_reader++ )
+  {
+    read_nested_array_file( buf_len, 
+                            raw_binary_file_name,
+                            schema_old,
+                            schema_new,
+                            use_resolving_reader
+                          );
+  }
+
+  // Close out schemas
+  avro_schema_decref(schema_old);
+  avro_schema_decref(schema_new);
+
+  // Remove the binary file
+  remove(raw_binary_file_name);
+  
+  printf("\n");
+  return 0;
+}
diff --git a/lang/c/tests/test_avro_data.c b/lang/c/tests/test_avro_data.c
new file mode 100644
index 0000000..652ff16
--- /dev/null
+++ b/lang/c/tests/test_avro_data.c
@@ -0,0 +1,684 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro.h"
+#include "avro_private.h"
+#include <limits.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+char buf[4096];
+avro_reader_t reader;
+avro_writer_t writer;
+
+typedef int (*avro_test) (void);
+
+/*
+ * Use a custom allocator that verifies that the size that we use to
+ * free an object matches the size that we use to allocate it.
+ */
+
+static void *
+test_allocator(void *ud, void *ptr, size_t osize, size_t nsize)
+{
+	AVRO_UNUSED(ud);
+	AVRO_UNUSED(osize);
+
+	if (nsize == 0) {
+		size_t  *size = ((size_t *) ptr) - 1;
+		if (osize != *size) {
+			fprintf(stderr,
+				"Error freeing %p:\n"
+				"Size passed to avro_free (%" PRIsz ") "
+				"doesn't match size passed to "
+				"avro_malloc (%" PRIsz ")\n",
+				ptr, osize, *size);
+			abort();
+			//exit(EXIT_FAILURE);
+		}
+		free(size);
+		return NULL;
+	} else {
+		size_t  real_size = nsize + sizeof(size_t);
+		size_t  *old_size = ptr? ((size_t *) ptr)-1: NULL;
+		size_t  *size = (size_t *) realloc(old_size, real_size);
+		*size = nsize;
+		return (size + 1);
+	}
+}
+
+void init_rand(void)
+{
+	srand(time(NULL));
+}
+
+double rand_number(double from, double to)
+{
+	double range = to - from;
+	return from + ((double)rand() / (RAND_MAX + 1.0)) * range;
+}
+
+int64_t rand_int64(void)
+{
+	return (int64_t) rand_number(LONG_MIN, LONG_MAX);
+}
+
+int32_t rand_int32(void)
+{
+	return (int32_t) rand_number(INT_MIN, INT_MAX);
+}
+
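+/* Round-trip helper: serialize datum with writers_schema into the
+ * shared memory buffer, read it back with readers_schema, and compare
+ * the result against expected (or against datum itself when expected
+ * is NULL). The loop performs the round trip twice, once without and
+ * once with write-time validation.
+ */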
+void
+write_read_check(avro_schema_t writers_schema, avro_datum_t datum,
+		 avro_schema_t readers_schema, avro_datum_t expected, char *type)
+{
+	avro_datum_t datum_out;
+	int validate;
+
+	for (validate = 0; validate <= 1; validate++) {
+
+		reader = avro_reader_memory(buf, sizeof(buf));
+		writer = avro_writer_memory(buf, sizeof(buf));
+
+		if (!expected) {
+			expected = datum;
+		}
+
+		/* Validating read/write */
+		if (avro_write_data
+		    (writer, validate ? writers_schema : NULL, datum)) {
+			fprintf(stderr, "Unable to write %s validate=%d\n  %s\n",
+				type, validate, avro_strerror());
+			exit(EXIT_FAILURE);
+		}
+		int64_t size =
+		    avro_size_data(writer, validate ? writers_schema : NULL,
+				   datum);
+		if (size != avro_writer_tell(writer)) {
+			fprintf(stderr,
+				"Unable to calculate size %s validate=%d "
+				"(%"PRId64" != %"PRId64")\n  %s\n",
+				type, validate, size, avro_writer_tell(writer),
+				avro_strerror());
+			exit(EXIT_FAILURE);
+		}
+		if (avro_read_data
+		    (reader, writers_schema, readers_schema, &datum_out)) {
+			fprintf(stderr, "Unable to read %s validate=%d\n  %s\n",
+				type, validate, avro_strerror());
+			fprintf(stderr, "  %s\n", avro_strerror());
+			exit(EXIT_FAILURE);
+		}
+		if (!avro_datum_equal(expected, datum_out)) {
+			fprintf(stderr,
+				"Unable to encode/decode %s validate=%d\n  %s\n",
+				type, validate, avro_strerror());
+			exit(EXIT_FAILURE);
+		}
+
+		avro_reader_dump(reader, stderr);
+		avro_datum_decref(datum_out);
+		avro_reader_free(reader);
+		avro_writer_free(writer);
+	}
+}
+
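+/* Encode datum as JSON and fail the test if the encoding differs from
+ * the expected string.
+ */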
+static void test_json(avro_datum_t datum, const char *expected)
+{
+	char  *json = NULL;
+	avro_datum_to_json(datum, 1, &json);
+	if (strcmp(json, expected) != 0) {
+		fprintf(stderr, "Unexpected JSON encoding: %s\n", json);
+		exit(EXIT_FAILURE);
+	}
+	free(json);
+}
+
+static int test_string(void)
+{
+	unsigned int i;
+	const char *strings[] = { "Four score and seven years ago",
+		"our father brought forth on this continent",
+		"a new nation", "conceived in Liberty",
+		"and dedicated to the proposition that all men are created equal."
+	};
+	avro_schema_t writer_schema = avro_schema_string();
+	for (i = 0; i < sizeof(strings) / sizeof(strings[0]); i++) {
+		avro_datum_t datum = avro_givestring(strings[i], NULL);
+		write_read_check(writer_schema, datum, NULL, NULL, "string");
+		avro_datum_decref(datum);
+	}
+
+	avro_datum_t  datum = avro_givestring(strings[0], NULL);
+	test_json(datum, "\"Four score and seven years ago\"");
+	avro_datum_decref(datum);
+
+	// The following should bork if we don't copy the string value
+	// correctly (since we'll try to free a static string).
+
+	datum = avro_string("this should be copied");
+	avro_string_set(datum, "also this");
+	avro_datum_decref(datum);
+
+	avro_schema_decref(writer_schema);
+	return 0;
+}
+
+static int test_bytes(void)
+{
+	char bytes[] = { 0xDE, 0xAD, 0xBE, 0xEF };
+	avro_schema_t writer_schema = avro_schema_bytes();
+	avro_datum_t datum;
+	avro_datum_t expected_datum;
+
+	datum = avro_givebytes(bytes, sizeof(bytes), NULL);
+	write_read_check(writer_schema, datum, NULL, NULL, "bytes");
+	test_json(datum, "\"\\u00de\\u00ad\\u00be\\u00ef\"");
+	avro_datum_decref(datum);
+	avro_schema_decref(writer_schema);
+
+	datum = avro_givebytes(NULL, 0, NULL);
+	avro_givebytes_set(datum, bytes, sizeof(bytes), NULL);
+	expected_datum = avro_givebytes(bytes, sizeof(bytes), NULL);
+	if (!avro_datum_equal(datum, expected_datum)) {
+		fprintf(stderr,
+		        "Expected equal bytes instances.\n");
+		exit(EXIT_FAILURE);
+	}
+	avro_datum_decref(datum);
+	avro_datum_decref(expected_datum);
+
+	// The following should bork if we don't copy the bytes value
+	// correctly (since we'll try to free a static string).
+
+	datum = avro_bytes("original", 8);
+	avro_bytes_set(datum, "alsothis", 8);
+	avro_datum_decref(datum);
+
+	avro_schema_decref(writer_schema);
+	return 0;
+}
+
+static int test_int32(void)
+{
+	int i;
+	avro_schema_t writer_schema = avro_schema_int();
+	avro_schema_t long_schema = avro_schema_long();
+	avro_schema_t float_schema = avro_schema_float();
+	avro_schema_t double_schema = avro_schema_double();
+	for (i = 0; i < 100; i++) {
+		int32_t  value = rand_int32();
+		avro_datum_t datum = avro_int32(value);
+		avro_datum_t long_datum = avro_int64(value);
+		avro_datum_t float_datum = avro_float(value);
+		avro_datum_t double_datum = avro_double(value);
+		write_read_check(writer_schema, datum, NULL, NULL, "int");
+		write_read_check(writer_schema, datum,
+				 long_schema, long_datum, "int->long");
+		write_read_check(writer_schema, datum,
+				 float_schema, float_datum, "int->float");
+		write_read_check(writer_schema, datum,
+				 double_schema, double_datum, "int->double");
+		avro_datum_decref(datum);
+		avro_datum_decref(long_datum);
+		avro_datum_decref(float_datum);
+		avro_datum_decref(double_datum);
+	}
+
+	avro_datum_t  datum = avro_int32(10000);
+	test_json(datum, "10000");
+	avro_datum_decref(datum);
+
+	avro_schema_decref(writer_schema);
+	avro_schema_decref(long_schema);
+	avro_schema_decref(float_schema);
+	avro_schema_decref(double_schema);
+	return 0;
+}
+
+static int test_int64(void)
+{
+	int i;
+	avro_schema_t writer_schema = avro_schema_long();
+	avro_schema_t float_schema = avro_schema_float();
+	avro_schema_t double_schema = avro_schema_double();
+	for (i = 0; i < 100; i++) {
+		int64_t  value = rand_int64();
+		avro_datum_t datum = avro_int64(value);
+		avro_datum_t float_datum = avro_float(value);
+		avro_datum_t double_datum = avro_double(value);
+		write_read_check(writer_schema, datum, NULL, NULL, "long");
+		write_read_check(writer_schema, datum,
+				 float_schema, float_datum, "long->float");
+		write_read_check(writer_schema, datum,
+				 double_schema, double_datum, "long->double");
+		avro_datum_decref(datum);
+		avro_datum_decref(float_datum);
+		avro_datum_decref(double_datum);
+	}
+
+	avro_datum_t  datum = avro_int64(10000);
+	test_json(datum, "10000");
+	avro_datum_decref(datum);
+
+	avro_schema_decref(writer_schema);
+	avro_schema_decref(float_schema);
+	avro_schema_decref(double_schema);
+	return 0;
+}
+
+static int test_double(void)
+{
+	int i;
+	avro_schema_t schema = avro_schema_double();
+	for (i = 0; i < 100; i++) {
+		avro_datum_t datum = avro_double(rand_number(-1.0E10, 1.0E10));
+		write_read_check(schema, datum, NULL, NULL, "double");
+		avro_datum_decref(datum);
+	}
+
+	avro_datum_t  datum = avro_double(2000.0);
+	test_json(datum, "2000.0");
+	avro_datum_decref(datum);
+
+	avro_schema_decref(schema);
+	return 0;
+}
+
+static int test_float(void)
+{
+	int i;
+	avro_schema_t schema = avro_schema_float();
+	avro_schema_t double_schema = avro_schema_double();
+	for (i = 0; i < 100; i++) {
+		float  value = rand_number(-1.0E10, 1.0E10);
+		avro_datum_t datum = avro_float(value);
+		avro_datum_t double_datum = avro_double(value);
+		write_read_check(schema, datum, NULL, NULL, "float");
+		write_read_check(schema, datum,
+				 double_schema, double_datum, "float->double");
+		avro_datum_decref(datum);
+		avro_datum_decref(double_datum);
+	}
+
+	avro_datum_t  datum = avro_float(2000.0);
+	test_json(datum, "2000.0");
+	avro_datum_decref(datum);
+
+	avro_schema_decref(schema);
+	avro_schema_decref(double_schema);
+	return 0;
+}
+
+static int test_boolean(void)
+{
+	int i;
+	const char  *expected_json[] = { "false", "true" };
+	avro_schema_t schema = avro_schema_boolean();
+	for (i = 0; i <= 1; i++) {
+		avro_datum_t datum = avro_boolean(i);
+		write_read_check(schema, datum, NULL, NULL, "boolean");
+		test_json(datum, expected_json[i]);
+		avro_datum_decref(datum);
+	}
+	avro_schema_decref(schema);
+	return 0;
+}
+
+static int test_null(void)
+{
+	avro_schema_t schema = avro_schema_null();
+	avro_datum_t datum = avro_null();
+	write_read_check(schema, datum, NULL, NULL, "null");
+	test_json(datum, "null");
+	avro_datum_decref(datum);
+	return 0;
+}
+
+static int test_record(void)
+{
+	avro_schema_t schema = avro_schema_record("person", NULL);
+	avro_schema_record_field_append(schema, "name", avro_schema_string());
+	avro_schema_record_field_append(schema, "age", avro_schema_int());
+
+	avro_datum_t datum = avro_record(schema);
+	avro_datum_t name_datum, age_datum;
+
+	name_datum = avro_givestring("Joseph Campbell", NULL);
+	age_datum = avro_int32(83);
+
+	avro_record_set(datum, "name", name_datum);
+	avro_record_set(datum, "age", age_datum);
+
+	write_read_check(schema, datum, NULL, NULL, "record");
+	test_json(datum, "{\"name\": \"Joseph Campbell\", \"age\": 83}");
+
+	int  rc;
+	avro_record_set_field_value(rc, datum, int32, "age", 104);
+
+	int32_t  age = 0;
+	avro_record_get_field_value(rc, datum, int32, "age", &age);
+	if (age != 104) {
+		fprintf(stderr, "Incorrect age value\n");
+		exit(EXIT_FAILURE);
+	}
+
+	avro_datum_decref(name_datum);
+	avro_datum_decref(age_datum);
+	avro_datum_decref(datum);
+	avro_schema_decref(schema);
+	return 0;
+}
+
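+/* A singly linked list encoded as a recursive schema: each node's
+ * "next" field is a union of null (end of list) and the "list" record
+ * type itself.
+ */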
+static int test_nested_record(void)
+{
+	const char  *json =
+		"{"
+		"  \"type\": \"record\","
+		"  \"name\": \"list\","
+		"  \"fields\": ["
+		"    { \"name\": \"x\", \"type\": \"int\" },"
+		"    { \"name\": \"y\", \"type\": \"int\" },"
+		"    { \"name\": \"next\", \"type\": [\"null\",\"list\"]}"
+		"  ]"
+		"}";
+
+	int  rval;
+
+	avro_schema_t schema = NULL;
+	avro_schema_error_t error;
+	avro_schema_from_json(json, strlen(json), &schema, &error);
+
+	avro_datum_t  head = avro_datum_from_schema(schema);
+	avro_record_set_field_value(rval, head, int32, "x", 10);
+	avro_record_set_field_value(rval, head, int32, "y", 10);
+
+	avro_datum_t  next = NULL;
+	avro_datum_t  tail = NULL;
+
+	avro_record_get(head, "next", &next);
+	avro_union_set_discriminant(next, 1, &tail);
+	avro_record_set_field_value(rval, tail, int32, "x", 20);
+	avro_record_set_field_value(rval, tail, int32, "y", 20);
+
+	avro_record_get(tail, "next", &next);
+	avro_union_set_discriminant(next, 0, NULL);
+
+	write_read_check(schema, head, NULL, NULL, "nested record");
+
+	avro_schema_decref(schema);
+	avro_datum_decref(head);
+
+	return 0;
+}
+
+static int test_enum(void)
+{
+	enum avro_languages {
+		AVRO_C,
+		AVRO_CPP,
+		AVRO_PYTHON,
+		AVRO_RUBY,
+		AVRO_JAVA
+	};
+	avro_schema_t schema = avro_schema_enum("language");
+	avro_datum_t datum = avro_enum(schema, AVRO_C);
+
+	avro_schema_enum_symbol_append(schema, "C");
+	avro_schema_enum_symbol_append(schema, "C++");
+	avro_schema_enum_symbol_append(schema, "Python");
+	avro_schema_enum_symbol_append(schema, "Ruby");
+	avro_schema_enum_symbol_append(schema, "Java");
+
+	if (avro_enum_get(datum) != AVRO_C) {
+		fprintf(stderr, "Unexpected enum value AVRO_C\n");
+		exit(EXIT_FAILURE);
+	}
+
+	if (strcmp(avro_enum_get_name(datum), "C") != 0) {
+		fprintf(stderr, "Unexpected enum value name C\n");
+		exit(EXIT_FAILURE);
+	}
+
+	write_read_check(schema, datum, NULL, NULL, "enum");
+	test_json(datum, "\"C\"");
+
+	avro_enum_set(datum, AVRO_CPP);
+	if (strcmp(avro_enum_get_name(datum), "C++") != 0) {
+		fprintf(stderr, "Unexpected enum value name C++\n");
+		exit(EXIT_FAILURE);
+	}
+
+	write_read_check(schema, datum, NULL, NULL, "enum");
+	test_json(datum, "\"C++\"");
+
+	avro_enum_set_name(datum, "Python");
+	if (avro_enum_get(datum) != AVRO_PYTHON) {
+		fprintf(stderr, "Unexpected enum value AVRO_PYTHON\n");
+		exit(EXIT_FAILURE);
+	}
+
+	write_read_check(schema, datum, NULL, NULL, "enum");
+	test_json(datum, "\"Python\"");
+
+	avro_datum_decref(datum);
+	avro_schema_decref(schema);
+	return 0;
+}
+
+static int test_array(void)
+{
+	int i, rval;
+	avro_schema_t schema = avro_schema_array(avro_schema_int());
+	avro_datum_t datum = avro_array(schema);
+
+	for (i = 0; i < 10; i++) {
+		avro_datum_t i32_datum = avro_int32(i);
+		rval = avro_array_append_datum(datum, i32_datum);
+		avro_datum_decref(i32_datum);
+		if (rval) {
+			exit(EXIT_FAILURE);
+		}
+	}
+
+	if (avro_array_size(datum) != 10) {
+		fprintf(stderr, "Unexpected array size");
+		exit(EXIT_FAILURE);
+	}
+
+	write_read_check(schema, datum, NULL, NULL, "array");
+	test_json(datum, "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]");
+	avro_datum_decref(datum);
+	avro_schema_decref(schema);
+	return 0;
+}
+
+static int test_map(void)
+{
+	avro_schema_t schema = avro_schema_map(avro_schema_long());
+	avro_datum_t datum = avro_map(schema);
+	int64_t i = 0;
+	char *nums[] =
+	    { "zero", "one", "two", "three", "four", "five", "six", NULL };
+	while (nums[i]) {
+		avro_datum_t i_datum = avro_int64(i);
+		avro_map_set(datum, nums[i], i_datum);
+		avro_datum_decref(i_datum);
+		i++;
+	}
+
	/* The datum is a map, so query its size with avro_map_size(). */
	if (avro_map_size(datum) != 7) {
+		fprintf(stderr, "Unexpected map size\n");
+		exit(EXIT_FAILURE);
+	}
+
+	avro_datum_t value;
+	const char  *key;
+	avro_map_get_key(datum, 2, &key);
+	avro_map_get(datum, key, &value);
+	int64_t  val;
+	avro_int64_get(value, &val);
+
+	if (val != 2) {
+		fprintf(stderr, "Unexpected map value 2\n");
+		exit(EXIT_FAILURE);
+	}
+
+	int  index;
+	if (avro_map_get_index(datum, "two", &index)) {
+		fprintf(stderr, "Can't get index for key \"two\": %s\n",
+			avro_strerror());
+		exit(EXIT_FAILURE);
+	}
+	if (index != 2) {
+		fprintf(stderr, "Unexpected index for key \"two\"\n");
+		exit(EXIT_FAILURE);
+	}
+	if (!avro_map_get_index(datum, "foobar", &index)) {
+		fprintf(stderr, "Unexpected index for key \"foobar\"\n");
+		exit(EXIT_FAILURE);
+	}
+
+	write_read_check(schema, datum, NULL, NULL, "map");
+	test_json(datum,
+		  "{\"zero\": 0, \"one\": 1, \"two\": 2, \"three\": 3, "
+		  "\"four\": 4, \"five\": 5, \"six\": 6}");
+	avro_datum_decref(datum);
+	avro_schema_decref(schema);
+	return 0;
+}
+
+static int test_union(void)
+{
+	avro_schema_t schema = avro_schema_union();
+	avro_datum_t union_datum;
+	avro_datum_t datum;
+	avro_datum_t union_datum1;
+	avro_datum_t datum1;
+
+	avro_schema_union_append(schema, avro_schema_string());
+	avro_schema_union_append(schema, avro_schema_int());
+	avro_schema_union_append(schema, avro_schema_null());
+
+	datum = avro_givestring("Follow your bliss.", NULL);
+	union_datum = avro_union(schema, 0, datum);
+
+	if (avro_union_discriminant(union_datum) != 0) {
+		fprintf(stderr, "Unexpected union discriminant\n");
+		exit(EXIT_FAILURE);
+	}
+
+	if (avro_union_current_branch(union_datum) != datum) {
+		fprintf(stderr, "Unexpected union branch datum\n");
+		exit(EXIT_FAILURE);
+	}
+
+	union_datum1 = avro_datum_from_schema(schema);
+	avro_union_set_discriminant(union_datum1, 0, &datum1);
+	avro_givestring_set(datum1, "Follow your bliss.", NULL);
+
+	if (!avro_datum_equal(datum, datum1)) {
+		fprintf(stderr, "Union values should be equal\n");
+		exit(EXIT_FAILURE);
+	}
+
+	write_read_check(schema, union_datum, NULL, NULL, "union");
+	test_json(union_datum, "{\"string\": \"Follow your bliss.\"}");
+
+	avro_datum_decref(datum);
+	avro_union_set_discriminant(union_datum, 2, &datum);
+	test_json(union_datum, "null");
+
+	avro_datum_decref(union_datum);
+	avro_datum_decref(datum);
+	avro_datum_decref(union_datum1);
+	avro_schema_decref(schema);
+	return 0;
+}
+
+static int test_fixed(void)
+{
+	char bytes[] = { 0xD, 0xA, 0xD, 0xA, 0xB, 0xA, 0xB, 0xA };
+	avro_schema_t schema = avro_schema_fixed("msg", sizeof(bytes));
+	avro_datum_t datum;
+	avro_datum_t expected_datum;
+
+	datum = avro_givefixed(schema, bytes, sizeof(bytes), NULL);
+	write_read_check(schema, datum, NULL, NULL, "fixed");
+	test_json(datum, "\"\\r\\n\\r\\n\\u000b\\n\\u000b\\n\"");
+	avro_datum_decref(datum);
+
+	datum = avro_givefixed(schema, NULL, sizeof(bytes), NULL);
+	avro_givefixed_set(datum, bytes, sizeof(bytes), NULL);
+	expected_datum = avro_givefixed(schema, bytes, sizeof(bytes), NULL);
+	if (!avro_datum_equal(datum, expected_datum)) {
+		fprintf(stderr,
+		        "Expected equal fixed instances.\n");
+		exit(EXIT_FAILURE);
+	}
+	avro_datum_decref(datum);
+	avro_datum_decref(expected_datum);
+
+	// The following would crash if we didn't copy the fixed value
+	// correctly (since we would end up trying to free a static string).
+
+	datum = avro_fixed(schema, "original", 8);
+	avro_fixed_set(datum, "alsothis", 8);
+	avro_datum_decref(datum);
+
+	avro_schema_decref(schema);
+	return 0;
+}
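+
+/*
+ * Naming convention exercised above: the avro_give* constructors hand the
+ * caller's buffer to the datum without copying (the last argument names the
+ * function used to free it; NULL here, because the buffers live on the
+ * stack), whereas avro_fixed() and avro_fixed_set() copy the buffer.
+ */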
+
+int main(void)
+{
+	avro_set_allocator(test_allocator, NULL);
+
+	unsigned int i;
+	struct avro_tests {
+		char *name;
+		avro_test func;
+	} tests[] = {
+		{ "string", test_string },
+		{ "bytes", test_bytes },
+		{ "int", test_int32 },
+		{ "long", test_int64 },
+		{ "float", test_float },
+		{ "double", test_double },
+		{ "boolean", test_boolean },
+		{ "null", test_null },
+		{ "record", test_record },
+		{ "nested_record", test_nested_record },
+		{ "enum", test_enum },
+		{ "array", test_array },
+		{ "map", test_map },
+		{ "fixed", test_fixed },
+		{ "union", test_union }
+	};
+
+	init_rand();
+	for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++) {
+		struct avro_tests *test = tests + i;
+		fprintf(stderr, "**** Running %s tests ****\n", test->name);
+		if (test->func() != 0) {
+			return EXIT_FAILURE;
+		}
+	}
+	return EXIT_SUCCESS;
+}
diff --git a/lang/c/tests/test_avro_schema.c b/lang/c/tests/test_avro_schema.c
new file mode 100644
index 0000000..d4ac86f
--- /dev/null
+++ b/lang/c/tests/test_avro_schema.c
@@ -0,0 +1,316 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro.h"
+#include "avro_private.h"
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/types.h>
+#ifdef _WIN32
+ #include "msdirent.h"
+#else
+ #include <dirent.h>
+#endif
+
+int test_cases = 0;
+avro_writer_t avro_stderr;
+
+static void run_tests(char *dirpath, int should_pass)
+{
+	char jsontext[4096];
+	char jsontext2[4096];
+	size_t rval;
+	char filepath[1024];
+	DIR *dir;
+	struct dirent *dent;
+	FILE *fp;
+	avro_schema_t schema;
+	avro_writer_t jsontext2_writer;
+
+	dir = opendir(dirpath);
+	if (dir == NULL) {
+		fprintf(stderr, "Unable to open '%s'\n", dirpath);
+		exit(EXIT_FAILURE);
+	}
+	do {
+		dent = readdir(dir);
+
+		/* Suppress failures on CVS directories */
+		if ( dent && !strcmp( (const char *) dent->d_name, "CVS" ) )
+			continue;
+
+		if (dent && dent->d_name[0] != '.') {
+			int test_rval;
+			snprintf(filepath, sizeof(filepath), "%s/%s", dirpath,
+				 dent->d_name);
+			fprintf(stderr, "TEST %s...", filepath);
+			fp = fopen(filepath, "r");
+			if (!fp) {
+				fprintf(stderr, "can't open!\n");
+				exit(EXIT_FAILURE);
+			}
+			rval = fread(jsontext, 1, sizeof(jsontext) - 1, fp);
+			fclose(fp);
+			jsontext[rval] = '\0';
+			test_rval =
+			    avro_schema_from_json(jsontext, 0, &schema, NULL);
+			test_cases++;
+			if (test_rval == 0) {
+				if (should_pass) {
+					avro_schema_t schema_copy =
+					    avro_schema_copy(schema);
+					fprintf(stderr, "pass\n");
+					avro_schema_to_json(schema,
+							    avro_stderr);
+					fprintf(stderr, "\n");
+					if (!avro_schema_equal
+					    (schema, schema_copy)) {
+						fprintf(stderr,
+							"schema copy is not equal to the original schema\n");
+						exit(EXIT_FAILURE);
+					}
+					jsontext2_writer = avro_writer_memory(jsontext2, sizeof(jsontext2));
+					if (avro_schema_to_json(schema, jsontext2_writer)) {
+						fprintf(stderr, "failed to write schema (%s)\n",
+							avro_strerror());
+						exit(EXIT_FAILURE);
+					}
+					avro_write(jsontext2_writer, (void *)"", 1);  /* zero terminate */
+					avro_writer_free(jsontext2_writer);
+					avro_schema_decref(schema);
+					if (avro_schema_from_json(jsontext2, 0, &schema, NULL)) {
+						fprintf(stderr, "failed to write then read schema (%s)\n",
+							avro_strerror());
+						exit(EXIT_FAILURE);
+					}
+					if (!avro_schema_equal
+					    (schema, schema_copy)) {
+						fprintf(stderr, "failed read-write-read cycle (%s)\n",
+							avro_strerror());
+						exit(EXIT_FAILURE);
+					}
+					avro_schema_decref(schema_copy);
+					avro_schema_decref(schema);
+				} else {
+					/*
+					 * Unexpected success 
+					 */
+					fprintf(stderr,
+						"fail! (shouldn't succeed but did)\n");
+					exit(EXIT_FAILURE);
+				}
+			} else {
+				if (should_pass) {
+					fprintf(stderr, "%s\n", avro_strerror());
+					fprintf(stderr,
+						"fail! (should have succeeded but didn't)\n");
+					exit(EXIT_FAILURE);
+				} else {
+					fprintf(stderr, "pass\n");
+				}
+			}
+		}
+	}
+	while (dent != NULL);
+	closedir(dir);
+}
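+
+/*
+ * run_tests() expects a flat directory of standalone JSON schema files,
+ * one schema per file.  A typical layout (the file names here are only
+ * illustrative) would be:
+ *
+ *   schema_tests/pass/record.json      must parse and round-trip
+ *   schema_tests/fail/bad_union.json   must be rejected by the parser
+ */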
+
+static int test_array(void)
+{
+	avro_schema_t schema = avro_schema_array(avro_schema_int());
+
+	if (!avro_schema_equal
+	    (avro_schema_array_items(schema), avro_schema_int())) {
+		fprintf(stderr, "Unexpected array items schema\n");
+		exit(EXIT_FAILURE);
+	}
+
+	avro_schema_decref(schema);
+	return 0;
+}
+
+static int test_enum(void)
+{
+	enum avro_languages {
+		AVRO_C,
+		AVRO_CPP,
+		AVRO_PYTHON,
+		AVRO_RUBY,
+		AVRO_JAVA
+	};
+	avro_schema_t schema = avro_schema_enum("language");
+
+	avro_schema_enum_symbol_append(schema, "C");
+	avro_schema_enum_symbol_append(schema, "C++");
+	avro_schema_enum_symbol_append(schema, "Python");
+	avro_schema_enum_symbol_append(schema, "Ruby");
+	avro_schema_enum_symbol_append(schema, "Java");
+
+	const char  *symbol1 = avro_schema_enum_get(schema, 1);
+	if (strcmp(symbol1, "C++") != 0) {
+		fprintf(stderr, "Unexpected enum schema symbol\n");
+		exit(EXIT_FAILURE);
+	}
+
+	if (avro_schema_enum_get_by_name(schema, "C++") != 1) {
+		fprintf(stderr, "Unexpected enum schema index\n");
+		exit(EXIT_FAILURE);
+	}
+
+	if (avro_schema_enum_get_by_name(schema, "Haskell") != -1) {
+		fprintf(stderr, "Unexpected enum schema index\n");
+		exit(EXIT_FAILURE);
+	}
+
+	avro_schema_decref(schema);
+	return 0;
+}
+
+static int test_fixed(void)
+{
+	avro_schema_t schema = avro_schema_fixed("msg", 8);
+	if (avro_schema_fixed_size(schema) != 8) {
+		fprintf(stderr, "Unexpected fixed size\n");
+		exit(EXIT_FAILURE);
+	}
+
+	avro_schema_decref(schema);
+	return 0;
+}
+
+static int test_map(void)
+{
+	avro_schema_t schema = avro_schema_map(avro_schema_long());
+
+	if (!avro_schema_equal
+	    (avro_schema_map_values(schema), avro_schema_long())) {
+		fprintf(stderr, "Unexpected map values schema\n");
+		exit(EXIT_FAILURE);
+	}
+
+	avro_schema_decref(schema);
+	return 0;
+}
+
+static int test_record(void)
+{
+	avro_schema_t schema = avro_schema_record("person", NULL);
+
+	avro_schema_record_field_append(schema, "name", avro_schema_string());
+	avro_schema_record_field_append(schema, "age", avro_schema_int());
+
+	if (avro_schema_record_field_get_index(schema, "name") != 0) {
+		fprintf(stderr, "Incorrect index for \"name\" field\n");
+		exit(EXIT_FAILURE);
+	}
+
+	if (avro_schema_record_field_get_index(schema, "unknown") != -1) {
+		fprintf(stderr, "Incorrect index for \"unknown\" field\n");
+		exit(EXIT_FAILURE);
+	}
+
+	avro_schema_t  name_field =
+		avro_schema_record_field_get(schema, "name");
+	if (!avro_schema_equal(name_field, avro_schema_string())) {
+		fprintf(stderr, "Unexpected name field\n");
+		exit(EXIT_FAILURE);
+	}
+
+	avro_schema_t  field1 =
+		avro_schema_record_field_get_by_index(schema, 1);
+	if (!avro_schema_equal(field1, avro_schema_int())) {
+		fprintf(stderr, "Unexpected field 1\n");
+		exit(EXIT_FAILURE);
+	}
+
+	avro_schema_decref(schema);
+	return 0;
+}
+
+static int test_union(void)
+{
+	avro_schema_t schema = avro_schema_union();
+
+	avro_schema_union_append(schema, avro_schema_string());
+	avro_schema_union_append(schema, avro_schema_int());
+	avro_schema_union_append(schema, avro_schema_null());
+
+	if (!avro_schema_equal
+	    (avro_schema_string(),
+	     avro_schema_union_branch(schema, 0))) {
+		fprintf(stderr, "Unexpected union schema branch 0\n");
+		exit(EXIT_FAILURE);
+	}
+
+	if (!avro_schema_equal
+	    (avro_schema_string(),
+	     avro_schema_union_branch_by_name(schema, NULL, "string"))) {
+		fprintf(stderr, "Unexpected union schema branch \"string\"\n");
+		exit(EXIT_FAILURE);
+	}
+
+	avro_schema_decref(schema);
+	return 0;
+}
+
+int main(int argc, char *argv[])
+{
+	char *srcdir = getenv("srcdir");
+	char path[1024];
+
+	AVRO_UNUSED(argc);
+	AVRO_UNUSED(argv);
+
+	if (!srcdir) {
+		srcdir = ".";
+	}
+
+	avro_stderr = avro_writer_file(stderr);
+
+	/*
+	 * Run the tests that should pass 
+	 */
+	snprintf(path, sizeof(path), "%s/schema_tests/pass", srcdir);
+	fprintf(stderr, "RUNNING %s\n", path);
+	run_tests(path, 1);
+	snprintf(path, sizeof(path), "%s/schema_tests/fail", srcdir);
+	fprintf(stderr, "RUNNING %s\n", path);
+	run_tests(path, 0);
+
+	fprintf(stderr, "*** Running array tests ***\n");
+	test_array();
+	fprintf(stderr, "*** Running enum tests ***\n");
+	test_enum();
+	fprintf(stderr, "*** Running fixed tests ***\n");
+	test_fixed();
+	fprintf(stderr, "*** Running map tests ***\n");
+	test_map();
+	fprintf(stderr, "*** Running record tests ***\n");
+	test_record();
+	fprintf(stderr, "*** Running union tests ***\n");
+	test_union();
+
+	fprintf(stderr, "==================================================\n");
+	fprintf(stderr,
+		"Finished running %d schema test cases successfully\n",
+		test_cases);
+	fprintf(stderr, "==================================================\n");
+
+	avro_writer_free(avro_stderr);
+	return EXIT_SUCCESS;
+}
diff --git a/lang/c/tests/test_avro_schema_names.c b/lang/c/tests/test_avro_schema_names.c
new file mode 100644
index 0000000..61ed768
--- /dev/null
+++ b/lang/c/tests/test_avro_schema_names.c
@@ -0,0 +1,137 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0 
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License. 
+ */
+
+#include "avro.h"
+#include "avro_private.h"
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+int test_cases = 0;
+avro_writer_t avro_stderr;
+
+static void test_helper(const char *json,
+			const char *name,
+			avro_schema_t expected)
+{
+	int rc;
+	avro_schema_t base;
+	avro_schema_error_t serror;
+
+	rc = avro_schema_from_json(json, strlen(json), &base, &serror);
+	if (rc != 0) {
+		fprintf(stderr,
+			"Error parsing Avro schema:\n%s\n",
+			json);
+		exit(EXIT_FAILURE);
+	}
+
+	avro_schema_t actual =
+		avro_schema_get_subschema(base, name);
+
+	if (actual == NULL) {
+		fprintf(stderr,
+			"No subschema named \"%s\" in %s\n",
+			name, avro_schema_type_name(base));
+		exit(EXIT_FAILURE);
+	}
+
+	if (!avro_schema_equal(actual, expected)) {
+		fprintf(stderr,
+			"Subschema \"%s\" should be %s, "
+			"is actually %s\n",
+			name,
+			avro_schema_type_name(expected),
+			avro_schema_type_name(actual));
+		exit(EXIT_FAILURE);
+	}
+
+	avro_schema_decref(base);
+	avro_schema_decref(expected);
+}
+
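+/*
+ * The "name" passed to avro_schema_get_subschema() follows the convention
+ * exercised by the tests below: "[]" selects an array's item schema, "{}"
+ * selects a map's value schema, a field name selects that field's schema
+ * within a record, and a type name selects the matching union branch.
+ */
+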
+static void test_array_schema_01(void)
+{
+	static char *JSON =
+		"{"
+		"  \"type\": \"array\","
+		"  \"items\": \"long\""
+		"}";
+
+	test_helper(JSON, "[]", avro_schema_long());
+}
+
+static void test_map_schema_01(void)
+{
+	static char *JSON =
+		"{"
+		"  \"type\": \"map\","
+		"  \"values\": \"long\""
+		"}";
+
+	test_helper(JSON, "{}", avro_schema_long());
+}
+
+static void test_record_schema_01(void)
+{
+	static char *JSON =
+		"{"
+		"  \"type\": \"record\","
+		"  \"name\": \"test\","
+		"  \"fields\": ["
+		"    { \"name\": \"a\", \"type\": \"long\" }"
+		"  ]"
+		"}";
+
+	test_helper(JSON, "a", avro_schema_long());
+}
+
+static void test_union_schema_01(void)
+{
+	static char *JSON =
+		"["
+		"  \"long\","
+		"  {"
+		"    \"type\": \"record\","
+		"    \"name\": \"test\","
+		"    \"fields\": ["
+		"      { \"name\": \"a\", \"type\": \"long\" }"
+		"    ]"
+		"  }"
+		"]";
+
+	test_helper(JSON, "long", avro_schema_long());
+}
+
+int main(int argc, char *argv[])
+{
+	AVRO_UNUSED(argc);
+	AVRO_UNUSED(argv);
+
+	test_array_schema_01();
+	test_map_schema_01();
+	test_record_schema_01();
+	test_union_schema_01();
+
+	return EXIT_SUCCESS;
+}
diff --git a/lang/c/tests/test_avro_values.c b/lang/c/tests/test_avro_values.c
new file mode 100644
index 0000000..a35e69f
--- /dev/null
+++ b/lang/c/tests/test_avro_values.c
@@ -0,0 +1,1455 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+/* Test cases for the new avro_value_t interface */
+
+#include <limits.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+#include "avro.h"
+#include "avro_private.h"
+
+typedef int (*avro_test) (void);
+
+#ifndef SHOW_ALLOCATIONS
+#define SHOW_ALLOCATIONS 0
+#endif
+
+/*
+ * Use a custom allocator that verifies that the size that we use to
+ * free an object matches the size that we use to allocate it.
+ */
+
+static void *
+test_allocator(void *ud, void *ptr, size_t osize, size_t nsize)
+{
+	AVRO_UNUSED(ud);
+	AVRO_UNUSED(osize);
+
+#if SHOW_ALLOCATIONS
+	fprintf(stderr, "alloc(%p, %" PRIsz ", %" PRIsz ") => ", ptr, osize, nsize);
+#endif
+
+	if (nsize == 0) {
+		size_t  *size = ((size_t *) ptr) - 1;
+		if (osize != *size) {
+			fprintf(stderr,
+#if SHOW_ALLOCATIONS
+				"ERROR!\n"
+#endif
+				"Error freeing %p:\n"
+				"Size passed to avro_free (%" PRIsz ") "
+				"doesn't match size passed to "
+				"avro_malloc (%" PRIsz ")\n",
+				ptr, osize, *size);
+			exit(EXIT_FAILURE);
+		}
+		free(size);
+#if SHOW_ALLOCATIONS
+		fprintf(stderr, "NULL\n");
+#endif
+		return NULL;
+	} else {
+		size_t  real_size = nsize + sizeof(size_t);
+		size_t  *old_size = ptr ? ((size_t *) ptr) - 1 : NULL;
+		size_t  *size = (size_t *) realloc(old_size, real_size);
+		if (size == NULL) {
+			/* Propagate allocation failure to the caller. */
+			return NULL;
+		}
+		*size = nsize;
+#if SHOW_ALLOCATIONS
+		fprintf(stderr, "%p\n", (size+1));
+#endif
+		return (size + 1);
+	}
+}
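+
+/*
+ * A sketch of the layout test_allocator produces: every block is prefixed
+ * with a size_t header recording the requested size, and the caller is
+ * handed the address just past that header:
+ *
+ *   [ size_t nsize | nsize bytes of user data ]
+ *                   ^ pointer returned to the caller
+ */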
+
+void
+init_rand(void)
+{
+	srand(time(NULL));
+}
+
+double
+rand_number(double from, double to)
+{
+	double range = to - from;
+	return from + ((double)rand() / (RAND_MAX + 1.0)) * range;
+}
+
+int64_t
+rand_int64(void)
+{
+	return (int64_t) rand_number(LONG_MIN, LONG_MAX);
+}
+
+int32_t
+rand_int32(void)
+{
+	return (int32_t) rand_number(INT_MIN, INT_MAX);
+}
+
+size_t
+rand_count(void)
+{
+	return (size_t) rand_number(0, 100);
+}
+
+#define check_(call) \
+	do { \
+		int _rval = call; \
+		if (_rval) { return _rval; } \
+	} while (0)
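+
+/*
+ * check_() just propagates a non-zero return code, so, for example,
+ * check_(_check_copy(val)) behaves like:
+ *
+ *   { int _rval = _check_copy(val); if (_rval) return _rval; }
+ */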
+
+/*
+ * Verify that we can't call any of the getters and setters that don't
+ * apply to the given value.
+ */
+
+static int
+_check_invalid_methods(const char *name, avro_value_t *val)
+{
+	avro_type_t  type = avro_value_get_type(val);
+
+/* For a description of how GCC and Visual Studio 2008 differ in their
+ * treatment of __VA_ARGS__ in variadic macros, see:
+ * http://stackoverflow.com/questions/2575864/the-problem-about-different-treatment-to-va-args-when-using-vs-2008-and-gcc
+ */
+#define expand_args(...) __VA_ARGS__
+#define check_bad(method, ...) \
+	do { \
+          if (!expand_args(avro_value_##method(__VA_ARGS__))) {  \
+			fprintf(stderr, \
+				"Shouldn't be able to " #method " a %s\n", \
+				name); \
+			return EXIT_FAILURE; \
+		} \
+	} while (0)
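+
+/*
+ * Note the negation in check_bad: a call such as
+ * check_bad(get_boolean, val, &dummy) fails the test if
+ * avro_value_get_boolean(val, &dummy) unexpectedly *succeeds* on a value
+ * of some other type.
+ */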
+
+	if (type != AVRO_BOOLEAN) {
+		int  dummy = 0;
+		check_bad(get_boolean, val, &dummy);
+		check_bad(set_boolean, val, dummy);
+	}
+
+	if (type != AVRO_BYTES) {
+		const void  *cbuf = NULL;
+		void  *buf = NULL;
+		size_t  size = 0;
+		check_bad(get_bytes, val, &cbuf, &size);
+		check_bad(set_bytes, val, buf, size);
+	}
+
+	if (type != AVRO_DOUBLE) {
+		double  dummy = 0;
+		check_bad(get_double, val, &dummy);
+		check_bad(set_double, val, dummy);
+	}
+
+	if (type != AVRO_FLOAT) {
+		float  dummy = 0;
+		check_bad(get_float, val, &dummy);
+		check_bad(set_float, val, dummy);
+	}
+
+	if (type != AVRO_INT32) {
+		int32_t  dummy = 0;
+		check_bad(get_int, val, &dummy);
+		check_bad(set_int, val, dummy);
+	}
+
+	if (type != AVRO_INT64) {
+		int64_t  dummy = 0;
+		check_bad(get_long, val, &dummy);
+		check_bad(set_long, val, dummy);
+	}
+
+	if (type != AVRO_NULL) {
+		check_bad(get_null, val);
+		check_bad(set_null, val);
+	}
+
+	if (type != AVRO_STRING) {
+		const char  *cstr = NULL;
+		char  *str = NULL;
+		size_t  size = 0;
+		check_bad(get_string, val, &cstr, &size);
+		check_bad(set_string, val, str);
+		check_bad(set_string_len, val, str, size);
+	}
+
+	if (type != AVRO_ENUM) {
+		int  dummy = 0;
+		check_bad(get_enum, val, &dummy);
+		check_bad(set_enum, val, dummy);
+	}
+
+	if (type != AVRO_FIXED) {
+		const void  *cbuf = NULL;
+		void  *buf = NULL;
+		size_t  size = 0;
+		check_bad(get_fixed, val, &cbuf, &size);
+		check_bad(set_fixed, val, buf, size);
+	}
+
+	if (type != AVRO_ARRAY && type != AVRO_MAP && type != AVRO_RECORD) {
+		size_t  size = 0;
+		check_bad(get_size, val, &size);
+
+		size_t  index = 0;
+		avro_value_t  child;
+		const char  *key = NULL;
+		check_bad(get_by_index, val, index, &child, &key);
+	}
+
+	if (type != AVRO_MAP && type != AVRO_RECORD) {
+		const char  *key = NULL;
+		avro_value_t  child;
+		size_t  index = 0;
+		check_bad(get_by_name, val, key, &child, &index);
+	}
+
+	if (type != AVRO_ARRAY) {
+		avro_value_t  child;
+		size_t  index;
+		check_bad(append, val, &child, &index);
+	}
+
+	if (type != AVRO_MAP) {
+		const char  *key = NULL;
+		avro_value_t  child;
+		size_t  index = 0;
+		int  is_new = 0;
+		check_bad(add, val, key, &child, &index, &is_new);
+	}
+
+	if (type != AVRO_UNION) {
+		int  discriminant = 0;
+		avro_value_t  branch;
+		check_bad(get_discriminant, val, &discriminant);
+		check_bad(get_current_branch, val, &branch);
+		check_bad(set_branch, val, discriminant, &branch);
+	}
+
+#undef check_bad
+
+	return EXIT_SUCCESS;
+}
+
+#define check_invalid_methods(name, val) \
+	check_(_check_invalid_methods(name, val))
+
+/*
+ * Verify that we get the expected type code and schema for a value.
+ */
+
+static int
+check_type_and_schema(const char *name,
+		      avro_value_t *val,
+		      avro_type_t expected_type,
+		      avro_schema_t expected_schema)
+{
+	if (avro_value_get_type(val) != expected_type) {
+		avro_schema_decref(expected_schema);
+		fprintf(stderr, "Unexpected type for %s\n", name);
+		return EXIT_FAILURE;
+	}
+
+	if (!avro_schema_equal(avro_value_get_schema(val),
+			       expected_schema)) {
+		avro_schema_decref(expected_schema);
+		fprintf(stderr, "Unexpected schema for %s\n", name);
+		return EXIT_FAILURE;
+	}
+
+	avro_schema_decref(expected_schema);
+	return EXIT_SUCCESS;
+}
+
+#define try(call, msg) \
+	do { \
+		if (call) { \
+			fprintf(stderr, msg ":\n  %s\n", avro_strerror()); \
+			return EXIT_FAILURE; \
+		} \
+	} while (0)
+
+static int
+_check_write_read(avro_value_t *val)
+{
+	static char  buf[4096];
+
+	avro_reader_t  reader = avro_reader_memory(buf, sizeof(buf));
+	avro_writer_t  writer = avro_writer_memory(buf, sizeof(buf));
+
+	if (avro_value_write(writer, val)) {
+		fprintf(stderr, "Unable to write value:\n  %s\n",
+			avro_strerror());
+		return EXIT_FAILURE;
+	}
+
+	avro_writer_dump(writer, stderr);
+
+	size_t size;
+	if (avro_value_sizeof(val, &size)) {
+		fprintf(stderr, "Unable to determine size of value:\n  %s\n",
+			avro_strerror());
+		return EXIT_FAILURE;
+	}
+
+	if (size != (size_t) avro_writer_tell(writer)) {
+		fprintf(stderr, "Unexpected size of encoded value\n");
+		return EXIT_FAILURE;
+	}
+
+	avro_value_t  val_in;
+	if (avro_generic_value_new(val->iface, &val_in)) {
+		fprintf(stderr, "Cannot allocate new value instance:\n  %s\n",
+			avro_strerror());
+		return EXIT_FAILURE;
+	}
+
+	if (avro_value_read(reader, &val_in)) {
+		fprintf(stderr, "Unable to read value:\n  %s\n",
+			avro_strerror());
+		return EXIT_FAILURE;
+	}
+
+	if (!avro_value_equal(val, &val_in)) {
+		fprintf(stderr, "Round-trip values not equal\n");
+		return EXIT_FAILURE;
+	}
+
+	avro_value_decref(&val_in);
+	avro_reader_free(reader);
+	avro_writer_free(writer);
+
+	return EXIT_SUCCESS;
+}
+
+#define check_write_read(val) \
+	check_(_check_write_read(val))
+
+static int
+_check_hash(avro_value_t *val1, avro_value_t *val2)
+{
+	uint32_t  hash1 = avro_value_hash(val1);
+	uint32_t  hash2 = avro_value_hash(val2);
+	if (hash1 != hash2) {
+		fprintf(stderr, "Copied hashes not equal\n");
+		return EXIT_FAILURE;
+	}
+	return EXIT_SUCCESS;
+}
+
+#define check_hash(val1, val2) \
+	check_(_check_hash(val1, val2))
+
+static int
+_check_copy(avro_value_t *val)
+{
+	avro_value_t  copied_val;
+	if (avro_generic_value_new(val->iface, &copied_val)) {
+		fprintf(stderr, "Cannot allocate new value instance:\n  %s\n",
+			avro_strerror());
+		return EXIT_FAILURE;
+	}
+
+	if (avro_value_copy_fast(&copied_val, val)) {
+		fprintf(stderr, "Cannot copy value:\n  %s\n",
+			avro_strerror());
+		return EXIT_FAILURE;
+	}
+
+	if (!avro_value_equal(val, &copied_val)) {
+		fprintf(stderr, "Copied values not equal\n");
+		return EXIT_FAILURE;
+	}
+
+	check_hash(val, &copied_val);
+
+	avro_value_decref(&copied_val);
+	return EXIT_SUCCESS;
+}
+
+#define check_copy(val) \
+	check_(_check_copy(val))
+
+static int
+test_boolean(void)
+{
+	int  rval;
+
+	int  i;
+	for (i = 0; i <= 1; i++) {
+		avro_value_t  val;
+		try(avro_generic_boolean_new(&val, i),
+		    "Cannot create boolean");
+		check(rval, check_type_and_schema
+			    ("boolean", &val,
+			     AVRO_BOOLEAN, avro_schema_boolean()));
+		try(avro_value_reset(&val),
+		    "Cannot reset boolean");
+		try(avro_value_set_boolean(&val, i),
+		    "Cannot set boolean");
+
+		/* Start with the wrong value to make sure _get does
+		 * something. */
+		int  actual = (int) 23;
+		try(avro_value_get_boolean(&val, &actual),
+		    "Cannot get boolean value");
+
+		if (actual != i) {
+			fprintf(stderr, "Unexpected boolean value\n");
+			return EXIT_FAILURE;
+		}
+
+		check_invalid_methods("boolean", &val);
+		check_write_read(&val);
+		check_copy(&val);
+		avro_value_decref(&val);
+	}
+
+	avro_value_t  val1;
+	avro_value_t  val2;
+	try(avro_generic_boolean_new(&val1, 0),
+	    "Cannot create boolean");
+	try(avro_generic_boolean_new(&val2, 1),
+	    "Cannot create boolean");
+	if (avro_value_cmp_fast(&val1, &val2) >= 0) {
+		fprintf(stderr, "Incorrect sort order\n");
+		return EXIT_FAILURE;
+	}
+	if (avro_value_cmp_fast(&val2, &val1) <= 0) {
+		fprintf(stderr, "Incorrect sort order\n");
+		return EXIT_FAILURE;
+	}
+	if (avro_value_cmp_fast(&val1, &val1) != 0) {
+		fprintf(stderr, "Incorrect sort order\n");
+		return EXIT_FAILURE;
+	}
+	avro_value_decref(&val1);
+	avro_value_decref(&val2);
+
+	return 0;
+}
+
+static int
+test_bytes(void)
+{
+	int  rval;
+
+	char bytes[] = { 0xDE, 0xAD, 0xBE, 0xEF };
+
+	avro_value_t  val;
+	try(avro_generic_bytes_new(&val, bytes, sizeof(bytes)),
+	    "Cannot create bytes");
+	check(rval, check_type_and_schema
+		    ("bytes", &val,
+		     AVRO_BYTES, avro_schema_bytes()));
+	try(avro_value_reset(&val),
+	    "Cannot reset bytes");
+	try(avro_value_set_bytes(&val, bytes, sizeof(bytes)),
+	    "Cannot set bytes");
+
+	const void  *actual_buf = NULL;
+	size_t  actual_size = 0;
+	try(avro_value_get_bytes(&val, &actual_buf, &actual_size),
+	    "Cannot get bytes value");
+
+	if (actual_size != sizeof(bytes)) {
+		fprintf(stderr, "Unexpected bytes size\n");
+		return EXIT_FAILURE;
+	}
+
+	if (memcmp(actual_buf, bytes, sizeof(bytes)) != 0) {
+		fprintf(stderr, "Unexpected bytes contents\n");
+		return EXIT_FAILURE;
+	}
+
+	avro_wrapped_buffer_t  wbuf;
+	try(avro_value_grab_bytes(&val, &wbuf),
+	    "Cannot grab bytes value");
+
+	if (wbuf.size != sizeof(bytes)) {
+		fprintf(stderr, "Unexpected grabbed bytes size\n");
+		return EXIT_FAILURE;
+	}
+
+	if (memcmp(wbuf.buf, bytes, sizeof(bytes)) != 0) {
+		fprintf(stderr, "Unexpected grabbed bytes contents\n");
+		return EXIT_FAILURE;
+	}
+
+	avro_wrapped_buffer_free(&wbuf);
+
+	check_invalid_methods("bytes", &val);
+	check_write_read(&val);
+	check_copy(&val);
+	avro_value_decref(&val);
+
+	avro_value_t  val1;
+	avro_value_t  val2;
+	avro_value_t  val3;
+	try(avro_generic_bytes_new(&val1, "abcd", 4),
+	    "Cannot create bytes");
+	try(avro_generic_bytes_new(&val2, "abcde", 5),
+	    "Cannot create bytes");
+	try(avro_generic_bytes_new(&val3, "abce", 4),
+	    "Cannot create bytes");
+	if (avro_value_cmp_fast(&val1, &val2) >= 0) {
+		fprintf(stderr, "Incorrect sort order\n");
+		return EXIT_FAILURE;
+	}
+	if (avro_value_cmp_fast(&val2, &val1) <= 0) {
+		fprintf(stderr, "Incorrect sort order\n");
+		return EXIT_FAILURE;
+	}
+	if (avro_value_cmp_fast(&val1, &val3) >= 0) {
+		fprintf(stderr, "Incorrect sort order\n");
+		return EXIT_FAILURE;
+	}
+	if (avro_value_cmp_fast(&val1, &val1) != 0) {
+		fprintf(stderr, "Incorrect sort order\n");
+		return EXIT_FAILURE;
+	}
+	avro_value_decref(&val1);
+	avro_value_decref(&val2);
+	avro_value_decref(&val3);
+
+	return 0;
+}
+
+static int
+test_double(void)
+{
+	int  rval;
+
+	int  i;
+	for (i = 0; i < 100; i++) {
+		double  expected = rand_number(-1e10, 1e10);
+		avro_value_t  val;
+		try(avro_generic_double_new(&val, expected),
+		    "Cannot create double");
+		check(rval, check_type_and_schema
+			    ("double", &val,
+			     AVRO_DOUBLE, avro_schema_double()));
+		try(avro_value_reset(&val),
+		    "Cannot reset double");
+		try(avro_value_set_double(&val, expected),
+		    "Cannot set double");
+
+		double  actual = 0.0;
+		try(avro_value_get_double(&val, &actual),
+		    "Cannot get double value");
+
+		if (actual != expected) {
+			fprintf(stderr, "Unexpected double value\n");
+			return EXIT_FAILURE;
+		}
+
+		check_invalid_methods("double", &val);
+		check_write_read(&val);
+		check_copy(&val);
+		avro_value_decref(&val);
+	}
+	return 0;
+}
+
+static int
+test_float(void)
+{
+	int  rval;
+
+	int  i;
+	for (i = 0; i < 100; i++) {
+		float  expected = rand_number(-1e10, 1e10);
+		avro_value_t  val;
+		try(avro_generic_float_new(&val, expected),
+		    "Cannot create float");
+		check(rval, check_type_and_schema
+			    ("float", &val,
+			     AVRO_FLOAT, avro_schema_float()));
+		try(avro_value_reset(&val),
+		    "Cannot reset float");
+		try(avro_value_set_float(&val, expected),
+		    "Cannot set float");
+
+		float  actual = 0.0f;
+		try(avro_value_get_float(&val, &actual),
+		    "Cannot get float value");
+
+		if (actual != expected) {
+			fprintf(stderr, "Unexpected float value\n");
+			return EXIT_FAILURE;
+		}
+
+		check_invalid_methods("float", &val);
+		check_write_read(&val);
+		check_copy(&val);
+		avro_value_decref(&val);
+	}
+	return 0;
+}
+
+static int
+test_int(void)
+{
+	int  rval;
+
+	int  i;
+	for (i = 0; i < 100; i++) {
+		int32_t  expected = rand_int32();
+		avro_value_t  val;
+		try(avro_generic_int_new(&val, expected),
+		    "Cannot create int");
+		check(rval, check_type_and_schema
+			    ("int", &val,
+			     AVRO_INT32, avro_schema_int()));
+		try(avro_value_reset(&val),
+		    "Cannot reset int");
+		try(avro_value_set_int(&val, expected),
+		    "Cannot set int");
+
+		int32_t  actual = 0;
+		try(avro_value_get_int(&val, &actual),
+		    "Cannot get int value");
+
+		if (actual != expected) {
+			fprintf(stderr, "Unexpected int value\n");
+			return EXIT_FAILURE;
+		}
+
+		check_invalid_methods("int", &val);
+		check_write_read(&val);
+		check_copy(&val);
+		avro_value_decref(&val);
+	}
+
+	avro_value_t  val1;
+	avro_value_t  val2;
+	try(avro_generic_int_new(&val1, -10),
+	    "Cannot create int");
+	try(avro_generic_int_new(&val2, 42),
+	    "Cannot create int");
+	if (avro_value_cmp_fast(&val1, &val2) >= 0) {
+		fprintf(stderr, "Incorrect sort order\n");
+		return EXIT_FAILURE;
+	}
+	if (avro_value_cmp_fast(&val2, &val1) <= 0) {
+		fprintf(stderr, "Incorrect sort order\n");
+		return EXIT_FAILURE;
+	}
+	if (avro_value_cmp_fast(&val1, &val1) != 0) {
+		fprintf(stderr, "Incorrect sort order\n");
+		return EXIT_FAILURE;
+	}
+	avro_value_decref(&val1);
+	avro_value_decref(&val2);
+
+	return 0;
+}
+
+static int
+test_long(void)
+{
+	int  rval;
+
+	int  i;
+	for (i = 0; i < 100; i++) {
+		int64_t  expected = rand_int64();
+		avro_value_t  val;
+		try(avro_generic_long_new(&val, expected),
+		    "Cannot create long");
+		check(rval, check_type_and_schema
+			    ("long", &val,
+			     AVRO_INT64, avro_schema_long()));
+		try(avro_value_reset(&val),
+		    "Cannot reset long");
+		try(avro_value_set_long(&val, expected),
+		    "Cannot set long");
+
+		int64_t  actual = 0;
+		try(avro_value_get_long(&val, &actual),
+		    "Cannot get long value");
+
+		if (actual != expected) {
+			fprintf(stderr, "Unexpected long value\n");
+			return EXIT_FAILURE;
+		}
+
+		check_invalid_methods("long", &val);
+		check_write_read(&val);
+		check_copy(&val);
+		avro_value_decref(&val);
+	}
+	return 0;
+}
+
+static int
+test_null(void)
+{
+	int  rval;
+
+	avro_value_t  val;
+	try(avro_generic_null_new(&val),
+	    "Cannot create null");
+	check(rval, check_type_and_schema
+		    ("null", &val,
+		     AVRO_NULL, avro_schema_null()));
+	try(avro_value_reset(&val),
+	    "Cannot reset null");
+	try(avro_value_set_null(&val),
+	    "Cannot set null");
+	try(avro_value_get_null(&val),
+	    "Cannot get null");
+
+	check_invalid_methods("null", &val);
+	check_write_read(&val);
+	check_copy(&val);
+	avro_value_decref(&val);
+	return 0;
+}
+
+static int
+test_string(void)
+{
+	int  rval;
+
+	char *strings[] = {
+		"Four score and seven years ago",
+		"our fathers brought forth on this continent",
+		"a new nation",
+		"conceived in Liberty",
+		"and dedicated to the proposition that all men "
+			"are created equal."
+	};
+
+	unsigned int  i;
+	for (i = 0; i < sizeof(strings) / sizeof(strings[0]); i++) {
+		avro_value_t  val;
+		try(avro_generic_string_new(&val, strings[i]),
+		    "Cannot create string");
+		check(rval, check_type_and_schema
+			    ("string", &val,
+			     AVRO_STRING, avro_schema_string()));
+		try(avro_value_reset(&val),
+		    "Cannot reset string");
+		try(avro_value_set_string_len(&val, "", 0),
+		    "Cannot set_len dummy string");
+
+		/* First try a round-trip using set_string */
+
+		try(avro_value_set_string(&val, strings[i]),
+		    "Cannot set string");
+
+		const char  *actual_str = NULL;
+		size_t  actual_size = 0;
+		try(avro_value_get_string(&val, &actual_str, &actual_size),
+		    "Cannot get string value");
+
+		if (actual_size != strlen(strings[i])+1) {
+			fprintf(stderr, "Unexpected string size\n");
+			return EXIT_FAILURE;
+		}
+
+		if (strcmp(actual_str, strings[i]) != 0) {
+			fprintf(stderr, "Unexpected string contents\n");
+			return EXIT_FAILURE;
+		}
+
+		avro_wrapped_buffer_t  wbuf;
+		try(avro_value_grab_string(&val, &wbuf),
+		    "Cannot grab string value");
+
+		if (wbuf.size != strlen(strings[i])+1) {
+			fprintf(stderr, "Unexpected grabbed string size\n");
+			return EXIT_FAILURE;
+		}
+
+		if (strcmp((const char *) wbuf.buf, strings[i]) != 0) {
+			fprintf(stderr, "Unexpected grabbed string contents\n");
+			return EXIT_FAILURE;
+		}
+
+		avro_wrapped_buffer_free(&wbuf);
+
+		/* and then again using set_string_len */
+
+		size_t  str_length = strlen(strings[i])+1;
+		try(avro_value_set_string_len(&val, strings[i], str_length),
+		    "Cannot set_len string");
+
+		actual_str = NULL;
+		actual_size = 0;
+		try(avro_value_get_string(&val, &actual_str, &actual_size),
+		    "Cannot get string value");
+
+		if (actual_size != strlen(strings[i])+1) {
+			fprintf(stderr, "Unexpected string size\n");
+			return EXIT_FAILURE;
+		}
+
+		if (strcmp(actual_str, strings[i]) != 0) {
+			fprintf(stderr, "Unexpected string contents\n");
+			return EXIT_FAILURE;
+		}
+
+		try(avro_value_grab_string(&val, &wbuf),
+		    "Cannot grab string value");
+
+		if (wbuf.size != strlen(strings[i])+1) {
+			fprintf(stderr, "Unexpected grabbed string size\n");
+			return EXIT_FAILURE;
+		}
+
+		if (strcmp((const char *) wbuf.buf, strings[i]) != 0) {
+			fprintf(stderr, "Unexpected grabbed string contents\n");
+			return EXIT_FAILURE;
+		}
+
+		avro_wrapped_buffer_free(&wbuf);
+
+		check_invalid_methods("string", &val);
+		check_write_read(&val);
+		check_copy(&val);
+		avro_value_decref(&val);
+	}
+
+	return 0;
+}
+
+static int
+test_array(void)
+{
+	avro_schema_t  double_schema = avro_schema_double();
+	avro_schema_t  array_schema = avro_schema_array(double_schema);
+
+	avro_value_iface_t  *array_class =
+	    avro_generic_class_from_schema(array_schema);
+
+	int  rval;
+
+	int  i;
+	for (i = 0; i < 100; i++) {
+		size_t  count = rand_count();
+
+		avro_value_t  val;
+		try(avro_generic_value_new(array_class, &val),
+		    "Cannot create array");
+		check(rval, check_type_and_schema
+			    ("array", &val, AVRO_ARRAY,
+			     avro_schema_incref(array_schema)));
+
+		size_t  j;
+		for (j = 0; j < count; j++) {
+			avro_value_t  element;
+			size_t  new_index;
+			try(avro_value_append(&val, &element, &new_index),
+			    "Cannot append to array");
+			if (new_index != j) {
+				fprintf(stderr, "Unexpected index\n");
+				return EXIT_FAILURE;
+			}
+
+			double  expected = rand_number(-1e10, 1e10);
+			try(avro_value_set_double(&element, expected),
+			    "Cannot set double");
+			try(avro_value_get_by_index(&val, j, &element, NULL),
+			    "Cannot get from array");
+
+			double  actual = 0.0;
+			try(avro_value_get_double(&element, &actual),
+			    "Cannot get double value");
+
+			if (actual != expected) {
+				fprintf(stderr, "Unexpected double value\n");
+				return EXIT_FAILURE;
+			}
+		}
+
+		size_t  actual_size = 0;
+		try(avro_value_get_size(&val, &actual_size),
+		    "Cannot get_size array");
+
+		if (actual_size != count) {
+			fprintf(stderr, "Unexpected size\n");
+			return EXIT_FAILURE;
+		}
+
+		check_write_read(&val);
+		check_copy(&val);
+
+		try(avro_value_reset(&val),
+		    "Cannot reset array");
+		try(avro_value_get_size(&val, &actual_size),
+		    "Cannot get_size empty array");
+
+		if (actual_size != 0) {
+			fprintf(stderr, "Unexpected empty size\n");
+			return EXIT_FAILURE;
+		}
+
+		check_invalid_methods("array", &val);
+		avro_value_decref(&val);
+	}
+
+	avro_schema_decref(double_schema);
+	avro_schema_decref(array_schema);
+	avro_value_iface_decref(array_class);
+	return 0;
+}
+
+static int
+test_enum(void)
+{
+	static const char  SCHEMA_JSON[] =
+	"{"
+	"  \"type\": \"enum\","
+	"  \"name\": \"suits\","
+	"  \"symbols\": [\"CLUBS\",\"DIAMONDS\",\"HEARTS\",\"SPADES\"]"
+	"}";
+
+	avro_schema_t  enum_schema = NULL;
+	if (avro_schema_from_json_literal(SCHEMA_JSON, &enum_schema)) {
+		fprintf(stderr, "Error parsing schema:\n  %s\n",
+			avro_strerror());
+		return EXIT_FAILURE;
+	}
+
+	avro_value_iface_t  *enum_class =
+	    avro_generic_class_from_schema(enum_schema);
+
+	int  rval;
+
+	int  i;
+	for (i = 0; i < 4; i++) {
+		int  expected = i;
+		avro_value_t  val;
+		try(avro_generic_value_new(enum_class, &val),
+		    "Cannot create enum");
+		check(rval, check_type_and_schema
+			    ("enum", &val, AVRO_ENUM,
+			     avro_schema_incref(enum_schema)));
+		try(avro_value_reset(&val),
+		    "Cannot reset enum");
+		try(avro_value_set_enum(&val, expected),
+		    "Cannot set enum");
+
+		int  actual = -1;
+		try(avro_value_get_enum(&val, &actual),
+		    "Cannot get enum value");
+
+		if (actual != expected) {
+			fprintf(stderr, "Unexpected enum value\n");
+			return EXIT_FAILURE;
+		}
+
+		check_invalid_methods("enum", &val);
+		check_write_read(&val);
+		check_copy(&val);
+		avro_value_decref(&val);
+	}
+
+	avro_schema_decref(enum_schema);
+	avro_value_iface_decref(enum_class);
+	return 0;
+}
+
+static int
+test_fixed(void)
+{
+	static const char  SCHEMA_JSON[] =
+	"{"
+	"  \"type\": \"fixed\","
+	"  \"name\": \"ipv4\","
+	"  \"size\": 4"
+	"}";
+
+	avro_schema_t  fixed_schema = NULL;
+	if (avro_schema_from_json_literal(SCHEMA_JSON, &fixed_schema)) {
+		fprintf(stderr, "Error parsing schema:\n  %s\n",
+			avro_strerror());
+		return EXIT_FAILURE;
+	}
+
+	avro_value_iface_t  *fixed_class =
+	    avro_generic_class_from_schema(fixed_schema);
+
+	int  rval;
+
+	char fixed[] = { 0xDE, 0xAD, 0xBE, 0xEF };
+
+	avro_value_t  val;
+	try(avro_generic_value_new(fixed_class, &val),
+	    "Cannot create fixed");
+	check(rval, check_type_and_schema
+		    ("fixed", &val, AVRO_FIXED,
+		     avro_schema_incref(fixed_schema)));
+	try(avro_value_reset(&val),
+	    "Cannot reset fixed");
+
+	/* verify an error on invalid size */
+	try(!avro_value_set_fixed(&val, fixed, 0),
+	    "Expected error with invalid size");
+
+	try(avro_value_set_fixed(&val, fixed, sizeof(fixed)),
+	    "Cannot set fixed");
+
+	const void  *actual_buf = NULL;
+	size_t  actual_size = 0;
+	try(avro_value_get_fixed(&val, &actual_buf, &actual_size),
+	    "Cannot get fixed value");
+
+	if (actual_size != sizeof(fixed)) {
+		fprintf(stderr, "Unexpected fixed size\n");
+		return EXIT_FAILURE;
+	}
+
+	if (memcmp(actual_buf, fixed, sizeof(fixed)) != 0) {
+		fprintf(stderr, "Unexpected fixed contents\n");
+		return EXIT_FAILURE;
+	}
+
+	avro_wrapped_buffer_t  wbuf;
+	try(avro_value_grab_fixed(&val, &wbuf),
+	    "Cannot grab fixed value");
+
+	if (wbuf.size != sizeof(fixed)) {
+		fprintf(stderr, "Unexpected grabbed fixed size\n");
+		return EXIT_FAILURE;
+	}
+
+	if (memcmp(wbuf.buf, fixed, sizeof(fixed)) != 0) {
+		fprintf(stderr, "Unexpected grabbed fixed contents\n");
+		return EXIT_FAILURE;
+	}
+
+	avro_wrapped_buffer_free(&wbuf);
+
+	check_invalid_methods("fixed", &val);
+	check_write_read(&val);
+	check_copy(&val);
+	avro_value_decref(&val);
+	avro_schema_decref(fixed_schema);
+	avro_value_iface_decref(fixed_class);
+	return 0;
+}
+
+static int
+test_map(void)
+{
+	avro_schema_t  double_schema = avro_schema_double();
+	avro_schema_t  map_schema = avro_schema_map(double_schema);
+
+	avro_value_iface_t  *map_class =
+	    avro_generic_class_from_schema(map_schema);
+
+	int  rval;
+
+	int  i;
+	for (i = 0; i < 100; i++) {
+		size_t  count = rand_count();
+
+		avro_value_t  val;
+		try(avro_generic_value_new(map_class, &val),
+		    "Cannot create map");
+		check(rval, check_type_and_schema
+			    ("map", &val, AVRO_MAP,
+			     avro_schema_incref(map_schema)));
+
+		size_t  j;
+		for (j = 0; j < count; j++) {
+			avro_value_t  element;
+			size_t  new_index;
+			int  is_new = 0;
+
+			char  key[64];
+			snprintf(key, 64, "%" PRIsz, j);
+
+			try(avro_value_add(&val, key,
+					   &element, &new_index, &is_new),
+			    "Cannot add to map");
+
+			if (new_index != j) {
+				fprintf(stderr, "Unexpected index\n");
+				return EXIT_FAILURE;
+			}
+
+			if (!is_new) {
+				fprintf(stderr, "Expected new element\n");
+				return EXIT_FAILURE;
+			}
+
+			double  expected = rand_number(-1e10, 1e10);
+			try(avro_value_set_double(&element, expected),
+			    "Cannot set double");
+			try(avro_value_add(&val, key,
+					   &element, &new_index, &is_new),
+			    "Cannot re-add to map");
+
+			if (is_new) {
+				fprintf(stderr, "Expected non-new element\n");
+				return EXIT_FAILURE;
+			}
+
+			const char  *actual_key = NULL;
+			try(avro_value_get_by_index(&val, j, &element,
+						    &actual_key),
+			    "Cannot get from map");
+
+			if (strcmp(actual_key, key) != 0) {
+				fprintf(stderr, "Unexpected key\n");
+				return EXIT_FAILURE;
+			}
+
+			double  actual = 0.0;
+			try(avro_value_get_double(&element, &actual),
+			    "Cannot get double value");
+
+			if (actual != expected) {
+				fprintf(stderr, "Unexpected double value\n");
+				return EXIT_FAILURE;
+			}
+		}
+
+		size_t  actual_size = 0;
+		try(avro_value_get_size(&val, &actual_size),
+		    "Cannot get_size map");
+
+		if (actual_size != count) {
+			fprintf(stderr, "Unexpected size\n");
+			return EXIT_FAILURE;
+		}
+
+		/*
+		 * Create a reversed copy of the map to ensure that the
+		 * element ordering doesn't affect the hash value.
+		 */
+
+		avro_value_t  reversed;
+		try(avro_generic_value_new(map_class, &reversed),
+		    "Cannot create map");
+
+		for (j = count; j-- > 0; ) {
+			avro_value_t  element;
+			const char  *key = NULL;
+			double  element_value = 0.0;
+			try(avro_value_get_by_index(&val, j, &element, &key),
+			    "Cannot get from map");
+			try(avro_value_get_double(&element, &element_value),
+			    "Cannot get double value");
+
+			try(avro_value_add(&reversed, key, &element, NULL, NULL),
+			    "Cannot add to map");
+			try(avro_value_set_double(&element, element_value),
+			    "Cannot set double");
+		}
+
+		check_hash(&val, &reversed);
+		if (!avro_value_equal(&val, &reversed)) {
+			fprintf(stderr, "Reversed values not equal\n");
+			return EXIT_FAILURE;
+		}
+
+		/* Final tests and cleanup */
+
+		check_write_read(&val);
+		check_copy(&val);
+
+		try(avro_value_reset(&val),
+		    "Cannot reset map");
+		try(avro_value_get_size(&val, &actual_size),
+		    "Cannot get_size empty map");
+
+		if (actual_size != 0) {
+			fprintf(stderr, "Unexpected empty size\n");
+			return EXIT_FAILURE;
+		}
+
+		check_invalid_methods("map", &val);
+		avro_value_decref(&val);
+		avro_value_decref(&reversed);
+	}
+
+	avro_schema_decref(double_schema);
+	avro_schema_decref(map_schema);
+	avro_value_iface_decref(map_class);
+	return 0;
+}
+
+static int
+test_record(void)
+{
+	static const char  SCHEMA_JSON[] =
+	"{"
+	"  \"type\": \"record\","
+	"  \"name\": \"test\","
+	"  \"fields\": ["
+	"    { \"name\": \"b\", \"type\": \"boolean\" },"
+	"    { \"name\": \"i\", \"type\": \"int\" },"
+	"    { \"name\": \"s\", \"type\": \"string\" },"
+	"    { \"name\": \"ds\", \"type\": "
+	"      { \"type\": \"array\", \"items\": \"double\" } },"
+	"    { \"name\": \"sub\", \"type\": "
+	"      {"
+	"        \"type\": \"record\","
+	"        \"name\": \"subtest\","
+	"        \"fields\": ["
+	"          { \"name\": \"f\", \"type\": \"float\" },"
+	"          { \"name\": \"l\", \"type\": \"long\" }"
+	"        ]"
+	"      }"
+	"    },"
+	"    { \"name\": \"nested\", \"type\": [\"null\", \"test\"] }"
+	"  ]"
+	"}";
+
+	avro_schema_t  record_schema = NULL;
+	if (avro_schema_from_json_literal(SCHEMA_JSON, &record_schema)) {
+		fprintf(stderr, "Error parsing schema:\n  %s\n",
+			avro_strerror());
+		return EXIT_FAILURE;
+	}
+
+	avro_value_iface_t  *record_class =
+	    avro_generic_class_from_schema(record_schema);
+
+	int  rval;
+
+	avro_value_t  val;
+	try(avro_generic_value_new(record_class, &val),
+	    "Cannot create record");
+	check(rval, check_type_and_schema
+		    ("record", &val, AVRO_RECORD,
+		     avro_schema_incref(record_schema)));
+
+	size_t  field_count;
+	try(avro_value_get_size(&val, &field_count),
+	    "Cannot get field count");
+	if (field_count != 6) {
+		fprintf(stderr, "Unexpected field count\n");
+		return EXIT_FAILURE;
+	}
+
+	/* Assign to each field */
+	avro_value_t  field;
+	avro_value_t  element;
+	avro_value_t  subfield;
+	avro_value_t  branch;
+	const char  *name;
+	size_t  index;
+
+	try(avro_value_get_by_index(&val, 0, &field, NULL),
+	    "Cannot get field 0");
+	try(avro_value_set_boolean(&field, 1),
+	    "Cannot set field 0");
+
+	try(avro_value_get_by_index(&val, 1, &field, &name),
+	    "Cannot get field 1");
+	try(avro_value_set_int(&field, 42),
+	    "Cannot set field 1");
+	if (strcmp(name, "i") != 0) {
+		fprintf(stderr, "Unexpected name for field 1: %s\n", name);
+		return EXIT_FAILURE;
+	}
+
+	try(avro_value_get_by_index(&val, 2, &field, NULL),
+	    "Cannot get field 2");
+	try(avro_value_set_string(&field, "Hello world!"),
+	    "Cannot set field 2");
+
+	try(avro_value_get_by_name(&val, "i", &field, &index),
+	    "Cannot get \"i\" field");
+	if (index != 1) {
+		fprintf(stderr, "Unexpected index for \"i\" field: %" PRIsz "\n", index);
+		return EXIT_FAILURE;
+	}
+
+	try(avro_value_get_by_index(&val, 3, &field, NULL),
+	    "Cannot get field 3");
+	try(avro_value_append(&field, &element, NULL),
+	    "Cannot append to field 3");
+	try(avro_value_set_double(&element, 10.0),
+	    "Cannot set field 3, element 0");
+
+	try(avro_value_get_by_index(&val, 4, &field, NULL),
+	    "Cannot get field 4");
+
+	try(avro_value_get_by_index(&field, 0, &subfield, NULL),
+	    "Cannot get field 4, subfield 0");
+	try(avro_value_set_float(&subfield, 5.0f),
+	    "Cannot set field 4, subfield 0");
+
+	try(avro_value_get_by_index(&field, 1, &subfield, NULL),
+	    "Cannot get field 4, subfield 1");
+	try(avro_value_set_long(&subfield, 10000),
+	    "Cannot set field 4, subfield 1");
+
+	try(avro_value_get_by_index(&val, 5, &field, NULL),
+	    "Cannot get field 5");
+	try(avro_value_set_branch(&field, 0, &branch),
+	    "Cannot select null branch");
+
+	check_write_read(&val);
+	check_copy(&val);
+
+	/* Reset and verify that the fields are empty again */
+	try(avro_value_reset(&val),
+	    "Cannot reset record");
+
+	int  bval;
+	try(avro_value_get_by_index(&val, 0, &field, NULL),
+	    "Cannot get field 0");
+	try(avro_value_get_boolean(&field, &bval),
+	    "Cannot get field 0 value");
+	if (bval) {
+		fprintf(stderr, "Unexpected value for field 0\n");
+		return EXIT_FAILURE;
+	}
+
+	size_t  count;
+	try(avro_value_get_by_index(&val, 3, &field, NULL),
+	    "Cannot get field 3");
+	try(avro_value_get_size(&field, &count),
+	    "Cannot get field 3 size");
+	if (count != 0) {
+		fprintf(stderr, "Unexpected size for field 3\n");
+		return EXIT_FAILURE;
+	}
+
+	check_invalid_methods("record", &val);
+	avro_value_decref(&val);
+	avro_value_iface_decref(record_class);
+	avro_schema_decref(record_schema);
+	return EXIT_SUCCESS;
+}
+
+static int
+test_union(void)
+{
+	static const char  SCHEMA_JSON[] =
+	"["
+	"  \"null\","
+	"  \"int\","
+	"  \"double\","
+	"  \"bytes\""
+	"]";
+
+	avro_schema_t  union_schema = NULL;
+	if (avro_schema_from_json_literal(SCHEMA_JSON, &union_schema)) {
+		fprintf(stderr, "Error parsing schema:\n  %s\n",
+			avro_strerror());
+		return EXIT_FAILURE;
+	}
+
+	avro_value_iface_t  *union_class =
+	    avro_generic_class_from_schema(union_schema);
+
+	int  rval;
+
+	avro_value_t  val;
+	try(avro_generic_value_new(union_class, &val),
+	    "Cannot create union");
+	check(rval, check_type_and_schema
+		    ("union", &val, AVRO_UNION,
+		     avro_schema_incref(union_schema)));
+
+	int discriminant = 0;
+	try(avro_value_get_discriminant(&val, &discriminant),
+	    "Cannot get union discriminant");
+
+	if (discriminant != -1) {
+		fprintf(stderr, "Unexpected union discriminant\n");
+		return EXIT_FAILURE;
+	}
+
+	avro_value_t  branch;
+	try(!avro_value_get_current_branch(&val, &branch),
+	    "Expected error getting empty current branch");
+
+	try(avro_value_set_branch(&val, 0, &branch),
+	    "Cannot select null branch");
+	try(avro_value_set_null(&branch),
+	    "Cannot set null branch value");
+
+	try(avro_value_set_branch(&val, 1, &branch),
+	    "Cannot select int branch");
+	try(avro_value_set_int(&branch, 42),
+	    "Cannot set int branch value");
+
+	try(avro_value_set_branch(&val, 1, &branch),
+	    "Cannot select int branch");
+	try(avro_value_set_int(&branch, 10),
+	    "Cannot set int branch value");
+
+	try(avro_value_set_branch(&val, 2, &branch),
+	    "Cannot select double branch");
+	try(avro_value_set_double(&branch, 10.0),
+	    "Cannot set double branch value");
+
+	char bytes[] = { 0xDE, 0xAD, 0xBE, 0xEF };
+	try(avro_value_set_branch(&val, 3, &branch),
+	    "Cannot select bytes branch");
+	try(avro_value_set_bytes(&branch, bytes, sizeof(bytes)),
+	    "Cannot set bytes branch value");
+
+	check_invalid_methods("union", &val);
+	check_write_read(&val);
+	check_copy(&val);
+	avro_value_decref(&val);
+
+	avro_schema_decref(union_schema);
+	avro_value_iface_decref(union_class);
+	return 0;
+}
+
+int main(void)
+{
+	avro_set_allocator(test_allocator, NULL);
+
+	unsigned int i;
+	struct avro_tests {
+		char *name;
+		avro_test func;
+	} tests[] = {
+		{ "boolean", test_boolean },
+		{ "bytes", test_bytes },
+		{ "double", test_double },
+		{ "float", test_float },
+		{ "int", test_int },
+		{ "long", test_long },
+		{ "null", test_null },
+		{ "string", test_string },
+		{ "array", test_array },
+		{ "enum", test_enum },
+		{ "fixed", test_fixed },
+		{ "map", test_map },
+		{ "record", test_record },
+		{ "union", test_union }
+	};
+
+	init_rand();
+	for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++) {
+		struct avro_tests *test = tests + i;
+		fprintf(stderr, "**** Running %s tests ****\n", test->name);
+		if (test->func() != 0) {
+			return EXIT_FAILURE;
+		}
+	}
+	return EXIT_SUCCESS;
+}
diff --git a/lang/c/tests/test_cpp.cpp b/lang/c/tests/test_cpp.cpp
new file mode 100644
index 0000000..5c2fe50
--- /dev/null
+++ b/lang/c/tests/test_cpp.cpp
@@ -0,0 +1,10 @@
+#include "avro.h"
+
+int main(int argc, char **argv)
+{
+	(void)argc;
+	(void)argv;
+
+	return 0;
+}
+
diff --git a/lang/c/tests/test_data_structures.c b/lang/c/tests/test_data_structures.c
new file mode 100644
index 0000000..4e12625
--- /dev/null
+++ b/lang/c/tests/test_data_structures.c
@@ -0,0 +1,263 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "avro_private.h"
+#include "avro/data.h"
+
+static int  result = EXIT_SUCCESS;
+
+typedef int (*avro_test) (void);
+
+
+static int
+test_array(void)
+{
+	avro_raw_array_t  array;
+	long  *element;
+
+	/* Test once on a fresh array */
+
+	avro_raw_array_init(&array, sizeof(long));
+	element = (long *) avro_raw_array_append(&array);
+	*element = 1;
+	element = (long *) avro_raw_array_append(&array);
+	*element = 3;
+
+	if (avro_raw_array_size(&array) != 2) {
+		fprintf(stderr, "Incorrect array size: got %" PRIsz ", expected %" PRIsz ".\n",
+			(size_t) avro_raw_array_size(&array),
+			(size_t) 2);
+		return EXIT_FAILURE;
+	}
+
+	if (avro_raw_array_get(&array, long, 0) != 1) {
+		fprintf(stderr, "Unexpected array element %u: got %ld, expected %ld.\n",
+			(unsigned int) 0, avro_raw_array_get(&array, long, 0),
+			(long) 1);
+		return EXIT_FAILURE;
+	}
+
+	/* And test again after clearing the array */
+
+	avro_raw_array_clear(&array);
+	element = (long *) avro_raw_array_append(&array);
+	*element = 1;
+	element = (long *) avro_raw_array_append(&array);
+	*element = 3;
+
+	if (avro_raw_array_size(&array) != 2) {
+		fprintf(stderr, "Incorrect array size: got %" PRIsz ", expected %" PRIsz ".\n",
+			(size_t) avro_raw_array_size(&array),
+			(size_t) 2);
+		return EXIT_FAILURE;
+	}
+
+	if (avro_raw_array_get(&array, long, 0) != 1) {
+		fprintf(stderr, "Unexpected array element %u: got %ld, expected %ld.\n",
+			(unsigned int) 0, avro_raw_array_get(&array, long, 0),
+			(long) 1);
+		return EXIT_FAILURE;
+	}
+
+	avro_raw_array_done(&array);
+	return EXIT_SUCCESS;
+}
+
+
+static int
+test_map(void)
+{
+	avro_raw_map_t  map;
+	long  *element;
+	size_t  index;
+
+	/* Test once on a fresh map */
+
+	avro_raw_map_init(&map, sizeof(long));
+	avro_raw_map_get_or_create(&map, "x", (void **) &element, NULL);
+	*element = 1;
+	avro_raw_map_get_or_create(&map, "y", (void **) &element, NULL);
+	*element = 3;
+
+	if (avro_raw_map_size(&map) != 2) {
+		fprintf(stderr, "Incorrect map size: got %" PRIsz ", expected %" PRIsz ".\n",
+			(size_t) avro_raw_map_size(&map),
+			(size_t) 2);
+		return EXIT_FAILURE;
+	}
+
+	if (avro_raw_map_get_by_index(&map, long, 0) != 1) {
+		fprintf(stderr, "Unexpected map element %u: got %ld, expected %ld.\n",
+			(unsigned int) 0,
+			avro_raw_map_get_by_index(&map, long, 0),
+			(long) 1);
+		return EXIT_FAILURE;
+	}
+
+	if (strcmp(avro_raw_map_get_key(&map, 0), "x") != 0) {
+		fprintf(stderr, "Unexpected key for map element 0: "
+			"got \"%s\", expected \"%s\".\n",
+			avro_raw_map_get_key(&map, 0), "x");
+		return EXIT_FAILURE;
+	}
+
+	element = (long *) avro_raw_map_get(&map, "y", &index);
+	if (index != 1) {
+		fprintf(stderr, "Unexpected index for map element \"%s\": "
+			"got %" PRIsz ", expected %u.\n",
+			"y", index, 1);
+		return EXIT_FAILURE;
+	}
+
+	if (*element != 3) {
+		fprintf(stderr, "Unexpected map element %s: got %ld, expected %ld.\n",
+			"y",
+			*element, (long) 3);
+		return EXIT_FAILURE;
+	}
+
+	/* And test again after clearing the map */
+
+	avro_raw_map_clear(&map);
+	avro_raw_map_get_or_create(&map, "x", (void **) &element, NULL);
+	*element = 1;
+	avro_raw_map_get_or_create(&map, "y", (void **) &element, NULL);
+	*element = 3;
+
+	if (avro_raw_map_size(&map) != 2) {
+		fprintf(stderr, "Incorrect map size: got %" PRIsz ", expected %" PRIsz ".\n",
+			(size_t) avro_raw_map_size(&map),
+			(size_t) 2);
+		return EXIT_FAILURE;
+	}
+
+	if (avro_raw_map_get_by_index(&map, long, 0) != 1) {
+		fprintf(stderr, "Unexpected map element %u: got %ld, expected %ld.\n",
+			(unsigned int) 0,
+			avro_raw_map_get_by_index(&map, long, 0),
+			(long) 1);
+		return EXIT_FAILURE;
+	}
+
+	element = (long *) avro_raw_map_get(&map, "y", &index);
+	if (index != 1) {
+		fprintf(stderr, "Unexpected index for map element \"%s\": "
+			"got %" PRIsz ", expected %u.\n",
+			"y", index, 1);
+		return EXIT_FAILURE;
+	}
+
+	if (*element != 3) {
+		fprintf(stderr, "Unexpected map element %s: got %ld, expected %ld.\n",
+			"y",
+			*element, (long) 3);
+		return EXIT_FAILURE;
+	}
+
+	avro_raw_map_done(&map);
+	return EXIT_SUCCESS;
+}
+
+
+static int
+test_string(void)
+{
+	avro_raw_string_t  str;
+
+	avro_raw_string_init(&str);
+
+	avro_raw_string_set(&str, "a");
+	avro_raw_string_set(&str, "abcdefgh");
+	avro_raw_string_set(&str, "abcd");
+
+	if (avro_raw_string_length(&str) != 5) {
+		fprintf(stderr, "Incorrect string size: got %" PRIsz ", expected %" PRIsz ".\n",
+			(size_t) avro_raw_string_length(&str),
+			(size_t) 5);
+		return EXIT_FAILURE;
+	}
+
+	if (strcmp((const char *) avro_raw_string_get(&str), "abcd") != 0) {
+		fprintf(stderr, "Incorrect string contents: "
+				"got \"%s\", expected \"%s\".\n",
+			(char *) avro_raw_string_get(&str),
+			"abcd");
+		return EXIT_FAILURE;
+	}
+
+	avro_wrapped_buffer_t  wbuf;
+	avro_wrapped_buffer_new_string(&wbuf, "abcd");
+	avro_raw_string_give(&str, &wbuf);
+
+	if (avro_raw_string_length(&str) != 5) {
+		fprintf(stderr, "Incorrect string size: got %" PRIsz ", expected %" PRIsz ".\n",
+			(size_t) avro_raw_string_length(&str),
+			(size_t) 5);
+		return EXIT_FAILURE;
+	}
+
+	if (strcmp((const char *) avro_raw_string_get(&str), "abcd") != 0) {
+		fprintf(stderr, "Incorrect string contents: "
+				"got \"%s\", expected \"%s\".\n",
+			(char *) avro_raw_string_get(&str),
+			"abcd");
+		return EXIT_FAILURE;
+	}
+
+	avro_raw_string_t  str2;
+	avro_raw_string_init(&str2);
+	avro_raw_string_set(&str2, "abcd");
+
+	if (!avro_raw_string_equals(&str, &str2)) {
+		fprintf(stderr, "Strings should be equal.\n");
+		return EXIT_FAILURE;
+	}
+
+	avro_raw_string_done(&str);
+	avro_raw_string_done(&str2);
+	return EXIT_SUCCESS;
+}
+
+
+int main(int argc, char *argv[])
+{
+	AVRO_UNUSED(argc);
+	AVRO_UNUSED(argv);
+
+	unsigned int i;
+	struct avro_tests {
+		char *name;
+		avro_test func;
+	} tests[] = {
+		{ "array", test_array },
+		{ "map", test_map },
+		{ "string", test_string }
+	};
+
+	for (i = 0; i < sizeof(tests) / sizeof(tests[0]); i++) {
+		struct avro_tests *test = tests + i;
+		fprintf(stderr, "**** Running %s tests ****\n", test->name);
+		if (test->func() != 0) {
+			result = EXIT_FAILURE;
+		}
+	}
+	return result;
+}
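
A quick standalone sketch of the container contract these tests exercise: init
with a fixed element size; append() and get_or_create() hand back raw element
storage for the caller to fill; clear() empties without freeing; done()
releases. It uses only the calls shown above; the NULL check assumes append
returns NULL on allocation failure:

	#include <stdio.h>
	#include "avro_private.h"
	#include "avro/data.h"

	int main(void)
	{
		avro_raw_array_t  longs;
		long  *slot;
		size_t  i;

		avro_raw_array_init(&longs, sizeof(long));
		for (i = 0; i < 4; i++) {
			/* append returns a pointer to uninitialized storage
			 * for the new element */
			slot = (long *) avro_raw_array_append(&longs);
			if (slot == NULL) {
				return 1;
			}
			*slot = (long) (i * i);
		}
		for (i = 0; i < avro_raw_array_size(&longs); i++) {
			printf("%ld\n", avro_raw_array_get(&longs, long, i));
		}
		avro_raw_array_done(&longs);
		return 0;
	}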
diff --git a/lang/c/tests/test_interop_data.c b/lang/c/tests/test_interop_data.c
new file mode 100644
index 0000000..58a0dec
--- /dev/null
+++ b/lang/c/tests/test_interop_data.c
@@ -0,0 +1,7 @@
+#include "avro_private.h"
+#include <stdio.h>
+
+int main(void)
+{
+	return 0;
+}
diff --git a/lang/c/tests/test_refcount.c b/lang/c/tests/test_refcount.c
new file mode 100644
index 0000000..1efc06c
--- /dev/null
+++ b/lang/c/tests/test_refcount.c
@@ -0,0 +1,44 @@
+#include <avro.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define SIMPLE_ARRAY \
+"{\"type\": \"array\", \"items\": \"long\"}"
+
+
+int main(void)
+{
+  avro_schema_t schema = NULL;
+  avro_schema_error_t error;
+  avro_value_iface_t *simple_array_class;
+  avro_value_t simple;
+
+  /* Initialize the schema structure from JSON */
+  if (avro_schema_from_json(SIMPLE_ARRAY, sizeof(SIMPLE_ARRAY),
+                            &schema, &error)) {
+    fprintf(stderr, "Unable to parse schema\n");
+    exit(EXIT_FAILURE);
+  }
+
+  /* Create the avro class and value */
+  simple_array_class = avro_generic_class_from_schema(schema);
+  if (simple_array_class == NULL) {
+    fprintf(stderr, "Unable to create simple array class\n");
+    exit(EXIT_FAILURE);
+  }
+
+  if (avro_generic_value_new(simple_array_class, &simple)) {
+    fprintf(stderr, "Error creating instance of array\n");
+    exit(EXIT_FAILURE);
+  }
+
+  /* Release the value, then its class, then the schema */
+  avro_value_decref(&simple);
+  avro_value_iface_decref(simple_array_class);
+  avro_schema_decref(schema);
+
+  return 0;
+}
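
Note the teardown order above: the value is released first, then the class it
was instantiated from, then the schema. The same lifecycle with failures
reported through avro_strerror(), a sketch assuming that error string is
populated by these calls:

	#include <avro.h>
	#include <stdio.h>

	static int make_empty_array(avro_schema_t schema)
	{
	  avro_value_iface_t *iface;
	  avro_value_t value;

	  iface = avro_generic_class_from_schema(schema);
	  if (iface == NULL) {
	    fprintf(stderr, "class: %s\n", avro_strerror());
	    return 1;
	  }
	  if (avro_generic_value_new(iface, &value)) {
	    fprintf(stderr, "value: %s\n", avro_strerror());
	    avro_value_iface_decref(iface);
	    return 1;
	  }
	  /* Release in the same order as the test: value, then class
	   * (the schema stays owned by the caller here). */
	  avro_value_decref(&value);
	  avro_value_iface_decref(iface);
	  return 0;
	}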
diff --git a/lang/c/tests/test_valgrind b/lang/c/tests/test_valgrind
new file mode 100755
index 0000000..f9572c4
--- /dev/null
+++ b/lang/c/tests/test_valgrind
@@ -0,0 +1,33 @@
+#!/bin/sh
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to you under the Apache License, Version 2.0 
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+# 
+# http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.  See the License for the specific language governing
+# permissions and limitations under the License. 
+set +e
+set -x
+
+if ! which valgrind; then
+	echo "Unable to find valgrind installed. Test will not run."
+	# This special exit value will show that we skipped this test
+	exit 77
+fi
+
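+# With -q, valgrind prints nothing unless it finds a problem, and every
+# diagnostic it does print is prefixed with "==PID==", so a grep match
+# below means the leak check failed.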
+../libtool execute valgrind --leak-check=full -q test_avro_data 2>&1 |\
+grep -E '^==[0-9]+== '
+if [ $? -eq 0 ]; then
+	# Expression found. Test failed.
+	exit 1
+else
+	# We're all clean
+	exit 0
+fi
diff --git a/lang/c/version.sh b/lang/c/version.sh
new file mode 100755
index 0000000..b60aba2
--- /dev/null
+++ b/lang/c/version.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+#
+# This script is used to generate version numbers for autotools
+#
+# The top-level project version is collected from VERSION.txt
+# (or, failing that, from ../../share/VERSION.txt)
+#
+# The information for libtool is maintained manually, since
+# the public API for the C library can change independently of the project
+#
+# Do each of these steps in order and libtool will do the right thing
+# (1) If there are changes to libavro:
+#         libavro_micro_version++
+#         libavro_interface_age++ 
+#         libavro_binary_age++
+# (2) If any functions have been added:
+#         libavro_interface_age = 0
+# (3) If backwards compatibility has been broken:
+#         libavro_binary_age = 0
+#         libavro_interface_age = 0
+#
+libavro_micro_version=22
+libavro_interface_age=0
+libavro_binary_age=0
+
+# IGNORE EVERYTHING ELSE FROM HERE DOWN.........
+if test $# != 1; then
+	echo "USAGE: $0 CMD"
+  	echo "  where CMD is one of: project, libtool, libcurrent, librevision, libage"
+	exit 1
+fi
+
+# http://sources.redhat.com/autobook/autobook/autobook_91.html
+# 'Current' is the most recent interface number that this library implements
+libcurrent=$(($libavro_micro_version - $libavro_interface_age))
+# The implementation number of the 'current' interface
+librevision=$libavro_interface_age
+# The difference between the newest and oldest interfaces that this library implements
+# In other words, the library implements all the interface numbers in the range from 
+# number 'current - age' to current
+libage=$(($libavro_binary_age - $libavro_interface_age))
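+# Worked example with the values above: libcurrent = 22 - 0 = 22,
+# librevision = 0, and libage = 0 - 0 = 0, i.e. -version-info 22:0:0.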
+
+if test "$1" = "project"; then
+	project_ver="undef"
+	version_file="VERSION.txt"
+	if test -f $version_file; then
+		project_ver=$(cat $version_file)
+	else
+		version_file="../../share/VERSION.txt"
+		if test -f $version_file; then
+			project_ver=$(cat $version_file)
+		fi
+	fi
+	printf "%s" $project_ver
+elif test "$1" = "libtool"; then
+	# useful for the -version-info flag for libtool
+	printf "%d:%d:%d" $libcurrent $librevision $libage
+elif test "$1" = "libcurrent"; then
+	printf "%d" $libcurrent
+elif test "$1" = "librevision"; then
+	printf "%d" $librevision
+elif test "$1" = "libage"; then
+	printf "%d" $libage
+fi
diff --git a/lang/csharp/.gitignore b/lang/csharp/.gitignore
new file mode 100644
index 0000000..511adaf
--- /dev/null
+++ b/lang/csharp/.gitignore
@@ -0,0 +1,49 @@
+#   Licensed to the Apache Software Foundation (ASF) under one or more
+#   contributor license agreements.  See the NOTICE file distributed with
+#   this work for additional information regarding copyright ownership.
+#   The ASF licenses this file to You under the Apache License, Version 2.0
+#   (the "License"); you may not use this file except in compliance with
+#   the License.  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+/build
+/*.user
+/*.suo
+/_ReSharper.Avro
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+
+# User-specific files
+*.suo
+*.user
+*.sln.docstates
+
+# Build results
+[Dd]ebug/
+[Rr]elease/
+*_i.c
+*_p.c
+*.ilk
+*.meta
+*.obj
+*.pch
+*.pdb
+*.pgc
+*.pgd
+*.rsp
+*.sbr
+*.tlb
+*.tli
+*.tlh
+*.tmp
+*.vspscc
+.builds
+*.dotCover
+*.nupkg
diff --git a/lang/csharp/Avro.dox b/lang/csharp/Avro.dox
new file mode 100644
index 0000000..b285fc4
--- /dev/null
+++ b/lang/csharp/Avro.dox
@@ -0,0 +1,1630 @@
+# Doxyfile 1.7.1
+
+# This file describes the settings to be used by the documentation system
+# doxygen (www.doxygen.org) for a project
+#
+# All text after a hash (#) is considered a comment and will be ignored
+# The format is:
+#       TAG = value [value, ...]
+# For lists items can also be appended using:
+#       TAG += value [value, ...]
+# Values that contain spaces should be placed between quotes (" ")
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+
+# This tag specifies the encoding used for all characters in the config file
+# that follow. The default is UTF-8 which is also the encoding used for all
+# text before the first occurrence of this tag. Doxygen uses libiconv (or the
+# iconv built into libc) for the transcoding. See
+# http://www.gnu.org/software/libiconv for the list of possible encodings.
+
+DOXYFILE_ENCODING      = UTF-8
+
+# The PROJECT_NAME tag is a single word (or a sequence of words surrounded
+# by quotes) that should identify the project.
+
+PROJECT_NAME           = "Avro C#"
+
+# The PROJECT_NUMBER tag can be used to enter a project or revision number.
+# This could be handy for archiving the generated documentation or
+# if some version control system is used.
+
+PROJECT_NUMBER         =
+
+# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
+# base path where the generated documentation will be put.
+# If a relative path is entered, it will be relative to the location
+# where doxygen was started. If left blank the current directory will be used.
+
+OUTPUT_DIRECTORY       = build/doc
+
+# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create
+# 4096 sub-directories (in 2 levels) under the output directory of each output
+# format and will distribute the generated files over these directories.
+# Enabling this option can be useful when feeding doxygen a huge amount of
+# source files, where putting all generated files in the same directory would
+# otherwise cause performance problems for the file system.
+
+CREATE_SUBDIRS         = NO
+
+# The OUTPUT_LANGUAGE tag is used to specify the language in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all constant output in the proper language.
+# The default language is English, other supported languages are:
+# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional,
+# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German,
+# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English
+# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian,
+# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak,
+# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese.
+
+OUTPUT_LANGUAGE        = English
+
+# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will
+# include brief member descriptions after the members that are listed in
+# the file and class documentation (similar to JavaDoc).
+# Set to NO to disable this.
+
+BRIEF_MEMBER_DESC      = YES
+
+# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend
+# the brief description of a member or function before the detailed description.
+# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
+# brief descriptions will be completely suppressed.
+
+REPEAT_BRIEF           = YES
+
+# This tag implements a quasi-intelligent brief description abbreviator
+# that is used to form the text in various listings. Each string
+# in this list, if found as the leading text of the brief description, will be
+# stripped from the text and the result after processing the whole list, is
+# used as the annotated text. Otherwise, the brief description is used as-is.
+# If left blank, the following values are used ("$name" is automatically
+# replaced with the name of the entity): "The $name class" "The $name widget"
+# "The $name file" "is" "provides" "specifies" "contains"
+# "represents" "a" "an" "the"
+
+ABBREVIATE_BRIEF       =
+
+# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
+# Doxygen will generate a detailed section even if there is only a brief
+# description.
+
+ALWAYS_DETAILED_SEC    = NO
+
+# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
+# inherited members of a class in the documentation of that class as if those
+# members were ordinary class members. Constructors, destructors and assignment
+# operators of the base classes will not be shown.
+
+INLINE_INHERITED_MEMB  = NO
+
+# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full
+# path before files name in the file list and in the header files. If set
+# to NO the shortest path that makes the file name unique will be used.
+
+FULL_PATH_NAMES        = YES
+
+# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag
+# can be used to strip a user-defined part of the path. Stripping is
+# only done if one of the specified strings matches the left-hand part of
+# the path. The tag can be used to show relative paths in the file list.
+# If left blank the directory from which doxygen is run is used as the
+# path to strip.
+
+STRIP_FROM_PATH        =
+
+# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of
+# the path mentioned in the documentation of a class, which tells
+# the reader which header file to include in order to use a class.
+# If left blank only the name of the header file containing the class
+# definition is used. Otherwise one should specify the include paths that
+# are normally passed to the compiler using the -I flag.
+
+STRIP_FROM_INC_PATH    =
+
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter
+# (but less readable) file names. This can be useful if your file system
+# doesn't support long names, like on DOS, Mac, or CD-ROM.
+
+SHORT_NAMES            = NO
+
+# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen
+# will interpret the first line (until the first dot) of a JavaDoc-style
+# comment as the brief description. If set to NO, the JavaDoc
+# comments will behave just like regular Qt-style comments
+# (thus requiring an explicit @brief command for a brief description.)
+
+JAVADOC_AUTOBRIEF      = NO
+
+# If the QT_AUTOBRIEF tag is set to YES then Doxygen will
+# interpret the first line (until the first dot) of a Qt-style
+# comment as the brief description. If set to NO, the comments
+# will behave just like regular Qt-style comments (thus requiring
+# an explicit \brief command for a brief description.)
+
+QT_AUTOBRIEF           = NO
+
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen
+# treat a multi-line C++ special comment block (i.e. a block of //! or ///
+# comments) as a brief description. This used to be the default behaviour.
+# The new default is to treat a multi-line C++ comment block as a detailed
+# description. Set this tag to YES if you prefer the old behaviour instead.
+
+MULTILINE_CPP_IS_BRIEF = NO
+
+# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented
+# member inherits the documentation from any documented member that it
+# re-implements.
+
+INHERIT_DOCS           = YES
+
+# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce
+# a new page for each member. If set to NO, the documentation of a member will
+# be part of the file/class/namespace that contains it.
+
+SEPARATE_MEMBER_PAGES  = NO
+
+# The TAB_SIZE tag can be used to set the number of spaces in a tab.
+# Doxygen uses this value to replace tabs by spaces in code fragments.
+
+TAB_SIZE               = 8
+
+# This tag can be used to specify a number of aliases that acts
+# as commands in the documentation. An alias has the form "name=value".
+# For example adding "sideeffect=\par Side Effects:\n" will allow you to
+# put the command \sideeffect (or @sideeffect) in the documentation, which
+# will result in a user-defined paragraph with heading "Side Effects:".
+# You can put \n's in the value part of an alias to insert newlines.
+
+ALIASES                =
+
+# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C
+# sources only. Doxygen will then generate output that is more tailored for C.
+# For instance, some of the names that are used will be different. The list
+# of all members will be omitted, etc.
+
+OPTIMIZE_OUTPUT_FOR_C  = NO
+
+# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java
+# sources only. Doxygen will then generate output that is more tailored for
+# Java. For instance, namespaces will be presented as packages, qualified
+# scopes will look different, etc.
+
+OPTIMIZE_OUTPUT_JAVA   = NO
+
+# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
+# sources only. Doxygen will then generate output that is more tailored for
+# Fortran.
+
+OPTIMIZE_FOR_FORTRAN   = NO
+
+# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
+# sources. Doxygen will then generate output that is tailored for
+# VHDL.
+
+OPTIMIZE_OUTPUT_VHDL   = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it
+# parses. With this tag you can assign which parser to use for a given extension.
+# Doxygen has a built-in mapping, but you can override or extend it using this
+# tag. The format is ext=language, where ext is a file extension, and language
+# is one of the parsers supported by doxygen: IDL, Java, Javascript, CSharp, C,
+# C++, D, PHP, Objective-C, Python, Fortran, VHDL. For instance to make
+# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C
+# (default is Fortran), use: inc=Fortran f=C. Note that for custom extensions
+# you also need to set FILE_PATTERNS otherwise the files are not read by doxygen.
+
+EXTENSION_MAPPING      =
+
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
+# to include (a tag file for) the STL sources as input, then you should
+# set this tag to YES in order to let doxygen match functions declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string) vs.
+# func(std::string) {}). This also makes the inheritance and collaboration
+# diagrams that involve STL classes more complete and accurate.
+
+BUILTIN_STL_SUPPORT    = NO
+
+# If you use Microsoft's C++/CLI language, you should set this option to YES to
+# enable parsing support.
+
+CPP_CLI_SUPPORT        = NO
+
+# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only.
+# Doxygen will parse them like normal C++ but will assume all classes use public
+# instead of private inheritance when no explicit protection keyword is present.
+
+SIP_SUPPORT            = NO
+
+# For Microsoft's IDL there are propget and propput attributes to indicate getter
+# and setter methods for a property. Setting this option to YES (the default)
+# will make doxygen replace the get and set methods by a property in the
+# documentation. This will only work if the methods are indeed getting or
+# setting a simple type. If this is not the case, or you want to show the
+# methods anyway, you should set this option to NO.
+
+IDL_PROPERTY_SUPPORT   = YES
+
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES, then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
+# all members of a group must be documented explicitly.
+
+DISTRIBUTE_GROUP_DOC   = NO
+
+# Set the SUBGROUPING tag to YES (the default) to allow class member groups of
+# the same type (for instance a group of public functions) to be put as a
+# subgroup of that type (e.g. under the Public Functions section). Set it to
+# NO to prevent subgrouping. Alternatively, this can be done per class using
+# the \nosubgrouping command.
+
+SUBGROUPING            = YES
+
+# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum
+# is documented as struct, union, or enum with the name of the typedef. So
+# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
+# with name TypeT. When disabled the typedef will appear as a member of a file,
+# namespace, or class. And the struct will be named TypeS. This can typically
+# be useful for C code in case the coding convention dictates that all compound
+# types are typedef'ed and only the typedef is referenced, never the tag name.
+
+TYPEDEF_HIDES_STRUCT   = NO
+
+# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to
+# determine which symbols to keep in memory and which to flush to disk.
+# When the cache is full, less often used symbols will be written to disk.
+# For small to medium size projects (<1000 input files) the default value is
+# probably good enough. For larger projects a too small cache size can cause
+# doxygen to be busy swapping symbols to and from disk most of the time
+# causing a significant performance penalty.
+# If the system has enough physical memory increasing the cache will improve the
+# performance by keeping more symbols in memory. Note that the value works on
+# a logarithmic scale so increasing the size by one will roughly double the
+# memory usage. The cache size is given by this formula:
+# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0,
+# corresponding to a cache size of 2^16 = 65536 symbols
+
+SYMBOL_CACHE_SIZE      = 0
+
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+
+# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
+# documentation are documented, even if no documentation was available.
+# Private class members and static file members will be hidden unless
+# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES
+
+EXTRACT_ALL            = NO
+
+# If the EXTRACT_PRIVATE tag is set to YES all private members of a class
+# will be included in the documentation.
+
+EXTRACT_PRIVATE        = NO
+
+# If the EXTRACT_STATIC tag is set to YES all static members of a file
+# will be included in the documentation.
+
+EXTRACT_STATIC         = NO
+
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs)
+# defined locally in source files will be included in the documentation.
+# If set to NO only classes defined in header files are included.
+
+EXTRACT_LOCAL_CLASSES  = YES
+
+# This flag is only useful for Objective-C code. When set to YES local
+# methods, which are defined in the implementation section but not in
+# the interface are included in the documentation.
+# If set to NO (the default) only methods in the interface are included.
+
+EXTRACT_LOCAL_METHODS  = NO
+
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base
+# name of the file that contains the anonymous namespace. By default
+# anonymous namespace are hidden.
+
+EXTRACT_ANON_NSPACES   = NO
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all
+# undocumented members of documented classes, files or namespaces.
+# If set to NO (the default) these members will be included in the
+# various overviews, but no documentation section is generated.
+# This option has no effect if EXTRACT_ALL is enabled.
+
+HIDE_UNDOC_MEMBERS     = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy.
+# If set to NO (the default) these classes will be included in the various
+# overviews. This option has no effect if EXTRACT_ALL is enabled.
+
+HIDE_UNDOC_CLASSES     = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all
+# friend (class|struct|union) declarations.
+# If set to NO (the default) these declarations will be included in the
+# documentation.
+
+HIDE_FRIEND_COMPOUNDS  = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any
+# documentation blocks found inside the body of a function.
+# If set to NO (the default) these blocks will be appended to the
+# function's detailed documentation block.
+
+HIDE_IN_BODY_DOCS      = NO
+
+# The INTERNAL_DOCS tag determines if documentation
+# that is typed after a \internal command is included. If the tag is set
+# to NO (the default) then the documentation will be excluded.
+# Set it to YES to include the internal documentation.
+
+INTERNAL_DOCS          = NO
+
+# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate
+# file names in lower-case letters. If set to YES upper-case letters are also
+# allowed. This is useful if you have classes or files whose names only differ
+# in case and if your file system supports case sensitive file names. Windows
+# and Mac users are advised to set this option to NO.
+
+CASE_SENSE_NAMES       = YES
+
+# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen
+# will show members with their full class and namespace scopes in the
+# documentation. If set to YES the scope will be hidden.
+
+HIDE_SCOPE_NAMES       = NO
+
+# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen
+# will put a list of the files that are included by a file in the documentation
+# of that file.
+
+SHOW_INCLUDE_FILES     = YES
+
+# If the FORCE_LOCAL_INCLUDES tag is set to YES then Doxygen
+# will list include files with double quotes in the documentation
+# rather than with sharp brackets.
+
+FORCE_LOCAL_INCLUDES   = NO
+
+# If the INLINE_INFO tag is set to YES (the default) then a tag [inline]
+# is inserted in the documentation for inline members.
+
+INLINE_INFO            = YES
+
+# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen
+# will sort the (detailed) documentation of file and class members
+# alphabetically by member name. If set to NO the members will appear in
+# declaration order.
+
+SORT_MEMBER_DOCS       = YES
+
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the
+# brief documentation of file, namespace and class members alphabetically
+# by member name. If set to NO (the default) the members will appear in
+# declaration order.
+
+SORT_BRIEF_DOCS        = NO
+
+# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen
+# will sort the (brief and detailed) documentation of class members so that
+# constructors and destructors are listed first. If set to NO (the default)
+# the constructors will appear in the respective orders defined by
+# SORT_MEMBER_DOCS and SORT_BRIEF_DOCS.
+# This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO
+# and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO.
+
+SORT_MEMBERS_CTORS_1ST = NO
+
+# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the
+# hierarchy of group names into alphabetical order. If set to NO (the default)
+# the group names will appear in their defined order.
+
+SORT_GROUP_NAMES       = NO
+
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be
+# sorted by fully-qualified names, including namespaces. If set to
+# NO (the default), the class list will be sorted only by class name,
+# not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the
+# alphabetical list.
+
+SORT_BY_SCOPE_NAME     = NO
+
+# The GENERATE_TODOLIST tag can be used to enable (YES) or
+# disable (NO) the todo list. This list is created by putting \todo
+# commands in the documentation.
+
+GENERATE_TODOLIST      = YES
+
+# The GENERATE_TESTLIST tag can be used to enable (YES) or
+# disable (NO) the test list. This list is created by putting \test
+# commands in the documentation.
+
+GENERATE_TESTLIST      = YES
+
+# The GENERATE_BUGLIST tag can be used to enable (YES) or
+# disable (NO) the bug list. This list is created by putting \bug
+# commands in the documentation.
+
+GENERATE_BUGLIST       = YES
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or
+# disable (NO) the deprecated list. This list is created by putting
+# \deprecated commands in the documentation.
+
+GENERATE_DEPRECATEDLIST= YES
+
+# The ENABLED_SECTIONS tag can be used to enable conditional
+# documentation sections, marked by \if sectionname ... \endif.
+
+ENABLED_SECTIONS       =
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines
+# the initial value of a variable or define consists of for it to appear in
+# the documentation. If the initializer consists of more lines than specified
+# here it will be hidden. Use a value of 0 to hide initializers completely.
+# The appearance of the initializer of individual variables and defines in the
+# documentation can be controlled using \showinitializer or \hideinitializer
+# command in the documentation regardless of this setting.
+
+MAX_INITIALIZER_LINES  = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
+# at the bottom of the documentation of classes and structs. If set to YES the
+# list will mention the files that were used to generate the documentation.
+
+SHOW_USED_FILES        = YES
+
+# If the sources in your project are distributed over multiple directories
+# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy
+# in the documentation. The default is NO.
+
+SHOW_DIRECTORIES       = NO
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page.
+# This will remove the Files entry from the Quick Index and from the
+# Folder Tree View (if specified). The default is YES.
+
+SHOW_FILES             = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the
+# Namespaces page.
+# This will remove the Namespaces entry from the Quick Index
+# and from the Folder Tree View (if specified). The default is YES.
+
+SHOW_NAMESPACES        = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command <command> <input-file>, where <command> is the value of
+# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
+# provided by doxygen. Whatever the program writes to standard output
+# is used as the file version. See the manual for examples.
+
+FILE_VERSION_FILTER    =
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option.
+# You can optionally specify a file name after the option; if omitted,
+# DoxygenLayout.xml will be used as the name of the layout file.
+
+LAYOUT_FILE            =
+
+#---------------------------------------------------------------------------
+# configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated
+# by doxygen. Possible values are YES and NO. If left blank NO is used.
+
+QUIET                  = NO
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated by doxygen. Possible values are YES and NO. If left blank
+# NO is used.
+
+WARNINGS               = YES
+
+# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
+# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
+# automatically be disabled.
+
+WARN_IF_UNDOCUMENTED   = YES
+
+# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some
+# parameters in a documented function, or documenting parameters that
+# don't exist or using markup commands wrongly.
+
+WARN_IF_DOC_ERROR      = YES
+
+# The WARN_NO_PARAMDOC option can be enabled to get warnings for
+# functions that are documented, but have no documentation for their parameters
+# or return value. If set to NO (the default) doxygen will only warn about
+# wrong or incomplete parameter documentation, but not about the absence of
+# documentation.
+
+WARN_NO_PARAMDOC       = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that
+# doxygen can produce. The string should contain the $file, $line, and $text
+# tags, which will be replaced by the file and line number from which the
+# warning originated and the warning text. Optionally the format may contain
+# $version, which will be replaced by the version of the file (if it could
+# be obtained via FILE_VERSION_FILTER)
+
+WARN_FORMAT            = "$file:$line: $text"
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning
+# and error messages should be written. If left blank the output is written
+# to stderr.
+
+WARN_LOGFILE           =
+
+#---------------------------------------------------------------------------
+# configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag can be used to specify the files and/or directories that contain
+# documented source files. You may enter file names like "myfile.cpp" or
+# directories like "/usr/src/myproject". Separate the files or directories
+# with spaces.
+
+INPUT                  = src/apache/main
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is
+# also the default input encoding. Doxygen uses libiconv (or the iconv built
+# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for
+# the list of possible encodings.
+
+INPUT_ENCODING         = UTF-8
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
+# and *.h) to filter out the source-files in the directories. If left
+# blank the following patterns are tested:
+# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx
+# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90
+
+FILE_PATTERNS          =
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories
+# should be searched for input files as well. Possible values are YES and NO.
+# If left blank NO is used.
+
+RECURSIVE              = YES
+
+# The EXCLUDE tag can be used to specify files and/or directories that should
+# be excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+
+EXCLUDE                =
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix filesystem feature) are excluded
+# from the input.
+
+EXCLUDE_SYMLINKS       = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories. Note that the wildcards are matched
+# against the file with absolute path, so to exclude all test directories
+# for example use the pattern */test/*
+
+EXCLUDE_PATTERNS       =
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+
+EXCLUDE_SYMBOLS        =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or
+# directories that contain example code fragments that are included (see
+# the \include command).
+
+EXAMPLE_PATH           =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
+# and *.h) to filter out the source-files in the directories. If left
+# blank all files are included.
+
+EXAMPLE_PATTERNS       =
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude
+# commands irrespective of the value of the RECURSIVE tag.
+# Possible values are YES and NO. If left blank NO is used.
+
+EXAMPLE_RECURSIVE      = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or
+# directories that contain images that are included in the documentation (see
+# the \image command).
+
+IMAGE_PATH             =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command <filter> <input-file>, where <filter>
+# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
+# input file. Doxygen will then use the output that the filter program writes
+# to standard output.
+# If FILTER_PATTERNS is specified, this tag will be
+# ignored.
+
+INPUT_FILTER           =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis.
+# Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match.
+# The filters are a list of the form:
+# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further
+# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER
+# is applied to all files.
+
+FILTER_PATTERNS        =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will be used to filter the input files when producing source
+# files to browse (i.e. when SOURCE_BROWSER is set to YES).
+
+FILTER_SOURCE_FILES    = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will
+# be generated. Documented entities will be cross-referenced with these sources.
+# Note: To get rid of all source code in the generated output, make sure also
+# VERBATIM_HEADERS is set to NO.
+
+SOURCE_BROWSER         = NO
+
+# Setting the INLINE_SOURCES tag to YES will include the body
+# of functions and classes directly in the documentation.
+
+INLINE_SOURCES         = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct
+# doxygen to hide any special comment blocks from generated source code
+# fragments. Normal C and C++ comments will always remain visible.
+
+STRIP_CODE_COMMENTS    = YES
+
+# If the REFERENCED_BY_RELATION tag is set to YES
+# then for each documented function all documented
+# functions referencing it will be listed.
+
+REFERENCED_BY_RELATION = NO
+
+# If the REFERENCES_RELATION tag is set to YES
+# then for each documented function all documented entities
+# called/used by that function will be listed.
+
+REFERENCES_RELATION    = NO
+
+# If the REFERENCES_LINK_SOURCE tag is set to YES (the default)
+# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from
+# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will
+# link to the source code.
+# Otherwise they will link to the documentation.
+
+REFERENCES_LINK_SOURCE = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code
+# will point to the HTML generated by the htags(1) tool instead of doxygen
+# built-in source browser. The htags tool is part of GNU's global source
+# tagging system (see http://www.gnu.org/software/global/global.html). You
+# will need version 4.8.6 or higher.
+
+USE_HTAGS              = NO
+
+# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen
+# will generate a verbatim copy of the header file for each class for
+# which an include is specified. Set to NO to disable this.
+
+VERBATIM_HEADERS       = YES
+
+#---------------------------------------------------------------------------
+# configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index
+# of all compounds will be generated. Enable this if the project
+# contains a lot of classes, structs, unions or interfaces.
+
+ALPHABETICAL_INDEX     = YES
+
+# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then
+# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns
+# in which this list will be split (can be a number in the range [1..20])
+
+COLS_IN_ALPHA_INDEX    = 5
+
+# In case all classes in a project start with a common prefix, all
+# classes will be put under the same header in the alphabetical index.
+# The IGNORE_PREFIX tag can be used to specify one or more prefixes that
+# should be ignored while generating the index headers.
+
+IGNORE_PREFIX          =
+
+#---------------------------------------------------------------------------
+# configuration options related to the HTML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_HTML tag is set to YES (the default) Doxygen will
+# generate HTML output.
+
+GENERATE_HTML          = YES
+
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `html' will be used as the default path.
+
+HTML_OUTPUT            = .
+
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for
+# each generated HTML page (for example: .htm,.php,.asp). If it is left blank
+# doxygen will generate files with .html extension.
+
+HTML_FILE_EXTENSION    = .html
+
+# The HTML_HEADER tag can be used to specify a personal HTML header for
+# each generated HTML page. If it is left blank doxygen will generate a
+# standard header.
+
+HTML_HEADER            =
+
+# The HTML_FOOTER tag can be used to specify a personal HTML footer for
+# each generated HTML page. If it is left blank doxygen will generate a
+# standard footer.
+
+HTML_FOOTER            =
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading
+# style sheet that is used by each HTML page. It can be used to
+# fine-tune the look of the HTML output. If the tag is left blank doxygen
+# will generate a default style sheet. Note that doxygen will try to copy
+# the style sheet file to the HTML output directory, so don't put your own
+# stylesheet in the HTML output directory as well, or it will be erased!
+
+HTML_STYLESHEET        =
+
+# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output.
+# Doxygen will adjust the colors in the stylesheet and background images
+# according to this color. Hue is specified as an angle on a colorwheel,
+# see http://en.wikipedia.org/wiki/Hue for more information.
+# For instance the value 0 represents red, 60 is yellow, 120 is green,
+# 180 is cyan, 240 is blue, 300 purple, and 360 is red again.
+# The allowed range is 0 to 359.
+
+HTML_COLORSTYLE_HUE    = 220
+
+# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of
+# the colors in the HTML output. For a value of 0 the output will use
+# grayscales only. A value of 255 will produce the most vivid colors.
+
+HTML_COLORSTYLE_SAT    = 100
+
+# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to
+# the luminance component of the colors in the HTML output. Values below
+# 100 gradually make the output lighter, whereas values above 100 make
+# the output darker. The value divided by 100 is the actual gamma applied,
+# so 80 represents a gamma of 0.8. The value 220 represents a gamma of 2.2,
+# and 100 does not change the gamma.
+
+HTML_COLORSTYLE_GAMMA  = 80
+
+# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
+# page will contain the date and time when the page was generated. Setting
+# this to NO can help when comparing the output of multiple runs.
+
+HTML_TIMESTAMP         = YES
+
+# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes,
+# files or namespaces will be aligned in HTML using tables. If set to
+# NO a bullet list will be used.
+
+HTML_ALIGN_MEMBERS     = YES
+
+# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
+# documentation will contain sections that can be hidden and shown after the
+# page has loaded. For this to work a browser that supports
+# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox,
+# Netscape 6.0+, Internet Explorer 5.0+, Konqueror, or Safari).
+
+HTML_DYNAMIC_SECTIONS  = NO
+
+# If the GENERATE_DOCSET tag is set to YES, additional index files
+# will be generated that can be used as input for Apple's Xcode 3
+# integrated development environment, introduced with OSX 10.5 (Leopard).
+# To create a documentation set, doxygen will generate a Makefile in the
+# HTML output directory. Running make will produce the docset in that
+# directory and running "make install" will install the docset in
+# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find
+# it at startup.
+# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
+# for more information.
+
+GENERATE_DOCSET        = NO
+
+# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the
+# feed. A documentation feed provides an umbrella under which multiple
+# documentation sets from a single provider (such as a company or product suite)
+# can be grouped.
+
+DOCSET_FEEDNAME        = "Doxygen generated docs"
+
+# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that
+# should uniquely identify the documentation set bundle. This should be a
+# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen
+# will append .docset to the name.
+
+DOCSET_BUNDLE_ID       = org.doxygen.Project
+
+# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
+# the documentation publisher. This should be a reverse domain-name style
+# string, e.g. com.mycompany.MyDocSet.documentation.
+
+DOCSET_PUBLISHER_ID    = org.doxygen.Publisher
+
+# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
+
+DOCSET_PUBLISHER_NAME  = Publisher
+
+# If the GENERATE_HTMLHELP tag is set to YES, additional index files
+# will be generated that can be used as input for tools like the
+# Microsoft HTML help workshop to generate a compiled HTML help file (.chm)
+# of the generated HTML documentation.
+
+GENERATE_HTMLHELP      = NO
+
+# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can
+# be used to specify the file name of the resulting .chm file. You
+# can add a path in front of the file if the result should not be
+# written to the html output directory.
+
+CHM_FILE               =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can
+# be used to specify the location (absolute path including file name) of
+# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run
+# the HTML help compiler on the generated index.hhp.
+
+HHC_LOCATION           =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag
+# controls if a separate .chi index file is generated (YES) or that
+# it should be included in the master .chm file (NO).
+
+GENERATE_CHI           = NO
+
+# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING
+# is used to encode HtmlHelp index (hhk), content (hhc) and project file
+# content.
+
+CHM_INDEX_ENCODING     =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag
+# controls whether a binary table of contents is generated (YES) or a
+# normal table of contents (NO) in the .chm file.
+
+BINARY_TOC             = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members
+# to the contents of the HTML help documentation and to the tree view.
+
+TOC_EXPAND             = NO
+
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
+# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated
+# that can be used as input for Qt's qhelpgenerator to generate a
+# Qt Compressed Help (.qch) of the generated HTML documentation.
+
+GENERATE_QHP           = NO
+
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can
+# be used to specify the file name of the resulting .qch file.
+# The path specified is relative to the HTML output folder.
+
+QCH_FILE               =
+
+# The QHP_NAMESPACE tag specifies the namespace to use when generating
+# Qt Help Project output. For more information please see
+# http://doc.trolltech.com/qthelpproject.html#namespace
+
+QHP_NAMESPACE          = org.doxygen.Project
+
+# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating
+# Qt Help Project output. For more information please see
+# http://doc.trolltech.com/qthelpproject.html#virtual-folders
+
+QHP_VIRTUAL_FOLDER     = doc
+
+# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to
+# add. For more information please see
+# http://doc.trolltech.com/qthelpproject.html#custom-filters
+
+QHP_CUST_FILTER_NAME   =
+
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
+# custom filter to add. For more information please see
+# <a href="http://doc.trolltech.com/qthelpproject.html#custom-filters">
+# Qt Help Project / Custom Filters</a>.
+
+QHP_CUST_FILTER_ATTRS  =
+
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
+# project's filter section matches. See
+# <a href="http://doc.trolltech.com/qthelpproject.html#filter-attributes">
+# Qt Help Project / Filter Attributes</a>.
+
+QHP_SECT_FILTER_ATTRS  =
+
+# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can
+# be used to specify the location of Qt's qhelpgenerator.
+# If non-empty doxygen will try to run qhelpgenerator on the generated
+# .qhp file.
+
+QHG_LOCATION           =
+
+# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files
+# will be generated, which, together with the HTML files, form an Eclipse help
+# plugin. To install this plugin and make it available under the help contents
+# menu in Eclipse, the contents of the directory containing the HTML and XML
+# files needs to be copied into the plugins directory of eclipse. The name of
+# the directory within the plugins directory should be the same as
+# the ECLIPSE_DOC_ID value. After copying, Eclipse needs to be restarted before
+# the help appears.
+
+GENERATE_ECLIPSEHELP   = NO
+
+# A unique identifier for the eclipse help plugin. When installing the plugin
+# the directory name containing the HTML and XML files should also have
+# this name.
+
+ECLIPSE_DOC_ID         = org.doxygen.Project
+
+# The DISABLE_INDEX tag can be used to turn on/off the condensed index at
+# top of each HTML page. The value NO (the default) enables the index and
+# the value YES disables it.
+
+DISABLE_INDEX          = NO
+
+# This tag can be used to set the number of enum values (range [1..20])
+# that doxygen will group on one line in the generated HTML documentation.
+
+ENUM_VALUES_PER_LINE   = 4
+
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information.
+# If the tag value is set to YES, a side panel will be generated
+# containing a tree-like index structure (just like the one that
+# is generated for HTML Help). For this to work a browser that supports
+# JavaScript, DHTML, CSS and frames is required (i.e. any modern browser).
+# Windows users are probably better off using the HTML help feature.
+
+GENERATE_TREEVIEW      = NO
+
+# By enabling USE_INLINE_TREES, doxygen will generate the Groups, Directories,
+# and Class Hierarchy pages using a tree view instead of an ordered list.
+
+USE_INLINE_TREES       = NO
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be
+# used to set the initial width (in pixels) of the frame in which the tree
+# is shown.
+
+TREEVIEW_WIDTH         = 250
+
+# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open
+# links to external symbols imported via tag files in a separate window.
+
+EXT_LINKS_IN_WINDOW    = NO
+
+# Use this tag to change the font size of Latex formulas included
+# as images in the HTML documentation. The default is 10. Note that
+# when you change the font size after a successful doxygen run you need
+# to manually remove any form_*.png images from the HTML output directory
+# to force them to be regenerated.
+
+FORMULA_FONTSIZE       = 10
+
+# Use the FORMULA_TRANPARENT tag to determine whether or not the images
+# generated for formulas are transparent PNGs. Transparent PNGs are
+# not supported properly for IE 6.0, but are supported on all modern browsers.
+# Note that when changing this option you need to delete any form_*.png files
+# in the HTML output before the changes have effect.
+
+FORMULA_TRANSPARENT    = YES
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box
+# for the HTML output. The underlying search engine uses javascript
+# and DHTML and should work on any modern browser. Note that when using
+# HTML help (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets
+# (GENERATE_DOCSET) there is already a search function so this one should
+# typically be disabled. For large projects the javascript based search engine
+# can be slow; in that case enabling SERVER_BASED_SEARCH may provide a better
+# solution.
+
+SEARCHENGINE           = YES
+
+# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
+# implemented using a PHP enabled web server instead of at the web client
+# using Javascript. Doxygen will generate the search PHP script and index
+# file to put on the web server. The advantage of the server
+# based approach is that it scales better to large projects and allows
+# full text search. The disadvantage is that it is more difficult to set up
+# and does not have live searching capabilities.
+
+SERVER_BASED_SEARCH    = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will
+# generate LaTeX output.
+
+GENERATE_LATEX         = NO
+
+# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `latex' will be used as the default path.
+
+LATEX_OUTPUT           = latex
+
+# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
+# invoked. If left blank `latex' will be used as the default command name.
+# Note that when enabling USE_PDFLATEX this option is only used for
+# generating bitmaps for formulas in the HTML output, but not in the
+# Makefile that is written to the output directory.
+
+LATEX_CMD_NAME         = latex
+
+# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to
+# generate index for LaTeX. If left blank `makeindex' will be used as the
+# default command name.
+
+MAKEINDEX_CMD_NAME     = makeindex
+
+# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact
+# LaTeX documents. This may be useful for small projects and may help to
+# save some trees in general.
+
+COMPACT_LATEX          = NO
+
+# The PAPER_TYPE tag can be used to set the paper type that is used
+# by the printer. Possible values are: a4, a4wide, letter, legal and
+# executive. If left blank a4wide will be used.
+
+PAPER_TYPE             = a4wide
+
+# The EXTRA_PACKAGES tag can be used to specify one or more names of LaTeX
+# packages that should be included in the LaTeX output.
+
+EXTRA_PACKAGES         =
+
+# The LATEX_HEADER tag can be used to specify a personal LaTeX header for
+# the generated latex document. The header should contain everything until
+# the first chapter. If it is left blank doxygen will generate a
+# standard header. Notice: only use this tag if you know what you are doing!
+
+LATEX_HEADER           =
+
+# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated
+# is prepared for conversion to pdf (using ps2pdf). The pdf file will
+# contain links (just like the HTML output) instead of page references.
+# This makes the output suitable for online browsing using a pdf viewer.
+
+PDF_HYPERLINKS         = YES
+
+# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of
+# plain latex in the generated Makefile. Set this option to YES to get a
+# higher quality PDF documentation.
+
+USE_PDFLATEX           = YES
+
+# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode
+# command to the generated LaTeX files. This will instruct LaTeX to keep
+# running if errors occur, instead of asking the user for help.
+# This option is also used when generating formulas in HTML.
+
+LATEX_BATCHMODE        = NO
+
+# If LATEX_HIDE_INDICES is set to YES then doxygen will not
+# include the index chapters (such as File Index, Compound Index, etc.)
+# in the output.
+
+LATEX_HIDE_INDICES     = NO
+
+# If LATEX_SOURCE_CODE is set to YES then doxygen will include
+# source code with syntax highlighting in the LaTeX output.
+# Note that which sources are shown also depends on other settings
+# such as SOURCE_BROWSER.
+
+LATEX_SOURCE_CODE      = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the RTF output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output
+# The RTF output is optimized for Word 97 and may not look very pretty with
+# other RTF readers or editors.
+
+GENERATE_RTF           = NO
+
+# The RTF_OUTPUT tag is used to specify where the RTF docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `rtf' will be used as the default path.
+
+RTF_OUTPUT             = rtf
+
+# If the COMPACT_RTF tag is set to YES Doxygen generates more compact
+# RTF documents. This may be useful for small projects and may help to
+# save some trees in general.
+
+COMPACT_RTF            = NO
+
+# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated
+# will contain hyperlink fields. The RTF file will
+# contain links (just like the HTML output) instead of page references.
+# This makes the output suitable for online browsing using WORD or other
+# programs which support those fields.
+# Note: wordpad (write) and others do not support links.
+
+RTF_HYPERLINKS         = NO
+
+# Load stylesheet definitions from file. Syntax is similar to doxygen's
+# config file, i.e. a series of assignments. You only have to provide
+# replacements; missing definitions are set to their default value.
+
+RTF_STYLESHEET_FILE    =
+
+# Set optional variables used in the generation of an rtf document.
+# Syntax is similar to doxygen's config file.
+
+RTF_EXTENSIONS_FILE    =
+
+#---------------------------------------------------------------------------
+# configuration options related to the man page output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_MAN tag is set to YES Doxygen will
+# generate man pages.
+
+GENERATE_MAN           = NO
+
+# The MAN_OUTPUT tag is used to specify where the man pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `man' will be used as the default path.
+
+MAN_OUTPUT             = man
+
+# The MAN_EXTENSION tag determines the extension that is added to
+# the generated man pages (default is the subroutine's section .3)
+
+MAN_EXTENSION          = .3
+
+# If the MAN_LINKS tag is set to YES and Doxygen generates man output,
+# then it will generate one additional man file for each entity
+# documented in the real man page(s). These additional files
+# only source the real man page, but without them the man command
+# would be unable to find the correct page. The default is NO.
+
+MAN_LINKS              = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the XML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_XML tag is set to YES Doxygen will
+# generate an XML file that captures the structure of
+# the code including all documentation.
+
+GENERATE_XML           = NO
+
+# The XML_OUTPUT tag is used to specify where the XML pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `xml' will be used as the default path.
+
+XML_OUTPUT             = xml
+
+# The XML_SCHEMA tag can be used to specify an XML schema,
+# which can be used by a validating XML parser to check the
+# syntax of the XML files.
+
+XML_SCHEMA             =
+
+# The XML_DTD tag can be used to specify an XML DTD,
+# which can be used by a validating XML parser to check the
+# syntax of the XML files.
+
+XML_DTD                =
+
+# If the XML_PROGRAMLISTING tag is set to YES Doxygen will
+# dump the program listings (including syntax highlighting
+# and cross-referencing information) to the XML output. Note that
+# enabling this will significantly increase the size of the XML output.
+
+XML_PROGRAMLISTING     = YES
+
+#---------------------------------------------------------------------------
+# configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will
+# generate an AutoGen Definitions (see autogen.sf.net) file
+# that captures the structure of the code including all
+# documentation. Note that this feature is still experimental
+# and incomplete at the moment.
+
+GENERATE_AUTOGEN_DEF   = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_PERLMOD tag is set to YES Doxygen will
+# generate a Perl module file that captures the structure of
+# the code including all documentation. Note that this
+# feature is still experimental and incomplete at the
+# moment.
+
+GENERATE_PERLMOD       = NO
+
+# If the PERLMOD_LATEX tag is set to YES Doxygen will generate
+# the necessary Makefile rules, Perl scripts and LaTeX code to be able
+# to generate PDF and DVI output from the Perl module output.
+
+PERLMOD_LATEX          = NO
+
+# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be
+# nicely formatted so it can be parsed by a human reader. This is useful
+# if you want to understand what is going on. On the other hand, if this
+# tag is set to NO the size of the Perl module output will be much smaller
+# and Perl will parse it just the same.
+
+PERLMOD_PRETTY         = YES
+
+# The names of the make variables in the generated doxyrules.make file
+# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX.
+# This is useful so different doxyrules.make files included by the same
+# Makefile don't overwrite each other's variables.
+
+PERLMOD_MAKEVAR_PREFIX =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+
+# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will
+# evaluate all C-preprocessor directives found in the sources and include
+# files.
+
+ENABLE_PREPROCESSING   = YES
+
+# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro
+# names in the source code. If set to NO (the default) only conditional
+# compilation will be performed. Macro expansion can be done in a controlled
+# way by setting EXPAND_ONLY_PREDEF to YES.
+
+MACRO_EXPANSION        = NO
+
+# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES
+# then the macro expansion is limited to the macros specified with the
+# PREDEFINED and EXPAND_AS_DEFINED tags.
+
+EXPAND_ONLY_PREDEF     = NO
+
+# If the SEARCH_INCLUDES tag is set to YES (the default) the include files
+# in the INCLUDE_PATH (see below) will be searched if a #include is found.
+
+SEARCH_INCLUDES        = YES
+
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by
+# the preprocessor.
+
+INCLUDE_PATH           =
+
+# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
+# patterns (like *.h and *.hpp) to filter out the header-files in the
+# directories. If left blank, the patterns specified with FILE_PATTERNS will
+# be used.
+
+INCLUDE_FILE_PATTERNS  =
+
+# The PREDEFINED tag can be used to specify one or more macro names that
+# are defined before the preprocessor is started (similar to the -D option of
+# gcc). The argument of the tag is a list of macros of the form: name
+# or name=definition (no spaces). If the definition and the = are
+# omitted, =1 is assumed. To prevent a macro definition from being
+# undefined via #undef or recursively expanded, use the := operator
+# instead of the = operator.
+
+PREDEFINED             =
+
+# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then
+# this tag can be used to specify a list of macro names that should be expanded.
+# The macro definition that is found in the sources will be used.
+# Use the PREDEFINED tag if you want to use a different macro definition.
+
+EXPAND_AS_DEFINED      =
+
+# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then
+# doxygen's preprocessor will remove all function-like macros that are alone
+# on a line, have an all uppercase name, and do not end with a semicolon. Such
+# function macros are typically used for boiler-plate code, and will confuse
+# the parser if not removed.
+
+SKIP_FUNCTION_MACROS   = YES
+
+#---------------------------------------------------------------------------
+# Configuration::additions related to external references
+#---------------------------------------------------------------------------
+
+# The TAGFILES option can be used to specify one or more tagfiles.
+# Optionally an initial location of the external documentation
+# can be added for each tagfile. The format of a tag file without
+# this location is as follows:
+#
+# TAGFILES = file1 file2 ...
+# Adding a location for the tag files is done as follows:
+#
+# TAGFILES = file1=loc1 "file2 = loc2" ...
+# where "loc1" and "loc2" can be relative or absolute paths or
+# URLs. If a location is present for each tag, the installdox tool
+# does not have to be run to correct the links.
+# Note that each tag file must have a unique name
+# (where the name does NOT include the path).
+# If a tag file is not located in the directory in which doxygen
+# is run, you must also specify the path to the tagfile here.
+
+TAGFILES               =
+
+# When a file name is specified after GENERATE_TAGFILE, doxygen will create
+# a tag file that is based on the input files it reads.
+
+GENERATE_TAGFILE       =
+
+# If the ALLEXTERNALS tag is set to YES all external classes will be listed
+# in the class index. If set to NO only the inherited external classes
+# will be listed.
+
+ALLEXTERNALS           = NO
+
+# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed
+# in the modules index. If set to NO, only the current project's groups will
+# be listed.
+
+EXTERNAL_GROUPS        = YES
+
+# The PERL_PATH should be the absolute path and name of the perl script
+# interpreter (i.e. the result of `which perl').
+
+PERL_PATH              = /usr/bin/perl
+
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+
+# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will
+# generate an inheritance diagram (in HTML, RTF and LaTeX) for classes with base
+# or super classes. Setting the tag to NO turns the diagrams off. Note that
+# this option is superseded by the HAVE_DOT option below. This is only a
+# fallback. It is recommended to install and use dot, since it yields more
+# powerful graphs.
+
+CLASS_DIAGRAMS         = YES
+
+# You can define message sequence charts within doxygen comments using the \msc
+# command. Doxygen will then run the mscgen tool (see
+# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
+# documentation. The MSCGEN_PATH tag allows you to specify the directory where
+# the mscgen tool resides. If left empty the tool is assumed to be found in the
+# default search path.
+
+MSCGEN_PATH            =
+
+# If set to YES, the inheritance and collaboration graphs will hide
+# inheritance and usage relations if the target is undocumented
+# or is not a class.
+
+HIDE_UNDOC_RELATIONS   = YES
+
+# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
+# available from the path. This tool is part of Graphviz, a graph visualization
+# toolkit from AT&T and Lucent Bell Labs. The other options in this section
+# have no effect if this option is set to NO (the default).
+
+HAVE_DOT               = NO
+
+# The DOT_NUM_THREADS tag specifies the number of dot invocations doxygen is
+# allowed to run in parallel. When set to 0 (the default) doxygen will
+# base this on the number of processors available in the system. You can set it
+# explicitly to a value larger than 0 to get control over the balance
+# between CPU load and processing speed.
+
+DOT_NUM_THREADS        = 0
+
+# By default doxygen will write a font called FreeSans.ttf to the output
+# directory and reference it in all dot files that doxygen generates. This
+# font does not include all possible Unicode characters, however, so when you
+# need these (or just want a different-looking font) you can specify the font
+# name using DOT_FONTNAME. You need to make sure dot is able to find the font,
+# which can be done by putting it in a standard location or by setting the
+# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory
+# containing the font.
+
+DOT_FONTNAME           = FreeSans.ttf
+
+# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs.
+# The default size is 10pt.
+
+DOT_FONTSIZE           = 10
+
+# By default doxygen will tell dot to use the output directory to look for the
+# FreeSans.ttf font (which doxygen will put there itself). If you specify a
+# different font using DOT_FONTNAME you can set the path where dot
+# can find it using this tag.
+
+DOT_FONTPATH           =
+
+# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for each documented class showing the direct and
+# indirect inheritance relations. Setting this tag to YES will force the
+# CLASS_DIAGRAMS tag to NO.
+
+CLASS_GRAPH            = YES
+
+# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for each documented class showing the direct and
+# indirect implementation dependencies (inheritance, containment, and
+# class reference variables) of the class with other documented classes.
+
+COLLABORATION_GRAPH    = YES
+
+# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for groups, showing the direct group dependencies.
+
+GROUP_GRAPHS           = YES
+
+# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
+# Language.
+
+UML_LOOK               = NO
+
+# If set to YES, the inheritance and collaboration graphs will show the
+# relations between templates and their instances.
+
+TEMPLATE_RELATIONS     = NO
+
+# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
+# tags are set to YES then doxygen will generate a graph for each documented
+# file showing the direct and indirect include dependencies of the file with
+# other documented files.
+
+INCLUDE_GRAPH          = YES
+
+# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
+# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
+# documented header file showing the documented files that directly or
+# indirectly include this file.
+
+INCLUDED_BY_GRAPH      = YES
+
+# If the CALL_GRAPH and HAVE_DOT options are set to YES then
+# doxygen will generate a call dependency graph for every global function
+# or class method. Note that enabling this option will significantly increase
+# the time of a run. So in most cases it will be better to enable call graphs
+# for selected functions only using the \callgraph command.
+
+CALL_GRAPH             = NO
+
+# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then
+# doxygen will generate a caller dependency graph for every global function
+# or class method. Note that enabling this option will significantly increase
+# the time of a run. So in most cases it will be better to enable caller
+# graphs for selected functions only using the \callergraph command.
+
+CALLER_GRAPH           = NO
+
+# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
+# will generate a graphical hierarchy of all classes instead of a textual one.
+
+GRAPHICAL_HIERARCHY    = YES
+
+# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES
+# then doxygen will show the dependencies a directory has on other directories
+# in a graphical way. The dependency relations are determined by the #include
+# relations between the files in the directories.
+
+DIRECTORY_GRAPH        = YES
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot. Possible values are png, jpg, or gif.
+# If left blank png will be used.
+
+DOT_IMAGE_FORMAT       = png
+
+# The tag DOT_PATH can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+
+DOT_PATH               =
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the
+# \dotfile command).
+
+DOTFILE_DIRS           =
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of
+# nodes that will be shown in the graph. If the number of nodes in a graph
+# becomes larger than this value, doxygen will truncate the graph, which is
+# visualized by representing a node as a red box. Note that if the
+# number of direct children of the root node in a graph is already larger than
+# DOT_GRAPH_MAX_NODES then doxygen will not show the graph at all. Also note
+# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+
+DOT_GRAPH_MAX_NODES    = 50
+
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the
+# graphs generated by dot. A depth value of 3 means that only nodes reachable
+# from the root by following a path via at most 3 edges will be shown. Nodes
+# that lie further from the root node will be omitted. Note that setting this
+# option to 1 or 2 may greatly reduce the computation time needed for large
+# code bases. Also note that the size of a graph can be further restricted by
+# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
+
+MAX_DOT_GRAPH_DEPTH    = 0
+
+# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
+# background. This is disabled by default, because dot on Windows does not
+# seem to support this out of the box. Warning: Depending on the platform used,
+# enabling this option may lead to badly anti-aliased labels on the edges of
+# a graph (i.e. they become hard to read).
+
+DOT_TRANSPARENT        = NO
+
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10)
+# support this, this feature is disabled by default.
+
+DOT_MULTI_TARGETS      = YES
+
+# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will
+# generate a legend page explaining the meaning of the various boxes and
+# arrows in the dot generated graphs.
+
+GENERATE_LEGEND        = YES
+
+# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will
+# remove the intermediate dot files that are used to generate
+# the various graphs.
+
+DOT_CLEANUP            = YES
diff --git a/lang/csharp/Avro.nunit b/lang/csharp/Avro.nunit
new file mode 100644
index 0000000..501a27a
--- /dev/null
+++ b/lang/csharp/Avro.nunit
@@ -0,0 +1,17 @@
+<NUnitProject>
+  <Settings activeconfig="Release" />
+  <Config name="Debug" binpathtype="Auto">
+    <assembly path="build/test/Debug/Avro.test.dll" />
+    <assembly path="build/test/Debug/Avro.dll" />
+    <assembly path="lib/main/Castle.Core.dll" />
+    <assembly path="lib/main/Newtonsoft.Json.dll" />
+    <assembly path="lib/test/nunit.framework.dll" />
+  </Config>
+  <Config name="Release" binpathtype="Auto">
+    <assembly path="build/test/Release/Avro.test.dll" />
+    <assembly path="build/test/Release/Avro.dll" />
+    <assembly path="lib/main/Castle.Core.dll" />
+    <assembly path="lib/main/Newtonsoft.Json.dll" />
+    <assembly path="lib/test/nunit.framework.dll" />
+  </Config>
+</NUnitProject>
diff --git a/lang/csharp/Avro.sln b/lang/csharp/Avro.sln
new file mode 100644
index 0000000..58687b2
--- /dev/null
+++ b/lang/csharp/Avro.sln
@@ -0,0 +1,95 @@
+
+Microsoft Visual Studio Solution File, Format Version 11.00
+# Visual Studio 2010
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Avro.main", "src\apache\main\Avro.main.csproj", "{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Avro.test", "src\apache\test\Avro.test.csproj", "{911D56AB-587B-4E5F-B5EA-D47D8A46F1FA}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Avro.codegen", "src\apache\codegen\Avro.codegen.csproj", "{BF0D313C-1AA3-4900-B277-B0F5F9DDCDA8}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Avro.ipc", "src\apache\ipc\Avro.ipc.csproj", "{3B05043A-DC6C-49B6-85BF-9AB055D0B414}"
+	ProjectSection(ProjectDependencies) = postProject
+		{AEB22F94-4ECF-4008-B159-389B3F05D54B} = {AEB22F94-4ECF-4008-B159-389B3F05D54B}
+	EndProjectSection
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Avro.msbuild", "src\apache\msbuild\Avro.msbuild.csproj", "{AEB22F94-4ECF-4008-B159-389B3F05D54B}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Avro.perf", "src\apache\perf\Avro.perf.csproj", "{AC4E1909-2594-4D01-9B2B-B832C07BAFE5}"
+EndProject
+Global
+	GlobalSection(SolutionConfigurationPlatforms) = preSolution
+		Debug|Any CPU = Debug|Any CPU
+		Debug|Mixed Platforms = Debug|Mixed Platforms
+		Debug|x86 = Debug|x86
+		Release|Any CPU = Release|Any CPU
+		Release|Mixed Platforms = Release|Mixed Platforms
+		Release|x86 = Release|x86
+	EndGlobalSection
+	GlobalSection(ProjectConfigurationPlatforms) = postSolution
+		{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
+		{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
+		{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}.Debug|x86.ActiveCfg = Debug|Any CPU
+		{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}.Release|Any CPU.Build.0 = Release|Any CPU
+		{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
+		{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}.Release|Mixed Platforms.Build.0 = Release|Any CPU
+		{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}.Release|x86.ActiveCfg = Release|Any CPU
+		{911D56AB-587B-4E5F-B5EA-D47D8A46F1FA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{911D56AB-587B-4E5F-B5EA-D47D8A46F1FA}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{911D56AB-587B-4E5F-B5EA-D47D8A46F1FA}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
+		{911D56AB-587B-4E5F-B5EA-D47D8A46F1FA}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
+		{911D56AB-587B-4E5F-B5EA-D47D8A46F1FA}.Debug|x86.ActiveCfg = Debug|Any CPU
+		{911D56AB-587B-4E5F-B5EA-D47D8A46F1FA}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{911D56AB-587B-4E5F-B5EA-D47D8A46F1FA}.Release|Any CPU.Build.0 = Release|Any CPU
+		{911D56AB-587B-4E5F-B5EA-D47D8A46F1FA}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
+		{911D56AB-587B-4E5F-B5EA-D47D8A46F1FA}.Release|Mixed Platforms.Build.0 = Release|Any CPU
+		{911D56AB-587B-4E5F-B5EA-D47D8A46F1FA}.Release|x86.ActiveCfg = Release|Any CPU
+		{BF0D313C-1AA3-4900-B277-B0F5F9DDCDA8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{BF0D313C-1AA3-4900-B277-B0F5F9DDCDA8}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{BF0D313C-1AA3-4900-B277-B0F5F9DDCDA8}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
+		{BF0D313C-1AA3-4900-B277-B0F5F9DDCDA8}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
+		{BF0D313C-1AA3-4900-B277-B0F5F9DDCDA8}.Debug|x86.ActiveCfg = Debug|x86
+		{BF0D313C-1AA3-4900-B277-B0F5F9DDCDA8}.Debug|x86.Build.0 = Debug|x86
+		{BF0D313C-1AA3-4900-B277-B0F5F9DDCDA8}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{BF0D313C-1AA3-4900-B277-B0F5F9DDCDA8}.Release|Any CPU.Build.0 = Release|Any CPU
+		{BF0D313C-1AA3-4900-B277-B0F5F9DDCDA8}.Release|Mixed Platforms.ActiveCfg = Release|x86
+		{BF0D313C-1AA3-4900-B277-B0F5F9DDCDA8}.Release|Mixed Platforms.Build.0 = Release|x86
+		{BF0D313C-1AA3-4900-B277-B0F5F9DDCDA8}.Release|x86.ActiveCfg = Release|x86
+		{BF0D313C-1AA3-4900-B277-B0F5F9DDCDA8}.Release|x86.Build.0 = Release|x86
+		{3B05043A-DC6C-49B6-85BF-9AB055D0B414}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{3B05043A-DC6C-49B6-85BF-9AB055D0B414}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{3B05043A-DC6C-49B6-85BF-9AB055D0B414}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
+		{3B05043A-DC6C-49B6-85BF-9AB055D0B414}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
+		{3B05043A-DC6C-49B6-85BF-9AB055D0B414}.Debug|x86.ActiveCfg = Debug|Any CPU
+		{3B05043A-DC6C-49B6-85BF-9AB055D0B414}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{3B05043A-DC6C-49B6-85BF-9AB055D0B414}.Release|Any CPU.Build.0 = Release|Any CPU
+		{3B05043A-DC6C-49B6-85BF-9AB055D0B414}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
+		{3B05043A-DC6C-49B6-85BF-9AB055D0B414}.Release|Mixed Platforms.Build.0 = Release|Any CPU
+		{3B05043A-DC6C-49B6-85BF-9AB055D0B414}.Release|x86.ActiveCfg = Release|Any CPU
+		{AEB22F94-4ECF-4008-B159-389B3F05D54B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{AEB22F94-4ECF-4008-B159-389B3F05D54B}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{AEB22F94-4ECF-4008-B159-389B3F05D54B}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
+		{AEB22F94-4ECF-4008-B159-389B3F05D54B}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
+		{AEB22F94-4ECF-4008-B159-389B3F05D54B}.Debug|x86.ActiveCfg = Debug|Any CPU
+		{AEB22F94-4ECF-4008-B159-389B3F05D54B}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{AEB22F94-4ECF-4008-B159-389B3F05D54B}.Release|Any CPU.Build.0 = Release|Any CPU
+		{AEB22F94-4ECF-4008-B159-389B3F05D54B}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
+		{AEB22F94-4ECF-4008-B159-389B3F05D54B}.Release|Mixed Platforms.Build.0 = Release|Any CPU
+		{AEB22F94-4ECF-4008-B159-389B3F05D54B}.Release|x86.ActiveCfg = Release|Any CPU
+		{AC4E1909-2594-4D01-9B2B-B832C07BAFE5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+		{AC4E1909-2594-4D01-9B2B-B832C07BAFE5}.Debug|Any CPU.Build.0 = Debug|Any CPU
+		{AC4E1909-2594-4D01-9B2B-B832C07BAFE5}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
+		{AC4E1909-2594-4D01-9B2B-B832C07BAFE5}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
+		{AC4E1909-2594-4D01-9B2B-B832C07BAFE5}.Debug|x86.ActiveCfg = Debug|Any CPU
+		{AC4E1909-2594-4D01-9B2B-B832C07BAFE5}.Release|Any CPU.ActiveCfg = Release|Any CPU
+		{AC4E1909-2594-4D01-9B2B-B832C07BAFE5}.Release|Any CPU.Build.0 = Release|Any CPU
+		{AC4E1909-2594-4D01-9B2B-B832C07BAFE5}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
+		{AC4E1909-2594-4D01-9B2B-B832C07BAFE5}.Release|Mixed Platforms.Build.0 = Release|Any CPU
+		{AC4E1909-2594-4D01-9B2B-B832C07BAFE5}.Release|x86.ActiveCfg = Release|Any CPU
+	EndGlobalSection
+	GlobalSection(SolutionProperties) = preSolution
+		HideSolutionNode = FALSE
+	EndGlobalSection
+EndGlobal
diff --git a/lang/csharp/Avro.snk b/lang/csharp/Avro.snk
new file mode 100644
index 0000000..ad2c010
Binary files /dev/null and b/lang/csharp/Avro.snk differ
diff --git a/lang/csharp/README b/lang/csharp/README
new file mode 100644
index 0000000..a9ca8af
--- /dev/null
+++ b/lang/csharp/README
@@ -0,0 +1,36 @@
+1. Building Avro for .NET
+
+a. Windows
+
+Prerequisites
+    Microsoft Visual C# Express 2010
+
+To build, open the Avro.sln solution in VC# Express 2010 and build from there.
+
+b. Linux
+
+Prerequisites
+    Mono 2.6 or above.
+
+Ubuntu Lucid Lynx ships with Mono 2.4. To install Mono 2.6,
+follow the procedure described at http://badgerports.org/
+
+To build, issue the commands:
+$ cd lang/csharp
+
+then
+
+$ CONFIGURATION=Release TARGETFRAMEWORKVERSION=3.5 xbuild Avro.sln
+
+or 
+
+$ CONFIGURATION=Debug TARGETFRAMEWORKVERSION=3.5 xbuild Avro.sln
+
+Note: On Ubuntu 12.04 (Precise Pangolin), omit the TARGETFRAMEWORKVERSION
+argument and use the default.
+
+2. Unit tests
+
+This is a common procedure for both Windows and Linux.
+
+To run the unit tests, install NUnit 2.5 or above. From within the NUnit
+GUI, open the project Avro.nunit and run the tests.
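
For illustration, a minimal NUnit test against the built Avro.dll might look
like the sketch below. The test class and schema literal are hypothetical and
not part of this commit; only Schema.Parse, the same entry point avrogen uses,
is assumed.

    using Avro;                 // build/.../Avro.dll
    using NUnit.Framework;      // lib/test/nunit.framework.dll

    namespace Avro.test
    {
        [TestFixture]
        public class SchemaSmokeTest   // hypothetical example
        {
            [Test]
            public void ParsesPrimitiveSchema()
            {
                // Parse the simplest legal Avro schema, a primitive type.
                Schema schema = Schema.Parse("\"string\"");
                Assert.IsNotNull(schema);
            }
        }
    }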
diff --git a/lang/csharp/build.sh b/lang/csharp/build.sh
new file mode 100755
index 0000000..bab086c
--- /dev/null
+++ b/lang/csharp/build.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e						  # exit on error
+set -x						  # echo commands
+
+cd `dirname "$0"`				  # connect to root
+
+ROOT=../..
+VERSION=`cat $ROOT/share/VERSION.txt`
+
+export CONFIGURATION=Release
+export TARGETFRAMEWORKVERSION=v3.5
+
+case "$1" in
+
+    test)
+	xbuild
+	nunit-console -framework=4.0 Avro.nunit
+	;;
+
+    perf)
+	xbuild
+	mono build/perf/Release/Avro.perf.exe
+	;;
+
+    dist)
+        # build binary tarball
+	xbuild
+	mkdir -p $ROOT/dist
+        (cd build; tar czf $ROOT/../dist/avro-csharp-$VERSION.tar.gz main codegen ipc)
+
+        # build documentation
+        doxygen Avro.dox
+	mkdir -p $ROOT/build/avro-doc-$VERSION/api/csharp
+        cp -pr build/doc/* $ROOT/build/avro-doc-$VERSION/api/csharp
+	;;
+
+    clean)
+	rm -rf src/apache/{main,test,codegen,ipc}/obj
+        rm -rf build
+	;;
+
+    *)
+        echo "Usage: $0 {test|clean|dist|perf}"
+        exit 1
+esac
+
+exit 0
diff --git a/lang/csharp/lib/main/Castle.Core.dll b/lang/csharp/lib/main/Castle.Core.dll
new file mode 100644
index 0000000..ccc7d5f
Binary files /dev/null and b/lang/csharp/lib/main/Castle.Core.dll differ
diff --git a/lang/csharp/lib/main/Newtonsoft.Json.dll b/lang/csharp/lib/main/Newtonsoft.Json.dll
new file mode 100644
index 0000000..4703b89
Binary files /dev/null and b/lang/csharp/lib/main/Newtonsoft.Json.dll differ
diff --git a/lang/csharp/lib/main/log4net.dll b/lang/csharp/lib/main/log4net.dll
new file mode 100644
index 0000000..ffc57e1
Binary files /dev/null and b/lang/csharp/lib/main/log4net.dll differ
diff --git a/lang/csharp/lib/test/nunit.framework.dll b/lang/csharp/lib/test/nunit.framework.dll
new file mode 100644
index 0000000..639dbb0
Binary files /dev/null and b/lang/csharp/lib/test/nunit.framework.dll differ
diff --git a/lang/csharp/src/apache/codegen/Avro.codegen.csproj b/lang/csharp/src/apache/codegen/Avro.codegen.csproj
new file mode 100644
index 0000000..695929e
--- /dev/null
+++ b/lang/csharp/src/apache/codegen/Avro.codegen.csproj
@@ -0,0 +1,162 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+    <ProductVersion>8.0.30703</ProductVersion>
+    <SchemaVersion>2.0</SchemaVersion>
+    <ProjectGuid>{BF0D313C-1AA3-4900-B277-B0F5F9DDCDA8}</ProjectGuid>
+    <OutputType>Exe</OutputType>
+    <AppDesignerFolder>Properties</AppDesignerFolder>
+    <RootNamespace>Avro.codegen</RootNamespace>
+    <AssemblyName>avrogen</AssemblyName>
+    <TargetFrameworkVersion Condition=" '$(TargetFrameworkVersion)' == '' ">v3.5</TargetFrameworkVersion>
+    <FileAlignment>512</FileAlignment>
+    <IsWebBootstrapper>false</IsWebBootstrapper>
+    <FileUpgradeFlags>
+    </FileUpgradeFlags>
+    <OldToolsVersion>3.5</OldToolsVersion>
+    <UpgradeBackupLocation />
+    <PublishUrl>publish\</PublishUrl>
+    <Install>true</Install>
+    <InstallFrom>Disk</InstallFrom>
+    <UpdateEnabled>false</UpdateEnabled>
+    <UpdateMode>Foreground</UpdateMode>
+    <UpdateInterval>7</UpdateInterval>
+    <UpdateIntervalUnits>Days</UpdateIntervalUnits>
+    <UpdatePeriodically>false</UpdatePeriodically>
+    <UpdateRequired>false</UpdateRequired>
+    <MapFileExtensions>true</MapFileExtensions>
+    <ApplicationRevision>0</ApplicationRevision>
+    <ApplicationVersion>1.0.0.%2a</ApplicationVersion>
+    <UseApplicationTrust>false</UseApplicationTrust>
+    <BootstrapperEnabled>true</BootstrapperEnabled>
+    <TargetFrameworkProfile />
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|x86' ">
+    <PlatformTarget>AnyCPU</PlatformTarget>
+    <DebugSymbols>true</DebugSymbols>
+    <DebugType>full</DebugType>
+    <Optimize>false</Optimize>
+    <OutputPath>..\..\..\build\codegen\Debug\</OutputPath>
+    <DefineConstants>DEBUG;TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+    <PlatformTarget>AnyCPU</PlatformTarget>
+    <DebugType>none</DebugType>
+    <Optimize>true</Optimize>
+    <OutputPath>..\..\..\build\codegen\Release\</OutputPath>
+    <DefineConstants>
+    </DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Debug|AnyCPU'">
+    <DebugSymbols>true</DebugSymbols>
+    <OutputPath>..\..\..\build\codegen\Debug\</OutputPath>
+    <DefineConstants>DEBUG;TRACE</DefineConstants>
+    <DebugType>full</DebugType>
+    <PlatformTarget>AnyCPU</PlatformTarget>
+    <CodeAnalysisLogFile>..\..\..\build\codegen\Debug\avrogen.exe.CodeAnalysisLog.xml</CodeAnalysisLogFile>
+    <CodeAnalysisUseTypeNameInSuppression>true</CodeAnalysisUseTypeNameInSuppression>
+    <CodeAnalysisModuleSuppressionsFile>GlobalSuppressions.cs</CodeAnalysisModuleSuppressionsFile>
+    <ErrorReport>prompt</ErrorReport>
+    <CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
+    <CodeAnalysisRuleSetDirectories>;C:\Program Files\Microsoft Visual Studio 10.0\Team Tools\Static Analysis Tools\\Rule Sets</CodeAnalysisRuleSetDirectories>
+    <CodeAnalysisIgnoreBuiltInRuleSets>true</CodeAnalysisIgnoreBuiltInRuleSets>
+    <CodeAnalysisRuleDirectories>;C:\Program Files\Microsoft Visual Studio 10.0\Team Tools\Static Analysis Tools\FxCop\\Rules</CodeAnalysisRuleDirectories>
+    <CodeAnalysisIgnoreBuiltInRules>true</CodeAnalysisIgnoreBuiltInRules>
+    <GenerateSerializationAssemblies>Off</GenerateSerializationAssemblies>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|AnyCPU'">
+    <OutputPath>..\..\..\build\codegen\Release\</OutputPath>
+    <DefineConstants>
+    </DefineConstants>
+    <Optimize>true</Optimize>
+    <DebugType>none</DebugType>
+    <PlatformTarget>AnyCPU</PlatformTarget>
+    <CodeAnalysisLogFile>..\..\..\build\codegen\Release\avrogen.exe.CodeAnalysisLog.xml</CodeAnalysisLogFile>
+    <CodeAnalysisUseTypeNameInSuppression>true</CodeAnalysisUseTypeNameInSuppression>
+    <CodeAnalysisModuleSuppressionsFile>GlobalSuppressions.cs</CodeAnalysisModuleSuppressionsFile>
+    <ErrorReport>prompt</ErrorReport>
+    <CodeAnalysisRuleSet>MinimumRecommendedRules.ruleset</CodeAnalysisRuleSet>
+    <CodeAnalysisRuleSetDirectories>;C:\Program Files\Microsoft Visual Studio 10.0\Team Tools\Static Analysis Tools\\Rule Sets</CodeAnalysisRuleSetDirectories>
+    <CodeAnalysisIgnoreBuiltInRuleSets>false</CodeAnalysisIgnoreBuiltInRuleSets>
+    <CodeAnalysisRuleDirectories>;C:\Program Files\Microsoft Visual Studio 10.0\Team Tools\Static Analysis Tools\FxCop\\Rules</CodeAnalysisRuleDirectories>
+    <CodeAnalysisIgnoreBuiltInRules>false</CodeAnalysisIgnoreBuiltInRules>
+    <GenerateSerializationAssemblies>Off</GenerateSerializationAssemblies>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)' == 'Release|x86'">
+    <OutputPath>..\..\..\build\codegen\Release\</OutputPath>
+  </PropertyGroup>
+  <PropertyGroup>
+    <SignAssembly>true</SignAssembly>
+  </PropertyGroup>
+  <PropertyGroup>
+    <AssemblyOriginatorKeyFile>..\..\..\Avro.snk</AssemblyOriginatorKeyFile>
+  </PropertyGroup>
+  <ItemGroup>
+    <Reference Include="System" />
+    <Reference Include="System.Core" />
+    <Reference Include="System.Xml.Linq" />
+    <Reference Include="System.Data.DataSetExtensions" />
+    <Reference Include="System.Data" />
+    <Reference Include="System.Xml" />
+  </ItemGroup>
+  <ItemGroup>
+    <Compile Include="AvroGen.cs" />
+    <Compile Include="Properties\AssemblyInfo.cs" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="..\main\Avro.main.csproj">
+      <Project>{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}</Project>
+      <Name>Avro.main</Name>
+    </ProjectReference>
+  </ItemGroup>
+  <ItemGroup>
+    <BootstrapperPackage Include=".NETFramework,Version=v3.5">
+      <Visible>False</Visible>
+      <ProductName>Microsoft .NET Framework 3.5 %28x86 and x64%29</ProductName>
+      <Install>true</Install>
+    </BootstrapperPackage>
+    <BootstrapperPackage Include="Microsoft.Net.Framework.3.5.SP1">
+      <Visible>False</Visible>
+      <ProductName>.NET Framework 3.5 SP1</ProductName>
+      <Install>false</Install>
+    </BootstrapperPackage>
+    <BootstrapperPackage Include="Microsoft.Windows.Installer.3.1">
+      <Visible>False</Visible>
+      <ProductName>Windows Installer 3.1</ProductName>
+      <Install>true</Install>
+    </BootstrapperPackage>
+  </ItemGroup>
+  <ItemGroup>
+    <None Include="app.config" />
+  </ItemGroup>
+  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+  <!-- To modify your build process, add your task inside one of the targets below and uncomment it. 
+       Other similar extension points exist, see Microsoft.Common.targets.
+  <Target Name="BeforeBuild">
+  </Target>
+  <Target Name="AfterBuild">
+  </Target>
+  -->
+</Project>
\ No newline at end of file
diff --git a/lang/csharp/src/apache/codegen/AvroGen.cs b/lang/csharp/src/apache/codegen/AvroGen.cs
new file mode 100644
index 0000000..033b840
--- /dev/null
+++ b/lang/csharp/src/apache/codegen/AvroGen.cs
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Avro
+{
+    class AvroGen
+    {
+        static void Main(string[] args)
+        {
+            if (args.Length != 3)
+            {
+                Usage();
+                return;
+            }
+            if (args[0] == "-p")
+                GenProtocol(args[1], args[2]);
+            else if (args[0] == "-s")
+                GenSchema(args[1], args[2]);
+            else
+                Usage();
+        }
+
+        static void Usage()
+        {
+            Console.WriteLine("Usage:\navrogen -p <protocolfile> <outputdir>\navrogen -s <schemafile> <outputdir>");
+        }
+        static void GenProtocol(string infile, string outdir)
+        {
+            try
+            {
+                string text = System.IO.File.ReadAllText(infile);
+                Protocol protocol = Protocol.Parse(text);
+
+                CodeGen codegen = new CodeGen();
+                codegen.AddProtocol(protocol);
+
+                codegen.GenerateCode();
+                codegen.WriteTypes(outdir);
+            }
+            catch (Exception ex)
+            {
+                Console.WriteLine("Exception occurred. " + ex.Message);
+            }
+        }
+        static void GenSchema(string infile, string outdir)
+        {
+            try
+            {
+                string text = System.IO.File.ReadAllText(infile);
+                Schema schema = Schema.Parse(text);
+
+                CodeGen codegen = new CodeGen();
+                codegen.AddSchema(schema);
+
+                codegen.GenerateCode();
+                codegen.WriteTypes(outdir);
+            }
+            catch (Exception ex)
+            {
+                Console.WriteLine("Exception occurred. " + ex.Message);
+            }
+        }
+    }
+}
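
The avrogen tool above is a thin command-line wrapper over the CodeGen API. A
minimal sketch of driving that API directly, using only the calls already
present in GenSchema (the input file name and output directory below are
hypothetical):

    using Avro;

    class GenerateFromSchema
    {
        static void Main()
        {
            // Same pipeline as AvroGen.GenSchema, inlined:
            string text = System.IO.File.ReadAllText("user.avsc"); // hypothetical input
            Schema schema = Schema.Parse(text);

            CodeGen codegen = new CodeGen();
            codegen.AddSchema(schema);
            codegen.GenerateCode();
            codegen.WriteTypes("generated");  // hypothetical output directory
        }
    }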
diff --git a/lang/csharp/src/apache/codegen/Properties/AssemblyInfo.cs b/lang/csharp/src/apache/codegen/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..0eeb236
--- /dev/null
+++ b/lang/csharp/src/apache/codegen/Properties/AssemblyInfo.cs
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System.Reflection;
+using System.Runtime.InteropServices;
+
+[assembly: AssemblyTitle("Avro.codegen")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("Apache")]
+[assembly: AssemblyProduct("Avro.codegen")]
+[assembly: AssemblyCopyright("Copyright © Apache 2013")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+[assembly: ComVisible(false)]
+[assembly: Guid("3C23DD33-DD4F-42B1-B71F-8F9C86929E58")]
+[assembly: AssemblyVersion("0.9.0.0")]
+[assembly: AssemblyFileVersion("0.9.0.0")]
\ No newline at end of file
diff --git a/lang/csharp/src/apache/codegen/app.config b/lang/csharp/src/apache/codegen/app.config
new file mode 100755
index 0000000..ae24790
--- /dev/null
+++ b/lang/csharp/src/apache/codegen/app.config
@@ -0,0 +1,19 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+<startup><supportedRuntime version="v2.0.50727"/></startup></configuration>
diff --git a/lang/csharp/src/apache/ipc/Avro.ipc.csproj b/lang/csharp/src/apache/ipc/Avro.ipc.csproj
new file mode 100644
index 0000000..9d738c9
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/Avro.ipc.csproj
@@ -0,0 +1,108 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+    <ProjectGuid>{3B05043A-DC6C-49B6-85BF-9AB055D0B414}</ProjectGuid>
+    <OutputType>Library</OutputType>
+    <AppDesignerFolder>Properties</AppDesignerFolder>
+    <RootNamespace>Avro.ipc</RootNamespace>
+    <AssemblyName>Avro.ipc</AssemblyName>
+    <TargetFrameworkVersion>v3.5</TargetFrameworkVersion>
+    <FileAlignment>512</FileAlignment>
+    <TargetFrameworkProfile />
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+    <DebugSymbols>true</DebugSymbols>
+    <DebugType>full</DebugType>
+    <Optimize>false</Optimize>
+    <OutputPath>..\..\..\build\ipc\Debug\</OutputPath>
+    <DefineConstants>DEBUG;TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+    <CodeAnalysisRuleSet>AllRules.ruleset</CodeAnalysisRuleSet>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+    <DebugType>pdbonly</DebugType>
+    <Optimize>true</Optimize>
+    <OutputPath>..\..\..\build\ipc\Release\</OutputPath>
+    <DefineConstants>TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <PropertyGroup>
+    <SignAssembly>true</SignAssembly>
+  </PropertyGroup>
+  <PropertyGroup>
+    <AssemblyOriginatorKeyFile>..\..\..\Avro.snk</AssemblyOriginatorKeyFile>
+  </PropertyGroup>
+  <ItemGroup>
+    <Reference Include="Castle.Core">
+      <HintPath>..\..\..\lib\main\Castle.Core.dll</HintPath>
+    </Reference>
+    <Reference Include="log4net">
+      <HintPath>..\..\..\lib\main\log4net.dll</HintPath>
+    </Reference>
+    <Reference Include="System" />
+    <Reference Include="System.Core" />
+  </ItemGroup>
+  <ItemGroup>
+    <Compile Include="CallFuture.cs" />
+    <Compile Include="CountdownLatch.cs" />
+    <Compile Include="Generic\GenericRequestor.cs" />
+    <Compile Include="Generic\GenericResponder.cs" />
+    <Compile Include="HttpListenerServer.cs" />
+    <Compile Include="HttpTransceiver.cs" />
+    <Compile Include="OutputStream.cs" />
+    <Compile Include="RpcRequest.cs" />
+    <Compile Include="SocketServer.cs" />
+    <Compile Include="Specific\SpecificResponder.cs" />
+    <Compile Include="Specific\SpecificRequestor.cs" />
+    <Compile Include="Transceiver.cs" />
+    <Compile Include="LocalTransceiver.cs" />
+    <Compile Include="org\apache\avro\ipc\HandshakeMatch.cs" />
+    <Compile Include="org\apache\avro\ipc\HandshakeRequest.cs" />
+    <Compile Include="org\apache\avro\ipc\HandshakeResponse.cs" />
+    <Compile Include="org\apache\avro\ipc\MD5.cs" />
+    <Compile Include="Properties\AssemblyInfo.cs" />
+    <Compile Include="Requestor.cs" />
+    <Compile Include="Responder.cs" />
+    <Compile Include="RpcContext.cs" />
+    <Compile Include="SocketTransceiver.cs" />
+  </ItemGroup>
+  <ItemGroup />
+  <ItemGroup>
+    <ProjectReference Include="..\main\Avro.main.csproj">
+      <Project>{a0a5ca3c-f58c-4d07-98b0-2c7b62ab20f0}</Project>
+      <Name>Avro.main</Name>
+    </ProjectReference>
+  </ItemGroup>
+  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+  <!--<UsingTask AssemblyFile="..\..\..\build\msbuild\$(Configuration)\Avro.msbuild.dll" TaskName="Avro.msbuild.AvroBuildTask" />-->
+  <Target Name="BeforeBuild">
+    <!--<AvroBuildTask OutDir="..\ipc\" SchemaFiles="..\..\..\..\..\share\schemas\org\apache\avro\ipc\HandshakeRequest.avsc;..\..\..\..\..\share\schemas\org\apache\avro\ipc\HandshakeResponse.avsc">
+    </AvroBuildTask>-->
+  </Target>
+  <!-- To modify your build process, add your task inside one of the targets below and uncomment it. 
+       Other similar extension points exist, see Microsoft.Common.targets.
+  
+  <Target Name="AfterBuild">
+  </Target>
+  -->
+</Project>
\ No newline at end of file
diff --git a/lang/csharp/src/apache/ipc/CallFuture.cs b/lang/csharp/src/apache/ipc/CallFuture.cs
new file mode 100644
index 0000000..15400d2
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/CallFuture.cs
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using Avro.IO;
+
+namespace Avro.ipc
+{
+    public class CallFuture<T> : ICallback<T>, IDisposable 
+    {
+        private readonly ICallback<T> chainedCallback;
+        private CountdownLatch latch = new CountdownLatch(1);
+
+        public CallFuture(ICallback<T> chainedCallback = null)
+        {
+            this.chainedCallback = chainedCallback;
+        }
+
+        public T Result { get; private set; }
+        public Exception Error { get; private set; }
+
+        public bool IsDone
+        {
+            get { return latch.CurrentCount == 0; }
+        }
+
+        public virtual void HandleResult(T result)
+        {
+            Result = result;
+            latch.Signal();
+            if (chainedCallback != null)
+            {
+                chainedCallback.HandleResult(result);
+            }
+        }
+
+        public virtual void HandleException(Exception exception)
+        {
+            Error = exception;
+            latch.Signal();
+            if (chainedCallback != null)
+            {
+                chainedCallback.HandleException(exception);
+            }
+        }
+
+        public T WaitForResult()
+        {
+            latch.Wait();
+            if (Error != null)
+            {
+                throw Error;
+            }
+            return Result;
+        }
+
+        public T WaitForResult(int millisecondsTimeout)
+        {
+            if (latch.Wait(millisecondsTimeout))
+            {
+                if (Error != null)
+                {
+                    throw Error;
+                }
+                return Result;
+            }
+
+            throw new TimeoutException();
+        }
+
+        public void Wait()
+        {
+            latch.Wait();
+        }
+
+        public void Wait(int millisecondsTimeout)
+        {
+            if (!latch.Wait(millisecondsTimeout))
+            {
+                throw new TimeoutException();
+            }
+        }
+
+        public void Dispose()
+        {
+            Dispose(true);
+            GC.SuppressFinalize(this);
+        }
+
+        ~CallFuture()
+        {
+            // Finalizer calls Dispose(false)
+            Dispose(false);
+        }
+
+        protected virtual void Dispose(bool disposing)
+        {
+            if (disposing)
+            {
+                // free managed resources
+                if (latch != null)
+                {
+                    latch.Dispose();
+                    latch = null;
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/ipc/CountdownLatch.cs b/lang/csharp/src/apache/ipc/CountdownLatch.cs
new file mode 100644
index 0000000..1fca100
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/CountdownLatch.cs
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Threading;
+
+namespace Avro.ipc
+{
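+    /// <summary>
+    /// A simple countdown latch built on ManualResetEvent (similar in spirit to
+    /// Java's CountDownLatch): Signal() decrements the count and, once it reaches
+    /// zero, releases all waiters.
+    /// </summary>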
+    public class CountdownLatch : IDisposable
+    {
+        private ManualResetEvent evt;
+        private int currentCount;
+
+        public int CurrentCount
+        {
+            get { return currentCount; }
+        }
+
+        public CountdownLatch(int count)
+        {
+            currentCount = count;
+            evt = new ManualResetEvent(false);
+        }
+
+        public void Signal()
+        {
+            if (Interlocked.Decrement(ref currentCount) == 0)
+                evt.Set();
+        }
+
+        public void Wait()
+        {
+            evt.WaitOne();
+        }
+
+        public bool Wait(int milliseconds)
+        {
+            return evt.WaitOne(milliseconds);
+        }
+
+        public void Dispose()
+        {
+            Dispose(true);
+            GC.SuppressFinalize(this);
+        }
+
+        ~CountdownLatch()
+        {
+            // Finalizer calls Dispose(false)
+            Dispose(false);
+        }
+
+        protected virtual void Dispose(bool disposing)
+        {
+            if (disposing)
+            {
+                // free managed resources
+                if (evt != null)
+                {
+                    evt.Close();
+                    evt = null;
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/ipc/Generic/GenericRequestor.cs b/lang/csharp/src/apache/ipc/Generic/GenericRequestor.cs
new file mode 100644
index 0000000..feb2512
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/Generic/GenericRequestor.cs
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using Avro.Generic;
+using Avro.IO;
+
+namespace Avro.ipc.Generic
+{
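+    /// <summary>
+    /// Requestor implementation that reads and writes requests, responses and
+    /// errors with the generic (schema-driven) datum reader/writer.
+    /// </summary>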
+    public class GenericRequestor : Requestor
+    {
+        public GenericRequestor(Transceiver transceiver, Protocol protocol)
+            : base(transceiver, protocol)
+        {
+        }
+
+        public override void WriteRequest(RecordSchema schema, object request, Encoder encoder)
+        {
+            new GenericWriter<object>(schema).Write(request, encoder);
+        }
+
+        public override object ReadResponse(Schema writer, Schema reader, Decoder decoder)
+        {
+            return new GenericReader<Object>(writer, reader).Read(null, decoder);
+        }
+
+        public override Exception ReadError(Schema writer, Schema reader, Decoder decoder)
+        {
+            object results = new GenericReader<Object>(writer, reader).Read(null, decoder);
+
+            return new Exception(results.ToString());
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/ipc/Generic/GenericResponder.cs b/lang/csharp/src/apache/ipc/Generic/GenericResponder.cs
new file mode 100644
index 0000000..9b2c142
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/Generic/GenericResponder.cs
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using Avro.Generic;
+using Decoder = Avro.IO.Decoder;
+using Encoder = Avro.IO.Encoder;
+
+namespace Avro.ipc.Generic
+{
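+    /// <summary>
+    /// Responder base class that deserializes requests and serializes responses
+    /// with the generic datum reader/writer; subclasses implement Respond().
+    /// </summary>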
+    public abstract class GenericResponder : Responder
+    {
+        protected GenericResponder(Protocol protocol)
+            : base(protocol)
+        {
+        }
+
+        protected static DatumWriter<Object> GetDatumWriter(Schema schema)
+        {
+            return new GenericWriter<Object>(schema);
+        }
+
+        protected static DatumReader<Object> GetDatumReader(Schema actual, Schema expected)
+        {
+            return new GenericReader<Object>(actual, expected);
+        }
+
+        public override object ReadRequest(Schema actual, Schema expected, Decoder input)
+        {
+            return GetDatumReader(actual, expected).Read(null, input);
+        }
+
+        public override void WriteResponse(Schema schema, object response, Encoder output)
+        {
+            GetDatumWriter(schema).Write(response, output);
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/ipc/HttpListenerServer.cs b/lang/csharp/src/apache/ipc/HttpListenerServer.cs
new file mode 100644
index 0000000..66de091
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/HttpListenerServer.cs
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Net;
+using System.IO;
+using System.Diagnostics;
+
+namespace Avro.ipc
+{
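+    /// <summary>
+    /// An Avro RPC server on top of System.Net.HttpListener. Requests must be
+    /// POSTs with content type avro/binary; the framed request buffers are read,
+    /// handed to the Responder, and the framed response is written back.
+    /// </summary>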
+    public class HttpListenerServer
+    {
+        private readonly IEnumerable<string> _prefixes;
+        private HttpListener _listener;
+        private readonly Responder _responder;
+
+        public HttpListenerServer(IEnumerable<string> listenOnPrefixes, Responder responder)
+        {
+            _responder = responder;
+            _prefixes = listenOnPrefixes;
+        }
+
+        //TODO: apparently this doesn't compile in Mono - investigate
+        //public Action<Exception, IAsyncResult> ExceptionHandler { get; set; }
+
+        protected void HttpListenerCallback(IAsyncResult result)
+        {
+            try
+            {                
+                HttpListener listener = (HttpListener)result.AsyncState;
+                if (_listener != listener) //the server which began this callback was stopped - just exit
+                    return;
+                HttpListenerContext context = listener.EndGetContext(result);
+
+                listener.BeginGetContext(HttpListenerCallback, listener); // start accepting the next request so it can be handled while this one is processed
+
+                //process this request
+                if (!context.Request.HttpMethod.Equals("POST"))
+                    throw new AvroRuntimeException("HTTP method must be POST");
+                if (!context.Request.ContentType.Equals("avro/binary"))
+                    throw new AvroRuntimeException("Content-type must be avro/binary");
+
+                byte[] intBuffer = new byte[4];
+                var buffers = HttpTransceiver.ReadBuffers(context.Request.InputStream, intBuffer);
+
+                buffers = _responder.Respond(buffers);
+                context.Response.ContentType = "avro/binary";
+                context.Response.ContentLength64 = HttpTransceiver.CalculateLength(buffers);
+
+                HttpTransceiver.WriteBuffers(buffers, context.Response.OutputStream);
+
+                context.Response.OutputStream.Close();
+                context.Response.Close();
+            }
+            catch (Exception ex)
+            {
+                //TODO: apparently this doesn't compile in Mono - investigate
+                //if (ExceptionHandler != null)
+                //    ExceptionHandler(ex, result);
+                //else
+                //    Debug.Print("Exception occured while processing a request, no exception handler was provided - ignoring", ex);
+                Debug.Print("Exception occured while processing a web request, skipping this request: ", ex);
+            }
+        }
+
+        public void Start()
+        {
+            _listener = new HttpListener();
+
+            foreach (string s in _prefixes)
+            {
+                _listener.Prefixes.Add(s);
+            }
+
+            _listener.Start();
+
+            _listener.BeginGetContext(HttpListenerCallback, _listener);
+        }
+
+        public void Stop()
+        {
+            _listener.Stop();
+            _listener.Close();
+            _listener = null;
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/ipc/HttpTransceiver.cs b/lang/csharp/src/apache/ipc/HttpTransceiver.cs
new file mode 100644
index 0000000..48851f1
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/HttpTransceiver.cs
@@ -0,0 +1,174 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Reflection;
+using System.IO;
+using System.Net;
+
+namespace Avro.ipc
+{
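+    /// <summary>
+    /// A Transceiver that carries Avro-framed request/response payloads over
+    /// HTTP POST. A "model" HttpWebRequest supplies the connection settings
+    /// copied onto each outgoing request.
+    /// </summary>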
+    public class HttpTransceiver : Transceiver
+    {
+        private byte[] _intBuffer = new byte[4]; // reused by read/write; the base class serializes channel access, so there is no race on this buffer
+        private HttpWebRequest _httpRequest;
+        private HttpWebRequest _modelRequest;
+
+        public override string RemoteName
+        {
+            get
+            {
+                return _modelRequest.RequestUri.AbsoluteUri;
+            }
+        }
+
+        public HttpTransceiver(HttpWebRequest modelRequest)
+        {
+            _modelRequest = modelRequest;
+        }
+
+        public HttpTransceiver(Uri serviceUri, int timeoutMs)
+        {
+            _modelRequest = (HttpWebRequest)WebRequest.Create(serviceUri);
+            _modelRequest.Method = "POST";
+            _modelRequest.ContentType = "avro/binary";
+            _modelRequest.Timeout = timeoutMs;
+        }
+
+        private static int ReadInt(Stream stream, byte[] buffer)
+        {
+            // Stream.Read may return fewer than 4 bytes, so loop until the length prefix is complete
+            int offset = 0, num;
+            do {
+                num = stream.Read(buffer, offset, 4 - offset);
+                if (num == 0) throw new EndOfStreamException("Unexpected end of stream while reading frame length");
+            } while ((offset += num) < 4);
+            return IPAddress.NetworkToHostOrder(BitConverter.ToInt32(buffer, 0));
+        }
+
+        public static byte[] ConvertIntToBytes(int value)
+        {
+            return BitConverter.GetBytes(IPAddress.HostToNetworkOrder(value));
+        }
+
+        public static int CalculateLength(IList<MemoryStream> buffers)
+        {
+            int num = 0;
+            foreach (MemoryStream memoryStream in buffers)
+            {
+                num += 4; // 4-byte length prefix per buffer
+                num += (int)memoryStream.Length;
+            }
+            return num + 4; // plus the terminating zero-length marker
+        }
+
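+        // Framing: each buffer is preceded by its length as a 4-byte big-endian integer;
+        // a length of zero marks the end of the message.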
+        public static IList<MemoryStream> ReadBuffers(Stream inStream, byte[] intBuffer)
+        {
+            List<MemoryStream> list = new List<MemoryStream>();
+            while (true)
+            {
+                int length = ReadInt(inStream, intBuffer);
+
+                if (length == 0) //end of transmission
+                    break;
+
+                byte[] buffer = new byte[length];
+                int offset = 0;
+                int count = length;
+                while (offset < length)
+                {
+                    int num = inStream.Read(buffer, offset, count);
+                    if (num == 0)
+                        throw new Exception(string.Format("Unexpected end of response binary stream - expected {0} more bytes in current chunk", count));
+                    offset += num;
+                    count -= num;
+                }
+
+                list.Add(new MemoryStream(buffer));
+            }
+            return list;
+        }
+
+        public override IList<MemoryStream> ReadBuffers()
+        {
+            using (Stream responseStream = this._httpRequest.GetResponse().GetResponseStream())
+            {
+                return ReadBuffers(responseStream, _intBuffer);
+            }
+        }
+
+        protected HttpWebRequest CreateAvroHttpRequest(long contentLength)
+        {
+            HttpWebRequest wr = (HttpWebRequest)WebRequest.Create(_modelRequest.RequestUri);
+            
+            //TODO: what else to copy from model request?
+            wr.AllowAutoRedirect = _modelRequest.AllowAutoRedirect;
+            wr.AllowWriteStreamBuffering = _modelRequest.AllowWriteStreamBuffering;
+            wr.AuthenticationLevel = _modelRequest.AuthenticationLevel;
+            wr.AutomaticDecompression = _modelRequest.AutomaticDecompression;
+            wr.CachePolicy = _modelRequest.CachePolicy;
+            wr.ClientCertificates.AddRange(_modelRequest.ClientCertificates);
+            wr.ConnectionGroupName = _modelRequest.ConnectionGroupName;
+            wr.ContinueDelegate = _modelRequest.ContinueDelegate;
+            wr.CookieContainer = _modelRequest.CookieContainer;
+            wr.Credentials = _modelRequest.Credentials;
+            wr.UnsafeAuthenticatedConnectionSharing = _modelRequest.UnsafeAuthenticatedConnectionSharing;
+            wr.UseDefaultCredentials = _modelRequest.UseDefaultCredentials;
+
+            wr.KeepAlive = _modelRequest.KeepAlive;
+            wr.Expect = _modelRequest.Expect;
+            //wr.Date = _modelRequest.Date;
+            //wr.Host = _modelRequest.Host;
+            wr.UserAgent = _modelRequest.UserAgent;
+            //wr.Headers = _modelRequest.Headers;
+            wr.Referer = _modelRequest.Referer;
+
+            wr.Pipelined = _modelRequest.Pipelined;
+            wr.PreAuthenticate = _modelRequest.PreAuthenticate;
+            wr.ProtocolVersion = _modelRequest.ProtocolVersion;
+            wr.Proxy = _modelRequest.Proxy;
+            wr.ReadWriteTimeout = _modelRequest.ReadWriteTimeout;
+            wr.Timeout = _modelRequest.Timeout;
+
+            //the properties which are defined by Avro specification
+            wr.Method = "POST";
+            wr.ContentType = "avro/binary";
+            wr.ContentLength = contentLength;
+
+            return wr;
+        }
+
+        public static void WriteBuffers(IList<MemoryStream> buffers, Stream outStream)
+        {
+            foreach (MemoryStream memoryStream in buffers)
+            {
+                int num = (int)memoryStream.Length;
+                outStream.Write(ConvertIntToBytes(num), 0, 4);
+                memoryStream.WriteTo(outStream);
+            }
+            outStream.Write(ConvertIntToBytes(0), 0, 4);
+            outStream.Flush();
+        }
+
+        public override void WriteBuffers(IList<MemoryStream> buffers)
+        {
+            _httpRequest = CreateAvroHttpRequest(CalculateLength(buffers));
+            using (Stream requestStream = _httpRequest.GetRequestStream())
+            {
+                WriteBuffers(buffers, requestStream);
+            }
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/ipc/LocalTransceiver.cs b/lang/csharp/src/apache/ipc/LocalTransceiver.cs
new file mode 100644
index 0000000..fde6800
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/LocalTransceiver.cs
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.IO;
+
+namespace Avro.ipc
+{
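+    /// <summary>
+    /// An in-process Transceiver that short-circuits the wire: Transceive()
+    /// passes the request buffers straight to a local Responder.
+    /// </summary>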
+    public class LocalTransceiver : Transceiver
+    {
+        private readonly Responder responder;
+
+        public LocalTransceiver(Responder responder)
+        {
+            if (responder == null) throw new ArgumentNullException("responder");
+
+            this.responder = responder;
+        }
+
+        public override string RemoteName
+        {
+            get { return "local"; }
+        }
+
+        public override IList<MemoryStream> Transceive(IList<MemoryStream> request)
+        {
+            return responder.Respond(request);
+        }
+
+        public override IList<MemoryStream> ReadBuffers()
+        {
+            throw new NotSupportedException();
+        }
+
+        public override void WriteBuffers(IList<MemoryStream> getBytes)
+        {
+            throw new NotSupportedException();
+        }
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/ipc/OutputStream.cs b/lang/csharp/src/apache/ipc/OutputStream.cs
new file mode 100644
index 0000000..025bbfe
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/OutputStream.cs
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.IO;
+
+namespace Avro.ipc
+{
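+    /// <summary>
+    /// A write-only Stream base class: reading and seeking are unsupported,
+    /// so derived classes only implement the write-side members.
+    /// </summary>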
+    internal abstract class OutputStream : Stream
+    {
+        public override bool CanWrite
+        {
+            get { return true; }
+        }
+
+        public override bool CanRead
+        {
+            get { return false; }
+        }
+
+        public override bool CanSeek
+        {
+            get { return false; }
+        }
+
+        public override long Position
+        {
+            get { throw new NotSupportedException(); }
+            set { throw new NotSupportedException(); }
+        }
+
+        public override int Read(byte[] buffer, int offset, int count)
+        {
+            throw new NotSupportedException();
+        }
+
+        public override long Seek(long offset, SeekOrigin origin)
+        {
+            throw new NotSupportedException();
+        }
+
+        public override void SetLength(long value)
+        {
+            throw new NotSupportedException();
+        }
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/ipc/Properties/AssemblyInfo.cs b/lang/csharp/src/apache/ipc/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..4875ed1
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/Properties/AssemblyInfo.cs
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System.Reflection;
+using System.Runtime.InteropServices;
+
+[assembly: AssemblyTitle("Avro.ipc")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("Apache")]
+[assembly: AssemblyProduct("Avro.ipc")]
+[assembly: AssemblyCopyright("Copyright © Apache 2013")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+[assembly: ComVisible(false)]
+[assembly: Guid("CC3EF4B8-52AA-47D9-B170-5BAAA57A38F3")]
+[assembly: AssemblyVersion("0.9.0.0")]
+[assembly: AssemblyFileVersion("0.9.0.0")]
\ No newline at end of file
diff --git a/lang/csharp/src/apache/ipc/Requestor.cs b/lang/csharp/src/apache/ipc/Requestor.cs
new file mode 100644
index 0000000..29ba211
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/Requestor.cs
@@ -0,0 +1,428 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Threading;
+using Avro.Generic;
+using Avro.IO;
+using Avro.Specific;
+using org.apache.avro.ipc;
+
+namespace Avro.ipc
+{
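+    /// <summary>
+    /// Base class for the client side of an Avro RPC exchange. It performs the
+    /// handshake (caching remote hashes per remote name and remote protocols per
+    /// hash), frames requests, and dispatches responses or errors to callbacks.
+    /// </summary>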
+    public abstract class Requestor
+    {
+        private static readonly Schema Meta = MapSchema.CreateMap(PrimitiveSchema.NewInstance("bytes"));
+
+        private static readonly GenericReader<IDictionary<string, object>> MetaReader =
+            new GenericReader<IDictionary<string, object>>(Meta, Meta);
+
+        public static readonly GenericWriter<IDictionary<string, object>> MetaWriter =
+            new GenericWriter<IDictionary<string, object>>(Meta);
+
+        private static readonly Dictionary<String, MD5> RemoteHashes = new Dictionary<String, MD5>();
+        private static readonly Dictionary<MD5, Protocol> RemoteProtocols = new Dictionary<MD5, Protocol>();
+
+        private static readonly object remoteHashLock = new object();
+        private static readonly object remoteProtocolsLock = new object();
+
+        private static readonly SpecificWriter<HandshakeRequest> HandshakeWriter =
+            new SpecificWriter<HandshakeRequest>(new HandshakeRequest().Schema);
+
+        private static readonly SpecificReader<HandshakeResponse> HandshakeReader =
+            new SpecificReader<HandshakeResponse>(new HandshakeResponse().Schema, new HandshakeResponse().Schema);
+
+        protected readonly Transceiver transceiver;
+        private Protocol localProtocol;
+        private volatile Protocol remoteProtocol;
+        private volatile bool sendLocalText;
+
+        private readonly object handshakeLock = new object();
+        private Thread handshakeThread;
+
+        protected Requestor(Transceiver transceiver, Protocol protocol)
+        {
+            this.transceiver = transceiver;
+            localProtocol = protocol;
+        }
+
+        public Protocol Local
+        {
+            get { return localProtocol; }
+            protected set { localProtocol = value; }
+        }
+
+        public Transceiver Transceiver
+        {
+            get { return transceiver; }
+        }
+
+        public object Request(string messageName, object request)
+        {
+            transceiver.VerifyConnection();
+
+            var rpcRequest = new RpcRequest(messageName, request, new RpcContext());
+
+            CallFuture<Object> future =
+                GetMessage(rpcRequest).Oneway.GetValueOrDefault() ? null : new CallFuture<Object>();
+
+            Request(rpcRequest, future);
+
+            return future == null ? null : future.WaitForResult();
+        }
+
+        //public void Request<TSpecificRecord, TCallback>(String messageName, Object request, ICallback<TCallback> callback)
+        //    where TCallback : class
+        //    where TSpecificRecord : class, ISpecificRecord
+        public void Request<T>(String messageName, Object request, ICallback<T> callback)
+        {
+            var rpcRequest = new RpcRequest(messageName, request, new RpcContext());
+
+            Request(rpcRequest, callback);
+        }
+
+        private void Request<T>(RpcRequest request, ICallback<T> callback)
+        {
+            Transceiver t = transceiver;
+            if (!t.IsConnected)
+            {
+                Monitor.Enter(handshakeLock);
+                handshakeThread = Thread.CurrentThread;
+
+                try
+                {
+                    if (!t.IsConnected)
+                    {
+                        var callFuture = new CallFuture<T>(callback);
+                        IList<MemoryStream> bytes = request.GetBytes(Local, this);
+                        var transceiverCallback = new TransceiverCallback<T>(this, request, callFuture, Local);
+
+                        t.Transceive(bytes, transceiverCallback);
+
+                        // Block until handshake complete
+                        callFuture.Wait();
+                        Message message = GetMessage(request);
+                        if (message.Oneway.GetValueOrDefault())
+                        {
+                            Exception error = callFuture.Error;
+                            if (error != null)
+                            {
+                                throw error;
+                            }
+                        }
+                        return;
+                    }
+
+                }
+                finally
+                {
+                    if (Thread.CurrentThread == handshakeThread)
+                    {
+                        handshakeThread = null;
+                        Monitor.Exit(handshakeLock);
+                    }
+                }
+            }
+
+            if (GetMessage(request).Oneway.GetValueOrDefault())
+            {
+                t.LockChannel();
+                try
+                {
+                    IList<MemoryStream> bytes = request.GetBytes(Local, this);
+                    t.WriteBuffers(bytes);
+                    if (callback != null)
+                    {
+                        callback.HandleResult(default(T));
+                    }
+                }
+                finally
+                {
+                    t.UnlockChannel();
+                }
+            }
+            else
+            {
+                IList<MemoryStream> bytes = request.GetBytes(Local, this);
+                var transceiverCallback = new TransceiverCallback<T>(this, request, callback, Local);
+
+                t.Transceive(bytes, transceiverCallback);
+
+                //if (Thread.CurrentThread == handshakeThread)
+                //{
+                //    Monitor.Exit(handshakeLock);
+                //}
+            }
+        }
+
+        private Message GetMessage(RpcRequest request)
+        {
+            return request.GetMessage(Local);
+        }
+
+        public abstract void WriteRequest(RecordSchema schema, Object request, Encoder encoder);
+        public abstract object ReadResponse(Schema writer, Schema reader, Decoder decoder);
+        public abstract Exception ReadError(Schema writer, Schema reader, Decoder decoder);
+
+
+        public void WriteHandshake(Encoder outEncoder)
+        {
+            if (transceiver.IsConnected) return;
+
+            var localHash = new MD5 {Value = localProtocol.MD5};
+
+            String remoteName = transceiver.RemoteName;
+            MD5 remoteHash;
+
+            lock (remoteHashLock)
+            {
+                if (!RemoteHashes.TryGetValue(remoteName, out remoteHash))
+                {
+                    // guess remote is local
+                    remoteHash = localHash;
+                    remoteProtocol = localProtocol;
+                }
+            }
+
+            if (remoteProtocol == null)
+            {
+                lock (remoteProtocolsLock)
+                {
+                    remoteProtocol = RemoteProtocols[remoteHash];
+                }
+            }
+
+            var handshake = new HandshakeRequest {clientHash = localHash, serverHash = remoteHash};
+
+            if (sendLocalText)
+                handshake.clientProtocol = localProtocol.ToString();
+
+            var context = new RpcContext {HandshakeRequest = handshake};
+
+            handshake.meta = context.RequestHandshakeMeta;
+
+            HandshakeWriter.Write(handshake, outEncoder);
+        }
+
+        private void SetRemote(HandshakeResponse handshake)
+        {
+            remoteProtocol = Protocol.Parse(handshake.serverProtocol);
+
+            MD5 remoteHash = handshake.serverHash;
+            lock (remoteHashLock)
+            {
+                RemoteHashes[transceiver.RemoteName] = remoteHash;
+            }
+            lock (remoteProtocolsLock)
+            {
+                RemoteProtocols[remoteHash] = remoteProtocol;
+            }
+        }
+
+        public Protocol GetRemote()
+        {
+            if (remoteProtocol != null) return remoteProtocol; // already have it
+
+            lock (remoteHashLock)
+            {
+                MD5 remoteHash;
+                if (RemoteHashes.TryGetValue(transceiver.RemoteName, out remoteHash))
+                {
+                    lock (remoteProtocolsLock)
+                    {
+                        // TryGetValue: a missing entry must fall through to the handshake rather than throw
+                        Protocol cached;
+                        if (RemoteProtocols.TryGetValue(remoteHash, out cached) && cached != null)
+                        {
+                            remoteProtocol = cached;
+                            return cached; // already cached
+                        }
+                    }
+                }
+            }
+
+            Monitor.Enter(handshakeLock);
+
+            try
+            {
+                // force handshake
+                var bbo = new ByteBufferOutputStream();
+                // direct because the payload is tiny.
+                Encoder outp = new BinaryEncoder(bbo);
+
+                WriteHandshake(outp);
+                outp.WriteInt(0); // empty metadata
+                outp.WriteString(""); // bogus message name
+                IList<MemoryStream> response = Transceiver.Transceive(bbo.GetBufferList());
+
+                var bbi = new ByteBufferInputStream(response);
+                var inp = new BinaryDecoder(bbi);
+
+                ReadHandshake(inp);
+                return remoteProtocol;
+            }
+            finally
+            {
+                Monitor.Exit(handshakeLock);
+            }
+        }
+
+        private bool ReadHandshake(BinaryDecoder input)
+        {
+            if (Transceiver.IsConnected) return true;
+            bool established = false;
+
+            HandshakeResponse handshake = HandshakeReader.Read(null, input);
+
+            switch (handshake.match)
+            {
+                case HandshakeMatch.BOTH:
+                    established = true;
+                    sendLocalText = false;
+                    break;
+                case HandshakeMatch.CLIENT:
+                    SetRemote(handshake);
+                    established = true;
+                    sendLocalText = false;
+                    break;
+                case HandshakeMatch.NONE:
+                    SetRemote(handshake);
+                    sendLocalText = true;
+                    break;
+                default:
+                    throw new AvroRuntimeException("Unexpected match: " + handshake.match);
+            }
+
+            if (established)
+                transceiver.Remote = remoteProtocol;
+            return established;
+        }
+
+        private class Response
+        {
+            private readonly Requestor requestor;
+            private readonly RpcRequest request;
+            private readonly BinaryDecoder input;
+
+            public Response(Requestor requestor, RpcRequest request, BinaryDecoder input)
+            {
+                this.requestor = requestor;
+                this.request = request;
+                this.input = input;
+            }
+
+            public Object GetResponse()
+            {
+                Message lm = request.GetMessage(requestor.Local);
+                Message rm;
+                if (!requestor.remoteProtocol.Messages.TryGetValue(request.GetMessage(requestor.Local).Name, out rm))
+                    throw new AvroRuntimeException
+                        ("Not a remote message: " + request.GetMessage(requestor.Local).Name);
+
+                Transceiver t = requestor.Transceiver;
+                if ((lm.Oneway.GetValueOrDefault() != rm.Oneway.GetValueOrDefault()) && t.IsConnected)
+                    throw new AvroRuntimeException
+                        ("Not both one-way messages: " + request.GetMessage(requestor.Local));
+
+                if (lm.Oneway.GetValueOrDefault() && t.IsConnected) return null; // one-way w/ handshake
+
+                RpcContext context = request.Context;
+                context.ResponseCallMeta = MetaReader.Read(null, input);
+
+                if (!input.ReadBoolean())
+                {
+                    // no error
+                    Object response = requestor.ReadResponse(rm.Response, lm.Response, input);
+                    context.Response = response;
+
+                    return response;
+                }
+
+                Exception error = requestor.ReadError(rm.SupportedErrors, lm.SupportedErrors, input);
+                context.Error = error;
+
+                throw error;
+            }
+        }
+
+
+        private class TransceiverCallback<T> : ICallback<IList<MemoryStream>> 
+        {
+            private readonly Requestor requestor;
+            private readonly RpcRequest request;
+            private readonly ICallback<T> callback;
+            private readonly Protocol local;
+
+            public TransceiverCallback(Requestor requestor, RpcRequest request, ICallback<T> callback,
+                                       Protocol local)
+            {
+                this.requestor = requestor;
+                this.request = request;
+                this.callback = callback;
+                this.local = local;
+            }
+
+            public void HandleResult(IList<MemoryStream> result)
+            {
+                var bbi = new ByteBufferInputStream(result);
+                var input = new BinaryDecoder(bbi);
+
+                if (!requestor.ReadHandshake(input))
+                {
+                    // Resend the handshake and return
+                    var handshake = new RpcRequest(request);
+
+                    IList<MemoryStream> requestBytes = handshake.GetBytes(requestor.Local, requestor);
+                    var transceiverCallback = new TransceiverCallback<T>(requestor, handshake, callback,
+                                                                            local);
+
+                    requestor.Transceiver.Transceive(requestBytes, transceiverCallback);
+                    return;
+                }
+
+                // Read response; invoke callback
+                var response = new Response(requestor, request, input);
+                try
+                {
+                    Object responseObject;
+                    try
+                    {
+                        responseObject = response.GetResponse();
+                    }
+                    catch (Exception e)
+                    {
+                        if (callback != null)
+                        {
+                            callback.HandleException(e);
+                        }
+                        return;
+                    }
+                    if (callback != null)
+                    {
+                        callback.HandleResult((T) responseObject);
+                    }
+                }
+                catch
+                {
+                    //LOG.error("Error in callback handler: " + t, t);
+                }
+            }
+
+            public void HandleException(Exception exception)
+            {
+                callback.HandleException(exception);
+            }
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/ipc/Responder.cs b/lang/csharp/src/apache/ipc/Responder.cs
new file mode 100644
index 0000000..a108393
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/Responder.cs
@@ -0,0 +1,228 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using Avro.Generic;
+using Avro.IO;
+using Avro.Specific;
+using log4net;
+using org.apache.avro.ipc;
+
+namespace Avro.ipc
+{
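+    /// <summary>
+    /// Base class for the server side of an Avro RPC exchange: performs the
+    /// handshake, resolves each message against the local and remote protocols,
+    /// invokes Respond(), and frames the response (or error) for transport.
+    /// </summary>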
+    public abstract class Responder
+    {
+        private static readonly ILog log = LogManager.GetLogger(typeof(Responder));
+
+        private static readonly Schema META = MapSchema.CreateMap(PrimitiveSchema.NewInstance("bytes"));
+
+        private static readonly GenericReader<Dictionary<String, object>>
+            META_READER = new GenericReader<Dictionary<String, object>>(META, META);
+
+        private static readonly GenericWriter<IDictionary<String, object>>
+            META_WRITER = new GenericWriter<IDictionary<String, object>>(META);
+
+        private readonly SpecificReader<HandshakeRequest> handshakeReader =
+            new SpecificReader<HandshakeRequest>(new HandshakeRequest().Schema, new HandshakeRequest().Schema);
+
+        private readonly SpecificWriter<HandshakeResponse> handshakeWriter =
+            new SpecificWriter<HandshakeResponse>(new HandshakeResponse().Schema);
+
+        private readonly Protocol local;
+        private readonly MD5 localHash;
+        private readonly IDictionary<Schema, Protocol> protocols = new Dictionary<Schema, Protocol>();
+        private readonly object protocolsLock = new object();
+
+        protected Responder(Protocol local)
+        {
+            this.local = local;
+            localHash = new MD5 {Value = local.MD5};
+
+            lock (protocolsLock)
+            {
+                protocols[localHash.Schema] = local;
+            }
+        }
+        
+        public Protocol Local
+        {
+            get { return local; }
+        }
+
+        public abstract Object Respond(Message message, Object request);
+        public abstract Object ReadRequest(Schema actual, Schema expected, Decoder input);
+
+        public abstract void WriteResponse(Schema schema, Object response, Encoder output);
+        public abstract void WriteError(Schema schema, Object error, Encoder output);
+
+        public IList<MemoryStream> Respond(IList<MemoryStream> buffers)
+        {
+            return Respond(buffers, null);
+        }
+
+        private Protocol Handshake(Decoder input, Encoder output, Transceiver connection)
+        {
+            if (connection != null && connection.IsConnected)
+                return connection.Remote;
+            HandshakeRequest request = handshakeReader.Read(null, input);
+
+            Protocol remote;
+            lock (protocolsLock)
+            {
+                // TryGetValue: an unknown client hash must yield null (to trigger a protocol request), not throw
+                protocols.TryGetValue(request.clientHash.Schema, out remote);
+                if (remote == null && request.clientProtocol != null)
+                {
+                    remote = Protocol.Parse(request.clientProtocol);
+                    protocols[request.clientHash.Schema] = remote;
+                }
+            }
+            var response = new HandshakeResponse();
+            if (localHash.Schema.Equals(request.serverHash.Schema))
+            {
+                response.match =
+                    remote == null ? HandshakeMatch.NONE : HandshakeMatch.BOTH;
+            }
+            else
+            {
+                response.match =
+                    remote == null ? HandshakeMatch.NONE : HandshakeMatch.CLIENT;
+            }
+            if (response.match != HandshakeMatch.BOTH)
+            {
+                response.serverProtocol = local.ToString();
+                response.serverHash = localHash;
+            }
+
+            handshakeWriter.Write(response, output);
+
+            if (connection != null && response.match != HandshakeMatch.NONE)
+                connection.Remote = remote;
+
+            return remote;
+        }
+
+        public IList<MemoryStream> Respond(IList<MemoryStream> buffers,
+                                          Transceiver connection)
+        {
+            Decoder input = new BinaryDecoder(new ByteBufferInputStream(buffers));
+
+            var bbo = new ByteBufferOutputStream();
+            var output = new BinaryEncoder(bbo);
+            Exception error = null;
+            var context = new RpcContext();
+            List<MemoryStream> handshake = null;
+
+            bool wasConnected = connection != null && connection.IsConnected;
+            try
+            {
+                Protocol remote = Handshake(input, output, connection);
+                output.Flush();
+                if (remote == null) // handshake failed
+                    return bbo.GetBufferList();
+                handshake = bbo.GetBufferList();
+
+                // read request using remote protocol specification
+                context.RequestCallMeta = META_READER.Read(null, input);
+                String messageName = input.ReadString();
+                if (messageName.Equals("")) // a handshake ping
+                    return handshake;
+                // TryGetValue: report an unknown message as an AvroRuntimeException rather than a KeyNotFoundException
+                Message rm;
+                if (!remote.Messages.TryGetValue(messageName, out rm))
+                    throw new AvroRuntimeException("No such remote message: " + messageName);
+                Message m;
+                if (!Local.Messages.TryGetValue(messageName, out m))
+                    throw new AvroRuntimeException("No message named " + messageName
+                                                   + " in " + Local);
+
+                Object request = ReadRequest(rm.Request, m.Request, input);
+
+                context.Message = rm;
+
+                // create response using local protocol specification
+                if ((m.Oneway.GetValueOrDefault() != rm.Oneway.GetValueOrDefault()) && wasConnected)
+                    throw new AvroRuntimeException("Not both one-way: " + messageName);
+
+                Object response = null;
+
+                try
+                {
+                    response = Respond(m, request);
+                    context.Response = response;
+                }
+                catch (Exception e)
+                {
+                    error = e;
+                    context.Error = error;
+                    log.Warn("user error", e);
+                }
+
+                if (m.Oneway.GetValueOrDefault() && wasConnected) // no response data
+                    return null;
+
+                output.WriteBoolean(error != null);
+                if (error == null)
+                    WriteResponse(m.Response, response, output);
+                else
+                {
+                    try 
+                    {
+                        WriteError(m.SupportedErrors, error, output);
+                    } 
+                    catch (Exception)
+                    {    
+                        // Presumably no match on the exception, throw the original
+                        throw error;
+                    }
+                }
+            }
+            catch (Exception e)
+            {
+                // system error
+                log.Warn("system error", e);
+                context.Error = e;
+                bbo = new ByteBufferOutputStream();
+                output = new BinaryEncoder(bbo);
+                output.WriteBoolean(true);
+
+                WriteError(errorSchema /*Protocol.SYSTEM_ERRORS*/, e.ToString(), output);
+                if (null == handshake)
+                {
+                    handshake = new ByteBufferOutputStream().GetBufferList();
+                }
+            }
+
+            output.Flush();
+            List<MemoryStream> payload = bbo.GetBufferList();
+
+            // Grab meta-data from plugins
+            context.ResponsePayload = payload;
+
+            META_WRITER.Write(context.ResponseCallMeta, output);
+            output.Flush();
+            // Prepend handshake and append payload
+            bbo.Prepend(handshake);
+            bbo.Append(payload);
+
+            return bbo.GetBufferList();
+        }
+
+        private static readonly Schema errorSchema = Schema.Parse("[\"string\"]");
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/ipc/RpcContext.cs b/lang/csharp/src/apache/ipc/RpcContext.cs
new file mode 100644
index 0000000..75e53b2
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/RpcContext.cs
@@ -0,0 +1,143 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using org.apache.avro.ipc;
+
+namespace Avro.ipc
+{
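+    /// <summary>
+    /// Per-call state shared between the RPC layers: handshake records,
+    /// request/response payloads, call metadata, and the response or error.
+    /// </summary>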
+    public class RpcContext
+    {
+        private Exception _error;
+        private IDictionary<String, object> _requestCallMeta;
+        private Object _response;
+        private IDictionary<String, object> _responseCallMeta;
+
+        public HandshakeRequest HandshakeRequest { get; set; }
+        public HandshakeResponse HandshakeResponse { get; set; }
+
+        public IList<MemoryStream> RequestPayload { get; set; }
+        public IList<MemoryStream> ResponsePayload { get; set; }
+
+        public Exception Error
+        {
+            set
+            {
+                _response = null;
+                _error = value;
+            }
+            get { return _error; }
+        }
+
+        public Object Response
+        {
+            set
+            {
+                _response = value;
+                _error = null;
+            }
+
+            get { return _response; }
+        }
+
+
+        public IDictionary<String, byte[]> RequestHandshakeMeta
+        {
+            set { HandshakeRequest.meta = value; }
+
+            get
+            {
+                if (HandshakeRequest.meta == null)
+                    HandshakeRequest.meta = new Dictionary<String, byte[]>();
+
+                return HandshakeRequest.meta;
+            }
+        }
+
+
+        public IDictionary<String, byte[]> ResponseHandshakeMeta
+        {
+            set { HandshakeResponse.meta = value; }
+
+            get
+            {
+                if (HandshakeResponse.meta == null)
+                    HandshakeResponse.meta = new Dictionary<String, byte[]>();
+
+                return HandshakeResponse.meta;
+            }
+        }
+
+        /// <summary>
+        /// Per-call metadata provided by the client to the server.
+        /// </summary>
+        public IDictionary<String, object> RequestCallMeta
+        {
+            get
+            {
+                if (_requestCallMeta == null)
+                {
+                    _requestCallMeta = new Dictionary<string, object>();
+                }
+                return _requestCallMeta;
+            }
+            set { _requestCallMeta = value; }
+        }
+
+
+        /// <summary>
+        /// Per-call metadata provided by the server back to the client.
+        /// </summary>
+        public IDictionary<String, object> ResponseCallMeta
+        {
+            get
+            {
+                if (_responseCallMeta == null)
+                {
+                    _responseCallMeta = new Dictionary<String, object>();
+                }
+                return _responseCallMeta;
+            }
+            set { _responseCallMeta = value; }
+        }
+
+        /// <summary>
+        /// True if an exception was generated at the server; otherwise false.
+        /// </summary>
+        public bool IsError
+        {
+            get { return Error != null; }
+        }
+
+        public Message Message { get; set; }
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/ipc/RpcRequest.cs b/lang/csharp/src/apache/ipc/RpcRequest.cs
new file mode 100644
index 0000000..5d1b56f
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/RpcRequest.cs
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using Avro.IO;
+
+namespace Avro.ipc
+{
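+    /// <summary>
+    /// An RPC request in flight: resolves its Message against the local protocol
+    /// and lazily serializes handshake (if needed), call metadata, message name
+    /// and payload into framed request buffers, cached on the instance.
+    /// </summary>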
+    public class RpcRequest
+    {
+        private readonly String messageName;
+        private readonly Object request;
+        private readonly RpcContext context;
+
+        private Message message;
+        private List<MemoryStream> requestBytes;
+
+        public RpcRequest(string messageName, object request, RpcContext rpcContext)
+        {
+            if (messageName == null) throw new ArgumentNullException("messageName");
+            if (request == null) throw new ArgumentNullException("request");
+            if (rpcContext == null) throw new ArgumentNullException("rpcContext");
+
+            this.messageName = messageName;
+            this.request = request;
+            context = rpcContext;
+        }
+
+        public RpcRequest(RpcRequest request)
+            : this(request.messageName, request.request, request.Context)
+        {
+        }
+
+        public RpcContext Context
+        {
+            get { return context; }
+        }
+
+
+        public Message GetMessage(Protocol local)
+        {
+            if (message == null)
+            {
+                // TryGetValue: an unknown message name must raise AvroRuntimeException, not KeyNotFoundException
+                if (!local.Messages.TryGetValue(messageName, out message))
+                {
+                    throw new AvroRuntimeException("Not a local message: " + messageName);
+                }
+            }
+            return message;
+        }
+
+        public IList<MemoryStream> GetBytes(Protocol local, Requestor requestor)
+        {
+            if (local == null) throw new ArgumentNullException("local");
+            if (requestor == null) throw new ArgumentNullException("requestor");
+
+            if (requestBytes == null)
+            {
+                using (var bbo = new ByteBufferOutputStream())
+                {
+                    var o = new BinaryEncoder(bbo);
+
+                    // use local protocol to write request
+                    Message m = GetMessage(local);
+                    Context.Message = m;
+
+                    requestor.WriteRequest(m.Request, request, o); // write request payload
+
+                    o.Flush();
+                    List<MemoryStream> payload = bbo.GetBufferList();
+
+                    requestor.WriteHandshake(o); // prepend handshake if needed
+
+                    Context.RequestPayload = payload;
+
+                    IDictionary<string, object> responseCallMeta = Context.ResponseCallMeta;
+                    Requestor.MetaWriter.Write(responseCallMeta, o);
+
+                    o.WriteString(m.Name); // write message name
+                    o.Flush();
+
+                    bbo.Append(payload);
+
+                    requestBytes = bbo.GetBufferList();
+                }
+            }
+
+            return requestBytes;
+        }
+    }
+}
\ No newline at end of file
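
The buffer list returned by GetBytes is assembled in a fixed order: an optional
handshake, the call metadata map, the message name, and finally the framed
request payload. A minimal sketch of driving it, assuming a Requestor, a local
Protocol and an RpcContext are already in hand and "echo" is a hypothetical
message name:

    // Hypothetical: serialize an "echo" call; rpcContext construction assumed.
    var rpcRequest = new RpcRequest("echo", new object[] { "hello" }, rpcContext);
    IList<MemoryStream> frames = rpcRequest.GetBytes(localProtocol, requestor);
    // frames: handshake (if pending), call metadata, message name, payload.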
diff --git a/lang/csharp/src/apache/ipc/SocketServer.cs b/lang/csharp/src/apache/ipc/SocketServer.cs
new file mode 100644
index 0000000..60a3a56
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/SocketServer.cs
@@ -0,0 +1,235 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Net;
+using System.Net.Sockets;
+using System.Threading;
+
+namespace Avro.ipc
+{
+    public class SocketServer
+    {
+        public static ManualResetEvent allDone = new ManualResetEvent(false);
+        private readonly string hostName;
+        private readonly int port;
+        private Responder responder;
+        private bool cancellationRequested;
+        private Socket channel;
+        private List<Socket> sockets = new List<Socket>();
+        private Thread serverThread;
+
+        public SocketServer(string hostName, int port, Responder responder = null)
+        {
+            if (hostName == null) throw new ArgumentNullException("hostName");
+            if (port < 0) throw new ArgumentOutOfRangeException("port");
+
+            this.responder = responder;
+            this.hostName = hostName;
+            this.port = port;
+        }
+
+        public bool IsBound
+        {
+            get { return channel.IsBound; }
+        }
+
+        public int Port
+        {
+            get { return ((IPEndPoint) channel.LocalEndPoint).Port; }
+        }
+
+        public void SetResponder(Responder responder)
+        {
+            this.responder = responder;
+        }
+
+        public void Start()
+        {
+            channel = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
+
+            serverThread = new Thread(Run);
+            serverThread.Start();
+
+            while (!IsBound)
+            {
+                Thread.Sleep(10);
+            }
+        }
+
+        public void Stop()
+        {
+            cancellationRequested = true;
+            
+            while (serverThread.IsAlive)
+            {
+                Thread.Sleep(10);
+            }
+        }
+
+        private void Run()
+        {
+            IPHostEntry host = Dns.GetHostEntry(hostName);
+            IPAddress ipAddress =
+                host.AddressList.FirstOrDefault(x => x.AddressFamily == AddressFamily.InterNetwork);
+
+            if (ipAddress == null)
+                throw new InvalidDataException(
+                    string.Format("There is not IP Address with the hostname {0} and AddressFamily InterNetwork",
+                                  hostName));
+
+            var localEndPoint = new IPEndPoint(ipAddress, port);
+
+            channel.Bind(localEndPoint);
+            channel.Listen(100);
+
+            var results = new List<IAsyncResult>();
+            while (true)
+            {
+                // Set the event to nonsignaled state.
+                allDone.Reset();
+
+                // Start an asynchronous socket to listen for connections.
+                IAsyncResult t = channel.BeginAccept(AcceptCallback, channel);
+                results.Add(t);
+
+                // Wait until a connection is made before continuing.
+                while (!allDone.WaitOne(1000))
+                {
+                    if (cancellationRequested)
+                    {
+                        try
+                        {
+                            channel.Close();
+                        }
+                        catch
+                        {
+                        }
+
+                        try
+                        {
+                            CloseSockets();
+                        }
+                        catch
+                        {
+                        }
+
+                        return;
+                    }
+                }
+            }
+        }
+
+        private void CloseSockets()
+        {
+            lock (this)
+            {
+                try
+                {
+                    foreach (var socket in sockets)
+                    {
+                        var myOpts = new LingerOption(true, 1);
+                        socket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.DontLinger, true);
+                        socket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.Linger, myOpts);
+                        socket.SendTimeout = 1;
+                        socket.ReceiveTimeout = 1;
+                        socket.Shutdown(SocketShutdown.Both);
+                        socket.Disconnect(false);
+                    }
+                    sockets = new List<Socket>();
+                }
+                catch (Exception)
+                {
+                }
+            }
+        }
+
+        public void AddSocket(Socket socket)
+        {
+            lock (this)
+            {
+                sockets.Add(socket);
+            }
+        }
+
+        public void RemoveSocket(Socket socket)
+        {
+            lock (this)
+            {
+                sockets.Remove(socket);
+            }
+        }
+
+        private void AcceptCallback(IAsyncResult ar)
+        {
+            // Signal the main thread to continue.
+            allDone.Set();
+
+            // Get the socket that handles the client request.
+            var listener = (Socket) ar.AsyncState;
+
+            if (cancellationRequested)
+            {
+                return;
+            }
+
+            Socket socket = listener.EndAccept(ar);
+            AddSocket(socket);
+
+            // Create the state object.
+            var xc = new SocketTransceiver(socket);
+
+            while (true)
+            {
+                try
+                {
+                    IList<MemoryStream> request = xc.ReadBuffers();
+                    IList<MemoryStream> response = responder.Respond(request, xc);
+                    xc.WriteBuffers(response);
+                }
+                catch (ObjectDisposedException)
+                {
+                    break;
+                }
+                catch (SocketException)
+                {
+                    break;
+                }
+                catch (AvroRuntimeException)
+                {
+                    break;
+                }
+                catch (Exception)
+                {
+                    break;
+                }
+            }
+
+            try
+            {
+                xc.Disconnect();
+            }
+            catch (Exception) { }
+
+            RemoveSocket(socket);
+        }
+    }
+}
\ No newline at end of file
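
A minimal sketch of standing up the server; the responder instance is assumed,
and passing port 0 lets the OS pick a free port, which the Port property then
reports:

    // Hypothetical: serve on an OS-assigned port.
    var server = new SocketServer("localhost", 0, responder);
    server.Start();                 // returns once the listening socket is bound
    Console.WriteLine("listening on port " + server.Port);
    server.Stop();                  // returns once the accept loop has exited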
diff --git a/lang/csharp/src/apache/ipc/SocketTransceiver.cs b/lang/csharp/src/apache/ipc/SocketTransceiver.cs
new file mode 100644
index 0000000..a6ea2a0
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/SocketTransceiver.cs
@@ -0,0 +1,237 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+using System.Net;
+using System.Net.Sockets;
+using System.Threading;
+
+namespace Avro.ipc
+{
+    public class SocketTransceiver : Transceiver
+    {
+        private readonly byte[] header = new byte[4];
+        private readonly string host;
+        private readonly int port;
+        private readonly Stopwatch timeoutStopWatch;
+        private Socket channel;
+
+        private int serialNumber;
+
+        public SocketTransceiver(string host, int port)
+            : this(CreateSocket())
+        {
+            this.host = host;
+            this.port = port;
+
+            Connect();
+        }
+
+        public SocketTransceiver(Socket channel)
+        {
+            this.channel = channel;
+            this.channel.NoDelay = true;
+
+            timeoutStopWatch = new Stopwatch();
+        }
+
+        public override bool IsConnected
+        {
+            get
+            {
+                LockChannel();
+
+                try
+                {
+                    return Remote != null;
+                }
+                finally
+                {
+                    UnlockChannel();
+                }
+            }
+        }
+
+        public override Protocol Remote { get; set; }
+
+        public bool SocketConnected
+        {
+            get { return channel.Connected; }
+        }
+
+        public override string RemoteName
+        {
+            get
+            {
+                var ipEndPoint = ((IPEndPoint) channel.RemoteEndPoint);
+                return ipEndPoint.Address.ToString();
+            }
+        }
+
+        private static Socket CreateSocket()
+        {
+            return new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
+        }
+
+        public void Connect()
+        {
+            channel.Connect(host, port);
+        }
+
+        public void Reconnect()
+        {
+            if (host == null)
+                throw new InvalidOperationException("Cannot reconnect to a null host");
+
+            channel = CreateSocket();
+            Connect();
+
+            Remote = null;
+        }
+
+        public void Disconnect()
+        {
+            if (channel != null && channel.Connected)
+            {
+                var myOpts = new LingerOption(true, 0);
+                channel.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.DontLinger, true);
+                channel.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.Linger, myOpts);
+                channel.Close();
+            }
+        }
+
+        public override IList<MemoryStream> ReadBuffers()
+        {
+            // Read and discard the 4-byte serial number for now.
+            ReadBuffer(header, 4);
+
+            int numberOfLists = ReadInt();
+
+            // With 8 byte references, this supports creation of a generous 2GB list
+            const int MaxNumLists = 250000000;
+
+            if (numberOfLists > MaxNumLists)
+            {
+                Disconnect();
+                throw new AvroRuntimeException(
+                    string.Format("Excessively large list allocation request detected: {0} items! Connection closed.",
+                                  numberOfLists));
+            }
+
+            var buffers = new List<MemoryStream>(numberOfLists);
+
+            for (int i = 0; i < numberOfLists; i++)
+            {
+                // Length of this buffer, in bytes.
+                int length = ReadInt();
+
+                var buffer = new byte[length];
+                ReadBuffer(buffer, length);
+
+                buffers.Add(new MemoryStream(buffer));
+            }
+
+            return buffers;
+        }
+
+        public override void WriteBuffers(IList<MemoryStream> buffers)
+        {
+            if (buffers == null) return;
+
+            Interlocked.Increment(ref serialNumber);
+
+            byte[] serial = ConvertIntToBytes(serialNumber);
+            channel.Send(serial);
+
+            byte[] numBuffers = ConvertIntToBytes(buffers.Count);
+            channel.Send(numBuffers);
+
+            foreach (MemoryStream buffer in buffers)
+            {
+                var length = (int) buffer.Length;
+
+                byte[] bufferLength = ConvertIntToBytes(length);
+                channel.Send(bufferLength);
+
+                byte[] bytes = buffer.GetBuffer();
+                channel.Send(bytes, (int) buffer.Length, SocketFlags.None);
+            }
+        }
+
+        private int ReadInt()
+        {
+            ReadBuffer(header, 4);
+
+            int num = BitConverter.ToInt32(header, 0);
+            num = IPAddress.NetworkToHostOrder(num);
+            return num;
+        }
+
+        private static byte[] ConvertIntToBytes(int length)
+        {
+            int hostToNetworkOrder = IPAddress.HostToNetworkOrder(length);
+            byte[] bufferLength = BitConverter.GetBytes(hostToNetworkOrder);
+            return bufferLength;
+        }
+
+        private void ReadBuffer(byte[] buffer, int length)
+        {
+            if (length == 0)
+                return;
+            
+            int numReceived = 0;
+            do
+            {
+                numReceived += channel.Receive(buffer, numReceived, length - numReceived, SocketFlags.None);
+
+                Timeout(numReceived);
+            } while (numReceived < length);
+        }
+
+        private void Timeout(int numReceived)
+        {
+            if (numReceived == 0)
+            {
+                if (!timeoutStopWatch.IsRunning)
+                {
+                    timeoutStopWatch.Start();
+                }
+                else if (timeoutStopWatch.ElapsedMilliseconds > 10000)
+                {
+                    throw new TimeoutException(string.Format("Failed to receive any data after [{0}] milliseconds.",
+                                                             timeoutStopWatch.ElapsedMilliseconds));
+                }
+            }
+        }
+
+        public override void VerifyConnection()
+        {
+            if (!SocketConnected)
+            {
+                Reconnect();
+            }
+        }
+
+        public void Close()
+        {
+        }
+    }
+}
\ No newline at end of file
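
For reference, the framing WriteBuffers emits and ReadBuffers consumes is, all
in network byte order: a 4-byte serial number, a 4-byte buffer count, then for
each buffer a 4-byte length followed by that many bytes. A minimal sketch of
producing one frame by hand (illustrative only, not part of the library):

    // Hypothetical: frame a single payload the way WriteBuffers does.
    static byte[] Frame(byte[] payload, int serial)
    {
        var ms = new MemoryStream();
        WriteInt(ms, serial);             // 4-byte serial number
        WriteInt(ms, 1);                  // 4-byte buffer count
        WriteInt(ms, payload.Length);     // 4-byte buffer length
        ms.Write(payload, 0, payload.Length);
        return ms.ToArray();
    }

    static void WriteInt(Stream s, int value)
    {
        byte[] b = BitConverter.GetBytes(IPAddress.HostToNetworkOrder(value));
        s.Write(b, 0, b.Length);
    }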
diff --git a/lang/csharp/src/apache/ipc/Specific/SpecificRequestor.cs b/lang/csharp/src/apache/ipc/Specific/SpecificRequestor.cs
new file mode 100644
index 0000000..f36ef38
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/Specific/SpecificRequestor.cs
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Linq;
+using Avro.IO;
+using Avro.Specific;
+using Castle.DynamicProxy;
+
+namespace Avro.ipc.Specific
+{
+    public class SpecificRequestor : Requestor, IInterceptor, ICallbackRequestor
+    {
+        private ISpecificProtocol specificProtocol;
+
+        private SpecificRequestor(Transceiver transceiver) :
+            base(transceiver, null)
+        {
+        }
+
+        void ICallbackRequestor.Request<TCallFuture>(string messageName, object[] args, object callback)
+        {
+            var specificCallback = (CallFuture<TCallFuture>) callback;
+            Request(messageName, args, specificCallback);
+        }
+
+        public void Intercept(IInvocation invocation)
+        {
+            string methodName = invocation.Method.Name;
+
+            int argumentsLength = invocation.Arguments.Length;
+            if (argumentsLength > 0 && LastArgumentIsCallback(invocation.Arguments[argumentsLength - 1]))
+            {
+                var args = new object[argumentsLength - 1];
+                Array.Copy(invocation.Arguments, args, argumentsLength - 1);
+                var callback = invocation.Arguments[argumentsLength - 1];
+
+                specificProtocol.Request(this, methodName, args, callback);
+            }
+            else
+            {
+                invocation.ReturnValue = Request(methodName, invocation.Arguments);
+            }
+        }
+
+        public static T CreateClient<T>(Transceiver transceiver) where T : class, ISpecificProtocol
+        {
+            var generator = new ProxyGenerator();
+
+            var specificRequestor = new SpecificRequestor(transceiver);
+            var client = generator.CreateClassProxy<T>(specificRequestor);
+            specificRequestor.specificProtocol = client;
+            specificRequestor.Local = client.Protocol;
+
+            return client;
+        }
+
+        public override void WriteRequest(RecordSchema schema, object request, Encoder encoder)
+        {
+            var args = (Object[]) request;
+            int i = 0;
+            foreach (Field p in schema.Fields)
+            {
+                new SpecificWriter<object>(p.Schema).Write(args[i++], encoder);
+            }
+        }
+
+        public override object ReadResponse(Schema writer, Schema reader, Decoder decoder)
+        {
+            return new SpecificReader<object>(writer, reader).Read(null, decoder);
+        }
+
+        public override Exception ReadError(Schema writer, Schema reader, Decoder decoder)
+        {
+            var response = new SpecificReader<object>(writer, reader).Read(null, decoder);
+
+            var error = response as Exception;
+            if(error != null)
+                return error;
+
+            return new Exception(response.ToString());
+        }
+
+        private static bool LastArgumentIsCallback(object o)
+        {
+            Type type = o.GetType();
+            Type[] interfaces = type.GetInterfaces();
+
+            bool isCallback =
+                interfaces.Any(i => i.IsGenericType && i.GetGenericTypeDefinition() == typeof (ICallback<>));
+            return isCallback;
+        }
+    }
+}
\ No newline at end of file
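
A minimal sketch of obtaining a typed client proxy; MailProtocol is a
hypothetical abstract class implementing ISpecificProtocol (as produced by the
code generator later in this patch), and only CreateClient comes from the class
above:

    // Hypothetical: build a dynamic-proxy client over a socket transceiver.
    var transceiver = new SocketTransceiver("localhost", 33333);
    var proxy = SpecificRequestor.CreateClient<MailProtocol>(transceiver);
    // Calls on proxy are intercepted and sent over the wire; appending a
    // CallFuture<T> argument to a call makes it asynchronous.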
diff --git a/lang/csharp/src/apache/ipc/Specific/SpecificResponder.cs b/lang/csharp/src/apache/ipc/Specific/SpecificResponder.cs
new file mode 100644
index 0000000..d36899c
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/Specific/SpecificResponder.cs
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.Reflection;
+using Avro.Generic;
+using Avro.IO;
+using Avro.Specific;
+using Avro.ipc.Generic;
+
+namespace Avro.ipc.Specific
+{
+    public class SpecificResponder<T> : GenericResponder
+        where T : class, ISpecificProtocol
+    {
+        private readonly T impl;
+
+        public SpecificResponder(T impl)
+            : base(impl.Protocol)
+        {
+            this.impl = impl;
+        }
+
+        public override object Respond(Message message, object request)
+        {
+            int numParams = message.Request.Fields.Count;
+            var parameters = new Object[numParams];
+            var parameterTypes = new Type[numParams];
+
+            int i = 0;
+
+            foreach (Field field in message.Request.Fields)
+            {
+                Type type = ObjectCreator.Instance.GetType(field.Schema);
+                parameterTypes[i] = type;
+                parameters[i] = ((GenericRecord) request)[field.Name];
+
+                i++;
+            }
+
+            MethodInfo method = typeof (T).GetMethod(message.Name, parameterTypes);
+            try
+            {
+                return method.Invoke(impl, parameters);
+            }
+            catch (TargetInvocationException ex)
+            {
+                throw ex.InnerException;
+            }
+        }
+
+        public override void WriteError(Schema schema, object error, Encoder output)
+        {
+            new SpecificWriter<object>(schema).Write(error, output);
+        }
+
+        public override object ReadRequest(Schema actual, Schema expected, Decoder input)
+        {
+            return new SpecificReader<object>(actual, expected).Read(null, input);
+        }
+
+        public override void WriteResponse(Schema schema, object response, Encoder output)
+        {
+            new SpecificWriter<object>(schema).Write(response, output);
+        }
+    }
+}
\ No newline at end of file
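
A minimal sketch wiring this responder to the socket server; MailProtocol and
MailProtocolImpl are hypothetical, and the implementation is assumed to derive
from the generated protocol class:

    // Hypothetical: dispatch incoming calls to a concrete implementation.
    MailProtocol impl = new MailProtocolImpl();
    var responder = new SpecificResponder<MailProtocol>(impl);
    var server = new SocketServer("localhost", 33333, responder);
    server.Start();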
diff --git a/lang/csharp/src/apache/ipc/Transceiver.cs b/lang/csharp/src/apache/ipc/Transceiver.cs
new file mode 100644
index 0000000..5ff1796
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/Transceiver.cs
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Threading;
+using Avro.IO;
+
+namespace Avro.ipc
+{
+    public abstract class Transceiver
+    {
+        private readonly object channelLock = new object();
+        private Thread threadWhenLocked;
+
+        public virtual bool IsConnected
+        {
+            get { return false; }
+        }
+
+        public abstract String RemoteName { get; }
+
+        public virtual Protocol Remote
+        {
+            get { throw new InvalidOperationException("Not connected."); }
+            set { }
+        }
+
+        public virtual IList<MemoryStream> Transceive(IList<MemoryStream> request)
+        {
+            if (request == null) throw new ArgumentNullException("request");
+
+            LockChannel();
+            try
+            {
+                WriteBuffers(request);
+                return ReadBuffers();
+            }
+            finally
+            {
+                UnlockChannel();
+            }
+        }
+
+        public virtual void VerifyConnection()
+        {
+        }
+
+        public void Transceive(IList<MemoryStream> request, ICallback<IList<MemoryStream>> callback)
+        {
+            if (request == null) throw new ArgumentNullException("request");
+
+            try
+            {
+                IList<MemoryStream> response = Transceive(request);
+                callback.HandleResult(response);
+            }
+            catch (IOException e)
+            {
+                callback.HandleException(e);
+            }
+        }
+
+        public void LockChannel()
+        {
+            Monitor.Enter(channelLock);
+
+            threadWhenLocked = Thread.CurrentThread;
+        }
+
+        public void UnlockChannel()
+        {
+            if (Thread.CurrentThread == threadWhenLocked)
+            {
+                Monitor.Exit(channelLock);
+            }
+        }
+
+        public abstract IList<MemoryStream> ReadBuffers();
+        public abstract void WriteBuffers(IList<MemoryStream> buffers);
+    }
+}
\ No newline at end of file
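
A minimal sketch of the contract a concrete transport must satisfy; this
in-memory loopback, which simply echoes each request, is purely illustrative:

    // Hypothetical loopback transceiver; echoes each request unchanged.
    public class LoopbackTransceiver : Avro.ipc.Transceiver
    {
        public override string RemoteName { get { return "loopback"; } }

        public override IList<MemoryStream> Transceive(IList<MemoryStream> request)
        {
            return request; // no wire involved
        }

        public override IList<MemoryStream> ReadBuffers()
        {
            throw new NotSupportedException("loopback answers via Transceive");
        }

        public override void WriteBuffers(IList<MemoryStream> buffers) { }
    }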
diff --git a/lang/csharp/src/apache/ipc/org/apache/avro/ipc/HandshakeMatch.cs b/lang/csharp/src/apache/ipc/org/apache/avro/ipc/HandshakeMatch.cs
new file mode 100644
index 0000000..ca20a63
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/org/apache/avro/ipc/HandshakeMatch.cs
@@ -0,0 +1,22 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by MSBuild.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.ipc
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public enum HandshakeMatch
+	{
+		BOTH,
+		CLIENT,
+		NONE,
+	}
+}
diff --git a/lang/csharp/src/apache/ipc/org/apache/avro/ipc/HandshakeRequest.cs b/lang/csharp/src/apache/ipc/org/apache/avro/ipc/HandshakeRequest.cs
new file mode 100644
index 0000000..9e31e6e
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/org/apache/avro/ipc/HandshakeRequest.cs
@@ -0,0 +1,97 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by MSBuild.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.ipc
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class HandshakeRequest : ISpecificRecord
+	{
+		private static Schema _SCHEMA = Avro.Schema.Parse(@"{""type"":""record"",""name"":""HandshakeRequest"",""namespace"":""org.apache.avro.ipc"",""fields"":[{""name"":""clientHash"",""type"":{""type"":""fixed"",""name"":""MD5"",""namespace"":""org.apache.avro.ipc"",""size"":16}},{""name"":""clientProtocol"",""type"":[""null"",""string""]},{""name"":""serverHash"",""type"":""MD5""},{""name"":""meta"",""type"":[""null"",{""type"":""map"",""values"":""bytes""}]}]}");
+		private org.apache.avro.ipc.MD5 _clientHash;
+		private string _clientProtocol;
+		private org.apache.avro.ipc.MD5 _serverHash;
+		private IDictionary<string,System.Byte[]> _meta;
+		public virtual Schema Schema
+		{
+			get
+			{
+				return HandshakeRequest._SCHEMA;
+			}
+		}
+		public org.apache.avro.ipc.MD5 clientHash
+		{
+			get
+			{
+				return this._clientHash;
+			}
+			set
+			{
+				this._clientHash = value;
+			}
+		}
+		public string clientProtocol
+		{
+			get
+			{
+				return this._clientProtocol;
+			}
+			set
+			{
+				this._clientProtocol = value;
+			}
+		}
+		public org.apache.avro.ipc.MD5 serverHash
+		{
+			get
+			{
+				return this._serverHash;
+			}
+			set
+			{
+				this._serverHash = value;
+			}
+		}
+		public IDictionary<string,System.Byte[]> meta
+		{
+			get
+			{
+				return this._meta;
+			}
+			set
+			{
+				this._meta = value;
+			}
+		}
+		public virtual object Get(int fieldPos)
+		{
+			switch (fieldPos)
+			{
+			case 0: return this.clientHash;
+			case 1: return this.clientProtocol;
+			case 2: return this.serverHash;
+			case 3: return this.meta;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+			};
+		}
+		public virtual void Put(int fieldPos, object fieldValue)
+		{
+			switch (fieldPos)
+			{
+			case 0: this.clientHash = (org.apache.avro.ipc.MD5)fieldValue; break;
+			case 1: this.clientProtocol = (System.String)fieldValue; break;
+			case 2: this.serverHash = (org.apache.avro.ipc.MD5)fieldValue; break;
+			case 3: this.meta = (IDictionary<string,System.Byte[]>)fieldValue; break;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+			};
+		}
+	}
+}
diff --git a/lang/csharp/src/apache/ipc/org/apache/avro/ipc/HandshakeResponse.cs b/lang/csharp/src/apache/ipc/org/apache/avro/ipc/HandshakeResponse.cs
new file mode 100644
index 0000000..4ac13c6
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/org/apache/avro/ipc/HandshakeResponse.cs
@@ -0,0 +1,97 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by MSBuild.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.ipc
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class HandshakeResponse : ISpecificRecord
+	{
+		private static Schema _SCHEMA = Avro.Schema.Parse(@"{""type"":""record"",""name"":""HandshakeResponse"",""namespace"":""org.apache.avro.ipc"",""fields"":[{""name"":""match"",""type"":{""type"":""enum"",""name"":""HandshakeMatch"",""namespace"":""org.apache.avro.ipc"",""symbols"":[""BOTH"",""CLIENT"",""NONE""]}},{""name"":""serverProtocol"",""type"":[""null"",""string""]},{""name"":""serverHash"",""type"":[""null"",{""type"":""fixed"",""name"":""MD5"",""namespace"":""org.apache.avro.ipc [...]
+		private org.apache.avro.ipc.HandshakeMatch _match;
+		private string _serverProtocol;
+		private org.apache.avro.ipc.MD5 _serverHash;
+		private IDictionary<string,System.Byte[]> _meta;
+		public virtual Schema Schema
+		{
+			get
+			{
+				return HandshakeResponse._SCHEMA;
+			}
+		}
+		public org.apache.avro.ipc.HandshakeMatch match
+		{
+			get
+			{
+				return this._match;
+			}
+			set
+			{
+				this._match = value;
+			}
+		}
+		public string serverProtocol
+		{
+			get
+			{
+				return this._serverProtocol;
+			}
+			set
+			{
+				this._serverProtocol = value;
+			}
+		}
+		public org.apache.avro.ipc.MD5 serverHash
+		{
+			get
+			{
+				return this._serverHash;
+			}
+			set
+			{
+				this._serverHash = value;
+			}
+		}
+		public IDictionary<string,System.Byte[]> meta
+		{
+			get
+			{
+				return this._meta;
+			}
+			set
+			{
+				this._meta = value;
+			}
+		}
+		public virtual object Get(int fieldPos)
+		{
+			switch (fieldPos)
+			{
+			case 0: return this.match;
+			case 1: return this.serverProtocol;
+			case 2: return this.serverHash;
+			case 3: return this.meta;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+			};
+		}
+		public virtual void Put(int fieldPos, object fieldValue)
+		{
+			switch (fieldPos)
+			{
+			case 0: this.match = (org.apache.avro.ipc.HandshakeMatch)fieldValue; break;
+			case 1: this.serverProtocol = (System.String)fieldValue; break;
+			case 2: this.serverHash = (org.apache.avro.ipc.MD5)fieldValue; break;
+			case 3: this.meta = (IDictionary<string,System.Byte[]>)fieldValue; break;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+			};
+		}
+	}
+}
diff --git a/lang/csharp/src/apache/ipc/org/apache/avro/ipc/MD5.cs b/lang/csharp/src/apache/ipc/org/apache/avro/ipc/MD5.cs
new file mode 100644
index 0000000..afbb251
--- /dev/null
+++ b/lang/csharp/src/apache/ipc/org/apache/avro/ipc/MD5.cs
@@ -0,0 +1,39 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by MSBuild.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.ipc
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class MD5 : SpecificFixed
+	{
+		private static Schema _SCHEMA = Avro.Schema.Parse("{\"type\":\"fixed\",\"name\":\"MD5\",\"namespace\":\"org.apache.avro.ipc\",\"size\":16}");
+		private static uint fixedSize = 16;
+		public MD5() : 
+				base(fixedSize)
+		{
+		}
+		public override Schema Schema
+		{
+			get
+			{
+				return MD5._SCHEMA;
+			}
+		}
+		public static uint FixedSize
+		{
+			get
+			{
+				return MD5.fixedSize;
+			}
+		}
+	}
+}
diff --git a/lang/csharp/src/apache/main/Avro.main.csproj b/lang/csharp/src/apache/main/Avro.main.csproj
new file mode 100644
index 0000000..7f41e3f
--- /dev/null
+++ b/lang/csharp/src/apache/main/Avro.main.csproj
@@ -0,0 +1,180 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+    <ProductVersion>8.0.30703</ProductVersion>
+    <SchemaVersion>2.0</SchemaVersion>
+    <ProjectGuid>{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}</ProjectGuid>
+    <OutputType>Library</OutputType>
+    <AppDesignerFolder>Properties</AppDesignerFolder>
+    <RootNamespace>Avro</RootNamespace>
+    <AssemblyName>Avro</AssemblyName>
+    <TargetFrameworkVersion Condition=" '$(TargetFrameworkVersion)' == '' ">v3.5</TargetFrameworkVersion>
+    <FileAlignment>512</FileAlignment>
+    <FileUpgradeFlags>
+    </FileUpgradeFlags>
+    <OldToolsVersion>3.5</OldToolsVersion>
+    <UpgradeBackupLocation />
+    <PublishUrl>publish\</PublishUrl>
+    <Install>true</Install>
+    <InstallFrom>Disk</InstallFrom>
+    <UpdateEnabled>false</UpdateEnabled>
+    <UpdateMode>Foreground</UpdateMode>
+    <UpdateInterval>7</UpdateInterval>
+    <UpdateIntervalUnits>Days</UpdateIntervalUnits>
+    <UpdatePeriodically>false</UpdatePeriodically>
+    <UpdateRequired>false</UpdateRequired>
+    <MapFileExtensions>true</MapFileExtensions>
+    <ApplicationRevision>0</ApplicationRevision>
+    <ApplicationVersion>1.0.0.%2a</ApplicationVersion>
+    <IsWebBootstrapper>false</IsWebBootstrapper>
+    <UseApplicationTrust>false</UseApplicationTrust>
+    <BootstrapperEnabled>true</BootstrapperEnabled>
+    <TargetFrameworkProfile />
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+    <DebugSymbols>true</DebugSymbols>
+    <DebugType>full</DebugType>
+    <Optimize>false</Optimize>
+    <OutputPath>..\..\..\build\main\Debug\</OutputPath>
+    <DefineConstants>DEBUG;TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+    <GenerateSerializationAssemblies>Off</GenerateSerializationAssemblies>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+    <DebugType>none</DebugType>
+    <Optimize>true</Optimize>
+    <OutputPath>..\..\..\build\main\Release\</OutputPath>
+    <DefineConstants>
+    </DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+    <GenerateSerializationAssemblies>Off</GenerateSerializationAssemblies>
+  </PropertyGroup>
+  <PropertyGroup>
+    <SignAssembly>true</SignAssembly>
+  </PropertyGroup>
+  <PropertyGroup>
+    <AssemblyOriginatorKeyFile>..\..\..\Avro.snk</AssemblyOriginatorKeyFile>
+  </PropertyGroup>
+  <ItemGroup>
+    <Reference Include="log4net">
+      <HintPath>..\..\..\lib\main\log4net.dll</HintPath>
+    </Reference>
+    <Reference Include="Newtonsoft.Json">
+      <HintPath>..\..\..\lib\main\Newtonsoft.Json.dll</HintPath>
+    </Reference>
+    <Reference Include="System" />
+  </ItemGroup>
+  <ItemGroup>
+    <Compile Include="CodeGen\AvroRuntimeException.cs" />
+    <Compile Include="CodeGen\CodeGen.cs" />
+    <Compile Include="CodeGen\CodeGenException.cs" />
+    <Compile Include="CodeGen\CodeGenUtil.cs" />
+    <Compile Include="File\Codec.cs" />
+    <Compile Include="File\DataBlock.cs" />
+    <Compile Include="File\DataFileConstants.cs" />
+    <Compile Include="File\DataFileReader.cs" />
+    <Compile Include="File\DataFileWriter.cs" />
+    <Compile Include="File\DeflateCodec.cs" />
+    <Compile Include="File\Header.cs" />
+    <Compile Include="File\IFileReader.cs" />
+    <Compile Include="File\IFileWriter.cs" />
+    <Compile Include="File\NullCodec.cs" />
+    <Compile Include="Generic\DatumReader.cs" />
+    <Compile Include="Generic\DatumWriter.cs" />
+    <Compile Include="Generic\GenericDatumReader.cs" />
+    <Compile Include="Generic\GenericDatumWriter.cs" />
+    <Compile Include="Generic\GenericEnum.cs" />
+    <Compile Include="Generic\GenericFixed.cs" />
+    <Compile Include="Generic\GenericReader.cs" />
+    <Compile Include="Generic\GenericRecord.cs" />
+    <Compile Include="Generic\GenericWriter.cs" />
+    <Compile Include="Generic\PreresolvingDatumReader.cs" />
+    <Compile Include="Generic\PreresolvingDatumWriter.cs" />
+    <Compile Include="IO\BinaryDecoder.cs" />
+    <Compile Include="IO\BinaryEncoder.cs" />
+    <Compile Include="IO\ByteBufferInputStream.cs" />
+    <Compile Include="IO\ByteBufferOutputStream.cs" />
+    <Compile Include="IO\Decoder.cs" />
+    <Compile Include="IO\Encoder.cs" />
+    <Compile Include="IO\ICallback.cs" />
+    <Compile Include="IO\InputStream.cs" />
+    <Compile Include="IO\OutputStream.cs" />
+    <Compile Include="IO\Resolver.cs" />
+    <Compile Include="Properties\AssemblyInfo.cs" />
+    <Compile Include="Protocol\Message.cs" />
+    <Compile Include="Protocol\Protocol.cs" />
+    <Compile Include="Protocol\ProtocolParseException.cs" />
+    <Compile Include="Schema\ArraySchema.cs" />
+    <Compile Include="Schema\AvroException.cs" />
+    <Compile Include="Schema\AvroTypeException.cs" />
+    <Compile Include="Schema\EnumSchema.cs" />
+    <Compile Include="Schema\Field.cs" />
+    <Compile Include="Schema\FixedSchema.cs" />
+    <Compile Include="Schema\JsonHelper.cs" />
+    <Compile Include="Schema\MapSchema.cs" />
+    <Compile Include="Schema\NamedSchema.cs" />
+    <Compile Include="Schema\PrimitiveSchema.cs" />
+    <Compile Include="Schema\Property.cs" />
+    <Compile Include="Schema\RecordSchema.cs" />
+    <Compile Include="Schema\Schema.cs" />
+    <Compile Include="Schema\SchemaName.cs" />
+    <Compile Include="Schema\SchemaNormalization.cs" />
+    <Compile Include="Schema\SchemaParseException.cs" />
+    <Compile Include="Schema\UnionSchema.cs" />
+    <Compile Include="Schema\UnnamedSchema.cs" />
+    <Compile Include="Specific\SpecificDatumWriter.cs" />
+    <Compile Include="Specific\SpecificException.cs" />
+    <Compile Include="Specific\SpecificProtocol.cs" />
+    <Compile Include="Specific\ObjectCreator.cs" />
+    <Compile Include="Specific\SpecificDatumReader.cs" />
+    <Compile Include="Specific\SpecificFixed.cs" />
+    <Compile Include="Specific\SpecificReader.cs" />
+    <Compile Include="Specific\SpecificWriter.cs" />
+    <Compile Include="Specific\SpecificRecord.cs" />
+  </ItemGroup>
+  <ItemGroup>
+    <BootstrapperPackage Include=".NETFramework,Version=v3.5">
+      <Visible>False</Visible>
+      <ProductName>Microsoft .NET Framework 4 %28x86 and x64%29</ProductName>
+      <Install>true</Install>
+    </BootstrapperPackage>
+    <BootstrapperPackage Include="Microsoft.Net.Framework.3.5.SP1">
+      <Visible>False</Visible>
+      <ProductName>.NET Framework 3.5 SP1</ProductName>
+      <Install>false</Install>
+    </BootstrapperPackage>
+    <BootstrapperPackage Include="Microsoft.Windows.Installer.3.1">
+      <Visible>False</Visible>
+      <ProductName>Windows Installer 3.1</ProductName>
+      <Install>true</Install>
+    </BootstrapperPackage>
+  </ItemGroup>
+  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+  <!-- To modify your build process, add your task inside one of the targets below and uncomment it. 
+       Other similar extension points exist, see Microsoft.Common.targets.
+  <Target Name="BeforeBuild">
+  </Target>
+  <Target Name="AfterBuild">
+  </Target>
+  -->
+</Project>
\ No newline at end of file
diff --git a/lang/csharp/src/apache/main/CodeGen/AvroRuntimeException.cs b/lang/csharp/src/apache/main/CodeGen/AvroRuntimeException.cs
new file mode 100644
index 0000000..857a438
--- /dev/null
+++ b/lang/csharp/src/apache/main/CodeGen/AvroRuntimeException.cs
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Avro
+{
+    public class AvroRuntimeException : AvroException
+    {
+        public AvroRuntimeException(string s)
+            : base(s)
+        {
+
+        }
+        public AvroRuntimeException(string s, Exception inner)
+            : base(s, inner)
+        {
+
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/CodeGen/CodeGen.cs b/lang/csharp/src/apache/main/CodeGen/CodeGen.cs
new file mode 100644
index 0000000..1f51c42
--- /dev/null
+++ b/lang/csharp/src/apache/main/CodeGen/CodeGen.cs
@@ -0,0 +1,855 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Reflection;
+using System.Text;
+using System.CodeDom;
+using System.CodeDom.Compiler;
+using Microsoft.CSharp;
+using System.IO;
+
+namespace Avro
+{
+    public class CodeGen
+    {
+        /// <summary>
+        /// Object that contains all the generated types
+        /// </summary>
+        public CodeCompileUnit CompileUnit { get; private set; }
+
+        /// <summary>
+        /// List of schemas to generate code for
+        /// </summary>
+        public IList<Schema> Schemas { get; private set; }
+
+        /// <summary>
+        /// List of protocols to generate code for
+        /// </summary>
+        public IList<Protocol> Protocols { get; private set; }
+
+        /// <summary>
+        /// List of generated namespaces 
+        /// </summary>
+        protected Dictionary<string, CodeNamespace> namespaceLookup = new Dictionary<string, CodeNamespace>(StringComparer.Ordinal);
+
+        /// <summary>
+        /// Default constructor
+        /// </summary>
+        public CodeGen()
+        {
+            this.Schemas = new List<Schema>();
+            this.Protocols = new List<Protocol>();
+        }
+
+        /// <summary>
+        /// Adds a protocol object to generate code for
+        /// </summary>
+        /// <param name="protocol">protocol object</param>
+        public virtual void AddProtocol(Protocol protocol)
+        {
+            Protocols.Add(protocol);
+        }
+
+        /// <summary>
+        /// Adds a schema object to generate code for
+        /// </summary>
+        /// <param name="schema">schema object</param>
+        public virtual void AddSchema(Schema schema)
+        {
+            Schemas.Add(schema);
+        }
+
+        /// <summary>
+        /// Adds a namespace object for the given name into the dictionary if it doesn't exist yet
+        /// </summary>
+        /// <param name="name">name of namespace</param>
+        /// <returns></returns>
+        protected virtual CodeNamespace addNamespace(string name)
+        {
+            if (string.IsNullOrEmpty(name)) 
+                throw new ArgumentNullException("name", "name cannot be null.");
+
+            CodeNamespace ns = null;
+
+            if (!namespaceLookup.TryGetValue(name, out ns))
+            {
+                ns = new CodeNamespace(CodeGenUtil.Instance.Mangle(name));
+                foreach (CodeNamespaceImport nci in CodeGenUtil.Instance.NamespaceImports)
+                    ns.Imports.Add(nci);
+
+                CompileUnit.Namespaces.Add(ns);
+                namespaceLookup.Add(name, ns);
+            }
+            return ns;
+        }
+
+        /// <summary>
+        /// Generates code for the given protocol and schema objects
+        /// </summary>
+        /// <returns>CodeCompileUnit object</returns>
+        public virtual CodeCompileUnit GenerateCode()
+        {
+            CompileUnit = new CodeCompileUnit();
+
+            processSchemas();
+            processProtocols();
+
+            return CompileUnit;
+        }
+
+        /// <summary>
+        /// Generates code for the schema objects
+        /// </summary>
+        protected virtual void processSchemas()
+        {
+            foreach (Schema schema in this.Schemas)
+            {
+                SchemaNames names = generateNames(schema);
+                foreach (KeyValuePair<SchemaName, NamedSchema> sn in names)
+                {
+                    switch (sn.Value.Tag)
+                    {
+                        case Schema.Type.Enumeration: processEnum(sn.Value); break;
+                        case Schema.Type.Fixed: processFixed(sn.Value); break;
+                        case Schema.Type.Record: processRecord(sn.Value); break;
+                        case Schema.Type.Error: processRecord(sn.Value); break;
+                        default:
+                            throw new CodeGenException("Names in schema should only be of type NamedSchema, type found " + sn.Value.Tag);
+                    }
+                }
+            }
+        }
+
+        /// <summary>
+        /// Generates code for the protocol objects
+        /// </summary>
+        protected virtual void processProtocols()
+        {
+            foreach (Protocol protocol in Protocols)
+            {
+                SchemaNames names = generateNames(protocol);
+                foreach (KeyValuePair<SchemaName, NamedSchema> sn in names)
+                {
+                    switch (sn.Value.Tag)
+                    {
+                        case Schema.Type.Enumeration: processEnum(sn.Value); break;
+                        case Schema.Type.Fixed: processFixed(sn.Value); break;
+                        case Schema.Type.Record: processRecord(sn.Value); break;
+                        case Schema.Type.Error: processRecord(sn.Value); break;
+                        default:
+                            throw new CodeGenException("Names in protocol should only be of type NamedSchema, type found " + sn.Value.Tag);
+                    }
+                }
+
+                processInterface(protocol);
+            }
+        }
+
+        /// <summary>
+        /// Generate list of named schemas from given protocol
+        /// </summary>
+        /// <param name="protocol">protocol to process</param>
+        /// <returns></returns>
+        protected virtual SchemaNames generateNames(Protocol protocol)
+        {
+            var names = new SchemaNames();
+            foreach (Schema schema in protocol.Types)
+                addName(schema, names);
+            return names;
+        }
+
+        /// <summary>
+        /// Generate list of named schemas from given schema
+        /// </summary>
+        /// <param name="schema">schema to process</param>
+        /// <returns></returns>
+        protected virtual SchemaNames generateNames(Schema schema)
+        {
+            var names = new SchemaNames();
+            addName(schema, names);
+            return names;
+        }
+
+        /// <summary>
+        /// Recursively search the given schema for named schemas and adds them to the given container
+        /// </summary>
+        /// <param name="schema">schema object to search</param>
+        /// <param name="names">list of named schemas</param>
+        protected virtual void addName(Schema schema, SchemaNames names)
+        {
+            NamedSchema ns = schema as NamedSchema;
+            if (null != ns) if (names.Contains(ns.SchemaName)) return;
+
+            switch (schema.Tag)
+            {
+                case Schema.Type.Null:
+                case Schema.Type.Boolean:
+                case Schema.Type.Int:
+                case Schema.Type.Long:
+                case Schema.Type.Float:
+                case Schema.Type.Double:
+                case Schema.Type.Bytes:
+                case Schema.Type.String:
+                    break;
+
+                case Schema.Type.Enumeration:
+                case Schema.Type.Fixed:
+                    names.Add(ns);
+                    break;
+
+                case Schema.Type.Record:
+                case Schema.Type.Error:
+                    var rs = schema as RecordSchema;
+                    names.Add(rs);
+                    foreach (Field field in rs.Fields)
+                        addName(field.Schema, names);
+                    break;
+
+                case Schema.Type.Array:
+                    var asc = schema as ArraySchema;
+                    addName(asc.ItemSchema, names);
+                    break;
+
+                case Schema.Type.Map:
+                    var ms = schema as MapSchema;
+                    addName(ms.ValueSchema, names);
+                    break;
+
+                case Schema.Type.Union:
+                    var us = schema as UnionSchema;
+                    foreach (Schema usc in us.Schemas)
+                        addName(usc, names);
+                    break;
+
+                default:
+                    throw new CodeGenException("Unable to add name for " + schema.Name + " type " + schema.Tag);
+            }
+        }
+
+        /// <summary>
+        /// Creates a class declaration for fixed schema
+        /// </summary>
+        /// <param name="schema">fixed schema</param>
+        /// <param name="ns">namespace object</param>
+        protected virtual void processFixed(Schema schema)
+        {
+            FixedSchema fixedSchema = schema as FixedSchema;
+            if (null == fixedSchema) throw new CodeGenException("Unable to cast schema into a fixed");
+
+            CodeTypeDeclaration ctd = new CodeTypeDeclaration();
+            ctd.Name = CodeGenUtil.Instance.Mangle(fixedSchema.Name);
+            ctd.IsClass = true;
+            ctd.IsPartial = true;
+            ctd.Attributes = MemberAttributes.Public;
+            ctd.BaseTypes.Add("SpecificFixed");
+
+            // create static schema field
+            createSchemaField(schema, ctd, true);
+
+            // Add Size field
+            string sizefname = "fixedSize";
+            var ctrfield = new CodeTypeReference(typeof(uint));
+            var codeField = new CodeMemberField(ctrfield, sizefname);
+            codeField.Attributes = MemberAttributes.Private | MemberAttributes.Static;
+            codeField.InitExpression = new CodePrimitiveExpression(fixedSchema.Size);
+            ctd.Members.Add(codeField);
+
+            // Add Size property
+            var fieldRef = new CodeFieldReferenceExpression(new CodeThisReferenceExpression(), sizefname);
+            var property = new CodeMemberProperty();
+            property.Attributes = MemberAttributes.Public | MemberAttributes.Static;
+            property.Name = "FixedSize";
+            property.Type = ctrfield;
+            property.GetStatements.Add(new CodeMethodReturnStatement(new CodeTypeReferenceExpression(schema.Name + "." + sizefname)));
+            ctd.Members.Add(property);
+
+            // create constructor to initiate base class SpecificFixed
+            CodeConstructor cc = new CodeConstructor();
+            cc.Attributes = MemberAttributes.Public;
+            cc.BaseConstructorArgs.Add(new CodeVariableReferenceExpression(sizefname));
+            ctd.Members.Add(cc);
+
+            string nspace = fixedSchema.Namespace;
+            if (string.IsNullOrEmpty(nspace))
+                throw new CodeGenException("Namespace required for enum schema " + fixedSchema.Name);
+            CodeNamespace codens = addNamespace(nspace);
+            codens.Types.Add(ctd);
+        }
+
+        /// <summary>
+        /// Creates an enum declaration
+        /// </summary>
+        /// <param name="schema">enum schema</param>
+        /// <param name="ns">namespace</param>
+        protected virtual void processEnum(Schema schema)
+        {
+            EnumSchema enumschema = schema as EnumSchema;
+            if (null == enumschema) throw new CodeGenException("Unable to cast schema into an enum");
+
+            CodeTypeDeclaration ctd = new CodeTypeDeclaration(CodeGenUtil.Instance.Mangle(enumschema.Name));
+            ctd.IsEnum = true;
+            ctd.Attributes = MemberAttributes.Public;
+
+            foreach (string symbol in enumschema.Symbols)
+            {
+                if (CodeGenUtil.Instance.ReservedKeywords.Contains(symbol))
+                    throw new CodeGenException("Enum symbol " + symbol + " is a C# reserved keyword");
+                CodeMemberField field = new CodeMemberField(typeof(int), symbol);
+                ctd.Members.Add(field);
+            }
+
+            string nspace = enumschema.Namespace;
+            if (string.IsNullOrEmpty(nspace))
+                throw new CodeGenException("Namespace required for enum schema " + enumschema.Name);
+            CodeNamespace codens = addNamespace(nspace);
+            codens.Types.Add(ctd);
+        }
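+        // Illustration (a sketch): for a hypothetical schema
+        // {"type":"enum","name":"Suit","namespace":"org.example","symbols":["SPADES","HEARTS","DIAMONDS","CLUBS"]},
+        // processEnum emits roughly:
+        //
+        //   public enum Suit
+        //   {
+        //       SPADES, HEARTS, DIAMONDS, CLUBS
+        //   }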
+
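+        /// <summary>
+        /// Creates an abstract class for the protocol as well as an abstract Callback class
+        /// </summary>
+        /// <param name="protocol">protocol</param>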
+        protected virtual void processInterface(Protocol protocol)
+        {
+            // Create abstract class
+            string protocolNameMangled = CodeGenUtil.Instance.Mangle(protocol.Name);
+
+            var ctd = new CodeTypeDeclaration(protocolNameMangled);
+            ctd.TypeAttributes = TypeAttributes.Abstract | TypeAttributes.Public;
+            ctd.IsClass = true;
+            ctd.BaseTypes.Add("Avro.Specific.ISpecificProtocol");
+
+            AddProtocolDocumentation(protocol, ctd);
+
+            // Add static protocol field.
+            var protocolField = new CodeMemberField();
+            protocolField.Attributes = MemberAttributes.Private | MemberAttributes.Static | MemberAttributes.Final;
+            protocolField.Name = "protocol";
+            // CodeDOM has no direct support for readonly fields, so the modifier is smuggled in via the type name
+            protocolField.Type = new CodeTypeReference("readonly Avro.Protocol");
+
+            var cpe = new CodePrimitiveExpression(protocol.ToString());
+            var cmie = new CodeMethodInvokeExpression(
+                new CodeMethodReferenceExpression(new CodeTypeReferenceExpression(typeof(Protocol)), "Parse"),
+                new CodeExpression[] { cpe });
+
+            protocolField.InitExpression = cmie;
+
+            ctd.Members.Add(protocolField);
+
+            // Add the Protocol property backed by the static field.
+            var property = new CodeMemberProperty();
+            property.Attributes = MemberAttributes.Public | MemberAttributes.Final;
+            property.Name = "Protocol";
+            property.Type = new CodeTypeReference("Avro.Protocol");
+            property.HasGet = true;
+            // snippet-expression hack: emits "return protocol;" in the getter
+            property.GetStatements.Add(new CodeTypeReferenceExpression("return protocol"));
+            ctd.Members.Add(property);
+
+            var requestMethod = CreateRequestMethod();
+            var builder = new StringBuilder();
+
+            if (protocol.Messages.Count > 0)
+            {
+                builder.Append("switch(messageName)\n\t\t\t{");
+
+                foreach (var a in protocol.Messages)
+                {
+                    builder.Append("\n\t\t\t\tcase \"").Append(a.Key).Append("\":\n");
+
+                    bool unused = false;
+                    string type = getType(a.Value.Response, false, ref unused);
+
+                    builder.Append("\t\t\t\trequestor.Request<")
+                           .Append(type)
+                           .Append(">(messageName, args, callback);\n");
+                    builder.Append("\t\t\t\tbreak;\n");
+                }
+
+                builder.Append("\t\t\t}");
+            }
+            var cseGet = new CodeSnippetExpression(builder.ToString());
+
+            requestMethod.Statements.Add(cseGet);
+            ctd.Members.Add(requestMethod);
+
+            AddMethods(protocol, false, ctd);
+
+            string nspace = protocol.Namespace;
+            if (string.IsNullOrEmpty(nspace))
+                throw new CodeGenException("Namespace required for protocol " + protocol.Name);
+            CodeNamespace codens = addNamespace(nspace);
+
+            codens.Types.Add(ctd);
+
+            // Create callback abstract class
+            ctd = new CodeTypeDeclaration(protocolNameMangled + "Callback");
+            ctd.TypeAttributes = TypeAttributes.Abstract | TypeAttributes.Public;
+            ctd.IsClass = true;
+            ctd.BaseTypes.Add(protocolNameMangled);
+
+            AddProtocolDocumentation(protocol, ctd);
+
+            AddMethods(protocol, true, ctd);
+
+            codens.Types.Add(ctd);
+        }
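+        // Illustration (a sketch): for a hypothetical protocol named Greeter with a single
+        // message "hello" taking and returning a string, the two declarations above come out
+        // roughly as:
+        //
+        //   public abstract class Greeter : Avro.Specific.ISpecificProtocol
+        //   {
+        //       private static readonly Avro.Protocol protocol = Avro.Protocol.Parse("...");
+        //       public Avro.Protocol Protocol { get { return protocol; } }
+        //       public void Request(ICallbackRequestor requestor, string messageName,
+        //                           object[] args, object callback) { /* switch on messageName */ }
+        //       public abstract string hello(string greeting);
+        //   }
+        //
+        //   public abstract class GreeterCallback : Greeter
+        //   {
+        //       public abstract void hello(string greeting, Avro.IO.ICallback<string> callback);
+        //   }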
+
+        private static CodeMemberMethod CreateRequestMethod()
+        {
+            var requestMethod = new CodeMemberMethod();
+            requestMethod.Attributes = MemberAttributes.Public | MemberAttributes.Final;
+            requestMethod.Name = "Request";
+            requestMethod.ReturnType = new CodeTypeReference(typeof (void));
+            {
+                var requestor = new CodeParameterDeclarationExpression(typeof (Avro.Specific.ICallbackRequestor),
+                                                                       "requestor");
+                requestMethod.Parameters.Add(requestor);
+
+                var messageName = new CodeParameterDeclarationExpression(typeof (string), "messageName");
+                requestMethod.Parameters.Add(messageName);
+
+                var args = new CodeParameterDeclarationExpression(typeof (object[]), "args");
+                requestMethod.Parameters.Add(args);
+
+                var callback = new CodeParameterDeclarationExpression(typeof (object), "callback");
+                requestMethod.Parameters.Add(callback);
+            }
+            return requestMethod;
+        }
+
+        private static void AddMethods(Protocol protocol, bool generateCallback, CodeTypeDeclaration ctd)
+        {
+            foreach (var e in protocol.Messages)
+            {
+                var name = e.Key;
+                var message = e.Value;
+                var response = message.Response;
+
+                if (generateCallback && message.Oneway.GetValueOrDefault())
+                    continue;
+
+                var messageMember = new CodeMemberMethod();
+                messageMember.Name = CodeGenUtil.Instance.Mangle(name);
+                messageMember.Attributes = MemberAttributes.Public | MemberAttributes.Abstract;
+
+                if (message.Doc != null && message.Doc.Trim() != string.Empty)
+                    messageMember.Comments.Add(new CodeCommentStatement(message.Doc));
+
+                if (message.Oneway.GetValueOrDefault() || generateCallback)
+                {
+                    messageMember.ReturnType = new CodeTypeReference(typeof (void));
+                }
+                else
+                {
+                    bool ignored = false;
+                    string type = getType(response, false, ref ignored);
+
+                    messageMember.ReturnType = new CodeTypeReference(type);
+                }
+
+                foreach (Field field in message.Request.Fields)
+                {
+                    bool ignored = false;
+                    string type = getType(field.Schema, false, ref ignored);
+
+                    string fieldName = CodeGenUtil.Instance.Mangle(field.Name);
+                    var parameter = new CodeParameterDeclarationExpression(type, fieldName);
+                    messageMember.Parameters.Add(parameter);
+                }
+
+                if (generateCallback)
+                {
+                    bool unused = false;
+                    var type = getType(response, false, ref unused);
+                    var parameter = new CodeParameterDeclarationExpression("Avro.IO.ICallback<" + type + ">",
+                                                                           "callback");
+                    messageMember.Parameters.Add(parameter);
+                }
+
+                ctd.Members.Add(messageMember);
+            }
+        }
+
+        private void AddProtocolDocumentation(Protocol protocol, CodeTypeDeclaration ctd)
+        {
+            // Add interface documentation
+            if (protocol.Doc != null && protocol.Doc.Trim() != string.Empty)
+            {
+                var interfaceDoc = createDocComment(protocol.Doc);
+                if (interfaceDoc != null)
+                    ctd.Comments.Add(interfaceDoc);
+            }
+        }
+
+        /// <summary>
+        /// Creates a class declaration for a record schema
+        /// </summary>
+        /// <param name="schema">record schema</param>
+        /// <returns>the generated type declaration</returns>
+        protected virtual CodeTypeDeclaration processRecord(Schema schema)
+        {
+            RecordSchema recordSchema = schema as RecordSchema;
+            if (null == recordSchema) throw new CodeGenException("Unable to cast schema into a record");
+
+            bool isError = recordSchema.Tag == Schema.Type.Error;
+
+            // declare the class
+            var ctd = new CodeTypeDeclaration(CodeGenUtil.Instance.Mangle(recordSchema.Name));
+            ctd.BaseTypes.Add(isError ? "SpecificException" : "ISpecificRecord");
+
+            ctd.Attributes = MemberAttributes.Public;
+            ctd.IsClass = true;
+            ctd.IsPartial = true;
+
+            createSchemaField(schema, ctd, isError);
+
+            // declare Get() to be used by the Writer classes
+            var cmmGet = new CodeMemberMethod();
+            cmmGet.Name = "Get";
+            cmmGet.Attributes = MemberAttributes.Public;
+            cmmGet.ReturnType = new CodeTypeReference("System.Object");
+            cmmGet.Parameters.Add(new CodeParameterDeclarationExpression(typeof(int), "fieldPos"));
+            StringBuilder getFieldStmt = new StringBuilder("switch (fieldPos)\n\t\t\t{\n");
+
+            // declare Put() to be used by the Reader classes
+            var cmmPut = new CodeMemberMethod();
+            cmmPut.Name = "Put";
+            cmmPut.Attributes = MemberAttributes.Public;
+            cmmPut.ReturnType = new CodeTypeReference(typeof(void));
+            cmmPut.Parameters.Add(new CodeParameterDeclarationExpression(typeof(int), "fieldPos"));
+            cmmPut.Parameters.Add(new CodeParameterDeclarationExpression("System.Object", "fieldValue"));
+            var putFieldStmt = new StringBuilder("switch (fieldPos)\n\t\t\t{\n");
+
+            if (isError)
+            {
+                cmmGet.Attributes |= MemberAttributes.Override;
+                cmmPut.Attributes |= MemberAttributes.Override;
+            }
+
+            foreach (Field field in recordSchema.Fields)
+            {
+                // Determine type of field
+                bool nullibleEnum = false;
+                string baseType = getType(field.Schema, false, ref nullibleEnum);
+                var ctrfield = new CodeTypeReference(baseType);
+
+                // Create field
+                string privFieldName = string.Concat("_", field.Name);
+                var codeField = new CodeMemberField(ctrfield, privFieldName);
+                codeField.Attributes = MemberAttributes.Private;
+
+                // Process field documentation if it exists and add it to the field
+                CodeCommentStatement propertyComment = null;
+                if (!string.IsNullOrEmpty(field.Documentation))
+                {
+                    propertyComment = createDocComment(field.Documentation);
+                    if (null != propertyComment)
+                        codeField.Comments.Add(propertyComment);
+                }
+
+                // Add field to class
+                ctd.Members.Add(codeField);
+
+                // Create reference to the field - this.fieldname
+                var fieldRef = new CodeFieldReferenceExpression(new CodeThisReferenceExpression(), privFieldName);
+                var mangledName = CodeGenUtil.Instance.Mangle(field.Name);
+
+                // Create field property with get and set methods
+                var property = new CodeMemberProperty();
+                property.Attributes = MemberAttributes.Public | MemberAttributes.Final;
+                property.Name = mangledName;
+                property.Type = ctrfield;
+                property.GetStatements.Add(new CodeMethodReturnStatement(fieldRef));
+                property.SetStatements.Add(new CodeAssignStatement(fieldRef, new CodePropertySetValueReferenceExpression()));
+                if (null != propertyComment)
+                    property.Comments.Add(propertyComment);
+
+                // Add field property to class
+                ctd.Members.Add(property);
+
+                // add to Get()
+                getFieldStmt.Append("\t\t\tcase ");
+                getFieldStmt.Append(field.Pos);
+                getFieldStmt.Append(": return this.");
+                getFieldStmt.Append(mangledName);
+                getFieldStmt.Append(";\n");
+
+                // add to Put()
+                putFieldStmt.Append("\t\t\tcase ");
+                putFieldStmt.Append(field.Pos);
+                putFieldStmt.Append(": this.");
+                putFieldStmt.Append(mangledName);
+
+                if (nullibleEnum)
+                {
+                    putFieldStmt.Append(" = fieldValue == null ? (");
+                    putFieldStmt.Append(baseType);
+                    putFieldStmt.Append(")null : (");
+
+                    string type = baseType.Remove(0, 16);  // remove System.Nullable<
+                    type = type.Remove(type.Length - 1);   // remove >
+
+                    putFieldStmt.Append(type);
+                    putFieldStmt.Append(")fieldValue; break;\n");
+                }
+                else
+                {
+                    putFieldStmt.Append(" = (");
+                    putFieldStmt.Append(baseType);
+                    putFieldStmt.Append(")fieldValue; break;\n");
+                }
+            }
+
+            // end switch block for Get()
+            getFieldStmt.Append("\t\t\tdefault: throw new AvroRuntimeException(\"Bad index \" + fieldPos + \" in Get()\");\n\t\t\t}");
+            var cseGet = new CodeSnippetExpression(getFieldStmt.ToString());
+            cmmGet.Statements.Add(cseGet);
+            ctd.Members.Add(cmmGet);
+
+            // end switch block for Put()
+            putFieldStmt.Append("\t\t\tdefault: throw new AvroRuntimeException(\"Bad index \" + fieldPos + \" in Put()\");\n\t\t\t}");
+            var csePut = new CodeSnippetExpression(putFieldStmt.ToString());
+            cmmPut.Statements.Add(csePut);
+            ctd.Members.Add(cmmPut);
+
+            string nspace = recordSchema.Namespace;
+            if (string.IsNullOrEmpty(nspace))
+                throw new CodeGenException("Namespace required for record schema " + recordSchema.Name);
+            CodeNamespace codens = addNamespace(nspace);
+
+            codens.Types.Add(ctd);
+
+            return ctd;
+        }
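+        // Illustration (a sketch): for a hypothetical schema
+        // {"type":"record","name":"User","namespace":"org.example","fields":[{"name":"name","type":"string"}]},
+        // processRecord emits roughly:
+        //
+        //   public partial class User : ISpecificRecord
+        //   {
+        //       public static Schema _SCHEMA = Schema.Parse("...");
+        //       private System.String _name;
+        //       public System.String name
+        //       {
+        //           get { return this._name; }
+        //           set { this._name = value; }
+        //       }
+        //       public virtual Schema Schema { get { return User._SCHEMA; } }
+        //       public object Get(int fieldPos)
+        //       {
+        //           switch (fieldPos) { case 0: return this.name; default: throw new AvroRuntimeException(...); }
+        //       }
+        //       public void Put(int fieldPos, object fieldValue)
+        //       {
+        //           switch (fieldPos) { case 0: this.name = (System.String)fieldValue; break; default: throw new AvroRuntimeException(...); }
+        //       }
+        //   }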
+
+        /// <summary>
+        /// Gets the string representation of the schema's data type
+        /// </summary>
+        /// <param name="schema">schema</param>
+        /// <param name="nullible">flag to indicate union with null</param>
+        /// <param name="nullibleEnum">set to true when the type resolves to a nullable enum</param>
+        /// <returns>string representation of the C# type</returns>
+        internal static string getType(Schema schema, bool nullible, ref bool nullibleEnum)
+        {
+            switch (schema.Tag)
+            {
+                case Schema.Type.Null:
+                    return "System.Object";
+                case Schema.Type.Boolean:
+                    if (nullible) return "System.Nullable<bool>";
+                    else return typeof(bool).ToString();
+                case Schema.Type.Int:
+                    if (nullible) return "System.Nullable<int>";
+                    else return typeof(int).ToString();
+                case Schema.Type.Long:
+                    if (nullible) return "System.Nullable<long>";
+                    else return typeof(long).ToString();
+                case Schema.Type.Float:
+                    if (nullible) return "System.Nullable<float>";
+                    else return typeof(float).ToString();
+                case Schema.Type.Double:
+                    if (nullible) return "System.Nullable<double>";
+                    else return typeof(double).ToString();
+
+                case Schema.Type.Bytes:
+                    return typeof(byte[]).ToString();
+                case Schema.Type.String:
+                    return typeof(string).ToString();
+
+                case Schema.Type.Enumeration:
+                    var namedSchema = schema as NamedSchema;
+                    if (null == namedSchema)
+                        throw new CodeGenException("Unable to cast schema into a named schema");
+                    if (nullible)
+                    {
+                        nullibleEnum = true;
+                        return "System.Nullable<" + CodeGenUtil.Instance.Mangle(namedSchema.Fullname) + ">";
+                    }
+                    else return CodeGenUtil.Instance.Mangle(namedSchema.Fullname);
+
+                case Schema.Type.Fixed:
+                case Schema.Type.Record:
+                case Schema.Type.Error:
+                    namedSchema = schema as NamedSchema;
+                    if (null == namedSchema)
+                        throw new CodeGenException("Unable to cast schema into a named schema");
+                    return CodeGenUtil.Instance.Mangle(namedSchema.Fullname);
+
+                case Schema.Type.Array:
+                    var arraySchema = schema as ArraySchema;
+                    if (null == arraySchema)
+                        throw new CodeGenException("Unable to cast schema into an array schema");
+
+                    return "IList<" + getType(arraySchema.ItemSchema, false, ref nullibleEnum) + ">";
+
+                case Schema.Type.Map:
+                    var mapSchema = schema as MapSchema;
+                    if (null == mapSchema)
+                        throw new CodeGenException("Unable to cast schema into a map schema");
+                    return "IDictionary<string," + getType(mapSchema.ValueSchema, false, ref nullibleEnum) + ">";
+
+                case Schema.Type.Union:
+                    var unionSchema = schema as UnionSchema;
+                    if (null == unionSchema)
+                        throw new CodeGenException("Unable to cast schema into a union schema");
+                    Schema nullibleType = getNullableType(unionSchema);
+                    if (null == nullibleType)
+                        return CodeGenUtil.Object;
+                    else
+                        return getType(nullibleType, true, ref nullibleEnum);
+            }
+            throw new CodeGenException("Unable to generate CodeTypeReference for " + schema.Name + " type " + schema.Tag);
+        }
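+        // A few concrete mappings produced by the switch above, for orientation:
+        //
+        //   "int"                               -> System.Int32
+        //   ["null","int"]                      -> System.Nullable<int>
+        //   {"type":"array","items":"string"}   -> IList<System.String>
+        //   {"type":"map","values":"long"}      -> IDictionary<string,System.Int64>
+        //   ["null","string","int"]             -> System.Object (no single non-null branch)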
+
+        /// <summary>
+        /// Gets the schema of a union with null
+        /// </summary>
+        /// <param name="schema">union schema</param>
+        /// <returns>the non-null branch if the union is exactly [null, T]; otherwise null</returns>
+        public static Schema getNullableType(UnionSchema schema)
+        {
+            Schema ret = null;
+            if (schema.Count == 2)
+            {
+                bool nullable = false;
+                foreach (Schema childSchema in schema.Schemas)
+                {
+                    if (childSchema.Tag == Schema.Type.Null)
+                        nullable = true;
+                    else
+                        ret = childSchema;
+                }
+                if (!nullable)
+                    ret = null;
+            }
+            return ret;
+        }
+
+        /// <summary>
+        /// Creates the static schema field for class types
+        /// </summary>
+        /// <param name="schema">schema</param>
+        /// <param name="ctd">CodeTypeDeclaration for the class</param>
+        protected virtual void createSchemaField(Schema schema, CodeTypeDeclaration ctd, bool overrideFlag)
+        {
+            // create schema field 
+            var ctrfield = new CodeTypeReference("Schema");
+            string schemaFname = "_SCHEMA";
+            var codeField = new CodeMemberField(ctrfield, schemaFname);
+            codeField.Attributes = MemberAttributes.Public | MemberAttributes.Static;
+            // create function call Schema.Parse(json)
+            var cpe = new CodePrimitiveExpression(schema.ToString());
+            var cmie = new CodeMethodInvokeExpression(
+                new CodeMethodReferenceExpression(new CodeTypeReferenceExpression(typeof(Schema)), "Parse"),
+                new CodeExpression[] { cpe });
+            codeField.InitExpression = cmie;
+            ctd.Members.Add(codeField);
+
+            // create property to get static schema field
+            var property = new CodeMemberProperty();
+            property.Attributes = MemberAttributes.Public;
+            if (overrideFlag) property.Attributes |= MemberAttributes.Override;
+            property.Name = "Schema";
+            property.Type = ctrfield;
+
+            property.GetStatements.Add(new CodeMethodReturnStatement(new CodeTypeReferenceExpression(ctd.Name + "." + schemaFname)));
+            ctd.Members.Add(property);
+        }
+
+        /// <summary>
+        /// Creates an XML documentation for the given comment
+        /// </summary>
+        /// <param name="comment">comment</param>
+        /// <returns>CodeCommentStatement object</returns>
+        protected virtual CodeCommentStatement createDocComment(string comment)
+        {
+            string text = string.Format("<summary>\r\n {0}\r\n </summary>", comment);
+            return new CodeCommentStatement(text, true);
+        }
+
+        /// <summary>
+        /// Writes the generated compile unit into one file
+        /// </summary>
+        /// <param name="outputFile">name of output file to write to</param>
+        public virtual void WriteCompileUnit(string outputFile)
+        {
+            var cscp = new CSharpCodeProvider();
+
+            var opts = new CodeGeneratorOptions();
+            opts.BracingStyle = "C";
+            opts.IndentString = "\t";
+            opts.BlankLinesBetweenMembers = false;
+
+            using (var outfile = new StreamWriter(outputFile))
+            {
+                cscp.GenerateCodeFromCompileUnit(CompileUnit, outfile, opts);
+            }
+        }
+
+        /// <summary>
+        /// Writes each type in each namespace into an individual file
+        /// </summary>
+        /// <param name="outputdir">name of directory to write to</param>
+        public virtual void WriteTypes(string outputdir)
+        {
+            var cscp = new CSharpCodeProvider();
+
+            var opts = new CodeGeneratorOptions();
+            opts.BracingStyle = "C";
+            opts.IndentString = "\t";
+            opts.BlankLinesBetweenMembers = false;
+
+            CodeNamespaceCollection nsc = CompileUnit.Namespaces;
+            for (int i = 0; i < nsc.Count; i++)
+            {
+                var ns = nsc[i];
+
+                string dir = outputdir + "\\" + CodeGenUtil.Instance.UnMangle(ns.Name).Replace('.', '\\');
+                Directory.CreateDirectory(dir);
+
+                var new_ns = new CodeNamespace(ns.Name);
+                new_ns.Comments.Add(CodeGenUtil.Instance.FileComment);
+                foreach (CodeNamespaceImport nci in CodeGenUtil.Instance.NamespaceImports)
+                    new_ns.Imports.Add(nci);
+
+                var types = ns.Types;
+                for (int j = 0; j < types.Count; j++)
+                {
+                    var ctd = types[j];
+                    string file = dir + "\\" + CodeGenUtil.Instance.UnMangle(ctd.Name) + ".cs";
+                    using (var writer = new StreamWriter(file, false))
+                    {
+                        new_ns.Types.Add(ctd);
+                        cscp.GenerateCodeFromNamespace(new_ns, writer, opts);
+                        new_ns.Types.Remove(ctd);
+                    }
+                }
+            }
+        }
+    }
+}
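A typical end-to-end use of the CodeGen class above, as a sketch: AddSchema and GenerateCode are defined earlier in CodeGen.cs, outside this excerpt, and schemaJson/outputDirectory are placeholders.

    var codegen = new CodeGen();
    codegen.AddSchema(Schema.Parse(schemaJson)); // queue one or more schemas
    codegen.GenerateCode();                      // populate the CompileUnit
    codegen.WriteTypes(outputDirectory);         // one .cs file per generated type
    // alternatively, WriteCompileUnit("Generated.cs") emits everything into a single file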
diff --git a/lang/csharp/src/apache/main/CodeGen/CodeGenException.cs b/lang/csharp/src/apache/main/CodeGen/CodeGenException.cs
new file mode 100644
index 0000000..025d769
--- /dev/null
+++ b/lang/csharp/src/apache/main/CodeGen/CodeGenException.cs
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Avro
+{
+    class CodeGenException : AvroException
+    {
+        public CodeGenException(string s)
+            : base(s)
+        {
+
+        }
+        public CodeGenException(string s, Exception inner)
+            : base(s, inner)
+        {
+
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/CodeGen/CodeGenUtil.cs b/lang/csharp/src/apache/main/CodeGen/CodeGenUtil.cs
new file mode 100644
index 0000000..68b83d8
--- /dev/null
+++ b/lang/csharp/src/apache/main/CodeGen/CodeGenUtil.cs
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.CodeDom;
+
+namespace Avro
+{
+    /// <summary>
+    /// A singleton class containing data used by codegen
+    /// </summary>
+    public sealed class CodeGenUtil
+    {
+        private static readonly CodeGenUtil instance = new CodeGenUtil();
+        public static CodeGenUtil Instance { get { return instance; } }
+
+        public CodeNamespaceImport[] NamespaceImports { get; private set; }
+        public CodeCommentStatement FileComment { get; private set; }
+        public HashSet<string> ReservedKeywords { get; private set; }
+        private const char At = '@';
+        private const char Dot = '.';
+        public const string Object = "System.Object";
+
+        private CodeGenUtil()
+        {
+            NamespaceImports = new CodeNamespaceImport[] {
+                new CodeNamespaceImport("System"),
+                new CodeNamespaceImport("System.Collections.Generic"),
+                new CodeNamespaceImport("System.Text"),
+                new CodeNamespaceImport("Avro"),
+                new CodeNamespaceImport("Avro.Specific") };
+
+            FileComment = new CodeCommentStatement(
+@"------------------------------------------------------------------------------
+ <auto-generated>
+    Generated by " + System.AppDomain.CurrentDomain.FriendlyName + ", version " + System.Reflection.Assembly.GetExecutingAssembly().GetName().Version + @"
+    Changes to this file may cause incorrect behavior and will be lost if code
+    is regenerated
+ </auto-generated>
+ ------------------------------------------------------------------------------");
+
+            // Visual Studio 2010 http://msdn.microsoft.com/en-us/library/x53a06bb.aspx
+            ReservedKeywords = new HashSet<string>() {
+                "abstract","as", "base", "bool", "break", "byte", "case", "catch", "char", "checked", "class",
+                "const", "continue", "decimal", "default", "delegate", "do", "double", "else", "enum", "event",
+                "explicit", "extern", "false", "finally", "fixed", "float", "for", "foreach", "goto", "if",
+                "implicit", "in", "int", "interface", "internal", "is", "lock", "long", "namespace", "new",
+                "null", "object", "operator", "out", "override", "params", "private", "protected", "public",
+                "readonly", "ref", "return", "sbyte", "sealed", "short", "sizeof", "stackalloc", "static",
+                "string", "struct", "switch", "this", "throw", "true", "try", "typeof", "uint", "ulong",
+                "unchecked", "unsafe", "ushort", "using", "virtual", "void", "volatile", "while", "value", "partial" };
+        }
+
+        /// <summary>
+        /// Append @ to all reserved keywords that appear on the given name
+        /// </summary>
+        /// <param name="name"></param>
+        /// <returns></returns>
+        public string Mangle(string name)
+        {
+            var builder = new StringBuilder();
+            string[] names = name.Split(Dot);
+            for (int i = 0; i < names.Length; ++i)
+            {
+                if (ReservedKeywords.Contains(names[i]))
+                    builder.Append(At);
+                builder.Append(names[i]);
+                builder.Append(Dot);
+            }
+            builder.Remove(builder.Length - 1, 1);
+            return builder.ToString();
+        }
+
+        /// <summary>
+        /// Remove all the @
+        /// </summary>
+        /// <param name="name"></param>
+        /// <returns></returns>
+        public string UnMangle(string name)
+        {
+            var builder = new StringBuilder(name.Length);
+            for (int i = 0; i < name.Length; ++i)
+                if (name[i] != At)
+                    builder.Append(name[i]);
+            return builder.ToString();
+        }
+    }
+}
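A quick sketch of the mangling rules above (both "event" and "fixed" appear in ReservedKeywords):

    var util = CodeGenUtil.Instance;
    string mangled = util.Mangle("com.event.fixed"); // -> "com.@event.@fixed"
    string original = util.UnMangle(mangled);        // -> "com.event.fixed"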
diff --git a/lang/csharp/src/apache/main/File/Codec.cs b/lang/csharp/src/apache/main/File/Codec.cs
new file mode 100644
index 0000000..60453ae
--- /dev/null
+++ b/lang/csharp/src/apache/main/File/Codec.cs
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.IO;
+
+namespace Avro.File
+{
+    abstract public class Codec
+    {
+        /// <summary>
+        /// Compress data using implemented codec
+        /// </summary>
+        /// <param name="uncompressedData"></param>
+        /// <returns></returns>
+        abstract public byte[] Compress(byte[] uncompressedData);
+
+        /// <summary>
+        /// Decompress data using implemented codec
+        /// </summary>
+        /// <param name="compressedData"></param>
+        /// <returns></returns>
+        abstract public byte[] Decompress(byte[] compressedData);
+
+        /// <summary>
+        /// Name of this codec type
+        /// </summary>
+        /// <returns></returns>
+        abstract public string GetName();
+
+        /// <summary>
+        ///  Codecs must implement an equals() method
+        /// </summary>
+        /// <param name="other"></param>
+        /// <returns></returns>
+        abstract public override bool Equals(object other);
+
+        /// <summary>
+        /// Codecs must implement a HashCode() method that is
+        /// consistent with Equals
+        /// </summary>
+        /// <returns></returns>
+        abstract public override int GetHashCode();
+
+        /// <summary>
+        /// Codec types
+        /// </summary>
+        public enum Type
+        {
+            Deflate,
+            //Snappy 
+            Null
+        };
+
+        /// <summary>
+        /// Factory method to return child
+        /// codec instance based on Codec.Type  
+        /// </summary>
+        /// <param name="codecType"></param>
+        /// <returns></returns>
+        public static Codec CreateCodec(Type codecType)
+        {
+            switch (codecType)
+            {
+                case Type.Deflate:
+                    return new DeflateCodec();
+                default:
+                    return new NullCodec();
+            }
+        }
+
+        /// <summary>
+        /// Factory method to return child
+        /// codec instance based on string type  
+        /// </summary>
+        /// <param name="codecType"></param>
+        /// <returns></returns>
+        public static Codec CreateCodecFromString(string codecType)
+        {
+            switch (codecType)
+            {
+                case DataFileConstants.DeflateCodec:
+                    return new DeflateCodec();
+                default:
+                    return new NullCodec();
+            }
+        }
+
+        /// <summary>
+        /// Returns name of codec
+        /// </summary>
+        /// <returns></returns>
+        public override string ToString()
+        {
+            return GetName();
+        }
+    }
+}
\ No newline at end of file
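Both factory methods deliberately fall back to the null codec for unrecognized input; a sketch (DeflateCodec and NullCodec are the two concrete codecs added in this commit):

    Codec byType = Codec.CreateCodec(Codec.Type.Deflate);  // DeflateCodec
    Codec byName = Codec.CreateCodecFromString("deflate"); // DeflateCodec
    Codec other  = Codec.CreateCodecFromString("snappy");  // NullCodec (snappy not supported here)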
diff --git a/lang/csharp/src/apache/main/File/DataBlock.cs b/lang/csharp/src/apache/main/File/DataBlock.cs
new file mode 100644
index 0000000..8dfe39c
--- /dev/null
+++ b/lang/csharp/src/apache/main/File/DataBlock.cs
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System.IO;
+using Avro.IO;
+using Avro.Generic;
+using System.Collections.Generic;
+
+namespace Avro.File
+{
+    public class DataBlock
+    {
+        public byte[] Data { get;  set; }
+        public long NumberOfEntries { get; set; }
+        public long BlockSize { get; set; }
+       
+        public DataBlock(long numberOfEntries, long blockSize)
+        {
+            this.NumberOfEntries = numberOfEntries;
+            this.BlockSize = blockSize;
+            this.Data = new byte[blockSize];
+        }
+
+        internal Stream GetDataAsStream()
+        {
+            return new MemoryStream(Data);
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/File/DataFileConstants.cs b/lang/csharp/src/apache/main/File/DataFileConstants.cs
new file mode 100644
index 0000000..cb4b482
--- /dev/null
+++ b/lang/csharp/src/apache/main/File/DataFileConstants.cs
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Avro.File
+{
+    public class DataFileConstants
+    {
+        public const string MetaDataSync = "avro.sync";
+        public const string MetaDataCodec = "avro.codec";
+        public const string MetaDataSchema = "avro.schema";
+        public const string NullCodec = "null";
+        public const string DeflateCodec = "deflate";
+        public const string MetaDataReserved = "avro";
+
+        public const int Version = 1;
+        public static byte[] Magic = { (byte)'O', 
+                                       (byte)'b', 
+                                       (byte)'j', 
+                                       (byte)Version };
+
+        public const int NullCodecHash = 2;
+        public const int DeflateCodecHash = 0;
+
+        public const int SyncSize = 16;
+        public const int DefaultSyncInterval = 4000 * SyncSize;
+    }
+}
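These constants describe the Avro container layout: a file starts with the 4-byte magic ('O', 'b', 'j', 0x01), followed by a metadata map keyed by avro.schema and avro.codec, then a 16-byte sync marker; each subsequent data block is an entry count, a byte size, the (possibly compressed) data, and a repeat of the sync marker, which is what the reader below expects.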
diff --git a/lang/csharp/src/apache/main/File/DataFileReader.cs b/lang/csharp/src/apache/main/File/DataFileReader.cs
new file mode 100644
index 0000000..e96686d
--- /dev/null
+++ b/lang/csharp/src/apache/main/File/DataFileReader.cs
@@ -0,0 +1,425 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.IO;
+using Avro.Generic;
+using Avro.IO;
+using Avro.Specific;
+
+namespace Avro.File
+{
+    public class DataFileReader<T> : IFileReader<T>
+    {
+        public delegate DatumReader<T> CreateDatumReader(Schema writerSchema, Schema readerSchema);
+
+        private DatumReader<T> _reader;
+        private Decoder _decoder, _datumDecoder;
+        private Header _header;
+        private Codec _codec;
+        private DataBlock _currentBlock;
+        private long _blockRemaining;
+        private long _blockSize;
+        private bool _availableBlock;
+        private byte[] _syncBuffer;
+        private long _blockStart;
+        private Stream _stream;
+        private Schema _readerSchema;
+        private readonly CreateDatumReader _datumReaderFactory;
+
+        /// <summary>
+        ///  Open a reader for a file using path
+        /// </summary>
+        /// <param name="path"></param>
+        /// <returns></returns>
+        public static IFileReader<T> OpenReader(string path)
+        {
+            return OpenReader(new FileStream(path, FileMode.Open), null);
+        }
+
+        /// <summary>
+        ///  Open a reader for a file using path and the reader's schema
+        /// </summary>
+        /// <param name="path"></param>
+        /// <param name="readerSchema"></param>
+        /// <returns></returns>
+        public static IFileReader<T> OpenReader(string path, Schema readerSchema)
+        {
+            return OpenReader(new FileStream(path, FileMode.Open), readerSchema);
+        }
+
+        /// <summary>
+        ///  Open a reader for a stream
+        /// </summary>
+        /// <param name="inStream"></param>
+        /// <returns></returns>
+        public static IFileReader<T> OpenReader(Stream inStream)
+        {
+            return OpenReader(inStream, null);
+        }
+
+        /// <summary>
+        ///  Open a reader for a stream using the reader's schema
+        /// </summary>
+        /// <param name="inStream"></param>
+        /// <param name="readerSchema"></param>
+        /// <returns></returns>
+        public static IFileReader<T> OpenReader(Stream inStream, Schema readerSchema)
+        {
+            return OpenReader(inStream, readerSchema, CreateDefaultReader);
+        }
+
+        /// <summary>
+        ///  Open a reader for a stream using the reader's schema and a custom DatumReader
+        /// </summary>
+        /// <param name="inStream"></param>
+        /// <param name="readerSchema"></param>
+        /// <param name="datumReaderFactory"></param>
+        /// <returns></returns>
+        public static IFileReader<T> OpenReader(Stream inStream, Schema readerSchema, CreateDatumReader datumReaderFactory)
+        {
+            if (!inStream.CanSeek)
+                throw new AvroRuntimeException("Not a valid input stream - must be seekable!");
+
+            if (inStream.Length < DataFileConstants.Magic.Length)
+                throw new AvroRuntimeException("Not an Avro data file");
+
+            // verify magic header
+            byte[] magic = new byte[DataFileConstants.Magic.Length];
+            inStream.Seek(0, SeekOrigin.Begin);
+            for (int c = 0; c < magic.Length; c += inStream.Read(magic, c, magic.Length - c)) { }
+            inStream.Seek(0, SeekOrigin.Begin);
+
+            if (magic.SequenceEqual(DataFileConstants.Magic))   // current format
+                return new DataFileReader<T>(inStream, readerSchema, datumReaderFactory); // (the pre-1.3 file format is not supported)
+
+            throw new AvroRuntimeException("Not an Avro data file");
+        }
+
+        DataFileReader(Stream stream, Schema readerSchema, CreateDatumReader datumReaderFactory)
+        {
+            _readerSchema = readerSchema;
+            _datumReaderFactory = datumReaderFactory;
+            Init(stream);
+            BlockFinished();
+        }
+
+        public Header GetHeader()
+        {
+            return _header;
+        }
+
+        public Schema GetSchema()
+        {
+            return _header.Schema;
+        }
+
+        public ICollection<string> GetMetaKeys()
+        {
+            return _header.MetaData.Keys;
+        }
+
+        public byte[] GetMeta(string key)
+        {
+            try
+            {
+                return _header.MetaData[key];
+            }
+            catch (KeyNotFoundException)
+            {
+                return null; 
+            }
+        }
+
+        public long GetMetaLong(string key)
+        {
+            return long.Parse(GetMetaString(key));
+        }
+
+        public string GetMetaString(string key)
+        {
+            byte[] value = GetMeta(key);
+            if (value == null)
+            {
+                return null;
+            }
+            try
+            {
+                return System.Text.Encoding.UTF8.GetString(value);          
+            }
+            catch (Exception e)
+            {
+                throw new AvroRuntimeException(string.Format("Error fetching meta data for key: {0}", key), e);
+            }
+        }
+
+        public void Seek(long position)
+        {
+            _stream.Position = position;
+            _decoder = new BinaryDecoder(_stream);
+            _datumDecoder = null;
+            _blockRemaining = 0;
+            _blockStart = position;
+        }
+
+        public void Sync(long position)
+        {
+            Seek(position);
+            // work around an issue where 1.5.4 C stored sync in metadata
+            if ((position == 0) && (GetMeta(DataFileConstants.MetaDataSync) != null)) 
+            {
+                Init(_stream); // re-init to skip header
+                return;
+            }
+
+            try
+            {
+                bool done = false;
+
+                do // read until sync mark matched
+                {
+                    _decoder.ReadFixed(_syncBuffer);
+                    if (Enumerable.SequenceEqual(_syncBuffer, _header.SyncData))
+                        done = true;
+                    else
+                        _stream.Position = _stream.Position - (DataFileConstants.SyncSize - 1);
+                } while (!done);
+            }
+            catch (Exception) { } // sync marker not found; fall through and treat the rest as EOF
+
+            _blockStart = _stream.Position;
+        }
+
+        public bool PastSync(long position)
+        {
+            return ((_blockStart >= position + DataFileConstants.SyncSize) || (_blockStart >= _stream.Length));
+        }
+
+        public long PreviousSync()
+        {
+            return _blockStart;
+        }
+
+        public long Tell()
+        {
+            return _stream.Position;
+        }
+
+        public IEnumerable<T> NextEntries
+        {
+            get
+            {
+                while (HasNext())
+                {
+                    yield return Next();
+                }
+            }
+        }
+
+        public bool HasNext()
+        {
+            try
+            {
+                if (_blockRemaining == 0)
+                {
+                    // TODO: Check that the (block) stream is not partially read
+                    /*if (_datumDecoder != null) 
+                    { }*/
+                    if (HasNextBlock())
+                    {
+                        _currentBlock = NextRawBlock(_currentBlock);
+                        _currentBlock.Data = _codec.Decompress(_currentBlock.Data);
+                        _datumDecoder = new BinaryDecoder(_currentBlock.GetDataAsStream());
+                    }
+                }
+                return _blockRemaining != 0;
+            }
+            catch (Exception e)
+            {
+                throw new AvroRuntimeException(string.Format("Error fetching next object from block: {0}", e));
+            }
+        }
+
+        public void Reset()
+        {
+            Init(_stream);
+        }
+
+        public void Dispose()
+        {
+            _stream.Close();
+        }
+
+        private void Init(Stream stream)
+        {
+            _stream = stream;
+            _header = new Header();
+            _decoder = new BinaryDecoder(stream);
+            _syncBuffer = new byte[DataFileConstants.SyncSize];
+
+            // read magic 
+            byte[] firstBytes = new byte[DataFileConstants.Magic.Length];
+            try
+            {
+                _decoder.ReadFixed(firstBytes);
+            }
+            catch (Exception e)
+            {
+                throw new AvroRuntimeException("Not a valid data file!", e);
+            }
+            if (!firstBytes.SequenceEqual(DataFileConstants.Magic))
+                throw new AvroRuntimeException("Not a valid data file!");
+
+            // read meta data 
+            long len = _decoder.ReadMapStart();
+            if (len > 0)
+            {
+                do
+                {
+                    for (long i = 0; i < len; i++)
+                    {
+                        string key = _decoder.ReadString();
+                        byte[] val = _decoder.ReadBytes();
+                        _header.MetaData.Add(key, val);
+                    }
+                } while ((len = _decoder.ReadMapNext()) != 0);
+            }
+
+            // read in sync data 
+            _decoder.ReadFixed(_header.SyncData);
+
+            // parse schema and set codec 
+            _header.Schema = Schema.Parse(GetMetaString(DataFileConstants.MetaDataSchema));
+            _reader = _datumReaderFactory(_header.Schema, _readerSchema ?? _header.Schema);
+            _codec = ResolveCodec();
+        }
+
+        private static DatumReader<T> CreateDefaultReader(Schema writerSchema, Schema readerSchema)
+        {
+            DatumReader<T> reader = null;
+            Type type = typeof(T);
+
+            if (typeof(ISpecificRecord).IsAssignableFrom(type))
+            {
+                reader = new SpecificReader<T>(writerSchema, readerSchema);
+            }
+            else // generic
+            {
+                reader = new GenericReader<T>(writerSchema, readerSchema);
+            }
+            return reader;
+        }
+
+        private Codec ResolveCodec()
+        {
+            return Codec.CreateCodecFromString(GetMetaString(DataFileConstants.MetaDataCodec));
+        }
+
+        public T Next()
+        {
+            return Next(default(T));
+        }
+
+        private T Next(T reuse)
+        {
+            try
+            {
+                if (!HasNext())
+                    throw new AvroRuntimeException("No more datum objects remaining in block!");
+
+                T result = _reader.Read(reuse, _datumDecoder);
+                if (--_blockRemaining == 0)
+                {
+                    BlockFinished();
+                }
+                return result;
+            }
+            catch (Exception e)
+            {
+                throw new AvroRuntimeException(string.Format("Error fetching next object from block: {0}", e));
+            }
+        }
+
+        private void BlockFinished()
+        {
+            _blockStart = _stream.Position;
+        }
+
+        private DataBlock NextRawBlock(DataBlock reuse)
+        {
+            if (!HasNextBlock())
+                throw new AvroRuntimeException("No data remaining in block!");
+
+            if (reuse == null || reuse.Data.Length < _blockSize)
+            {
+                reuse = new DataBlock(_blockRemaining, _blockSize);
+            }
+            else
+            {
+                reuse.NumberOfEntries = _blockRemaining;
+                reuse.BlockSize = _blockSize;
+            }
+
+            _decoder.ReadFixed(reuse.Data, 0, (int)reuse.BlockSize);
+            _decoder.ReadFixed(_syncBuffer);
+
+            if (!Enumerable.SequenceEqual(_syncBuffer, _header.SyncData))
+                throw new AvroRuntimeException("Invalid sync!");
+
+            _availableBlock = false;
+            return reuse;
+        }
+
+        private bool DataLeft()
+        {
+            long currentPosition = _stream.Position;
+            if (_stream.ReadByte() != -1)
+                _stream.Position = currentPosition;
+            else
+                return false;
+
+            return true;
+        }
+
+        private bool HasNextBlock()
+        {
+            try
+            {
+                // block currently being read 
+                if (_availableBlock)
+                    return true;
+
+                // check to ensure still data to read 
+                if (!DataLeft())
+                    return false;
+
+                _blockRemaining = _decoder.ReadLong();      // read block count
+                _blockSize = _decoder.ReadLong();           // read block size
+                if (_blockSize > System.Int32.MaxValue || _blockSize < 0)
+                {
+                    throw new AvroRuntimeException("Block size invalid or too large for this " +
+                                                   "implementation: " + _blockSize);
+                }
+                _availableBlock = true;
+                return true;
+            }
+            catch (Exception e)
+            {
+                throw new AvroRuntimeException(string.Format("Error ascertaining if data has next block: {0}", e));
+            }
+        }
+    }
+}
\ No newline at end of file
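Reading a container file end to end, as a sketch: GenericRecord comes from Avro.Generic, "data.avro" is a placeholder path, and the using-block assumes IFileReader<T> extends IDisposable, as the Dispose method above suggests.

    using (IFileReader<GenericRecord> reader = DataFileReader<GenericRecord>.OpenReader("data.avro"))
    {
        foreach (GenericRecord record in reader.NextEntries)
        {
            // each datum is decoded lazily; blocks are fetched and decompressed by HasNext()
        }
    }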
diff --git a/lang/csharp/src/apache/main/File/DataFileWriter.cs b/lang/csharp/src/apache/main/File/DataFileWriter.cs
new file mode 100644
index 0000000..a26d224
--- /dev/null
+++ b/lang/csharp/src/apache/main/File/DataFileWriter.cs
@@ -0,0 +1,315 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.IO;
+using Avro.IO;
+using Avro.Generic;
+
+namespace Avro.File
+{
+    public class DataFileWriter<T> : IFileWriter<T>
+    {
+        private Schema _schema;
+        private Codec _codec;
+        private Stream _stream;
+        private MemoryStream _blockStream;
+        private Encoder _encoder, _blockEncoder;
+        private DatumWriter<T> _writer;
+
+        private byte[] _syncData;
+        private bool _isOpen;
+        private bool _headerWritten;
+        private int _blockCount;
+        private int _syncInterval;
+        private IDictionary<string, byte[]> _metaData;
+
+        /// <summary>
+        /// Open a new writer instance to write  
+        /// to a file path, using a Null codec
+        /// </summary>
+        /// <param name="writer"></param>
+        /// <param name="path"></param>
+        /// <returns></returns>
+        public static IFileWriter<T> OpenWriter(DatumWriter<T> writer, string path)
+        {
+            return OpenWriter(writer, new FileStream(path, FileMode.Create), Codec.CreateCodec(Codec.Type.Null));
+        }
+
+        /// <summary>
+        /// Open a new writer instance to write  
+        /// to an output stream, using a Null codec
+        /// </summary>
+        /// <param name="writer"></param>
+        /// <param name="outStream"></param>
+        /// <returns></returns>
+        public static IFileWriter<T> OpenWriter(DatumWriter<T> writer, Stream outStream)
+        {
+            return OpenWriter(writer, outStream, Codec.CreateCodec(Codec.Type.Null));
+        }
+
+        /// <summary>
+        /// Open a new writer instance to write  
+        /// to a file path with a specified codec
+        /// </summary>
+        /// <param name="writer"></param>
+        /// <param name="path"></param>
+        /// <param name="codec"></param>
+        /// <returns></returns>
+        public static IFileWriter<T> OpenWriter(DatumWriter<T> writer, string path, Codec codec)
+        {
+            return OpenWriter(writer, new FileStream(path, FileMode.Create), codec);
+        }
+
+        /// <summary>
+        /// Open a new writer instance to write
+        /// to an output stream with a specified codec
+        /// </summary>
+        /// <param name="writer"></param>
+        /// <param name="outStream"></param>
+        /// <param name="codec"></param>
+        /// <returns></returns>
+        public static IFileWriter<T> OpenWriter(DatumWriter<T> writer, Stream outStream, Codec codec)
+        {
+            return new DataFileWriter<T>(writer).Create(writer.Schema, outStream, codec);
+        }
+
+        DataFileWriter(DatumWriter<T> writer)
+        {
+            _writer = writer;
+            _syncInterval = DataFileConstants.DefaultSyncInterval;
+        }
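+        // Usage sketch (GenericWriter<T> and GenericRecord come from Avro.Generic;
+        // schema, record and the path are placeholders):
+        //
+        //   var datumWriter = new GenericWriter<GenericRecord>(schema);
+        //   var fileWriter = DataFileWriter<GenericRecord>.OpenWriter(
+        //       datumWriter, "data.avro", Codec.CreateCodec(Codec.Type.Deflate));
+        //   fileWriter.Append(record);   // buffered; a block is flushed when full or on Close()
+        //   fileWriter.Close();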
+
+        public bool IsReservedMeta(string key)
+        {
+            return key.StartsWith(DataFileConstants.MetaDataReserved);
+        }
+
+        public void SetMeta(String key, byte[] value)
+        {
+            if (IsReservedMeta(key))
+            {
+                throw new AvroRuntimeException("Cannot set reserved meta key: " + key);
+            }
+            _metaData.Add(key, value);
+        }
+
+        public void SetMeta(String key, long value)
+        {
+            try
+            {
+                SetMeta(key, GetByteValue(value.ToString(CultureInfo.InvariantCulture)));
+            }
+            catch (Exception e)
+            {
+                throw new AvroRuntimeException(e.Message, e);
+            }
+        }
+
+        public void SetMeta(String key, string value)
+        {
+            try
+            {
+                SetMeta(key, GetByteValue(value));
+            }
+            catch (Exception e)
+            {
+                throw new AvroRuntimeException(e.Message, e);
+            }
+        }
+
+        public void SetSyncInterval(int syncInterval)
+        {
+            if (syncInterval < 32 || syncInterval > (1 << 30))
+            {
+                throw new AvroRuntimeException("Invalid sync interval value: " + syncInterval);
+            }
+            _syncInterval = syncInterval;
+        }
+
+        public void Append(T datum) 
+        {
+            AssertOpen();
+            EnsureHeader();
+
+            long usedBuffer = _blockStream.Position;
+
+            try
+            {
+                _writer.Write(datum, _blockEncoder);
+            }
+            catch (Exception e)
+            {
+                _blockStream.Position = usedBuffer;
+                throw new AvroRuntimeException("Error appending datum to writer", e);
+            }
+            _blockCount++;
+            WriteIfBlockFull();
+        }
+
+        private void EnsureHeader()
+        {
+            if (!_headerWritten)
+            {
+                WriteHeader();
+                _headerWritten = true;
+            }
+        }
+
+        public void Flush()
+        {
+            EnsureHeader();
+            Sync();
+        }
+
+        public long Sync()
+        {
+            AssertOpen();
+            WriteBlock();
+            return _stream.Position;
+        }
+
+        public void Close()
+        {
+            EnsureHeader();
+            Flush();
+            _stream.Flush();
+            _stream.Close();
+            _isOpen = false;
+        }
+
+        private void WriteHeader()
+        {
+            _encoder.WriteFixed(DataFileConstants.Magic);
+            WriteMetaData();
+            WriteSyncData();
+        }
+
+        private void Init()
+        {
+            _blockCount = 0;
+            _encoder = new BinaryEncoder(_stream);
+            _blockStream = new MemoryStream();
+            _blockEncoder = new BinaryEncoder(_blockStream);
+
+            if (_codec == null)
+                _codec = Codec.CreateCodec(Codec.Type.Null);
+
+            _isOpen = true;
+        }
+
+        private void AssertOpen()
+        {
+            if (!_isOpen) throw new AvroRuntimeException("Cannot complete operation: avro file/stream not open");
+        }
+
+        private IFileWriter<T> Create(Schema schema, Stream outStream, Codec codec)
+        {
+            _codec = codec;
+            _stream = outStream;
+            _metaData = new Dictionary<string, byte[]>();
+            _schema = schema;
+
+            Init();
+
+            return this;
+        }
+
+        private void WriteMetaData()
+        {
+            // Generate the sync marker, then add the codec & schema entries to the metadata
+            GenerateSyncData();
+            //SetMetaInternal(DataFileConstants.MetaDataSync, _syncData); - Avro 1.5.4 C
+            SetMetaInternal(DataFileConstants.MetaDataCodec, GetByteValue(_codec.GetName()));
+            SetMetaInternal(DataFileConstants.MetaDataSchema, GetByteValue(_schema.ToString()));
+            
+            // write metadata 
+            int size = _metaData.Count;
+            _encoder.WriteInt(size);
+
+            foreach (KeyValuePair<String, byte[]> metaPair in _metaData)
+            {
+                _encoder.WriteString(metaPair.Key);
+                _encoder.WriteBytes(metaPair.Value);
+            }
+            _encoder.WriteMapEnd();
+        }
+
+        private void WriteIfBlockFull()
+        {
+            if (BufferInUse() >= _syncInterval)
+                WriteBlock();
+        }
+
+        private long BufferInUse()
+        {
+            return _blockStream.Position;
+        }
+
+        private void WriteBlock() 
+        { 
+            if (_blockCount > 0) 
+            {
+                byte[] dataToWrite = _blockStream.ToArray();
+
+                // write count 
+                _encoder.WriteLong(_blockCount);
+
+                // write data 
+                _encoder.WriteBytes(_codec.Compress(dataToWrite));
+                    
+                // write sync marker 
+                _encoder.WriteFixed(_syncData);
+            
+                // reset / re-init block
+                _blockCount = 0;
+                _blockStream = new MemoryStream();
+                _blockEncoder = new BinaryEncoder(_blockStream);
+            }
+        }
+
+        private void WriteSyncData()
+        {
+            _encoder.WriteFixed(_syncData);
+        }
+
+        private void GenerateSyncData()
+        {
+            _syncData = new byte[16];
+
+            Random random = new Random();
+            random.NextBytes(_syncData);
+        }
+
+        private void SetMetaInternal(string key, byte[] value)
+        {
+            _metaData.Add(key, value);
+        }
+  
+        private byte[] GetByteValue(string value)
+        {
+            return System.Text.Encoding.UTF8.GetBytes(value);
+        }
+
+        public void Dispose()
+        {
+            Close();
+        }
+    }
+}
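A minimal usage sketch for the DataFileWriter above (not part of the upstream diff). It assumes the Schema.Parse factory and the GenericRecord/GenericDatumWriter types defined elsewhere in this commit; the schema and file name are invented for illustration:

    using Avro;
    using Avro.File;
    using Avro.Generic;

    class DataFileWriterExample
    {
        static void Main()
        {
            Schema schema = Schema.Parse(
                "{\"type\":\"record\",\"name\":\"User\",\"fields\":" +
                "[{\"name\":\"name\",\"type\":\"string\"}]}");

            var datumWriter = new GenericDatumWriter<GenericRecord>(schema);

            // OpenWriter creates the file; the header is written lazily on the
            // first Append, and a block is flushed whenever the sync interval
            // is reached.
            using (IFileWriter<GenericRecord> writer =
                DataFileWriter<GenericRecord>.OpenWriter(datumWriter, "users.avro"))
            {
                var record = new GenericRecord((RecordSchema)schema);
                record.Add("name", "Ada");
                writer.Append(record);
            } // Dispose() calls Close(), which flushes the final block.
        }
    }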
diff --git a/lang/csharp/src/apache/main/File/DeflateCodec.cs b/lang/csharp/src/apache/main/File/DeflateCodec.cs
new file mode 100644
index 0000000..8cd411e
--- /dev/null
+++ b/lang/csharp/src/apache/main/File/DeflateCodec.cs
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.IO;
+using System.IO.Compression;
+
+namespace Avro.File
+{
+    public class DeflateCodec : Codec
+    {
+        public override byte[] Compress(byte[] uncompressedData)
+        {
+            MemoryStream outStream = new MemoryStream();
+
+            using (DeflateStream Compress =
+                        new DeflateStream(outStream,
+                        CompressionMode.Compress))
+            {
+                Compress.Write(uncompressedData, 0, uncompressedData.Length);
+            }
+            return outStream.ToArray();
+        }
+
+        public override byte[] Decompress(byte[] compressedData)
+        {
+            MemoryStream inStream = new MemoryStream(compressedData);
+            MemoryStream outStream = new MemoryStream();
+
+            using (DeflateStream Decompress =
+                        new DeflateStream(inStream,
+                        CompressionMode.Decompress))
+            {
+                CopyTo(Decompress, outStream);
+            }
+            return outStream.ToArray();
+        }
+
+        private static void CopyTo(Stream from, Stream to)
+        {
+            byte[] buffer = new byte[4096];
+            int read;
+            while((read = from.Read(buffer, 0, buffer.Length)) != 0)
+            {
+                to.Write(buffer, 0, read);
+            }
+        }
+
+        public override string GetName()
+        {
+            return DataFileConstants.DeflateCodec;
+        }
+
+        public override bool Equals(object other)
+        {
+            if (this == other)
+                return true;
+            // guard against null before comparing runtime type names
+            return other != null && this.GetType().Name == other.GetType().Name;
+        }
+
+        public override int GetHashCode()
+        {
+            return DataFileConstants.DeflateCodecHash;
+        }
+    }
+}
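A short round-trip sketch for DeflateCodec (illustrative, not from the diff). Codec.Type.Deflate is an assumed enum member, mirroring the Codec.Type.Null used by DataFileWriter above:

    using System.Text;
    using Avro.File;

    class DeflateExample
    {
        static void Main()
        {
            Codec codec = Codec.CreateCodec(Codec.Type.Deflate); // assumed member
            byte[] original = Encoding.UTF8.GetBytes("hello avro");
            byte[] packed = codec.Compress(original);
            byte[] unpacked = codec.Decompress(packed);
            // unpacked now contains the same bytes as original.
        }
    }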
diff --git a/lang/csharp/src/apache/main/File/Header.cs b/lang/csharp/src/apache/main/File/Header.cs
new file mode 100644
index 0000000..c5fb1de
--- /dev/null
+++ b/lang/csharp/src/apache/main/File/Header.cs
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Avro.IO;
+
+namespace Avro.File
+{
+    public class Header
+    {
+        private IDictionary<string, byte[]> _metaData;
+        private byte[] _syncData;
+
+        public IDictionary<string, byte[]> MetaData { get { return _metaData; }}
+        public byte[] SyncData { get { return _syncData; }}
+        public Schema Schema { get; set; }
+
+        public Header()
+        {
+            _metaData = new Dictionary<string, byte[]>();
+            _syncData = new byte[16];
+        }
+    }
+}
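A sketch of inspecting a file's Header after opening it for reading. DataFileReader is assumed to be the concrete IFileReader<T> implementation elsewhere in this commit, and the reserved key names are assumed to match the DataFileConstants.MetaDataSchema/MetaDataCodec values used by the writer:

    using Avro.File;
    using Avro.Generic;

    class HeaderExample
    {
        static void Main()
        {
            using (IFileReader<GenericRecord> reader =
                DataFileReader<GenericRecord>.OpenReader("users.avro"))
            {
                Header header = reader.GetHeader();
                // The writer stores the schema and codec under reserved "avro." keys.
                string schemaJson = reader.GetMetaString("avro.schema");
                string codecName = reader.GetMetaString("avro.codec");
            }
        }
    }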
diff --git a/lang/csharp/src/apache/main/File/IFileReader.cs b/lang/csharp/src/apache/main/File/IFileReader.cs
new file mode 100644
index 0000000..4984fa3
--- /dev/null
+++ b/lang/csharp/src/apache/main/File/IFileReader.cs
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Avro.File
+{
+    public interface IFileReader<T> : IDisposable
+    {
+        /// <summary>
+        /// Return the header for the input 
+        /// file / stream
+        /// </summary>
+        /// <returns></returns>
+        Header GetHeader();
+
+        /// <summary>
+        /// Return the schema as read from 
+        /// the input file / stream
+        /// </summary>
+        /// <returns></returns>
+        Schema GetSchema();
+
+        /// <summary>
+        /// Return the list of keys in the metadata
+        /// </summary>
+        /// <returns></returns>
+        ICollection<string> GetMetaKeys();
+
+        /// <summary>
+        /// Return an enumeration of the remaining entries in the file
+        /// </summary>
+        /// <returns></returns>
+        IEnumerable<T> NextEntries { get; }
+
+        /// <summary>
+        /// Read the next datum from the file.
+        /// </summary>
+        T Next();
+
+        /// <summary>
+        /// True if more entries remain in this file.
+        /// </summary>
+        bool HasNext();
+
+        /// <summary>
+        /// Return the byte value of a metadata property
+        /// </summary>
+        /// <param name="key"></param>
+        /// <returns></returns>
+        byte[] GetMeta(string key);
+
+        /// <summary>
+        /// Return the long value of a metadata property
+        /// </summary>
+        /// <param name="key"></param>
+        /// <returns></returns>
+        long GetMetaLong(string key);
+
+        /// <summary>
+        /// Return the string value of a metadata property
+        /// </summary>
+        /// <param name="key"></param>
+        /// <returns></returns>
+        string GetMetaString(string key);
+
+        /// <summary>
+        /// Return true if past the next synchronization
+        /// point after a position
+        /// </summary>
+        /// <param name="position"></param>
+        /// <returns></returns>
+        bool PastSync(long position);
+
+        /// <summary>
+        /// Return the last synchronization point before
+        /// our current position
+        /// </summary>
+        /// <returns></returns>
+        long PreviousSync();
+
+        /// <summary>
+        /// Move to a specific, known synchronization point, 
+        /// one returned from IFileWriter.Sync() while writing
+        /// </summary>
+        /// <param name="position"></param>
+        void Seek(long position);
+
+        /// <summary>
+        /// Move to the next synchronization point
+        /// after a position
+        /// </summary>
+        /// <param name="position"></param>
+        void Sync(long position);
+
+        /// <summary>
+        /// Return the current position in the input
+        /// </summary>
+        /// <returns></returns>
+        long Tell();
+    }
+}
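A sketch of streaming every remaining record through the NextEntries enumeration (DataFileReader again assumed as the concrete implementation):

    using System;
    using Avro.File;
    using Avro.Generic;

    class ReadAllExample
    {
        static void Main()
        {
            using (IFileReader<GenericRecord> reader =
                DataFileReader<GenericRecord>.OpenReader("users.avro"))
            {
                foreach (GenericRecord record in reader.NextEntries)
                {
                    Console.WriteLine(record);
                }
            }
        }
    }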
diff --git a/lang/csharp/src/apache/main/File/IFileWriter.cs b/lang/csharp/src/apache/main/File/IFileWriter.cs
new file mode 100644
index 0000000..d727b21
--- /dev/null
+++ b/lang/csharp/src/apache/main/File/IFileWriter.cs
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+
+namespace Avro.File
+{
+    public interface IFileWriter<T> : IDisposable
+    {
+        /// <summary>
+        /// Append datum to a file / stream
+        /// </summary>
+        /// <param name="datum"></param>
+        void Append(T datum);
+
+        /// <summary>
+        /// Closes the file / stream
+        /// </summary>
+        void Close();
+
+        /// <summary>
+        /// Flush out any buffered data
+        /// </summary>
+        void Flush();
+
+        /// <summary>
+        /// Returns true if the parameter is a
+        /// reserved Avro metadata key
+        /// </summary>
+        /// <param name="key"></param>
+        /// <returns></returns>
+        bool IsReservedMeta(string key);
+
+        /// <summary>
+        /// Set meta data pair
+        /// </summary>
+        /// <param name="key"></param>
+        /// <param name="value"></param>
+        void SetMeta(String key, byte[] value);
+
+        /// <summary>
+        /// Set meta data pair (long value)
+        /// </summary>
+        /// <param name="key"></param>
+        /// <param name="value"></param>
+        void SetMeta(String key, long value);
+
+        /// <summary>
+        /// Set meta data pair (string value)
+        /// </summary>
+        /// <param name="key"></param>
+        /// <param name="value"></param>
+        void SetMeta(String key, string value);
+
+        /// <summary>
+        /// Set the synchronization interval for this 
+        /// file / stream, in bytes. Valid values range 
+        /// from 32 to 2^30. Suggested values are 
+        /// between 2K and 2M
+        /// </summary>
+        /// <param name="syncInterval"></param>
+        /// <returns></returns>
+        void SetSyncInterval(int syncInterval);
+
+        /// <summary>
+        /// Forces the end of the current block, 
+        /// emitting a synchronization marker
+        /// </summary>
+        /// <returns></returns>
+        long Sync();
+    }
+}
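A sketch of the writer-side knobs above: custom metadata must avoid the reserved "avro." prefix, the sync interval must lie between 32 and 2^30 bytes, and both must be set before the first Append writes the header. GenericDatumWriter with a primitive schema is assumed to work as documented later in this commit:

    using Avro;
    using Avro.File;
    using Avro.Generic;

    class WriterMetaExample
    {
        static void Main()
        {
            Schema schema = Schema.Parse("\"string\"");
            var datumWriter = new GenericDatumWriter<string>(schema);
            using (IFileWriter<string> writer =
                DataFileWriter<string>.OpenWriter(datumWriter, "strings.avro"))
            {
                writer.SetMeta("myapp.origin", "unit-test"); // stored as UTF-8 bytes
                writer.SetSyncInterval(64 * 1024);           // end a block every ~64 KB
                writer.Append("hello");
            }
        }
    }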
diff --git a/lang/csharp/src/apache/main/File/NullCodec.cs b/lang/csharp/src/apache/main/File/NullCodec.cs
new file mode 100644
index 0000000..b270478
--- /dev/null
+++ b/lang/csharp/src/apache/main/File/NullCodec.cs
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Avro.File
+{
+    public class NullCodec : Codec
+    {
+        public NullCodec() { }
+
+        public override byte[] Compress(byte[] uncompressedData)
+        {
+            return uncompressedData;
+        }
+
+        public override byte[] Decompress(byte[] compressedData)
+        {
+            return compressedData;
+        }
+
+        public override string GetName()
+        {
+            return DataFileConstants.NullCodec;
+        }
+
+        public override bool Equals(object other)
+        {
+            if (this == other)
+                return true;
+            // guard against null before comparing runtime type names
+            return other != null && this.GetType().Name == other.GetType().Name;
+        }
+
+        public override int GetHashCode()
+        {
+            return DataFileConstants.NullCodecHash;
+        }
+    }
+}
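A small sketch showing that codec equality is by runtime type name, so any two null-codec instances compare equal:

    using Avro.File;

    class NullCodecExample
    {
        static void Main()
        {
            // Equals compares GetType().Name, so these two instances are equal.
            bool same = new NullCodec().Equals(Codec.CreateCodec(Codec.Type.Null)); // true
        }
    }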
diff --git a/lang/csharp/src/apache/main/Generic/DatumReader.cs b/lang/csharp/src/apache/main/Generic/DatumReader.cs
new file mode 100644
index 0000000..954aa40
--- /dev/null
+++ b/lang/csharp/src/apache/main/Generic/DatumReader.cs
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using Avro.IO;
+
+namespace Avro.Generic
+{
+    public interface DatumReader<T>
+    {
+        Schema ReaderSchema { get; }
+        Schema WriterSchema { get; }
+
+        /// <summary>
+        /// Read a datum.  Traverse the schema, depth-first, reading all leaf values
+        /// in the schema into a datum that is returned.  If the provided datum is
+        /// non-null it may be reused and returned.
+        /// </summary>        
+        T Read(T reuse, Decoder decoder);
+    }
+}
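A sketch of decoding through the DatumReader<T> interface; GenericReader<T> (later in this commit) serves as the implementation, and the payload is assumed to have been written with the same schema:

    using System.IO;
    using Avro;
    using Avro.Generic;
    using Avro.IO;

    class DatumReaderExample
    {
        static string Deserialize(byte[] payload, Schema schema)
        {
            DatumReader<string> reader = new GenericReader<string>(schema, schema);
            var decoder = new BinaryDecoder(new MemoryStream(payload));
            return reader.Read(null, decoder);
        }
    }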
diff --git a/lang/csharp/src/apache/main/Generic/DatumWriter.cs b/lang/csharp/src/apache/main/Generic/DatumWriter.cs
new file mode 100644
index 0000000..87095d6
--- /dev/null
+++ b/lang/csharp/src/apache/main/Generic/DatumWriter.cs
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using Avro.IO;
+
+namespace Avro.Generic
+{
+    public interface DatumWriter<T>
+    {
+        Schema Schema { get; }
+        void Write(T datum, Encoder encoder);
+    }
+}
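The mirror-image sketch for DatumWriter<T>: serializing a single datum to a byte[] without the container-file framing, using the GenericDatumWriter defined later in this commit:

    using System.IO;
    using Avro;
    using Avro.Generic;
    using Avro.IO;

    class DatumWriterExample
    {
        static byte[] Serialize(string value, Schema schema)
        {
            DatumWriter<string> writer = new GenericDatumWriter<string>(schema);
            var stream = new MemoryStream();
            writer.Write(value, new BinaryEncoder(stream));
            return stream.ToArray();
        }
    }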
diff --git a/lang/csharp/src/apache/main/Generic/GenericDatumReader.cs b/lang/csharp/src/apache/main/Generic/GenericDatumReader.cs
new file mode 100644
index 0000000..37b6800
--- /dev/null
+++ b/lang/csharp/src/apache/main/Generic/GenericDatumReader.cs
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using Avro.IO;
+
+namespace Avro.Generic
+{
+    /// <summary>
+    /// PreresolvingDatumReader for reading data into GenericRecord objects or primitives.
+    /// <see cref="PreresolvingDatumReader{T}">For more information about performance considerations for choosing this implementation</see>
+    /// </summary>
+    public class GenericDatumReader<T> : PreresolvingDatumReader<T>
+    {
+        public GenericDatumReader(Schema writerSchema, Schema readerSchema) : base(writerSchema, readerSchema)
+        {
+        }
+
+        protected override bool IsReusable(Schema.Type tag)
+        {
+            switch (tag)
+            {
+                case Schema.Type.Double:
+                case Schema.Type.Boolean:
+                case Schema.Type.Int:
+                case Schema.Type.Long:
+                case Schema.Type.Float:
+                case Schema.Type.Bytes:
+                case Schema.Type.String:
+                case Schema.Type.Null:
+                    return false;
+            }
+            return true;
+        }
+
+        protected override ArrayAccess GetArrayAccess(ArraySchema readerSchema)
+        {
+            return new GenericArrayAccess();
+        }
+
+        protected override EnumAccess GetEnumAccess(EnumSchema readerSchema)
+        {
+            return new GenericEnumAccess(readerSchema);
+        }
+
+        protected override MapAccess GetMapAccess(MapSchema readerSchema)
+        {
+            return new GenericMapAccess();
+        }
+
+        protected override RecordAccess GetRecordAccess(RecordSchema readerSchema)
+        {
+            return new GenericRecordAccess(readerSchema);
+        }
+
+        protected override FixedAccess GetFixedAccess(FixedSchema readerSchema)
+        {
+            return new GenericFixedAccess(readerSchema);
+        }
+
+        class GenericEnumAccess : EnumAccess
+        {
+            private EnumSchema schema;
+
+            public GenericEnumAccess(EnumSchema schema)
+            {
+                this.schema = schema;
+            }
+
+            public object CreateEnum(object reuse, int ordinal)
+            {
+                if (reuse is GenericEnum)
+                {
+                    var ge = (GenericEnum) reuse;
+                    if (ge.Schema.Equals(this.schema))
+                    {
+                        ge.Value = this.schema[ordinal];
+                        return ge;
+                    }
+                }
+                return new GenericEnum(this.schema, this.schema[ordinal]);
+            }
+        }
+
+        internal class GenericRecordAccess : RecordAccess
+        {
+            private RecordSchema schema;
+
+            public GenericRecordAccess(RecordSchema schema)
+            {
+                this.schema = schema;
+            }
+
+            public object CreateRecord(object reuse)
+            {
+                GenericRecord ru = (reuse == null || !(reuse is GenericRecord) || !(reuse as GenericRecord).Schema.Equals(this.schema)) ?
+                    new GenericRecord(this.schema) :
+                    reuse as GenericRecord;
+                return ru;
+            }
+
+            public object GetField(object record, string fieldName, int fieldPos)
+            {
+                object result;
+                if(!((GenericRecord)record).TryGetValue(fieldName, out result))
+                {
+                    return null;
+                }
+                return result;
+            }
+
+            public void AddField(object record, string fieldName, int fieldPos, object fieldValue)
+            {
+                ((GenericRecord)record).Add(fieldName, fieldValue);
+            }
+        }
+
+        class GenericFixedAccess : FixedAccess
+        {
+            private FixedSchema schema;
+
+            public GenericFixedAccess(FixedSchema schema)
+            {
+                this.schema = schema;
+            }
+
+            public object CreateFixed(object reuse)
+            {
+                return (reuse is GenericFixed && (reuse as GenericFixed).Schema.Equals(this.schema)) ?
+                    reuse : new GenericFixed(this.schema);
+            }
+
+            public byte[] GetFixedBuffer( object f )
+            {
+                return ((GenericFixed)f).Value;
+            }
+        }
+
+        class GenericArrayAccess : ArrayAccess
+        {
+            public object Create(object reuse)
+            {
+                return (reuse is object[]) ? reuse : new object[0];
+            }
+
+            public void EnsureSize(ref object array, int targetSize)
+            {
+                if (((object[])array).Length < targetSize)
+                    SizeTo(ref array, targetSize);
+            }
+
+            public void Resize(ref object array, int targetSize)
+            {
+                SizeTo(ref array, targetSize);
+            }
+
+            public void AddElements( object arrayObj, int elements, int index, ReadItem itemReader, Decoder decoder, bool reuse )
+            {
+                var array = (object[]) arrayObj;
+                for (int i = index; i < index + elements; i++)
+                {
+                    array[i] = reuse ? itemReader(array[i], decoder) : itemReader(null, decoder);
+                }
+            }
+
+            private static void SizeTo(ref object array, int targetSize)
+            {
+                var o = (object[]) array;
+                Array.Resize(ref o, targetSize);
+                array = o;
+            }
+        }
+
+        class GenericMapAccess : MapAccess
+        {
+            public object Create(object reuse)
+            {
+                if (reuse is IDictionary<string, object>)
+                {
+                    var result = (IDictionary<string, object>)reuse;
+                    result.Clear();
+                    return result;
+                }
+                return new Dictionary<string, object>();
+            }
+
+            public void AddElements(object mapObj, int elements, ReadItem itemReader, Decoder decoder, bool reuse)
+            {
+                var map = ((IDictionary<string, object>)mapObj);
+                for (int i = 0; i < elements; i++)
+                {
+                    var key = decoder.ReadString();
+                    map[key] = itemReader(null, decoder);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
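A sketch of GenericDatumReader in use; per IsReusable above, records, enums, fixeds, arrays and maps may be reused across Read calls, while primitives are always freshly decoded:

    using System.IO;
    using Avro;
    using Avro.Generic;
    using Avro.IO;

    class GenericDatumReaderExample
    {
        static GenericRecord Decode(byte[] payload, Schema writerSchema, Schema readerSchema)
        {
            var reader = new GenericDatumReader<GenericRecord>(writerSchema, readerSchema);
            // Read(reuse, decoder) is assumed to be inherited from PreresolvingDatumReader<T>.
            return reader.Read(null, new BinaryDecoder(new MemoryStream(payload)));
        }
    }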
diff --git a/lang/csharp/src/apache/main/Generic/GenericDatumWriter.cs b/lang/csharp/src/apache/main/Generic/GenericDatumWriter.cs
new file mode 100644
index 0000000..a326fd4
--- /dev/null
+++ b/lang/csharp/src/apache/main/Generic/GenericDatumWriter.cs
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using Encoder = Avro.IO.Encoder;
+
+namespace Avro.Generic
+{
+    /// <summary>
+    /// PreresolvingDatumWriter for writing data from GenericRecords or primitive types.
+    /// <see cref="PreresolvingDatumWriter{T}">For more information about performance considerations for choosing this implementation</see>
+    /// </summary>
+    public class GenericDatumWriter<T> : PreresolvingDatumWriter<T>
+    {
+        public GenericDatumWriter( Schema schema ) : base(schema, new GenericArrayAccess(), new DictionaryMapAccess())
+        {
+        }
+
+        protected override void WriteRecordFields(object recordObj, RecordFieldWriter[] writers, Encoder encoder)
+        {
+            var record = (GenericRecord) recordObj;
+            foreach (var writer in writers)
+            {
+                writer.WriteField(record[writer.Field.Name], encoder);
+            }
+        }
+
+        protected override void EnsureRecordObject( RecordSchema recordSchema, object value )
+        {
+            if( value == null || !( value is GenericRecord ) || !( ( value as GenericRecord ).Schema.Equals( recordSchema ) ) )
+            {
+                throw TypeMismatch( value, "record", "GenericRecord" );
+            }
+        }
+
+        protected override void WriteField(object record, string fieldName, int fieldPos, WriteItem writer, Encoder encoder)
+        {
+            writer(((GenericRecord)record)[fieldName], encoder);
+        }
+
+        protected override WriteItem ResolveEnum(EnumSchema es)
+        {
+            return (v,e) =>
+                       {
+                            if( v == null || !(v is GenericEnum) || !((v as GenericEnum).Schema.Equals(es)))
+                                throw TypeMismatch(v, "enum", "GenericEnum");
+                            e.WriteEnum(es.Ordinal((v as GenericEnum).Value));
+                       };
+        }
+
+        protected override void WriteFixed( FixedSchema es, object value, Encoder encoder )
+        {
+            if (value == null || !(value is GenericFixed) || !(value as GenericFixed).Schema.Equals(es))
+            {
+                throw TypeMismatch(value, "fixed", "GenericFixed");
+            }
+            GenericFixed ba = (GenericFixed)value;
+            encoder.WriteFixed(ba.Value);
+        }
+
+        /*
+         * FIXME: This method of determining the union branch has problems. If the datum is an
+         * IDictionary<string, object> and the union has both record and map branches, it chooses
+         * whichever comes first. Similarly, if the datum is a byte[] and the union has both fixed
+         * and bytes branches, it chooses the first one that matches. It also does not recognize
+         * arrays of primitive types.
+         */
+        protected override bool UnionBranchMatches(Schema sc, object obj)
+        {
+            if (obj == null && sc.Tag != Avro.Schema.Type.Null) return false;
+            switch (sc.Tag)
+            {
+                case Schema.Type.Null:
+                    return obj == null;
+                case Schema.Type.Boolean:
+                    return obj is bool;
+                case Schema.Type.Int:
+                    return obj is int;
+                case Schema.Type.Long:
+                    return obj is long;
+                case Schema.Type.Float:
+                    return obj is float;
+                case Schema.Type.Double:
+                    return obj is double;
+                case Schema.Type.Bytes:
+                    return obj is byte[];
+                case Schema.Type.String:
+                    return obj is string;
+                case Schema.Type.Error:
+                case Schema.Type.Record:
+                    //return obj is GenericRecord && (obj as GenericRecord).Schema.Equals(s);
+                    return obj is GenericRecord && (obj as GenericRecord).Schema.SchemaName.Equals((sc as RecordSchema).SchemaName);
+                case Schema.Type.Enumeration:
+                    //return obj is GenericEnum && (obj as GenericEnum).Schema.Equals(s);
+                    return obj is GenericEnum && (obj as GenericEnum).Schema.SchemaName.Equals((sc as EnumSchema).SchemaName);
+                case Schema.Type.Array:
+                    return obj is Array && !(obj is byte[]);
+                case Schema.Type.Map:
+                    return obj is IDictionary<string, object>;
+                case Schema.Type.Union:
+                    return false;   // Union directly within another union not allowed!
+                case Schema.Type.Fixed:
+                    //return obj is GenericFixed && (obj as GenericFixed).Schema.Equals(s);
+                    return obj is GenericFixed && (obj as GenericFixed).Schema.SchemaName.Equals((sc as FixedSchema).SchemaName);
+                default:
+                    throw new AvroException("Unknown schema type: " + sc.Tag);
+            }
+        }
+
+        private class GenericArrayAccess : ArrayAccess
+        {
+            public void EnsureArrayObject( object value )
+            {
+                if( value == null || !( value is Array ) ) throw TypeMismatch( value, "array", "Array" );
+            }
+
+            public long GetArrayLength( object value )
+            {
+                return ( (Array) value ).Length;
+            }
+
+            public void WriteArrayValues(object array, WriteItem valueWriter, Encoder encoder)
+            {
+                var arrayInstance = (Array) array;
+                for(int i = 0; i < arrayInstance.Length; i++)
+                {
+                    encoder.StartItem();
+                    valueWriter(arrayInstance.GetValue(i), encoder);
+                }
+            }
+        }
+    }
+}
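A sketch of the union-branch selection described in the FIXME above: with a ["null","string"] union, UnionBranchMatches picks the branch from the datum's runtime type, so a string datum selects the "string" branch and null selects "null":

    using System.IO;
    using Avro;
    using Avro.Generic;
    using Avro.IO;

    class UnionWriteExample
    {
        static void Main()
        {
            Schema schema = Schema.Parse("[\"null\",\"string\"]");
            var writer = new GenericDatumWriter<object>(schema);
            var encoder = new BinaryEncoder(new MemoryStream());
            writer.Write("maybe", encoder); // runtime type string selects the "string" branch
            writer.Write(null, encoder);    // null selects the "null" branch
        }
    }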
diff --git a/lang/csharp/src/apache/main/Generic/GenericEnum.cs b/lang/csharp/src/apache/main/Generic/GenericEnum.cs
new file mode 100644
index 0000000..1aa0058
--- /dev/null
+++ b/lang/csharp/src/apache/main/Generic/GenericEnum.cs
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Avro.Generic
+{
+    /// <summary>
+    /// The default class to hold values for an enum schema in GenericReader and GenericWriter.
+    /// </summary>
+    public class GenericEnum
+    {
+        public EnumSchema Schema { get; private set; }
+        private string value;
+        public string Value {
+            get { return value; }
+            set
+            {
+                if (!Schema.Contains(value)) throw new AvroException("Unknown value for enum: " + value + " (" + Schema + ")");
+                this.value = value;
+            }
+        }
+
+        public GenericEnum(EnumSchema schema, string value)
+        {
+            this.Schema = schema;
+            this.Value = value;
+        }
+
+        public override bool Equals(object obj)
+        {
+            if (obj == this) return true;
+            return (obj != null && obj is GenericEnum) ? Value.Equals((obj as GenericEnum).Value) : false;
+        }
+
+        public override int GetHashCode()
+        {
+            return 17 * Value.GetHashCode();
+        }
+
+        public override string ToString()
+        {
+            return "Schema: " + Schema + ", value: " + Value;
+        }
+
+    }
+}
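A sketch of the validation performed by the Value setter above; the enum schema here is invented:

    using Avro;
    using Avro.Generic;

    class GenericEnumExample
    {
        static void Main()
        {
            var schema = (EnumSchema)Schema.Parse(
                "{\"type\":\"enum\",\"name\":\"Suit\",\"symbols\":[\"SPADES\",\"HEARTS\"]}");
            var suit = new GenericEnum(schema, "HEARTS");
            // suit.Value = "JOKER"; // would throw AvroException: not a symbol of Suit
        }
    }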
diff --git a/lang/csharp/src/apache/main/Generic/GenericFixed.cs b/lang/csharp/src/apache/main/Generic/GenericFixed.cs
new file mode 100644
index 0000000..8b94a52
--- /dev/null
+++ b/lang/csharp/src/apache/main/Generic/GenericFixed.cs
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Avro.Generic
+{
+    /// <summary>
+    /// The default type used by GenericReader and GenericWriter for objects for FixedSchema
+    /// </summary>
+    public class GenericFixed
+    {
+        protected readonly byte[] value;
+        private FixedSchema schema;
+
+        public FixedSchema Schema
+        {
+            get
+            {
+                return schema;
+            }
+
+            set
+            {
+                if (!(value is FixedSchema))
+                    throw new AvroException("Schema " + value.Name + " in set is not FixedSchema");
+
+                if ((value as FixedSchema).Size != this.value.Length)
+                    throw new AvroException("Schema " + value.Name + " Size " + (value as FixedSchema).Size + "is not equal to bytes length " + this.value.Length);
+
+                schema = value;
+            }
+        }
+
+        public GenericFixed(FixedSchema schema)
+        {
+            value = new byte[schema.Size];
+            this.Schema = schema;
+        }
+
+        public GenericFixed(FixedSchema schema, byte[] value)
+        {
+            this.value = new byte[schema.Size];
+            this.Schema = schema;
+            Value = value;
+        }
+
+        protected GenericFixed(uint size) 
+        {
+            this.value = new byte[size];
+        }
+
+        public byte[] Value
+        {
+            get { return this.value; }
+            set
+            {
+                if (value.Length == this.value.Length)
+                {
+                    Array.Copy(value, this.value, value.Length);
+                    return;
+                }
+                throw new AvroException("Invalid length for fixed: " + value.Length + ", (" + Schema + ")");
+            }
+        }
+
+        public override bool Equals(object obj)
+        {
+            if (this == obj) return true;
+            if (obj != null && obj is GenericFixed)
+            {
+                GenericFixed that = obj as GenericFixed;
+                if (that.Schema.Equals(this.Schema))
+                {
+                    for (int i = 0; i < value.Length; i++) if (this.value[i] != that.value[i]) return false;
+                    return true;
+                }
+            }
+            return false;
+        }
+
+        public override int GetHashCode()
+        {
+            int result = Schema.GetHashCode();
+            foreach (byte b in value)
+            {
+                result += 23 * b;
+            }
+            return result;
+        }
+    }
+}
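A sketch of the size check GenericFixed enforces, both in the constructor and on assignment to Value; the fixed schema is invented:

    using Avro;
    using Avro.Generic;

    class GenericFixedExample
    {
        static void Main()
        {
            var schema = (FixedSchema)Schema.Parse(
                "{\"type\":\"fixed\",\"name\":\"Md5\",\"size\":16}");
            var digest = new GenericFixed(schema, new byte[16]); // length matches size: ok
            // new GenericFixed(schema, new byte[4]) would throw AvroException.
        }
    }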
diff --git a/lang/csharp/src/apache/main/Generic/GenericReader.cs b/lang/csharp/src/apache/main/Generic/GenericReader.cs
new file mode 100644
index 0000000..76796a7
--- /dev/null
+++ b/lang/csharp/src/apache/main/Generic/GenericReader.cs
@@ -0,0 +1,633 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using Avro.IO;
+using System.IO;
+
+namespace Avro.Generic
+{
+    public delegate T Reader<T>();
+
+    /// <summary>
+    /// A general purpose reader of data from avro streams. This can optionally resolve if the reader's and writer's
+    /// schemas are different. This class is a wrapper around DefaultReader and offers a little more type safety. The default reader
+    /// has the flexibility to return any type of object for each read call because the Read() method is generic. This
+    /// class on the other hand can only return a single type because the type is a parameter to the class. Any
+    /// user defined extension should, however, be done to DefaultReader. This class is sealed.
+    /// </summary>
+    /// <typeparam name="T"></typeparam>
+    public sealed class GenericReader<T> : DatumReader<T>
+    {
+        private readonly DefaultReader reader;
+
+        /// <summary>
+        /// Constructs a generic reader for the given schemas using the DefaultReader. If the
+        /// reader's and writer's schemas are different this class performs the resolution.
+        /// </summary>
+        /// <param name="writerSchema">The schema used while generating the data</param>
+        /// <param name="readerSchema">The schema desired by the reader</param>
+        public GenericReader(Schema writerSchema, Schema readerSchema)
+            : this(new DefaultReader(writerSchema, readerSchema))
+        {
+        }
+
+        /// <summary>
+        /// Constructs a generic reader by directly using the given DefaultReader
+        /// </summary>
+        /// <param name="reader">The actual reader to use</param>
+        public GenericReader(DefaultReader reader)
+        {
+            this.reader = reader;
+        }
+
+        public Schema WriterSchema { get { return reader.WriterSchema; } }
+
+        public Schema ReaderSchema { get { return reader.ReaderSchema; } }
+
+        public T Read(T reuse, Decoder d)
+        {
+            return reader.Read(reuse, d);
+        }
+    }
+
+    /// <summary>
+    /// The default implementation for the generic reader. It constructs new .NET objects for avro objects on the
+    /// stream and returns the .NET object. Users can directly use this class or, if they want to customize the
+    /// object types for different Avro schema types, can derive from this class. There are enough hooks in this
+    /// class to allow customization.
+    /// </summary>
+    /// <remarks>
+    /// <list type="table">
+    /// <listheader><term>Avro Type</term><description>.NET Type</description></listheader>
+    /// <item><term>null</term><description>null reference</description></item>
+    /// </list>
+    /// </remarks>
+    public class DefaultReader
+    {
+        public Schema ReaderSchema { get; private set; }
+        public Schema WriterSchema { get; private set; }
+
+
+        /// <summary>
+        /// Constructs the default reader for the given schemas. If the
+        /// reader's and writer's schemas are different this class performs the resolution.
+        /// This default implementation maps Avro types to .NET types as described in the class remarks.
+        /// </summary>
+        /// <param name="writerSchema">The schema used while generating the data</param>
+        /// <param name="readerSchema">The schema desired by the reader</param>
+        public DefaultReader(Schema writerSchema, Schema readerSchema)
+        {
+            this.ReaderSchema = readerSchema;
+            this.WriterSchema = writerSchema;
+        }
+
+        /// <summary>
+        /// Reads an object off the stream.
+        /// </summary>
+        /// <typeparam name="T">The type of object to read. A single schema typically returns an object of a single .NET class.
+        /// The only exception is UnionSchema, which can return an object of different types based on the branch selected.
+        /// </typeparam>
+        /// <param name="reuse">If not null, the implementation will try to reuse it when building the returned object</param>
+        /// <param name="decoder">The decoder for deserialization</param>
+        /// <returns></returns>
+        public T Read<T>(T reuse, Decoder decoder)
+        {
+            if (!ReaderSchema.CanRead(WriterSchema))
+                throw new AvroException("Schema mismatch. Reader: " + ReaderSchema + ", writer: " + WriterSchema);
+
+            return (T)Read(reuse, WriterSchema, ReaderSchema, decoder);
+        }
+
+        public object Read(object reuse, Schema writerSchema, Schema readerSchema, Decoder d)
+        {
+            if (readerSchema.Tag == Schema.Type.Union && writerSchema.Tag != Schema.Type.Union)
+            {
+                readerSchema = findBranch(readerSchema as UnionSchema, writerSchema);
+            }
+            /*
+            if (!readerSchema.CanRead(writerSchema))
+            {
+                throw new AvroException("Schema mismatch. Reader: " + readerSchema + ", writer: " + writerSchema);
+            }
+            */
+            switch (writerSchema.Tag)
+            {
+                case Schema.Type.Null:
+                    return ReadNull(readerSchema, d);
+                case Schema.Type.Boolean:
+                    return Read<bool>(writerSchema.Tag, readerSchema, d.ReadBoolean);
+                case Schema.Type.Int:
+                    {
+                        int i = Read<int>(writerSchema.Tag, readerSchema, d.ReadInt);
+                        switch (readerSchema.Tag)
+                        {
+                            case Schema.Type.Long:
+                                return (long)i;
+                            case Schema.Type.Float:
+                                return (float)i;
+                            case Schema.Type.Double:
+                                return (double)i;
+                            default:
+                                return i;
+                        }
+                    }
+                case Schema.Type.Long:
+                    {
+                        long l = Read<long>(writerSchema.Tag, readerSchema, d.ReadLong);
+                        switch (readerSchema.Tag)
+                        {
+                            case Schema.Type.Float:
+                                return (float)l;
+                            case Schema.Type.Double:
+                                return (double)l;
+                            default:
+                                return l;
+                        }
+                    }
+                case Schema.Type.Float:
+                    {
+                        float f = Read<float>(writerSchema.Tag, readerSchema, d.ReadFloat);
+                        switch (readerSchema.Tag)
+                        {
+                            case Schema.Type.Double:
+                                return (double)f;
+                            default:
+                                return f;
+                        }
+                    }
+                case Schema.Type.Double:
+                    return Read<double>(writerSchema.Tag, readerSchema, d.ReadDouble);
+                case Schema.Type.String:
+                    return Read<string>(writerSchema.Tag, readerSchema, d.ReadString);
+                case Schema.Type.Bytes:
+                    return Read<byte[]>(writerSchema.Tag, readerSchema, d.ReadBytes);
+                case Schema.Type.Error:
+                case Schema.Type.Record:
+                    return ReadRecord(reuse, (RecordSchema)writerSchema, readerSchema, d);
+                case Schema.Type.Enumeration:
+                    return ReadEnum(reuse, (EnumSchema)writerSchema, readerSchema, d);
+                case Schema.Type.Fixed:
+                    return ReadFixed(reuse, (FixedSchema)writerSchema, readerSchema, d);
+                case Schema.Type.Array:
+                    return ReadArray(reuse, (ArraySchema)writerSchema, readerSchema, d);
+                case Schema.Type.Map:
+                    return ReadMap(reuse, (MapSchema)writerSchema, readerSchema, d);
+                case Schema.Type.Union:
+                    return ReadUnion(reuse, (UnionSchema)writerSchema, readerSchema, d);
+                default:
+                    throw new AvroException("Unknown schema type: " + writerSchema);
+            }
+        }
+
+        /// <summary>
+        /// Deserializes a null from the stream.
+        /// </summary>
+        /// <param name="readerSchema">Reader's schema, which should be a NullSchema</param>
+        /// <param name="d">The decoder for deserialization</param>
+        /// <returns></returns>
+        protected virtual object ReadNull(Schema readerSchema, Decoder d)
+        {
+            d.ReadNull();
+            return null;
+        }
+
+        /// <summary>
+        /// A generic function to read primitive types
+        /// </summary>
+        /// <typeparam name="S">The .NET type to read</typeparam>
+        /// <param name="tag">The Avro type tag for the object on the stream</param>
+        /// <param name="readerSchema">A schema compatible to the Avro type</param>
+        /// <param name="reader">A function that can read the avro type from the stream</param>
+        /// <returns>The primitive type just read</returns>
+        protected S Read<S>(Schema.Type tag, Schema readerSchema, Reader<S> reader)
+        {
+            return reader();
+        }
+
+        /// <summary>
+        /// Deserializes a record from the stream.
+        /// </summary>
+        /// <param name="reuse">If not null, a record object that could be reused for returning the result</param>
+        /// <param name="writerSchema">The writer's RecordSchema</param>
+        /// <param name="readerSchema">The reader's schema, must be RecordSchema too.</param>
+        /// <param name="dec">The decoder for deserialization</param>
+        /// <returns>The record object just read</returns>
+        protected virtual object ReadRecord(object reuse, RecordSchema writerSchema, Schema readerSchema, Decoder dec)
+        {
+            RecordSchema rs = (RecordSchema)readerSchema;
+
+            object rec = CreateRecord(reuse, rs);
+            foreach (Field wf in writerSchema)
+            {
+                try
+                {
+                    Field rf;
+                    if (rs.TryGetFieldAlias(wf.Name, out rf))
+                    {
+                        object obj = null;
+                        TryGetField(rec, wf.Name, rf.Pos, out obj);
+                        AddField(rec, wf.Name, rf.Pos, Read(obj, wf.Schema, rf.Schema, dec));
+                    }
+                    else
+                        Skip(wf.Schema, dec);
+                }
+                catch (Exception ex)
+                {
+                    throw new AvroException(ex.Message + " in field " + wf.Name);
+                }
+            }
+
+            var defaultStream = new MemoryStream();
+            var defaultEncoder = new BinaryEncoder(defaultStream);
+            var defaultDecoder = new BinaryDecoder(defaultStream);
+            foreach (Field rf in rs)
+            {
+                if (writerSchema.Contains(rf.Name)) continue;
+                
+                defaultStream.Position = 0; // reset for writing
+                Resolver.EncodeDefaultValue(defaultEncoder, rf.Schema, rf.DefaultValue);
+                defaultStream.Flush();
+                defaultStream.Position = 0; // reset for reading
+
+                object obj = null;
+                TryGetField(rec, rf.Name, rf.Pos, out obj);
+                AddField(rec, rf.Name, rf.Pos, Read(obj, rf.Schema, rf.Schema, defaultDecoder));
+            }
+
+            return rec;
+        }
+
+        /// <summary>
+        /// Creates a new record object. Derived classes can override this to return an object of their choice.
+        /// </summary>
+        /// <param name="reuse">If appropriate, will reuse this object instead of constructing a new one</param>
+        /// <param name="readerSchema">The schema the reader is using</param>
+        /// <returns></returns>
+        protected virtual object CreateRecord(object reuse, RecordSchema readerSchema)
+        {
+            GenericRecord ru = (reuse == null || !(reuse is GenericRecord) || !(reuse as GenericRecord).Schema.Equals(readerSchema)) ?
+                new GenericRecord(readerSchema) :
+                reuse as GenericRecord;
+            return ru;
+        }
+
+        /// <summary>
+        /// Used by the default implementation of ReadRecord() to get the existing field of a record object. The derived
+        /// classes can override this to make their own interpretation of the record object.
+        /// </summary>
+        /// <param name="record">The record object to be probed into. This is guaranteed to be one that was returned
+        /// by a previous call to CreateRecord.</param>
+        /// <param name="fieldName">The name of the field to probe.</param>
+        /// <param name="value">The value of the field, if found. Null otherwise.</param>
+        /// <returns>True if and only if a field with the given name is found.</returns>
+        protected virtual bool TryGetField(object record, string fieldName, int fieldPos, out object value)
+        {
+            return (record as GenericRecord).TryGetValue(fieldName, out value);
+        }
+
+        /// <summary>
+        /// Used by the default implementation of ReadRecord() to add a field to a record object. The derived
+        /// classes can override this to suit their own implementation of the record object.
+        /// </summary>
+        /// <param name="record">The record object to be probed into. This is guaranteed to be one that was returned
+        /// by a previous call to CreateRecord.</param>
+        /// <param name="fieldName">The name of the field to probe.</param>
+        /// <param name="fieldValue">The value to be added for the field</param>
+        protected virtual void AddField(object record, string fieldName, int fieldPos, object fieldValue)
+        {
+            (record as GenericRecord).Add(fieldName, fieldValue);
+        }
+
+        /// <summary>
+        /// Deserializes an enum. Uses CreateEnum to construct the new enum object.
+        /// </summary>
+        /// <param name="reuse">If appropriate, uses this instead of creating a new enum object.</param>
+        /// <param name="writerSchema">The schema the writer used while writing the enum</param>
+        /// <param name="readerSchema">The schema the reader is using</param>
+        /// <param name="d">The decoder for deserialization.</param>
+        /// <returns>An enum object.</returns>
+        protected virtual object ReadEnum(object reuse, EnumSchema writerSchema, Schema readerSchema, Decoder d)
+        {
+            return CreateEnum(reuse, readerSchema as EnumSchema, writerSchema[d.ReadEnum()]);
+        }
+
+        /// <summary>
+        /// Used by the default implementation of ReadEnum to construct a new enum object.
+        /// </summary>
+        /// <param name="reuse">If appropriate, use this enum object instead of a new one.</param>
+        /// <param name="es">The enum schema used by the reader.</param>
+        /// <param name="symbol">The symbol that needs to be used.</param>
+        /// <returns>The default implementation returns a GenericEnum.</returns>
+        protected virtual object CreateEnum(object reuse, EnumSchema es, string symbol)
+        {
+            if (reuse is GenericEnum)
+            {
+                GenericEnum ge = reuse as GenericEnum;
+                if (ge.Schema.Equals(es))
+                {
+                    ge.Value = symbol;
+                    return ge;
+                }
+            }
+            return new GenericEnum(es, symbol);
+        }
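+
+        // Editor's note (illustrative sketch, not upstream code): CreateEnum mutates and
+        // returns the reused GenericEnum only when its schema equals the reader's schema;
+        // otherwise a fresh GenericEnum is constructed:
+        //
+        //   var e1 = new GenericEnum(es, "A");
+        //   var e2 = (GenericEnum)CreateEnum(e1, es, "B"); // same instance, Value == "B"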
+
+        /// <summary>
+        /// Deserializes an array and returns an array object. It uses CreateArray() and works on it before returning it.
+        /// It also uses GetArraySize(), ResizeArray(), SetArrayElement() and GetArrayElement() methods. Derived classes can
+        /// override these methods to customize their behavior.
+        /// </summary>
+        /// <param name="reuse">If appropriate, uses this instead of creating a new array object.</param>
+        /// <param name="writerSchema">The schema used by the writer.</param>
+        /// <param name="readerSchema">The schema that the reader uses.</param>
+        /// <param name="d">The decoder for deserialization.</param>
+        /// <returns>The deserialized array object.</returns>
+        protected virtual object ReadArray(object reuse, ArraySchema writerSchema, Schema readerSchema, Decoder d)
+        {
+            ArraySchema rs = (ArraySchema)readerSchema;
+            object result = CreateArray(reuse, rs);
+            int i = 0;
+            for (int n = (int)d.ReadArrayStart(); n != 0; n = (int)d.ReadArrayNext())
+            {
+                if (GetArraySize(result) < (i + n)) ResizeArray(ref result, i + n);
+                for (int j = 0; j < n; j++, i++)
+                {
+                    SetArrayElement(result, i, Read(GetArrayElement(result, i), writerSchema.ItemSchema, rs.ItemSchema, d));
+                }
+            }
+            if (GetArraySize(result) != i) ResizeArray(ref result, i);
+            return result;
+        }
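+
+        // Editor's note: Avro arrays are block-encoded; ReadArrayStart() and
+        // ReadArrayNext() each return the next block's item count, with 0 marking the
+        // end. For example, an array [1, 2, 3] written as a single block decodes here
+        // as n = 3 followed by n = 0, after which the result is trimmed to exactly
+        // i elements.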
+
+        /// <summary>
+        /// Creates a new array object. The initial size of the object could be anything. The users
+        /// should use GetArraySize() to determine the size. The default implementation creates an <c>object[]</c>.
+        /// </summary>
+        /// <param name="reuse">If appropriate use this instead of creating a new one.</param>
+        /// <param name="rs">The reader's array schema.</param>
+        /// <returns>An object suitable to deserialize an Avro array</returns>
+        protected virtual object CreateArray(object reuse, ArraySchema rs)
+        {
+            return reuse is object[] ? (object[])reuse : new object[0];
+        }
+
+        /// <summary>
+        /// Returns the size of the given array object.
+        /// </summary>
+        /// <param name="array">Array object whose size is required. This is guaranteed to be somthing returned by
+        /// a previous call to CreateArray().</param>
+        /// <returns>The size of the array</returns>
+        protected virtual int GetArraySize(object array)
+        {
+            return (array as object[]).Length;
+        }
+
+        /// <summary>
+        /// Resizes the array to the new size.
+        /// </summary>
+        /// <param name="array">The array object to be resized. This is guaranteed to be something returned by
+        /// a previous call to CreateArray().</param>
+        /// <param name="n">The new size.</param>
+        protected virtual void ResizeArray(ref object array, int n)
+        {
+            object[] o = array as object[];
+            Array.Resize(ref o, n);
+            array = o;
+        }
+
+        /// <summary>
+        /// Assigns a new value to the object at the given index
+        /// </summary>
+        /// <param name="array">Array object whose size is required. This is guaranteed to be somthing returned by
+        /// a previous call to CreateArray().</param>
+        /// <param name="index">The index to reassign to.</param>
+        /// <param name="value">The value to assign.</param>
+        protected virtual void SetArrayElement(object array, int index, object value)
+        {
+            object[] a = array as object[];
+            a[index] = value;
+        }
+
+        /// <summary>
+        /// Returns the element at the given index.
+        /// </summary>
+        /// <param name="array">Array object whose size is required. This is guaranteed to be somthing returned by
+        /// a previous call to CreateArray().</param>
+        /// <param name="index">The index to look into.</param>
+        /// <returns>The object the given index. Null if no object has been assigned to that index.</returns>
+        protected virtual object GetArrayElement(object array, int index)
+        {
+            return (array as object[])[index];
+        }
+
+        /// <summary>
+        /// Deserializes an Avro map. The default implementation creates a new map using CreateMap() and then
+        /// adds elements to the map using AddMapEntry().
+        /// </summary>
+        /// <param name="reuse">If appropriate, use this instead of creating a new map object.</param>
+        /// <param name="writerSchema">The schema the writer used to write the map.</param>
+        /// <param name="readerSchema">The schema the reader is using.</param>
+        /// <param name="d">The decoder for deserialization.</param>
+        /// <returns>The deserialized map object.</returns>
+        protected virtual object ReadMap(object reuse, MapSchema writerSchema, Schema readerSchema, Decoder d)
+        {
+            MapSchema rs = (MapSchema)readerSchema;
+            object result = CreateMap(reuse, rs);
+            for (int n = (int)d.ReadMapStart(); n != 0; n = (int)d.ReadMapNext())
+            {
+                for (int j = 0; j < n; j++)
+                {
+                    string k = d.ReadString();
+                    AddMapEntry(result, k, Read(null, writerSchema.ValueSchema, rs.ValueSchema, d));
+                }
+            }
+            return result;
+        }
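+
+        // Editor's sketch (hypothetical): maps are block-encoded like arrays; each
+        // entry is a string key followed by a value of the map's value schema, so by
+        // default ReadMap yields a Dictionary<string, object>:
+        //
+        //   var map = (IDictionary<string, object>)ReadMap(null, ws, rs, d);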
+
+        /// <summary>
+        /// Used by the default implementation of ReadMap() to create a fresh map object. The default
+        /// implementation of this method returns an IDictionary<string, object>.
+        /// </summary>
+        /// <param name="reuse">If appropriate, use this map object instead of creating a new one.</param>
+        /// <param name="ms">The reader's map schema.</param>
+        /// <returns>An empty map object.</returns>
+        protected virtual object CreateMap(object reuse, MapSchema ms)
+        {
+            if (reuse != null && reuse is IDictionary<string, object>)
+            {
+                IDictionary<string, object> result = reuse as IDictionary<string, object>;
+                result.Clear();
+                return result;
+            }
+            return new Dictionary<string, object>();
+        }
+
+        /// <summary>
+        /// Adds an entry to the map.
+        /// </summary>
+        /// <param name="map">A map object, which is guaranteed to be one returned by a previous call to CreateMap().</param>
+        /// <param name="key">The key to add.</param>
+        /// <param name="value">The value to add.</param>
+        protected virtual void AddMapEntry(object map, string key, object value)
+        {
+            (map as IDictionary<string, object>).Add(key, value);
+        }
+
+        /// <summary>
+        /// Deserializes an object based on the writer's union schema.
+        /// </summary>
+        /// <param name="reuse">If appropriate, uses this object instead of creating a new one.</param>
+        /// <param name="writerSchema">The UnionSchema that the writer used.</param>
+        /// <param name="readerSchema">The schema the reader uses.</param>
+        /// <param name="d">The decoder for deserialization.</param>
+        /// <returns>The deserialized object.</returns>
+        protected virtual object ReadUnion(object reuse, UnionSchema writerSchema, Schema readerSchema, Decoder d)
+        {
+            int index = d.ReadUnionIndex();
+            Schema ws = writerSchema[index];
+
+            if (readerSchema is UnionSchema)
+                readerSchema = findBranch(readerSchema as UnionSchema, ws);
+            else if (!readerSchema.CanRead(ws))
+                throw new AvroException("Schema mismatch. Reader: " + ReaderSchema + ", writer: " + WriterSchema);
+
+            return Read(reuse, ws, readerSchema, d);
+        }
+
+        /// <summary>
+        /// Deserializes a fixed object and returns the object. The default implementation uses CreateFixed()
+        /// and GetFixedBuffer() and returns what CreateFixed() returned.
+        /// </summary>
+        /// <param name="reuse">If appropriate, uses this object instead of creating a new one.</param>
+        /// <param name="writerSchema">The FixedSchema the writer used during serialization.</param>
+        /// <param name="readerSchema">The schema that the readr uses. Must be a FixedSchema with the same
+        /// size as the writerSchema.</param>
+        /// <param name="d">The decoder for deserialization.</param>
+        /// <returns>The deserilized object.</returns>
+        protected virtual object ReadFixed(object reuse, FixedSchema writerSchema, Schema readerSchema, Decoder d)
+        {
+            FixedSchema rs = (FixedSchema)readerSchema;
+            if (rs.Size != writerSchema.Size)
+            {
+                throw new AvroException("Size mismatch between reader and writer fixed schemas. Writer: " + writerSchema +
+                    ", reader: " + readerSchema);
+            }
+
+            object ru = CreateFixed(reuse, rs);
+            byte[] bb = GetFixedBuffer(ru);
+            d.ReadFixed(bb);
+            return ru;
+        }
+
+        /// <summary>
+        /// Returns a fixed object.
+        /// </summary>
+        /// <param name="reuse">If appropriate, uses this object instead of creating a new one.</param>
+        /// <param name="rs">The reader's FixedSchema.</param>
+        /// <returns>A fixed object with an appropriate buffer.</returns>
+        protected virtual object CreateFixed(object reuse, FixedSchema rs)
+        {
+            return (reuse != null && reuse is GenericFixed && (reuse as GenericFixed).Schema.Equals(rs)) ?
+                (GenericFixed)reuse : new GenericFixed(rs);
+        }
+
+        /// <summary>
+        /// Returns a buffer of appropriate size to read data into.
+        /// </summary>
+        /// <param name="f">The fixed object. It is guaranteed that this is something that has been previously
+        /// returned by CreateFixed</param>
+        /// <returns>A byte buffer of fixed's size.</returns>
+        protected virtual byte[] GetFixedBuffer(object f)
+        {
+            return (f as GenericFixed).Value;
+        }
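+
+        // Editor's sketch (hypothetical): for a fixed schema of size 16, ReadFixed
+        // fills the GenericFixed's 16-byte buffer in place:
+        //
+        //   var gf = (GenericFixed)ReadFixed(null, ws, rs, d); // gf.Value.Length == 16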
+
+        /// <summary>
+        /// Skips a single value of the given writer schema from the decoder, without deserializing it.
+        /// </summary>
+        /// <param name="writerSchema">The schema of the value to skip.</param>
+        /// <param name="d">The decoder positioned at the value.</param>
+        protected virtual void Skip(Schema writerSchema, Decoder d)
+        {
+            switch (writerSchema.Tag)
+            {
+                case Schema.Type.Null:
+                    d.SkipNull();
+                    break;
+                case Schema.Type.Boolean:
+                    d.SkipBoolean();
+                    break;
+                case Schema.Type.Int:
+                    d.SkipInt();
+                    break;
+                case Schema.Type.Long:
+                    d.SkipLong();
+                    break;
+                case Schema.Type.Float:
+                    d.SkipFloat();
+                    break;
+                case Schema.Type.Double:
+                    d.SkipDouble();
+                    break;
+                case Schema.Type.String:
+                    d.SkipString();
+                    break;
+                case Schema.Type.Bytes:
+                    d.SkipBytes();
+                    break;
+                case Schema.Type.Record:
+                    foreach (Field f in writerSchema as RecordSchema) Skip(f.Schema, d);
+                    break;
+                case Schema.Type.Enumeration:
+                    d.SkipEnum();
+                    break;
+                case Schema.Type.Fixed:
+                    d.SkipFixed((writerSchema as FixedSchema).Size);
+                    break;
+                case Schema.Type.Array:
+                    {
+                        Schema s = (writerSchema as ArraySchema).ItemSchema;
+                        for (long n = d.ReadArrayStart(); n != 0; n = d.ReadArrayNext())
+                        {
+                            for (long i = 0; i < n; i++) Skip(s, d);
+                        }
+                    }
+                    break;
+                case Schema.Type.Map:
+                    {
+                        Schema s = (writerSchema as MapSchema).ValueSchema;
+                        for (long n = d.ReadMapStart(); n != 0; n = d.ReadMapNext())
+                        {
+                            for (long i = 0; i < n; i++) { d.SkipString(); Skip(s, d); }
+                        }
+                    }
+                    break;
+                case Schema.Type.Union:
+                    Skip((writerSchema as UnionSchema)[d.ReadUnionIndex()], d);
+                    break;
+                default:
+                    throw new AvroException("Unknown schema type: " + writerSchema);
+            }
+        }
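+
+        // Editor's note: Skip consumes exactly the bytes occupied by one value of the
+        // writer schema; ReadRecord relies on this to step over writer fields that have
+        // no counterpart in the reader schema.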
+
+        /// <summary>
+        /// Finds the branch of the given union schema that matches the given schema.
+        /// </summary>
+        /// <param name="us">The UnionSchema to search.</param>
+        /// <param name="s">The schema to match against the union's branches.</param>
+        /// <returns>The matching branch; throws an AvroException if none matches.</returns>
+        protected static Schema findBranch(UnionSchema us, Schema s)
+        {
+            int index = us.MatchingBranch(s);
+            if (index >= 0) return us[index];
+            throw new AvroException("No matching schema for " + s + " in " + us);
+        }
+
+    }
+}
diff --git a/lang/csharp/src/apache/main/Generic/GenericRecord.cs b/lang/csharp/src/apache/main/Generic/GenericRecord.cs
new file mode 100644
index 0000000..3804d15
--- /dev/null
+++ b/lang/csharp/src/apache/main/Generic/GenericRecord.cs
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using Avro;
+
+namespace Avro.Generic
+{
+    /// <summary>
+    /// The default type used by GenericReader and GenericWriter for RecordSchema.
+    /// </summary>
+    public class GenericRecord
+    {
+        public RecordSchema Schema { get; private set; }
+
+        private IDictionary<string, object> contents = new Dictionary<string, object>();
+        public GenericRecord(RecordSchema schema)
+        {
+            this.Schema = schema;
+        }
+
+        public object this[string fieldName]
+        {
+            get { return contents[fieldName]; }
+        }
+
+        public void Add(string fieldName, object fieldValue)
+        {
+            if (Schema.Contains(fieldName))
+            {
+                // TODO: Use a matcher to verify that object has the right type for the field.
+                //contents.Add(fieldName, fieldValue);
+                contents[fieldName] = fieldValue;
+                return;
+            }
+            throw new AvroException("No such field: " + fieldName);
+        }
+
+        public bool TryGetValue(string fieldName, out object result)
+        {
+            return contents.TryGetValue(fieldName, out result);
+        }
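+
+        // Editor's sketch (hypothetical usage, not upstream code). Assuming "json"
+        // holds a record schema with a string field "f1":
+        //
+        //   var rec = new GenericRecord((RecordSchema)Schema.Parse(json));
+        //   rec.Add("f1", "hello");   // unknown field names throw AvroException
+        //   object v;
+        //   rec.TryGetValue("f1", out v);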
+
+        public override bool Equals(object obj)
+        {
+            if (this == obj) return true;
+            if (obj is GenericRecord)
+            {
+                GenericRecord other = obj as GenericRecord;
+                return Schema.Equals(other.Schema) && areEqual(contents, other.contents);
+            }
+            return false;
+        }
+
+        private static bool areEqual(IDictionary<string, object> d1, IDictionary<string, object> d2)
+        {
+            if (d1.Count == d2.Count)
+            {
+                foreach (KeyValuePair<string, object> kv in d1)
+                {
+                    object o;
+                    if (!d2.TryGetValue(kv.Key, out o)) return false;
+                    if (!areEqual(o, kv.Value)) return false;
+                }
+                return true;
+            }
+            return false;
+        }
+
+        private static bool areEqual(object o1, object o2)
+        {
+            if (o1 == null) return o2 == null;
+            if (o2 == null) return false;
+            if (o1 is Array)
+            {
+                if (!(o2 is Array)) return false;
+                return areEqual(o1 as Array, o2 as Array);
+            }
+            else if (o1 is IDictionary<string, object>)
+            {
+                if (!(o2 is IDictionary<string, object>)) return false;
+                return areEqual(o1 as IDictionary<string, object>, o2 as IDictionary<string, object>);
+            }
+            return o1.Equals(o2);
+        }
+
+        private static bool areEqual(Array a1, Array a2)
+        {
+            if (a1.Length != a2.Length) return false;
+            for (int i = 0; i < a1.Length; i++)
+            {
+                if (!areEqual(a1.GetValue(i), a2.GetValue(i))) return false;
+            }
+            return true;
+        }
+
+        public override int GetHashCode()
+        {
+            return 31 * contents.GetHashCode()/* + 29 * Schema.GetHashCode()*/;
+        }
+
+        public override string ToString()
+        {
+            StringBuilder sb = new StringBuilder();
+            sb.Append("Schema: ");
+            sb.Append(Schema);
+            sb.Append(", contents: ");
+            sb.Append("{ ");
+            foreach (KeyValuePair<string, object> kv in contents)
+            {
+                sb.Append(kv.Key);
+                sb.Append(": ");
+                sb.Append(kv.Value);
+                sb.Append(", ");
+            }
+            sb.Append("}");
+            return sb.ToString();
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/Generic/GenericWriter.cs b/lang/csharp/src/apache/main/Generic/GenericWriter.cs
new file mode 100644
index 0000000..6764a58
--- /dev/null
+++ b/lang/csharp/src/apache/main/Generic/GenericWriter.cs
@@ -0,0 +1,446 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using Avro.IO;
+
+namespace Avro.Generic
+{
+    public delegate void Writer<T>(T t);
+    /// <summary>
+    /// A typesafe wrapper around DefaultWriter. While a DefaultWriter instance can
+    /// serialize objects of any type, an object of this class allows
+    /// only a single type of object to be serialized through it.
+    /// </summary>
+    /// <typeparam name="T">The type of object to be serialized.</typeparam>
+    public class GenericWriter<T> : DatumWriter<T>
+    {
+        private readonly DefaultWriter writer;
+        public GenericWriter(Schema schema) : this(new DefaultWriter(schema))
+        {
+
+        }
+
+        public Schema Schema { get { return writer.Schema; } }
+
+        public GenericWriter(DefaultWriter writer)
+        {
+            this.writer = writer;
+        }
+
+        /// <summary>
+        /// Serializes the given object using this writer's schema.
+        /// </summary>
+        /// <param name="value">The value to be serialized</param>
+        /// <param name="encoder">The encoder to use for serializing</param>
+        public void Write(T value, Encoder encoder)
+        {
+            writer.Write(value, encoder);
+        }
+    }
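+
+    // Editor's sketch (hypothetical usage, not upstream code): serializing a
+    // GenericRecord whose schema matches the writer's:
+    //
+    //   var writer = new GenericWriter<GenericRecord>(schema);
+    //   writer.Write(record, new BinaryEncoder(outputStream));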
+
+    /// <summary>
+    /// A general-purpose writer for serializing objects into a stream using
+    /// Avro. This class implements a default way of serializing objects. But
+    /// one can derive a class from this and override different methods to
+    /// achieve results that are different from the default implementation.
+    /// </summary>
+    public class DefaultWriter
+    {
+        public Schema Schema { get; private set; }
+
+        /// <summary>
+        /// Constructs a generic writer for the given schema.
+        /// </summary>
+        /// <param name="schema">The schema for the object to be serialized</param>
+        public DefaultWriter(Schema schema)
+        {
+            this.Schema = schema;
+        }
+
+        /// <summary>
+        /// Serializes the given value using this writer's schema.
+        /// </summary>
+        /// <param name="value">The value to be serialized</param>
+        /// <param name="encoder">The encoder to use for serializing</param>
+        public void Write<T>(T value, Encoder encoder)
+        {
+            Write(Schema, value, encoder);
+        }
+        /// <summary>
+        /// Examines the schema and dispatches the actual work to one
+        /// of the other methods of this class. This allows the derived
+        /// classes to override specific methods and get custom results.
+        /// </summary>
+        /// <param name="schema">The schema to use for serializing</param>
+        /// <param name="value">The value to be serialized</param>
+        /// <param name="encoder">The encoder to use during serialization</param>
+        public virtual void Write(Schema schema, object value, Encoder encoder)
+        {
+            switch (schema.Tag)
+            {
+                case Schema.Type.Null:
+                    WriteNull(value, encoder);
+                    break;
+                case Schema.Type.Boolean:
+                    Write<bool>(value, schema.Tag, encoder.WriteBoolean);
+                    break;
+                case Schema.Type.Int:
+                    Write<int>(value, schema.Tag, encoder.WriteInt);
+                    break;
+                case Schema.Type.Long:
+                    Write<long>(value, schema.Tag, encoder.WriteLong);
+                    break;
+                case Schema.Type.Float:
+                    Write<float>(value, schema.Tag, encoder.WriteFloat);
+                    break;
+                case Schema.Type.Double:
+                    Write<double>(value, schema.Tag, encoder.WriteDouble);
+                    break;
+                case Schema.Type.String:
+                    Write<string>(value, schema.Tag, encoder.WriteString);
+                    break;
+                case Schema.Type.Bytes:
+                    Write<byte[]>(value, schema.Tag, encoder.WriteBytes);
+                    break;
+                case Schema.Type.Record:
+                case Schema.Type.Error:
+                    WriteRecord(schema as RecordSchema, value, encoder);
+                    break;
+                case Schema.Type.Enumeration:
+                    WriteEnum(schema as EnumSchema, value, encoder);
+                    break;
+                case Schema.Type.Fixed:
+                    WriteFixed(schema as FixedSchema, value, encoder);
+                    break;
+                case Schema.Type.Array:
+                    WriteArray(schema as ArraySchema, value, encoder);
+                    break;
+                case Schema.Type.Map:
+                    WriteMap(schema as MapSchema, value, encoder);
+                    break;
+                case Schema.Type.Union:
+                    WriteUnion(schema as UnionSchema, value, encoder);
+                    break;
+                default:
+                    error(schema, value);
+                    break;
+            }
+        }
+
+        /// <summary>
+        /// Serializes a "null"
+        /// </summary>
+        /// <param name="value">The object to be serialized using null schema</param>
+        /// <param name="encoder">The encoder to use while serialization</param>
+        protected virtual void WriteNull(object value, Encoder encoder)
+        {
+            if (value != null) throw TypeMismatch(value, "null", "null");
+        }
+
+        /// <summary>
+        /// A generic method to serialize primitive Avro types.
+        /// </summary>
+        /// <typeparam name="S">Type of the C# type to be serialized</typeparam>
+        /// <param name="value">The value to be serialized</param>
+        /// <param name="tag">The schema type tag</param>
+        /// <param name="writer">The writer which should be used to write the given type.</param>
+        protected virtual void Write<S>(object value, Schema.Type tag, Writer<S> writer)
+        {
+            if (!(value is S)) throw TypeMismatch(value, tag.ToString(), typeof(S).ToString());
+            writer((S)value);
+        }
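+
+        // Editor's note (illustrative): the dispatch in Write(Schema, object, Encoder)
+        // pairs each primitive tag with the matching encoder method; an int value, for
+        // instance, flows through Write<int>(value, Schema.Type.Int, encoder.WriteInt).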
+
+        /// <summary>
+        /// Serializes a record using the given RecordSchema. It uses GetField method
+        /// to extract the field value from the given object.
+        /// </summary>
+        /// <param name="schema">The RecordSchema to use for serialization</param>
+        /// <param name="value">The value to be serialized</param>
+        /// <param name="encoder">The Encoder for serialization</param>
+        protected virtual void WriteRecord(RecordSchema schema, object value, Encoder encoder)
+        {
+            EnsureRecordObject(schema, value);
+            foreach (Field field in schema)
+            {
+                try
+                {
+                    object obj = GetField(value, field.Name, field.Pos);
+                    Write(field.Schema, obj, encoder);
+                }
+                catch (Exception ex)
+                {
+                    throw new AvroException(ex.Message + " in field " + field.Name);
+                }
+            }
+        }
+
+        protected virtual void EnsureRecordObject(RecordSchema s, object value)
+        {
+            if (value == null || !(value is GenericRecord) || !((value as GenericRecord).Schema.Equals(s)))
+            {
+                throw TypeMismatch(value, "record", "GenericRecord");
+            }
+        }
+
+        /// <summary>
+        /// Extracts the field value from the given object. In this default implementation,
+        /// value should be of type GenericRecord.
+        /// </summary>
+        /// <param name="value">The record value from which the field needs to be extracted</param>
+        /// <param name="fieldName">The name of the field in the record</param>
+        /// <param name="fieldPos">The position of field in the record</param>
+        /// <returns>The value of the field</returns>
+        protected virtual object GetField(object value, string fieldName, int fieldPos)
+        {
+            GenericRecord d = value as GenericRecord;
+            return d[fieldName];
+        }
+
+        /// <summary>
+        /// Serializes an enumeration. The default implementation expects the value to be a GenericEnum
+        /// whose schema matches the given EnumSchema.
+        /// </summary>
+        /// <param name="es">The EnumSchema for serialization</param>
+        /// <param name="value">Value to be written</param>
+        /// <param name="encoder">Encoder for serialization</param>
+        protected virtual void WriteEnum(EnumSchema es, object value, Encoder encoder)
+        {
+            if (value == null || !(value is GenericEnum) || !((value as GenericEnum).Schema.Equals(es)))
+                throw TypeMismatch(value, "enum", "GenericEnum");
+            encoder.WriteEnum(es.Ordinal((value as GenericEnum).Value));
+        }
+
+        /// <summary>
+        /// Serializes an array. The default implementation calls EnsureArrayObject() to ascertain that the
+        /// given value is an array. It then calls GetArrayLength() and GetArrayElement()
+        /// to access the members of the array and then serialize them.
+        /// </summary>
+        /// <param name="schema">The ArraySchema for serialization</param>
+        /// <param name="value">The value being serialized</param>
+        /// <param name="encoder">The encoder for serialization</param>
+        protected virtual void WriteArray(ArraySchema schema, object value, Encoder encoder)
+        {
+            EnsureArrayObject(value);
+            long l = GetArrayLength(value);
+            encoder.WriteArrayStart();
+            encoder.SetItemCount(l);
+            for (long i = 0; i < l; i++)
+            {
+                encoder.StartItem();
+                Write(schema.ItemSchema, GetArrayElement(value, i), encoder);
+            }
+            encoder.WriteArrayEnd();
+        }
+
+        /// <summary>
+        /// Checks if the given object is an array. If it is a valid array, this function returns normally. Otherwise,
+        /// it throws an exception. The default implementation checks if the value is an array.
+        /// </summary>
+        /// <param name="value"></param>
+        protected virtual void EnsureArrayObject(object value)
+        {
+            if (value == null || !(value is Array)) throw TypeMismatch(value, "array", "Array");
+        }
+
+        /// <summary>
+        /// Returns the length of an array. The default implementation requires the object
+        /// to be an Array and returns its length. The default implementation
+        /// guarantees that EnsureArrayObject() has been called on the value before this
+        /// function is called.
+        /// </summary>
+        /// <param name="value">The object whose array length is required</param>
+        /// <returns>The array length of the given object</returns>
+        protected virtual long GetArrayLength(object value)
+        {
+            return (value as Array).Length;
+        }
+
+        /// <summary>
+        /// Returns the element at the given index from the given array object. The default implementation
+        /// requires that the value is an Array and returns the element at that index. The default implementation
+        /// guarantees that EnsureArrayObject() has been called on the value before this
+        /// function is called.
+        /// </summary>
+        /// <param name="value">The array object</param>
+        /// <param name="index">The index to look for</param>
+        /// <returns>The array element at the index</returns>
+        protected virtual object GetArrayElement(object value, long index)
+        {
+            return (value as Array).GetValue(index);
+        }
+
+        /// <summary>
+        /// Serializes a map. The default implementation first ensures that the value is indeed a map and then uses
+        /// GetMapSize() and GetMapElements() to access the contents of the map.
+        /// </summary>
+        /// <param name="schema">The MapSchema for serialization</param>
+        /// <param name="value">The value to be serialized</param>
+        /// <param name="encoder">The encoder for serialization</param>
+        protected virtual void WriteMap(MapSchema schema, object value, Encoder encoder)
+        {
+            EnsureMapObject(value);
+            IDictionary<string, object> vv = (IDictionary<string, object>)value;
+            encoder.WriteMapStart();
+            encoder.SetItemCount(GetMapSize(value));
+            foreach (KeyValuePair<string, object> obj in GetMapValues(vv))
+            {
+                encoder.StartItem();
+                encoder.WriteString(obj.Key);
+                Write(schema.ValueSchema, obj.Value, encoder);
+            }
+            encoder.WriteMapEnd();
+        }
+
+        /// <summary>
+        /// Checks if the given object is a map. If it is a valid map, this function returns normally. Otherwise,
+        /// it throws an exception. The default implementation checks if the value is an IDictionary<string, object>.
+        /// </summary>
+        /// <param name="value"></param>
+        protected virtual void EnsureMapObject(object value)
+        {
+            if (value == null || !(value is IDictionary<string, object>)) throw TypeMismatch(value, "map", "IDictionary<string, object>");
+        }
+
+        /// <summary>
+        /// Returns the size of the map object. The default implementation guarantees that EnsureMapObject has been
+        /// successfully called with the given value. The default implementation requires the value
+        /// to be an IDictionary<string, object> and returns the number of elements in it.
+        /// </summary>
+        /// <param name="value">The map object whose size is desired</param>
+        /// <returns>The size of the given map object</returns>
+        protected virtual long GetMapSize(object value)
+        {
+            return (value as IDictionary<string, object>).Count;
+        }
+
+        /// <summary>
+        /// Returns the contents of the given map object. The default implementation guarantees that EnsureMapObject
+        /// has been called with the given value. The default implementation of this method requires that
+        /// the value is an IDictionary<string, object> and returns its contents.
+        /// </summary>
+        /// <param name="value">The map object whose size is desired</param>
+        /// <returns>The contents of the given map object</returns>
+        protected virtual IEnumerable<KeyValuePair<string, object>> GetMapValues(object value)
+        {
+            return value as IDictionary<string, object>;
+        }
+
+        /// <summary>
+        /// Resolves the given value against the given UnionSchema and serializes the object against
+        /// the resolved schema member. The default implementation of this method uses
+        /// ResolveUnion to find the member schema within the UnionSchema.
+        /// </summary>
+        /// <param name="us">The UnionSchema to resolve against</param>
+        /// <param name="value">The value to be serialized</param>
+        /// <param name="encoder">The encoder for serialization</param>
+        protected virtual void WriteUnion(UnionSchema us, object value, Encoder encoder)
+        {
+            int index = ResolveUnion(us, value);
+            encoder.WriteUnionIndex(index);
+            Write(us[index], value, encoder);
+        }
+
+        /// <summary>
+        /// Finds the branch within the given UnionSchema that matches the given object. The default implementation
+        /// calls Matches() method in the order of branches within the UnionSchema. If nothing matches, throws
+        /// an exception.
+        /// </summary>
+        /// <param name="us">The UnionSchema to resolve against</param>
+        /// <param name="obj">The object that should be used in matching</param>
+        /// <returns>The index of the first matching branch within the UnionSchema</returns>
+        protected virtual int ResolveUnion(UnionSchema us, object obj)
+        {
+            for (int i = 0; i < us.Count; i++)
+            {
+                if (Matches(us[i], obj)) return i;
+            }
+            throw new AvroException("Cannot find a match for " + obj.GetType() + " in " + us);
+        }
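+
+        // Editor's sketch (hypothetical): for the union ["null", "string"],
+        // ResolveUnion maps null to branch 0 and "abc" to branch 1, so
+        // WriteUnion(us, "abc", encoder) writes union index 1 followed by the
+        // string itself.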
+
+        /// <summary>
+        /// Serializes a fixed object. The default implementation requires that the value is
+        /// a GenericFixed object with an identical schema as es.
+        /// </summary>
+        /// <param name="es">The schema for serialization</param>
+        /// <param name="value">The value to be serialized</param>
+        /// <param name="encoder">The encoder for serialization</param>
+        protected virtual void WriteFixed(FixedSchema es, object value, Encoder encoder)
+        {
+            if (value == null || !(value is GenericFixed) || !(value as GenericFixed).Schema.Equals(es))
+            {
+                throw TypeMismatch(value, "fixed", "GenericFixed");
+            }
+            GenericFixed ba = (GenericFixed)value;
+            encoder.WriteFixed(ba.Value);
+        }
+
+        protected AvroException TypeMismatch(object obj, string schemaType, string type)
+        {
+            return new AvroException(type + " required to write against " + schemaType + " schema but found " + (null == obj ? "null" : obj.GetType().ToString()) );
+        }
+
+        private void error(Schema schema, Object value)
+        {
+            throw new AvroTypeException("Not a " + schema + ": " + value);
+        }
+
+        /*
+         * FIXME: This method of determining the union branch has problems. If the data is an
+         * IDictionary<string, object> and the union has both a record branch and a map branch, it chooses
+         * the first one. Similarly, if the data is a byte[] and the union has both fixed and bytes branches,
+         * it chooses the first one that matches. Also, it does not recognize arrays of primitive types.
+         */
+        protected virtual bool Matches(Schema sc, object obj)
+        {
+            if (obj == null && sc.Tag != Avro.Schema.Type.Null) return false;
+            switch (sc.Tag)
+            {
+                case Schema.Type.Null:
+                    return obj == null;
+                case Schema.Type.Boolean:
+                    return obj is bool;
+                case Schema.Type.Int:
+                    return obj is int;
+                case Schema.Type.Long:
+                    return obj is long;
+                case Schema.Type.Float:
+                    return obj is float;
+                case Schema.Type.Double:
+                    return obj is double;
+                case Schema.Type.Bytes:
+                    return obj is byte[];
+                case Schema.Type.String:
+                    return obj is string;
+                case Schema.Type.Record:
+                    //return obj is GenericRecord && (obj as GenericRecord).Schema.Equals(s);
+                    return obj is GenericRecord && (obj as GenericRecord).Schema.SchemaName.Equals((sc as RecordSchema).SchemaName);
+                case Schema.Type.Enumeration:
+                    //return obj is GenericEnum && (obj as GenericEnum).Schema.Equals(s);
+                    return obj is GenericEnum && (obj as GenericEnum).Schema.SchemaName.Equals((sc as EnumSchema).SchemaName);
+                case Schema.Type.Array:
+                    return obj is Array && !(obj is byte[]);
+                case Schema.Type.Map:
+                    return obj is IDictionary<string, object>;
+                case Schema.Type.Union:
+                    return false;   // Union directly within another union not allowed!
+                case Schema.Type.Fixed:
+                    //return obj is GenericFixed && (obj as GenericFixed).Schema.Equals(s);
+                    return obj is GenericFixed && (obj as GenericFixed).Schema.SchemaName.Equals((sc as FixedSchema).SchemaName);
+                default:
+                    throw new AvroException("Unknown schema type: " + sc.Tag);
+            }
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/Generic/PreresolvingDatumReader.cs b/lang/csharp/src/apache/main/Generic/PreresolvingDatumReader.cs
new file mode 100644
index 0000000..e0ef409
--- /dev/null
+++ b/lang/csharp/src/apache/main/Generic/PreresolvingDatumReader.cs
@@ -0,0 +1,596 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System.Collections.Generic;
+using System.IO;
+using Avro.IO;
+
+namespace Avro.Generic
+{
+    /// <summary>
+    /// A general purpose reader of data from Avro streams. This reader analyzes and resolves the reader and writer schemas
+    /// when constructed so that reads can be more efficient. Once constructed, a reader can be reused or shared among threads
+    /// to avoid incurring more resolution costs.
+    /// </summary>
+    public abstract class PreresolvingDatumReader<T> : DatumReader<T>
+    {
+        public Schema ReaderSchema { get; private set; }
+        public Schema WriterSchema { get; private set; }
+
+        protected delegate object ReadItem(object reuse, Decoder dec);
+        
+        // read a specific field from a decoder
+        private delegate object DecoderRead(Decoder dec);
+        // skip specific field(s) from a decoder
+        private delegate void DecoderSkip(Decoder dec);
+        // read & set fields on a record
+        private delegate void FieldReader(object record, Decoder decoder);
+
+        private readonly ReadItem _reader;
+        private readonly Dictionary<SchemaPair,ReadItem> _recordReaders = new Dictionary<SchemaPair,ReadItem>();
+        
+        protected PreresolvingDatumReader(Schema writerSchema, Schema readerSchema)
+        {
+            ReaderSchema = readerSchema;
+            WriterSchema = writerSchema;
+            if (!ReaderSchema.CanRead(WriterSchema))
+                throw new AvroException("Schema mismatch. Reader: " + ReaderSchema + ", writer: " + WriterSchema);
+            _reader = ResolveReader(writerSchema, readerSchema);
+        }
+
+        public T Read(T reuse, Decoder decoder)
+        {
+            return (T)_reader(reuse, decoder);
+        }
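+
+        // Editor's sketch (hypothetical usage): a concrete subclass supplies the
+        // *Access implementations; all resolution work happens once in the
+        // constructor, so a single instance can serve many reads:
+        //
+        //   var r = new GenericDatumReader<GenericRecord>(ws, rs); // subclass assumed
+        //   GenericRecord rec = r.Read(null, new BinaryDecoder(stream));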
+
+        protected abstract ArrayAccess GetArrayAccess(ArraySchema readerSchema);
+        protected abstract EnumAccess GetEnumAccess(EnumSchema readerSchema);
+        protected abstract MapAccess GetMapAccess(MapSchema readerSchema);
+        protected abstract RecordAccess GetRecordAccess(RecordSchema readerSchema);
+        protected abstract FixedAccess GetFixedAccess(FixedSchema readerSchema);
+
+        /// <summary>
+        /// Build a reader that accounts for schema differences between the reader and writer schemas.
+        /// </summary>
+        private ReadItem ResolveReader(Schema writerSchema, Schema readerSchema)
+        {
+            if (readerSchema.Tag == Schema.Type.Union && writerSchema.Tag != Schema.Type.Union)
+            {
+                readerSchema = FindBranch(readerSchema as UnionSchema, writerSchema);
+            }
+            switch (writerSchema.Tag)
+            {
+                case Schema.Type.Null:
+                    return ReadNull;
+                case Schema.Type.Boolean:
+                    return ReadBoolean;
+                case Schema.Type.Int:
+                    {
+                        switch (readerSchema.Tag)
+                        {
+                            case Schema.Type.Long:
+                                return Read(d => (long) d.ReadInt());
+                            case Schema.Type.Float:
+                                return Read(d => (float) d.ReadInt());
+                            case Schema.Type.Double:
+                                return Read(d => (double) d.ReadInt());
+                            default:
+                                return Read(d => d.ReadInt());
+                        }
+                    }
+                case Schema.Type.Long:
+                    {
+                        switch (readerSchema.Tag)
+                        {
+                            case Schema.Type.Float:
+                                return Read(d => (float) d.ReadLong());
+                            case Schema.Type.Double:
+                                return Read(d => (double) d.ReadLong());
+                            default:
+                                return Read(d => d.ReadLong());
+                        }
+                    }
+                case Schema.Type.Float:
+                    {
+                        switch (readerSchema.Tag)
+                        {
+                            case Schema.Type.Double:
+                                return Read(d => (double) d.ReadFloat());
+                            default:
+                                return Read(d => d.ReadFloat());
+                        }
+                    }
+                case Schema.Type.Double:
+                    return Read(d => d.ReadDouble());
+                case Schema.Type.String:
+                    return Read(d => d.ReadString());
+                case Schema.Type.Bytes:
+                    return Read(d => d.ReadBytes());
+                case Schema.Type.Error:
+                case Schema.Type.Record:
+                    return ResolveRecord((RecordSchema)writerSchema, (RecordSchema)readerSchema);
+                case Schema.Type.Enumeration:
+                    return ResolveEnum((EnumSchema)writerSchema, (EnumSchema)readerSchema);
+                case Schema.Type.Fixed:
+                    return ResolveFixed((FixedSchema)writerSchema, (FixedSchema)readerSchema);
+                case Schema.Type.Array:
+                    return ResolveArray((ArraySchema)writerSchema, (ArraySchema)readerSchema);
+                case Schema.Type.Map:
+                    return ResolveMap((MapSchema)writerSchema, (MapSchema)readerSchema);
+                case Schema.Type.Union:
+                    return ResolveUnion((UnionSchema)writerSchema, readerSchema);
+                default:
+                    throw new AvroException("Unknown schema type: " + writerSchema);
+            }
+        }
+
+        private ReadItem ResolveEnum(EnumSchema writerSchema, EnumSchema readerSchema)
+        {
+            var enumAccess = GetEnumAccess(readerSchema);
+
+            if (readerSchema.Equals(writerSchema))
+            {
+                return (r, d) => enumAccess.CreateEnum(r, d.ReadEnum());
+            }
+
+            var translator = new int[writerSchema.Symbols.Count];
+
+            foreach (var symbol in writerSchema.Symbols)
+            {
+                var writerOrdinal = writerSchema.Ordinal(symbol);
+                if (readerSchema.Contains(symbol))
+                {
+                    translator[writerOrdinal] = readerSchema.Ordinal(symbol);
+                }
+                else
+                {
+                    translator[writerOrdinal] = -1;
+                }
+            }
+
+            return (r, d) =>
+                        {
+                            var writerOrdinal = d.ReadEnum();
+                            var readerOrdinal = translator[writerOrdinal];
+                            if (readerOrdinal == -1)
+                            {
+                                throw new AvroException("No such symbol: " + writerSchema[writerOrdinal]);
+                            }
+                            return enumAccess.CreateEnum(r, readerOrdinal);
+                        };
+        }
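+
+        // Editor's note (illustrative): with writer symbols {A, B, C} and reader
+        // symbols {B, C, D}, the translator becomes [-1, ord(B), ord(C)], so a datum
+        // written as A throws "No such symbol" while B and C map to the reader's
+        // ordinals.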
+
+        private ReadItem ResolveRecord(RecordSchema writerSchema, RecordSchema readerSchema)
+        {
+            var schemaPair = new SchemaPair(writerSchema, readerSchema);
+            ReadItem recordReader;
+
+            if (_recordReaders.TryGetValue(schemaPair, out recordReader))
+            {
+                return recordReader;
+            }
+
+            FieldReader[] fieldReaderArray = null;
+            var recordAccess = GetRecordAccess(readerSchema);
+
+            recordReader = (r, d) => ReadRecord(r, d, recordAccess, fieldReaderArray);
+            _recordReaders.Add(schemaPair, recordReader);
+
+            var readSteps = new List<FieldReader>();
+
+            foreach (Field wf in writerSchema)
+            {
+                Field rf;
+                if (readerSchema.TryGetFieldAlias(wf.Name, out rf))
+                {
+                    var readItem = ResolveReader(wf.Schema, rf.Schema);
+                    if(IsReusable(rf.Schema.Tag))
+                    {
+                        readSteps.Add((rec,d) => recordAccess.AddField(rec, rf.Name, rf.Pos,
+                            readItem(recordAccess.GetField(rec, rf.Name, rf.Pos), d)));
+                    }
+                    else
+                    {
+                        readSteps.Add((rec, d) => recordAccess.AddField(rec, rf.Name, rf.Pos,
+                            readItem(null, d)));
+                    }
+                }
+                else
+                {
+                    var skip = GetSkip(wf.Schema);
+                    readSteps.Add((rec, d) => skip(d));
+                }
+            }
+
+            // fill in defaults for any reader fields not in the writer schema
+            foreach (Field rf in readerSchema)
+            {
+                if (writerSchema.Contains(rf.Name)) continue;
+
+                var defaultStream = new MemoryStream();
+                var defaultEncoder = new BinaryEncoder(defaultStream);
+
+                defaultStream.Position = 0; // reset for writing
+                Resolver.EncodeDefaultValue(defaultEncoder, rf.Schema, rf.DefaultValue);
+                defaultStream.Flush();
+                var defaultBytes = defaultStream.ToArray();
+
+                var readItem = ResolveReader(rf.Schema, rf.Schema);
+
+                var rfInstance = rf;
+                if(IsReusable(rf.Schema.Tag))
+                {
+                    readSteps.Add((rec, d) => recordAccess.AddField(rec, rfInstance.Name, rfInstance.Pos,
+                        readItem(recordAccess.GetField(rec, rfInstance.Name, rfInstance.Pos),
+                            new BinaryDecoder(new MemoryStream( defaultBytes)))));
+                }
+                else
+                {
+                    readSteps.Add((rec, d) => recordAccess.AddField(rec, rfInstance.Name, rfInstance.Pos,
+                        readItem(null, new BinaryDecoder(new MemoryStream(defaultBytes)))));
+                }
+            }
+
+            fieldReaderArray = readSteps.ToArray();
+            return recordReader;
+        }
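+
+        // Editor's note: recordReader is cached in _recordReaders *before* the field
+        // readers are built, so recursive record schemas (a record with a field of its
+        // own type) resolve to the cached reader instead of recursing indefinitely;
+        // fieldReaderArray is assigned afterwards and the closure observes the
+        // completed array.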
+
+        private object ReadRecord(object reuse, Decoder decoder, RecordAccess recordAccess, IEnumerable<FieldReader> readSteps )
+        {
+            var rec = recordAccess.CreateRecord(reuse);
+            foreach (FieldReader fr in readSteps)
+            {
+                fr(rec, decoder);
+                // TODO: on exception, report offending field
+            }
+            return rec;
+        }
+
+        private ReadItem ResolveUnion(UnionSchema writerSchema, Schema readerSchema)
+        {
+            var lookup = new ReadItem[writerSchema.Count];
+
+            for (int i = 0; i < writerSchema.Count; i++)
+            {
+                var writerBranch = writerSchema[i];
+
+                if (readerSchema is UnionSchema)
+                {
+                    var unionReader = (UnionSchema) readerSchema;
+                    var readerBranch = unionReader.MatchingBranch(writerBranch);
+                    if (readerBranch == -1)
+                    {
+                        lookup[i] = (r, d) => { throw new AvroException( "No matching schema for " + writerBranch + " in " + unionReader ); };
+                    }
+                    else
+                    {
+                        lookup[i] = ResolveReader(writerBranch, unionReader[readerBranch]);
+                    }
+                }
+                else
+                {
+                    if (!readerSchema.CanRead(writerBranch))
+                    {
+                        lookup[i] = (r, d) => { throw new AvroException( "Schema mismatch. Reader: " + ReaderSchema + ", writer: " + WriterSchema ); };
+                    }
+                    else
+                    {
+                        lookup[i] = ResolveReader(writerBranch, readerSchema);
+                    }
+                }
+            }
+
+            return (r, d) => ReadUnion(r, d, lookup);
+        }
+
+        private object ReadUnion(object reuse, Decoder d, ReadItem[] branchLookup)
+        {
+            return branchLookup[d.ReadUnionIndex()](reuse, d);
+        }
+
+        private ReadItem ResolveMap(MapSchema writerSchema, MapSchema readerSchema)
+        {
+            var rs = readerSchema.ValueSchema;
+            var ws = writerSchema.ValueSchema;
+            
+            var reader = ResolveReader(ws, rs);
+            var mapAccess = GetMapAccess(readerSchema);
+
+            return (r,d) => ReadMap(r, d, mapAccess, reader);
+        }
+
+        private object ReadMap(object reuse, Decoder decoder, MapAccess mapAccess, ReadItem valueReader)
+        {
+            object map = mapAccess.Create(reuse);
+
+            for (int n = (int)decoder.ReadMapStart(); n != 0; n = (int)decoder.ReadMapNext())
+            {
+                mapAccess.AddElements(map, n, valueReader, decoder, false);
+            }
+            return map;
+        }
+
+        private ReadItem ResolveArray(ArraySchema writerSchema, ArraySchema readerSchema)
+        {
+            var itemReader = ResolveReader(writerSchema.ItemSchema, readerSchema.ItemSchema);
+
+            var arrayAccess = GetArrayAccess(readerSchema);
+            return (r, d) => ReadArray(r, d, arrayAccess, itemReader, IsReusable(readerSchema.ItemSchema.Tag));
+        }
+
+        private object ReadArray(object reuse, Decoder decoder, ArrayAccess arrayAccess, ReadItem itemReader, bool itemReusable)
+        {
+            object array = arrayAccess.Create(reuse);
+            int i = 0;
+            for (int n = (int)decoder.ReadArrayStart(); n != 0; n = (int)decoder.ReadArrayNext())
+            {
+                arrayAccess.EnsureSize(ref array, i + n);
+                arrayAccess.AddElements(array, n, i, itemReader, decoder, itemReusable);
+                i += n;
+            }
+            arrayAccess.Resize(ref array, i);
+            return array;
+        }
+
+        private ReadItem ResolveFixed(FixedSchema writerSchema, FixedSchema readerSchema)
+        {
+            if (readerSchema.Size != writerSchema.Size)
+            {
+                throw new AvroException("Size mismatch between reader and writer fixed schemas. Writer: " + writerSchema +
+                    ", reader: " + readerSchema);
+            }
+            var fixedAccess = GetFixedAccess(readerSchema);
+            return (r, d) => ReadFixed(r, d, fixedAccess);
+        }
+
+        private object ReadFixed(object reuse, Decoder decoder, FixedAccess fixedAccess)
+        {
+            var fixedrec = fixedAccess.CreateFixed(reuse);
+            decoder.ReadFixed(fixedAccess.GetFixedBuffer(fixedrec));
+            return fixedrec;
+        }
+
+        protected static Schema FindBranch(UnionSchema us, Schema s)
+        {
+            int index = us.MatchingBranch(s);
+            if (index >= 0) return us[index];
+            throw new AvroException("No matching schema for " + s + " in " + us);
+        }
+
+        private object ReadNull(object reuse, Decoder decoder)
+        {
+            decoder.ReadNull();
+            return null;
+        }
+
+        private object ReadBoolean(object reuse, Decoder decoder)
+        {
+            return decoder.ReadBoolean();
+        }
+
+        private ReadItem Read(DecoderRead decoderRead)
+        {
+            return (r, d) => decoderRead(d);
+        }
+
+        private DecoderSkip GetSkip(Schema writerSchema)
+        {
+            switch (writerSchema.Tag)
+            {
+                case Schema.Type.Null:
+                    return d => d.SkipNull();
+                case Schema.Type.Boolean:
+                    return d => d.SkipBoolean();
+                case Schema.Type.Int:
+                    return d => d.SkipInt();
+                case Schema.Type.Long:
+                    return d => d.SkipLong();
+                case Schema.Type.Float:
+                    return d => d.SkipFloat();
+                case Schema.Type.Double:
+                    return d => d.SkipDouble();
+                case Schema.Type.String:
+                    return d => d.SkipString();
+                case Schema.Type.Bytes:
+                    return d => d.SkipBytes();
+                case Schema.Type.Error:
+                case Schema.Type.Record:
+                    var recordSkips = new List<DecoderSkip>();
+                    var recSchema = (RecordSchema)writerSchema;
+                    recSchema.Fields.ForEach(r => recordSkips.Add(GetSkip(r.Schema)));
+                    return d => recordSkips.ForEach(s=>s(d));
+                case Schema.Type.Enumeration:
+                    return d => d.SkipEnum();
+                case Schema.Type.Fixed:
+                    var size = ((FixedSchema)writerSchema).Size;
+                    return d => d.SkipFixed(size);
+                case Schema.Type.Array:
+                    var itemSkip = GetSkip(((ArraySchema)writerSchema).ItemSchema);
+                    return d =>
+                    {
+                        for (long n = d.ReadArrayStart(); n != 0; n = d.ReadArrayNext())
+                        {
+                            for (long i = 0; i < n; i++) itemSkip(d);
+                        }
+                    };
+                case Schema.Type.Map:
+                    {
+                        var valueSkip = GetSkip(((MapSchema)writerSchema).ValueSchema);
+                        return d =>
+                        {
+                            for (long n = d.ReadMapStart(); n != 0; n = d.ReadMapNext())
+                            {
+                                for (long i = 0; i < n; i++) { d.SkipString(); valueSkip(d); }
+                            }
+                        };
+                    }
+                case Schema.Type.Union:
+                    var unionSchema = (UnionSchema)writerSchema;
+                    var lookup = new DecoderSkip[unionSchema.Count];
+                    for (int i = 0; i < unionSchema.Count; i++)
+                    {
+                        lookup[i] = GetSkip( unionSchema[i] );
+                    }
+                    return d => lookup[d.ReadUnionIndex()](d);
+                default:
+                    throw new AvroException("Unknown schema type: " + writerSchema);
+            }
+        }
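+
+        // Editorial note (assumption): the skip delegates built here are presumably used when the
+        // writer's data carries fields or union branches that the reader schema discards, letting
+        // the encoded bytes be consumed without materializing values.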
+
+        /// <summary>
+        /// Indicates if it's possible to reuse an object of the specified type. Generally
+        /// false for immutable objects like int, long, and string, but this may differ between
+        /// the Specific and Generic implementations. Used to avoid retrieving the existing
+        /// value if it's not reusable.
+        /// </summary>
+        protected virtual bool IsReusable(Schema.Type tag)
+        {
+            return true;
+        }
+
+        // interfaces to handle details of working with Specific vs Generic objects
+
+        protected interface RecordAccess
+        {
+            /// <summary>
+            /// Creates a new record object. Derived classes can override this to return an object of their choice.
+            /// </summary>
+            /// <param name="reuse">If appropriate, will reuse this object instead of constructing a new one</param>
+            /// <returns></returns>
+            object CreateRecord(object reuse);
+
+            /// <summary>
+            /// Used by the default implementation of ReadRecord() to get the existing field of a record object. The derived
+            /// classes can override this to make their own interpretation of the record object.
+            /// </summary>
+            /// <param name="record">The record object to be probed into. This is guaranteed to be one that was returned
+            /// by a previous call to CreateRecord.</param>
+            /// <param name="fieldName">The name of the field to probe.</param>
+            /// <param name="fieldPos">field number</param>
+            /// <returns>The value of the field, if found. Null otherwise.</returns>
+            object GetField(object record, string fieldName, int fieldPos);
+
+            /// <summary>
+            /// Used by the default implementation of ReadRecord() to add a field to a record object. The derived
+            /// classes can override this to suit their own implementation of the record object.
+            /// </summary>
+            /// <param name="record">The record object to be probed into. This is guaranteed to be one that was returned
+            /// by a previous call to CreateRecord.</param>
+            /// <param name="fieldName">The name of the field to probe.</param>
+            /// <param name="fieldPos">field number</param>
+            /// <param name="fieldValue">The value to be added for the field</param>
+            void AddField(object record, string fieldName, int fieldPos, object fieldValue);
+        }
+
+        protected interface EnumAccess
+        {
+            object CreateEnum(object reuse, int ordinal);
+        }
+
+        protected interface FixedAccess
+        {
+            /// <summary>
+            /// Returns a fixed object.
+            /// </summary>
+            /// <param name="reuse">If appropriate, uses this object instead of creating a new one.</param>
+            /// <returns>A fixed object with an appropriate buffer.</returns>
+            object CreateFixed(object reuse);
+
+            /// <summary>
+            /// Returns a buffer of appropriate size to read data into.
+            /// </summary>
+            /// <param name="f">The fixed object. It is guaranteed that this is something that has been previously
+            /// returned by CreateFixed</param>
+            /// <returns>A byte buffer of fixed's size.</returns>
+            byte[] GetFixedBuffer(object f);
+        }
+
+        protected interface ArrayAccess
+        {
+            /// <summary>
+            /// Creates a new array object. The initial size of the object could be anything.
+            /// </summary>
+            /// <param name="reuse">If appropriate use this instead of creating a new one.</param>
+            /// <returns>An object suitable for deserializing an Avro array</returns>
+            object Create(object reuse);
+            
+            /// <summary>
+            /// Hints that the array should be able to handle at least targetSize elements. The array
+            /// is not required to be resized.
+            /// </summary>
+            /// <param name="array">Array object that needs to support targetSize elements. This is guaranteed to be something returned by
+            /// a previous call to Create().</param>
+            /// <param name="targetSize">The minimum capacity required.</param>
+            void EnsureSize(ref object array, int targetSize);
+
+            /// <summary>
+            /// Resizes the array to the given size.
+            /// </summary>
+            /// <param name="array">Array object to be resized. This is guaranteed to be something returned by
+            /// a previous call to Create().</param>
+            /// <param name="targetSize">The new size.</param>
+            void Resize(ref object array, int targetSize);
+
+            void AddElements( object array, int elements, int index, ReadItem itemReader, Decoder decoder, bool reuse );
+        }
+
+        protected interface MapAccess
+        {
+            /// <summary>
+            /// Creates a new map object.
+            /// </summary>
+            /// <param name="reuse">If appropriate, use this map object instead of creating a new one.</param>
+            /// <returns>An empty map object.</returns>
+            object Create(object reuse);
+
+            void AddElements(object map, int elements, ReadItem itemReader, Decoder decoder, bool reuse);
+        }
+
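+        /// <summary>
+        /// Composite key pairing a writer schema with a reader schema. Equals and GetHashCode
+        /// allow it to serve as a dictionary key, e.g. for caching resolved readers per pair.
+        /// </summary>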
+        private class SchemaPair
+        {
+            private Schema _writerSchema;
+            private Schema _readerSchema;
+
+            public SchemaPair( Schema writerSchema, Schema readerSchema )
+            {
+                _writerSchema = writerSchema;
+                _readerSchema = readerSchema;
+            }
+
+            protected bool Equals( SchemaPair other )
+            {
+                return Equals( _writerSchema, other._writerSchema ) && Equals( _readerSchema, other._readerSchema );
+            }
+
+            public override bool Equals( object obj )
+            {
+                if( ReferenceEquals( null, obj ) ) return false;
+                if( ReferenceEquals( this, obj ) ) return true;
+                if( obj.GetType() != this.GetType() ) return false;
+                return Equals( (SchemaPair) obj );
+            }
+
+            public override int GetHashCode()
+            {
+                unchecked
+                {
+                    return ( ( _writerSchema != null ? _writerSchema.GetHashCode() : 0 ) * 397 ) ^ ( _readerSchema != null ? _readerSchema.GetHashCode() : 0 );
+                }
+            }
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/Generic/PreresolvingDatumWriter.cs b/lang/csharp/src/apache/main/Generic/PreresolvingDatumWriter.cs
new file mode 100644
index 0000000..39e68f6
--- /dev/null
+++ b/lang/csharp/src/apache/main/Generic/PreresolvingDatumWriter.cs
@@ -0,0 +1,375 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Linq;
+using Encoder = Avro.IO.Encoder;
+
+namespace Avro.Generic
+{
+    /// <summary>
+    /// A general purpose writer of data to Avro streams. This writer analyzes the writer schema
+    /// when constructed so that writes can be more efficient. Once constructed, a writer can be reused or shared among threads
+    /// to avoid incurring more resolution costs.
+    /// </summary>
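+    /// <example>
+    /// A minimal usage sketch (editorial; GenericDatumWriter is assumed to be a concrete
+    /// subclass provided elsewhere in this library, and Schema.Parse its companion parser):
+    /// <code>
+    /// var schema = Schema.Parse("{\"type\":\"array\",\"items\":\"int\"}");
+    /// var writer = new GenericDatumWriter&lt;object&gt;(schema);
+    /// using (var stream = new System.IO.MemoryStream())
+    /// {
+    ///     var encoder = new BinaryEncoder(stream);
+    ///     writer.Write(new object[] { 1, 2, 3 }, encoder); // the same writer can be reused
+    ///     encoder.Flush();
+    /// }
+    /// </code>
+    /// </example>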
+    public abstract class PreresolvingDatumWriter<T> : DatumWriter<T>
+    {
+        public Schema Schema { get; private set; }
+
+        protected delegate void WriteItem(Object value, Encoder encoder);
+
+        private readonly WriteItem _writer;
+        private readonly ArrayAccess _arrayAccess;
+        private readonly MapAccess _mapAccess;
+
+        private readonly Dictionary<RecordSchema,WriteItem> _recordWriters = new Dictionary<RecordSchema,WriteItem>();
+
+        public void Write(T datum, Encoder encoder)
+        {
+            _writer( datum, encoder );
+        }
+
+        protected PreresolvingDatumWriter(Schema schema, ArrayAccess arrayAccess, MapAccess mapAccess)
+        {
+            Schema = schema;
+            _arrayAccess = arrayAccess;
+            _mapAccess = mapAccess;
+            _writer = ResolveWriter(schema);
+        }
+
+        private WriteItem ResolveWriter( Schema schema )
+        {
+            switch (schema.Tag)
+            {
+                case Schema.Type.Null:
+                    return WriteNull;
+                case Schema.Type.Boolean:
+                    return (v, e) => Write<bool>( v, schema.Tag, e.WriteBoolean );
+                case Schema.Type.Int:
+                    return (v, e) => Write<int>( v, schema.Tag, e.WriteInt );
+                case Schema.Type.Long:
+                    return (v, e) => Write<long>( v, schema.Tag, e.WriteLong );
+                case Schema.Type.Float:
+                    return (v, e) => Write<float>( v, schema.Tag, e.WriteFloat );
+                case Schema.Type.Double:
+                    return (v, e) => Write<double>( v, schema.Tag, e.WriteDouble );
+                case Schema.Type.String:
+                    return (v, e) => Write<string>( v, schema.Tag, e.WriteString );
+                case Schema.Type.Bytes:
+                    return (v, e) => Write<byte[]>( v, schema.Tag, e.WriteBytes );
+                case Schema.Type.Error:
+                case Schema.Type.Record:
+                    return ResolveRecord((RecordSchema) schema);
+                case Schema.Type.Enumeration:
+                    return ResolveEnum(schema as EnumSchema);
+                case Schema.Type.Fixed:
+                    return (v, e) => WriteFixed(schema as FixedSchema, v, e);
+                case Schema.Type.Array:
+                    return ResolveArray((ArraySchema)schema);
+                case Schema.Type.Map:
+                    return ResolveMap((MapSchema)schema);
+                case Schema.Type.Union:
+                    return ResolveUnion((UnionSchema)schema);
+                default:
+                    return (v, e) => error(schema, v);
+            }
+        }
+
+        /// <summary>
+        /// Serializes a "null"
+        /// </summary>
+        /// <param name="value">The object to be serialized using null schema</param>
+        /// <param name="encoder">The encoder to use while serialization</param>
+        protected void WriteNull(object value, Encoder encoder)
+        {
+            if (value != null) throw TypeMismatch(value, "null", "null");
+        }
+
+        /// <summary>
+        /// A generic method to serialize primitive Avro types.
+        /// </summary>
+        /// <typeparam name="S">Type of the C# type to be serialized</typeparam>
+        /// <param name="value">The value to be serialized</param>
+        /// <param name="tag">The schema type tag</param>
+        /// <param name="writer">The writer which should be used to write the given type.</param>
+        protected void Write<S>(object value, Schema.Type tag, Writer<S> writer)
+        {
+            if (!(value is S)) throw TypeMismatch(value, tag.ToString(), typeof(S).ToString());
+            writer((S)value);
+        }
+
+
+        /// <summary>
+        /// Resolves a writer for a record using the given RecordSchema. The returned delegate
+        /// serializes each field of a record value via WriteRecordFields().
+        /// </summary>
+        /// <param name="recordSchema">The RecordSchema to use for serialization</param>
+        private WriteItem ResolveRecord(RecordSchema recordSchema)
+        {
+            WriteItem recordResolver;
+            if (_recordWriters.TryGetValue(recordSchema, out recordResolver))
+            {
+                return recordResolver;
+            }
+            var writeSteps = new RecordFieldWriter[recordSchema.Fields.Count];
+            recordResolver = (v, e) => WriteRecordFields(v, writeSteps, e);
+            
+            _recordWriters.Add(recordSchema, recordResolver);
+
+            int index = 0;
+            foreach (Field field in recordSchema)
+            {
+                var record = new RecordFieldWriter
+                                 {
+                                     WriteField = ResolveWriter(field.Schema),
+                                     Field = field
+                                 };
+                writeSteps[index++] = record;
+            }
+
+            return recordResolver;
+        }
+
+        protected abstract void WriteRecordFields(object record, RecordFieldWriter[] writers, Encoder encoder);
+
+
+        protected class RecordFieldWriter
+        {
+            public WriteItem WriteField { get; set; }
+            public Field Field { get; set; }
+        }
+
+        protected abstract void EnsureRecordObject(RecordSchema recordSchema, object value);
+
+        /// <summary>
+        /// Writes a single field of the given record: extracts the field value and serializes it
+        /// with the supplied writer.
+        /// </summary>
+        /// <param name="record">The record from which the field value is extracted</param>
+        /// <param name="fieldName">The name of the field in the record</param>
+        /// <param name="fieldPos">The position of the field in the record</param>
+        /// <param name="writer">The writer for the field's value</param>
+        /// <param name="encoder">The encoder for serialization</param>
+        protected abstract void WriteField(object record, string fieldName, int fieldPos, WriteItem writer, Encoder encoder );
+
+        /// <summary>
+        /// Resolves a writer for an enumeration.
+        /// </summary>
+        /// <param name="es">The EnumSchema for serialization</param>
+        protected abstract WriteItem ResolveEnum(EnumSchema es);
+
+        /// <summary>
+        /// Resolves a writer for an array. The returned delegate calls EnsureArrayObject() to ascertain that a
+        /// given value is an array, then uses GetArrayLength() and WriteArrayValues()
+        /// to access the members of the array and serialize them.
+        /// </summary>
+        /// <param name="schema">The ArraySchema for serialization</param>
+        protected WriteItem ResolveArray(ArraySchema schema)
+        {
+            var itemWriter = ResolveWriter(schema.ItemSchema);
+            return (d,e) => WriteArray(itemWriter, d, e);
+        }
+
+        private void WriteArray(WriteItem itemWriter, object array, Encoder encoder)
+        {
+            _arrayAccess.EnsureArrayObject(array);
+            long l = _arrayAccess.GetArrayLength(array);
+            encoder.WriteArrayStart();
+            encoder.SetItemCount(l);
+            _arrayAccess.WriteArrayValues(array, itemWriter, encoder);
+            encoder.WriteArrayEnd();
+        }
+
+        private WriteItem ResolveMap(MapSchema mapSchema)
+        {
+            var itemWriter = ResolveWriter(mapSchema.ValueSchema);
+            return (v, e) => WriteMap(itemWriter, v, e);
+        }
+
+        /// <summary>
+        /// Serializes a map. The implementation first ensures that the value is indeed a map and then uses
+        /// GetMapSize() and WriteMapValues() to access the contents of the map.
+        /// </summary>
+        /// <param name="itemWriter">The writer for the map's values</param>
+        /// <param name="value">The value to be serialized</param>
+        /// <param name="encoder">The encoder for serialization</param>
+        protected void WriteMap(WriteItem itemWriter, object value, Encoder encoder)
+        {
+            _mapAccess.EnsureMapObject(value);
+            encoder.WriteMapStart();
+            encoder.SetItemCount(_mapAccess.GetMapSize(value));
+            _mapAccess.WriteMapValues(value, itemWriter, encoder);
+            encoder.WriteMapEnd();
+        }
+
+
+        private WriteItem ResolveUnion(UnionSchema unionSchema)
+        {
+            var branchSchemas = unionSchema.Schemas.ToArray();
+            var branchWriters = new WriteItem[branchSchemas.Length];
+            int branchIndex = 0;
+            foreach (var branch in branchSchemas)
+            {
+                branchWriters[branchIndex++] = ResolveWriter(branch);
+            }
+
+
+            return (v, e) => WriteUnion(unionSchema, branchSchemas, branchWriters, v, e);
+        }
+
+        /// <summary>
+        /// Resolves the given value against the given UnionSchema and serializes the object against
+        /// the resolved schema member.
+        /// </summary>
+        /// <param name="us">The UnionSchema to resolve against</param>
+        /// <param name="value">The value to be serialized</param>
+        /// <param name="encoder">The encoder for serialization</param>
+        private void WriteUnion(UnionSchema unionSchema, Schema[] branchSchemas, WriteItem[] branchWriters, object value, Encoder encoder)
+        {
+            int index = ResolveUnion(unionSchema, branchSchemas, value);
+            encoder.WriteUnionIndex(index);
+            branchWriters[index](value, encoder);
+        }
+
+        /// <summary>
+        /// Finds the branch within the given UnionSchema that matches the given object. The default implementation
+        /// calls UnionBranchMatches() on the branches in order. If nothing matches, it throws
+        /// an exception.
+        /// </summary>
+        /// <param name="us">The UnionSchema to resolve against</param>
+        /// <param name="branchSchemas">The branch schemas to test, in order</param>
+        /// <param name="obj">The object that should be used in matching</param>
+        /// <returns>The index of the first matching branch</returns>
+        protected int ResolveUnion(UnionSchema us, Schema[] branchSchemas, object obj)
+        {
+            for (int i = 0; i < branchSchemas.Length; i++)
+            {
+                if (UnionBranchMatches(branchSchemas[i], obj)) return i;
+            }
+            throw new AvroException("Cannot find a match for " + obj.GetType() + " in " + us);
+        }
+
+        /// <summary>
+        /// Serializes a fixed object. The default implementation requires that the value is
+        /// a GenericFixed object with a schema identical to es.
+        /// </summary>
+        /// <param name="es">The schema for serialization</param>
+        /// <param name="value">The value to be serialized</param>
+        /// <param name="encoder">The encoder for serialization</param>
+        protected abstract void WriteFixed(FixedSchema es, object value, Encoder encoder);
+
+        protected static AvroException TypeMismatch(object obj, string schemaType, string type)
+        {
+            return new AvroException(type + " required to write against " + schemaType + " schema but found " + (null == obj ? "null" : obj.GetType().ToString()) );
+        }
+
+        private void error(Schema schema, Object value)
+        {
+            throw new AvroTypeException("Not a " + schema + ": " + value);
+        }
+
+        protected abstract bool UnionBranchMatches(Schema sc, object obj);
+
+        protected interface EnumAccess
+        {
+            void WriteEnum(object value);
+        }
+
+        protected interface ArrayAccess
+        {
+            /// <summary>
+            /// Checks if the given object is an array. If it is a valid array, this function returns normally. Otherwise,
+            /// it throws an exception. The default implementation checks if the value is an array.
+            /// </summary>
+            /// <param name="value"></param>
+            void EnsureArrayObject(object value);
+
+            /// <summary>
+            /// Returns the length of an array. The default implementation requires the object
+            /// to be an array of objects and returns its length. The default implementation
+            /// guarantees that EnsureArrayObject() has been called on the value before this
+            /// function is called.
+            /// </summary>
+            /// <param name="value">The object whose array length is required</param>
+            /// <returns>The array length of the given object</returns>
+            long GetArrayLength(object value);
+
+            /// <summary>
+            /// Writes each element of the given array object using the supplied item writer. The default
+            /// implementation guarantees that EnsureArrayObject() has been called on the value before this
+            /// function is called.
+            /// </summary>
+            /// <param name="array">The array object</param>
+            /// <param name="valueWriter">The writer for individual array items</param>
+            /// <param name="encoder">The encoder for serialization</param>
+            void WriteArrayValues(object array, WriteItem valueWriter, Encoder encoder);
+        }
+
+        protected interface MapAccess
+        {
+            /// <summary>
+            /// Checks if the given object is a map. If it is a valid map, this function returns normally. Otherwise,
+            /// it throws an exception. The default implementation checks if the value is an IDictionary.
+            /// </summary>
+            /// <param name="value"></param>
+            void EnsureMapObject(object value);
+
+            /// <summary>
+            /// Returns the size of the map object. The default implementation guarantees that EnsureMapObject has been
+            /// successfully called with the given value. The default implementation requires the value
+            /// to be an IDictionary and returns the number of elements in it.
+            /// </summary>
+            /// <param name="value">The map object whose size is desired</param>
+            /// <returns>The size of the given map object</returns>
+            long GetMapSize(object value);
+
+            /// <summary>
+            /// Writes the contents of the given map object using the supplied value writer. The default
+            /// implementation guarantees that EnsureMapObject has been called with the given value and
+            /// requires the value to be an IDictionary.
+            /// </summary>
+            /// <param name="map">The map object whose contents are written</param>
+            /// <param name="valueWriter">The writer for individual map values</param>
+            /// <param name="encoder">The encoder for serialization</param>
+            void WriteMapValues(object map, WriteItem valueWriter, Encoder encoder);
+        }
+
+        protected class DictionaryMapAccess : MapAccess
+        {
+            public void EnsureMapObject( object value )
+            {
+                if( value == null || !( value is IDictionary ) ) throw TypeMismatch( value, "map", "IDictionary" );
+            }
+
+            public long GetMapSize( object value )
+            {
+                return ( (IDictionary) value ).Count;
+            }
+
+            public void WriteMapValues(object map, WriteItem valueWriter, Encoder encoder)
+            {
+                foreach (DictionaryEntry entry in ((IDictionary)map))
+                {
+                    encoder.StartItem();
+                    encoder.WriteString(entry.Key.ToString());
+                    valueWriter(entry.Value, encoder);
+                }
+            }
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/IO/BinaryDecoder.cs b/lang/csharp/src/apache/main/IO/BinaryDecoder.cs
new file mode 100644
index 0000000..a8afed8
--- /dev/null
+++ b/lang/csharp/src/apache/main/IO/BinaryDecoder.cs
@@ -0,0 +1,299 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.IO;
+
+namespace Avro.IO
+{
+    /// <summary>
+    /// Decoder for Avro binary format
+    /// </summary>
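+    /// <example>
+    /// A minimal sketch (editorial; assumes <c>stream</c> holds Avro binary data written by a
+    /// matching encoder):
+    /// <code>
+    /// var decoder = new BinaryDecoder(stream);
+    /// long id = decoder.ReadLong();
+    /// string name = decoder.ReadString();
+    /// </code>
+    /// </example>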
+    public class BinaryDecoder : Decoder
+    {
+        private readonly Stream stream;
+
+        public BinaryDecoder(Stream stream)
+        {
+            this.stream = stream;
+        }
+
+        /// <summary>
+        /// null is written as zero bytes
+        /// </summary>
+        public void ReadNull()
+        {
+        }
+
+        /// <summary>
+        /// a boolean is written as a single byte 
+        /// whose value is either 0 (false) or 1 (true).
+        /// </summary>
+        /// <returns>The boolean value read from the stream</returns>
+        public bool ReadBoolean()
+        {
+            byte b = read();
+            if (b == 0) return false;
+            if (b == 1) return true;
+            throw new AvroException("Not a boolean value in the stream: " + b);
+        }
+
+        /// <summary>
+        /// int and long values are written using variable-length, zig-zag coding.
+        /// </summary>
+        /// <param name="?"></param>
+        /// <returns></returns>
+        public int ReadInt()
+        {
+            return (int)ReadLong();
+        }
+        /// <summary>
+        /// int and long values are written using variable-length, zig-zag coding.
+        /// </summary>
+        /// <param name="?"></param>
+        /// <returns></returns>
+        public long ReadLong()
+        {
+            byte b = read();
+            ulong n = b & 0x7FUL;
+            int shift = 7;
+            while ((b & 0x80) != 0)
+            {
+                b = read();
+                n |= (b & 0x7FUL) << shift;
+                shift += 7;
+            }
+            long value = (long)n;
+            return (-(value & 0x01L)) ^ ((value >> 1) & 0x7fffffffffffffffL);
+        }
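+
+        // Worked example (editorial): zig-zag maps 0, -1, 1, -2, 2, ... to 0, 1, 2, 3, 4, ...;
+        // the varint bytes 0xD8 0x04 decode to n = 600, which the final line maps back to 300.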
+
+        /// <summary>
+        /// A float is written as 4 bytes.
+        /// The float is converted into a 32-bit integer using a method equivalent to
+        /// Java's floatToIntBits and then encoded in little-endian format.
+        /// </summary>
+        /// <returns>The float value read from the stream</returns>
+        public float ReadFloat()
+        {
+            byte[] buffer = read(4);
+
+            if (!BitConverter.IsLittleEndian)
+                Array.Reverse(buffer);
+
+            return BitConverter.ToSingle(buffer, 0);
+
+            //int bits = (Stream.ReadByte() & 0xff |
+            //(Stream.ReadByte()) & 0xff << 8 |
+            //(Stream.ReadByte()) & 0xff << 16 |
+            //(Stream.ReadByte()) & 0xff << 24);
+            //return intBitsToFloat(bits);
+        }
+
+        /// <summary>
+        /// A double is written as 8 bytes.
+        /// The double is converted into a 64-bit integer using a method equivalent to
+        /// Java's doubleToLongBits and then encoded in little-endian format.
+        /// </summary>
+        /// <param name="?"></param>
+        /// <returns></returns>
+        public double ReadDouble()
+        {
+            long bits = (stream.ReadByte() & 0xffL) |
+              (stream.ReadByte() & 0xffL) << 8 |
+              (stream.ReadByte() & 0xffL) << 16 |
+              (stream.ReadByte() & 0xffL) << 24 |
+              (stream.ReadByte() & 0xffL) << 32 |
+              (stream.ReadByte() & 0xffL) << 40 |
+              (stream.ReadByte() & 0xffL) << 48 |
+              (stream.ReadByte() & 0xffL) << 56;
+             return BitConverter.Int64BitsToDouble(bits);
+        }
+
+        /// <summary>
+        /// Bytes are encoded as a long followed by that many bytes of data. 
+        /// </summary>
+        /// <returns>The byte array read from the stream</returns>
+        public byte[] ReadBytes()
+        {
+            return read(ReadLong());
+        }
+
+        public string ReadString()
+        {
+            int length = ReadInt();
+            byte[] buffer = new byte[length];
+            // TODO: read the string without the intermediate byte[] allocation
+            ReadFixed(buffer);
+            return System.Text.Encoding.UTF8.GetString(buffer);
+        }
+
+        public int ReadEnum()
+        {
+            return ReadInt();
+        }
+
+        public long ReadArrayStart()
+        {
+            return doReadItemCount();
+        }
+
+        public long ReadArrayNext()
+        {
+            return doReadItemCount();
+        }
+
+        public long ReadMapStart()
+        {
+            return doReadItemCount();
+        }
+
+        public long ReadMapNext()
+        {
+            return doReadItemCount();
+        }
+
+        public int ReadUnionIndex()
+        {
+            return ReadInt();
+        }
+
+        public void ReadFixed(byte[] buffer)
+        {
+            ReadFixed(buffer, 0, buffer.Length);
+        }
+
+        public void ReadFixed(byte[] buffer, int start, int length)
+        {
+            Read(buffer, start, length);
+        }
+
+        public void SkipNull()
+        {
+            ReadNull();
+        }
+
+        public void SkipBoolean()
+        {
+            ReadBoolean();
+        }
+
+
+        public void SkipInt()
+        {
+            ReadInt();
+        }
+
+        public void SkipLong()
+        {
+            ReadLong();
+        }
+
+        public void SkipFloat()
+        {
+            Skip(4);
+        }
+
+        public void SkipDouble()
+        {
+            Skip(8);
+        }
+
+        public void SkipBytes()
+        {
+            Skip(ReadLong());
+        }
+
+        public void SkipString()
+        {
+            SkipBytes();
+        }
+
+        public void SkipEnum()
+        {
+            ReadLong();
+        }
+
+        public void SkipUnionIndex()
+        {
+            ReadLong();
+        }
+
+        public void SkipFixed(int len)
+        {
+            Skip(len);
+        }
+
+        // Read p bytes into a new byte buffer
+        private byte[] read(long p)
+        {
+            byte[] buffer = new byte[p];
+            Read(buffer, 0, buffer.Length);
+            return buffer;
+        }
+
+        private static float intBitsToFloat(int value)
+        {
+            return BitConverter.ToSingle(BitConverter.GetBytes(value), 0);
+        }
+
+        private byte read()
+        {
+            int n = stream.ReadByte();
+            if (n >= 0) return (byte)n;
+            throw new AvroException("End of stream reached");
+        }
+
+        private void Read(byte[] buffer, int start, int len)
+        {
+            while (len > 0)
+            {
+                int n = stream.Read(buffer, start, len);
+                if (n <= 0) throw new AvroException("End of stream reached");
+                start += n;
+                len -= n;
+            }
+        }
+
+        private long doReadItemCount()
+        {
+            long result = ReadLong();
+            if (result < 0)
+            {
+                // A negative block count is followed by a long block size in bytes; consume and
+                // discard the size and use the absolute value of the count.
+                ReadLong();
+                result = -result;
+            }
+            return result;
+        }
+
+        private void Skip(int p)
+        {
+            stream.Seek(p, SeekOrigin.Current);
+        }
+
+        private void Skip(long p)
+        {
+            stream.Seek(p, SeekOrigin.Current);
+        }
+
+        internal void skip(long block_size)
+        {
+            throw new NotImplementedException();
+        }
+
+    }
+}
diff --git a/lang/csharp/src/apache/main/IO/BinaryEncoder.cs b/lang/csharp/src/apache/main/IO/BinaryEncoder.cs
new file mode 100644
index 0000000..8cee9a6
--- /dev/null
+++ b/lang/csharp/src/apache/main/IO/BinaryEncoder.cs
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.IO;
+
+namespace Avro.IO
+{
+    /// <summary>
+    /// Writes leaf values in the Avro binary format.
+    /// </summary>
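+    /// <example>
+    /// A minimal sketch (editorial):
+    /// <code>
+    /// var encoder = new BinaryEncoder(stream);
+    /// encoder.WriteLong(42L);
+    /// encoder.WriteString("hello");
+    /// encoder.Flush();
+    /// </code>
+    /// </example>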
+    public class BinaryEncoder : Encoder
+    {
+        private readonly Stream Stream;
+
+        public BinaryEncoder() : this(null)
+        {
+        }
+
+        public BinaryEncoder(Stream stream)
+        {
+            this.Stream = stream;
+        }
+
+        /// <summary>
+        /// null is written as zero bytes
+        /// </summary>
+        public void WriteNull()
+        {
+        }
+        
+        /// <summary>
+        /// true is written as 1 and false 0.
+        /// </summary>
+        /// <param name="b">Boolean value to write</param>
+        public void WriteBoolean(bool b)
+        {
+            writeByte((byte)(b ? 1 : 0));
+        }
+
+        /// <summary>
+        /// int and long values are written using variable-length, zig-zag coding.
+        /// </summary>
+        /// <param name="datum"></param>
+        public void WriteInt(int value)
+        {
+            WriteLong(value);
+        }
+        /// <summary>
+        /// int and long values are written using variable-length, zig-zag coding.
+        /// </summary>
+        /// <param name="datum"></param>
+        public void WriteLong(long value)
+        {
+            ulong n = (ulong)((value << 1) ^ (value >> 63));
+            while ((n & ~0x7FUL) != 0)
+            {
+                writeByte((byte)((n & 0x7f) | 0x80));
+                n >>= 7;
+            }
+            writeByte((byte)n);
+        }
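+
+        // Worked example (editorial): WriteLong(300) zig-zags 300 to n = 600 and emits the two
+        // varint bytes 0xD8, 0x04 (low seven bits first, high bit set on continuation bytes).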
+
+        /// <summary>
+        /// A float is written as 4 bytes.
+        /// The float is converted into a 32-bit integer using a method equivalent to
+        /// Java's floatToIntBits and then encoded in little-endian format.
+        /// </summary>
+        /// <param name="value"></param>
+        public void WriteFloat(float value)
+        {
+            byte[] buffer = BitConverter.GetBytes(value);
+            if (!BitConverter.IsLittleEndian) Array.Reverse(buffer);
+            writeBytes(buffer);
+        }
+        /// <summary>
+        ///A double is written as 8 bytes.
+        ///The double is converted into a 64-bit integer using a method equivalent to
+        ///Java's doubleToLongBits and then encoded in little-endian format.
+        /// </summary>
+        /// <param name="value"></param>
+        public void WriteDouble(double value)
+        {
+            long bits = BitConverter.DoubleToInt64Bits(value);
+            
+            writeByte((byte)((bits) & 0xFF));
+            writeByte((byte)((bits >> 8) & 0xFF));
+            writeByte((byte)((bits >> 16) & 0xFF));
+            writeByte((byte)((bits >> 24) & 0xFF));
+            writeByte((byte)((bits >> 32) & 0xFF));
+            writeByte((byte)((bits >> 40) & 0xFF));
+            writeByte((byte)((bits >> 48) & 0xFF));
+            writeByte((byte)((bits >> 56) & 0xFF));
+            
+        }
+
+        /// <summary>
+        /// Bytes are encoded as a long followed by that many bytes of data.
+        /// </summary>
+        /// <param name="value"></param>
+        /// 
+        public void WriteBytes(byte[] value)
+        {
+            WriteLong(value.Length);
+            writeBytes(value);
+        }
+
+        /// <summary>
+        /// A string is encoded as a long followed by
+        /// that many bytes of UTF-8 encoded character data.
+        /// </summary>
+        /// <param name="value"></param>
+        public void WriteString(string value)
+        {
+            WriteBytes(System.Text.Encoding.UTF8.GetBytes(value));
+        }
+
+        public void WriteEnum(int value)
+        {
+            WriteLong(value);
+        }
+
+        public void StartItem()
+        {
+        }
+
+        public void SetItemCount(long value)
+        {
+            if (value > 0) WriteLong(value);
+        }
+
+        public void WriteArrayStart()
+        {
+        }
+
+        public void WriteArrayEnd()
+        {
+            WriteLong(0);
+        }
+
+        public void WriteMapStart()
+        {
+        }
+
+        public void WriteMapEnd()
+        {
+            WriteLong(0);
+        }
+
+        public void WriteUnionIndex(int value)
+        {
+            WriteLong(value);
+        }
+
+        public void WriteFixed(byte[] data)
+        {
+            WriteFixed(data, 0, data.Length);
+        }
+
+        public void WriteFixed(byte[] data, int start, int len)
+        {
+            Stream.Write(data, start, len);
+        }
+
+        private void writeBytes(byte[] bytes)
+        {
+            Stream.Write(bytes, 0, bytes.Length);
+        }
+
+        private void writeByte(byte b)
+        {
+            Stream.WriteByte(b);
+        }
+
+        public void Flush()
+        {
+            Stream.Flush();
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/IO/ByteBufferInputStream.cs b/lang/csharp/src/apache/main/IO/ByteBufferInputStream.cs
new file mode 100644
index 0000000..27b3859
--- /dev/null
+++ b/lang/csharp/src/apache/main/IO/ByteBufferInputStream.cs
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.IO;
+
+namespace Avro.IO
+{
+    public class ByteBufferInputStream : InputStream
+    {
+        private readonly IList<MemoryStream> _buffers;
+        private int _currentBuffer;
+        
+        public ByteBufferInputStream(IList<MemoryStream> buffers)
+        {
+            _buffers = buffers;
+        }
+
+        public override int Read(byte[] b, int off, int len)
+        {
+            if (len == 0) return 0;
+            MemoryStream buffer = GetNextNonEmptyBuffer();
+            long remaining = buffer.Length - buffer.Position;
+            if (len > remaining)
+            {
+                int remainingCheck = buffer.Read(b, off, (int) remaining);
+
+                if(remainingCheck != remaining)
+                    throw new InvalidDataException(string.Format("remainingCheck [{0}] and remaining[{1}] are different.",
+                        remainingCheck, remaining));
+                return (int)remaining;
+            }
+
+            int lenCheck = buffer.Read(b, off, len);
+
+            if (lenCheck != len)
+                throw new InvalidDataException(string.Format("lenCheck [{0}] and len[{1}] are different.",
+                                                             lenCheck, len));
+
+            return len;
+        }
+
+        private MemoryStream GetNextNonEmptyBuffer()
+        {
+            while (_currentBuffer < _buffers.Count)
+            {
+                MemoryStream buffer = _buffers[_currentBuffer];
+                if (buffer.Position < buffer.Length)
+                    return buffer;
+
+                _currentBuffer++;
+            }
+            throw new EndOfStreamException();
+        }
+
+        public override long Length
+        {
+            get { throw new NotSupportedException(); }
+        }
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/main/IO/ByteBufferOutputStream.cs b/lang/csharp/src/apache/main/IO/ByteBufferOutputStream.cs
new file mode 100644
index 0000000..2faa71a
--- /dev/null
+++ b/lang/csharp/src/apache/main/IO/ByteBufferOutputStream.cs
@@ -0,0 +1,118 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System.Collections.Generic;
+using System.IO;
+//using System.Linq;
+
+namespace Avro.IO
+{
+    public class ByteBufferOutputStream : OutputStream
+    {
+        public const int BUFFER_SIZE = 8192;
+
+        public ByteBufferOutputStream()
+        {
+            Reset();
+        }
+
+        private void Reset()
+        {
+            _buffers = new List<MemoryStream> {CreateBuffer()};
+        }
+
+        private List<MemoryStream> _buffers;
+
+        private static MemoryStream CreateBuffer()
+        {
+            return new MemoryStream(new byte[BUFFER_SIZE], 0, BUFFER_SIZE, true, true);
+        }
+
+        public void Prepend(List<MemoryStream> lists)
+        {
+            foreach (var stream in lists)
+            {
+                stream.Position = stream.Length;
+            }
+
+            _buffers.InsertRange(0, lists);
+        }
+
+        public void Append(List<MemoryStream> lists)
+        {
+            foreach (var stream in lists)
+            {
+                stream.Position = stream.Length;
+            }
+
+            _buffers.AddRange(lists);
+        }
+
+        public override void Write(byte[] b, int off, int len)
+        {
+            var buffer = _buffers[_buffers.Count -1];
+            var remaining = (int) (buffer.Length - buffer.Position);
+            while (len > remaining)
+            {
+                buffer.Write(b, off, remaining);
+                len -= remaining;
+                off += remaining;
+
+                buffer = CreateBuffer();
+                _buffers.Add(buffer);
+
+                remaining = (int) buffer.Length;
+            }
+
+            buffer.Write(b, off, len);
+        }
+
+        public List<MemoryStream> GetBufferList()
+        {
+            List<MemoryStream> result = _buffers;
+            
+            Reset();
+
+            foreach (MemoryStream b in result)
+            {
+                // Flip()
+                b.SetLength(b.Position);
+                b.Position = 0;
+            }
+
+            return result;
+        }
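+
+        // Usage sketch (editorial; assumes OutputStream and InputStream derive from
+        // System.IO.Stream, as the encoder and decoder constructors require):
+        //
+        //   var output = new ByteBufferOutputStream();
+        //   new BinaryEncoder(output).WriteLong(42);
+        //   var input = new ByteBufferInputStream(output.GetBufferList());
+        //   long value = new BinaryDecoder(input).ReadLong(); // 42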
+
+        public override long Length
+        {
+            get
+            {
+                long sum = 0;
+                foreach (var buffer in _buffers)
+                {
+                    sum += buffer.Length;
+                }
+
+                return sum;
+            }
+        }
+
+        public override void Flush()
+        {
+        }
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/main/IO/Decoder.cs b/lang/csharp/src/apache/main/IO/Decoder.cs
new file mode 100644
index 0000000..e3fdec6
--- /dev/null
+++ b/lang/csharp/src/apache/main/IO/Decoder.cs
@@ -0,0 +1,188 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.IO;
+
+namespace Avro.IO
+{
+    /// <summary>
+    /// Decoder is used to decode Avro data on a stream. There are methods to read the Avro types on the stream. There are also
+    /// methods to skip items, which are usually more efficient than reading, on the stream.
+    /// </summary>
+    public interface Decoder
+    {
+        /// <summary>
+        /// Reads a null Avro type.
+        /// </summary>
+        void ReadNull();
+
+        /// <summary>
+        /// Read a boolean Avro type
+        /// </summary>
+        /// <returns>The boolean just read</returns>
+        bool ReadBoolean();
+
+        /// <summary>
+        /// Reads an int Avro type.
+        /// </summary>
+        /// <returns>The int just read</returns>
+        int ReadInt();
+
+        /// <summary>
+        /// Reads a long Avro type.
+        /// </summary>
+        /// <returns>The long just read</returns>
+        long ReadLong();
+
+        /// <summary>
+        /// Reads a float Avro type
+        /// </summary>
+        /// <returns>The float just read</returns>
+        float ReadFloat();
+
+        /// <summary>
+        /// Reads a double Avro type
+        /// </summary>
+        /// <returns>The double just read</returns>
+        double ReadDouble();
+
+        /// <summary>
+        /// Reads the bytes Avro type
+        /// </summary>
+        /// <returns>The bytes just read</returns>
+        byte[] ReadBytes();
+
+        /// <summary>
+        /// Reads a string Avro type
+        /// </summary>
+        /// <returns>The string just read</returns>
+        string ReadString();
+
+        /// <summary>
+        /// Reads an enum AvroType
+        /// </summary>
+        /// <returns>The enum just read</returns>
+        int ReadEnum();
+
+        /// <summary>
+        /// Starts reading the array Avro type. This, together with ReadArrayNext(), is used to read the
+        /// items from an Avro array. This returns the number of entries in the initial chunk. After consuming
+        /// the chunk, the client should call ReadArrayNext() to get the number of entries in the next
+        /// chunk. The client should repeat the procedure until there are no more entries in the array.
+        /// 
+        /// for (long n = decoder.ReadArrayStart(); n > 0; n = decoder.ReadArrayNext())
+        /// {
+        ///     // Read one array entry.
+        /// }
+        /// </summary>
+        /// <returns>The number of entries in the initial chunk, 0 if the array is empty.</returns>
+        long ReadArrayStart();
+
+        /// <summary>
+        /// See ReadArrayStart().
+        /// </summary>
+        /// <returns>The number of array entries in the next chunk, 0 if there are no more entries.</returns>
+        long ReadArrayNext();
+
+        /// <summary>
+        /// Starts reading the map Avro type. This, together with ReadMapNext(), is used to read the
+        /// entries from an Avro map. This returns the number of entries in the initial chunk. After consuming
+        /// the chunk, the client should call ReadMapNext() to get the number of entries in the next
+        /// chunk. The client should repeat the procedure until there are no more entries in the map.
+        /// for (long n = decoder.ReadMapStart(); n > 0; n = decoder.ReadMapNext())
+        /// {
+        ///     // Read one map entry.
+        /// }
+        /// </summary>
+        /// <returns>The number of entries in the initial chunk, 0 if the map is empty.</returns>
+        long ReadMapStart();
+
+        /// <summary>
+        /// See ReadMapStart().
+        /// </summary>
+        /// <returns>The number of map entries in the next chunk, 0 if there are no more entries.</returns>
+        long ReadMapNext();
+
+        /// <summary>
+        /// Reads the index, which determines the branch of a union Avro type.
+        /// </summary>
+        /// <returns>The index of the type within the union.</returns>
+        int ReadUnionIndex();
+
+        /// <summary>
+        /// A convenience method for ReadFixed(buffer, 0, buffer.Length);
+        /// </summary>
+        /// <param name="buffer"> The buffer to read into.</param>
+        void ReadFixed(byte[] buffer);
+
+        /// <summary>
+        /// Reads a fixed Avro type of the given length.
+        /// </summary>
+        /// <param name="buffer">Buffer to read into</param>
+        /// <param name="start">Starting position of buffer to read into</param>
+        /// <param name="length">Number of bytes to read</param>
+        void ReadFixed(byte[] buffer, int start, int length);
+
+        /// <summary>
+        /// Skips a null Avro type on the stream.
+        /// </summary>
+        void SkipNull();
+
+        /// <summary>
+        ///  Skips a boolean Avro type on the stream.
+        /// </summary>
+        void SkipBoolean();
+
+        /// <summary>
+        /// Skips an int Avro type on the stream.
+        /// </summary>
+        void SkipInt();
+        
+        /// <summary>
+        ///  Skips a long Avro type on the stream.
+        /// </summary>
+        void SkipLong();
+
+        /// <summary>
+        /// Skips a float Avro type on the stream.
+        /// </summary>
+        void SkipFloat();
+
+        /// <summary>
+        /// Skips a double Avro type on the stream.
+        /// </summary>
+        void SkipDouble();
+
+        /// <summary>
+        /// Skips a bytes Avro type on the stream.
+        /// </summary>
+        void SkipBytes();
+
+        /// <summary>
+        /// Skips a string Avro type on the stream.
+        /// </summary>
+        void SkipString();
+
+        void SkipEnum();
+
+        void SkipUnionIndex();
+
+        void SkipFixed(int len);
+    }
+
+}
diff --git a/lang/csharp/src/apache/main/IO/Encoder.cs b/lang/csharp/src/apache/main/IO/Encoder.cs
new file mode 100644
index 0000000..010d334
--- /dev/null
+++ b/lang/csharp/src/apache/main/IO/Encoder.cs
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.IO;
+
+namespace Avro.IO
+{
+    /// <summary>
+    /// Write interface for Avro data; the counterpart of Decoder. Array and map
+    /// items are bracketed by the Write*Start()/Write*End() calls, with SetItemCount()
+    /// declaring the chunk size and StartItem() called before each item.
+    /// </summary>
+    public interface Encoder
+    {
+        void WriteNull();
+        void WriteBoolean(bool value);
+        void WriteInt(int value);
+        void WriteLong(long value);
+        void WriteFloat(float value);
+        void WriteDouble(double value);
+        void WriteBytes(byte[] value);
+        void WriteString(string value);
+
+        void WriteEnum(int value);
+
+        void SetItemCount(long value);
+        void StartItem();
+        
+        void WriteArrayStart();
+        void WriteArrayEnd();
+
+        void WriteMapStart();
+        void WriteMapEnd();
+
+        void WriteUnionIndex(int value);
+        void WriteFixed(byte[] data);
+        void WriteFixed(byte[] data, int start, int len);
+    }
+}
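The writer side mirrors the Decoder's chunk protocol. A minimal sketch, with `encoder` standing in for any concrete implementation of this interface:

    // Write { 1, 2, 3 } as a single-chunk Avro array<long>.
    long[] values = { 1L, 2L, 3L };
    encoder.WriteArrayStart();
    encoder.SetItemCount(values.Length);
    foreach (long v in values)
    {
        encoder.StartItem();     // announce the next item
        encoder.WriteLong(v);
    }
    encoder.WriteArrayEnd();     // close the array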
diff --git a/lang/csharp/src/apache/main/IO/ICallback.cs b/lang/csharp/src/apache/main/IO/ICallback.cs
new file mode 100644
index 0000000..70a6a5f
--- /dev/null
+++ b/lang/csharp/src/apache/main/IO/ICallback.cs
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+namespace Avro.IO
+{
+    public interface ICallback<in T> 
+    {
+        /// <summary>
+        /// Receives a callback result.
+        /// </summary>
+        /// <param name="result">the result returned in the callback.</param>
+        void HandleResult(T result);
+
+        /// <summary>
+        /// Receives an error.
+        /// </summary>
+        /// <param name="exception">the exception returned in the callback.</param>
+        void HandleException(Exception exception);
+    }
+}
\ No newline at end of file
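ICallback<in T> is the completion contract for asynchronous calls: exactly one of HandleResult() or HandleException() is invoked per request. A minimal illustrative implementation (the class name is hypothetical):

    // Logs the outcome of one asynchronous call returning a string.
    class LoggingCallback : Avro.IO.ICallback<string>
    {
        public void HandleResult(string result)
        {
            System.Console.WriteLine("call returned: " + result);
        }

        public void HandleException(System.Exception exception)
        {
            System.Console.Error.WriteLine("call failed: " + exception.Message);
        }
    }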
diff --git a/lang/csharp/src/apache/main/IO/InputStream.cs b/lang/csharp/src/apache/main/IO/InputStream.cs
new file mode 100644
index 0000000..10e33d8
--- /dev/null
+++ b/lang/csharp/src/apache/main/IO/InputStream.cs
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.IO;
+
+namespace Avro.IO
+{
+    public abstract class InputStream : Stream
+    {
+        public override void Flush()
+        {
+        }
+
+        public override long Seek(long offset, SeekOrigin origin)
+        {
+            throw new NotSupportedException();
+        }
+
+        public override void SetLength(long value)
+        {
+            throw new NotSupportedException();
+        }
+
+        public override void Write(byte[] buffer, int offset, int count)
+        {
+            throw new NotSupportedException();
+        }
+
+        public override bool CanRead
+        {
+            get { return true; }
+        }
+
+        public override bool CanSeek
+        {
+            get { return false; }
+        }
+
+        public override bool CanWrite
+        {
+            get { return false; }
+        }
+
+        public override long Position
+        {
+            get { throw new NotSupportedException(); }
+            set { throw new NotSupportedException(); }
+        }
+    }
+}
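InputStream pins down the read-only half of System.IO.Stream: seeking, writing, and Position are stubbed out to throw NotSupportedException, so a concrete subclass only has to supply Read() and Length. A sketch of a minimal in-memory subclass (hypothetical, for illustration only):

    // Serves bytes from a fixed in-memory buffer.
    class ByteArrayInputStream : Avro.IO.InputStream
    {
        private readonly byte[] data;
        private int pos;

        public ByteArrayInputStream(byte[] data) { this.data = data; }

        public override long Length { get { return data.Length; } }

        public override int Read(byte[] buffer, int offset, int count)
        {
            int n = System.Math.Min(count, data.Length - pos);
            System.Array.Copy(data, pos, buffer, offset, n);
            pos += n;
            return n;       // 0 signals end of stream
        }
    }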
diff --git a/lang/csharp/src/apache/main/IO/OutputStream.cs b/lang/csharp/src/apache/main/IO/OutputStream.cs
new file mode 100644
index 0000000..ed7edd1
--- /dev/null
+++ b/lang/csharp/src/apache/main/IO/OutputStream.cs
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.IO;
+
+namespace Avro.IO
+{
+    public abstract class OutputStream : Stream
+    {
+        public override bool CanWrite
+        {
+            get { return true; }
+        }
+
+        public override bool CanRead
+        {
+            get { return false; }
+        }
+
+        public override bool CanSeek
+        {
+            get { return false; }
+        }
+
+        public override long Position
+        {
+            get { throw new NotSupportedException(); }
+            set { throw new NotSupportedException(); }
+        }
+
+        public override int Read(byte[] buffer, int offset, int count)
+        {
+            throw new NotSupportedException();
+        }
+
+        public override long Seek(long offset, SeekOrigin origin)
+        {
+            throw new NotSupportedException();
+        }
+
+        public override void SetLength(long value)
+        {
+            throw new NotSupportedException();
+        }
+    }
+}
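OutputStream is the mirror image: only Write(), Flush(), and Length are left for a subclass to implement. For illustration, a toy subclass (name hypothetical) that accumulates written bytes in memory:

    // Collects everything written into a growable in-memory list.
    class ByteListOutputStream : Avro.IO.OutputStream
    {
        private readonly System.Collections.Generic.List<byte> data =
            new System.Collections.Generic.List<byte>();

        public override long Length { get { return data.Count; } }

        public override void Flush() { }    // nothing is buffered

        public override void Write(byte[] buffer, int offset, int count)
        {
            for (int i = 0; i < count; i++)
                data.Add(buffer[offset + i]);
        }
    }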
diff --git a/lang/csharp/src/apache/main/IO/Resolver.cs b/lang/csharp/src/apache/main/IO/Resolver.cs
new file mode 100644
index 0000000..e06bea7
--- /dev/null
+++ b/lang/csharp/src/apache/main/IO/Resolver.cs
@@ -0,0 +1,166 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using Newtonsoft.Json.Linq;
+using Newtonsoft.Json;
+
+namespace Avro.IO
+{
+    static class Resolver
+    {
+        /// <summary>
+        /// Reads the given JToken default value and writes it to the specified encoder
+        /// </summary>
+        /// <param name="enc">encoder to use for writing</param>
+        /// <param name="schema">schema object for the current field</param>
+        /// <param name="jtok">default value as JToken</param>
+        public static void EncodeDefaultValue(Encoder enc, Schema schema, JToken jtok)
+        {
+            if (null == jtok) return;
+
+            switch (schema.Tag)
+            {
+                case Schema.Type.Boolean:
+                    if (jtok.Type != JTokenType.Boolean)
+                        throw new AvroException("Default boolean value " + jtok.ToString() + " is invalid, expected is json boolean.");
+                    enc.WriteBoolean((bool)jtok);
+                    break;
+
+                case Schema.Type.Int:
+                    if (jtok.Type != JTokenType.Integer)
+                        throw new AvroException("Default int value " + jtok.ToString() + " is invalid, expected is json integer.");
+                    enc.WriteInt(Convert.ToInt32((int)jtok));
+                    break;
+
+                case Schema.Type.Long:
+                    if (jtok.Type != JTokenType.Integer)
+                        throw new AvroException("Default long value " + jtok.ToString() + " is invalid, expected is json integer.");
+                    enc.WriteLong(Convert.ToInt64((long)jtok));
+                    break;
+
+                case Schema.Type.Float:
+                    if (jtok.Type != JTokenType.Float)
+                        throw new AvroException("Default float value " + jtok.ToString() + " is invalid, expected is json number.");
+                    enc.WriteFloat((float)jtok);
+                    break;
+
+                case Schema.Type.Double:
+                    if (jtok.Type == JTokenType.Integer)
+                        enc.WriteDouble(Convert.ToDouble((int)jtok));
+                    else if (jtok.Type == JTokenType.Float)
+                        enc.WriteDouble(Convert.ToDouble((float)jtok));
+                    else
+                        throw new AvroException("Default double value " + jtok.ToString() + " is invalid, expected is json number.");
+
+                    break;
+
+                case Schema.Type.Bytes:
+                    if (jtok.Type != JTokenType.String)
+                        throw new AvroException("Default bytes value " + jtok.ToString() + " is invalid, expected is json string.");
+                    var en = System.Text.Encoding.GetEncoding("iso-8859-1");
+                    enc.WriteBytes(en.GetBytes((string)jtok));
+                    break;
+
+                case Schema.Type.Fixed:
+                    if (jtok.Type != JTokenType.String)
+                        throw new AvroException("Default fixed value " + jtok.ToString() + " is invalid, expected is json string.");
+                    en = System.Text.Encoding.GetEncoding("iso-8859-1");
+                    int len = (schema as FixedSchema).Size;
+                    byte[] bb = en.GetBytes((string)jtok);
+                    if (bb.Length != len)
+                        throw new AvroException("Default fixed value " + jtok.ToString() + " is not of expected length " + len);
+                    enc.WriteFixed(bb);
+                    break;
+
+                case Schema.Type.String:
+                    if (jtok.Type != JTokenType.String)
+                        throw new AvroException("Default string value " + jtok.ToString() + " is invalid, expected is json string.");
+                    enc.WriteString((string)jtok);
+                    break;
+
+                case Schema.Type.Enumeration:
+                    if (jtok.Type != JTokenType.String)
+                        throw new AvroException("Default enum value " + jtok.ToString() + " is invalid, expected is json string.");
+                    enc.WriteEnum((schema as EnumSchema).Ordinal((string)jtok));
+                    break;
+
+                case Schema.Type.Null:
+                    if (jtok.Type != JTokenType.Null)
+                        throw new AvroException("Default null value " + jtok.ToString() + " is invalid, expected is json null.");
+                    enc.WriteNull();
+                    break;
+
+                case Schema.Type.Array:
+                    if (jtok.Type != JTokenType.Array)
+                        throw new AvroException("Default array value " + jtok.ToString() + " is invalid, expected is json array.");
+                    JArray jarr = jtok as JArray;
+                    enc.WriteArrayStart();
+                    enc.SetItemCount(jarr.Count);
+                    foreach (JToken jitem in jarr)
+                    {
+                        enc.StartItem();
+                        EncodeDefaultValue(enc, (schema as ArraySchema).ItemSchema, jitem);
+                    }
+                    enc.WriteArrayEnd();
+                    break;
+
+                case Schema.Type.Record:
+                case Schema.Type.Error:
+                    if (jtok.Type != JTokenType.Object)
+                        throw new AvroException("Default record value " + jtok.ToString() + " is invalid, expected is json object.");
+                    RecordSchema rcs = schema as RecordSchema;
+                    JObject jo = jtok as JObject;
+                    foreach (Field field in rcs)
+                    {
+                        JToken val = jo[field.Name];
+                        if (null == val)
+                            val = field.DefaultValue;
+                        if (null == val)
+                            throw new AvroException("No default value for field " + field.Name);
+
+                        EncodeDefaultValue(enc, field.Schema, val);
+                    }
+                    break;
+
+                case Schema.Type.Map:
+                    if (jtok.Type != JTokenType.Object)
+                        throw new AvroException("Default map value " + jtok.ToString() + " is invalid, expected is json object.");
+                    jo = jtok as JObject;
+                    enc.WriteMapStart();
+                    enc.SetItemCount(jo.Count);
+                    foreach (KeyValuePair<string, JToken> jp in jo)
+                    {
+                        enc.StartItem();
+                        enc.WriteString(jp.Key);
+                        EncodeDefaultValue(enc, (schema as MapSchema).ValueSchema, jp.Value);
+                    }
+                    enc.WriteMapEnd();
+                    break;
+
+                case Schema.Type.Union:
+                    enc.WriteUnionIndex(0);
+                    EncodeDefaultValue(enc, (schema as UnionSchema).Schemas[0], jtok);
+                    break;
+
+                default:
+                    throw new AvroException("Unsupported schema type " + schema.Tag);
+            }
+        }
+    }
+}
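EncodeDefaultValue recurses structurally over the schema: scalars are validated against the expected JTokenType, arrays, maps and records recurse per element, and a union encodes against its first branch (matching the Avro rule that a union default must conform to the first branch). A hedged usage sketch; Resolver is internal to Avro.IO, so this would run inside the assembly, and `encoder` stands in for any concrete Encoder:

    // Write a field's default value (schema int, default 42) to the encoder.
    Schema schema = Schema.Parse("int");
    JToken defaultValue = JToken.Parse("42");
    Resolver.EncodeDefaultValue(encoder, schema, defaultValue);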
diff --git a/lang/csharp/src/apache/main/Properties/AssemblyInfo.cs b/lang/csharp/src/apache/main/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..1ef0b2e
--- /dev/null
+++ b/lang/csharp/src/apache/main/Properties/AssemblyInfo.cs
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System.Reflection;
+using System.Runtime.InteropServices;
+
+[assembly: AssemblyTitle("Avro")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("Apache")]
+[assembly: AssemblyProduct("Avro")]
+[assembly: AssemblyCopyright("Copyright © Apache 2013")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+[assembly: ComVisible(false)]
+[assembly: Guid("152D7B83-9A97-45F3-B4B3-A367AFC090C4")]
+[assembly: AssemblyVersion("0.9.0.0")]
+[assembly: AssemblyFileVersion("0.9.0.0")]
\ No newline at end of file
diff --git a/lang/csharp/src/apache/main/Properties/Settings.Designer.cs b/lang/csharp/src/apache/main/Properties/Settings.Designer.cs
new file mode 100644
index 0000000..2e7908b
--- /dev/null
+++ b/lang/csharp/src/apache/main/Properties/Settings.Designer.cs
@@ -0,0 +1,44 @@
+//------------------------------------------------------------------------------
+// <auto-generated>
+//     This code was generated by a tool.
+//     Runtime Version:4.0.30319.1
+//
+//     Changes to this file may cause incorrect behavior and will be lost if
+//     the code is regenerated.
+// </auto-generated>
+//------------------------------------------------------------------------------
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+namespace Avro.Properties {
+    
+    
+    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "10.0.0.0")]
+    internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
+        
+        private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
+        
+        public static Settings Default {
+            get {
+                return defaultInstance;
+            }
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/Protocol/Message.cs b/lang/csharp/src/apache/main/Protocol/Message.cs
new file mode 100644
index 0000000..80912d2
--- /dev/null
+++ b/lang/csharp/src/apache/main/Protocol/Message.cs
@@ -0,0 +1,207 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Newtonsoft.Json.Linq;
+using Newtonsoft.Json;
+
+namespace Avro
+{
+    public class Message
+    {
+        /// <summary>
+        /// Name of the message
+        /// </summary>
+        public string Name { get; set; }
+
+        /// <summary>
+        /// Documentation for the message
+        /// </summary>
+        public string Doc { get; set; }
+
+        /// <summary>
+        /// Anonymous record schema holding the list of request parameters
+        /// </summary>
+        public RecordSchema Request { get; set; }
+
+        /// <summary>
+        /// Schema object for the 'response' attribute
+        /// </summary>
+        public Schema Response { get; set; }
+
+        /// <summary>
+        /// Union schema object for the 'error' attribute
+        /// </summary>
+        public UnionSchema Error { get; set; }
+
+        /// <summary>
+        /// Optional one-way attribute
+        /// </summary>
+        public bool? Oneway { get; set; }
+
+        /// <summary>
+        /// Explicitly defined protocol errors plus the system-added "string" error
+        /// </summary>
+        public UnionSchema SupportedErrors { get; set; }
+
+        /// <summary>
+        /// Constructor for Message class
+        /// </summary>
+        /// <param name="name">name property</param>
+        /// <param name="doc">doc property</param>
+        /// <param name="request">list of parameters</param>
+        /// <param name="response">response property</param>
+        /// <param name="error">error union schema</param>
+        public Message(string name, string doc, RecordSchema request, Schema response, UnionSchema error, bool? oneway)
+        {
+            if (string.IsNullOrEmpty(name)) throw new ArgumentNullException("name", "name cannot be null.");
+            this.Request = request;
+            this.Response = response;
+            this.Error = error;
+            this.Name = name;
+            this.Doc = doc;
+            this.Oneway = oneway;
+
+            if (error != null && error.CanRead(Schema.Parse("string")))
+            {
+                this.SupportedErrors = error;
+            }
+            else
+            {
+                this.SupportedErrors = (UnionSchema) Schema.Parse("[\"string\"]");
+
+                if (error != null)
+                {
+                    for (int i = 0; i < error.Schemas.Count; ++i)
+                    {
+                        this.SupportedErrors.Schemas.Add(error.Schemas[i]);
+                    }
+                }
+            }
+        }
+
+        /// <summary>
+        /// Parses a message from the messages section of a protocol definition
+        /// </summary>
+        /// <param name="jmessage">JSON property holding the message name and body</param>
+        /// <param name="names">list of parsed names</param>
+        /// <param name="encspace">enclosing namespace</param>
+        /// <returns>Message object</returns>
+        internal static Message Parse(JProperty jmessage, SchemaNames names, string encspace)
+        {
+            string name = jmessage.Name;
+            string doc = JsonHelper.GetOptionalString(jmessage.Value, "doc");
+            bool? oneway = JsonHelper.GetOptionalBoolean(jmessage.Value, "one-way");
+
+            PropertyMap props = Schema.GetProperties(jmessage.Value);
+            RecordSchema schema = RecordSchema.NewInstance(Schema.Type.Record, jmessage.Value as JObject, props, names, encspace);
+
+            JToken jresponse = jmessage.Value["response"];
+            var response = Schema.ParseJson(jresponse, names, encspace);
+
+            JToken jerrors = jmessage.Value["errors"];
+            UnionSchema uerrorSchema = null;
+            if (null != jerrors)
+            {
+                Schema errorSchema = Schema.ParseJson(jerrors, names, encspace);
+                if (!(errorSchema is UnionSchema))
+                    throw new AvroException("");
+
+                uerrorSchema = errorSchema as UnionSchema;
+            }
+
+            return new Message(name, doc, schema, response, uerrorSchema, oneway);
+        }
+
+        /// <summary>
+        /// Writes this message as one entry in the messages section of a protocol definition
+        /// </summary>
+        /// <param name="writer">writer</param>
+        /// <param name="names">list of names written</param>
+        /// <param name="encspace">enclosing namespace</param>
+        internal void writeJson(Newtonsoft.Json.JsonTextWriter writer, SchemaNames names, string encspace)
+        {
+            writer.WriteStartObject();
+            JsonHelper.writeIfNotNullOrEmpty(writer, "doc", this.Doc);
+
+            if (null != this.Request)
+                this.Request.WriteJsonFields(writer, names, null);
+
+            if (null != this.Response)
+            {
+                writer.WritePropertyName("response");
+                Response.WriteJson(writer, names, encspace);
+            }
+
+            if (null != this.Error)
+            {
+                writer.WritePropertyName("errors");
+                this.Error.WriteJson(writer, names, encspace);
+            }
+
+            if (null != Oneway)
+            {
+                writer.WritePropertyName("one-way");
+                writer.WriteValue(Oneway);
+            }
+
+            writer.WriteEndObject();
+        }
+
+        /// <summary>
+        /// Tests equality of this Message object with the passed object
+        /// </summary>
+        /// <param name="obj"></param>
+        /// <returns></returns>
+        public override bool Equals(Object obj) 
+        {
+          if (obj == this) return true;
+          if (!(obj is Message)) return false;
+
+          Message that = obj as Message;
+          return this.Name.Equals(that.Name) && 
+                 this.Request.Equals(that.Request) &&
+                 areEqual(this.Response, that.Response) && 
+                 areEqual(this.Error, that.Error);
+        }
+
+        /// <summary>
+        /// Returns the hash code of this Message object
+        /// </summary>
+        /// <returns></returns>
+        public override int GetHashCode() 
+        {
+            return Name.GetHashCode() +
+                   Request.GetHashCode() +
+                  (Response == null ? 0 : Response.GetHashCode()) +
+                  (Error == null ? 0 : Error.GetHashCode());
+        }
+
+        /// <summary>
+        /// Tests equality of two objects, taking null values into account
+        /// </summary>
+        /// <param name="o1"></param>
+        /// <param name="o2"></param>
+        /// <returns></returns>
+        protected static bool areEqual(object o1, object o2)
+        {
+            return o1 == null ? o2 == null : o1.Equals(o2);
+        }
+    }
+}
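A consequence of the constructor logic above: SupportedErrors always contains the implicit "string" system error, even when the protocol declares no errors at all. A small sketch via Protocol.Parse, with the protocol JSON kept minimal:

    // A message with no declared errors still supports the "string" error.
    Protocol p = Protocol.Parse(
        @"{""protocol"": ""P"",
           ""messages"": { ""hello"": { ""request"": [], ""response"": ""string"" } } }");
    Message m = p.Messages["hello"];
    // m.Error == null; m.SupportedErrors is the union ["string"]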
diff --git a/lang/csharp/src/apache/main/Protocol/Protocol.cs b/lang/csharp/src/apache/main/Protocol/Protocol.cs
new file mode 100644
index 0000000..e9645a5
--- /dev/null
+++ b/lang/csharp/src/apache/main/Protocol/Protocol.cs
@@ -0,0 +1,294 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+
+namespace Avro
+{
+    public class Protocol
+    {
+        /// <summary>
+        /// Name of the protocol
+        /// </summary>
+        public string Name { get; set; }
+
+        /// <summary>
+        /// Namespace of the protocol
+        /// </summary>
+        public string Namespace { get; set; }
+
+        /// <summary>
+        /// Documentation for the protocol
+        /// </summary>
+        public string Doc { get; set; }
+
+        /// <summary>
+        /// List of schema objects representing the different schemas defined under the 'types' attribute
+        /// </summary>
+        public IList<Schema> Types { get; set; }
+
+        /// <summary>
+        /// Map of message objects representing the different messages defined under the 'messages' attribute
+        /// </summary>
+        public IDictionary<string,Message> Messages { get; set; }
+
+        private byte[] md5;
+        public byte[] MD5
+        {
+            get 
+            {
+                try
+                {
+                    if (md5 == null)
+                        md5 = System.Security.Cryptography.MD5.Create().ComputeHash(Encoding.UTF8.GetBytes(ToString()));
+                }
+                catch (Exception ex)
+                {
+                    throw new AvroRuntimeException("MD5 get exception", ex);
+                }
+                return md5; 
+            }
+        }
+
+        /// <summary>
+        /// Constructor for Protocol class
+        /// </summary>
+        /// <param name="name">required name of protocol</param>
+        /// <param name="space">optional namespace</param>
+        /// <param name="doc">optional documentation</param>
+        /// <param name="types">required list of types</param>
+        /// <param name="messages">required list of messages</param>
+        public Protocol(string name, string space,
+                        string doc, IEnumerable<Schema> types,
+                        IDictionary<string,Message> messages)
+        {
+            if (string.IsNullOrEmpty(name)) throw new ArgumentNullException("name", "name cannot be null.");
+            if (null == types) throw new ArgumentNullException("types", "types cannot be null.");
+            if (null == messages) throw new ArgumentNullException("messages", "messages cannot be null.");
+
+            this.Name = name;
+            this.Namespace = space;
+            this.Doc = doc;
+            this.Types = new List<Schema>(types);
+            this.Messages = new Dictionary<string, Message>(messages);
+        }
+
+        /// <summary>
+        /// Parses the given JSON string to create a Protocol object
+        /// </summary>
+        /// <param name="jstring">JSON string</param>
+        /// <returns>Protocol object</returns>
+        public static Protocol Parse(string jstring)
+        {
+            if (string.IsNullOrEmpty(jstring)) throw new ArgumentNullException("jstring", "jstring cannot be null.");
+
+            JToken jtok = null;
+            try
+            {
+                jtok = JObject.Parse(jstring);
+            }
+            catch (Exception ex)
+            {
+                throw new ProtocolParseException("Invalid JSON format: " + jstring, ex);
+            }
+            return Parse(jtok);
+        }
+
+        /// <summary>
+        /// Parses the given JSON object to create a Protocol object
+        /// </summary>
+        /// <param name="jtok">JSON object</param>
+        /// <returns>Protocol object</returns>
+        private static Protocol Parse(JToken jtok)
+        {
+            string name = JsonHelper.GetRequiredString(jtok, "protocol");
+            string space = JsonHelper.GetOptionalString(jtok, "namespace");
+            string doc = JsonHelper.GetOptionalString(jtok, "doc");
+
+            var names = new SchemaNames();
+
+            JToken jtypes = jtok["types"];
+            var types = new List<Schema>();
+            if (jtypes is JArray)
+            {
+                foreach (JToken jtype in jtypes)
+                {
+                    var schema = Schema.ParseJson(jtype, names, space);
+                    types.Add(schema);
+                }
+            }
+
+            var messages = new Dictionary<string,Message>();
+            JToken jmessages = jtok["messages"];
+            if (null != jmessages)
+            {
+                foreach (JProperty jmessage in jmessages)
+                {
+                    var message = Message.Parse(jmessage, names, space);
+                    messages.Add(message.Name, message);
+                }
+            }
+
+            return new Protocol(name, space, doc, types, messages);
+        }
+
+        /// <summary>
+        /// Writes Protocol in JSON format
+        /// </summary>
+        /// <returns>JSON string</returns>
+        public override string ToString()
+        {
+            using (System.IO.StringWriter sw = new System.IO.StringWriter())
+            {
+                using (Newtonsoft.Json.JsonTextWriter writer = new Newtonsoft.Json.JsonTextWriter(sw))
+                {
+                    #if(DEBUG)
+                    writer.Formatting = Newtonsoft.Json.Formatting.Indented;
+                    #endif
+
+                    WriteJson(writer, new SchemaNames());
+                    writer.Flush();
+                    return sw.ToString();
+                }
+            }
+        }
+
+        /// <summary>
+        /// Writes Protocol in JSON format
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        /// <param name="names">list of named schemas already written</param>
+        internal void WriteJson(Newtonsoft.Json.JsonTextWriter writer, SchemaNames names)
+        {
+            writer.WriteStartObject();
+
+            JsonHelper.writeIfNotNullOrEmpty(writer, "protocol", this.Name);
+            JsonHelper.writeIfNotNullOrEmpty(writer, "namespace", this.Namespace);
+            JsonHelper.writeIfNotNullOrEmpty(writer, "doc", this.Doc);
+
+            writer.WritePropertyName("types");
+            writer.WriteStartArray();
+
+            foreach (Schema type in this.Types)
+                type.WriteJson(writer, names, this.Namespace);
+
+            writer.WriteEndArray();
+
+            writer.WritePropertyName("messages");
+            writer.WriteStartObject();
+
+            foreach (KeyValuePair<string,Message> message in this.Messages)
+            {
+                writer.WritePropertyName(message.Key);
+                message.Value.writeJson(writer, names, this.Namespace);
+            }
+
+            writer.WriteEndObject();
+            writer.WriteEndObject();
+        }
+
+        /// <summary>
+        /// Tests equality of this protocol object with the passed object
+        /// </summary>
+        /// <param name="obj"></param>
+        /// <returns></returns>
+        public override bool Equals(object obj)
+        {
+            if (obj == this) return true;
+            if (!(obj is Protocol)) return false;
+
+            Protocol that = obj as Protocol;
+
+            // string.Equals tolerates a null Namespace on either side
+            return this.Name.Equals(that.Name) && string.Equals(this.Namespace, that.Namespace) &&
+                    TypesEquals(that.Types) && MessagesEquals(that.Messages);
+        }
+
+        /// <summary>
+        /// Tests equality of this protocol's Types list with the passed Types list.
+        /// Order of schemas does not matter, as long as all types in this protocol
+        /// are also defined in the passed protocol.
+        /// </summary>
+        /// <param name="that"></param>
+        /// <returns></returns>
+        private bool TypesEquals(IList<Schema> that)
+        {
+            if (Types.Count != that.Count) return false;
+            foreach (Schema schema in Types)
+                if (!that.Contains(schema)) return false;
+            return true;
+        }
+
+        /// <summary>
+        /// Tests equality of this protocol's Messages map with the passed Messages map.
+        /// Order of messages does not matter, as long as all messages in this protocol
+        /// are also defined in the passed protocol.
+        /// </summary>
+        /// <param name="that"></param>
+        /// <returns></returns>
+        private bool MessagesEquals(IDictionary<string, Message> that)
+        {
+            if (Messages.Count != that.Count) return false;
+            foreach (KeyValuePair<string, Message> pair in Messages) 
+            { 
+                if (!that.ContainsKey(pair.Key))
+                    return false;
+                if (!pair.Value.Equals(that[pair.Key]))
+                    return false; 
+            } 
+            return true;
+        }
+
+        /// <summary>
+        /// Returns the hash code of this protocol object
+        /// </summary>
+        /// <returns></returns>
+        public override int GetHashCode()
+        {
+            return Name.GetHashCode() + (Namespace == null ? 0 : Namespace.GetHashCode()) +
+                   GetTypesHashCode() + GetMessagesHashCode();
+        }
+
+        /// <summary>
+        /// Returns the hash code of the Types list
+        /// </summary>
+        /// <returns></returns>
+        private int GetTypesHashCode()
+        {
+            int hash = Types.Count;
+            foreach (Schema schema in Types)
+                hash += schema.GetHashCode();
+            return hash;
+        }
+
+        /// <summary>
+        /// Returns the hash code of the Messages map
+        /// </summary>
+        /// <returns></returns>
+        private int GetMessagesHashCode()
+        {
+            int hash = Messages.Count;
+            foreach (KeyValuePair<string, Message> pair in Messages)
+                hash += (pair.Key.GetHashCode() + pair.Value.GetHashCode());
+            return hash;
+        }
+    }
+}
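Putting the pieces together: Parse() builds the object model, ToString() re-serializes it to JSON (indented in DEBUG builds), and MD5 is the hash of that JSON, the fingerprint used when comparing protocols in the Avro handshake. A small sketch:

    Protocol p = Protocol.Parse(
        @"{""protocol"": ""Greeter"", ""namespace"": ""example"",
           ""types"": [], ""messages"": {}}");
    System.Console.WriteLine(p.Name + " in " + p.Namespace);   // Greeter in example
    byte[] fingerprint = p.MD5;   // 16-byte MD5 of p.ToString()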
diff --git a/lang/csharp/src/apache/main/Protocol/ProtocolParseException.cs b/lang/csharp/src/apache/main/Protocol/ProtocolParseException.cs
new file mode 100644
index 0000000..4e67013
--- /dev/null
+++ b/lang/csharp/src/apache/main/Protocol/ProtocolParseException.cs
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Avro
+{
+    public class ProtocolParseException:AvroException
+    {
+        public ProtocolParseException(string s)
+            : base(s)
+        {
+        }
+
+        public ProtocolParseException(string s, Exception inner)
+            : base(s, inner)
+        {
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/Schema/ArraySchema.cs b/lang/csharp/src/apache/main/Schema/ArraySchema.cs
new file mode 100644
index 0000000..c5e4203
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/ArraySchema.cs
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using Newtonsoft.Json.Linq;
+
+namespace Avro
+{
+    /// <summary>
+    /// Class for array type schemas
+    /// </summary>
+    public class ArraySchema : UnnamedSchema
+    {
+        /// <summary>
+        /// Schema for the array's 'items' attribute
+        /// </summary>
+        public Schema ItemSchema { get; set;  }
+
+        /// <summary>
+        /// Static function to return a new instance of ArraySchema
+        /// </summary>
+        /// <param name="jtok">JSON object for the array schema</param>
+        /// <param name="props">dictionary that provides access to custom properties</param>
+        /// <param name="names">list of named schemas already parsed</param>
+        /// <param name="encspace">enclosing namespace for the array schema</param>
+        /// <returns></returns>
+        internal static ArraySchema NewInstance(JToken jtok, PropertyMap props, SchemaNames names, string encspace)
+        {
+            JToken jitem = jtok["items"];
+            if (null == jitem) throw new AvroTypeException("Array does not have 'items'");
+
+            return new ArraySchema(Schema.ParseJson(jitem, names, encspace), props);
+        }
+
+        /// <summary>
+        /// Constructor
+        /// </summary>
+        /// <param name="items">schema for the array items type</param>
+        private ArraySchema(Schema items, PropertyMap props) : base(Type.Array, props)
+        {
+            if (null == items) throw new ArgumentNullException("items");
+            this.ItemSchema = items;
+        }
+
+        /// <summary>
+        /// Writes the array schema in JSON format
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        /// <param name="names">list of named schemas already written</param>
+        /// <param name="encspace">enclosing namespace</param>
+        protected internal override void WriteJsonFields(Newtonsoft.Json.JsonTextWriter writer, SchemaNames names, string encspace)
+        {
+            writer.WritePropertyName("items");
+            ItemSchema.WriteJson(writer, names, encspace);
+        }
+
+        /// <summary>
+        /// Checks if this schema can read data written by the given schema. Used for decoding data.
+        /// </summary>
+        /// <param name="writerSchema">writer schema</param>
+        /// <returns>true if this and writer schema are compatible based on the AVRO specification, false otherwise</returns>
+        public override bool CanRead(Schema writerSchema)
+        {
+            if (writerSchema.Tag != Tag) return false;
+
+            ArraySchema that = writerSchema as ArraySchema;
+            return ItemSchema.CanRead(that.ItemSchema);
+        }
+
+        /// <summary>
+        /// Function to compare equality of two array schemas
+        /// </summary>
+        /// <param name="obj">other array schema</param>
+        /// <returns>true if the two schemas are equal, false otherwise</returns>
+        public override bool Equals(object obj)
+        {
+            if (this == obj) return true;
+
+            if (obj != null && obj is ArraySchema)
+            {
+                ArraySchema that = obj as ArraySchema;
+                if (ItemSchema.Equals(that.ItemSchema))
+                    return areEqual(that.Props, this.Props);
+            }
+            return false;
+        }
+
+        /// <summary>
+        /// Hashcode function
+        /// </summary>
+        /// <returns></returns>
+        public override int GetHashCode()
+        {
+            return 29 * ItemSchema.GetHashCode() + getHashCode(Props);
+        }
+    }
+}
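CanRead shows that schema resolution for arrays reduces to resolution of the item schemas. A sketch, assuming the primitive promotion rules of the Avro spec (int is promotable to long):

    // An array-of-long reader can consume data written as array-of-int,
    // but not vice versa.
    Schema reader = Schema.Parse(@"{""type"": ""array"", ""items"": ""long""}");
    Schema writer = Schema.Parse(@"{""type"": ""array"", ""items"": ""int""}");
    bool ok  = reader.CanRead(writer);   // expected: true
    bool bad = writer.CanRead(reader);   // expected: false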
diff --git a/lang/csharp/src/apache/main/Schema/AvroException.cs b/lang/csharp/src/apache/main/Schema/AvroException.cs
new file mode 100644
index 0000000..688fd59
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/AvroException.cs
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Avro
+{
+    public class AvroException : Exception
+    {
+        public AvroException(string s)
+            : base(s)
+        {
+        }
+
+        public AvroException(string s, Exception inner)
+            : base(s, inner)
+        {
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/Schema/AvroTypeException.cs b/lang/csharp/src/apache/main/Schema/AvroTypeException.cs
new file mode 100644
index 0000000..6c8b052
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/AvroTypeException.cs
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Avro
+{
+    public class AvroTypeException : AvroException
+    {
+        public AvroTypeException(string s)
+            : base(s)
+        {
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/Schema/EnumSchema.cs b/lang/csharp/src/apache/main/Schema/EnumSchema.cs
new file mode 100644
index 0000000..d346ecf
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/EnumSchema.cs
@@ -0,0 +1,206 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Newtonsoft.Json.Linq;
+
+namespace Avro
+{
+    /// <summary>
+    /// Class for enum type schemas
+    /// </summary>
+    public class EnumSchema : NamedSchema
+    {
+        /// <summary>
+        /// List of strings representing the enum symbols
+        /// </summary>
+        public IList<string> Symbols { get; private set;  }
+
+        /// <summary>
+        /// Map of enum symbols to their corresponding ordinal numbers
+        /// </summary>
+        private readonly IDictionary<string, int> symbolMap;
+
+        /// <summary>
+        /// Count of enum symbols
+        /// </summary>
+        public int Count { get { return Symbols.Count; } }
+
+        /// <summary>
+        /// Static function to return new instance of EnumSchema
+        /// </summary>
+        /// <param name="jtok">JSON object for enum schema</param>
+        /// <param name="names">list of named schema already parsed in</param>
+        /// <param name="encspace">enclosing namespace for the enum schema</param>
+        /// <returns>new instance of enum schema</returns>
+        internal static EnumSchema NewInstance(JToken jtok, PropertyMap props, SchemaNames names, string encspace)
+        {
+            SchemaName name = NamedSchema.GetName(jtok, encspace);
+            var aliases = NamedSchema.GetAliases(jtok, name.Space, name.EncSpace);
+
+            JArray jsymbols = jtok["symbols"] as JArray;
+            if (null == jsymbols)
+                throw new SchemaParseException("Enum has no symbols: " + name);
+
+            List<string> symbols = new List<string>();
+            IDictionary<string, int> symbolMap = new Dictionary<string, int>();
+            int i = 0;
+            foreach (JValue jsymbol in jsymbols)
+            {
+                string s = (string)jsymbol.Value;
+                if (symbolMap.ContainsKey(s))
+                    throw new SchemaParseException("Duplicate symbol: " + s);
+
+                symbolMap[s] = i++;
+                symbols.Add(s);
+            }
+            return new EnumSchema(name, aliases, symbols, symbolMap, props, names);
+        }
+
+        /// <summary>
+        /// Constructor for enum schema
+        /// </summary>
+        /// <param name="name">name of enum</param>
+        /// <param name="aliases">list of aliases for the name</param>
+        /// <param name="symbols">list of enum symbols</param>
+        /// <param name="symbolMap">map of enum symbols and value</param>
+        /// <param name="names">list of named schema already read</param>
+        private EnumSchema(SchemaName name, IList<SchemaName> aliases, List<string> symbols,
+                            IDictionary<String, int> symbolMap, PropertyMap props, SchemaNames names)
+                            : base(Type.Enumeration, name, aliases, props, names)
+        {
+            if (null == name.Name) throw new SchemaParseException("name cannot be null for enum schema.");
+            this.Symbols = symbols;
+            this.symbolMap = symbolMap;
+        }
+
+        /// <summary>
+        /// Writes enum schema in JSON format
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        /// <param name="names">list of named schema already written</param>
+        /// <param name="encspace">enclosing namespace of the enum schema</param>
+        protected internal override void WriteJsonFields(Newtonsoft.Json.JsonTextWriter writer, 
+                                                            SchemaNames names, string encspace)
+        {
+            base.WriteJsonFields(writer, names, encspace);
+            writer.WritePropertyName("symbols");
+            writer.WriteStartArray();
+            foreach (string s in this.Symbols)
+                writer.WriteValue(s);
+            writer.WriteEndArray();
+        }
+
+        /// <summary>
+        /// Returns the position of the given symbol within this enum. 
+        /// Throws AvroException if the symbol is not found in this enum.
+        /// </summary>
+        /// <param name="symbol">name of the symbol to find</param>
+        /// <returns>position of the given symbol in this enum schema</returns>
+        public int Ordinal(string symbol)
+        {
+            int result;
+            if (symbolMap.TryGetValue(symbol, out result)) return result;
+            throw new AvroException("No such symbol: " + symbol);
+        }
+
+        /// <summary>
+        /// Returns the enum symbol at the given index in the symbol list
+        /// </summary>
+        /// <param name="index">symbol index</param>
+        /// <returns>symbol name</returns>
+        public string this[int index]
+        {
+            get
+            {
+                if (index < Symbols.Count) return Symbols[index];
+                throw new AvroException("Enumeration out of range. Must be less than " + Symbols.Count + ", but is " + index);
+            }
+        }
+
+        /// <summary>
+        /// Checks if given symbol is in the list of enum symbols
+        /// </summary>
+        /// <param name="symbol">symbol to check</param>
+        /// <returns>true if the symbol exists, false otherwise</returns>
+        public bool Contains(string symbol)
+        {
+            return symbolMap.ContainsKey(symbol);
+        }
+
+        /// <summary>
+        /// Returns an enumerator that enumerates the symbols in this enum schema in the order of their definition.
+        /// </summary>
+        /// <returns>Enumeration over the symbols of this enum schema</returns>
+        public IEnumerator<string> GetEnumerator()
+        {
+            return Symbols.GetEnumerator();
+        }
+
+        /// <summary>
+        /// Checks equality of two enum schemas
+        /// </summary>
+        /// <param name="obj"></param>
+        /// <returns></returns>
+        public override bool Equals(object obj)
+        {
+            if (obj == this) return true;
+            if (obj != null && obj is EnumSchema)
+            {
+                EnumSchema that = obj as EnumSchema;
+                if (SchemaName.Equals(that.SchemaName) && Count == that.Count)
+                {
+                    for (int i = 0; i < Count; i++) if (!Symbols[i].Equals(that.Symbols[i])) return false;
+                    return areEqual(that.Props, this.Props);
+                }
+            }
+            return false;
+        }
+
+        /// <summary>
+        /// Hashcode function
+        /// </summary>
+        /// <returns></returns>
+        public override int GetHashCode()
+        {
+            int result = SchemaName.GetHashCode() + getHashCode(Props);
+            foreach (string s in Symbols) result += 23 * s.GetHashCode();
+            return result;
+        }
+
+        /// <summary>
+        /// Checks if this schema can read data written by the given schema. Used for decoding data.
+        /// </summary>
+        /// <param name="writerSchema">writer schema</param>
+        /// <returns>true if this and writer schema are compatible based on the AVRO specification, false otherwise</returns>
+        public override bool CanRead(Schema writerSchema)
+        {
+            if (writerSchema.Tag != Tag) return false;
+
+            EnumSchema that = writerSchema as EnumSchema;
+            if (!that.SchemaName.Equals(SchemaName))
+                if (!InAliases(that.SchemaName)) return false;
+
+            // We defer the checking of symbols: the writer may have a symbol missing from the reader,
+            // but if the writer never used the missing symbol, the reader can still read the data.
+
+            return true;
+        }
+    }
+}
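Ordinal(), the indexer, and Contains() give both directions of the symbol/position mapping. A short usage sketch:

    EnumSchema suit = (EnumSchema) Schema.Parse(
        @"{""type"": ""enum"", ""name"": ""Suit"",
           ""symbols"": [""SPADES"", ""HEARTS"", ""DIAMONDS"", ""CLUBS""]}");
    int i = suit.Ordinal("HEARTS");      // 1
    string s = suit[2];                  // "DIAMONDS"
    bool has = suit.Contains("JOKER");   // false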
diff --git a/lang/csharp/src/apache/main/Schema/Field.cs b/lang/csharp/src/apache/main/Schema/Field.cs
new file mode 100644
index 0000000..740c44d
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/Field.cs
@@ -0,0 +1,245 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Newtonsoft.Json.Linq;
+using Newtonsoft.Json;
+
+namespace Avro
+{
+
+    /// <summary>
+    /// Class for fields defined in a record
+    /// </summary>
+    public class Field
+    {
+        /// <summary>
+        /// Enum for the sorting order of record fields
+        /// </summary>
+        public enum SortOrder
+        {
+            ascending,
+            descending,
+            ignore
+        }
+
+        /// <summary>
+        /// Name of the field.
+        /// </summary>
+        public readonly string Name;
+
+        /// <summary>
+        /// List of aliases for the field name
+        /// </summary>
+        public readonly IList<string> aliases;
+
+        /// <summary>
+        /// Position of the field within its record.
+        /// </summary>
+        public int Pos { get; private set; }
+
+        /// <summary>
+        /// Documentation for the field, if any. Null if there is no documentation.
+        /// </summary>
+        public string Documentation { get; private set; }
+
+        /// <summary>
+        /// The default value for the field stored as JSON object, if defined. Otherwise, null.
+        /// </summary>
+        public JToken DefaultValue { get; private set; }
+
+        /// <summary>
+        /// Order of the field
+        /// </summary>
+        public SortOrder? Ordering { get; private set; }
+
+        /// <summary>
+        /// Field type's schema
+        /// </summary>
+        public Schema Schema { get; private set; }
+
+        /// <summary>
+        /// Custom properties for the field. We don't store the field's custom properties in
+        /// the field type's schema because if the field type is only a reference to the schema
+        /// instead of an actual schema definition, then the schema could already have its own set
+        /// of custom properties from when it was previously defined.
+        /// </summary>
+        private readonly PropertyMap Props;
+
+        /// <summary>
+        /// Static comparer object for JSON objects such as the fields default value
+        /// </summary>
+        internal static JTokenEqualityComparer JtokenEqual = new JTokenEqualityComparer();
+
+        /// <summary>
+        /// A flag to indicate that the reader schema has a field missing from the writer schema
+        /// and that the field has a default value. This is set in CanRead(), which is always
+        /// called before deserializing data.
+        /// </summary>
+
+        /// <summary>
+        /// Constructor for the field class
+        /// </summary>
+        /// <param name="schema">schema for the field type</param>
+        /// <param name="name">name of the field</param>
+        /// <param name="aliases">list of aliases for the name of the field</param>
+        /// <param name="pos">position of the field</param>
+        /// <param name="doc">documentation for the field</param>
+        /// <param name="defaultValue">field's default value if it exists</param>
+        /// <param name="sortorder">sort order of the field</param>
+        internal Field(Schema schema, string name, IList<string> aliases, int pos, string doc,
+                        JToken defaultValue, SortOrder sortorder, PropertyMap props)
+        {
+            if (string.IsNullOrEmpty(name)) throw new ArgumentNullException("name", "name cannot be null.");
+            if (null == schema) throw new ArgumentNullException("schema", "schema cannot be null.");
+            this.Schema = schema;
+            this.Name = name;
+            this.aliases = aliases;
+            this.Pos = pos;
+            this.Documentation = doc;
+            this.DefaultValue = defaultValue;
+            this.Ordering = sortorder;
+            this.Props = props;
+        }
+
+        /// <summary>
+        /// Writes the Field class in JSON format
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        /// <param name="names">list of named schemas already written</param>
+        /// <param name="encspace">enclosing namespace for the field</param>
+        protected internal void writeJson(JsonTextWriter writer, SchemaNames names, string encspace)
+        {
+            writer.WriteStartObject();
+            JsonHelper.writeIfNotNullOrEmpty(writer, "name", this.Name);
+            JsonHelper.writeIfNotNullOrEmpty(writer, "doc", this.Documentation);
+
+            if (null != this.DefaultValue)
+            {
+                writer.WritePropertyName("default");
+                this.DefaultValue.WriteTo(writer, null);
+            }
+            if (null != this.Schema)
+            {
+                writer.WritePropertyName("type");
+                Schema.WriteJson(writer, names, encspace);
+            }
+
+            if (null != this.Props)
+                this.Props.WriteJson(writer);
+
+            if (null != aliases)
+            {
+                writer.WritePropertyName("aliases");
+                writer.WriteStartArray();
+                foreach (string name in aliases)
+                    writer.WriteValue(name);
+                writer.WriteEndArray();
+            }
+
+            writer.WriteEndObject();
+        }
+
+        /// <summary>
+        /// Parses the 'aliases' property from the given JSON token
+        /// </summary>
+        /// <param name="jtok">JSON object to read</param>
+        /// <returns>List of strings representing the aliases. If no 'aliases' property is specified, returns null.</returns>
+        internal static IList<string> GetAliases(JToken jtok)
+        {
+            JToken jaliases = jtok["aliases"];
+            if (null == jaliases)
+                return null;
+
+            if (jaliases.Type != JTokenType.Array)
+                throw new SchemaParseException("Aliases must be of format JSON array of strings");
+
+            var aliases = new List<string>();
+            foreach (JToken jalias in jaliases)
+            {
+                if (jalias.Type != JTokenType.String)
+                    throw new SchemaParseException("Aliases must be of format JSON array of strings");
+
+                aliases.Add((string)jalias);
+            }
+            return aliases;
+        }
+
+        /// <summary>
+        /// Returns the field's custom property value given the property name
+        /// </summary>
+        /// <param name="key">custom property name</param>
+        /// <returns>custom property value</returns>
+        public string GetProperty(string key)
+        {
+            if (null == this.Props) return null;
+            string v;
+            return (this.Props.TryGetValue(key, out v)) ? v : null;
+        }
+
+        /// <summary>
+        /// Compares two field objects
+        /// </summary>
+        /// <param name="obj">field to compare with this field</param>
+        /// <returns>true if two fields are equal, false otherwise</returns>
+        public override bool Equals(object obj)
+        {
+            if (obj == this) return true;
+            if (obj != null && obj is Field)
+            {
+                Field that = obj as Field;
+                return areEqual(that.Name, Name) && that.Pos == Pos && areEqual(that.Documentation, Documentation)
+                    && areEqual(that.Ordering, Ordering) && JtokenEqual.Equals(that.DefaultValue, DefaultValue)
+                    && that.Schema.Equals(Schema) && areEqual(that.Props, this.Props);
+            }
+            return false;
+        }
+
+        /// <summary>
+        /// Compares two objects
+        /// </summary>
+        /// <param name="o1">first object</param>
+        /// <param name="o2">second object</param>
+        /// <returns>true if two objects are equal, false otherwise</returns>
+        private static bool areEqual(object o1, object o2)
+        {
+            return o1 == null ? o2 == null : o1.Equals(o2);
+        }
+
+        /// <summary>
+        /// Hash code function
+        /// </summary>
+        /// <returns></returns>
+        public override int GetHashCode()
+        {
+            return 17 * Name.GetHashCode() + Pos + 19 * getHashCode(Documentation) +
+                   23 * getHashCode(Ordering) + 29 * getHashCode(DefaultValue) + 31 * Schema.GetHashCode() +
+                   37 * getHashCode(Props);
+        }
+
+        /// <summary>
+        /// Hash code helper function
+        /// </summary>
+        /// <param name="obj"></param>
+        /// <returns></returns>
+        private static int getHashCode(object obj)
+        {
+            return obj == null ? 0 : obj.GetHashCode();
+        }
+    }
+}
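
A hedged sketch of how the Field metadata above surfaces after parsing, assuming
Schema.Parse from Schema.cs and the RecordSchema string indexer defined below:

    var rec = (RecordSchema) Schema.Parse(
        "{\"type\":\"record\",\"name\":\"R\",\"fields\":[{\"name\":\"count\"," +
        "\"type\":\"int\",\"default\":0,\"order\":\"descending\"}]}");
    Field f = rec["count"];
    Console.WriteLine(f.Pos);          // 0
    Console.WriteLine(f.DefaultValue); // 0 (kept as a JToken)
    Console.WriteLine(f.Ordering);     // descending
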
diff --git a/lang/csharp/src/apache/main/Schema/FixedSchema.cs b/lang/csharp/src/apache/main/Schema/FixedSchema.cs
new file mode 100644
index 0000000..67c843d
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/FixedSchema.cs
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Newtonsoft.Json.Linq;
+
+namespace Avro
+{
+    /// <summary>
+    /// Class for fixed schemas
+    /// </summary>
+    public class FixedSchema : NamedSchema
+    {
+        /// <summary>
+        /// Fixed size for the bytes
+        /// </summary>
+        public int Size { get; set; }
+
+        /// <summary>
+        /// Static function to return new instance of the fixed schema class
+        /// </summary>
+        /// <param name="jtok">JSON object for the fixed schema</param>
+        /// <param name="names">list of named schema already parsed in</param>
+        /// <param name="encspace">enclosing namespace of the fixed schema</param>
+        /// <returns>new FixedSchema object</returns>
+        internal static FixedSchema NewInstance(JToken jtok, PropertyMap props, SchemaNames names, string encspace)
+        {
+            SchemaName name = NamedSchema.GetName(jtok, encspace);
+            var aliases = NamedSchema.GetAliases(jtok, name.Space, name.EncSpace);
+
+            return new FixedSchema(name, aliases, JsonHelper.GetRequiredInteger(jtok, "size"), props, names);
+        }
+
+        /// <summary>
+        /// Constructor
+        /// </summary>
+        /// <param name="name">name of the fixed schema</param>
+        /// <param name="aliases">list of aliases for the name</param>
+        /// <param name="size">fixed size</param>
+        /// <param name="names">list of named schema already parsed in</param>
+        private FixedSchema(SchemaName name, IList<SchemaName> aliases, int size, PropertyMap props, SchemaNames names)
+                            : base(Type.Fixed, name, aliases, props, names)
+        {
+            if (null == name.Name) throw new SchemaParseException("name cannot be null for fixed schema.");
+            if (size <= 0) throw new ArgumentOutOfRangeException("size", "size must be greater than zero.");
+            this.Size = size;
+        }
+
+        /// <summary>
+        /// Writes the fixed schema class in JSON format
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        /// <param name="names">list of named schema already written</param>
+        /// <param name="encspace">enclosing namespace for the fixed schema</param>
+        protected internal override void WriteJsonFields(Newtonsoft.Json.JsonTextWriter writer, SchemaNames names, string encspace)
+        {
+            base.WriteJsonFields(writer, names, encspace);
+            writer.WritePropertyName("size");
+            writer.WriteValue(this.Size);
+        }
+
+        /// <summary>
+        /// Compares two fixed schemas
+        /// </summary>
+        /// <param name="obj">fixed schema to compare against this schema</param>
+        /// <returns>true if two schemas are the same, false otherwise</returns>
+        public override bool Equals(object obj)
+        {
+            if (obj == this) return true;
+
+            if (obj != null && obj is FixedSchema)
+            {
+                FixedSchema that = obj as FixedSchema;
+                return SchemaName.Equals(that.SchemaName) && Size == that.Size && areEqual(that.Props, this.Props);
+            }
+            return false;
+        }
+
+        /// <summary>
+        /// Hash code function
+        /// </summary>
+        /// <returns></returns>
+        public override int GetHashCode()
+        {
+            return 53 * SchemaName.GetHashCode() + 47 * Size + getHashCode(Props);
+        }
+
+        /// <summary>
+        /// Checks if this schema can read data written by the given schema. Used for decoding data.
+        /// </summary>
+        /// <param name="writerSchema">writer schema</param>
+        /// <returns>true if this and writer schema are compatible based on the AVRO specification, false otherwise</returns>
+        public override bool CanRead(Schema writerSchema)
+        {
+            if (writerSchema.Tag != Tag) return false;
+            FixedSchema that = writerSchema as FixedSchema;
+            if (that.Size != Size) return false;
+            if (that.SchemaName.Equals(SchemaName))
+                return true;
+            else
+                return InAliases(that.SchemaName);
+        }
+    }
+}
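
A short sketch of the fixed-schema resolution rules in CanRead above (sizes must match,
and the writer's name must equal the reader's name or one of its aliases), assuming
Schema.Parse:

    var writer = (FixedSchema) Schema.Parse(
        "{\"type\":\"fixed\",\"name\":\"MD5\",\"size\":16}");
    var reader = (FixedSchema) Schema.Parse(
        "{\"type\":\"fixed\",\"name\":\"Hash\",\"aliases\":[\"MD5\"],\"size\":16}");
    bool ok = reader.CanRead(writer); // true: equal sizes, writer name matched via alias
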
diff --git a/lang/csharp/src/apache/main/Schema/JsonHelper.cs b/lang/csharp/src/apache/main/Schema/JsonHelper.cs
new file mode 100644
index 0000000..13db058
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/JsonHelper.cs
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Newtonsoft.Json.Linq;
+using Newtonsoft.Json;
+
+namespace Avro
+{
+    class JsonHelper
+    {
+        /// <summary>
+        /// Retrieves the optional string property value for the given property name from the given JSON object.
+        /// This throws an exception if the property exists but is not a string.
+        /// </summary>
+        /// <param name="jtok">JSON object to read</param>
+        /// <param name="field">property name</param>
+        /// <returns>property value if property exists, null if property doesn't exist in the JSON object</returns>
+        public static string GetOptionalString(JToken jtok, string field)
+        {
+            if (null == jtok) throw new ArgumentNullException("jtok", "jtok cannot be null.");
+            if (string.IsNullOrEmpty(field)) throw new ArgumentNullException("field", "field cannot be null.");
+
+            JToken child = jtok[field];
+            if (null == child) return null;
+
+            if (child.Type == JTokenType.String)
+            {
+                string value = child.ToString();
+                return value.Trim('\"');
+            }
+            throw new SchemaParseException("Field " + field + " is not a string");
+        }
+
+        /// <summary>
+        /// Retrieves the required string property value for the given property name from the given JSON object.
+        /// </summary>
+        /// <param name="jtok">JSON object to read</param>
+        /// <param name="field">property name</param>
+        /// <returns>property value</returns>
+        public static string GetRequiredString(JToken jtok, string field)
+        {
+            string value = GetOptionalString(jtok, field);
+            if (string.IsNullOrEmpty(value)) throw new SchemaParseException(string.Format("No \"{0}\" JSON field: {1}", field, jtok));
+            return value;
+        }
+
+        /// <summary>
+        /// Retrieves the required int property value for the given property name from the given JSON object.
+        /// </summary>
+        /// <param name="jtok">JSON object to read</param>
+        /// <param name="field">property name</param>
+        /// <returns>property value</returns>
+        public static int GetRequiredInteger(JToken jtok, string field)
+        {
+            ensureValidFieldName(field);
+            JToken child = jtok[field];
+            if (null == child) throw new SchemaParseException(string.Format("No \"{0}\" JSON field: {1}", field, jtok));
+
+            if (child.Type == JTokenType.Integer) return (int) child;
+            throw new SchemaParseException("Field " + field + " is not an integer");
+        }
+
+        /// <summary>
+        /// Retrieves the optional boolean property value for the given property name from the given JSON object.
+        /// </summary>
+        /// <param name="jtok">JSON object to read</param>
+        /// <param name="field">property name</param>
+        /// <returns>null if the property doesn't exist, otherwise the property's boolean value</returns>
+        public static bool? GetOptionalBoolean(JToken jtok, string field)
+        {
+            if (null == jtok) throw new ArgumentNullException("jtok", "jtok cannot be null.");
+            if (string.IsNullOrEmpty(field)) throw new ArgumentNullException("field", "field cannot be null.");
+
+            JToken child = jtok[field];
+            if (null == child) return null;
+
+            if (child.Type == JTokenType.Boolean)
+                return (bool)child;
+
+            throw new SchemaParseException("Field " + field + " is not a boolean");
+        }
+
+        /// <summary>
+        /// Writes JSON property name and value if value is not null
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        /// <param name="key">property name</param>
+        /// <param name="value">property value</param>
+        internal static void writeIfNotNullOrEmpty(JsonTextWriter writer, string key, string value)
+        {
+            if (string.IsNullOrEmpty(value)) return;
+            writer.WritePropertyName(key);
+            writer.WriteValue(value);
+        }
+
+        /// <summary>
+        /// Ensures that the given field name is not null or empty
+        /// </summary>
+        /// <param name="name">field name to validate</param>
+        private static void ensureValidFieldName(string name)
+        {
+            if (string.IsNullOrEmpty(name)) throw new ArgumentNullException("name");
+        }
+    }
+}
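
An illustrative sketch of the accessors above (JsonHelper is assembly-internal, so this
only runs inside the Avro assembly; JObject comes from Newtonsoft.Json.Linq):

    JToken jtok = JObject.Parse("{\"name\":\"f1\",\"size\":16}");
    string doc = JsonHelper.GetOptionalString(jtok, "doc");  // null: property absent
    int size = JsonHelper.GetRequiredInteger(jtok, "size");  // 16
    // JsonHelper.GetRequiredString(jtok, "namespace") would throw SchemaParseException
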
diff --git a/lang/csharp/src/apache/main/Schema/MapSchema.cs b/lang/csharp/src/apache/main/Schema/MapSchema.cs
new file mode 100644
index 0000000..23833ba
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/MapSchema.cs
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Newtonsoft.Json.Linq;
+
+namespace Avro
+{
+    /// <summary>
+    /// Class for map schemas
+    /// </summary>
+    public class MapSchema : UnnamedSchema
+    {
+        /// <summary>
+        /// Schema for map values type
+        /// </summary>
+        public Schema ValueSchema { get; set; }
+
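+        /// <summary>
+        /// Creates a new map schema with the given value schema
+        /// </summary>
+        /// <param name="type">schema of the map's values</param>
+        /// <returns>new MapSchema object</returns>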
+        public static MapSchema CreateMap(Schema type)
+        {
+            return new MapSchema(type, null);
+        }
+
+        /// <summary>
+        /// Static function to return new instance of map schema
+        /// </summary>
+        /// <param name="jtok">JSON object for the map schema</param>
+        /// <param name="names">list of named schemas already read</param>
+        /// <param name="encspace">enclosing namespace of the map schema</param>
+        /// <returns>new MapSchema object</returns>
+        internal static MapSchema NewInstance(JToken jtok, PropertyMap props, SchemaNames names, string encspace)
+        {
+            JToken jvalue = jtok["values"];
+            if (null == jvalue) throw new AvroTypeException("Map does not have 'values'");
+
+            return new MapSchema(Schema.ParseJson(jvalue, names, encspace), props);
+        }
+
+        /// <summary>
+        /// Constructor for map schema class
+        /// </summary>
+        /// <param name="valueSchema">schema for map values type</param>
+        private MapSchema(Schema valueSchema, PropertyMap props) : base(Type.Map, props)
+        {
+            if (null == valueSchema) throw new ArgumentNullException("valueSchema", "valueSchema cannot be null.");
+            this.ValueSchema = valueSchema;
+        }
+
+        /// <summary>
+        /// Writes map schema in JSON format
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        /// <param name="names">list of named schemas already written</param>
+        /// <param name="encspace">enclosing namespace of the map schema</param>
+        protected internal override void WriteJsonFields(Newtonsoft.Json.JsonTextWriter writer, SchemaNames names, string encspace)
+        {
+            writer.WritePropertyName("values");
+            ValueSchema.WriteJson(writer, names, encspace);
+        }
+
+        /// <summary>
+        /// Checks if this schema can read data written by the given schema. Used for decoding data.
+        /// </summary>
+        /// <param name="writerSchema">writer schema</param>
+        /// <returns>true if this and writer schema are compatible based on the AVRO specification, false otherwise</returns>
+        public override bool CanRead(Schema writerSchema)
+        {
+            if (writerSchema.Tag != Tag) return false;
+
+            MapSchema that = writerSchema as MapSchema;
+            return ValueSchema.CanRead(that.ValueSchema);
+        }
+
+        /// <summary>
+        /// Compares equality of two map schemas
+        /// </summary>
+        /// <param name="obj">map schema to compare against this schema</param>
+        /// <returns>true if two schemas are equal, false otherwise</returns>
+        public override bool Equals(object obj)
+        {
+            if (this == obj) return true;
+
+            if (obj != null && obj is MapSchema)
+            {
+                MapSchema that = obj as MapSchema;
+                if (ValueSchema.Equals(that.ValueSchema))
+                    return areEqual(that.Props, this.Props);
+            }
+            return false;
+        }
+
+        /// <summary>
+        /// Hashcode function
+        /// </summary>
+        /// <returns></returns>
+        public override int GetHashCode()
+        {
+            return 29 * ValueSchema.GetHashCode() + getHashCode(Props);
+        }
+    }
+}
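
A minimal sketch of the two ways to obtain a map schema above, assuming Schema.Parse
and the public PrimitiveSchema.NewInstance factory from this commit:

    var map = MapSchema.CreateMap(PrimitiveSchema.NewInstance("int"));
    var parsed = Schema.Parse("{\"type\":\"map\",\"values\":\"int\"}");
    bool ok = parsed.CanRead(map); // true: int values are readable as int
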
diff --git a/lang/csharp/src/apache/main/Schema/NamedSchema.cs b/lang/csharp/src/apache/main/Schema/NamedSchema.cs
new file mode 100644
index 0000000..02beb79
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/NamedSchema.cs
@@ -0,0 +1,207 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Newtonsoft.Json.Linq;
+
+
+namespace Avro
+{
+    /// <summary>
+    /// Base class for all named schemas: fixed, enum, record
+    /// </summary>
+    public abstract class NamedSchema : Schema
+    {
+        /// <summary>
+        /// Name of the schema, contains name, namespace and enclosing namespace
+        /// </summary>
+        public SchemaName SchemaName { get; private set; }
+
+        /// <summary>
+        /// Name of the schema
+        /// </summary>
+        public override string Name
+        {
+            get { return SchemaName.Name; }
+        }
+
+        /// <summary>
+        /// Namespace of the schema
+        /// </summary>
+        public string Namespace
+        {
+            get { return SchemaName.Namespace; }
+        }
+
+        /// <summary>
+        /// Namespace.Name of the schema
+        /// </summary>
+        public string Fullname
+        {
+            get { return SchemaName.Fullname; }
+        }
+
+        /// <summary>
+        /// List of aliases for this named schema
+        /// </summary>
+        private readonly IList<SchemaName> aliases;
+
+        /// <summary>
+        /// Static function to return a new instance of the named schema
+        /// </summary>
+        /// <param name="jo">JSON object of the named schema</param>
+        /// <param name="names">list of named schemas already read</param>
+        /// <param name="encspace">enclosing namespace of the named schema</param>
+        /// <returns>new or previously defined NamedSchema; null if the type is not a named schema</returns>
+        internal static NamedSchema NewInstance(JObject jo, PropertyMap props, SchemaNames names, string encspace)
+        {
+            string type = JsonHelper.GetRequiredString(jo, "type");
+            switch (type)
+            {
+                case "fixed":
+                    return FixedSchema.NewInstance(jo, props, names, encspace);
+                case "enum":
+                    return EnumSchema.NewInstance(jo, props, names, encspace);
+                case "record":
+                    return RecordSchema.NewInstance(Type.Record, jo, props, names, encspace);
+                case "error":
+                    return RecordSchema.NewInstance(Type.Error, jo, props, names, encspace);
+                default:
+                    NamedSchema result;
+                    if (names.TryGetValue(type, null, encspace, out result))
+                        return result;
+                    return null;
+            }
+        }
+
+        /// <summary>
+        /// Constructor for named schema class
+        /// </summary>
+        /// <param name="type">schema type</param>
+        /// <param name="name">name</param>
+        /// <param name="names">list of named schemas already read</param>
+        protected NamedSchema(Type type, SchemaName name, IList<SchemaName> aliases, PropertyMap props, SchemaNames names)
+                                : base(type, props)
+        {
+            this.SchemaName = name;
+            this.aliases = aliases;
+            if (null != name.Name)  // Added this check for anonymous records inside Message 
+                if (!names.Add(name, this))
+                    throw new AvroException("Duplicate schema name " + name.Fullname);
+        }
+
+        /// <summary>
+        /// Parses the name and namespace from the given JSON schema object then creates
+        /// SchemaName object including the given enclosing namespace
+        /// </summary>
+        /// <param name="jtok">JSON object to read</param>
+        /// <param name="encspace">enclosing namespace</param>
+        /// <returns>new SchemaName object</returns>
+        protected static SchemaName GetName(JToken jtok, string encspace)
+        {
+            string n = JsonHelper.GetOptionalString(jtok, "name");      // Changed this to optional string for anonymous records in messages
+            string ns = JsonHelper.GetOptionalString(jtok, "namespace");
+            return new SchemaName(n, ns, encspace);
+        }
+
+        /// <summary>
+        /// Parses the 'aliases' property from the given JSON token
+        /// </summary>
+        /// <param name="jtok">JSON object to read</param>
+        /// <param name="space">namespace of the name this alias is for</param>
+        /// <param name="encspace">enclosing namespace of the name this alias is for</param>
+        /// <returns>List of SchemaName objects representing the aliases. If no 'aliases' property is specified, returns null.</returns>
+        protected static IList<SchemaName> GetAliases(JToken jtok, string space, string encspace)
+        {
+            JToken jaliases = jtok["aliases"];
+            if (null == jaliases)
+                return null;
+
+            if (jaliases.Type != JTokenType.Array)
+                throw new SchemaParseException("Aliases must be of format JSON array of strings");
+
+            var aliases = new List<SchemaName>();
+            foreach (JToken jalias in jaliases)
+            {
+                if (jalias.Type != JTokenType.String)
+                    throw new SchemaParseException("Aliases must be of format JSON array of strings");
+
+                aliases.Add(new SchemaName((string)jalias, space, encspace));
+            }
+            return aliases;
+        }
+
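+        /// <summary>
+        /// Checks whether the given schema name is one of this schema's aliases
+        /// </summary>
+        /// <param name="name">schema name to look up</param>
+        /// <returns>true if the name is an alias of this schema, false otherwise</returns>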
+        protected bool InAliases(SchemaName name)
+        {
+            if (null != aliases)
+            {
+                foreach (SchemaName alias in aliases)
+                    if (name.Equals(alias)) return true;
+            }
+            return false;
+        }
+
+        /// <summary>
+        /// Writes named schema in JSON format
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        /// <param name="names">list of named schemas already written</param>
+        /// <param name="encspace">enclosing namespace of the named schema</param>
+        protected internal override void WriteJson(Newtonsoft.Json.JsonTextWriter writer, SchemaNames names, string encspace)
+        {
+            if (!names.Add(this))
+            {
+                // schema is already in the list, write name only
+                SchemaName schemaName = this.SchemaName;
+                string name;
+                if (schemaName.Namespace != encspace)
+                    name = schemaName.Namespace + "." + schemaName.Name;  // we need to add the qualifying namespace of the target schema if it's not the same as current namespace
+                else
+                    name = schemaName.Name;
+                writer.WriteValue(name);
+            }
+            else
+                // schema is not in the list, write full schema definition
+                base.WriteJson(writer, names, encspace);
+        }
+
+        /// <summary>
+        /// Writes the named schema's fields in JSON format
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        /// <param name="names">list of named schemas already written</param>
+        /// <param name="encspace">enclosing namespace of the named schema</param>
+        protected internal override void WriteJsonFields(Newtonsoft.Json.JsonTextWriter writer, SchemaNames names, string encspace)
+        {
+            this.SchemaName.WriteJson(writer, names, encspace);
+
+            if (null != aliases)
+            {
+                writer.WritePropertyName("aliases");
+                writer.WriteStartArray();
+                foreach (SchemaName name in aliases)
+                {
+                    string fullname = (null != name.Space) ? name.Space + "." + name.Name : name.Name;
+                    writer.WriteValue(fullname);
+                }
+                writer.WriteEndArray();
+            }
+        }
+    }
+}
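
A sketch of the write-by-reference behavior in WriteJson above: once a named schema has
been written, later occurrences are emitted as a (possibly qualified) name only. This
assumes Schema.Parse and that Schema.ToString() serializes via WriteJson:

    var s = Schema.Parse(
        "{\"type\":\"record\",\"name\":\"Pair\",\"fields\":[" +
        "{\"name\":\"a\",\"type\":{\"type\":\"fixed\",\"name\":\"H\",\"size\":4}}," +
        "{\"name\":\"b\",\"type\":\"H\"}]}");
    Console.WriteLine(s); // field "a" carries the full fixed definition;
                          // field "b" is written as just "H"
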
diff --git a/lang/csharp/src/apache/main/Schema/PrimitiveSchema.cs b/lang/csharp/src/apache/main/Schema/PrimitiveSchema.cs
new file mode 100644
index 0000000..fa33bc6
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/PrimitiveSchema.cs
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Newtonsoft.Json;
+
+namespace Avro
+{
+    /// <summary>
+    /// Class for schemas of primitive types
+    /// </summary>
+    public sealed class PrimitiveSchema : UnnamedSchema
+    {
+        /// <summary>
+        /// Constructor for primitive schema
+        /// </summary>
+        /// <param name="type"></param>
+        private PrimitiveSchema(Type type, PropertyMap props) : base(type, props)
+        {
+        }
+
+        /// <summary>
+        /// Static function to return new instance of primitive schema
+        /// </summary>
+        /// <param name="type">primitive type</param>
+        /// <returns>new PrimitiveSchema object, or null if the type is not a primitive</returns>
+        public static PrimitiveSchema NewInstance(string type, PropertyMap props = null)
+        {
+            const string q = "\"";
+            if (type.StartsWith(q) && type.EndsWith(q)) type = type.Substring(1, type.Length - 2);
+            switch (type)
+            {
+                case "null":
+                    return new PrimitiveSchema(Schema.Type.Null, props);
+                case "boolean":
+                    return new PrimitiveSchema(Schema.Type.Boolean, props);
+                case "int":
+                    return new PrimitiveSchema(Schema.Type.Int, props);
+                case "long":
+                    return new PrimitiveSchema(Schema.Type.Long, props);
+                case "float":
+                    return new PrimitiveSchema(Schema.Type.Float, props);
+                case "double":
+                    return new PrimitiveSchema(Schema.Type.Double, props);
+                case "bytes":
+                    return new PrimitiveSchema(Schema.Type.Bytes, props);
+                case "string":
+                    return new PrimitiveSchema(Schema.Type.String, props);
+                default:
+                    return null;
+            }
+        }
+
+        /// <summary>
+        /// Writes primitive schema in JSON format
+        /// </summary>
+        /// <param name="w"></param>
+        /// <param name="names"></param>
+        /// <param name="encspace"></param>
+        protected internal override void WriteJson(JsonTextWriter w, SchemaNames names, string encspace)
+        {
+            w.WriteValue(Name);
+        }
+
+        /// <summary>
+        /// Checks if this schema can read data written by the given schema. Used for decoding data.
+        /// </summary>
+        /// <param name="writerSchema">writer schema</param>
+        /// <returns>true if this and writer schema are compatible based on the AVRO specification, false otherwise</returns>
+        public override bool CanRead(Schema writerSchema)
+        {
+            if (writerSchema is UnionSchema || Tag == writerSchema.Tag) return true;
+            Type t = writerSchema.Tag;
+            switch (Tag)
+            {
+                case Type.Double:
+                    return t == Type.Int || t == Type.Long || t == Type.Float;
+                case Type.Float:
+                    return t == Type.Int || t == Type.Long;
+                case Type.Long:
+                    return t == Type.Int;
+                default:
+                    return false;
+            }
+        }
+
+        /// <summary>
+        /// Function to compare equality of two primitive schemas
+        /// </summary>
+        /// <param name="obj">other primitive schema</param>
+        /// <returns>true if the two schemas are equal, false otherwise</returns>
+        public override bool Equals(object obj)
+        {
+            if (this == obj) return true;
+
+            if (obj != null && obj is PrimitiveSchema)
+            {
+                var that = obj as PrimitiveSchema;
+                if (this.Tag == that.Tag)
+                    return areEqual(that.Props, this.Props);
+            }
+            return false;
+        }
+
+        /// <summary>
+        /// Hashcode function
+        /// </summary>
+        /// <returns></returns>
+        public override int GetHashCode()
+        {
+            return 13 * Tag.GetHashCode() + getHashCode(Props);
+        }
+    }
+}
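
A small sketch of the numeric promotion rules in CanRead above:

    var i = PrimitiveSchema.NewInstance("int");
    var d = PrimitiveSchema.NewInstance("double");
    Console.WriteLine(d.CanRead(i)); // true: int promotes to double
    Console.WriteLine(i.CanRead(d)); // false: no demotion
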
diff --git a/lang/csharp/src/apache/main/Schema/Property.cs b/lang/csharp/src/apache/main/Schema/Property.cs
new file mode 100644
index 0000000..3901623
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/Property.cs
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using Newtonsoft.Json.Linq;
+using Newtonsoft.Json;
+
+namespace Avro
+{
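+    /// <summary>
+    /// Map of custom properties (name/value pairs) attached to a schema or field
+    /// </summary>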
+    public class PropertyMap : Dictionary<string, string>
+    {
+        /// <summary>
+        /// Set of reserved schema property names; any property not defined in this set is a custom property and can be added to this map
+        /// </summary>
+        private static readonly HashSet<string> ReservedProps = new HashSet<string>() { "type", "name", "namespace", "fields", "items", "size", "symbols", "values", "aliases", "order", "doc", "default" };
+
+        /// <summary>
+        /// Parses the custom properties from the given JSON object and stores them
+        /// into the schema's list of custom properties
+        /// </summary>
+        /// <param name="jtok">JSON object to prase</param>
+        public void Parse(JToken jtok)
+        {
+            JObject jo = jtok as JObject;
+            foreach (JProperty prop in jo.Properties())
+            {
+                if (ReservedProps.Contains(prop.Name))
+                    continue;
+                if (!ContainsKey(prop.Name))
+                    Add(prop.Name, prop.Value.ToString());
+            }
+        }
+
+        /// <summary>
+        /// Adds a custom property to the schema
+        /// </summary>
+        /// <param name="key">custom property name</param>
+        /// <param name="value">custom property value</param>
+        public void Set(string key, string value)
+        {
+            if (ReservedProps.Contains(key))
+                throw new AvroException("Can't set reserved property: " + key);
+
+            string oldValue;
+            if (TryGetValue(key, out oldValue))
+            {
+                if (!oldValue.Equals(value)) throw new AvroException("Property cannot be overwritten: " + key);
+            }
+            else
+                Add(key, value);
+        }
+
+        /// <summary>
+        /// Writes the schema's custom properties in JSON format
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        public void WriteJson(JsonTextWriter writer)
+        {
+            foreach (KeyValuePair<string, string> kp in this)
+            {
+                if (ReservedProps.Contains(kp.Key)) continue;
+
+                writer.WritePropertyName(kp.Key);
+                writer.WriteRawValue(kp.Value);
+            }
+        }
+
+        /// <summary>
+        /// Function to compare equality of two PropertyMaps
+        /// </summary>
+        /// <param name="obj">other PropertyMap</param>
+        /// <returns>true if contents of the two maps are the same, false otherwise</returns>
+        public override bool Equals(object obj)
+        {
+            if (this == obj) return true;
+
+            if (obj != null && obj is PropertyMap)
+            {
+                var that = obj as PropertyMap;
+                if (this.Count != that.Count) 
+                    return false; 
+                foreach (KeyValuePair<string, string> pair in this) 
+                { 
+                    if (!that.ContainsKey(pair.Key))
+                        return false;
+                    if (!pair.Value.Equals(that[pair.Key]))
+                        return false; 
+                } 
+                return true;
+            }
+            return false;
+        }
+
+        /// <summary>
+        /// Hashcode function
+        /// </summary>
+        /// <returns></returns>
+        public override int GetHashCode()
+        {
+            int hash = this.Count;
+            int index = 1;
+            foreach (KeyValuePair<string, string> pair in this)
+                hash += (pair.Key.GetHashCode() + pair.Value.GetHashCode()) * index++;
+            return hash;
+        }
+    }
+}
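
A brief sketch of PropertyMap's overwrite rules above. Since WriteJson emits values with
WriteRawValue, stored values are treated as raw JSON fragments (so string values carry
their own quotes):

    var props = new PropertyMap();
    props.Set("ordering", "\"strict\"");  // ok: new custom property
    props.Set("ordering", "\"strict\"");  // ok: setting the same value again is a no-op
    // props.Set("ordering", "\"loose\"") would throw: value cannot be overwritten
    // props.Set("name", "\"x\"")         would throw: "name" is reserved
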
diff --git a/lang/csharp/src/apache/main/Schema/RecordSchema.cs b/lang/csharp/src/apache/main/Schema/RecordSchema.cs
new file mode 100644
index 0000000..ab2440f
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/RecordSchema.cs
@@ -0,0 +1,340 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Newtonsoft.Json.Linq;
+using Newtonsoft.Json;
+
+namespace Avro
+{
+    internal delegate T Function<T>();
+
+    /// <summary>
+    /// Class for record schemas
+    /// </summary>
+    public class RecordSchema : NamedSchema
+    {
+        /// <summary>
+        /// List of fields in the record
+        /// </summary>
+        public List<Field> Fields { get; private set; }
+
+        /// <summary>
+        /// Number of fields in the record
+        /// </summary>
+        public int Count { get { return Fields.Count; } }
+
+        /// <summary>
+        /// Map of field name and Field object for faster field lookups
+        /// </summary>
+        private readonly IDictionary<string, Field> fieldLookup;
+
+        private readonly IDictionary<string, Field> fieldAliasLookup;
+        private bool request;
+
+        /// <summary>
+        /// Static function to return new instance of the record schema
+        /// </summary>
+        /// <param name="type">type of record schema, either record or error</param>
+        /// <param name="jtok">JSON object for the record schema</param>
+        /// <param name="names">list of named schema already read</param>
+        /// <param name="encspace">enclosing namespace of the records schema</param>
+        /// <returns>new RecordSchema object</returns>
+        internal static RecordSchema NewInstance(Type type, JToken jtok, PropertyMap props, SchemaNames names, string encspace)
+        {
+            bool request = false;
+            JToken jfields = jtok["fields"];    // normal record
+            if (null == jfields)
+            {
+                jfields = jtok["request"];      // anonymous record from messages
+                if (null != jfields) request = true;
+            }
+            if (null == jfields)
+                throw new SchemaParseException("'fields' cannot be null for record");
+            if (jfields.Type != JTokenType.Array)
+                throw new SchemaParseException("'fields' not an array for record");
+
+            var name = GetName(jtok, encspace);
+            var aliases = NamedSchema.GetAliases(jtok, name.Space, name.EncSpace);
+            var fields = new List<Field>();
+            var fieldMap = new Dictionary<string, Field>();
+            var fieldAliasMap = new Dictionary<string, Field>();
+            var result = new RecordSchema(type, name, aliases, props, fields, request, fieldMap, fieldAliasMap, names);
+
+            int fieldPos = 0;
+            foreach (JObject jfield in jfields)
+            {
+                string fieldName = JsonHelper.GetRequiredString(jfield, "name");
+                Field field = createField(jfield, fieldPos++, names, name.Namespace);  // add record namespace for field look up
+                fields.Add(field);
+                addToFieldMap(fieldMap, fieldName, field);
+                addToFieldMap(fieldAliasMap, fieldName, field);
+
+                if (null != field.aliases)    // add aliases to field lookup map so reader function will find it when writer field name appears only as an alias on the reader field
+                    foreach (string alias in field.aliases)
+                        addToFieldMap(fieldAliasMap, alias, field);
+            }
+            return result;
+        }
+
+        /// <summary>
+        /// Constructor for the record schema
+        /// </summary>
+        /// <param name="type">type of record schema, either record or error</param>
+        /// <param name="name">name of the record schema</param>
+        /// <param name="aliases">list of aliases for the record name</param>
+        /// <param name="fields">list of fields for the record</param>
+        /// <param name="request">true if this is an anonymous record with 'request' instead of 'fields'</param>
+        /// <param name="fieldMap">map of field names and field objects</param>
+        /// <param name="names">list of named schema already read</param>
+        private RecordSchema(Type type, SchemaName name, IList<SchemaName> aliases,  PropertyMap props, 
+                                List<Field> fields, bool request, IDictionary<string, Field> fieldMap, 
+                                IDictionary<string, Field> fieldAliasMap, SchemaNames names)
+                                : base(type, name, aliases, props, names)
+        {
+            if (!request && null == name.Name) throw new SchemaParseException("name cannot be null for record schema.");
+            this.Fields = fields;
+            this.request = request;
+            this.fieldLookup = fieldMap;
+            this.fieldAliasLookup = fieldAliasMap;
+        }
+
+        /// <summary>
+        /// Creates a new field for the record
+        /// </summary>
+        /// <param name="jfield">JSON object for the field</param>
+        /// <param name="pos">position number of the field</param>
+        /// <param name="names">list of named schemas already read</param>
+        /// <param name="encspace">enclosing namespace of the records schema</param>
+        /// <returns>new Field object</returns>
+        private static Field createField(JToken jfield, int pos, SchemaNames names, string encspace)
+        {
+            var name = JsonHelper.GetRequiredString(jfield, "name");
+            var doc = JsonHelper.GetOptionalString(jfield, "doc");
+
+            var jorder = JsonHelper.GetOptionalString(jfield, "order");
+            Field.SortOrder sortorder = Field.SortOrder.ignore;
+            if (null != jorder)
+                sortorder = (Field.SortOrder) Enum.Parse(typeof(Field.SortOrder), jorder);
+
+            var aliases = Field.GetAliases(jfield);
+            var props = Schema.GetProperties(jfield);
+            var defaultValue = jfield["default"];
+
+            JToken jtype = jfield["type"];
+            if (null == jtype) 
+                throw new SchemaParseException("'type' was not found for field: " + name);
+            var schema = Schema.ParseJson(jtype, names, encspace);
+
+            return new Field(schema, name, aliases, pos, doc, defaultValue, sortorder, props);
+        }
+
+        private static void addToFieldMap(Dictionary<string, Field> map, string name, Field field)
+        {
+            if (map.ContainsKey(name))
+                throw new SchemaParseException("field or alias " + name + " is a duplicate name");
+            map.Add(name, field);
+        }
+
+        /// <summary>
+        /// Returns the field with the given name.
+        /// </summary>
+        /// <param name="name">field name</param>
+        /// <returns>Field object</returns>
+        public Field this[string name]
+        {
+            get
+            {
+                if (string.IsNullOrEmpty(name)) throw new ArgumentNullException("name");
+                Field field;
+                return (fieldLookup.TryGetValue(name, out field)) ? field : null;
+            }
+        }
+
+        /// <summary>
+        /// Returns true if and only if the record contains a field by the given name.
+        /// </summary>
+        /// <param name="fieldName">The name of the field</param>
+        /// <returns>true if the field exists, false otherwise</returns>
+        public bool Contains(string fieldName)
+        {
+            return fieldLookup.ContainsKey(fieldName);
+        }
+
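+        /// <summary>
+        /// Tries to look up the field with the given name
+        /// </summary>
+        /// <param name="fieldName">field name</param>
+        /// <param name="field">set to the matching field, if found</param>
+        /// <returns>true if the field exists, false otherwise</returns>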
+        public bool TryGetField(string fieldName, out Field field)
+        {
+            return fieldLookup.TryGetValue(fieldName, out field);
+        }
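+
+        /// <summary>
+        /// Tries to look up the field whose name or alias matches the given name
+        /// </summary>
+        /// <param name="fieldName">field name or alias</param>
+        /// <param name="field">set to the matching field, if found</param>
+        /// <returns>true if a matching field exists, false otherwise</returns>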
+        public bool TryGetFieldAlias(string fieldName, out Field field)
+        {
+            return fieldAliasLookup.TryGetValue(fieldName, out field);
+        }
+
+        /// <summary>
+        /// Returns an enumerator which enumerates over the fields of this record schema
+        /// </summary>
+        /// <returns>Enumerator over the fields in the order of their definition</returns>
+        public IEnumerator<Field> GetEnumerator()
+        {
+            return Fields.GetEnumerator();
+        }
+
+        /// <summary>
+        /// Writes the record schema in JSON format
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        /// <param name="names">list of named schemas already written</param>
+        /// <param name="encspace">enclosing namespace of the record schema</param>
+        protected internal override void WriteJsonFields(Newtonsoft.Json.JsonTextWriter writer, SchemaNames names, string encspace)
+        {
+            base.WriteJsonFields(writer, names, encspace);
+
+            // we allow reading of empty fields, so writing records with empty fields is allowed as well
+            if (request)
+                writer.WritePropertyName("request");
+            else
+                writer.WritePropertyName("fields");
+            writer.WriteStartArray();
+
+            if (null != this.Fields && this.Fields.Count > 0)
+            {
+                foreach (Field field in this)
+                    field.writeJson(writer, names, this.Namespace); // use the namespace of the record for the fields
+            }
+            writer.WriteEndArray();
+        }
+
+        /// <summary>
+        /// Compares equality of two record schemas
+        /// </summary>
+        /// <param name="obj">record schema to compare against this schema</param>
+        /// <returns>true if the two schemas are equal, false otherwise</returns>
+        public override bool Equals(object obj)
+        {
+            if (obj == this) return true;
+            if (obj != null && obj is RecordSchema)
+            {
+                RecordSchema that = obj as RecordSchema;
+                return protect(() => true, () =>
+                {
+                    if (this.SchemaName.Equals(that.SchemaName) && this.Count == that.Count)
+                    {
+                        for (int i = 0; i < Fields.Count; i++) if (!Fields[i].Equals(that.Fields[i])) return false;
+                        return areEqual(that.Props, this.Props);
+                    }
+                    return false;
+                }, that);
+            }
+            return false;
+        }
+
+        /// <summary>
+        /// Hash code function
+        /// </summary>
+        /// <returns></returns>
+        public override int GetHashCode()
+        {
+            return protect(() => 0, () =>
+            {
+                int result = SchemaName.GetHashCode();
+                foreach (Field f in Fields) result += 29 * f.GetHashCode();
+                result += getHashCode(Props);
+                return result;
+            }, this);
+        }
+
+        /// <summary>
+        /// Checks if this schema can read data written by the given schema. Used for decoding data.
+        /// </summary>
+        /// <param name="writerSchema">writer schema</param>
+        /// <returns>true if this and writer schema are compatible based on the AVRO specification, false otherwise</returns>
+        public override bool CanRead(Schema writerSchema)
+        {
+            if ((writerSchema.Tag != Type.Record) && (writerSchema.Tag != Type.Error)) return false;
+
+            RecordSchema that = writerSchema as RecordSchema;
+            return protect(() => true, () =>
+            {
+                if (!that.SchemaName.Equals(SchemaName))
+                    if (!InAliases(that.SchemaName)) 
+                        return false;
+
+                foreach (Field f in this)
+                {
+                    Field f2 = that[f.Name];
+                    if (null == f2) // reader field not in writer field, check aliases of reader field if any match with a writer field
+                        if (null != f.aliases)
+                            foreach (string alias in f.aliases)
+                            {
+                                f2 = that[alias];
+                                if (null != f2) break;
+                            }
+
+                    if (f2 == null && f.DefaultValue != null)
+                        continue;         // Writer field missing, reader has default.
+
+                    if (f2 != null && f.Schema.CanRead(f2.Schema)) continue;    // Both fields exist and are compatible.
+                    return false;
+                }
+                return true;
+            }, that);
+        }
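+        // Illustrative sketch, not part of upstream: given the resolution rules above,
+        // a reader record may add fields that carry defaults for data written without them.
+        //   var writer = Schema.Parse("{\"type\":\"record\",\"name\":\"R\",\"fields\":"
+        //       + "[{\"name\":\"a\",\"type\":\"int\"}]}");
+        //   var reader = Schema.Parse("{\"type\":\"record\",\"name\":\"R\",\"fields\":"
+        //       + "[{\"name\":\"a\",\"type\":\"int\"},"
+        //       + "{\"name\":\"b\",\"type\":\"string\",\"default\":\"x\"}]}");
+        //   // reader.CanRead(writer) == true: 'a' matches, 'b' falls back to its default.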
+
+        private class RecordSchemaPair
+        {
+            public readonly RecordSchema first;
+            public readonly RecordSchema second;
+
+            public RecordSchemaPair(RecordSchema first, RecordSchema second)
+            {
+                this.first = first;
+                this.second = second;
+            }
+        }
+
+        [ThreadStatic]
+        private static List<RecordSchemaPair> seen;
+
+        /**
+         * Guards against infinite recursion when the schema is recursive. We check a thread-local
+         * list to see if we have already visited this pair of schemas; if so, we execute the
+         * bypass function, otherwise we execute the main function. Before executing the main
+         * function, we add a marker so that we can detect a recursive re-entry into this pair.
+         * 
+         * The infinite loop happens in the ToString(), Equals() and GetHashCode() methods.
+         * Though it does not happen for CanRead() because of the current implementation of
+         * UnionSchema's CanRead(), it could potentially happen.
+         * We do a linear search for the marker as we don't expect the list to be very long.
+         */
+        private T protect<T>(Function<T> bypass, Function<T> main, RecordSchema that)
+        {
+            if (seen == null) 
+                seen = new List<RecordSchemaPair>();
+
+            else if (seen.Find((RecordSchemaPair rs) => rs.first == this && rs.second == that) != null) 
+                return bypass();
+
+            RecordSchemaPair p = new RecordSchemaPair(this, that);
+            seen.Add(p);
+            try { return main(); }
+            finally { seen.Remove(p); }
+        }
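+        // Illustrative sketch, not part of upstream: without the seen-pair marker, a
+        // self-referential schema such as
+        //   {"type":"record","name":"Node","fields":
+        //     [{"name":"next","type":["null","Node"]}]}
+        // would make Equals()/GetHashCode() recurse through 'next' forever; protect()
+        // detects the (this, that) pair on re-entry and returns bypass() instead.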
+
+    }
+}
diff --git a/lang/csharp/src/apache/main/Schema/Schema.cs b/lang/csharp/src/apache/main/Schema/Schema.cs
new file mode 100644
index 0000000..fc0e237
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/Schema.cs
@@ -0,0 +1,306 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Newtonsoft.Json.Linq;
+using Newtonsoft.Json;
+
+namespace Avro
+{
+    /// <summary>
+    /// Base class for all schema types
+    /// </summary>
+    public abstract class Schema
+    {
+        /// <summary>
+        /// Enum for schema types
+        /// </summary>
+        public enum Type
+        {
+            Null,
+            Boolean,
+            Int,
+            Long,
+            Float,
+            Double,
+            Bytes,
+            String,
+            Record,
+            Enumeration,
+            Array,
+            Map,
+            Union,
+            Fixed,
+            Error
+        }
+
+        /// <summary>
+        /// Schema type property
+        /// </summary>
+        public Type Tag { get; private set; }
+
+        /// <summary>
+        /// Additional JSON attributes apart from those defined in the AVRO spec
+        /// </summary>
+        internal PropertyMap Props { get; private set; }
+
+        /// <summary>
+        /// Constructor for schema class
+        /// </summary>
+        /// <param name="type"></param>
+        protected Schema(Type type, PropertyMap props)
+        {
+            this.Tag = type;
+            this.Props = props;
+        }
+
+        /// <summary>
+        /// The name of this schema. If this is a named schema such as an enum, it returns the fully qualified
+        /// name for the schema. For other schemas, it returns the type of the schema.
+        /// </summary>
+        public abstract string Name { get; }
+
+        /// <summary>
+        /// Static method to return a new instance of a schema object
+        /// </summary>
+        /// <param name="jtok">JSON object</param>
+        /// <param name="names">list of named schemas already read</param>
+        /// <param name="encspace">enclosing namespace of the schema</param>
+        /// <returns>new Schema object</returns>
+        internal static Schema ParseJson(JToken jtok, SchemaNames names, string encspace)
+        {
+            if (null == jtok) throw new ArgumentNullException("jtok", "jtok cannot be null.");
+            
+            if (jtok.Type == JTokenType.String) // primitive schema with no 'type' property or primitive or named type of a record field
+            {
+                string value = (string)jtok;
+
+                PrimitiveSchema ps = PrimitiveSchema.NewInstance(value);
+                if (null != ps) return ps;
+
+                NamedSchema schema = null;
+                if (names.TryGetValue(value, null, encspace, out schema)) return schema;
+
+                throw new SchemaParseException("Undefined name: " + value);
+            }
+
+            if (jtok is JArray) // union schema with no 'type' property or union type for a record field
+                return UnionSchema.NewInstance(jtok as JArray, null, names, encspace);
+
+            if (jtok is JObject) // JSON object enclosed in braces; it must have a 'type' property
+            {
+                JObject jo = jtok as JObject;
+
+                JToken jtype = jo["type"];
+                if (null == jtype)
+                    throw new SchemaParseException("Property type is required");
+
+                var props = Schema.GetProperties(jtok);
+
+                if (jtype.Type == JTokenType.String)
+                {
+                    string type = (string)jtype;
+
+                    if (type.Equals("array")) 
+                        return ArraySchema.NewInstance(jtok, props, names, encspace);
+                    if (type.Equals("map"))
+                        return MapSchema.NewInstance(jtok, props, names, encspace);
+                    
+                    Schema schema = PrimitiveSchema.NewInstance(type, props);
+                    if (null != schema) return schema;
+
+                    return NamedSchema.NewInstance(jo, props, names, encspace);
+                }
+                else if (jtype.Type == JTokenType.Array)
+                    return UnionSchema.NewInstance(jtype as JArray, props, names, encspace);
+            }
+            throw new AvroTypeException("Invalid JSON for schema: " + jtok);
+        }
+
+        /// <summary>
+        /// Parses a given JSON string to create a new schema object
+        /// </summary>
+        /// <param name="json">JSON string</param>
+        /// <returns>new Schema object</returns>
+        public static Schema Parse(string json)
+        {
+            if (string.IsNullOrEmpty(json)) throw new ArgumentNullException("json", "json cannot be null.");
+            return Parse(json.Trim(), new SchemaNames(), null); // standalone schema, so no enclosing namespace
+        }
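+        // Usage sketch, not part of upstream: parsing a standalone schema string.
+        //   Schema s = Schema.Parse("{\"type\":\"array\",\"items\":\"string\"}");
+        //   // s.Tag == Type.Array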
+
+        /// <summary>
+        /// Parses a JSON string to create a new schema object
+        /// </summary>
+        /// <param name="json">JSON string</param>
+        /// <param name="names">list of named schemas already read</param>
+        /// <param name="encspace">enclosing namespace of the schema</param>
+        /// <returns>new Schema object</returns>
+        internal static Schema Parse(string json, SchemaNames names, string encspace)
+        {
+            Schema sc = PrimitiveSchema.NewInstance(json);
+            if (null != sc) return sc;
+
+            try
+            {
+                bool isArray = json.StartsWith("[") && json.EndsWith("]");
+                JContainer j = isArray ? (JContainer)JArray.Parse(json) : (JContainer)JObject.Parse(json);
+
+                return ParseJson(j, names, encspace);
+            }
+            catch (Newtonsoft.Json.JsonSerializationException ex)
+            {
+                throw new SchemaParseException("Could not parse. " + ex.Message + Environment.NewLine + json);
+            }
+        }
+
+        /// <summary>
+        /// Static function to parse custom properties (not defined in the Avro spec) from the given JSON object
+        /// </summary>
+        /// <param name="jtok">JSON object to parse</param>
+        /// <returns>Property map if custom properties were found, null if no custom properties found</returns>
+        internal static PropertyMap GetProperties(JToken jtok)
+        {
+            var props = new PropertyMap();
+            props.Parse(jtok);
+            if (props.Count > 0)
+                return props;
+            else
+                return null;
+        }
+
+        /// <summary>
+        /// Returns the canonical JSON representation of this schema.
+        /// </summary>
+        /// <returns>The canonical JSON representation of this schema.</returns>
+        public override string ToString()
+        {
+            System.IO.StringWriter sw = new System.IO.StringWriter();
+            Newtonsoft.Json.JsonTextWriter writer = new Newtonsoft.Json.JsonTextWriter(sw);
+
+            if (this is PrimitiveSchema || this is UnionSchema)
+            {
+                writer.WriteStartObject();
+                writer.WritePropertyName("type");
+            }
+
+            WriteJson(writer, new SchemaNames(), null); // standalone schema, so no enclosing namespace
+
+            if (this is PrimitiveSchema || this is UnionSchema)
+                writer.WriteEndObject();
+
+            return sw.ToString();
+        }
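+        // Illustrative sketch, not part of upstream: primitives and unions carry no
+        // JSON object of their own, so ToString() supplies the {"type": ...} wrapper,
+        // e.g. the schema for "int" renders as {"type":"int"}.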
+
+        /// <summary>
+        /// Writes opening { and 'type' property 
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        private void writeStartObject(JsonTextWriter writer)
+        {
+            writer.WriteStartObject();
+            writer.WritePropertyName("type");
+            writer.WriteValue(GetTypeString(this.Tag));
+        }
+
+        /// <summary>
+        /// Returns symbol name for the given schema type
+        /// </summary>
+        /// <param name="type">schema type</param>
+        /// <returns>symbol name</returns>
+        public static string GetTypeString(Type type)
+        {
+            if (type != Type.Enumeration) return type.ToString().ToLower();
+            return "enum";
+        }
+
+        /// <summary>
+        /// Default implementation for writing schema properties in JSON format
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        /// <param name="names">list of named schemas already written</param>
+        /// <param name="encspace">enclosing namespace of the schema</param>
+        protected internal virtual void WriteJsonFields(JsonTextWriter writer, SchemaNames names, string encspace)
+        {
+        }
+
+        /// <summary>
+        /// Writes schema object in JSON format
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        /// <param name="names">list of named schemas already written</param>
+        /// <param name="encspace">enclosing namespace of the schema</param>
+        protected internal virtual void WriteJson(JsonTextWriter writer, SchemaNames names, string encspace)
+        {
+            writeStartObject(writer);
+            WriteJsonFields(writer, names, encspace);
+            if (null != this.Props) Props.WriteJson(writer);
+            writer.WriteEndObject();
+        }
+
+        /// <summary>
+        /// Returns the schema's custom property value given the property name
+        /// </summary>
+        /// <param name="key">custom property name</param>
+        /// <returns>custom property value</returns>
+        public string GetProperty(string key)
+        {
+            if (null == this.Props) return null;
+            string v;
+            return (this.Props.TryGetValue(key, out v)) ? v : null;
+        }
+
+        /// <summary>
+        /// Hash code function
+        /// </summary>
+        /// <returns></returns>
+        public override int GetHashCode()
+        {
+            return Tag.GetHashCode() + getHashCode(Props);
+        }
+
+        /// <summary>
+        /// Returns true if and only if data written using writerSchema can be read using the current schema
+        /// according to the Avro resolution rules.
+        /// </summary>
+        /// <param name="writerSchema">The writer's schema to match against.</param>
+        /// <returns>True if and only if the current schema matches the writer's.</returns>
+        public virtual bool CanRead(Schema writerSchema) { return Tag == writerSchema.Tag; }
+
+        /// <summary>
+        /// Compares two objects, null is equal to null
+        /// </summary>
+        /// <param name="o1">first object</param>
+        /// <param name="o2">second object</param>
+        /// <returns>true if two objects are equal, false otherwise</returns>
+        protected static bool areEqual(object o1, object o2)
+        {
+            return o1 == null ? o2 == null : o1.Equals(o2);
+        }
+
+        /// <summary>
+        /// Hash code helper function
+        /// </summary>
+        /// <param name="obj"></param>
+        /// <returns></returns>
+        protected static int getHashCode(object obj)
+        {
+            return obj == null ? 0 : obj.GetHashCode();
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/Schema/SchemaName.cs b/lang/csharp/src/apache/main/Schema/SchemaName.cs
new file mode 100644
index 0000000..dda77e6
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/SchemaName.cs
@@ -0,0 +1,222 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Avro
+{
+    /// <summary>
+    /// Class to store schema name, namespace and enclosing namespace
+    /// </summary>
+    public class SchemaName
+    {
+        /// <summary>
+        /// Name of the schema
+        /// </summary>
+        public String Name { get; private set; }
+
+        /// <summary>
+        /// Namespace specified within the schema
+        /// </summary>
+        public String Space { get; private set; }
+
+        /// <summary>
+        /// Namespace from the most tightly enclosing schema
+        /// </summary>
+        public String EncSpace { get; private set; }
+
+        /// <summary>
+        /// Namespace.Name of the schema
+        /// </summary>
+        public String Fullname { get { return string.IsNullOrEmpty(Namespace) ? this.Name : Namespace + "." + this.Name; } }
+
+        /// <summary>
+        /// Namespace of the schema
+        /// </summary>
+        public String Namespace { get { return string.IsNullOrEmpty(this.Space) ? this.EncSpace : this.Space; } } 
+
+        /// <summary>
+        /// Constructor for SchemaName
+        /// </summary>
+        /// <param name="name">name of the schema</param>
+        /// <param name="space">namespace of the schema</param>
+        /// <param name="encspace">enclosing namespace of the schema</param>
+        public SchemaName(String name, String space, String encspace)
+        {
+            if (name == null)
+            {                         // anonymous
+                this.Name = this.Space = null;
+                this.EncSpace = encspace;   // need to save enclosing namespace for anonymous types, so named types within the anonymous type can be resolved
+            }
+            else if (!name.Contains("."))
+            {                          // unqualified name
+                this.Space = space;    // use default space
+                this.Name = name;
+                this.EncSpace = encspace;
+            }
+            else
+            {
+                string[] parts = name.Split('.');
+                this.Space = string.Join(".", parts, 0, parts.Length - 1);
+                this.Name = parts[parts.Length - 1];
+                this.EncSpace = encspace;
+            }
+        }
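+        // Illustrative sketch, not part of upstream ("com.acme" is a placeholder):
+        //   new SchemaName("com.acme.Foo", null, null)  // Space="com.acme", Name="Foo"
+        //   new SchemaName("Foo", "com.acme", null)     // unqualified; uses the default space
+        //   new SchemaName("Foo", null, "com.acme")     // Namespace falls back to EncSpace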
+
+        /// <summary>
+        /// Returns the full name of the schema
+        /// </summary>
+        /// <returns></returns>
+        public override string ToString()
+        {
+            return Fullname;
+        }
+
+        /// <summary>
+        /// Writes the schema name in JSON format
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        /// <param name="names">list of named schemas already written</param>
+        /// <param name="encspace">enclosing namespace of the schema</param>
+        internal void WriteJson(Newtonsoft.Json.JsonTextWriter writer, SchemaNames names, string encspace)
+        {
+            if (null != this.Name)  // write only if not anonymous
+            {
+                JsonHelper.writeIfNotNullOrEmpty(writer, "name", this.Name);
+                if (!String.IsNullOrEmpty(this.Space))
+                    JsonHelper.writeIfNotNullOrEmpty(writer, "namespace", this.Space);
+                else if (!String.IsNullOrEmpty(this.EncSpace)) // need to put enclosing name space for code generated classes
+                    JsonHelper.writeIfNotNullOrEmpty(writer, "namespace", this.EncSpace);
+            }
+        }
+
+        /// <summary>
+        /// Compares two schema names
+        /// </summary>
+        /// <param name="obj">SchameName object to compare against this object</param>
+        /// <returns>true or false</returns>
+        public override bool Equals(Object obj)
+        {
+            if (obj == this) return true;
+            if (obj != null && obj is SchemaName)
+            {
+                SchemaName that = (SchemaName)obj;
+                return areEqual(that.Name, Name) && areEqual(that.Namespace, Namespace);
+            }
+            return false;
+        }
+
+        /// <summary>
+        /// Compares two objects
+        /// </summary>
+        /// <param name="obj1">first object</param>
+        /// <param name="obj2">second object</param>
+        /// <returns>true or false</returns>
+        private static bool areEqual(object obj1, object obj2)
+        {
+            return obj1 == null ? obj2 == null : obj1.Equals(obj2);
+        }
+    
+        public override int GetHashCode()
+        {
+            return string.IsNullOrEmpty(Fullname) ? 0 : 29 * Fullname.GetHashCode();
+        }
+    }
+
+    /// <summary>
+    /// A class that contains a list of named schemas. This is used when reading or writing a schema/protocol.
+    /// This prevents reading and writing of duplicate schema definitions within a protocol or schema file
+    /// </summary>
+    public class SchemaNames
+    {
+        /// <summary>
+        /// Map of schema name and named schema objects
+        /// </summary>
+        public IDictionary<SchemaName, NamedSchema> Names { get; private set; }
+
+        /// <summary>
+        /// Constructor
+        /// </summary>
+        public SchemaNames()
+        {
+            Names = new Dictionary<SchemaName, NamedSchema>();
+        }
+
+        /// <summary>
+        /// Checks if given name is in the map
+        /// </summary>
+        /// <param name="name">schema name</param>
+        /// <returns>true or false</returns>
+        public bool Contains(SchemaName name)
+        {
+            return Names.ContainsKey(name);
+        }
+
+        /// <summary>
+        /// Adds a schema name to the map if it doesn't exist yet
+        /// </summary>
+        /// <param name="name">schema name</param>
+        /// <param name="schema">schema object</param>
+        /// <returns>true if schema was added to the list, false if schema is already in the list</returns>
+        public bool Add(SchemaName name, NamedSchema schema)
+        {
+            if (Names.ContainsKey(name))
+                return false;
+
+            Names.Add(name, schema);
+            return true;
+        }
+
+        /// <summary>
+        /// Adds a named schema to the list
+        /// </summary>
+        /// <param name="schema">schema object</param>
+        /// <returns>true if schema was added to the list, false if schema is already in the list</returns>
+        public bool Add(NamedSchema schema)
+        {
+            SchemaName name = schema.SchemaName;
+            return Add(name, schema);
+        }
+
+        /// <summary>
+        /// Tries to get the value for the given name fields
+        /// </summary>
+        /// <param name="name">name of the schema</param>
+        /// <param name="space">namespace of the schema</param>
+        /// <param name="encspace">enclosing namespace of the schema</param>
+        /// <param name="schema">schema object found</param>
+        /// <returns>true if name is found in the map, false otherwise</returns>
+        public bool TryGetValue(string name, string space, string encspace, out NamedSchema schema)
+        {
+            SchemaName schemaname = new SchemaName(name, space, encspace);
+            return Names.TryGetValue(schemaname, out schema);
+        }
+
+        /// <summary>
+        /// Returns the enumerator for the map
+        /// </summary>
+        /// <returns></returns>
+        public IEnumerator<KeyValuePair<SchemaName, NamedSchema>> GetEnumerator()
+        {
+            return Names.GetEnumerator();
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/Schema/SchemaNormalization.cs b/lang/csharp/src/apache/main/Schema/SchemaNormalization.cs
new file mode 100644
index 0000000..2c335a2
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/SchemaNormalization.cs
@@ -0,0 +1,256 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System.Collections.Generic;
+using System.Text;
+using System;
+
+namespace Avro
+{
+    /// <summary>
+    /// Collection of static methods for generating the canonical form of schemas.
+    /// </summary>
+    public static class SchemaNormalization
+    {
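+        /// <summary>
+        /// Seed value for the CRC-64-AVRO fingerprint; per the Avro spec this is also
+        /// the fingerprint of the empty byte sequence.
+        /// </summary>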
+        public static long Empty64 = -4513414715797952619;
+
+        /// <summary>
+        /// Parses a schema into its Parsing Canonical Form as defined by the Avro spec.
+        /// </summary>
+        /// <param name="s">Schema</param>
+        /// <returns>Parsing Canonical Form of the schema as defined by the Avro spec.</returns>
+        public static string ToParsingForm(Schema s)
+        {
+            IDictionary<string, string> env = new Dictionary<string, string>();
+            return Build(env, s, new StringBuilder()).ToString();
+        }
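+        // Usage sketch, not part of upstream: whitespace, attribute order and custom
+        // properties are normalized away, e.g.
+        //   ToParsingForm(Schema.Parse("{ \"type\" : \"string\" }"))  // yields "string"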
+
+        /// <summary>
+        /// <para>Returns a fingerprint of a string of bytes. This string is
+        /// presumed to contain a canonical form of a schema. The
+        /// algorithm used to compute the fingerprint is selected by the
+        /// argument <i>fpName</i>.
+        /// </para>
+        /// <para>If <i>fpName</i> equals the string
+        /// <code>"CRC-64-AVRO"</code>, then the result of <see cref="Fingerprint64(byte[])"/> is
+        /// returned in little-endian format.
+        /// </para>
+        /// <para>If <i>fpName</i> equals the string
+        /// <code>"MD5"</code>, then the standard MD5 algorithm is used.
+        /// </para>
+        /// <para>If <i>fpName</i> equals the string
+        /// <code>"SHA-256"</code>, then the standard SHA-256 algorithm is used.
+        /// </para>
+        /// <para>Otherwise, <i>fpName</i> is not recognized and an
+        /// <code>ArgumentException</code> is thrown.
+        /// </para>
+        /// <para>Recommended Avro practice dictates that
+        /// <code>"CRC-64-AVRO"</code> is used for 64-bit fingerprints,
+        /// <code>"MD5"</code> is used for 128-bit fingerprints, and
+        /// <code>"SHA-256"</code> is used for 256-bit fingerprints.
+        /// </para>
+        /// </summary>
+        /// <param name="fpName">Name of the hashing algorithm.</param>
+        /// <param name="data">Data to be hashed.</param>
+        /// <returns>Fingerprint</returns>
+        public static byte[] Fingerprint(string fpName, byte[] data)
+        {
+            switch (fpName)
+            {
+                case "CRC-64-AVRO":
+                    long fp = Fingerprint64(data);
+                    byte[] result = new byte[8];
+                    for (int i = 0; i < 8; i++)
+                    {
+                        result[i] = (byte) fp;
+                        fp >>= 8;
+                    }
+                    return result;
+                case "MD5":
+                    var md5 = System.Security.Cryptography.MD5.Create();
+                    return md5.ComputeHash(data);
+                case "SHA-256":
+                    var sha256 = System.Security.Cryptography.SHA256.Create();
+                    return sha256.ComputeHash(data);
+                default:
+                    throw new ArgumentException(string.Format("Unsupported fingerprint computation algorithm ({0})", fpName));
+            }
+        }
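+        // Usage sketch, not part of upstream: fingerprinting a canonical form.
+        //   byte[] fp = SchemaNormalization.Fingerprint("CRC-64-AVRO",
+        //       Encoding.UTF8.GetBytes(ToParsingForm(schema)));  // 8 bytes, little-endian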
+
+        /// <summary>
+        /// Returns <see cref="Fingerprint(string, byte[])"/> applied to the parsing canonical form of the supplied schema.
+        /// </summary>
+        /// <param name="fpName">Name of the hashing algorithm.</param>
+        /// <param name="s">Schema to be hashed.</param>
+        /// <returns>Fingerprint</returns>
+        public static byte[] ParsingFingerprint(string fpName, Schema s)
+        {
+            return Fingerprint(fpName, Encoding.UTF8.GetBytes(ToParsingForm(s)));
+        }
+
+        /// <summary>
+        /// Returns <see cref="Fingerprint64(byte[])"/> applied to the parsing canonical form of the supplied schema.
+        /// </summary>
+        /// <param name="s">Schema to be hashed.</param>
+        /// <returns>Fingerprint</returns>
+        public static long ParsingFingerprint64(Schema s)
+        {
+            return Fingerprint64(Encoding.UTF8.GetBytes(ToParsingForm(s)));
+        }
+
+        /// <summary>
+        /// Computes the 64-bit Rabin Fingerprint (as recommended in the Avro spec) of a byte string.
+        /// </summary>
+        /// <param name="data">Data to be hashed.</param>
+        /// <returns>Fingerprint</returns>
+        private static long Fingerprint64(byte[] data)
+        {
+            long result = Empty64;
+            foreach (var b in data)
+            {
+                result = ((long)(((ulong)result) >> 8)) ^ Fp64.FpTable[(int) (result ^ b) & 0xff];
+            }
+            return result;
+        }
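+        // The loop above is the standard table-driven form of a Rabin fingerprint:
+        // shift the running value right by one byte, then fold in the precomputed
+        // polynomial contribution of (low byte XOR input byte) from Fp64.FpTable.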
+
+        private static StringBuilder Build(IDictionary<string, string> env, Schema s, StringBuilder o)
+        {
+            bool firstTime = true;
+            Schema.Type st = s.Tag;
+            switch (st)
+            {
+                case Schema.Type.Union:
+                    UnionSchema us = s as UnionSchema;
+                    o.Append('[');
+                    foreach(Schema b in us.Schemas)
+                    {
+                        if (!firstTime)
+                        {
+                            o.Append(",");
+                        }
+                        else
+                        {
+                            firstTime = false;
+                        }
+                        Build(env, b, o);
+                    }
+                    return o.Append(']');
+
+                case Schema.Type.Array:
+                case Schema.Type.Map:
+                    o.Append("{\"type\":\"").Append(Schema.GetTypeString(s.Tag)).Append("\"");
+                    if (st == Schema.Type.Array)
+                    {
+                        ArraySchema arraySchema  = s as ArraySchema;
+                        Build(env, arraySchema.ItemSchema, o.Append(",\"items\":"));
+                    }
+                    else
+                    {
+                        MapSchema mapSchema = s as MapSchema;
+                        Build(env, mapSchema.ValueSchema, o.Append(",\"values\":"));
+                    }
+                    return o.Append("}");
+
+                case Schema.Type.Enumeration:
+                case Schema.Type.Fixed:
+                case Schema.Type.Record:
+                    NamedSchema namedSchema = s as NamedSchema;
+                    var name = namedSchema.Fullname;
+                    if (env.ContainsKey(name))
+                    {
+                        return o.Append(env[name]);
+                    }
+                    var qname = "\"" + name + "\"";
+                    env.Add(name, qname);
+                    o.Append("{\"name\":").Append(qname);
+                    o.Append(",\"type\":\"").Append(Schema.GetTypeString(s.Tag)).Append("\"");
+                    if (st == Schema.Type.Enumeration)
+                    {
+                        EnumSchema enumSchema = s as EnumSchema;
+                        o.Append(",\"symbols\":[");
+                        foreach (var enumSymbol in enumSchema.Symbols)
+                        {
+                            if (!firstTime)
+                            {
+                                o.Append(",");
+                            }
+                            else
+                            {
+                                firstTime = false;
+                            }
+                            o.Append("\"").Append(enumSymbol).Append("\"");
+                        }
+                        o.Append("]");
+                    }
+                    else if (st == Schema.Type.Fixed)
+                    {
+                        FixedSchema fixedSchema = s as FixedSchema;
+                        o.Append(",\"size\":").Append(fixedSchema.Size.ToString());
+                    }
+                    else  // st == Schema.Type.Record
+                    {
+                        RecordSchema recordSchema = s as RecordSchema;
+                        o.Append(",\"fields\":[");
+                        foreach (var field in recordSchema.Fields)
+                        {
+                            if (!firstTime)
+                            {
+                                o.Append(",");
+                            }
+                            else
+                            {
+                                firstTime = false;
+                            }
+                            o.Append("{\"name\":\"").Append(field.Name).Append("\"");
+                            Build(env, field.Schema, o.Append(",\"type\":")).Append("}");
+                        }
+                        o.Append("]");
+                    }
+                    return o.Append("}");
+
+                default:    //boolean, bytes, double, float, int, long, null, string
+                    return o.Append("\"").Append(s.Name).Append("\"");
+            }
+        }
+
+        private static class Fp64
+        {
+            private static readonly long[] fpTable = new long[256];
+
+            public static long[] FpTable
+            {
+                get { return fpTable; }
+            }
+
+            static Fp64()
+            {
+                for (int i = 0; i < 256; i++)
+                {
+                    long fp = i;
+                    for (int j = 0; j < 8; j++)
+                    {
+                        long mask = -(fp & 1L);
+                        fp = ((long) (((ulong) fp) >> 1)) ^ (Empty64 & mask);
+                    }
+                    FpTable[i] = fp;
+                }
+            }
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/Schema/SchemaParseException.cs b/lang/csharp/src/apache/main/Schema/SchemaParseException.cs
new file mode 100644
index 0000000..73859af
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/SchemaParseException.cs
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Avro
+{
+    public class SchemaParseException : AvroException
+    {
+        public SchemaParseException(string s)
+            : base(s)
+        {
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/Schema/UnionSchema.cs b/lang/csharp/src/apache/main/Schema/UnionSchema.cs
new file mode 100644
index 0000000..df2c37f
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/UnionSchema.cs
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Newtonsoft.Json.Linq;
+using Newtonsoft.Json;
+
+namespace Avro
+{
+    /// <summary>
+    /// Class for union schemas
+    /// </summary>
+    public class UnionSchema : UnnamedSchema
+    {
+        /// <summary>
+        /// List of schemas in the union
+        /// </summary>
+        public IList<Schema> Schemas { get; private set; }
+
+        /// <summary>
+        /// Count of schemas in the union
+        /// </summary>
+        public int Count { get { return Schemas.Count; } }
+
+        /// <summary>
+        /// Static function to return instance of the union schema
+        /// </summary>
+        /// <param name="jarr">JSON object for the union schema</param>
+        /// <param name="names">list of named schemas already read</param>
+        /// <param name="encspace">enclosing namespace of the schema</param>
+        /// <returns>new UnionSchema object</returns>
+        internal static UnionSchema NewInstance(JArray jarr, PropertyMap props, SchemaNames names, string encspace)
+        {
+            List<Schema> schemas = new List<Schema>();
+            IDictionary<string, string> uniqueSchemas = new Dictionary<string, string>();
+
+            foreach (JToken jvalue in jarr)
+            {
+                Schema unionType = Schema.ParseJson(jvalue, names, encspace);
+                if (null == unionType)
+                    throw new SchemaParseException("Invalid JSON in union" + jvalue.ToString());
+
+                string name = unionType.Name;
+                if (uniqueSchemas.ContainsKey(name))
+                    throw new SchemaParseException("Duplicate type in union: " + name);
+
+                uniqueSchemas.Add(name, name);
+                schemas.Add(unionType);
+            }
+
+            return new UnionSchema(schemas, props);
+        }
+
+        /// <summary>
+        /// Constructor for union schema
+        /// </summary>
+        /// <param name="schemas"></param>
+        private UnionSchema(List<Schema> schemas, PropertyMap props) : base(Type.Union, props)
+        {
+            if (schemas == null)
+                throw new ArgumentNullException("schemas");
+            this.Schemas = schemas;
+        }
+
+        /// <summary>
+        /// Returns the schema at the given branch.
+        /// </summary>
+        /// <param name="index">Index to the branch, starting with 0.</param>
+        /// <returns>The branch corresponding to the given index.</returns>
+        public Schema this[int index]
+        {
+            get
+            {
+                return Schemas[index];
+            }
+        }
+
+        /// <summary>
+        /// Writes union schema in JSON format
+        /// </summary>
+        /// <param name="writer">JSON writer</param>
+        /// <param name="names">list of named schemas already written</param>
+        /// <param name="encspace">enclosing namespace of the schema</param>
+        protected internal override void WriteJson(Newtonsoft.Json.JsonTextWriter writer, SchemaNames names, string encspace)
+        {
+            writer.WriteStartArray();
+            foreach (Schema schema in this.Schemas)
+                schema.WriteJson(writer, names, encspace);
+            writer.WriteEndArray();
+        }
+
+        /// <summary>
+        /// Returns the index of a branch that can read the data written by the given schema s.
+        /// </summary>
+        /// <param name="s">The schema to match the branches against.</param>
+        /// <returns>The index of the matching branch. If none matches, -1 is returned.</returns>
+        public int MatchingBranch(Schema s)
+        {
+            if (s is UnionSchema) throw new AvroException("Cannot find a match against union schema");
+            // Try exact match.
+            //for (int i = 0; i < Count; i++) if (Schemas[i].Equals(s)) return i; // removed this for performance's sake
+            for (int i = 0; i < Count; i++) if (Schemas[i].CanRead(s)) return i;
+            return -1;
+        }
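+        // Illustrative sketch, not part of upstream: resolving against a nullable union.
+        //   UnionSchema u = (UnionSchema)Schema.Parse("[\"null\",\"string\"]");
+        //   int branch = u.MatchingBranch(Schema.Parse("string"));  // 1 (the string branch)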
+
+        /// <summary>
+        /// Checks if this schema can read data written by the given schema. Used for decoding data.
+        /// </summary>
+        /// <param name="writerSchema">writer schema</param>
+        /// <returns>true if this and writer schema are compatible based on the AVRO specification, false otherwise</returns>
+        public override bool CanRead(Schema writerSchema)
+        {
+            return writerSchema.Tag == Schema.Type.Union || MatchingBranch(writerSchema) >= 0;
+        }
+
+        /// <summary>
+        /// Compares two union schema objects
+        /// </summary>
+        /// <param name="obj">union schema object to compare against this schema</param>
+        /// <returns>true if objects are equal, false otherwise</returns>
+        public override bool Equals(object obj)
+        {
+            if (obj == this) return true;
+            if (obj != null && obj is UnionSchema)
+            {
+                UnionSchema that = obj as UnionSchema;
+                if (that.Count == Count)
+                {
+                    for (int i = 0; i < Count; i++) if (!that[i].Equals(this[i])) return false;
+                    return areEqual(that.Props, this.Props);
+                }
+            }
+            return false;
+        }
+
+        /// <summary>
+        /// Hash code function
+        /// </summary>
+        /// <returns></returns>
+        public override int GetHashCode()
+        {
+            int result = 53;
+            foreach (Schema schema in Schemas) result += 89 * schema.GetHashCode();
+            result += getHashCode(Props);
+            return result;
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/Schema/UnnamedSchema.cs b/lang/csharp/src/apache/main/Schema/UnnamedSchema.cs
new file mode 100644
index 0000000..aebbb10
--- /dev/null
+++ b/lang/csharp/src/apache/main/Schema/UnnamedSchema.cs
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Avro
+{
+    /// <summary>
+    /// Base class for all unnamed schemas
+    /// </summary>
+    public abstract class UnnamedSchema : Schema
+    {
+        protected UnnamedSchema(Type type, PropertyMap props) : base(type, props)
+        {
+        }
+
+        public override string Name
+        {
+            get { return Tag.ToString().ToLower(); }
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/main/Specific/ObjectCreator.cs b/lang/csharp/src/apache/main/Specific/ObjectCreator.cs
new file mode 100644
index 0000000..5435adb
--- /dev/null
+++ b/lang/csharp/src/apache/main/Specific/ObjectCreator.cs
@@ -0,0 +1,321 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+using System.Reflection;
+using System.Reflection.Emit;
+
+namespace Avro.Specific
+{
+
+    public sealed class ObjectCreator
+    {
+        private static readonly ObjectCreator instance = new ObjectCreator();
+        public static ObjectCreator Instance { get { return instance; } }
+
+        /// <summary>
+        /// Generic dictionary type used for creating new map instances
+        /// </summary>
+        private Type GenericMapType = typeof(Dictionary<,>);
+
+        /// <summary>
+        /// Generic list type used for creating new array instances
+        /// </summary>
+        private Type GenericListType = typeof(List<>);
+
+        /// <summary>
+        /// Generic nullable type used for creating new nullable instances
+        /// </summary>
+        private Type GenericNullableType = typeof(Nullable<>);
+        
+        private readonly Assembly execAssembly;
+        private readonly Assembly entryAssembly;
+        private readonly bool diffAssembly;
+
+        public delegate object CtorDelegate();
+        private Type ctorType = typeof(CtorDelegate);
+        private Dictionary<NameCtorKey, CtorDelegate> ctors;
+
+        private ObjectCreator()
+        {
+            execAssembly = System.Reflection.Assembly.GetExecutingAssembly();
+            entryAssembly = System.Reflection.Assembly.GetEntryAssembly();
+            if (entryAssembly != null && execAssembly != entryAssembly) // entryAssembly returns null when running from NUnit
+                diffAssembly = true;
+
+
+            ctors = new Dictionary<NameCtorKey, CtorDelegate>();
+        }
+
+        public struct NameCtorKey : IEquatable<NameCtorKey>
+        {
+            public string name { get; private set; }
+            public Schema.Type type { get; private set; }
+            public NameCtorKey(string value1, Schema.Type value2)
+                : this()
+            {
+                name = value1;
+                type = value2;
+            }
+            public bool Equals(NameCtorKey other)
+            {
+                return Equals(other.name, name) && other.type == type;
+            }
+            public override bool Equals(object obj)
+            {
+                if (ReferenceEquals(null, obj))
+                    return false;
+                if (obj.GetType() != typeof(NameCtorKey))
+                    return false;
+                return Equals((NameCtorKey)obj);
+            }
+            public override int GetHashCode()
+            {
+                unchecked
+                {
+                    return ((name != null ? name.GetHashCode() : 0) * 397) ^ type.GetHashCode();
+                }
+            }
+            public static bool operator ==(NameCtorKey left, NameCtorKey right)
+            {
+                return left.Equals(right);
+            }
+            public static bool operator !=(NameCtorKey left, NameCtorKey right)
+            {
+                return !left.Equals(right);
+            }
+        }
+
+        /// <summary>
+        /// Find the type with the given name
+        /// </summary>
+        /// <param name="name">the object type to locate</param>
+        /// <param name="throwError">whether or not to throw an error if the type wasn't found</param>
+        /// <returns>the object type, or <c>null</c> if not found</returns>
+        private Type FindType(string name, bool throwError) 
+        {
+            Type type;
+
+            // Modify provided type to ensure it can be discovered.
+            // This is mainly for Generics, and Nullables.
+            name = name.Replace("Nullable", "Nullable`1");
+            name = name.Replace("IList", "System.Collections.Generic.IList`1");
+            name = name.Replace("<", "[");
+            name = name.Replace(">", "]");
+
+            if (diffAssembly)
+            {
+                // entry assembly different from current assembly, try entry assembly first
+                type = entryAssembly.GetType(name);
+                if (type == null)   // now try current assembly and mscorlib
+                    type = Type.GetType(name);
+            }
+            else
+                type = Type.GetType(name);
+
+            Type[] types;
+
+            if (type == null) // type is still not found, need to loop through all loaded assemblies
+            {
+                Assembly[] assemblies = AppDomain.CurrentDomain.GetAssemblies();
+                foreach (Assembly assembly in assemblies)
+                {
+                    // Fix for Mono 3.0.10
+                    if (assembly.FullName.StartsWith("MonoDevelop.NUnit"))
+                        continue;
+
+                    types = assembly.GetTypes();
+
+                    // Change the search to look for Types by both NAME and FULLNAME
+                    foreach (Type t in types)
+                    {
+                        if (name == t.Name || name == t.FullName) type = t;
+                    }
+                    
+                    if (type != null)
+                        break;
+                }
+            }
+
+            if (null == type && throwError)
+            {
+                throw new AvroException("Unable to find type " + name + " in all loaded assemblies");
+            }
+
+            return type;
+        }
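+        // Illustrative sketch, not part of upstream: the rewrites above turn a
+        // code-generation-style name into a reflection-friendly one, e.g.
+        //   "IList<com.acme.Foo>" -> "System.Collections.Generic.IList`1[com.acme.Foo]"
+        //   "Nullable<Int32>"     -> "Nullable`1[Int32]"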
+
+
+        /// <summary>
+        /// Gets the type for the specified schema
+        /// </summary>
+        /// <param name="schema"></param>
+        /// <returns></returns>
+        public Type GetType(Schema schema)
+        {
+            switch(schema.Tag) {
+            case Schema.Type.Null:
+                break;
+            case Schema.Type.Boolean:
+                return typeof(bool);
+            case Schema.Type.Int:
+                return typeof(int);
+            case Schema.Type.Long:
+                return typeof(long);
+            case Schema.Type.Float:
+                return typeof(float);
+            case Schema.Type.Double:
+                return typeof(double);
+            case Schema.Type.Bytes:
+                return typeof(byte[]); 
+            case Schema.Type.String:
+                return typeof(string);
+            case Schema.Type.Union:
+                {
+                    UnionSchema unSchema = schema as UnionSchema;
+                    if (null != unSchema && unSchema.Count==2)
+                    {
+                        Schema s1 = unSchema.Schemas[0];
+                        Schema s2 = unSchema.Schemas[1];
+
+                        // Nullable ?
+                        Type itemType = null;
+                        if (s1.Tag == Schema.Type.Null)
+                        {
+                            itemType = GetType(s2);
+                        }
+                        else if (s2.Tag == Schema.Type.Null)
+                        {
+                            itemType = GetType(s1);
+                        }
+
+                        if (null != itemType ) 
+                        {
+                            if (itemType.IsValueType && !itemType.IsEnum)
+                            {
+                                try
+                                {
+                                    return GenericNullableType.MakeGenericType(new [] {itemType});
+                                }
+                                catch (Exception) { }
+                            }
+                            
+                            return itemType;
+                        }
+                    }
+
+                    return typeof(object);
+                }
+            case Schema.Type.Array: {
+                ArraySchema arrSchema = schema as ArraySchema;
+                Type itemSchema = GetType(arrSchema.ItemSchema);
+
+                return GenericListType.MakeGenericType(new [] {itemSchema}); }
+            case Schema.Type.Map: {
+                MapSchema mapSchema = schema as MapSchema;
+                Type itemSchema = GetType(mapSchema.ValueSchema);
+
+                return GenericMapType.MakeGenericType(new [] { typeof(string), itemSchema }); }
+            case Schema.Type.Enumeration:
+            case Schema.Type.Record:
+            case Schema.Type.Fixed:
+            case Schema.Type.Error: {
+                // Should all be named types
+                var named = schema as NamedSchema;
+                if (null != named) {
+                    return FindType(named.Fullname, true);
+                }
+                break; }
+            }
+
+            // Fallback
+            return FindType(schema.Name, true);
+        }
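+        // Illustrative sketch, not part of upstream: a two-branch union containing
+        // "null" maps to the CLR nullable of the other branch when it is a value type:
+        //   ["null","int"]    -> typeof(int?)    via GenericNullableType
+        //   ["null","string"] -> typeof(string)  (reference types stay as-is)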
+
+        /// <summary>
+        /// Gets the type of the specified type name
+        /// </summary>
+        /// <param name="name">name of the object to get type of</param>
+        /// <param name="schemaType">schema type for the object</param>
+        /// <returns>Type</returns>
+        public Type GetType(string name, Schema.Type schemaType)
+        {
+            Type type = FindType(name, true);
+
+            if (schemaType == Schema.Type.Map)
+            {
+                type = GenericMapType.MakeGenericType(new[] { typeof(string), type });
+            }
+            else if (schemaType == Schema.Type.Array)
+            {
+                type = GenericListType.MakeGenericType(new [] {type});
+            }
+
+            return type;
+        }
+
+        /// <summary>
+        /// Gets the default constructor for the specified type
+        /// </summary>
+        /// <param name="name">name of object for the type</param>
+        /// <param name="schemaType">schema type for the object</param>
+        /// <param name="type">type of the object</param>
+        /// <returns>Default constructor for the type</returns>
+        public CtorDelegate GetConstructor(string name, Schema.Type schemaType, Type type)
+        {
+            ConstructorInfo ctorInfo = type.GetConstructor(Type.EmptyTypes);
+            if (ctorInfo == null)
+                throw new AvroException("Class " + name + " has no default constructor");
+
+            DynamicMethod dynMethod = new DynamicMethod("DM$OBJ_FACTORY_" + name, typeof(object), null, type, true);
+            ILGenerator ilGen = dynMethod.GetILGenerator();
+            ilGen.Emit(OpCodes.Nop);
+            ilGen.Emit(OpCodes.Newobj, ctorInfo);
+            ilGen.Emit(OpCodes.Ret);
+
+            return (CtorDelegate)dynMethod.CreateDelegate(ctorType);
+        }
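+        // The emitted IL above is the body of "() => new T()": a parameterless
+        // DynamicMethod that invokes the default constructor and returns the new
+        // instance as object, avoiding Activator.CreateInstance overhead on each call.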
+
+        /// <summary>
+        /// Creates new instance of the given type
+        /// </summary>
+        /// <param name="name">fully qualified name of the type</param>
+        /// <param name="schemaType">type of schema</param>
+        /// <returns>new object of the given type</returns>
+        public object New(string name, Schema.Type schemaType)
+        {
+            NameCtorKey key = new NameCtorKey(name, schemaType);
+            
+            CtorDelegate ctor;
+            lock(ctors)
+            {
+                if (!ctors.TryGetValue(key, out ctor))
+                {
+                    Type type = GetType(name, schemaType);
+                    ctor = GetConstructor(name, schemaType, type);
+
+                    ctors.Add(key, ctor);
+                }
+            }
+            return ctor();
+        }
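+        // Usage sketch, not part of upstream; "com.acme.Foo" is a hypothetical
+        // generated class. The delegate is cached per (name, schema type), so repeated
+        // instantiation costs a dictionary lookup plus one delegate call.
+        //   object rec = ObjectCreator.Instance.New("com.acme.Foo", Schema.Type.Record);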
+    }
+}
diff --git a/lang/csharp/src/apache/main/Specific/SpecificDatumReader.cs b/lang/csharp/src/apache/main/Specific/SpecificDatumReader.cs
new file mode 100644
index 0000000..5611a9c
--- /dev/null
+++ b/lang/csharp/src/apache/main/Specific/SpecificDatumReader.cs
@@ -0,0 +1,232 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System.Collections;
+using Avro.Generic;
+using Avro.IO;
+
+namespace Avro.Specific
+{
+    /// <summary>
+    /// PreresolvingDatumReader for reading data into ISpecificRecord classes.
+    /// <see cref="PreresolvingDatumReader{T}">For more information about performance considerations for choosing this implementation</see>
+    /// </summary>
+    public class SpecificDatumReader<T> : PreresolvingDatumReader<T>
+    {
+        public SpecificDatumReader(Schema writerSchema, Schema readerSchema) : base(writerSchema, readerSchema)
+        {
+        }
+
+        protected override bool IsReusable(Schema.Type tag)
+        {
+            switch (tag)
+            {
+                case Schema.Type.Double:
+                case Schema.Type.Boolean:
+                case Schema.Type.Int:
+                case Schema.Type.Long:
+                case Schema.Type.Float:
+                case Schema.Type.Bytes:
+                case Schema.Type.Enumeration:
+                case Schema.Type.String:
+                case Schema.Type.Null:
+                    return false;
+            }
+            return true;
+        }
+
+        protected override ArrayAccess GetArrayAccess(ArraySchema readerSchema)
+        {
+            return new SpecificArrayAccess(readerSchema);
+        }
+
+        protected override EnumAccess GetEnumAccess(EnumSchema readerSchema)
+        {
+            return new SpecificEnumAccess();
+        }
+
+        protected override MapAccess GetMapAccess(MapSchema readerSchema)
+        {
+            return new SpecificMapAccess(readerSchema);
+        }
+
+        protected override RecordAccess GetRecordAccess(RecordSchema readerSchema)
+        {
+            if (readerSchema.Name == null)
+            {
+                // ipc support
+                return new GenericDatumReader<T>.GenericRecordAccess(readerSchema);
+            }
+            return new SpecificRecordAccess(readerSchema);
+        }
+
+        protected override FixedAccess GetFixedAccess(FixedSchema readerSchema)
+        {
+            return new SpecificFixedAccess(readerSchema);
+        }
+
+        private static ObjectCreator.CtorDelegate GetConstructor(string name, Schema.Type schemaType)
+        {
+            var creator = ObjectCreator.Instance;
+            return creator.GetConstructor(name, schemaType, creator.GetType(name, schemaType));
+        }
+
+        private class SpecificEnumAccess : EnumAccess
+        {
+            public object CreateEnum(object reuse, int ordinal)
+            {
+                return ordinal;
+            }
+        }
+
+        private class SpecificRecordAccess : RecordAccess
+        {
+            private ObjectCreator.CtorDelegate objCreator;
+
+            public SpecificRecordAccess(RecordSchema readerSchema)
+            {
+                objCreator = GetConstructor(readerSchema.Fullname, Schema.Type.Record);
+            }
+
+            public object CreateRecord(object reuse)
+            {
+                return reuse ?? objCreator();
+            }
+
+            public object GetField(object record, string fieldName, int fieldPos)
+            {
+                return ((ISpecificRecord)record).Get(fieldPos);
+            }
+
+            public void AddField(object record, string fieldName, int fieldPos, object fieldValue)
+            {
+                ((ISpecificRecord)record).Put(fieldPos, fieldValue);
+            }
+        }
+
+        private class SpecificFixedAccess : FixedAccess
+        {
+            private ObjectCreator.CtorDelegate objCreator;
+
+            public SpecificFixedAccess(FixedSchema readerSchema)
+            {
+                objCreator = GetConstructor(readerSchema.Fullname, Schema.Type.Fixed);
+            }
+
+            public object CreateFixed(object reuse)
+            {
+                return reuse ?? objCreator();
+            }
+
+            public byte[] GetFixedBuffer(object rec)
+            {
+                return ((SpecificFixed) rec).Value;
+            }
+        }
+
+        private class SpecificArrayAccess : ArrayAccess
+        {
+            private ObjectCreator.CtorDelegate objCreator;
+
+            public SpecificArrayAccess(ArraySchema readerSchema)
+            {
+                bool nEnum = false;
+                string type = Avro.CodeGen.getType(readerSchema, false, ref nEnum);
+                type = type.Remove(0, 6);              // remove IList<
+                type = type.Remove(type.Length - 1);   // remove >
+        
+                objCreator = GetConstructor(type, Schema.Type.Array);
+            }
+
+            public object Create(object reuse)
+            {
+                IList array;
+
+                if( reuse != null )
+                {
+                    array = reuse as IList;
+                    if( array == null )
+                        throw new AvroException( "array object does not implement non-generic IList" );
+                    // retaining existing behavior where array contents aren't reused
+                    // TODO: try to reuse contents?
+                    array.Clear();
+                }
+                else
+                    array = objCreator() as IList;
+                return array;
+            }
+
+            public void EnsureSize(ref object array, int targetSize)
+            {
+                // no action needed
+            }
+
+            public void Resize(ref object array, int targetSize)
+            {
+                // no action needed
+            }
+
+            public void AddElements( object array, int elements, int index, ReadItem itemReader, Decoder decoder, bool reuse )
+            {
+                var list = (IList)array;
+                for (int i=0; i < elements; i++)
+                {
+                    list.Add( itemReader( null, decoder ) );
+                }
+            }
+        }
+
+        private class SpecificMapAccess : MapAccess
+        {
+            private ObjectCreator.CtorDelegate objCreator;
+
+            public SpecificMapAccess(MapSchema readerSchema)
+            {
+                bool nEnum = false;
+                string type = Avro.CodeGen.getType(readerSchema, false, ref nEnum);
+                type = type.Remove(0, 19);             // remove IDictionary<string,
+                type = type.Remove(type.Length - 1);   // remove >
+        
+                objCreator = GetConstructor(type, Schema.Type.Map);
+            }
+
+            public object Create(object reuse)
+            {
+                IDictionary map;
+                if (reuse != null)
+                {
+                    map = reuse as IDictionary;
+                    if (map == null)
+                        throw new AvroException("map object does not implement non-generic IList");
+
+                    map.Clear();
+                }
+                else
+                    map = objCreator() as System.Collections.IDictionary;
+                return map;
+            }
+
+            public void AddElements(object mapObj, int elements, ReadItem itemReader, Decoder decoder, bool reuse)
+            {
+                var map = ((IDictionary)mapObj);
+                for (int i = 0; i < elements; i++)
+                {
+                    var key = decoder.ReadString();
+                    map[key] = itemReader(null, decoder);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
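
A hedged sketch of driving this reader directly over a raw binary stream; com.foo.Simple and its _SCHEMA field come from the generated perf classes later in this commit, and writer and reader schemas are taken to be identical, so no resolution occurs:

    using System.IO;
    using Avro.IO;
    using Avro.Specific;
    using com.foo;

    static Simple ReadOne(Stream input)
    {
        // Writer schema is pre-resolved against the reader schema once, at construction.
        var reader = new SpecificDatumReader<Simple>(Simple._SCHEMA, Simple._SCHEMA);
        return reader.Read(default(Simple), new BinaryDecoder(input));
    }
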
diff --git a/lang/csharp/src/apache/main/Specific/SpecificDatumWriter.cs b/lang/csharp/src/apache/main/Specific/SpecificDatumWriter.cs
new file mode 100644
index 0000000..a359938
--- /dev/null
+++ b/lang/csharp/src/apache/main/Specific/SpecificDatumWriter.cs
@@ -0,0 +1,172 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections;
+using Avro.Generic;
+using Encoder = Avro.IO.Encoder;
+
+namespace Avro.Specific
+{
+    /// <summary>
+    /// PreresolvingDatumWriter for writing data from ISpecificRecord classes.
+    /// <see cref="PreresolvingDatumWriter{T}">For more information about performance considerations for choosing this implementation</see>
+    /// </summary>
+    public class SpecificDatumWriter<T> : PreresolvingDatumWriter<T>
+    {
+        public SpecificDatumWriter(Schema schema) : base(schema, new SpecificArrayAccess(), new DictionaryMapAccess())
+        {
+        }
+
+        protected override void WriteRecordFields(object recordObj, RecordFieldWriter[] writers, Encoder encoder)
+        {
+            var record = (ISpecificRecord) recordObj;
+            for (int i = 0; i < writers.Length; i++)
+            {
+                var writer = writers[i];
+                writer.WriteField(record.Get(writer.Field.Pos), encoder);
+            }
+        }
+
+        protected override void EnsureRecordObject(RecordSchema recordSchema, object value)
+        {
+            if (!(value is ISpecificRecord))
+                throw new AvroTypeException("Record object is not derived from ISpecificRecord");
+        }
+
+        protected override void WriteField(object record, string fieldName, int fieldPos, WriteItem writer, Encoder encoder)
+        {
+            writer(((ISpecificRecord)record).Get(fieldPos), encoder);
+        }
+
+        protected override WriteItem ResolveEnum(EnumSchema es)
+        {
+            var type = ObjectCreator.Instance.GetType(es);
+            
+            var enumNames = Enum.GetNames(type);
+            var translator = new int[enumNames.Length];
+            for(int i = 0; i < enumNames.Length; i++)
+            {
+                if(es.Contains(enumNames[i]))
+                {
+                    translator[i] = es.Ordinal(enumNames[i]);
+                }
+                else
+                {
+                    translator[i] = -1;
+                }
+            }
+
+            return (v,e) =>
+                       {
+                           if(v == null)
+                                throw new AvroTypeException("value is null in SpecificDefaultWriter.WriteEnum");
+                           if(v.GetType() == type)
+                           {
+                               int translated = translator[(int)v];
+                               if (translated == -1)
+                               {
+                                   throw new AvroTypeException("Unknown enum value:" + v.ToString());   
+                               }
+                               else
+                               {
+                                   e.WriteEnum(translated);
+                               }
+                           }
+                           else
+                           {
+                               e.WriteEnum(es.Ordinal(v.ToString()));
+                           }
+                       };
+        }
+
+        protected override void WriteFixed(FixedSchema schema, object value, Encoder encoder)
+        {
+            var fixedrec = value as SpecificFixed;
+            if (fixedrec == null)
+                throw new AvroTypeException("Fixed object is not derived from SpecificFixed");
+
+            encoder.WriteFixed(fixedrec.Value);
+        }
+
+        protected override bool UnionBranchMatches( Schema sc, object obj )
+        {
+            if (obj == null && sc.Tag != Avro.Schema.Type.Null) return false;
+            switch (sc.Tag)
+            {
+                case Schema.Type.Null:
+                    return obj == null;
+                case Schema.Type.Boolean:
+                    return obj is bool;
+                case Schema.Type.Int:
+                    return obj is int;
+                case Schema.Type.Long:
+                    return obj is long;
+                case Schema.Type.Float:
+                    return obj is float;
+                case Schema.Type.Double:
+                    return obj is double;
+                case Schema.Type.Bytes:
+                    return obj is byte[];
+                case Schema.Type.String:
+                    return obj is string;
+                case Schema.Type.Error:
+                case Schema.Type.Record:
+                    return obj is ISpecificRecord && 
+                           (((obj as ISpecificRecord).Schema) as RecordSchema).SchemaName.Equals((sc as RecordSchema).SchemaName);
+                case Schema.Type.Enumeration:
+                    return obj.GetType().IsEnum && (sc as EnumSchema).Symbols.Contains(obj.ToString());
+                case Schema.Type.Array:
+                    return obj is System.Collections.IList;
+                case Schema.Type.Map:
+                    return obj is System.Collections.IDictionary;
+                case Schema.Type.Union:
+                    return false;   // Union directly within another union not allowed!
+                case Schema.Type.Fixed:
+                    return obj is SpecificFixed && 
+                           (((obj as SpecificFixed).Schema) as FixedSchema).SchemaName.Equals((sc as FixedSchema).SchemaName);
+                default:
+                    throw new AvroException("Unknown schema type: " + sc.Tag);
+            }
+        }
+
+        class SpecificArrayAccess : ArrayAccess
+        {
+            public void EnsureArrayObject( object value )
+            {
+                if( !( value is System.Collections.IList ) )
+                {
+                    throw new AvroTypeException( "Array does not implement non-generic IList" );
+                }
+            }
+
+            public long GetArrayLength(object value)
+            {
+                return ((IList)value).Count;
+            }
+
+            public void WriteArrayValues(object array, WriteItem valueWriter, Encoder encoder)
+            {
+                var list = (IList) array;
+                for (int i = 0; i < list.Count; i++ )
+                {
+                    valueWriter(list[i], encoder);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
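
The matching write side, sketched under the same assumptions (a generated com.foo.Simple class exposing a _SCHEMA field):

    using System.IO;
    using Avro.IO;
    using Avro.Specific;
    using com.foo;

    static byte[] WriteOne(Simple record)
    {
        var stream = new MemoryStream();
        // Field writers are resolved once from the schema, then reused per record.
        var writer = new SpecificDatumWriter<Simple>(Simple._SCHEMA);
        writer.Write(record, new BinaryEncoder(stream));
        return stream.ToArray();
    }
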
diff --git a/lang/csharp/src/apache/main/Specific/SpecificException.cs b/lang/csharp/src/apache/main/Specific/SpecificException.cs
new file mode 100644
index 0000000..dbaba6d
--- /dev/null
+++ b/lang/csharp/src/apache/main/Specific/SpecificException.cs
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+
+namespace Avro.Specific
+{
+    public abstract class SpecificException : Exception, ISpecificRecord
+    {
+        public abstract Schema Schema { get; }
+        public abstract object Get(int fieldPos);
+        public abstract void Put(int fieldPos, object fieldValue);
+    }
+}
\ No newline at end of file
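
Generated protocol error types fill in these three members; a hand-written equivalent would look roughly like the following (the schema literal and field layout are illustrative, not part of this commit):

    using Avro;
    using Avro.Specific;

    public class TestError : SpecificException
    {
        public static readonly Schema _SCHEMA = Schema.Parse(
            "{\"type\":\"error\",\"name\":\"TestError\",\"fields\":" +
            "[{\"name\":\"detail\",\"type\":\"string\"}]}");

        private string detail;

        public override Schema Schema { get { return _SCHEMA; } }

        public override object Get(int fieldPos)
        {
            switch (fieldPos)
            {
                case 0: return detail;
                default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
            }
        }

        public override void Put(int fieldPos, object fieldValue)
        {
            switch (fieldPos)
            {
                case 0: detail = (string)fieldValue; break;
                default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
            }
        }
    }
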
diff --git a/lang/csharp/src/apache/main/Specific/SpecificFixed.cs b/lang/csharp/src/apache/main/Specific/SpecificFixed.cs
new file mode 100644
index 0000000..0c1be6d
--- /dev/null
+++ b/lang/csharp/src/apache/main/Specific/SpecificFixed.cs
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using Avro.Generic;
+
+namespace Avro.Specific
+{
+    /// <summary>
+    /// Base class for all generated fixed classes
+    /// </summary>
+    public abstract class SpecificFixed : GenericFixed
+    {
+        public SpecificFixed(uint size) : base(size) { }
+        public abstract new Schema Schema { get; }
+
+        protected bool Equals(SpecificFixed that)
+        {
+            if (this == that) return true;
+            if (that == null || !that.Schema.Equals(this.Schema)) return false;
+            for (int i = 0; i < value.Length; i++)
+            {
+                if (this.value[i] != that.Value[i]) return false;
+            }
+            return true;
+        }
+
+        public override bool Equals(object obj)
+        {
+            if (ReferenceEquals(null, obj)) return false;
+            if (ReferenceEquals(this, obj)) return true;
+            if (obj.GetType() != this.GetType()) return false;
+            return Equals((SpecificFixed) obj);
+        }
+
+        public override int GetHashCode()
+        {
+            int result = Schema.GetHashCode();
+            foreach (byte b in value)
+            {
+                result += 23 * b;
+            }
+            return result;
+        }
+    }
+}
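
A generated fixed type only pins the size and exposes its schema; roughly (the schema literal is illustrative):

    using Avro;
    using Avro.Specific;

    public class MD5 : SpecificFixed
    {
        public static readonly Schema _SCHEMA = Schema.Parse(
            "{\"type\":\"fixed\",\"name\":\"MD5\",\"size\":16}");

        // The base class allocates the 16-byte Value buffer.
        public MD5() : base(16) { }

        public override Schema Schema { get { return _SCHEMA; } }
    }
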
diff --git a/lang/csharp/src/apache/main/Specific/SpecificProtocol.cs b/lang/csharp/src/apache/main/Specific/SpecificProtocol.cs
new file mode 100644
index 0000000..1c7d47c
--- /dev/null
+++ b/lang/csharp/src/apache/main/Specific/SpecificProtocol.cs
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+namespace Avro.Specific
+{
+    public interface ISpecificProtocol
+    {
+        Protocol Protocol { get; }
+        void Request(ICallbackRequestor requestor, string messageName, object[] args, object callback);
+    }
+
+    public interface ICallbackRequestor
+    {
+        void Request<T>(string messageName, object[] args, object callback);
+    }
+
+}
\ No newline at end of file
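
A generated protocol client implements ISpecificProtocol by dispatching each message name to the transport-level requestor with the expected response type; a rough sketch (the protocol JSON is illustrative):

    using Avro;
    using Avro.Specific;

    public class SimpleClient : ISpecificProtocol
    {
        private static readonly Protocol protocol = Protocol.Parse(
            "{\"protocol\":\"Simple\",\"namespace\":\"com.foo\",\"messages\":" +
            "{\"hello\":{\"request\":[{\"name\":\"greeting\",\"type\":\"string\"}]," +
            "\"response\":\"string\"}}}");

        public Protocol Protocol { get { return protocol; } }

        public void Request(ICallbackRequestor requestor, string messageName, object[] args, object callback)
        {
            switch (messageName)
            {
                case "hello":
                    requestor.Request<string>(messageName, args, callback);
                    break;
            }
        }
    }
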
diff --git a/lang/csharp/src/apache/main/Specific/SpecificReader.cs b/lang/csharp/src/apache/main/Specific/SpecificReader.cs
new file mode 100644
index 0000000..b3d96b9
--- /dev/null
+++ b/lang/csharp/src/apache/main/Specific/SpecificReader.cs
@@ -0,0 +1,281 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Reflection;
+using System.IO;
+using Avro;
+using Avro.IO;
+using Avro.Generic;
+
+namespace Avro.Specific
+{
+    /// <summary>
+    /// Reader wrapper class for reading data and storing into specific classes
+    /// </summary>
+    /// <typeparam name="T">Specific class type</typeparam>
+    public class SpecificReader<T> : DatumReader<T>
+    {
+        /// <summary>
+        /// Reader class for reading data and storing into specific classes
+        /// </summary>
+        private readonly SpecificDefaultReader reader;
+
+        /// <summary>
+        /// Schema for the writer class
+        /// </summary>
+        public Schema WriterSchema { get { return reader.WriterSchema; } }
+
+        /// <summary>
+        /// Schema for the reader class
+        /// </summary>
+        public Schema ReaderSchema { get { return reader.ReaderSchema; } }
+
+        /// <summary>
+        /// Constructs a generic reader for the given schemas using the DefaultReader. If the
+        /// reader's and writer's schemas are different this class performs the resolution.
+        /// </summary>
+        /// <param name="writerSchema">The schema used while generating the data</param>
+        /// <param name="readerSchema">The schema desired by the reader</param>
+        public SpecificReader(Schema writerSchema, Schema readerSchema)
+        {
+            reader = new SpecificDefaultReader(writerSchema, readerSchema);
+        }
+
+        public SpecificReader(SpecificDefaultReader reader)
+        {
+            this.reader = reader;
+        }
+
+        /// <summary>
+        /// Generic read function
+        /// </summary>
+        /// <param name="reuse">object to store data read</param>
+        /// <param name="dec">decorder to use for reading data</param>
+        /// <returns></returns>
+        public T Read(T reuse, Decoder dec)
+        {
+            return reader.Read(reuse, dec);
+        }
+    }
+
+    /// <summary>
+    /// Reader class for reading data and storing into specific classes
+    /// </summary>
+    public class SpecificDefaultReader : DefaultReader
+    {
+        /// <summary>
+        /// Static dictionary of type names and their corresponding assembly types.
+        /// This is used to avoid repeated reflection lookups for the same type name.
+        /// </summary>
+        private static IDictionary<string, Type> TypeName = new Dictionary<string, Type>();
+
+        /// <summary>
+        /// Constructor
+        /// </summary>
+        /// <param name="writerSchema">schema of the object that wrote the data</param>
+        /// <param name="readerSchema">schema of the object that will store the data</param>
+        public SpecificDefaultReader(Schema writerSchema, Schema readerSchema) : base(writerSchema,readerSchema) 
+        {
+        }
+
+        /// <summary>
+        /// Deserializes a record from the stream.
+        /// </summary>
+        /// <param name="reuse">If not null, a record object that could be reused for returning the result</param>
+        /// <param name="writerSchema">The writer's RecordSchema</param>
+        /// <param name="readerSchema">The reader's schema, must be RecordSchema too.</param>
+        /// <param name="dec">The decoder for deserialization</param>
+        /// <returns>The record object just read</returns>
+        protected override object ReadRecord(object reuse, RecordSchema writerSchema, Schema readerSchema, Decoder dec)
+        {
+            RecordSchema rs = (RecordSchema)readerSchema;
+
+            if (rs.Name == null)
+                return base.ReadRecord(reuse, writerSchema, readerSchema, dec);
+
+            ISpecificRecord rec = (reuse != null ? reuse : ObjectCreator.Instance.New(rs.Fullname, Schema.Type.Record)) as ISpecificRecord;
+            object obj;
+            foreach (Field wf in writerSchema)
+            {
+                try
+                {
+                    Field rf;
+                    if (rs.TryGetField(wf.Name, out rf))
+                    {
+                        obj = rec.Get(rf.Pos);
+                        rec.Put(rf.Pos, Read(obj, wf.Schema, rf.Schema, dec));
+                    }
+                    else
+                        Skip(wf.Schema, dec);
+                }
+                catch (Exception ex)
+                {
+                    throw new AvroException(ex.Message + " in field " + wf.Name);
+                }
+            }
+            
+            var defaultStream = new MemoryStream();
+            var defaultEncoder = new BinaryEncoder(defaultStream);
+            var defaultDecoder = new BinaryDecoder(defaultStream);
+            foreach (Field rf in rs)
+            {
+                if (writerSchema.Contains(rf.Name)) continue;
+
+                defaultStream.Position = 0; // reset for writing
+                Resolver.EncodeDefaultValue(defaultEncoder, rf.Schema, rf.DefaultValue);
+                defaultStream.Flush();
+                defaultStream.Position = 0; // reset for reading
+
+                obj = rec.Get(rf.Pos);
+                rec.Put(rf.Pos, Read(obj, rf.Schema, rf.Schema, defaultDecoder));
+            }
+            
+            return rec;
+        }
+
+        /// <summary>
+        /// Deserializes a fixed object and returns the object. The default implementation uses CreateFixed()
+        /// and GetFixedBuffer() and returns what CreateFixed() returned.
+        /// </summary>
+        /// <param name="reuse">If appropriate, uses this object instead of creating a new one.</param>
+        /// <param name="writerSchema">The FixedSchema the writer used during serialization.</param>
+        /// <param name="readerSchema">The schema that the readr uses. Must be a FixedSchema with the same
+        /// size as the writerSchema.</param>
+        /// <param name="d">The decoder for deserialization.</param>
+        /// <returns>The deserilized object.</returns>
+        protected override object ReadFixed(object reuse, FixedSchema writerSchema, Schema readerSchema, Decoder d)
+        {
+            FixedSchema rs = readerSchema as FixedSchema;
+            if (rs.Size != writerSchema.Size)
+            {
+                throw new AvroException("Size mismatch between reader and writer fixed schemas. Writer: " + writerSchema +
+                    ", reader: " + readerSchema);
+            }
+
+            SpecificFixed fixedrec = (reuse != null ? reuse : ObjectCreator.Instance.New(rs.Fullname, Schema.Type.Fixed)) as SpecificFixed;
+            d.ReadFixed(fixedrec.Value);
+            return fixedrec;
+        }
+
+        /// <summary>
+        /// Reads an enum from the given decoder
+        /// </summary>
+        /// <param name="reuse">object to store data read</param>
+        /// <param name="writerSchema">schema of the object that wrote the data</param>
+        /// <param name="readerSchema">schema of the object that will store the data</param>
+        /// <param name="dec">decoder object that contains the data to be read</param>
+        /// <returns>enum value</returns>
+        protected override object ReadEnum(object reuse, EnumSchema writerSchema, Schema readerSchema, Decoder dec)
+        {
+            EnumSchema rs = readerSchema as EnumSchema;
+            return rs.Ordinal(writerSchema[dec.ReadEnum()]);
+        }
+
+        /// <summary>
+        /// Reads an array from the given decoder
+        /// </summary>
+        /// <param name="reuse">object to store data read</param>
+        /// <param name="writerSchema">schema of the object that wrote the data</param>
+        /// <param name="readerSchema">schema of the object that will store the data</param>
+        /// <param name="dec">decoder object that contains the data to be read</param>
+        /// <returns>array</returns>
+        protected override object ReadArray(object reuse, ArraySchema writerSchema, Schema readerSchema, Decoder dec)
+        {
+            ArraySchema rs = readerSchema as ArraySchema;
+            System.Collections.IList array;
+            if (reuse != null)
+            {
+                array = reuse as System.Collections.IList;
+                if (array == null)
+                    throw new AvroException("array object does not implement non-generic IList");
+
+                array.Clear();
+            }
+            else
+                array = ObjectCreator.Instance.New(getTargetType(readerSchema), Schema.Type.Array) as System.Collections.IList;
+            
+            int i = 0;
+            for (int n = (int)dec.ReadArrayStart(); n != 0; n = (int)dec.ReadArrayNext())
+            {
+                for (int j = 0; j < n; j++, i++)
+                    array.Add(Read(null, writerSchema.ItemSchema, rs.ItemSchema, dec));
+            }
+            return array;
+        }
+
+        /// <summary>
+        /// Deserializes an Avro map. The default implementation creates a new map using CreateMap() and then
+        /// adds elements to the map using AddMapEntry().
+        /// </summary>
+        /// <param name="reuse">If appropriate, use this instead of creating a new map object.</param>
+        /// <param name="writerSchema">The schema the writer used to write the map.</param>
+        /// <param name="readerSchema">The schema the reader is using.</param>
+        /// <param name="d">The decoder for serialization.</param>
+        /// <returns>The deserialized map object.</returns>
+        protected override object ReadMap(object reuse, MapSchema writerSchema, Schema readerSchema, Decoder d)
+        {
+            MapSchema rs = readerSchema as MapSchema;
+            System.Collections.IDictionary map;
+            if (reuse != null)
+            {
+                map = reuse as System.Collections.IDictionary;
+                if (map == null)
+                    throw new AvroException("map object does not implement non-generic IList");
+
+                map.Clear();
+            }
+            else
+                map = ObjectCreator.Instance.New(getTargetType(readerSchema), Schema.Type.Map) as System.Collections.IDictionary;
+
+            for (int n = (int)d.ReadMapStart(); n != 0; n = (int)d.ReadMapNext())
+            {
+                for (int j = 0; j < n; j++)
+                {
+                    string k = d.ReadString();
+                    map[k] = Read(null, writerSchema.ValueSchema, rs.ValueSchema, d);   // always create new map item
+                }
+            }
+            return map;
+        }
+
+        /// <summary>
+        /// Gets the target type name in the given schema
+        /// </summary>
+        /// <param name="schema">schema containing the type to be determined</param>
+        /// <param name="nullible">used for union schema</param>
+        /// <returns></returns>
+        protected virtual string getTargetType(Schema schema)
+        {
+            bool nEnum = false;
+            string type = Avro.CodeGen.getType(schema, false, ref nEnum);
+            if (schema.Tag == Schema.Type.Array)
+            {
+                type = type.Remove(0, 6);              // remove IList<
+                type = type.Remove(type.Length - 1);   // remove >
+            }
+            else if (schema.Tag == Schema.Type.Map)
+            {
+                type = type.Remove(0, 19);             // remove IDictionary<string,
+                type = type.Remove(type.Length - 1);   // remove >
+            }
+            return type;
+        }
+    }
+}
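
Note how ReadRecord above handles reader-only fields: each field's JSON default is encoded into a scratch MemoryStream and immediately decoded back through the normal Read path, so defaults take exactly the same deserialization route as wire data. Usage is otherwise symmetric with the writer; a hedged sketch, assuming the writer used an older schema than the generated com.foo.Simple class:

    using System.IO;
    using Avro;
    using Avro.IO;
    using Avro.Specific;
    using com.foo;

    static Simple ReadWithResolution(Stream input, Schema writerSchema)
    {
        // Fields missing from writerSchema are filled from Simple._SCHEMA defaults.
        var reader = new SpecificReader<Simple>(writerSchema, Simple._SCHEMA);
        return reader.Read(default(Simple), new BinaryDecoder(input));
    }
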
diff --git a/lang/csharp/src/apache/main/Specific/SpecificRecord.cs b/lang/csharp/src/apache/main/Specific/SpecificRecord.cs
new file mode 100644
index 0000000..eab5f96
--- /dev/null
+++ b/lang/csharp/src/apache/main/Specific/SpecificRecord.cs
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Avro.Specific
+{
+    /// <summary>
+    /// Interface class for generated classes
+    /// </summary>
+    public interface ISpecificRecord
+    {
+        Schema Schema { get; }
+        object Get(int fieldPos);
+        void Put(int fieldPos, object fieldValue);
+    }
+}
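
Generated classes implement this interface with positional access in schema field order; a hand-written equivalent (Point and its schema are illustrative) makes the contract concrete:

    using Avro;
    using Avro.Specific;

    public class Point : ISpecificRecord
    {
        public static readonly Schema _SCHEMA = Schema.Parse(
            "{\"type\":\"record\",\"name\":\"Point\",\"fields\":" +
            "[{\"name\":\"x\",\"type\":\"int\"},{\"name\":\"y\",\"type\":\"int\"}]}");

        public int X { get; set; }
        public int Y { get; set; }

        public Schema Schema { get { return _SCHEMA; } }

        public object Get(int fieldPos)
        {
            switch (fieldPos)
            {
                case 0: return X;
                case 1: return Y;
                default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
            }
        }

        public void Put(int fieldPos, object fieldValue)
        {
            switch (fieldPos)
            {
                case 0: X = (int)fieldValue; break;
                case 1: Y = (int)fieldValue; break;
                default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
            }
        }
    }
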
diff --git a/lang/csharp/src/apache/main/Specific/SpecificWriter.cs b/lang/csharp/src/apache/main/Specific/SpecificWriter.cs
new file mode 100644
index 0000000..5800714
--- /dev/null
+++ b/lang/csharp/src/apache/main/Specific/SpecificWriter.cs
@@ -0,0 +1,216 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Avro;
+using Avro.IO;
+using Avro.Generic;
+
+namespace Avro.Specific
+{
+    /// <summary>
+    /// Generic wrapper class for writing data from specific objects
+    /// </summary>
+    /// <typeparam name="T">type name of specific object</typeparam>
+    public class SpecificWriter<T> : GenericWriter<T>
+    {
+        public SpecificWriter(Schema schema) : base(new SpecificDefaultWriter(schema)) { }
+        public SpecificWriter(SpecificDefaultWriter writer) : base(writer) { }
+    }
+
+    /// <summary>
+    /// Class for writing data from any specific objects
+    /// </summary>
+    public class SpecificDefaultWriter : DefaultWriter
+    {
+        /// <summary>
+        /// Constructor
+        /// </summary>
+        /// <param name="schema">schema of the object to be written</param>
+        public SpecificDefaultWriter(Schema schema) : base(schema) { }
+
+        /// <summary>
+        /// Serializes a record using the given RecordSchema. It uses the GetField method
+        /// to extract the field value from the given object.
+        /// </summary>
+        /// <param name="schema">The RecordSchema to use for serialization</param>
+        /// <param name="value">The value to be serialized</param>
+        /// <param name="encoder">The Encoder for serialization</param>
+        protected override void WriteRecord(RecordSchema schema, object value, Encoder encoder)
+        {
+            var rec = value as ISpecificRecord;
+            if (rec == null)
+                throw new AvroTypeException("Record object is not derived from ISpecificRecord");
+
+            foreach (Field field in schema)
+            {
+                try
+                {
+                    Write(field.Schema, rec.Get(field.Pos), encoder);
+                }
+                catch (Exception ex)
+                {
+                    throw new AvroException(ex.Message + " in field " + field.Name);
+                }
+            }
+        }
+
+        /// <summary>
+        /// Validates that the record is a fixed record object and that the schema in the object is the
+        /// same as the given writer schema. Writes the given fixed record into the given encoder
+        /// </summary>
+        /// <param name="schema">writer schema</param>
+        /// <param name="value">fixed object to write</param>
+        /// <param name="encoder">encoder to write to</param>
+        protected override void WriteFixed(FixedSchema schema, object value, Encoder encoder)
+        {
+            var fixedrec = value as SpecificFixed;
+            if (fixedrec == null)
+                throw new AvroTypeException("Fixed object is not derived from SpecificFixed");
+
+            encoder.WriteFixed(fixedrec.Value);
+        }
+
+        /// <summary>
+        /// Writes the given enum value into the given encoder.
+        /// </summary>
+        /// <param name="schema">writer schema</param>
+        /// <param name="value">enum value</param>
+        /// <param name="encoder">encoder to write to</param>
+        protected override void WriteEnum(EnumSchema schema, object value, Encoder encoder)
+        {
+            if (value == null)
+                throw new AvroTypeException("value is null in SpecificDefaultWriter.WriteEnum");
+
+            encoder.WriteEnum(schema.Ordinal(value.ToString()));
+        }
+
+        /// <summary>
+        /// Serializes an array. The default implementation calls EnsureArrayObject() to ascertain that the
+        /// given value is an array. It then calls GetArrayLength() and GetArrayElement()
+        /// to access the members of the array and then serialize them.
+        /// </summary>
+        /// <param name="schema">The ArraySchema for serialization</param>
+        /// <param name="value">The value being serialized</param>
+        /// <param name="encoder">The encoder for serialization</param>
+        protected override void WriteArray(ArraySchema schema, object value, Encoder encoder)
+        {
+            var arr = value as System.Collections.IList;
+            if (arr == null)
+                throw new AvroTypeException("Array does not implement non-generic IList");
+
+            long l = arr.Count;
+            encoder.WriteArrayStart();
+            encoder.SetItemCount(l);
+            for (int i = 0; i < l; i++)
+            {
+                encoder.StartItem();
+                Write(schema.ItemSchema, arr[i], encoder);
+            }
+            encoder.WriteArrayEnd();
+        }
+
+        /// <summary>
+        /// Writes the given map into the given encoder.
+        /// </summary>
+        /// <param name="schema">writer schema</param>
+        /// <param name="value">map to write</param>
+        /// <param name="encoder">encoder to write to</param>
+        protected override void WriteMap(MapSchema schema, object value, Encoder encoder)
+        {
+            var map = value as System.Collections.IDictionary;
+            if (map == null)
+                throw new AvroTypeException("Map does not implement non-generic IDictionary");
+
+            encoder.WriteMapStart();
+            encoder.SetItemCount(map.Count);
+            foreach (System.Collections.DictionaryEntry de in map)
+            {
+                encoder.StartItem();
+                encoder.WriteString(de.Key as string);
+                Write(schema.ValueSchema, de.Value, encoder);
+            }
+            encoder.WriteMapEnd();
+        }
+
+        /// <summary>
+        /// Resolves the given value against the given UnionSchema and serializes the object against
+        /// the resolved schema member. The default implementation of this method uses
+        /// ResolveUnion to find the member schema within the UnionSchema.
+        /// </summary>
+        /// <param name="us">The UnionSchema to resolve against</param>
+        /// <param name="value">The value to be serialized</param>
+        /// <param name="encoder">The encoder for serialization</param>
+        protected override void WriteUnion(UnionSchema us, object value, Encoder encoder)
+        {
+            for (int i = 0; i < us.Count; i++)
+            {
+                if (Matches(us[i], value))
+                {
+                    encoder.WriteUnionIndex(i);
+                    Write(us[i], value, encoder);
+                    return;
+                }
+            }
+            throw new AvroException("Cannot find a match for " + value.GetType() + " in " + us);
+        }
+
+        protected override bool Matches(Schema sc, object obj)
+        {
+            if (obj == null && sc.Tag != Avro.Schema.Type.Null) return false;
+            switch (sc.Tag)
+            {
+                case Schema.Type.Null:
+                    return obj == null;
+                case Schema.Type.Boolean:
+                    return obj is bool;
+                case Schema.Type.Int:
+                    return obj is int;
+                case Schema.Type.Long:
+                    return obj is long;
+                case Schema.Type.Float:
+                    return obj is float;
+                case Schema.Type.Double:
+                    return obj is double;
+                case Schema.Type.Bytes:
+                    return obj is byte[];
+                case Schema.Type.String:
+                    return obj is string;
+                case Schema.Type.Error:
+                case Schema.Type.Record:
+                    return obj is ISpecificRecord && 
+                           (((obj as ISpecificRecord).Schema) as RecordSchema).SchemaName.Equals((sc as RecordSchema).SchemaName);
+                case Schema.Type.Enumeration:
+                    return obj.GetType().IsEnum && (sc as EnumSchema).Symbols.Contains(obj.ToString());
+                case Schema.Type.Array:
+                    return obj is System.Collections.IList;
+                case Schema.Type.Map:
+                    return obj is System.Collections.IDictionary;
+                case Schema.Type.Union:
+                    return false;   // Union directly within another union not allowed!
+                case Schema.Type.Fixed:
+                    return obj is SpecificFixed && 
+                           (((obj as SpecificFixed).Schema) as FixedSchema).SchemaName.Equals((sc as FixedSchema).SchemaName);
+                default:
+                    throw new AvroException("Unknown schema type: " + sc.Tag);
+            }
+        }
+    }
+}
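
WriteUnion probes the union branches in declaration order and emits the index of the first Matches() hit before the value itself. For a ["null", "string"] union, null therefore selects branch 0 and any string branch 1; a hedged sketch:

    using System.IO;
    using Avro;
    using Avro.IO;
    using Avro.Specific;

    static void WriteOptionalString(string value, Stream output)
    {
        Schema union = Schema.Parse("[\"null\", \"string\"]");
        var writer = new SpecificWriter<string>(union);
        // null -> union index 0 ("null"); "hi" -> union index 1 ("string").
        writer.Write(value, new BinaryEncoder(output));
    }
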
diff --git a/lang/csharp/src/apache/msbuild/Avro.msbuild.csproj b/lang/csharp/src/apache/msbuild/Avro.msbuild.csproj
new file mode 100644
index 0000000..f1dd6f8
--- /dev/null
+++ b/lang/csharp/src/apache/msbuild/Avro.msbuild.csproj
@@ -0,0 +1,74 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+    <ProjectGuid>{AEB22F94-4ECF-4008-B159-389B3F05D54B}</ProjectGuid>
+    <OutputType>Library</OutputType>
+    <AppDesignerFolder>Properties</AppDesignerFolder>
+    <RootNamespace>Avro.msbuild</RootNamespace>
+    <AssemblyName>Avro.msbuild</AssemblyName>
+    <TargetFrameworkVersion>v3.5</TargetFrameworkVersion>
+    <FileAlignment>512</FileAlignment>
+    <TargetFrameworkProfile />
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+    <DebugSymbols>true</DebugSymbols>
+    <DebugType>full</DebugType>
+    <Optimize>false</Optimize>
+    <OutputPath>..\..\..\build\msbuild\Debug\</OutputPath>
+    <DefineConstants>DEBUG;TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+    <DebugType>pdbonly</DebugType>
+    <Optimize>true</Optimize>
+    <OutputPath>..\..\..\build\msbuild\Release\</OutputPath>
+    <DefineConstants>TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+  </PropertyGroup>
+  <ItemGroup>
+    <Reference Include="Microsoft.Build.Framework" />
+    <Reference Include="Microsoft.Build.Tasks.v3.5" />
+    <Reference Include="Microsoft.Build.Utilities.v3.5" />
+    <Reference Include="System" />
+    <Reference Include="System.Core" />
+  </ItemGroup>
+  <ItemGroup>
+    <Compile Include="AvroBuilldTask.cs" />
+    <Compile Include="Properties\AssemblyInfo.cs" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="..\main\Avro.main.csproj">
+      <Project>{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}</Project>
+      <Name>Avro.main</Name>
+    </ProjectReference>
+  </ItemGroup>
+  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+  <!-- To modify your build process, add your task inside one of the targets below and uncomment it. 
+       Other similar extension points exist, see Microsoft.Common.targets.
+  <Target Name="BeforeBuild">
+  </Target>
+  <Target Name="AfterBuild">
+  </Target>
+  -->
+</Project>
\ No newline at end of file
diff --git a/lang/csharp/src/apache/msbuild/AvroBuilldTask.cs b/lang/csharp/src/apache/msbuild/AvroBuilldTask.cs
new file mode 100644
index 0000000..c8f9a47
--- /dev/null
+++ b/lang/csharp/src/apache/msbuild/AvroBuilldTask.cs
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using Microsoft.Build.Framework;
+using Microsoft.Build.Utilities;
+
+namespace Avro.msbuild
+{
+    public class AvroBuildTask : Task
+    {
+        public override bool Execute()
+        {
+            var codegen = new CodeGen();
+            if (SchemaFiles != null)
+            {
+                foreach (var schemaFile in SchemaFiles)
+                {
+                    var schema = Schema.Parse(System.IO.File.ReadAllText(schemaFile.ItemSpec));
+                    codegen.AddSchema(schema);
+                }
+            }
+            if (ProtocolFiles != null)
+            {
+                foreach (var protocolFile in ProtocolFiles)
+                {
+                    var protocol = Protocol.Parse(System.IO.File.ReadAllText(protocolFile.ItemSpec));
+                    codegen.AddProtocol(protocol);
+                }
+            }
+
+            var generateCode = codegen.GenerateCode();
+            var namespaces = generateCode.Namespaces;
+            for (var i = namespaces.Count - 1; i >= 0; i--)
+            {
+                var types = namespaces[i].Types;
+                for (var j = types.Count - 1; j >= 0; j--)
+                {
+                    Log.LogMessage("Generating {0}.{1}", namespaces[i].Name, types[j].Name);
+                }
+            }
+
+            codegen.WriteTypes(OutDir.ItemSpec);
+            return true;
+        }
+
+        public ITaskItem[] SchemaFiles { get; set; }
+        public ITaskItem[] ProtocolFiles { get; set; }
+
+        [Required]
+        public ITaskItem OutDir { get; set; }
+    }
+}
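
The task is a thin wrapper over CodeGen; the same pipeline can be driven standalone, e.g. from a console tool (the method name and paths are illustrative):

    using System.IO;
    using Avro;

    static void GenerateFromSchema(string schemaPath, string outDir)
    {
        var codegen = new CodeGen();
        // Parse the .avsc text, build the CodeDOM, and emit .cs files under outDir.
        codegen.AddSchema(Schema.Parse(File.ReadAllText(schemaPath)));
        codegen.GenerateCode();
        codegen.WriteTypes(outDir);
    }
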
diff --git a/lang/csharp/src/apache/msbuild/Properties/AssemblyInfo.cs b/lang/csharp/src/apache/msbuild/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..e46675b
--- /dev/null
+++ b/lang/csharp/src/apache/msbuild/Properties/AssemblyInfo.cs
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System.Reflection;
+using System.Runtime.InteropServices;
+
+[assembly: AssemblyTitle("Avro.msbuild")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("Apache")]
+[assembly: AssemblyProduct("Avro.msbuild")]
+[assembly: AssemblyCopyright("Copyright © Apache 2013")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+[assembly: ComVisible(false)]
+[assembly: Guid("A7982FA5-F369-457F-894E-4F90262E4638")]
+[assembly: AssemblyVersion("0.9.0.0")]
+[assembly: AssemblyFileVersion("0.9.0.0")]
\ No newline at end of file
diff --git a/lang/csharp/src/apache/perf/Avro.perf.csproj b/lang/csharp/src/apache/perf/Avro.perf.csproj
new file mode 100644
index 0000000..7235c99
--- /dev/null
+++ b/lang/csharp/src/apache/perf/Avro.perf.csproj
@@ -0,0 +1,143 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+    <ProductVersion>8.0.30703</ProductVersion>
+    <SchemaVersion>2.0</SchemaVersion>
+    <ProjectGuid>{AC4E1909-2594-4D01-9B2B-B832C07BAFE5}</ProjectGuid>
+    <OutputType>Exe</OutputType>
+    <AppDesignerFolder>Properties</AppDesignerFolder>
+    <RootNamespace>Avro.perf</RootNamespace>
+    <AssemblyName>Avro.perf</AssemblyName>
+    <TargetFrameworkVersion Condition=" '$(TargetFrameworkVersion)' == '' ">v3.5</TargetFrameworkVersion>
+    <FileAlignment>512</FileAlignment>
+    <FileUpgradeFlags>
+    </FileUpgradeFlags>
+    <OldToolsVersion>3.5</OldToolsVersion>
+    <UpgradeBackupLocation />
+    <PublishUrl>publish\</PublishUrl>
+    <Install>true</Install>
+    <InstallFrom>Disk</InstallFrom>
+    <UpdateEnabled>false</UpdateEnabled>
+    <UpdateMode>Foreground</UpdateMode>
+    <UpdateInterval>7</UpdateInterval>
+    <UpdateIntervalUnits>Days</UpdateIntervalUnits>
+    <UpdatePeriodically>false</UpdatePeriodically>
+    <UpdateRequired>false</UpdateRequired>
+    <MapFileExtensions>true</MapFileExtensions>
+    <ApplicationRevision>0</ApplicationRevision>
+    <ApplicationVersion>1.0.0.%2a</ApplicationVersion>
+    <IsWebBootstrapper>false</IsWebBootstrapper>
+    <UseApplicationTrust>false</UseApplicationTrust>
+    <BootstrapperEnabled>true</BootstrapperEnabled>
+    <TargetFrameworkProfile />
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+    <DebugSymbols>true</DebugSymbols>
+    <DebugType>full</DebugType>
+    <Optimize>false</Optimize>
+    <OutputPath>..\..\..\build\perf\Debug\</OutputPath>
+    <DefineConstants>DEBUG;TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+    <GenerateSerializationAssemblies>Off</GenerateSerializationAssemblies>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+    <DebugType>none</DebugType>
+    <Optimize>true</Optimize>
+    <OutputPath>..\..\..\build\perf\Release\</OutputPath>
+    <DefineConstants>
+    </DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+    <GenerateSerializationAssemblies>Off</GenerateSerializationAssemblies>
+  </PropertyGroup>
+  <PropertyGroup>
+    <StartupObject />
+  </PropertyGroup>
+  <ItemGroup>
+    <Reference Include="Castle.Core, Version=3.2.0.0, Culture=neutral, PublicKeyToken=407dd0808d44fbdc, processorArchitecture=MSIL">
+      <SpecificVersion>False</SpecificVersion>
+      <HintPath>..\..\..\..\..\lang\csharp\lib\main\Castle.Core.dll</HintPath>
+    </Reference>
+    <Reference Include="nunit.framework, Version=2.5.7.10213, Culture=neutral, PublicKeyToken=96d09a1eb7f44a77, processorArchitecture=MSIL">
+      <SpecificVersion>False</SpecificVersion>
+      <HintPath>..\..\..\lib\test\nunit.framework.dll</HintPath>
+    </Reference>
+    <Reference Include="System" />
+    <Reference Include="System.Core" />
+    <Reference Include="System.Xml.Linq" />
+    <Reference Include="System.Data.DataSetExtensions" />
+    <Reference Include="System.Data" />
+    <Reference Include="System.Xml" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="..\ipc\Avro.ipc.csproj">
+      <Project>{3b05043a-dc6c-49b6-85bf-9ab055d0b414}</Project>
+      <Name>Avro.ipc</Name>
+    </ProjectReference>
+    <ProjectReference Include="..\main\Avro.main.csproj">
+      <Project>{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}</Project>
+      <Name>Avro.main</Name>
+    </ProjectReference>
+  </ItemGroup>
+  <ItemGroup>
+    <BootstrapperPackage Include=".NETFramework,Version=v3.5">
+      <Visible>False</Visible>
+      <ProductName>Microsoft .NET Framework 4 %28x86 and x64%29</ProductName>
+      <Install>true</Install>
+    </BootstrapperPackage>
+    <BootstrapperPackage Include="Microsoft.Net.Framework.3.5.SP1">
+      <Visible>False</Visible>
+      <ProductName>.NET Framework 3.5 SP1</ProductName>
+      <Install>false</Install>
+    </BootstrapperPackage>
+    <BootstrapperPackage Include="Microsoft.Windows.Installer.3.1">
+      <Visible>False</Visible>
+      <ProductName>Windows Installer 3.1</ProductName>
+      <Install>true</Install>
+    </BootstrapperPackage>
+  </ItemGroup>
+  <ItemGroup>
+    <Compile Include="com\foo\A.cs" />
+    <Compile Include="com\foo\Complex.cs" />
+    <Compile Include="com\foo\MyEnum.cs" />
+    <Compile Include="com\foo\MyFixed.cs" />
+    <Compile Include="com\foo\Narrow.cs" />
+    <Compile Include="com\foo\newRec.cs" />
+    <Compile Include="com\foo\Simple.cs" />
+    <Compile Include="com\foo\Wide.cs" />
+    <Compile Include="PerfTest.cs">
+      <SubType>Code</SubType>
+    </Compile>
+  </ItemGroup>
+  <ItemGroup>
+    <None Include="app.config" />
+    <None Include="schema.avsc" />
+  </ItemGroup>
+  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+  <!-- To modify your build process, add your task inside one of the targets below and uncomment it. 
+       Other similar extension points exist, see Microsoft.Common.targets.
+  <Target Name="BeforeBuild">
+  </Target>
+  <Target Name="AfterBuild">
+  </Target>
+  -->
+</Project>
\ No newline at end of file
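The perf project above targets .NET 3.5 unless a TargetFrameworkVersion is
supplied externally, builds an executable, and takes Avro.main and Avro.ipc by
project reference alongside the bundled Castle.Core and NUnit assemblies.
Assuming MSBuild is on the path, a Release build lands in build\perf\Release
with the usual invocation:

    msbuild Avro.perf.csproj /p:Configuration=Release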
diff --git a/lang/csharp/src/apache/perf/PerfTest.cs b/lang/csharp/src/apache/perf/PerfTest.cs
new file mode 100644
index 0000000..e3aefd3
--- /dev/null
+++ b/lang/csharp/src/apache/perf/PerfTest.cs
@@ -0,0 +1,239 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Text;
+using Avro.Generic;
+using Avro.IO;
+using Avro.Specific;
+using com.foo;
+
+namespace Avro.perf
+{
+    class Program
+    {
+        static void Main(string[] args)
+        {
+            Console.Out.WriteLine("type\timpl\taction\ttotal_items\tbatches\tbatch_size\ttime(ms)");
+            PerfTest( "simple", BuildSimple(), Simple._SCHEMA);
+            PerfTest( "complex", BuildComplex(), Complex._SCHEMA);
+            PerfTest( "narrow", BuildNarrow(), Narrow._SCHEMA);
+            PerfTest( "wide", BuildWide(), Wide._SCHEMA);
+        }
+
+        private static Simple BuildSimple()
+        {
+            var bytes = "bytes sample text";
+            var encoding = new UTF8Encoding();
+            var simp = new Simple
+                           {
+                               myInt = 1,
+                               myLong = 2,
+                               myBool = true,
+                               myDouble = (double) 3,
+                               myFloat = (float) 4.5,
+                               myBytes = encoding.GetBytes( bytes ),
+                               myString = "Hello",
+                               myNull = null,
+                           };
+            return simp;
+        }
+
+        private static Complex BuildComplex()
+        {
+            var bytes = "bytes sample text";
+            var encoding = new UTF8Encoding();
+
+            var c = new Complex
+                        {
+                            myUInt = 1,
+                            myULong = 2,
+                            myUBool = true,
+                            myUDouble = (double) 3,
+                            myUFloat = (float) 4.5,
+                            myUBytes = encoding.GetBytes( bytes ),
+                            myUString = "Hello",
+                            myInt = 1,
+                            myLong = 2,
+                            myBool = true,
+                            myDouble = (double) 3,
+                            myFloat = (float) 4.5,
+                            myBytes = encoding.GetBytes( bytes ),
+                            myString = "Hello",
+                            myNull = null,
+                            myFixed = new MyFixed() { Value = encoding.GetBytes( "My fixed record0" ) },
+                            myA = new A() { f1 = 10 },
+                            myE = com.foo.MyEnum.C
+                        };
+
+            c.myArray = new List<byte[]>();
+            c.myArray.Add( encoding.GetBytes( "a" ) );
+
+            c.myArray2 = new List<com.foo.newRec>();
+            var rec = new com.foo.newRec();
+            rec.f1 = 50;
+            c.myArray2.Add( rec );
+
+            c.myMap = new Dictionary<string, string>();
+            c.myMap.Add( "key", "value" );
+            c.myMap2 = new Dictionary<string, com.foo.newRec>();
+            var newrec = new com.foo.newRec();
+            newrec.f1 = 1200;
+            c.myMap2.Add( "A", newrec );
+            c.myObject = c.myA;
+
+            var o1 = new List<System.Object>();
+
+            o1.Add( (double) 1123123121 );
+            o1.Add( (double) 2 );
+            o1.Add( null );
+            o1.Add( "fred" );
+
+            var o2 = new List<System.Object>();
+
+            o2.Add( (double) 1 );
+            o2.Add( (double) 32531 );
+            o2.Add( (double) 4 );
+            o2.Add( (double) 555 );
+            o2.Add( (double) 0 );
+
+            c.myArray3 = new List<IList<System.Object>>();
+            c.myArray3.Add( o1 );
+            c.myArray3.Add( o2 );
+            return c;
+        }
+
+        private static Narrow BuildNarrow()
+        {
+            return new Narrow
+                       {
+                           myInt = 5000000,
+                           myLong = 99999999999,
+                           myString = "narrow"
+                       };
+        }
+
+        private static Wide BuildWide()
+        {
+            return new Wide()
+                       {
+                           myA = new A { f1 = 9995885 },
+                           myA2 = new A { f1 = 29995885 },
+                           myA3 = new A { f1 = 39995885 },
+                           myA4 = new A { f1 = 49995885 },
+                           myFloat = 11111.11f,
+                           myFloat2 = 22222.22f,
+                           myFloat3 = 33333.33f,
+                           myFloat4 = 44444.44f,
+                           myE = MyEnum.A,
+                           myE2 = MyEnum.B,
+                           myE3 = MyEnum.C,
+                           myE4 = MyEnum.C,
+                           myBool = true,
+                           myBool2 = false,
+                           myBool3 = true,
+                           myBool4 = false,
+                           myDouble = 11111111111.11,
+                           myDouble2 = 22222222222.22,
+                           myDouble3 = 33333333333.33,
+                           myDouble4 = 44444444444.44,
+                           myInt = 1111111,
+                           myInt2 = 2222222,
+                           myInt3 = 3333333,
+                           myInt4 = 4444444,
+                           myLong = 1111111111111,
+                           myLong2 = 2222222222222,
+                           myLong3 = 3333333333333,
+                           myLong4 = 4444444444444,
+                           myString = "wide record 1",
+                           myString2 = "wide record 2",
+                           myString3 = "wide record 3",
+                           myString4 = "wide record 4",
+                           myBytes = new byte[] { 1, 1, 1, 1 },
+                           myBytes2 = new byte[] { 2, 2, 2, 2 },
+                           myBytes3 = new byte[] { 3, 3, 3, 3 },
+                           myBytes4 = new byte[] { 4, 4, 4, 4 }
+                       };
+        }
+
+        private static void PerfTest<T>(string testName, T testObj, Schema schema)
+        {
+            var generic = ConvertSpecificToGeneric(testObj, schema);
+            PerfTest(testName, "default_specific", testObj, schema, s => new SpecificWriter<T>(s), s => new SpecificReader<T>(s, s));
+            PerfTest(testName, "preresolved_specific", testObj, schema, s => new SpecificDatumWriter<T>(s), s => new SpecificDatumReader<T>(s, s));
+            PerfTest(testName, "default_generic", generic, schema, s => new GenericWriter<GenericRecord>(s), s => new GenericReader<GenericRecord>(s, s));
+            PerfTest(testName, "preresolved_generic", generic, schema, s => new GenericDatumWriter<GenericRecord>(s), s => new GenericDatumReader<GenericRecord>(s, s));
+        }
+
+        private static GenericRecord ConvertSpecificToGeneric<T>(T obj, Schema schema)
+        {
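+            // Round-trip through an in-memory binary buffer: encode with the
+            // specific writer, rewind the stream, then decode generically so
+            // both implementations are benchmarked against the same payload.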
+            var stream = new MemoryStream();
+            var encoder = new BinaryEncoder( stream );
+            var decoder = new BinaryDecoder( stream );
+
+            var writer = new SpecificWriter<T>(schema);
+            writer.Write(obj, encoder);
+            encoder.Flush();
+            stream.Position = 0;
+
+            return new GenericReader<GenericRecord>(schema, schema).Read(null, decoder);
+        }
+
+        private static void PerfTest<T>(string name, string impl, T z, Schema schema, Func<Schema,DatumWriter<T>> writerFactory, Func<Schema,DatumReader<T>> readerFactory)
+        {
+            var stream = new MemoryStream();
+            var binEncoder = new BinaryEncoder( stream );
+            var decoder = new BinaryDecoder( stream );
+
+            var totalItems = 1000000;
+            
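+            // Rewinding the stream before every item keeps the loop measuring
+            // pure encode/decode cost over a fixed buffer rather than buffer
+            // growth and reallocation.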
+            foreach (int itemsPerBatch in new List<int> { 1000 } )
+            {
+                int serialized = 0;
+                int batches = totalItems / itemsPerBatch;
+                var startTime = Environment.TickCount;
+                for (int batch = 0; batch < batches; batch++ )
+                {
+                    var writer = writerFactory( schema );
+                    for( int i = 0; i < itemsPerBatch; i++ )
+                    {
+                        stream.Position = 0;
+                        writer.Write( z, binEncoder );
+                        serialized++;
+                    }
+                }
+                Console.Out.WriteLine("{0}\t{1}\tserialize\t{2}\t{3}\t{4}\t{5}", name, impl, serialized, batches, itemsPerBatch, Environment.TickCount - startTime);
+
+                int deserialized = 0;
+                startTime = Environment.TickCount;
+                for (int batch = 0; batch < batches; batch++ )
+                {
+                    var reader = readerFactory(schema);
+                    for (int i = 0; i < itemsPerBatch; i++)
+                    {
+                        stream.Position = 0;
+                        reader.Read( z, decoder );
+                        deserialized++;
+                    }
+                }
+                Console.Out.WriteLine("{0}\t{1}\tdeserialize\t{2}\t{3}\t{4}\t{5}", name, impl, deserialized, batches, itemsPerBatch, Environment.TickCount - startTime);
+            }
+        }
+    }
+}
\ No newline at end of file
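The measurement pattern in PerfTest.cs is compact enough to lift out on its
own. A minimal sketch, assuming Avro.dll is referenced and the generated
Simple class from this import is available (MiniBench and the item count are
illustrative, not part of the harness):

    using System;
    using System.IO;
    using Avro.IO;
    using Avro.Specific;
    using com.foo;

    class MiniBench
    {
        static void Main()
        {
            var item = new Simple
            {
                myInt = 1, myLong = 2, myBool = true, myDouble = 3,
                myFloat = 4.5f, myBytes = new byte[] { 1 },
                myString = "Hello", myNull = null
            };
            var stream = new MemoryStream();
            var encoder = new BinaryEncoder(stream);
            var writer = new SpecificDatumWriter<Simple>(Simple._SCHEMA);

            const int n = 100000;
            var start = Environment.TickCount;
            for (int i = 0; i < n; i++)
            {
                stream.Position = 0;          // reuse the buffer, as the harness does
                writer.Write(item, encoder);
            }
            Console.WriteLine("serialize {0} items: {1} ms",
                              n, Environment.TickCount - start);
        }
    }

Environment.TickCount only resolves to roughly 10-16 ms, which is why the
harness amortizes it over a million items per run.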
diff --git a/lang/csharp/src/apache/perf/com/foo/A.cs b/lang/csharp/src/apache/perf/com/foo/A.cs
new file mode 100644
index 0000000..4f5155e
--- /dev/null
+++ b/lang/csharp/src/apache/perf/com/foo/A.cs
@@ -0,0 +1,56 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace com.foo
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class A : ISpecificRecord
+	{
+		public static Schema _SCHEMA = Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"A\",\"namespace\":\"com.foo\",\"fields\":[{\"name\":\"f1\",\"type\":\"" +
+				"long\"}]}");
+		private long _f1;
+		public virtual Schema Schema
+		{
+			get
+			{
+				return A._SCHEMA;
+			}
+		}
+		public long f1
+		{
+			get
+			{
+				return this._f1;
+			}
+			set
+			{
+				this._f1 = value;
+			}
+		}
+		public virtual object Get(int fieldPos)
+		{
+			switch (fieldPos)
+			{
+			case 0: return this.f1;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+			};
+		}
+		public virtual void Put(int fieldPos, object fieldValue)
+		{
+			switch (fieldPos)
+			{
+			case 0: this.f1 = (System.Int64)fieldValue; break;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+			};
+		}
+	}
+}
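Every generated class implements ISpecificRecord: a Schema property plus the
positional Get/Put accessors that the specific reader and writer use in place
of reflection. A small sketch of that contract from the calling side
(SpecificRecordDemo is illustrative):

    using System;
    using com.foo;

    class SpecificRecordDemo
    {
        static void Main()
        {
            var a = new A();
            a.Put(0, 42L);                // field 0 is f1, declared "long"
            long f1 = (long)a.Get(0);     // positions mirror schema field order
            Console.WriteLine(f1);
            // Any other position throws AvroRuntimeException("Bad index ...").
        }
    }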
diff --git a/lang/csharp/src/apache/perf/com/foo/Complex.cs b/lang/csharp/src/apache/perf/com/foo/Complex.cs
new file mode 100644
index 0000000..ec9e559
--- /dev/null
+++ b/lang/csharp/src/apache/perf/com/foo/Complex.cs
@@ -0,0 +1,377 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace com.foo
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class Complex : ISpecificRecord
+	{
+		public static Schema _SCHEMA = Avro.Schema.Parse(@"{""type"":""record"",""name"":""Complex"",""namespace"":""com.foo"",""fields"":[{""name"":""myUInt"",""type"":[""int"",""null""]},{""name"":""myULong"",""type"":[""long"",""null""]},{""name"":""myUBool"",""type"":[""boolean"",""null""]},{""name"":""myUDouble"",""type"":[""double"",""null""]},{""name"":""myUFloat"",""type"":[""float"",""null""]},{""name"":""myUBytes"",""type"":[""bytes"",""null""]},{""name"":""myUString"",""type"":[""st [...]
+		private System.Nullable<int> _myUInt;
+		private System.Nullable<long> _myULong;
+		private System.Nullable<bool> _myUBool;
+		private System.Nullable<double> _myUDouble;
+		private System.Nullable<float> _myUFloat;
+		private byte[] _myUBytes;
+		private string _myUString;
+		private int _myInt;
+		private long _myLong;
+		private bool _myBool;
+		private double _myDouble;
+		private float _myFloat;
+		private byte[] _myBytes;
+		private string _myString;
+		private object _myNull;
+		private com.foo.MyFixed _myFixed;
+		private com.foo.A _myA;
+		private com.foo.MyEnum _myE;
+		private IList<System.Byte[]> _myArray;
+		private IList<com.foo.newRec> _myArray2;
+		private IDictionary<string,System.String> _myMap;
+		private IDictionary<string,com.foo.newRec> _myMap2;
+		private object _myObject;
+		private IList<IList<System.Object>> _myArray3;
+		public virtual Schema Schema
+		{
+			get
+			{
+				return Complex._SCHEMA;
+			}
+		}
+		public System.Nullable<int> myUInt
+		{
+			get
+			{
+				return this._myUInt;
+			}
+			set
+			{
+				this._myUInt = value;
+			}
+		}
+		public System.Nullable<long> myULong
+		{
+			get
+			{
+				return this._myULong;
+			}
+			set
+			{
+				this._myULong = value;
+			}
+		}
+		public System.Nullable<bool> myUBool
+		{
+			get
+			{
+				return this._myUBool;
+			}
+			set
+			{
+				this._myUBool = value;
+			}
+		}
+		public System.Nullable<double> myUDouble
+		{
+			get
+			{
+				return this._myUDouble;
+			}
+			set
+			{
+				this._myUDouble = value;
+			}
+		}
+		public System.Nullable<float> myUFloat
+		{
+			get
+			{
+				return this._myUFloat;
+			}
+			set
+			{
+				this._myUFloat = value;
+			}
+		}
+		public byte[] myUBytes
+		{
+			get
+			{
+				return this._myUBytes;
+			}
+			set
+			{
+				this._myUBytes = value;
+			}
+		}
+		public string myUString
+		{
+			get
+			{
+				return this._myUString;
+			}
+			set
+			{
+				this._myUString = value;
+			}
+		}
+		public int myInt
+		{
+			get
+			{
+				return this._myInt;
+			}
+			set
+			{
+				this._myInt = value;
+			}
+		}
+		public long myLong
+		{
+			get
+			{
+				return this._myLong;
+			}
+			set
+			{
+				this._myLong = value;
+			}
+		}
+		public bool myBool
+		{
+			get
+			{
+				return this._myBool;
+			}
+			set
+			{
+				this._myBool = value;
+			}
+		}
+		public double myDouble
+		{
+			get
+			{
+				return this._myDouble;
+			}
+			set
+			{
+				this._myDouble = value;
+			}
+		}
+		public float myFloat
+		{
+			get
+			{
+				return this._myFloat;
+			}
+			set
+			{
+				this._myFloat = value;
+			}
+		}
+		public byte[] myBytes
+		{
+			get
+			{
+				return this._myBytes;
+			}
+			set
+			{
+				this._myBytes = value;
+			}
+		}
+		public string myString
+		{
+			get
+			{
+				return this._myString;
+			}
+			set
+			{
+				this._myString = value;
+			}
+		}
+		public object myNull
+		{
+			get
+			{
+				return this._myNull;
+			}
+			set
+			{
+				this._myNull = value;
+			}
+		}
+		public com.foo.MyFixed myFixed
+		{
+			get
+			{
+				return this._myFixed;
+			}
+			set
+			{
+				this._myFixed = value;
+			}
+		}
+		public com.foo.A myA
+		{
+			get
+			{
+				return this._myA;
+			}
+			set
+			{
+				this._myA = value;
+			}
+		}
+		public com.foo.MyEnum myE
+		{
+			get
+			{
+				return this._myE;
+			}
+			set
+			{
+				this._myE = value;
+			}
+		}
+		public IList<System.Byte[]> myArray
+		{
+			get
+			{
+				return this._myArray;
+			}
+			set
+			{
+				this._myArray = value;
+			}
+		}
+		public IList<com.foo.newRec> myArray2
+		{
+			get
+			{
+				return this._myArray2;
+			}
+			set
+			{
+				this._myArray2 = value;
+			}
+		}
+		public IDictionary<string,System.String> myMap
+		{
+			get
+			{
+				return this._myMap;
+			}
+			set
+			{
+				this._myMap = value;
+			}
+		}
+		public IDictionary<string,com.foo.newRec> myMap2
+		{
+			get
+			{
+				return this._myMap2;
+			}
+			set
+			{
+				this._myMap2 = value;
+			}
+		}
+		public object myObject
+		{
+			get
+			{
+				return this._myObject;
+			}
+			set
+			{
+				this._myObject = value;
+			}
+		}
+		public IList<IList<System.Object>> myArray3
+		{
+			get
+			{
+				return this._myArray3;
+			}
+			set
+			{
+				this._myArray3 = value;
+			}
+		}
+		public virtual object Get(int fieldPos)
+		{
+			switch (fieldPos)
+			{
+			case 0: return this.myUInt;
+			case 1: return this.myULong;
+			case 2: return this.myUBool;
+			case 3: return this.myUDouble;
+			case 4: return this.myUFloat;
+			case 5: return this.myUBytes;
+			case 6: return this.myUString;
+			case 7: return this.myInt;
+			case 8: return this.myLong;
+			case 9: return this.myBool;
+			case 10: return this.myDouble;
+			case 11: return this.myFloat;
+			case 12: return this.myBytes;
+			case 13: return this.myString;
+			case 14: return this.myNull;
+			case 15: return this.myFixed;
+			case 16: return this.myA;
+			case 17: return this.myE;
+			case 18: return this.myArray;
+			case 19: return this.myArray2;
+			case 20: return this.myMap;
+			case 21: return this.myMap2;
+			case 22: return this.myObject;
+			case 23: return this.myArray3;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+			};
+		}
+		public virtual void Put(int fieldPos, object fieldValue)
+		{
+			switch (fieldPos)
+			{
+			case 0: this.myUInt = (System.Nullable<int>)fieldValue; break;
+			case 1: this.myULong = (System.Nullable<long>)fieldValue; break;
+			case 2: this.myUBool = (System.Nullable<bool>)fieldValue; break;
+			case 3: this.myUDouble = (System.Nullable<double>)fieldValue; break;
+			case 4: this.myUFloat = (System.Nullable<float>)fieldValue; break;
+			case 5: this.myUBytes = (System.Byte[])fieldValue; break;
+			case 6: this.myUString = (System.String)fieldValue; break;
+			case 7: this.myInt = (System.Int32)fieldValue; break;
+			case 8: this.myLong = (System.Int64)fieldValue; break;
+			case 9: this.myBool = (System.Boolean)fieldValue; break;
+			case 10: this.myDouble = (System.Double)fieldValue; break;
+			case 11: this.myFloat = (System.Single)fieldValue; break;
+			case 12: this.myBytes = (System.Byte[])fieldValue; break;
+			case 13: this.myString = (System.String)fieldValue; break;
+			case 14: this.myNull = (System.Object)fieldValue; break;
+			case 15: this.myFixed = (com.foo.MyFixed)fieldValue; break;
+			case 16: this.myA = (com.foo.A)fieldValue; break;
+			case 17: this.myE = (com.foo.MyEnum)fieldValue; break;
+			case 18: this.myArray = (IList<System.Byte[]>)fieldValue; break;
+			case 19: this.myArray2 = (IList<com.foo.newRec>)fieldValue; break;
+			case 20: this.myMap = (IDictionary<string,System.String>)fieldValue; break;
+			case 21: this.myMap2 = (IDictionary<string,com.foo.newRec>)fieldValue; break;
+			case 22: this.myObject = (System.Object)fieldValue; break;
+			case 23: this.myArray3 = (IList<IList<System.Object>>)fieldValue; break;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+			};
+		}
+	}
+}
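Note how the ["type","null"] unions in Complex surface in C#: value types
become System.Nullable<T>, while bytes and string stay plain reference types
because null is already representable there. A short illustration
(NullableUnionDemo is illustrative):

    using com.foo;

    class NullableUnionDemo
    {
        static void Main()
        {
            var c = new Complex();
            c.myUInt = null;     // union ["int","null"]   -> System.Nullable<int>
            c.myUBytes = null;   // union ["bytes","null"] -> byte[]; no wrapper
            c.myInt = 7;         // plain "int" stays a non-nullable int
        }
    }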
diff --git a/lang/csharp/src/apache/perf/com/foo/MyEnum.cs b/lang/csharp/src/apache/perf/com/foo/MyEnum.cs
new file mode 100644
index 0000000..5546c5c
--- /dev/null
+++ b/lang/csharp/src/apache/perf/com/foo/MyEnum.cs
@@ -0,0 +1,22 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace com.foo
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public enum MyEnum
+	{
+		A,
+		B,
+		C,
+	}
+}
diff --git a/lang/csharp/src/apache/perf/com/foo/MyFixed.cs b/lang/csharp/src/apache/perf/com/foo/MyFixed.cs
new file mode 100644
index 0000000..3255e75
--- /dev/null
+++ b/lang/csharp/src/apache/perf/com/foo/MyFixed.cs
@@ -0,0 +1,39 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace com.foo
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class MyFixed : SpecificFixed
+	{
+		public static Schema _SCHEMA = Avro.Schema.Parse("{\"type\":\"fixed\",\"name\":\"MyFixed\",\"namespace\":\"com.foo\",\"size\":16}");
+		private static uint fixedSize = 16;
+		public MyFixed() : 
+				base(fixedSize)
+		{
+		}
+		public override Schema Schema
+		{
+			get
+			{
+				return MyFixed._SCHEMA;
+			}
+		}
+		public static uint FixedSize
+		{
+			get
+			{
+				return MyFixed.fixedSize;
+			}
+		}
+	}
+}
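MyFixed derives from SpecificFixed and pins its length at construction; the 16
comes straight from the schema's "size" attribute. A sketch of correct use
(FixedDemo is illustrative; the base class is expected to reject a buffer of
any other length):

    using com.foo;

    class FixedDemo
    {
        static void Main()
        {
            var f = new MyFixed();
            // "My fixed record0" is exactly 16 ASCII bytes, matching size 16.
            f.Value = System.Text.Encoding.UTF8.GetBytes("My fixed record0");
        }
    }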
diff --git a/lang/csharp/src/apache/perf/com/foo/Narrow.cs b/lang/csharp/src/apache/perf/com/foo/Narrow.cs
new file mode 100644
index 0000000..13c1e03
--- /dev/null
+++ b/lang/csharp/src/apache/perf/com/foo/Narrow.cs
@@ -0,0 +1,85 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace com.foo
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class Narrow : ISpecificRecord
+	{
+		public static Schema _SCHEMA = Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"Narrow\",\"namespace\":\"com.foo\",\"fields\":[{\"name\":\"myInt\"," +
+				"\"type\":\"int\"},{\"name\":\"myLong\",\"type\":\"long\"},{\"name\":\"myString\",\"type\":\"string\"" +
+				"}]}");
+		private int _myInt;
+		private long _myLong;
+		private string _myString;
+		public virtual Schema Schema
+		{
+			get
+			{
+				return Narrow._SCHEMA;
+			}
+		}
+		public int myInt
+		{
+			get
+			{
+				return this._myInt;
+			}
+			set
+			{
+				this._myInt = value;
+			}
+		}
+		public long myLong
+		{
+			get
+			{
+				return this._myLong;
+			}
+			set
+			{
+				this._myLong = value;
+			}
+		}
+		public string myString
+		{
+			get
+			{
+				return this._myString;
+			}
+			set
+			{
+				this._myString = value;
+			}
+		}
+		public virtual object Get(int fieldPos)
+		{
+			switch (fieldPos)
+			{
+			case 0: return this.myInt;
+			case 1: return this.myLong;
+			case 2: return this.myString;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+			};
+		}
+		public virtual void Put(int fieldPos, object fieldValue)
+		{
+			switch (fieldPos)
+			{
+			case 0: this.myInt = (System.Int32)fieldValue; break;
+			case 1: this.myLong = (System.Int64)fieldValue; break;
+			case 2: this.myString = (System.String)fieldValue; break;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+			};
+		}
+	}
+}
diff --git a/lang/csharp/src/apache/perf/com/foo/Simple.cs b/lang/csharp/src/apache/perf/com/foo/Simple.cs
new file mode 100644
index 0000000..6b24814
--- /dev/null
+++ b/lang/csharp/src/apache/perf/com/foo/Simple.cs
@@ -0,0 +1,153 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace com.foo
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class Simple : ISpecificRecord
+	{
+		public static Schema _SCHEMA = Avro.Schema.Parse(@"{""type"":""record"",""name"":""Simple"",""namespace"":""com.foo"",""fields"":[{""name"":""myInt"",""type"":""int""},{""name"":""myLong"",""type"":""long""},{""name"":""myBool"",""type"":""boolean""},{""name"":""myDouble"",""type"":""double""},{""name"":""myFloat"",""type"":""float""},{""name"":""myBytes"",""type"":""bytes""},{""name"":""myString"",""type"":""string""},{""name"":""myNull"",""type"":""null""}]}");
+		private int _myInt;
+		private long _myLong;
+		private bool _myBool;
+		private double _myDouble;
+		private float _myFloat;
+		private byte[] _myBytes;
+		private string _myString;
+		private object _myNull;
+		public virtual Schema Schema
+		{
+			get
+			{
+				return Simple._SCHEMA;
+			}
+		}
+		public int myInt
+		{
+			get
+			{
+				return this._myInt;
+			}
+			set
+			{
+				this._myInt = value;
+			}
+		}
+		public long myLong
+		{
+			get
+			{
+				return this._myLong;
+			}
+			set
+			{
+				this._myLong = value;
+			}
+		}
+		public bool myBool
+		{
+			get
+			{
+				return this._myBool;
+			}
+			set
+			{
+				this._myBool = value;
+			}
+		}
+		public double myDouble
+		{
+			get
+			{
+				return this._myDouble;
+			}
+			set
+			{
+				this._myDouble = value;
+			}
+		}
+		public float myFloat
+		{
+			get
+			{
+				return this._myFloat;
+			}
+			set
+			{
+				this._myFloat = value;
+			}
+		}
+		public byte[] myBytes
+		{
+			get
+			{
+				return this._myBytes;
+			}
+			set
+			{
+				this._myBytes = value;
+			}
+		}
+		public string myString
+		{
+			get
+			{
+				return this._myString;
+			}
+			set
+			{
+				this._myString = value;
+			}
+		}
+		public object myNull
+		{
+			get
+			{
+				return this._myNull;
+			}
+			set
+			{
+				this._myNull = value;
+			}
+		}
+		public virtual object Get(int fieldPos)
+		{
+			switch (fieldPos)
+			{
+			case 0: return this.myInt;
+			case 1: return this.myLong;
+			case 2: return this.myBool;
+			case 3: return this.myDouble;
+			case 4: return this.myFloat;
+			case 5: return this.myBytes;
+			case 6: return this.myString;
+			case 7: return this.myNull;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+			};
+		}
+		public virtual void Put(int fieldPos, object fieldValue)
+		{
+			switch (fieldPos)
+			{
+			case 0: this.myInt = (System.Int32)fieldValue; break;
+			case 1: this.myLong = (System.Int64)fieldValue; break;
+			case 2: this.myBool = (System.Boolean)fieldValue; break;
+			case 3: this.myDouble = (System.Double)fieldValue; break;
+			case 4: this.myFloat = (System.Single)fieldValue; break;
+			case 5: this.myBytes = (System.Byte[])fieldValue; break;
+			case 6: this.myString = (System.String)fieldValue; break;
+			case 7: this.myNull = (System.Object)fieldValue; break;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+			};
+		}
+	}
+}
diff --git a/lang/csharp/src/apache/perf/com/foo/Wide.cs b/lang/csharp/src/apache/perf/com/foo/Wide.cs
new file mode 100644
index 0000000..e92f8e6
--- /dev/null
+++ b/lang/csharp/src/apache/perf/com/foo/Wide.cs
@@ -0,0 +1,545 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace com.foo
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class Wide : ISpecificRecord
+	{
+		public static Schema _SCHEMA = Avro.Schema.Parse(@"{""type"":""record"",""name"":""Wide"",""namespace"":""com.foo"",""fields"":[{""name"":""myInt"",""type"":""int""},{""name"":""myLong"",""type"":""long""},{""name"":""myBool"",""type"":""boolean""},{""name"":""myDouble"",""type"":""double""},{""name"":""myFloat"",""type"":""float""},{""name"":""myBytes"",""type"":""bytes""},{""name"":""myString"",""type"":""string""},{""name"":""myA"",""type"":{""type"":""record"",""name"":""A"",""name [...]
+		private int _myInt;
+		private long _myLong;
+		private bool _myBool;
+		private double _myDouble;
+		private float _myFloat;
+		private byte[] _myBytes;
+		private string _myString;
+		private com.foo.A _myA;
+		private com.foo.MyEnum _myE;
+		private int _myInt2;
+		private long _myLong2;
+		private bool _myBool2;
+		private double _myDouble2;
+		private float _myFloat2;
+		private byte[] _myBytes2;
+		private string _myString2;
+		private com.foo.A _myA2;
+		private com.foo.MyEnum _myE2;
+		private int _myInt3;
+		private long _myLong3;
+		private bool _myBool3;
+		private double _myDouble3;
+		private float _myFloat3;
+		private byte[] _myBytes3;
+		private string _myString3;
+		private com.foo.A _myA3;
+		private com.foo.MyEnum _myE3;
+		private int _myInt4;
+		private long _myLong4;
+		private bool _myBool4;
+		private double _myDouble4;
+		private float _myFloat4;
+		private byte[] _myBytes4;
+		private string _myString4;
+		private com.foo.A _myA4;
+		private com.foo.MyEnum _myE4;
+		public virtual Schema Schema
+		{
+			get
+			{
+				return Wide._SCHEMA;
+			}
+		}
+		public int myInt
+		{
+			get
+			{
+				return this._myInt;
+			}
+			set
+			{
+				this._myInt = value;
+			}
+		}
+		public long myLong
+		{
+			get
+			{
+				return this._myLong;
+			}
+			set
+			{
+				this._myLong = value;
+			}
+		}
+		public bool myBool
+		{
+			get
+			{
+				return this._myBool;
+			}
+			set
+			{
+				this._myBool = value;
+			}
+		}
+		public double myDouble
+		{
+			get
+			{
+				return this._myDouble;
+			}
+			set
+			{
+				this._myDouble = value;
+			}
+		}
+		public float myFloat
+		{
+			get
+			{
+				return this._myFloat;
+			}
+			set
+			{
+				this._myFloat = value;
+			}
+		}
+		public byte[] myBytes
+		{
+			get
+			{
+				return this._myBytes;
+			}
+			set
+			{
+				this._myBytes = value;
+			}
+		}
+		public string myString
+		{
+			get
+			{
+				return this._myString;
+			}
+			set
+			{
+				this._myString = value;
+			}
+		}
+		public com.foo.A myA
+		{
+			get
+			{
+				return this._myA;
+			}
+			set
+			{
+				this._myA = value;
+			}
+		}
+		public com.foo.MyEnum myE
+		{
+			get
+			{
+				return this._myE;
+			}
+			set
+			{
+				this._myE = value;
+			}
+		}
+		public int myInt2
+		{
+			get
+			{
+				return this._myInt2;
+			}
+			set
+			{
+				this._myInt2 = value;
+			}
+		}
+		public long myLong2
+		{
+			get
+			{
+				return this._myLong2;
+			}
+			set
+			{
+				this._myLong2 = value;
+			}
+		}
+		public bool myBool2
+		{
+			get
+			{
+				return this._myBool2;
+			}
+			set
+			{
+				this._myBool2 = value;
+			}
+		}
+		public double myDouble2
+		{
+			get
+			{
+				return this._myDouble2;
+			}
+			set
+			{
+				this._myDouble2 = value;
+			}
+		}
+		public float myFloat2
+		{
+			get
+			{
+				return this._myFloat2;
+			}
+			set
+			{
+				this._myFloat2 = value;
+			}
+		}
+		public byte[] myBytes2
+		{
+			get
+			{
+				return this._myBytes2;
+			}
+			set
+			{
+				this._myBytes2 = value;
+			}
+		}
+		public string myString2
+		{
+			get
+			{
+				return this._myString2;
+			}
+			set
+			{
+				this._myString2 = value;
+			}
+		}
+		public com.foo.A myA2
+		{
+			get
+			{
+				return this._myA2;
+			}
+			set
+			{
+				this._myA2 = value;
+			}
+		}
+		public com.foo.MyEnum myE2
+		{
+			get
+			{
+				return this._myE2;
+			}
+			set
+			{
+				this._myE2 = value;
+			}
+		}
+		public int myInt3
+		{
+			get
+			{
+				return this._myInt3;
+			}
+			set
+			{
+				this._myInt3 = value;
+			}
+		}
+		public long myLong3
+		{
+			get
+			{
+				return this._myLong3;
+			}
+			set
+			{
+				this._myLong3 = value;
+			}
+		}
+		public bool myBool3
+		{
+			get
+			{
+				return this._myBool3;
+			}
+			set
+			{
+				this._myBool3 = value;
+			}
+		}
+		public double myDouble3
+		{
+			get
+			{
+				return this._myDouble3;
+			}
+			set
+			{
+				this._myDouble3 = value;
+			}
+		}
+		public float myFloat3
+		{
+			get
+			{
+				return this._myFloat3;
+			}
+			set
+			{
+				this._myFloat3 = value;
+			}
+		}
+		public byte[] myBytes3
+		{
+			get
+			{
+				return this._myBytes3;
+			}
+			set
+			{
+				this._myBytes3 = value;
+			}
+		}
+		public string myString3
+		{
+			get
+			{
+				return this._myString3;
+			}
+			set
+			{
+				this._myString3 = value;
+			}
+		}
+		public com.foo.A myA3
+		{
+			get
+			{
+				return this._myA3;
+			}
+			set
+			{
+				this._myA3 = value;
+			}
+		}
+		public com.foo.MyEnum myE3
+		{
+			get
+			{
+				return this._myE3;
+			}
+			set
+			{
+				this._myE3 = value;
+			}
+		}
+		public int myInt4
+		{
+			get
+			{
+				return this._myInt4;
+			}
+			set
+			{
+				this._myInt4 = value;
+			}
+		}
+		public long myLong4
+		{
+			get
+			{
+				return this._myLong4;
+			}
+			set
+			{
+				this._myLong4 = value;
+			}
+		}
+		public bool myBool4
+		{
+			get
+			{
+				return this._myBool4;
+			}
+			set
+			{
+				this._myBool4 = value;
+			}
+		}
+		public double myDouble4
+		{
+			get
+			{
+				return this._myDouble4;
+			}
+			set
+			{
+				this._myDouble4 = value;
+			}
+		}
+		public float myFloat4
+		{
+			get
+			{
+				return this._myFloat4;
+			}
+			set
+			{
+				this._myFloat4 = value;
+			}
+		}
+		public byte[] myBytes4
+		{
+			get
+			{
+				return this._myBytes4;
+			}
+			set
+			{
+				this._myBytes4 = value;
+			}
+		}
+		public string myString4
+		{
+			get
+			{
+				return this._myString4;
+			}
+			set
+			{
+				this._myString4 = value;
+			}
+		}
+		public com.foo.A myA4
+		{
+			get
+			{
+				return this._myA4;
+			}
+			set
+			{
+				this._myA4 = value;
+			}
+		}
+		public com.foo.MyEnum myE4
+		{
+			get
+			{
+				return this._myE4;
+			}
+			set
+			{
+				this._myE4 = value;
+			}
+		}
+		public virtual object Get(int fieldPos)
+		{
+			switch (fieldPos)
+			{
+			case 0: return this.myInt;
+			case 1: return this.myLong;
+			case 2: return this.myBool;
+			case 3: return this.myDouble;
+			case 4: return this.myFloat;
+			case 5: return this.myBytes;
+			case 6: return this.myString;
+			case 7: return this.myA;
+			case 8: return this.myE;
+			case 9: return this.myInt2;
+			case 10: return this.myLong2;
+			case 11: return this.myBool2;
+			case 12: return this.myDouble2;
+			case 13: return this.myFloat2;
+			case 14: return this.myBytes2;
+			case 15: return this.myString2;
+			case 16: return this.myA2;
+			case 17: return this.myE2;
+			case 18: return this.myInt3;
+			case 19: return this.myLong3;
+			case 20: return this.myBool3;
+			case 21: return this.myDouble3;
+			case 22: return this.myFloat3;
+			case 23: return this.myBytes3;
+			case 24: return this.myString3;
+			case 25: return this.myA3;
+			case 26: return this.myE3;
+			case 27: return this.myInt4;
+			case 28: return this.myLong4;
+			case 29: return this.myBool4;
+			case 30: return this.myDouble4;
+			case 31: return this.myFloat4;
+			case 32: return this.myBytes4;
+			case 33: return this.myString4;
+			case 34: return this.myA4;
+			case 35: return this.myE4;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+			};
+		}
+		public virtual void Put(int fieldPos, object fieldValue)
+		{
+			switch (fieldPos)
+			{
+			case 0: this.myInt = (System.Int32)fieldValue; break;
+			case 1: this.myLong = (System.Int64)fieldValue; break;
+			case 2: this.myBool = (System.Boolean)fieldValue; break;
+			case 3: this.myDouble = (System.Double)fieldValue; break;
+			case 4: this.myFloat = (System.Single)fieldValue; break;
+			case 5: this.myBytes = (System.Byte[])fieldValue; break;
+			case 6: this.myString = (System.String)fieldValue; break;
+			case 7: this.myA = (com.foo.A)fieldValue; break;
+			case 8: this.myE = (com.foo.MyEnum)fieldValue; break;
+			case 9: this.myInt2 = (System.Int32)fieldValue; break;
+			case 10: this.myLong2 = (System.Int64)fieldValue; break;
+			case 11: this.myBool2 = (System.Boolean)fieldValue; break;
+			case 12: this.myDouble2 = (System.Double)fieldValue; break;
+			case 13: this.myFloat2 = (System.Single)fieldValue; break;
+			case 14: this.myBytes2 = (System.Byte[])fieldValue; break;
+			case 15: this.myString2 = (System.String)fieldValue; break;
+			case 16: this.myA2 = (com.foo.A)fieldValue; break;
+			case 17: this.myE2 = (com.foo.MyEnum)fieldValue; break;
+			case 18: this.myInt3 = (System.Int32)fieldValue; break;
+			case 19: this.myLong3 = (System.Int64)fieldValue; break;
+			case 20: this.myBool3 = (System.Boolean)fieldValue; break;
+			case 21: this.myDouble3 = (System.Double)fieldValue; break;
+			case 22: this.myFloat3 = (System.Single)fieldValue; break;
+			case 23: this.myBytes3 = (System.Byte[])fieldValue; break;
+			case 24: this.myString3 = (System.String)fieldValue; break;
+			case 25: this.myA3 = (com.foo.A)fieldValue; break;
+			case 26: this.myE3 = (com.foo.MyEnum)fieldValue; break;
+			case 27: this.myInt4 = (System.Int32)fieldValue; break;
+			case 28: this.myLong4 = (System.Int64)fieldValue; break;
+			case 29: this.myBool4 = (System.Boolean)fieldValue; break;
+			case 30: this.myDouble4 = (System.Double)fieldValue; break;
+			case 31: this.myFloat4 = (System.Single)fieldValue; break;
+			case 32: this.myBytes4 = (System.Byte[])fieldValue; break;
+			case 33: this.myString4 = (System.String)fieldValue; break;
+			case 34: this.myA4 = (com.foo.A)fieldValue; break;
+			case 35: this.myE4 = (com.foo.MyEnum)fieldValue; break;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+			};
+		}
+	}
+}
diff --git a/lang/csharp/src/apache/perf/com/foo/newRec.cs b/lang/csharp/src/apache/perf/com/foo/newRec.cs
new file mode 100644
index 0000000..18d4e05
--- /dev/null
+++ b/lang/csharp/src/apache/perf/com/foo/newRec.cs
@@ -0,0 +1,56 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace com.foo
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class newRec : ISpecificRecord
+	{
+		public static Schema _SCHEMA = Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"newRec\",\"namespace\":\"com.foo\",\"fields\":[{\"name\":\"f1\",\"ty" +
+				"pe\":\"long\"}]}");
+		private long _f1;
+		public virtual Schema Schema
+		{
+			get
+			{
+				return newRec._SCHEMA;
+			}
+		}
+		public long f1
+		{
+			get
+			{
+				return this._f1;
+			}
+			set
+			{
+				this._f1 = value;
+			}
+		}
+		public virtual object Get(int fieldPos)
+		{
+			switch (fieldPos)
+			{
+			case 0: return this.f1;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+			};
+		}
+		public virtual void Put(int fieldPos, object fieldValue)
+		{
+			switch (fieldPos)
+			{
+			case 0: this.f1 = (System.Int64)fieldValue; break;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+			};
+		}
+	}
+}
diff --git a/lang/csharp/src/apache/perf/schema.avsc b/lang/csharp/src/apache/perf/schema.avsc
new file mode 100644
index 0000000..5606294
--- /dev/null
+++ b/lang/csharp/src/apache/perf/schema.avsc
@@ -0,0 +1,122 @@
+{
+  "protocol" : "MyProtocol",
+  "namespace" : "com.foo",
+  "types" : [ 
+   {
+	"type" : "record",
+	"name" : "A",
+	"fields" : [ { "name" : "f1", "type" : "long" } ]
+   },
+   {
+	"type" : "enum",
+	"name" : "MyEnum",
+	"symbols" : [ "A", "B", "C" ]
+   },
+   {
+   "type": "fixed", 
+   "size": 16, 
+   "name": "MyFixed"
+   },
+   {
+	"type" : "record",
+	"name" : "Simple",
+	"fields" : 
+			[ 	
+				{ "name" : "myInt", "type" : "int" },
+				{ "name" : "myLong", "type" : "long" },
+				{ "name" : "myBool", "type" : "boolean" },
+				{ "name" : "myDouble", "type" : "double" },
+				{ "name" : "myFloat", "type" : "float" },
+				{ "name" : "myBytes", "type" : "bytes" },
+				{ "name" : "myString", "type" : "string" },
+				{ "name" : "myNull", "type" : "null" }
+ 			]
+   },   
+   {
+	"type" : "record",
+	"name" : "Complex",
+	"fields" : 
+			[ 	
+				{ "name" : "myUInt", "type" : [ "int", "null" ] },
+				{ "name" : "myULong", "type" : [ "long", "null" ] },
+				{ "name" : "myUBool", "type" : [ "boolean", "null" ] },
+				{ "name" : "myUDouble", "type" : [ "double", "null" ] },
+				{ "name" : "myUFloat", "type" : [ "float", "null" ] },
+				{ "name" : "myUBytes", "type" : [ "bytes", "null" ] },
+				{ "name" : "myUString", "type" : [ "string", "null" ] },
+				
+				{ "name" : "myInt", "type" : "int" },
+				{ "name" : "myLong", "type" : "long" },
+				{ "name" : "myBool", "type" : "boolean" },
+				{ "name" : "myDouble", "type" : "double" },
+				{ "name" : "myFloat", "type" : "float" },
+				{ "name" : "myBytes", "type" : "bytes" },
+				{ "name" : "myString", "type" : "string" },
+				{ "name" : "myNull", "type" : "null" },
+
+				{ "name" : "myFixed", "type" : "MyFixed" },								
+				{ "name" : "myA", "type" : "A" },
+				{ "name" : "myE", "type" : "MyEnum" },
+				{ "name" : "myArray", "type" : { "type" : "array", "items" : "bytes" } },
+				{ "name" : "myArray2", "type" : { "type" : "array", "items" : { "type" : "record", "name" : "newRec", "fields" : [ { "name" : "f1", "type" : "long"} ] } } },
+				{ "name" : "myMap", "type" : { "type" : "map", "values" : "string" } },
+				{ "name" : "myMap2", "type" : { "type" : "map", "values" : "newRec" } },
+				{ "name" : "myObject", "type" : [ "MyEnum", "A", "null" ] },
+                { "name" : "myArray3", "type" : { "type" : "array", "items" : { "type" : "array", "items" : [ "double", "string", "null" ] } } }
+			]
+   },
+   {
+	"type" : "record",
+	"name" : "Wide",
+	"fields" : 
+			[ 	
+				{ "name" : "myInt", "type" : "int" },
+				{ "name" : "myLong", "type" : "long" },
+				{ "name" : "myBool", "type" : "boolean" },
+				{ "name" : "myDouble", "type" : "double" },
+				{ "name" : "myFloat", "type" : "float" },
+				{ "name" : "myBytes", "type" : "bytes" },
+				{ "name" : "myString", "type" : "string" },
+				{ "name" : "myA", "type" : "A" },
+				{ "name" : "myE", "type" : "MyEnum" },
+				{ "name" : "myInt2", "type" : "int" },
+				{ "name" : "myLong2", "type" : "long" },
+				{ "name" : "myBool2", "type" : "boolean" },
+				{ "name" : "myDouble2", "type" : "double" },
+				{ "name" : "myFloat2", "type" : "float" },
+				{ "name" : "myBytes2", "type" : "bytes" },
+				{ "name" : "myString2", "type" : "string" },
+				{ "name" : "myA2", "type" : "A" },
+				{ "name" : "myE2", "type" : "MyEnum" },
+				{ "name" : "myInt3", "type" : "int" },
+				{ "name" : "myLong3", "type" : "long" },
+				{ "name" : "myBool3", "type" : "boolean" },
+				{ "name" : "myDouble3", "type" : "double" },
+				{ "name" : "myFloat3", "type" : "float" },
+				{ "name" : "myBytes3", "type" : "bytes" },
+				{ "name" : "myString3", "type" : "string" },
+				{ "name" : "myA3", "type" : "A" },
+				{ "name" : "myE3", "type" : "MyEnum" },
+				{ "name" : "myInt4", "type" : "int" },
+				{ "name" : "myLong4", "type" : "long" },
+				{ "name" : "myBool4", "type" : "boolean" },
+				{ "name" : "myDouble4", "type" : "double" },
+				{ "name" : "myFloat4", "type" : "float" },
+				{ "name" : "myBytes4", "type" : "bytes" },
+				{ "name" : "myString4", "type" : "string" },
+				{ "name" : "myA4", "type" : "A" },
+				{ "name" : "myE4", "type" : "MyEnum" }
+ 			]
+   },
+   {
+	"type" : "record",
+	"name" : "Narrow",
+	"fields" : 
+			[ 	
+				{ "name" : "myInt", "type" : "int" },
+				{ "name" : "myLong", "type" : "long" },
+				{ "name" : "myString", "type" : "string" }
+ 			]
+   }
+   ]
+}
\ No newline at end of file
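Despite the .avsc extension, schema.avsc is an Avro protocol (a "protocol"
name plus a "types" array rather than a single top-level schema), and it is
the source of every class under com\foo; the generated headers credit
avrogen.exe 0.9.0.0. Assuming the avrogen tool from this tree, the classes can
be regenerated with something like:

    avrogen -p schema.avsc .

where -p selects protocol input and the final argument names the output
directory.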
diff --git a/lang/csharp/src/apache/test/Avro.test.csproj b/lang/csharp/src/apache/test/Avro.test.csproj
new file mode 100644
index 0000000..841f4f2
--- /dev/null
+++ b/lang/csharp/src/apache/test/Avro.test.csproj
@@ -0,0 +1,168 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <PropertyGroup>
+    <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+    <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+    <ProductVersion>8.0.30703</ProductVersion>
+    <SchemaVersion>2.0</SchemaVersion>
+    <ProjectGuid>{911D56AB-587B-4E5F-B5EA-D47D8A46F1FA}</ProjectGuid>
+    <OutputType>Library</OutputType>
+    <AppDesignerFolder>Properties</AppDesignerFolder>
+    <RootNamespace>Avro.test</RootNamespace>
+    <AssemblyName>Avro.test</AssemblyName>
+    <TargetFrameworkVersion Condition=" '$(TargetFrameworkVersion)' == '' ">v3.5</TargetFrameworkVersion>
+    <FileAlignment>512</FileAlignment>
+    <FileUpgradeFlags>
+    </FileUpgradeFlags>
+    <OldToolsVersion>3.5</OldToolsVersion>
+    <UpgradeBackupLocation />
+    <PublishUrl>publish\</PublishUrl>
+    <Install>true</Install>
+    <InstallFrom>Disk</InstallFrom>
+    <UpdateEnabled>false</UpdateEnabled>
+    <UpdateMode>Foreground</UpdateMode>
+    <UpdateInterval>7</UpdateInterval>
+    <UpdateIntervalUnits>Days</UpdateIntervalUnits>
+    <UpdatePeriodically>false</UpdatePeriodically>
+    <UpdateRequired>false</UpdateRequired>
+    <MapFileExtensions>true</MapFileExtensions>
+    <ApplicationRevision>0</ApplicationRevision>
+    <ApplicationVersion>1.0.0.%2a</ApplicationVersion>
+    <IsWebBootstrapper>false</IsWebBootstrapper>
+    <UseApplicationTrust>false</UseApplicationTrust>
+    <BootstrapperEnabled>true</BootstrapperEnabled>
+    <TargetFrameworkProfile />
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+    <DebugSymbols>true</DebugSymbols>
+    <DebugType>full</DebugType>
+    <Optimize>false</Optimize>
+    <OutputPath>..\..\..\build\test\Debug\</OutputPath>
+    <DefineConstants>DEBUG;TRACE</DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+    <GenerateSerializationAssemblies>Off</GenerateSerializationAssemblies>
+  </PropertyGroup>
+  <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+    <DebugType>none</DebugType>
+    <Optimize>true</Optimize>
+    <OutputPath>..\..\..\build\test\Release\</OutputPath>
+    <DefineConstants>
+    </DefineConstants>
+    <ErrorReport>prompt</ErrorReport>
+    <WarningLevel>4</WarningLevel>
+    <GenerateSerializationAssemblies>Off</GenerateSerializationAssemblies>
+  </PropertyGroup>
+  <ItemGroup>
+    <Reference Include="Castle.Core, Version=3.2.0.0, Culture=neutral, PublicKeyToken=407dd0808d44fbdc, processorArchitecture=MSIL">
+      <SpecificVersion>False</SpecificVersion>
+      <HintPath>..\..\..\..\..\lang\csharp\lib\main\Castle.Core.dll</HintPath>
+    </Reference>
+    <Reference Include="nunit.framework, Version=2.5.7.10213, Culture=neutral, PublicKeyToken=96d09a1eb7f44a77, processorArchitecture=MSIL">
+      <SpecificVersion>False</SpecificVersion>
+      <HintPath>..\..\..\lib\test\nunit.framework.dll</HintPath>
+    </Reference>
+    <Reference Include="System" />
+    <Reference Include="System.Core" />
+    <Reference Include="System.Xml.Linq" />
+    <Reference Include="System.Data.DataSetExtensions" />
+    <Reference Include="System.Data" />
+    <Reference Include="System.Xml" />
+  </ItemGroup>
+  <ItemGroup>
+    <Compile Include="CodGen\CodeGenTest.cs" />
+    <Compile Include="File\FileTests.cs" />
+    <Compile Include="Generic\GenericTests.cs" />
+    <Compile Include="IO\BinaryCodecTests.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\All.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\AllCallback.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\AllEnum.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\AllTestRecord.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\AllTestRecordPartial.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\FixedTest.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\Kind.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\Mail.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\MailCallback.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\MD5.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\Message.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\Simple.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\SimpleCallback.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\TestError.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\TestRecord.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\TestRecordExtensions.cs" />
+    <Compile Include="Ipc\GeneratedFiles\org\apache\avro\test\TestRecordWithUnion.cs" />
+    <Compile Include="Ipc\HttpClientServerTest.cs" />
+    <Compile Include="Ipc\LocalTransceiverTest.cs" />
+    <Compile Include="Ipc\MailResponder.cs" />
+    <Compile Include="Ipc\SerializationTest.cs" />
+    <Compile Include="Ipc\SocketServerConcurrentExecutionTest.cs" />
+    <Compile Include="Ipc\SocketServerTest.cs" />
+    <Compile Include="Ipc\SocketServerWithCallbacksTest.cs" />
+    <Compile Include="Ipc\SocketTransceiverWhenServerStopsTest.cs" />
+    <Compile Include="Properties\AssemblyInfo.cs" />
+    <Compile Include="Protocol\ProtocolTest.cs" />
+    <Compile Include="Schema\AliasTest.cs" />
+    <Compile Include="Schema\SchemaNormalizationTests.cs" />
+    <Compile Include="Schema\SchemaTests.cs" />
+    <Compile Include="Specific\SpecificTests.cs" />
+    <Compile Include="Utils\CaseFinder.cs" />
+    <Compile Include="Utils\CaseFinderTests.cs" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="..\ipc\Avro.ipc.csproj">
+      <Project>{3b05043a-dc6c-49b6-85bf-9ab055d0b414}</Project>
+      <Name>Avro.ipc</Name>
+    </ProjectReference>
+    <ProjectReference Include="..\main\Avro.main.csproj">
+      <Project>{A0A5CA3C-F58C-4D07-98B0-2C7B62AB20F0}</Project>
+      <Name>Avro.main</Name>
+    </ProjectReference>
+  </ItemGroup>
+  <ItemGroup>
+    <BootstrapperPackage Include=".NETFramework,Version=v3.5">
+      <Visible>False</Visible>
+      <ProductName>Microsoft .NET Framework 4 %28x86 and x64%29</ProductName>
+      <Install>true</Install>
+    </BootstrapperPackage>
+    <BootstrapperPackage Include="Microsoft.Net.Framework.3.5.SP1">
+      <Visible>False</Visible>
+      <ProductName>.NET Framework 3.5 SP1</ProductName>
+      <Install>false</Install>
+    </BootstrapperPackage>
+    <BootstrapperPackage Include="Microsoft.Windows.Installer.3.1">
+      <Visible>False</Visible>
+      <ProductName>Windows Installer 3.1</ProductName>
+      <Install>true</Install>
+    </BootstrapperPackage>
+  </ItemGroup>
+  <ItemGroup>
+    <EmbeddedResource Include="..\..\..\..\..\share\test\schemas\mail.avpr">
+      <Link>Ipc\mail.avpr</Link>
+    </EmbeddedResource>
+  </ItemGroup>
+  <ItemGroup />
+  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+  <!-- To modify your build process, add your task inside one of the targets below and uncomment it. 
+       Other similar extension points exist, see Microsoft.Common.targets.
+  <Target Name="BeforeBuild">
+  </Target>
+  <Target Name="AfterBuild">
+  </Target>
+  -->
+</Project>
\ No newline at end of file
diff --git a/lang/csharp/src/apache/test/CodGen/CodeGenTest.cs b/lang/csharp/src/apache/test/CodGen/CodeGenTest.cs
new file mode 100644
index 0000000..7e1d9e5
--- /dev/null
+++ b/lang/csharp/src/apache/test/CodGen/CodeGenTest.cs
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.CodeDom.Compiler;
+using Microsoft.CSharp;
+using NUnit.Framework;
+using Avro.Specific;
+
+namespace Avro.Test
+{
+    [TestFixture]
+    class CodeGenTest
+    {
+        [TestCase(@"{
+""type"" : ""record"",
+""name"" : ""ClassKeywords"",
+""namespace"" : ""com.base"",
+""fields"" : 
+		[ 	
+			{ ""name"" : ""int"", ""type"" : ""int"" },
+			{ ""name"" : ""base"", ""type"" : ""long"" },
+			{ ""name"" : ""event"", ""type"" : ""boolean"" },
+			{ ""name"" : ""foreach"", ""type"" : ""double"" },
+			{ ""name"" : ""bool"", ""type"" : ""float"" },
+			{ ""name"" : ""internal"", ""type"" : ""bytes"" },
+			{ ""name"" : ""while"", ""type"" : ""string"" },
+			{ ""name"" : ""return"", ""type"" : ""null"" },
+			{ ""name"" : ""enum"", ""type"" : { ""type"" : ""enum"", ""name"" : ""class"", ""symbols"" : [ ""A"", ""B"" ] } },
+			{ ""name"" : ""string"", ""type"" : { ""type"": ""fixed"", ""size"": 16, ""name"": ""static"" } }
+		]
+}
+", new object[] {"com.base.ClassKeywords", typeof(int), typeof(long), typeof(bool), typeof(double), typeof(float), typeof(byte[]), typeof(string),typeof(object),"com.base.class", "com.base.static"})]
+        [TestCase(@"{
+""type"" : ""record"",
+""name"" : ""SchemaObject"",
+""namespace"" : ""schematest"",
+""fields"" : 
+	[ 	
+		{ ""name"" : ""myobject"", ""type"" : 
+			[ 
+				""null"", 
+				{""type"" : ""array"", ""items"" : [ ""null"", 
+											{ ""type"" : ""enum"", ""name"" : ""MyEnum"", ""symbols"" : [ ""A"", ""B"" ] },
+											{ ""type"": ""fixed"", ""size"": 16, ""name"": ""MyFixed"" } 
+											]
+				}
+			]
+		}
+	]
+}
+", new object[] { "schematest.SchemaObject", typeof(IList<object>) })]
+        public static void TestCodeGen(string str, object[] result)
+        {
+            Schema schema = Schema.Parse(str);
+
+            CompilerResults compres = GenerateSchema(schema);
+
+            // instantiate object
+            ISpecificRecord rec = compres.CompiledAssembly.CreateInstance((string)result[0]) as ISpecificRecord;
+            Assert.IsNotNull(rec);
+
+            // test the type of each field
+            for (int i = 1; i < result.Length; ++i)
+            {
+                object field = rec.Get(i - 1);
+                Type stype;
+                if (result[i].GetType() == typeof(string))
+                {
+                    object obj = compres.CompiledAssembly.CreateInstance((string)result[i]);
+                    Assert.IsNotNull(obj);
+                    stype = obj.GetType();
+                }
+                else
+                    stype = (Type)result[i];
+                if (!stype.IsValueType)
+                    Assert.IsNull(field);   // can't test reference type, it will be null
+                else
+                    Assert.AreEqual(stype, field.GetType());
+            }
+        }
+
+
+        private static CompilerResults GenerateSchema(Schema schema)
+        {
+            var codegen = new CodeGen();
+            codegen.AddSchema(schema);
+            return GenerateAssembly(codegen);
+        }
+
+        private static CompilerResults GenerateAssembly(CodeGen schema)
+        {
+            var compileUnit = schema.GenerateCode();
+
+            var comparam = new CompilerParameters(new string[] { "mscorlib.dll" });
+            comparam.ReferencedAssemblies.Add("System.dll");
+            comparam.ReferencedAssemblies.Add("Avro.dll");
+            comparam.GenerateInMemory = true;
+            var ccp = new CSharpCodeProvider();
+            var units = new[] { compileUnit };
+            var compres = ccp.CompileAssemblyFromDom(comparam, units);
+            if (compres.Errors.Count > 0)
+            {
+                for (int i = 0; i < compres.Errors.Count; i++)
+                    Console.WriteLine(compres.Errors[i]);
+            }
+            Assert.AreEqual(0, compres.Errors.Count);
+            return compres;
+        }
+    }
+}
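
The test above drives Avro's C# code generator entirely in memory: CodeGen.GenerateCode()
produces a CodeDOM compile unit that is then compiled on the fly with CSharpCodeProvider.
The same compile unit can just as easily be rendered to C# source on disk. A minimal
sketch, assuming only the CodeGen calls exercised above plus the standard
System.CodeDom.Compiler API (the schema and output file name are illustrative):

    using System.CodeDom.Compiler;
    using System.IO;
    using Avro;
    using Microsoft.CSharp;

    class CodeGenToDisk
    {
        static void Main()
        {
            // Hypothetical demo schema; any record schema works the same way.
            Schema schema = Schema.Parse(
                "{\"type\":\"record\",\"name\":\"Person\",\"namespace\":\"demo\"," +
                "\"fields\":[{\"name\":\"name\",\"type\":\"string\"}]}");

            var codegen = new CodeGen();
            codegen.AddSchema(schema);

            // Render the generated CodeDOM tree as C# source instead of compiling it.
            var unit = codegen.GenerateCode();
            using (var writer = new StreamWriter("Person.cs"))
            {
                new CSharpCodeProvider().GenerateCodeFromCompileUnit(
                    unit, writer, new CodeGeneratorOptions { BracingStyle = "C" });
            }
        }
    }
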
diff --git a/lang/csharp/src/apache/test/File/FileTests.cs b/lang/csharp/src/apache/test/File/FileTests.cs
new file mode 100644
index 0000000..c565c56
--- /dev/null
+++ b/lang/csharp/src/apache/test/File/FileTests.cs
@@ -0,0 +1,781 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections;
+using System.IO;
+using System.Collections.Generic;
+using Avro.Generic;
+using NUnit.Framework;
+using Avro.Specific;
+using System.Reflection;
+using Avro.File;
+using System.Linq;
+
+namespace Avro.Test.File
+{
+    [TestFixture]
+    public class FileTests
+    {
+        const string specificSchema  = "{\"type\":\"record\",\"name\":\"Foo\",\"namespace\":\"Avro.Test.File\",\"fields\":"
+                                     + "[{\"name\":\"name\",\"type\":[\"null\",\"string\"]},{\"name\":\"age\",\"type\":\"int\"}]}";
+
+        /// <summary>
+        /// Reading & writing of specific (custom) record objects
+        /// </summary>
+        /// <param name="schemaStr"></param>
+        /// <param name="recs"></param>
+        /// <param name="codecType"></param>
+        [TestCase(specificSchema, new object[] { new object[] { "John", 23 } }, Codec.Type.Deflate)]
+        [TestCase(specificSchema, new object[] { new object[] { "Jane", 23 } }, Codec.Type.Deflate)]
+        [TestCase(specificSchema, new object[] { new object[] { "John", 23 }, new object[] { "Jane", 99 }, new object[] { "Jeff", 88 } }, Codec.Type.Deflate)]
+        [TestCase(specificSchema, new object[] { new object[] {"John", 23}, new object[] { "Jane", 99 }, new object[] { "Jeff", 88 },
+                                                 new object[] {"James", 13}, new object[] { "June", 109 }, new object[] { "Lloyd", 18 },
+                                                 new object[] {"Jenny", 3}, new object[] { "Bob", 9 }, new object[] { null, 48 }}, Codec.Type.Deflate)]
+        [TestCase(specificSchema, new object[] { new object[] { "John", 23 } }, Codec.Type.Null)]
+        [TestCase(specificSchema, new object[] { new object[] { "Jane", 23 } }, Codec.Type.Null)]
+        [TestCase(specificSchema, new object[] { new object[] { "John", 23 }, new object[] { "Jane", 99 }, new object[] { "Jeff", 88 } }, Codec.Type.Null)]
+        [TestCase(specificSchema, new object[] { new object[] {"John", 23}, new object[] { "Jane", 99 }, new object[] { "Jeff", 88 },
+                                                 new object[] {"James", 13}, new object[] { "June", 109 }, new object[] { "Lloyd", 18 },
+                                                 new object[] {"Jamie", 53}, new object[] { "Fanessa", 101 }, new object[] { "Kan", 18 },
+                                                 new object[] {"Janey", 33}, new object[] { "Deva", 102 }, new object[] { "Gavin", 28 },
+                                                 new object[] {"Lochy", 113}, new object[] { "Nickie", 10 }, new object[] { "Liddia", 38 },
+                                                 new object[] {"Fred", 3}, new object[] { "April", 17 }, new object[] { "Novac", 48 },
+                                                 new object[] {"Idan", 33}, new object[] { "Jolyon", 76 }, new object[] { "Ant", 68 },
+                                                 new object[] {"Ernie", 43}, new object[] { "Joel", 99 }, new object[] { "Dan", 78 },
+                                                 new object[] {"Dave", 103}, new object[] { "Hillary", 79 }, new object[] { "Grant", 88 },
+                                                 new object[] {"JJ", 14}, new object[] { "Bill", 90 }, new object[] { "Larry", 4 },
+                                                 new object[] {"Jenny", 3}, new object[] { "Bob", 9 }, new object[] { null, 48 }}, Codec.Type.Null)]
+        public void TestSpecificData(string schemaStr, object[] recs, Codec.Type codecType)
+        {
+            // create and write out
+            IList<Foo> records = MakeRecords(recs);
+
+            foreach(var rwFactory in SpecificOptions<Foo>())
+            {
+                MemoryStream dataFileOutputStream = new MemoryStream();
+                Schema schema = Schema.Parse(schemaStr);
+                using (IFileWriter<Foo> dataFileWriter = rwFactory.CreateWriter(dataFileOutputStream, schema, Codec.CreateCodec(codecType)))
+                {
+                    foreach (Foo rec in records)
+                        dataFileWriter.Append(rec);
+                }
+
+                MemoryStream dataFileInputStream = new MemoryStream(dataFileOutputStream.ToArray());
+
+                // read back
+                IList<Foo> readRecords = new List<Foo>();
+
+                using (IFileReader<Foo> reader = rwFactory.CreateReader(dataFileInputStream, null))
+                {
+                    foreach (Foo rec in reader.NextEntries)
+                        readRecords.Add(rec);
+                }
+
+                // compare objects via Json
+                Assert.AreEqual(records.Count, readRecords.Count);
+                for (int i = 0; i < records.Count; i++)
+                {
+                    Assert.AreEqual(records[i].ToString(), readRecords[i].ToString());
+                }
+            }
+        }
+
+        /// <summary>
+        /// Reading & writing of generic record objects
+        /// </summary>
+        /// <param name="schemaStr"></param>
+        /// <param name="value"></param>
+        /// <param name="codecType"></param>
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"null\"}]}",
+            new object[] { "f1", null }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"boolean\"}]}",
+            new object[] { "f1", true }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"boolean\"}]}",
+            new object[] { "f1", false }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"int\"}]}",
+            new object[] { "f1", 101 }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"long\"}]}",
+            new object[] { "f1", 101L }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"float\"}]}",
+            new object[] { "f1", 101.78f }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"double\"}]}",
+            new object[] { "f1", 101.78 }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"string\"}]}",
+            new object[] { "f1", "A" }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"bytes\"}]}",
+            new object[] { "f1", new byte[] { 0, 1 } }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":{\"type\": \"enum\", \"name\": \"e\", \"symbols\":[\"s1\", \"s2\"]}}]}",
+            new object[] { "f1", "s2" }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":{\"type\": \"array\", \"items\": \"int\"}}]}",
+            new object[] { "f1", new object[] { 0, 1, 101 } }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":{\"type\": \"array\", \"items\": \"int\"}}]}",
+            new object[] { "f1", new int[] { 0, 1, 101 } }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":[\"int\", \"long\"]}]}",
+            new object[] { "f1", 100 }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":[\"int\", \"long\"]}]}",
+            new object[] { "f1", 100L }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":{\"type\": \"fixed\", \"name\": \"f\", \"size\": 2}}]}",
+            new object[] { "f1", new byte[] { 1, 2 } }, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"null\"}]}",
+            new object[] { "f1", null }, Codec.Type.Null)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"boolean\"}]}",
+            new object[] { "f1", true }, Codec.Type.Null)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"boolean\"}]}",
+            new object[] { "f1", false }, Codec.Type.Null)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"int\"}]}",
+            new object[] { "f1", 101 }, Codec.Type.Null)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"long\"}]}",
+            new object[] { "f1", 101L }, Codec.Type.Null)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"float\"}]}",
+            new object[] { "f1", 101.78f }, Codec.Type.Null)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"double\"}]}",
+            new object[] { "f1", 101.78 }, Codec.Type.Null)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"string\"}]}",
+            new object[] { "f1", "A" }, Codec.Type.Null)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"bytes\"}]}",
+            new object[] { "f1", new byte[] { 0, 1 } }, Codec.Type.Null)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":{\"type\": \"enum\", \"name\": \"e\", \"symbols\":[\"s1\", \"s2\"]}}]}",
+            new object[] { "f1", "s2" }, Codec.Type.Null)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":{\"type\": \"array\", \"items\": \"int\"}}]}",
+            new object[] { "f1", new object[] { 0, 1, 101 } }, Codec.Type.Null)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":{\"type\": \"array\", \"items\": \"int\"}}]}",
+            new object[] { "f1", new int[] { 0, 1, 101 } }, Codec.Type.Null)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":[\"int\", \"long\"]}]}",
+            new object[] { "f1", 100 }, Codec.Type.Null)]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":[\"int\", \"long\"]}]}",
+            new object[] { "f1", 100L }, Codec.Type.Null)]
+        public void TestGenericData(string schemaStr, object[] value, Codec.Type codecType)
+        {
+            foreach(var rwFactory in GenericOptions<GenericRecord>())
+            {
+                // Create and write out
+                MemoryStream dataFileOutputStream = new MemoryStream();
+                using (var writer = rwFactory.CreateWriter(dataFileOutputStream, Schema.Parse(schemaStr), Codec.CreateCodec(codecType)))
+                {
+                    writer.Append(mkRecord(value, Schema.Parse(schemaStr) as RecordSchema));
+                }
+
+                MemoryStream dataFileInputStream = new MemoryStream(dataFileOutputStream.ToArray());
+
+                // Read back
+                IList<GenericRecord> readFoos = new List<GenericRecord>();
+                using (IFileReader<GenericRecord> reader = rwFactory.CreateReader(dataFileInputStream,null))
+                {
+                    foreach (GenericRecord foo in reader.NextEntries)
+                    {
+                        readFoos.Add(foo);
+                    }
+                }
+
+                Assert.IsTrue((readFoos != null && readFoos.Count > 0),
+                               string.Format(@"Generic object: {0} did not serialise/deserialise correctly", readFoos));
+            }
+        }
+
+        /// <summary>
+        /// Reading & writing of primitive objects
+        /// </summary>
+        /// <param name="schemaStr"></param>
+        /// <param name="value"></param>
+        /// <param name="codecType"></param>
+        [TestCase("{\"type\": \"boolean\"}", true, Codec.Type.Deflate)]
+        [TestCase("{\"type\": \"boolean\"}", false, Codec.Type.Deflate)]
+        [TestCase("{\"type\": \"boolean\"}", true, Codec.Type.Null)]
+        [TestCase("{\"type\": \"boolean\"}", false, Codec.Type.Null)]
+        [TestCase("[\"boolean\", \"null\"]", null, Codec.Type.Deflate)]
+        [TestCase("[\"boolean\", \"null\"]", true, Codec.Type.Deflate)]
+        [TestCase("[\"int\", \"long\"]", 100, Codec.Type.Deflate)]
+        [TestCase("[\"int\", \"long\"]", 100L, Codec.Type.Deflate)]
+        [TestCase("[\"float\", \"double\"]", 100.75, Codec.Type.Deflate)]
+        [TestCase("[\"float\", \"double\"]", 23.67f, Codec.Type.Deflate)]
+        [TestCase("[{\"type\": \"array\", \"items\": \"float\"}, \"double\"]", new float[] { 23.67f, 22.78f }, Codec.Type.Deflate)]
+        [TestCase("[{\"type\": \"array\", \"items\": \"float\"}, \"double\"]", 100.89, Codec.Type.Deflate)]
+        [TestCase("[{\"type\": \"array\", \"items\": \"string\"}, \"string\"]", "a", Codec.Type.Deflate)]
+        [TestCase("[{\"type\": \"array\", \"items\": \"string\"}, \"string\"]", new string[] { "a", "b" }, Codec.Type.Deflate)]
+        [TestCase("[{\"type\": \"array\", \"items\": \"bytes\"}, \"bytes\"]", new byte[] { 1, 2, 3 }, Codec.Type.Deflate)]
+        [TestCase("[{\"type\": \"array\", \"items\": \"bytes\"}, \"bytes\"]", new object[] { new byte[] { 1, 2 }, new byte[] { 3, 4 } }, Codec.Type.Deflate)]
+        [TestCase("[{\"type\": \"enum\", \"symbols\": [\"s1\", \"s2\"], \"name\": \"e\"}, \"string\"]", "h1", Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"string\"}", "John", Codec.Type.Deflate)]
+        [TestCase("{\"type\":[\"null\",\"string\"]}", null, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"int\"}", 1, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"boolean\"}", false, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"long\"}", 12312313123L, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"float\"}", 0.0f, Codec.Type.Deflate)]
+        [TestCase("{\"type\":\"double\"}", 0.0, Codec.Type.Deflate)]
+        [TestCase("[\"boolean\", \"null\"]", null, Codec.Type.Null)]
+        [TestCase("[\"boolean\", \"null\"]", true, Codec.Type.Null)]
+        [TestCase("[\"int\", \"long\"]", 100, Codec.Type.Null)]
+        [TestCase("[\"int\", \"long\"]", 100L, Codec.Type.Null)]
+        [TestCase("[\"float\", \"double\"]", 100.75, Codec.Type.Null)]
+        [TestCase("[\"float\", \"double\"]", 23.67f, Codec.Type.Null)]
+        [TestCase("[{\"type\": \"array\", \"items\": \"float\"}, \"double\"]", new float[] { 23.67f, 22.78f }, Codec.Type.Null)]
+        [TestCase("[{\"type\": \"array\", \"items\": \"float\"}, \"double\"]", 100.89, Codec.Type.Null)]
+        [TestCase("[{\"type\": \"array\", \"items\": \"string\"}, \"string\"]", "a", Codec.Type.Null)]
+        [TestCase("[{\"type\": \"array\", \"items\": \"string\"}, \"string\"]", new string[] { "a", "b" }, Codec.Type.Null)]
+        [TestCase("[{\"type\": \"array\", \"items\": \"bytes\"}, \"bytes\"]", new byte[] { 1, 2, 3 }, Codec.Type.Null)]
+        [TestCase("[{\"type\": \"array\", \"items\": \"bytes\"}, \"bytes\"]", new object[] { new byte[] { 1, 2 }, new byte[] { 3, 4 } }, Codec.Type.Null)]
+        [TestCase("[{\"type\": \"enum\", \"symbols\": [\"s1\", \"s2\"], \"name\": \"e\"}, \"string\"]", "h1", Codec.Type.Null)]
+        [TestCase("{\"type\":\"string\"}", "John", Codec.Type.Null)]
+        [TestCase("{\"type\":[\"null\",\"string\"]}", null, Codec.Type.Null)]
+        [TestCase("{\"type\":\"int\"}", 1, Codec.Type.Null)]
+        [TestCase("{\"type\":\"boolean\"}", false, Codec.Type.Null)]
+        [TestCase("{\"type\":\"long\"}", 12312313123L, Codec.Type.Null)]
+        [TestCase("{\"type\":\"float\"}", 0.0f, Codec.Type.Null)]
+        [TestCase("{\"type\":\"double\"}", 0.0, Codec.Type.Null)]
+        [TestCase("{\"type\":\"string\"}", "test", Codec.Type.Null)]
+        public void TestPrimitiveData(string schemaStr, object value, Codec.Type codecType)
+        {
+            foreach(var rwFactory in GenericOptions<object>())
+            {
+                MemoryStream dataFileOutputStream = new MemoryStream();
+                using (var writer = rwFactory.CreateWriter(dataFileOutputStream, Schema.Parse(schemaStr), Codec.CreateCodec(codecType)))
+                {
+                    writer.Append(value);
+                }
+
+                MemoryStream dataFileInputStream = new MemoryStream(dataFileOutputStream.ToArray());
+
+                Assert.IsTrue(CheckPrimitive(dataFileInputStream, value, rwFactory.CreateReader),
+                              string.Format("Error reading generic data for object: {0}", value));
+            }
+        }
+
+        /// <summary>
+        /// Reading & writing of header metadata
+        /// </summary>
+        /// <param name="key">metadata key</param>
+        /// <param name="value">metadata value</param>
+        /// <param name="codecType">codec to use</param>
+        /// <param name="useTypeGetter">read the value back via the typed getter</param>
+        [TestCase("bytesTest", new byte[] { 1, 2, 3 }, Codec.Type.Null, true)]
+        [TestCase("stringTest", "testVal", Codec.Type.Null, true)]
+        [TestCase("longTest", 12312313123L, Codec.Type.Null, true)]
+        [TestCase("bytesTest", new byte[] { 1 }, Codec.Type.Null, true)]
+        [TestCase("longTest", -1211212L, Codec.Type.Null, true)]
+        [TestCase("bytesTest", new byte[] { 1, 2, 3 }, Codec.Type.Deflate, true)]
+        [TestCase("stringTest", "testVal", Codec.Type.Deflate, true)]
+        [TestCase("longTest", 12312313123L, Codec.Type.Deflate, true)]
+        [TestCase("bytesTest", new byte[] { 1 }, Codec.Type.Deflate, true)]
+        [TestCase("longTest", -21211212L, Codec.Type.Deflate, true)]
+        [TestCase("bytesTest", new byte[] { 1, 2, 3 }, Codec.Type.Null, false)]
+        [TestCase("stringTest", "testVal", Codec.Type.Null, false)]
+        [TestCase("longTest", 12312313123L, Codec.Type.Null, false)]
+        [TestCase("bytesTest", new byte[] { 1 }, Codec.Type.Null, false)]
+        [TestCase("longTest", -1211212L, Codec.Type.Null, false)]
+        [TestCase("bytesTest", new byte[] { 1, 2, 3 }, Codec.Type.Deflate, false)]
+        [TestCase("stringTest", "testVal", Codec.Type.Deflate, false)]
+        [TestCase("longTest", 12312313123L, Codec.Type.Deflate, false)]
+        [TestCase("bytesTest", new byte[] { 1 }, Codec.Type.Deflate, false)]
+        [TestCase("longTest", -21211212L, Codec.Type.Deflate, false)]
+        public void TestMetaData(string key, object value, Codec.Type codecType, bool useTypeGetter)
+        {
+            // create and write out
+            object[] obj = new object[] { new object[] { "John", 23 } };
+            IList<Foo> records = MakeRecords(obj);
+            MemoryStream dataFileOutputStream = new MemoryStream();
+
+            Schema schema = Schema.Parse(specificSchema);
+            DatumWriter<Foo> writer = new SpecificWriter<Foo>(schema);
+            using (IFileWriter<Foo> dataFileWriter = DataFileWriter<Foo>.OpenWriter(writer, dataFileOutputStream, Codec.CreateCodec(codecType)))
+            {
+                SetMetaData(dataFileWriter, key, value);
+                foreach (Foo rec in records)
+                    dataFileWriter.Append(rec);
+            }
+
+            MemoryStream dataFileInputStream = new MemoryStream(dataFileOutputStream.ToArray());
+
+            // read back
+            using (IFileReader<Foo> reader = DataFileReader<Foo>.OpenReader(dataFileInputStream))
+            {
+                Assert.IsTrue(ValidateMetaData(reader, key, value, useTypeGetter),
+                              string.Format("Error validating header meta data for key: {0}, expected value: {1}", key, value));
+            }
+        }
+
+        /// <summary>
+        /// Partial reading of a file / stream from a position in the stream
+        /// </summary>
+        /// <param name="schemaStr">writer schema</param>
+        /// <param name="codecType">codec to use</param>
+        /// <param name="position">position to sync the reader to</param>
+        /// <param name="expectedRecords">records expected after the sync point</param>
+        [TestCase(specificSchema, Codec.Type.Null, 1, 330)] // 330
+        [TestCase(specificSchema, Codec.Type.Null, 135, 330)] // 330
+        [TestCase(specificSchema, Codec.Type.Null, 194, 264)] // 264
+        [TestCase(specificSchema, Codec.Type.Null, 235, 264)] // 264
+        [TestCase(specificSchema, Codec.Type.Null, 888, 165)] // 165
+        [TestCase(specificSchema, Codec.Type.Null, 0, 330)] // 330
+        public void TestPartialRead(string schemaStr, Codec.Type codecType, int position, int expectedRecords)
+        {
+            // create and write out
+            IList<Foo> records = MakeRecords(GetTestFooObject());
+
+            MemoryStream dataFileOutputStream = new MemoryStream();
+
+            Schema schema = Schema.Parse(schemaStr);
+            DatumWriter<Foo> writer = new SpecificWriter<Foo>(schema);
+            using (IFileWriter<Foo> dataFileWriter = DataFileWriter<Foo>.OpenWriter(writer, dataFileOutputStream, Codec.CreateCodec(codecType)))
+            {
+                for (int i = 0; i < 10; ++i)
+                {
+                    foreach (Foo foo in records)
+                    {
+                        dataFileWriter.Append(foo);
+                    }
+
+                    // write out block
+                    if (i == 1 || i == 4)
+                    {
+                        dataFileWriter.Sync();
+                    }
+                }
+            }
+
+            MemoryStream dataFileInputStream = new MemoryStream(dataFileOutputStream.ToArray());
+
+            // read back
+            IList<Foo> readRecords = new List<Foo>();
+            using (IFileReader<Foo> reader = DataFileReader<Foo>.OpenReader(dataFileInputStream))
+            {
+                // move to next block from position
+                reader.Sync(position);
+
+                // read records from synced position 
+                foreach (Foo rec in reader.NextEntries)
+                    readRecords.Add(rec);
+            }
+
+            Assert.IsTrue((readRecords != null && readRecords.Count == expectedRecords),
+                          string.Format("Error performing partial read after position: {0}", position));
+        }
+
+        /// <summary>
+        /// Partial reading of a file / stream from a position in the stream;
+        /// tests reading from every sync boundary.
+        /// </summary>
+        /// <param name="schemaStr">writer schema</param>
+        /// <param name="codecType">codec to use</param>
+        [TestCase(specificSchema, Codec.Type.Null)]
+        [TestCase(specificSchema, Codec.Type.Deflate)]
+        public void TestPartialReadAll(string schemaStr, Codec.Type codecType)
+        {
+            // create and write out
+            IList<Foo> records = MakeRecords(GetTestFooObject());
+
+            MemoryStream dataFileOutputStream = new MemoryStream();
+
+            Schema schema = Schema.Parse(schemaStr);
+            DatumWriter<Foo> writer = new SpecificWriter<Foo>(schema);
+            int numRecords = 0;
+            List<SyncLog> syncLogs = new List<SyncLog>();
+            using (IFileWriter<Foo> dataFileWriter = DataFileWriter<Foo>.OpenWriter(writer, dataFileOutputStream, Codec.CreateCodec(codecType)))
+            {
+                dataFileWriter.Flush();
+                syncLogs.Add(new SyncLog { Position = dataFileOutputStream.Position - DataFileConstants.SyncSize + 1, RemainingRecords = numRecords });
+                long lastPosition = dataFileOutputStream.Position;
+                for (int i = 0; i < 10; ++i)
+                {
+                    foreach (Foo foo in records)
+                    {
+                        dataFileWriter.Append(foo);
+                        if (dataFileOutputStream.Position != lastPosition)
+                        {
+                            syncLogs.Add(new SyncLog { Position = dataFileOutputStream.Position - DataFileConstants.SyncSize + 1, RemainingRecords = numRecords });
+                            lastPosition = dataFileOutputStream.Position;
+                        }
+                        numRecords++;
+                    }
+
+                    // write out block
+                    if (i == 1 || i == 4)
+                    {
+                        dataFileWriter.Sync();
+                        syncLogs.Add(new SyncLog { Position = dataFileOutputStream.Position - DataFileConstants.SyncSize + 1, RemainingRecords = numRecords });
+                        lastPosition = dataFileOutputStream.Position;
+                    }
+                }
+                dataFileWriter.Flush();
+                syncLogs.Add(new SyncLog { Position = dataFileOutputStream.Position, RemainingRecords = numRecords });
+            }
+
+            MemoryStream dataFileInputStream = new MemoryStream(dataFileOutputStream.ToArray());
+
+            // read back
+            using (IFileReader<Foo> reader = DataFileReader<Foo>.OpenReader(dataFileInputStream))
+            {
+                long curPosition = 0;
+
+                foreach (SyncLog syncLog in syncLogs)
+                {
+                    int expectedRecords = numRecords - syncLog.RemainingRecords;
+                    long nextSyncPoint = syncLog.Position;
+                    AssertNumRecordsFromPosition( reader, curPosition, expectedRecords );
+                    AssertNumRecordsFromPosition( reader, nextSyncPoint - 1, expectedRecords );
+                    curPosition = nextSyncPoint;
+                }
+            }
+        }
+
+        class SyncLog
+        {
+            public long Position { get; set; }
+            public int RemainingRecords { get; set; }
+        }
+
+        private static void AssertNumRecordsFromPosition( IFileReader<Foo> reader, long position, int expectedRecords )
+        {
+            // move to next block from position
+            reader.Sync( position );
+
+            int readRecords = 0;
+            // read records from synced position 
+            foreach( Foo rec in reader.NextEntries )
+            {
+                readRecords++;
+            }
+            Assert.AreEqual( expectedRecords, readRecords, "didn't read expected records from position " + position );
+        }
+
+        /// <summary>
+        /// Reading all sync positions and verifying them with subsequent seek positions
+        /// </summary>
+        [TestCase(specificSchema, Codec.Type.Null, 2, 0, 1)]
+        [TestCase(specificSchema, Codec.Type.Null, 10, 1, 4)]
+        [TestCase(specificSchema, Codec.Type.Null, 200, 111, 15)]
+        [TestCase(specificSchema, Codec.Type.Null, 1000, 588, 998)]
+        [TestCase(specificSchema, Codec.Type.Deflate, 2, 0, 1)]
+        [TestCase(specificSchema, Codec.Type.Deflate, 10, 1, 4)]
+        [TestCase(specificSchema, Codec.Type.Deflate, 200, 111, 15)]
+        [TestCase(specificSchema, Codec.Type.Deflate, 1000, 588, 998)]
+        public void TestSyncAndSeekPositions(string schemaStr, Codec.Type codecType, int iterations, int firstSyncPosition, int secondSyncPosition)
+        {
+            // create and write out
+            IList<Foo> records = MakeRecords(GetTestFooObject());
+            MemoryStream dataFileOutputStream = new MemoryStream();
+
+            Schema schema = Schema.Parse(schemaStr);
+            DatumWriter<Foo> writer = new SpecificWriter<Foo>(schema);
+            using (IFileWriter<Foo> dataFileWriter = DataFileWriter<Foo>.OpenWriter(writer, dataFileOutputStream, Codec.CreateCodec(codecType)))
+            {
+                for (int i = 0; i < iterations; ++i)
+                {
+                    foreach (Foo foo in records)
+                        dataFileWriter.Append(foo);
+
+                    // write out block
+                    if (i == firstSyncPosition || i == secondSyncPosition)
+                        dataFileWriter.Sync();
+                }
+            }
+
+            MemoryStream dataFileInputStream = new MemoryStream(dataFileOutputStream.ToArray());
+
+            // read syncs 
+            IList<long> syncs = new List<long>();
+            using (IFileReader<Foo> reader = DataFileReader<Foo>.OpenReader(dataFileInputStream))
+            {
+                long previousSync = -1;
+
+                foreach (Foo foo in reader.NextEntries)
+                {
+                    if (reader.PreviousSync() != previousSync 
+                     && reader.Tell() != reader.PreviousSync()) // EOF
+                    {
+                        previousSync = reader.PreviousSync();
+                        syncs.Add(previousSync);
+                    }
+                }
+
+                // verify syncs with seeks
+                reader.Sync(0); // first sync
+                Assert.AreEqual(reader.PreviousSync(), syncs[0],
+                              string.Format("Error syncing reader to position: {0}", syncs[0]));
+
+                foreach (long sync in syncs) // the rest 
+                {
+                    reader.Seek(sync);
+                    Foo foo = reader.Next();
+                    Assert.IsNotNull(foo, string.Format("Error seeking to sync position: {0}", sync));
+                }
+            }
+        }
+
+        [TestCase]
+        public void TestDifferentReaderSchema()
+        {
+            RecordSchema writerSchema = Schema.Parse( "{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"string\"},"
+                + "{\"name\":\"f2\", \"type\":\"string\"}]}" ) as RecordSchema;
+            Schema readerSchema = Schema.Parse( "{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"string\"},"
+                +"{\"name\":\"f3\", \"type\":\"string\", \"default\":\"test\"}]}" );
+
+            foreach(var rwFactory in GenericOptions<GenericRecord>())
+            {
+                MemoryStream dataFileOutputStream = new MemoryStream();
+
+                using (var writer = rwFactory.CreateWriter(dataFileOutputStream, writerSchema, Codec.CreateCodec(Codec.Type.Null)))
+                {
+                    writer.Append(mkRecord(new [] { "f1", "f1val", "f2", "f2val" }, writerSchema));
+                }
+
+                MemoryStream dataFileInputStream = new MemoryStream(dataFileOutputStream.ToArray());
+
+                using (IFileReader<GenericRecord> reader = rwFactory.CreateReader(dataFileInputStream, readerSchema))
+                {
+                    GenericRecord result = reader.Next();
+                    object ignore;
+                    Assert.IsFalse(result.TryGetValue("f2", out ignore));
+                    Assert.AreEqual("f1val", result["f1"]);
+                    Assert.AreEqual("test", result["f3"]);
+                }
+            }
+        }
+
+        private bool CheckPrimitive<T>(Stream input, T value, ReaderWriterPair<T>.ReaderFactory createReader)
+        {
+            // dispose the reader deterministically, as the other tests do
+            using (IFileReader<T> reader = createReader(input, null))
+            {
+                IList<T> readFoos = new List<T>();
+                foreach (T foo in reader.NextEntries)
+                {
+                    readFoos.Add(foo);
+                }
+                return (readFoos.Count > 0 &&
+                    CheckPrimitiveEquals(value, readFoos[0]));
+            }
+        }
+
+        private bool CheckPrimitiveEquals(object first, object second)
+        {
+            if (first is IList)
+            {
+                var firstList = (IList) first;
+                var secondList = (IList) second;
+                if (firstList.Count != secondList.Count)
+                {
+                    return false;
+                }
+                for (int i = 0; i < firstList.Count; i++)
+                {
+                    if (!CheckPrimitiveEquals(firstList[i], secondList[i]))
+                    {
+                        return false;
+                    }
+                }
+                return true;
+            }
+            // guard: calling Equals on a null reference would throw
+            if (first == null || second == null)
+                return first == null && second == null;
+            return first.Equals(second);
+        }
+
+        private static GenericRecord mkRecord(object[] kv, RecordSchema s)
+        {
+            GenericRecord input = new GenericRecord(s);
+            for (int i = 0; i < kv.Length; i += 2)
+            {
+                string fieldName = (string)kv[i];
+                object fieldValue = kv[i + 1];
+                Schema inner = s[fieldName].Schema;
+                if (inner is EnumSchema)
+                {
+                    GenericEnum ge = new GenericEnum(inner as EnumSchema, (string)fieldValue);
+                    fieldValue = ge;
+                }
+                else if (inner is FixedSchema)
+                {
+                    GenericFixed gf = new GenericFixed(inner as FixedSchema);
+                    gf.Value = (byte[])fieldValue;
+                    fieldValue = gf;
+                }
+                input.Add(fieldName, fieldValue);
+            }
+            return input;
+        }
+
+        private IList<Foo> MakeRecords(object[] recs)
+        {
+            IList<Foo> records = new List<Foo>();
+
+            foreach (object obj in recs)
+            {
+                object[] inner = (object[])obj;
+                Foo newFoo = new Foo { name = (String)inner[0], age = (int)inner[1] };
+                records.Add(newFoo);
+            }
+            return records;
+        }
+
+        private bool ValidateMetaData<T>(IFileReader<T> reader, 
+                                         string key, 
+                                         object expected, 
+                                         bool useTypeGetter)
+        {
+            byte[] valueBytes = reader.GetMeta(key);
+
+            if (expected is byte[])
+            {
+                byte[] expectedBytes = (byte[])expected;
+                return expectedBytes.SequenceEqual(valueBytes);
+            }
+            else if (expected is long)  
+            {
+                if (useTypeGetter)
+                    return ((long)expected == reader.GetMetaLong(key));
+                else
+                    return ((long)expected == long.Parse(System.Text.Encoding.UTF8.GetString(valueBytes)));
+            }
+            else
+            {
+                if (useTypeGetter)
+                    return ((string)expected == reader.GetMetaString(key));
+                else
+                    return ((string)expected == System.Text.Encoding.UTF8.GetString(valueBytes));
+            }
+        }
+
+        private void SetMetaData(IFileWriter<Foo> dataFileWriter, string key, object value)
+        {
+            if (value is byte[])
+                dataFileWriter.SetMeta(key, (byte[])value);
+            else if (value is long)
+                dataFileWriter.SetMeta(key, (long)value);
+            else
+                dataFileWriter.SetMeta(key, (string)value);
+        }
+
+        private object[] GetTestFooObject()
+        {
+            return new object[] { new object[] {"John", 23}, new object[] { "Jane", 99 }, new object[] { "Jeff", 88 },
+                                  new object[] {"James", 13}, new object[] { "June", 109 }, new object[] { "Lloyd", 18 },
+                                  new object[] {"Jamie", 53}, new object[] { "Fanessa", 101 }, new object[] { "Kan", 18 },
+                                  new object[] {"Janey", 33}, new object[] { "Deva", 102 }, new object[] { "Gavin", 28 },
+                                  new object[] {"Lochy", 113}, new object[] { "Nickie", 10 }, new object[] { "Liddia", 38 },
+                                  new object[] {"Fred", 3}, new object[] { "April", 17 }, new object[] { "Novac", 48 },
+                                  new object[] {"Idan", 33}, new object[] { "Jolyon", 76 }, new object[] { "Ant", 68 },
+                                  new object[] {"Ernie", 43}, new object[] { "Joel", 99 }, new object[] { "Dan", 78 },
+                                  new object[] {"Dave", 103}, new object[] { "Hillary", 79 }, new object[] { "Grant", 88 },
+                                  new object[] {"JJ", 14}, new object[] { "Bill", 90 }, new object[] { "Larry", 4 },
+                                  new object[] {"Jenny", 3}, new object[] { "Bob", 9 }, new object[] { null, 48 }};
+        }
+
+        private static IEnumerable<ReaderWriterPair<T>> SpecificOptions<T>()
+        {
+            yield return new ReaderWriterPair<T>
+                             {
+                                 CreateReader = (stream, schema) => DataFileReader<T>.OpenReader(stream, schema),
+                                 CreateWriter = (stream, schema, codec) => 
+                                     DataFileWriter<T>.OpenWriter(new SpecificWriter<T>(schema), stream, codec )
+                             };
+
+            yield return new ReaderWriterPair<T>
+                             {
+                                 CreateReader = (stream, schema) => DataFileReader<T>.OpenReader(stream, schema,
+                                     (ws, rs) => new SpecificDatumReader<T>(ws, rs)),
+                                 CreateWriter = (stream, schema, codec) => 
+                                     DataFileWriter<T>.OpenWriter(new SpecificDatumWriter<T>(schema), stream, codec )
+                             };
+        }
+
+        private static IEnumerable<ReaderWriterPair<T>> GenericOptions<T>()
+        {
+            yield return new ReaderWriterPair<T>
+                             {
+                                 CreateReader = (stream, schema) => DataFileReader<T>.OpenReader(stream, schema),
+                                 CreateWriter = (stream, schema, codec) => 
+                                     DataFileWriter<T>.OpenWriter(new GenericWriter<T>(schema), stream, codec )
+                             };
+
+            yield return new ReaderWriterPair<T>
+                             {
+                                 CreateReader = (stream, schema) => DataFileReader<T>.OpenReader(stream, schema,
+                                     (ws, rs) => new GenericDatumReader<T>(ws, rs)),
+                                 CreateWriter = (stream, schema, codec) => 
+                                     DataFileWriter<T>.OpenWriter(new GenericDatumWriter<T>(schema), stream, codec )
+                             };
+        }
+
+        class ReaderWriterPair<T>
+        {
+            public delegate IFileWriter<T> WriterFactory(Stream stream, Schema writerSchema, Codec codec);
+            public delegate IFileReader<T> ReaderFactory(Stream stream, Schema readerSchema);
+
+            public WriterFactory CreateWriter { get; set; }
+            public ReaderFactory CreateReader { get; set; }
+        }
+    }
+
+    // Foo (Specific)
+    public class Foo : ISpecificRecord
+    {
+        public string name { get; set; }
+        public int age { get; set; }
+
+        public Schema Schema
+        {
+            get
+            {
+                return Schema.Parse("{\"type\":\"record\",\"name\":\"Foo\",\"namespace\":\"Avro.Test.File\"," + 
+                                    "\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"age\",\"type\":\"int\"}]}");
+            }
+        }
+
+        public object Get(int fieldPos)
+        {
+            switch (fieldPos)
+            {
+                case 0:
+                    return name;
+                case 1:
+                    return age;
+            }
+            throw new Exception("Invalid index " + fieldPos);
+        }
+
+        public void Put(int fieldPos, object fieldValue)
+        {
+            switch (fieldPos)
+            {
+                case 0:
+                    name = (string)fieldValue;
+                    break;
+                case 1:
+                    age = (int) fieldValue;
+                    break;
+                default:
+                    throw new Exception("Invalid index " + fieldPos);
+            }
+        }
+
+        public override string ToString()
+        {
+            return string.Format("Name: {0}, Age: {1}", name, age);
+        }
+    }
+}
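
The FileTests fixture above exercises the object container API end to end. For reference,
here is a minimal round trip over the Foo record defined above, using only calls the
tests themselves make (SpecificWriter, DataFileWriter.OpenWriter, DataFileReader.OpenReader,
NextEntries); the record values are illustrative:

    using System;
    using System.IO;
    using Avro;
    using Avro.File;
    using Avro.Specific;
    using Avro.Test.File;   // Foo, as defined in the tests above

    class FooRoundTrip
    {
        static void Main()
        {
            var output = new MemoryStream();
            Schema schema = new Foo().Schema;

            // Write two records into a deflate-compressed Avro container.
            var datumWriter = new SpecificWriter<Foo>(schema);
            using (IFileWriter<Foo> writer = DataFileWriter<Foo>.OpenWriter(
                datumWriter, output, Codec.CreateCodec(Codec.Type.Deflate)))
            {
                writer.Append(new Foo { name = "John", age = 23 });
                writer.Append(new Foo { name = "Jane", age = 99 });
            }

            // Read the records back from a fresh stream over the same bytes.
            using (IFileReader<Foo> reader = DataFileReader<Foo>.OpenReader(
                new MemoryStream(output.ToArray())))
            {
                foreach (Foo foo in reader.NextEntries)
                    Console.WriteLine(foo);   // e.g. "Name: John, Age: 23"
            }
        }
    }
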
diff --git a/lang/csharp/src/apache/test/Generic/GenericTests.cs b/lang/csharp/src/apache/test/Generic/GenericTests.cs
new file mode 100644
index 0000000..d08e7ba
--- /dev/null
+++ b/lang/csharp/src/apache/test/Generic/GenericTests.cs
@@ -0,0 +1,493 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.IO;
+using System.Linq;
+using Avro.IO;
+using System.Collections.Generic;
+using Avro.Generic;
+using NUnit.Framework;
+
+namespace Avro.Test.Generic
+{
+    class GenericTests
+    {
+        private static void test<T>(string s, T value)
+        {
+            Stream ms;
+            Schema ws;
+            serialize(s, value, out ms, out ws);
+            Schema rs = Schema.Parse(s);
+            T output = deserialize<T>(ms, ws, rs);
+            Assert.AreEqual(value, output);
+        }
+
+        [TestCase("{\"type\": \"boolean\"}", true)]
+        [TestCase("{\"type\": \"boolean\"}", false)]
+
+        // Union
+        [TestCase("[\"boolean\", \"null\"]", null)]
+        [TestCase("[\"boolean\", \"null\"]", true)]
+        [TestCase("[\"int\", \"long\"]", 100)]
+        [TestCase("[\"int\", \"long\"]", 100L)]
+        [TestCase("[\"float\", \"double\"]", 100.75)]
+        [TestCase("[\"float\", \"double\"]", 23.67f)]
+        [TestCase("[{\"type\": \"array\", \"items\": \"float\"}, \"double\"]", new float[] { 23.67f, 22.78f })]
+        [TestCase("[{\"type\": \"array\", \"items\": \"float\"}, \"double\"]", 100.89)]
+        [TestCase("[{\"type\": \"array\", \"items\": \"string\"}, \"string\"]", "a")]
+        [TestCase("[{\"type\": \"array\", \"items\": \"string\"}, \"string\"]", new string[] { "a", "b" })]
+        [TestCase("[{\"type\": \"array\", \"items\": \"bytes\"}, \"bytes\"]", new byte[] { 1, 2, 3 })]
+        [TestCase("[{\"type\": \"array\", \"items\": \"bytes\"}, \"bytes\"]",
+            new object[] { new byte[] { 1, 2 }, new byte[] { 3, 4 } })]
+        [TestCase("[{\"type\": \"enum\", \"symbols\": [\"s1\", \"s2\"], \"name\": \"e\"}, \"string\"]", "h1")]
+        public void TestPrimitive(string schema, object value)
+        {
+            test(schema, value);
+        }
+
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"null\"}]}",
+            new object[] { "f1", null })]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"boolean\"}]}",
+            new object[] { "f1", true })]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"boolean\"}]}",
+            new object[] { "f1", false })]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"int\"}]}",
+            new object[] { "f1", 101 })]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"long\"}]}",
+            new object[] { "f1", 101L })]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"float\"}]}",
+            new object[] { "f1", 101.78f })]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"double\"}]}",
+            new object[] { "f1", 101.78 })]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"string\"}]}",
+            new object[] { "f1", "A" })]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"bytes\"}]}",
+            new object[] { "f1", new byte[] { 0, 1 } })]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":{\"type\": \"enum\", \"name\": \"e\", \"symbols\":[\"s1\", \"s2\"]}}]}",
+            new object[] { "f1", "s2" })]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":{\"type\": \"array\", \"items\": \"int\"}}]}",
+            new object[] { "f1", new object[] { 0, 1, 101 } })]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":{\"type\": \"array\", \"items\": \"int\"}}]}",
+            new object[] { "f1", new int[] { 0, 1, 101 } })]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":[\"int\", \"long\"]}]}",
+            new object[] { "f1", 100 })]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":[\"int\", \"long\"]}]}",
+            new object[] { "f1", 100L })]
+        [TestCase("{\"type\":\"record\", \"name\":\"n\", \"fields\":" +
+            "[{\"name\":\"f1\", \"type\":{\"type\": \"fixed\", \"name\": \"f\", \"size\": 2}}]}",
+            new object[] { "f1", new byte[] { 1, 2 } })]
+        public void TestRecord(string schema, object[] kv)
+        {
+            test(schema, mkRecord(kv, Schema.Parse(schema) as RecordSchema));
+        }
+
+        [TestCase("{\"type\": \"map\", \"values\": \"string\"}",
+            new object[] { "a", "0", "b", "1", "c", "101" })]
+        public void TestMap(string schema, object[] values)
+        {
+            test(schema, mkMap(values));
+        }
+
+        [TestCase("[{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"string\"}]}, \"string\"]",
+            "{\"type\":\"record\", \"name\":\"n\", \"fields\":[{\"name\":\"f1\", \"type\":\"string\"}]}",
+            new object[] { "f1", "v1" })]
+        public void TestUnion_record(string unionSchema, string recordSchema, object[] value)
+        {
+            test(unionSchema, mkRecord(value, Schema.Parse(recordSchema) as RecordSchema));
+        }
+
+        [TestCase("[{\"type\": \"enum\", \"symbols\": [\"s1\", \"s2\"], \"name\": \"e\"}, \"string\"]",
+            "{\"type\": \"enum\", \"symbols\": [\"s1\", \"s2\"], \"name\": \"e\"}", "s1")]
+        [TestCase("[{\"type\": \"enum\", \"symbols\": [\"s1\", \"s2\"], \"name\": \"e\"}, \"string\"]",
+            "{\"type\": \"enum\", \"symbols\": [\"s1\", \"s2\"], \"name\": \"e\"}", "s2")]
+        [TestCase("[{\"type\": \"enum\", \"symbols\": [\"s1\", \"s2\"], \"name\": \"e\"}, \"string\"]",
+            "{\"type\": \"enum\", \"symbols\": [\"s1\", \"s2\"], \"name\": \"e\"}", "s3",
+            ExpectedException = typeof(AvroException))]
+        public void TestUnion_enum(string unionSchema, string enumSchema, string value)
+        {
+            test(unionSchema, mkEnum(enumSchema, value));
+        }
+
+
+        [TestCase("[{\"type\": \"map\", \"values\": \"int\"}, \"string\"]",
+            "{\"type\": \"map\", \"values\": \"int\"}", new object[] { "a", 1, "b", 2 })]
+        public void TestUnion_map(string unionSchema, string mapSchema, object[] value)
+        {
+            test(unionSchema, mkMap(value));
+        }
+
+        [TestCase("[{\"type\": \"fixed\", \"size\": 2, \"name\": \"f\"}, \"string\"]",
+            "{\"type\": \"fixed\", \"size\": 2, \"name\": \"f\"}", new byte[] { 1, 2 })]
+        [TestCase("[{\"type\": \"fixed\", \"size\": 2, \"name\": \"f\"}, \"string\"]",
+            "{\"type\": \"fixed\", \"size\": 2, \"name\": \"f\"}", new byte[] { 1, 2, 3 },
+            ExpectedException = typeof(AvroException))]
+        [TestCase("[{\"type\": \"fixed\", \"size\": 2, \"name\": \"f\"}, \"string\"]",
+            "{\"type\": \"fixed\", \"size\": 3, \"name\": \"f\"}", new byte[] { 1, 2, 3 },
+            ExpectedException = typeof(AvroException))]
+        public void TestUnion_fixed(string unionSchema, string fixedSchema, byte[] value)
+        {
+            test(unionSchema, mkFixed(fixedSchema, value));
+        }
+
+        public void TestResolution<T, S>(string writerSchema, T actual, string readerSchema, S expected)
+        {
+            Stream ms;
+            Schema ws;
+            serialize<T>(writerSchema, actual, out ms, out ws);
+            Schema rs = Schema.Parse(readerSchema);
+            S output = deserialize<S>(ms, ws, rs);
+            Assert.AreEqual(expected, output);
+        }
+
+        [TestCase("int", 10, "long", 10L)]
+        [TestCase("int", 10, "float", 10.0f)]
+        [TestCase("int", 10, "double", 10.0)]
+        [TestCase("long", 10L, "float", 10.0f)]
+        [TestCase("long", 10L, "double", 10.0)]
+        [TestCase("float", 10.0f, "double", 10.0)]
+        [TestCase("{\"type\":\"array\", \"items\":\"int\"}", new int[] { 10, 20 },
+            "{\"type\":\"array\", \"items\":\"long\"}", new object[] { 10L, 20L })]
+        [TestCase("[\"int\", \"boolean\"]", true, "[\"boolean\", \"double\"]", true)]
+        [TestCase("[\"int\", \"boolean\"]", 10, "[\"boolean\", \"double\"]", 10.0)]
+        [TestCase("[\"int\", \"boolean\"]", 10, "\"int\"", 10)]
+        [TestCase("[\"int\", \"boolean\"]", 10, "\"double\"", 10.0)]
+        [TestCase("\"int\"", 10, "[\"int\", \"boolean\"]", 10)]
+        [TestCase("\"int\"", 10, "[\"long\", \"boolean\"]", 10L)]
+        public void TestResolution_simple(string writerSchema, object actual, string readerSchema, object expected)
+        {
+            TestResolution(writerSchema, actual, readerSchema, expected);
+        }
+
+        [Test]
+        public void TestResolution_intMapToLongMap()
+        {
+            TestResolution("{\"type\":\"map\", \"values\":\"int\"}", mkMap(new object[] { "a", 10, "b", 20 }),
+                "{\"type\":\"map\", \"values\":\"long\"}", mkMap(new object[] { "a", 10L, "b", 20L }));
+        }
+
+        [Test]
+        public void TestResolution_enum()
+        {
+            string ws = "{\"type\":\"enum\", \"symbols\":[\"a\", \"b\"], \"name\":\"e\"}";
+            string rs = "{\"type\":\"enum\", \"symbols\":[\"a\", \"b\"], \"name\":\"e\"}";
+            TestResolution(ws, mkEnum(ws, "a"), rs, mkEnum(rs, "a"));
+        }
+
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"boolean\"},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", true, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f2\",\"type\":\"int\"},{\"name\":\"f1\",\"type\":\"boolean\"}]}",
+            new object[] { "f1", true, "f2", 100 }, Description = "Out of order fields")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"boolean\"},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", true, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"boolean\"},{\"name\":\"f2\",\"type\":\"long\"}]}",
+            new object[] { "f1", true, "f2", 100L }, Description = "Field promotion")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"boolean\"},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", true, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"boolean\"}]}",
+            new object[] { "f1", true }, Description = "Missing fields - 1")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"null\"},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", null, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f2", 100 }, Description = "Missing fields - null")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"boolean\"},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", true, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f2", 100 }, Description = "Missing fields - boolean")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"int\"},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", 1, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f2", 100 }, Description = "Missing fields - int")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"long\"},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", 1L, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f2", 100 }, Description = "Missing fields - long")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"float\"},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", 1.0f, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f2", 100 }, Description = "Missing fields - float")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"double\"},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", 1.0, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f2", 100 }, Description = "Missing fields - double")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"bytes\"},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", new byte[] { 1 , 0 }, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f2", 100 }, Description = "Missing fields - bytes")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"string\"},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", "h", "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f2", 100 }, Description = "Missing fields - string")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", new int[] { 100, 101 }, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f2", 100 }, Description = "Missing fields - array")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":[\"int\", \"null\"]},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", 101, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f2", 100 }, Description = "Missing fields - union")]
+        // TODO: Missing fields - record, enum, map, fixed
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"boolean\"}]}",
+            new object[] { "f1", true },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":\"boolean\"},{\"name\":\"f2\",\"type\":\"string\",\"default\":\"d\"}]}",
+            new object[] { "f1", true, "f2", "d" }, Description = "Default field")]
+        public void TestResolution_record(string ws, object[] actual, string rs, object[] expected)
+        {
+            TestResolution(ws, mkRecord(actual, Schema.Parse(ws) as RecordSchema), rs,
+                mkRecord(expected, Schema.Parse(rs) as RecordSchema));
+        }
+
+        [TestCase("{\"type\":\"map\",\"values\":\"int\"}", new object[] { "a", 100, "b", -202 },
+            "{\"type\":\"map\",\"values\":\"long\"}", new object[] { "a", 100L, "b", -202L })]
+        public void TestResolution_map(string ws, object[] value, string rs, object[] expected)
+        {
+            TestResolution(ws, mkMap(value), rs, mkMap(expected));
+        }
+
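+        // Serializes under the writer schema and attempts to read with an
+        // incompatible reader schema; callers expect an AvroException
+        // (see ExpectedException on the [TestCase] attributes below).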
+        private static void testResolutionMismatch<T>(string writerSchema, T value, string readerSchema)
+        {
+            Stream ms;
+            Schema ws;
+            serialize(writerSchema, value, out ms, out ws);
+            deserialize<object>(ms, ws, Schema.Parse(readerSchema));
+        }
+
+        [TestCase("boolean", true, "null", ExpectedException = typeof(AvroException))]
+        [TestCase("int", 10, "boolean", ExpectedException = typeof(AvroException))]
+        [TestCase("int", 10, "string", ExpectedException = typeof(AvroException))]
+        [TestCase("int", 10, "bytes", ExpectedException = typeof(AvroException))]
+        [TestCase("int", 10, "{\"type\":\"record\",\"name\":\"r\",\"fields\":[{\"name\":\"f\", \"type\":\"int\"}]}",
+            ExpectedException = typeof(AvroException))]
+        [TestCase("int", 10, "{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[\"s\", \"t\"]}", ExpectedException = typeof(AvroException))]
+        [TestCase("int", 10, "{\"type\":\"array\",\"items\":\"int\"}", ExpectedException = typeof(AvroException))]
+        [TestCase("int", 10, "{\"type\":\"map\",\"values\":\"int\"}", ExpectedException = typeof(AvroException))]
+        [TestCase("int", 10, "[\"string\", \"bytes\"]", ExpectedException = typeof(AvroException))]
+        [TestCase("int", 10, "{\"type\":\"fixed\",\"name\":\"f\",\"size\":2}", ExpectedException = typeof(AvroException))]
+        [TestCase("{\"type\":\"array\",\"items\":\"int\"}", new int[] { 10 },
+            "\"boolean\"", ExpectedException = typeof(AvroException))]
+        [TestCase("{\"type\":\"array\",\"items\":\"int\"}", new int[] { 10 },
+            "{\"type\":\"array\",\"items\":\"string\"}", ExpectedException = typeof(AvroException))]
+        [TestCase("[\"int\", \"boolean\"]", 10, "[\"string\", \"bytes\"]", ExpectedException = typeof(AvroException))]
+        [TestCase("[\"int\", \"boolean\"]", 10, "\"string\"", ExpectedException = typeof(AvroException))]
+        public void TestResolutionMismatch_simple(string writerSchema, object value, string readerSchema)
+        {
+            testResolutionMismatch(writerSchema, value, readerSchema);
+        }
+
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":[\"int\", \"null\"]},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", 101, "f2", 100 }, "int",
+            ExpectedException = typeof(AvroException), Description = "Non-record schema")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":[\"int\", \"null\"]},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", 101, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"s\",\"fields\":" +
+            "[{\"name\":\"f2\",\"type\":\"int\"}]}",
+            ExpectedException = typeof(AvroException), Description = "Name mismatch")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":[\"int\", \"null\"]},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", 101, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f2\",\"type\":\"string\"}]}",
+            ExpectedException = typeof(AvroException), Description = "incompatible field")]
+        [TestCase("{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f1\",\"type\":[\"int\", \"null\"]},{\"name\":\"f2\",\"type\":\"int\"}]}",
+            new object[] { "f1", 101, "f2", 100 },
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":" +
+            "[{\"name\":\"f3\",\"type\":\"string\"}]}",
+            ExpectedException = typeof(AvroException), Description = "new field without default")]
+        public void TestResolutionMismatch_record(string ws, object[] actual, string rs)
+        {
+            testResolutionMismatch(ws, mkRecord(actual, Schema.Parse(ws) as RecordSchema), rs);
+        }
+
+        [TestCase("{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[\"s\", \"t\"]}", "s", "int",
+            ExpectedException = typeof(AvroException), Description = "Non-enum schema")]
+        [TestCase("{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[\"s\", \"t\"]}",
+            "s", "{\"type\":\"enum\",\"name\":\"f\",\"symbols\":[\"s\", \"t\"]}",
+            ExpectedException = typeof(AvroException), Description = "Name mismatch")]
+        [TestCase("{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[\"s\", \"t\"]}",
+            "s", "{\"type\":\"enum\",\"name\":\"f\",\"symbols\":[\"t\", \"u\"]}",
+            ExpectedException = typeof(AvroException), Description = "Incompatible symbols")]
+        public void TestResolutionMismatch_enum(string ws, string value, string rs)
+        {
+            testResolutionMismatch(ws, mkEnum(ws, value), rs);
+        }
+
+        [TestCase("{\"type\":\"map\",\"values\":\"int\"}", new object[] { "a", 0 }, "int",
+            ExpectedException = typeof(AvroException), Description = "Non-map schema")]
+        [TestCase("{\"type\":\"map\",\"values\":\"int\"}",
+            new object[] { "a", 0 }, "{\"type\":\"map\",\"values\":\"string\"}",
+            ExpectedException = typeof(AvroException), Description = "Name mismatch")]
+        public void TestResolutionMismatch_map(string ws, object[] value, string rs)
+        {
+            testResolutionMismatch(ws, mkMap(value), rs);
+        }
+
+        [TestCase("{\"type\":\"fixed\",\"name\":\"f\",\"size\":2}", new byte[] { 1, 1 }, "int",
+            ExpectedException = typeof(AvroException), Description = "Non-fixed schema")]
+        [TestCase("{\"type\":\"fixed\",\"name\":\"f\",\"size\":2}",
+            new byte[] { 1, 1 }, "{\"type\":\"fixed\",\"name\":\"g\",\"size\":2}",
+            ExpectedException = typeof(AvroException), Description = "Name mismatch")]
+        [TestCase("{\"type\":\"fixed\",\"name\":\"f\",\"size\":2}",
+            new byte[] { 1, 1 }, "{\"type\":\"fixed\",\"name\":\"f\",\"size\":1}",
+            ExpectedException = typeof(AvroException), Description = "Size mismatch")]
+        public void TestResolutionMismatch_fixed(string ws, byte[] value, string rs)
+        {
+            testResolutionMismatch(ws, mkFixed(ws, value), rs);
+        }
+
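+        // Builds a GenericRecord from alternating field-name/value pairs, wrapping
+        // enum and fixed field values in their generic containers to match the schema.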
+        private static GenericRecord mkRecord(object[] kv, RecordSchema s)
+        {
+            GenericRecord input = new GenericRecord(s);
+            for (int i = 0; i < kv.Length; i += 2)
+            {
+                string fieldName = (string)kv[i];
+                object fieldValue = kv[i + 1];
+                Schema inner = s[fieldName].Schema;
+                if (inner is EnumSchema)
+                {
+                    GenericEnum ge = new GenericEnum(inner as EnumSchema, (string)fieldValue);
+                    fieldValue = ge;
+                }
+                else if (inner is FixedSchema)
+                {
+                    GenericFixed gf = new GenericFixed(inner as FixedSchema);
+                    gf.Value = (byte[])fieldValue;
+                    fieldValue = gf;
+                }
+                input.Add(fieldName, fieldValue);
+            }
+            return input;
+        }
+
+        private static IDictionary<string, object> mkMap(object[] vv)
+        {
+            IDictionary<string, object> d = new Dictionary<string, object>();
+            for (int j = 0; j < vv.Length; j += 2)
+            {
+                d[(string)vv[j]] = vv[j + 1];
+            }
+            return d;
+        }
+
+        private static object mkEnum(string enumSchema, string value)
+        {
+            return new GenericEnum(Schema.Parse(enumSchema) as EnumSchema, value);
+        }
+
+        private static object mkFixed(string fixedSchema, byte[] value)
+        {
+            return new GenericFixed(Schema.Parse(fixedSchema) as FixedSchema, value);
+        }
+
+        private static S deserialize<S>(Stream ms, Schema ws, Schema rs)
+        {
+            long initialPos = ms.Position;
+            GenericReader<S> r = new GenericReader<S>(ws, rs);
+            Decoder d = new BinaryDecoder(ms);
+            var items = new List<S>();
+            // validate reading twice to make sure there isn't some state that isn't reset between reads.
+            items.Add( Read( r, d ) );
+            items.Add( Read( r, d ) );
+            Assert.AreEqual(ms.Length, ms.Position); // Ensure we have read everything.
+            checkAlternateDeserializers(items, ms, initialPos, ws, rs);
+            return items[0];
+        }
+
+        private static S Read<S>( DatumReader<S> reader, Decoder d )
+        {
+            S reuse = default( S );
+            return reader.Read( reuse, d );
+        }
+
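+        // Re-reads the same bytes with GenericDatumReader to confirm the alternate
+        // reader implementation produces identical results.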
+        private static void checkAlternateDeserializers<S>(IEnumerable<S> expectations, Stream input, long startPos, Schema ws, Schema rs)
+        {
+            input.Position = startPos;
+            var reader = new GenericDatumReader<S>(ws, rs);
+            Decoder d = new BinaryDecoder(input);
+            foreach( var expected in expectations )
+            {
+                var read = Read( reader, d );
+                Assert.AreEqual(expected, read);
+            }
+            Assert.AreEqual(input.Length, input.Position); // Ensure we have read everything.
+        }
+
+        private static void serialize<T>(string writerSchema, T actual, out Stream stream, out Schema ws)
+        {
+            var ms = new MemoryStream();
+            Encoder e = new BinaryEncoder(ms);
+            ws = Schema.Parse(writerSchema);
+            GenericWriter<T> w = new GenericWriter<T>(ws);
+            // write twice so we can validate reading twice
+            w.Write(actual, e);
+            w.Write(actual, e);
+            ms.Flush();
+            ms.Position = 0;
+            checkAlternateSerializers(ms.ToArray(), actual, ws);
+            stream = ms;
+        }
+
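+        // Re-serializes with GenericDatumWriter and verifies the output is
+        // byte-for-byte identical to what GenericWriter produced.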
+        private static void checkAlternateSerializers<T>(byte[] expected, T value, Schema ws)
+        {
+            var ms = new MemoryStream();
+            var writer = new GenericDatumWriter<T>(ws);
+            var e = new BinaryEncoder(ms);
+            writer.Write(value, e);
+            writer.Write(value, e);
+            var output = ms.ToArray();
+
+            Assert.AreEqual(expected.Length, output.Length);
+            Assert.True(expected.SequenceEqual(output));
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/test/IO/BinaryCodecTests.cs b/lang/csharp/src/apache/test/IO/BinaryCodecTests.cs
new file mode 100644
index 0000000..b6d2e89
--- /dev/null
+++ b/lang/csharp/src/apache/test/IO/BinaryCodecTests.cs
@@ -0,0 +1,334 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+using NUnit.Framework;
+using System.IO;
+
+using Avro.IO;
+
+namespace Avro.Test
+{
+    using Decoder = Avro.IO.Decoder;
+    using Encoder = Avro.IO.Encoder;
+    delegate T Decode<T>(Decoder d);
+    delegate void Skip<T>(Decoder d);
+    delegate void Encode<T>(Encoder e, T t);
+
+    /// <summary>
+    /// Tests the BinaryEncoder and BinaryDecoder. This is a pretty general set of test cases and hence
+    /// can be used for any encoder and its corresponding decoder.
+    /// </summary>
+    [TestFixture]
+    public class BinaryCodecTests
+    {
+
+        /// <summary>
+        /// Writes an avro type T with value t into a stream using the encode method e
+        /// and reads it back using the decode method d and verifies that
+        /// the value read back is the same as the one written in.
+        /// </summary>
+        /// <typeparam name="T">Avro type to test</typeparam>
+        /// <param name="t">Value for the Avro type to test.</param>
+        /// <param name="r">The decode method</param>
+        /// <param name="w">The encode method</param>
+        private void TestRead<T>(T t, Decode<T> r, Encode<T> w, int size)
+        {
+            MemoryStream iostr = new MemoryStream();
+            Encoder e = new BinaryEncoder(iostr);
+            w(e, t);
+            iostr.Flush();
+            Assert.AreEqual(size, iostr.Length);
+            iostr.Position = 0;
+            Decoder d = new BinaryDecoder(iostr);
+            T actual = r(d);
+            Assert.AreEqual(t, actual);
+            Assert.AreEqual(-1, iostr.ReadByte());
+            iostr.Close();
+        }
+
+        /// <summary>
+        /// Writes an avro type T with value t into a stream using the encode method e
+        /// and reads it back using the decode method d and verifies that
+        /// the value read back is the same as the one written in.
+        /// </summary>
+        /// <typeparam name="T">Avro type to test</typeparam>
+        /// <param name="t">Value for the Avro type to test.</param>
+        /// <param name="r">The skip method</param>
+        /// <param name="w">The encode method</param>
+        private void TestSkip<T>(T t, Skip<T> s, Encode<T> w, int size)
+        {
+            MemoryStream iostr = new MemoryStream();
+            Encoder e = new BinaryEncoder(iostr);
+            w(e, t);
+            iostr.Flush();
+            Assert.AreEqual(size, iostr.Length);
+            iostr.Position = 0;
+            Decoder d = new BinaryDecoder(iostr);
+            s(d);
+            Assert.AreEqual(-1, iostr.ReadByte());
+            iostr.Close();
+        }
+
+        [TestCase(true)]
+        [TestCase(false)]
+        public void TestBoolean(bool b)
+        {
+            TestRead(b, (Decoder d) => d.ReadBoolean(), (Encoder e, bool t) => e.WriteBoolean(t), 1);
+            TestSkip(b, (Decoder d) => d.SkipBoolean(), (Encoder e, bool t) => e.WriteBoolean(t), 1);
+        }
+
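+        // Avro ints are zigzag-encoded, then written as a variable-length quantity
+        // (7 bits per byte), so values in [-64, 63] take one byte, [-8192, 8191]
+        // take two, and so on; the second TestCase argument is the expected size.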
+        [TestCase(0, 1)]
+        [TestCase(1, 1)]
+        [TestCase(63, 1)]
+        [TestCase(64, 2)]
+        [TestCase(8191, 2)]
+        [TestCase(8192, 3)]
+        [TestCase(1048575, 3)]
+        [TestCase(1048576, 4)]
+        [TestCase(134217727, 4)]
+        [TestCase(134217728, 5)]
+        [TestCase(2147483647, 5)]
+        [TestCase(-1, 1)]
+        [TestCase(-64, 1)]
+        [TestCase(-65, 2)]
+        [TestCase(-8192, 2)]
+        [TestCase(-8193, 3)]
+        [TestCase(-1048576, 3)]
+        [TestCase(-1048577, 4)]
+        [TestCase(-134217728, 4)]
+        [TestCase(-134217729, 5)]
+        [TestCase(-2147483648, 5)]
+        public void TestInt(int n, int size)
+        {
+            TestRead(n, (Decoder d) => d.ReadInt(), (Encoder e, int t) => e.WriteInt(t), size);
+            TestSkip(n, (Decoder d) => d.SkipInt(), (Encoder e, int t) => e.WriteInt(t), size);
+        }
+
+        [TestCase(0, 1)]
+        [TestCase(1, 1)]
+        [TestCase(63, 1)]
+        [TestCase(64, 2)]
+        [TestCase(8191, 2)]
+        [TestCase(8192, 3)]
+        [TestCase(1048575, 3)]
+        [TestCase(1048576, 4)]
+        [TestCase(134217727, 4)]
+        [TestCase(134217728, 5)]
+        [TestCase(17179869183L, 5)]
+        [TestCase(17179869184L, 6)]
+        [TestCase(2199023255551L, 6)]
+        [TestCase(2199023255552L, 7)]
+        [TestCase(281474976710655L, 7)]
+        [TestCase(281474976710656L, 8)]
+        [TestCase(36028797018963967L, 8)]
+        [TestCase(36028797018963968L, 9)]
+        [TestCase(4611686018427387903L, 9)]
+        [TestCase(4611686018427387904L, 10)]
+        [TestCase(9223372036854775807L, 10)]
+        [TestCase(-1, 1)]
+        [TestCase(-64, 1)]
+        [TestCase(-65, 2)]
+        [TestCase(-8192, 2)]
+        [TestCase(-8193, 3)]
+        [TestCase(-1048576, 3)]
+        [TestCase(-1048577, 4)]
+        [TestCase(-134217728, 4)]
+        [TestCase(-134217729, 5)]
+        [TestCase(-17179869184L, 5)]
+        [TestCase(-17179869185L, 6)]
+        [TestCase(-2199023255552L, 6)]
+        [TestCase(-2199023255553L, 7)]
+        [TestCase(-281474976710656L, 7)]
+        [TestCase(-281474976710657L, 8)]
+        [TestCase(-36028797018963968L, 8)]
+        [TestCase(-36028797018963969L, 9)]
+        [TestCase(-4611686018427387904L, 9)]
+        [TestCase(-4611686018427387905L, 10)]
+        [TestCase(-9223372036854775808L, 10)]
+        public void TestLong(long n, int size)
+        {
+            TestRead(n, (Decoder d) => d.ReadLong(), (Encoder e, long t) => e.WriteLong(t), size);
+            TestSkip(n, (Decoder d) => d.SkipLong(), (Encoder e, long t) => e.WriteLong(t), size);
+        }
+
+        [TestCase(0.0f)]
+        [TestCase(Single.MaxValue, Description = "Max value")]
+        [TestCase(1.17549435E-38f, Description = "Min 'normal' value")]
+        [TestCase(1.4e-45f, Description = "Min value")]
+        public void TestFloat(float n)
+        {
+            TestRead(n, (Decoder d) => d.ReadFloat(), (Encoder e, float t) => e.WriteFloat(t), 4);
+            TestSkip(n, (Decoder d) => d.SkipFloat(), (Encoder e, float t) => e.WriteFloat(t), 4);
+        }
+
+        [TestCase(0.0)]
+        [TestCase(1.7976931348623157e+308, Description = "Max value")]
+        [TestCase(2.2250738585072014E-308, Description = "Min 'normal' value")]
+        [TestCase(4.9e-324, Description = "Min value")]
+        public void TestDouble(double n)
+        {
+            TestRead(n, (Decoder d) => d.ReadDouble(), (Encoder e, double t) => e.WriteDouble(t), 8);
+            TestSkip(n, (Decoder d) => d.SkipDouble(), (Encoder e, double t) => e.WriteDouble(t), 8);
+        }
+
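+        // Bytes and strings are encoded as a varint length prefix followed by the
+        // raw bytes, so 'overhead' counts the bytes of the length prefix. The
+        // string cases below assume ASCII, where byte length equals char count.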
+        [TestCase(0, 1)]
+        [TestCase(5, 1)]
+        [TestCase(63, 1)]
+        [TestCase(64, 2)]
+        [TestCase(8191, 2)]
+        [TestCase(8192, 3)]
+        public void TestBytes(int length, int overhead)
+        {
+            Random r = new Random();
+            byte[] b = new byte[length];
+            r.NextBytes(b);
+            TestRead(b, (Decoder d) => d.ReadBytes(), (Encoder e, byte[] t) => e.WriteBytes(t), overhead + b.Length);
+            TestSkip(b, (Decoder d) => d.SkipBytes(), (Encoder e, byte[] t) => e.WriteBytes(t), overhead + b.Length);
+        }
+
+        [TestCase("", 1)]
+        [TestCase("hello", 1)]
+        [TestCase("1234567890123456789012345678901234567890123456789012345678901234", 2)]
+        public void TestString(string n, int overhead)
+        {
+            TestRead(n, (Decoder d) => d.ReadString(), (Encoder e, string t) => e.WriteString(t), overhead + n.Length);
+            TestSkip(n, (Decoder d) => d.SkipString(), (Encoder e, string t) => e.WriteString(t), overhead + n.Length);
+        }
+
+        [TestCase(0, 1)]
+        [TestCase(1, 1)]
+        [TestCase(64, 2)]
+        public void TestEnum(int n, int size)
+        {
+            TestRead(n, (Decoder d) => d.ReadEnum(), (Encoder e, int t) => e.WriteEnum(t), size);
+            TestSkip(n, (Decoder d) => d.SkipEnum(), (Encoder e, int t) => e.WriteEnum(t), size);
+        }
+
+        [TestCase(1, new int[] { })]
+        [TestCase(3, new int[] { 0 })]
+        [TestCase(4, new int[] { 64 })]
+        public void TestArray(int size, int[] entries)
+        {
+            TestRead(entries, (Decoder d) =>
+            {
+                int[] t = new int[entries.Length];
+                int j = 0;
+                for (long n = d.ReadArrayStart(); n != 0; n = d.ReadArrayNext())
+                {
+                    for (int i = 0; i < n; i++) { t[j++] = d.ReadInt(); }
+                }
+                return t;
+            },
+                (Encoder e, int[] t) =>
+                {
+                    e.WriteArrayStart();
+                    e.SetItemCount(t.Length);
+                    foreach (int i in t) { e.StartItem(); e.WriteInt(i); }
+                    e.WriteArrayEnd();
+                }, size);
+
+            TestSkip(entries, (Decoder d) =>
+            {
+                for (long n = d.ReadArrayStart(); n != 0; n = d.ReadArrayNext())
+                {
+                    for (int i = 0; i < n; i++) { d.SkipInt(); }
+                }
+            },
+                (Encoder e, int[] t) =>
+                {
+                    e.WriteArrayStart();
+                    e.SetItemCount(t.Length);
+                    foreach (int i in t) { e.StartItem(); e.WriteInt(i); }
+                    e.WriteArrayEnd();
+                }, size);
+        }
+
+        [TestCase(1, new string[] { })]
+        [TestCase(6, new string[] { "a", "b" })]
+        [TestCase(9, new string[] { "a", "b", "c", "" })]
+        public void TestMap(int size, string[] entries)
+        {
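+            // Maps use the same block-based wire format as arrays (a count-prefixed
+            // block of items), so the array start/next calls below also walk map
+            // entries encoded as alternating key/value strings.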
+            TestRead(entries, (Decoder d) =>
+            {
+                string[] t = new string[entries.Length];
+                int j = 0;
+                for (long n = d.ReadArrayStart(); n != 0; n = d.ReadArrayNext())
+                {
+                    for (int i = 0; i < n; i++) { t[j++] = d.ReadString(); t[j++] = d.ReadString(); }
+                }
+                return t;
+            },
+                (Encoder e, string[] t) =>
+                {
+                    e.WriteArrayStart();
+                    e.SetItemCount(t.Length / 2);
+                    for (int i = 0; i < t.Length; i += 2)
+                    {
+                        e.StartItem(); e.WriteString(t[i]); e.WriteString(t[i + 1]);
+                    }
+                    e.WriteArrayEnd();
+                }, size);
+
+            TestSkip(entries, (Decoder d) =>
+            {
+                for (long n = d.ReadArrayStart(); n != 0; n = d.ReadArrayNext())
+                {
+                    for (int i = 0; i < n; i++) { d.SkipString(); d.SkipString(); }
+                }
+            },
+                (Encoder e, string[] t) =>
+                {
+                    e.WriteArrayStart();
+                    e.SetItemCount(t.Length / 2);
+                    for (int i = 0; i < t.Length; i += 2)
+                    {
+                        e.StartItem(); e.WriteString(t[i]); e.WriteString(t[i + 1]);
+                    }
+                    e.WriteArrayEnd();
+                }, size);
+        }
+
+        [TestCase(0, 1)]
+        [TestCase(1, 1)]
+        [TestCase(64, 2)]
+        public void TestUnionIndex(int n, int size)
+        {
+            TestRead(n, (Decoder d) => d.ReadUnionIndex(), (Encoder e, int t) => e.WriteUnionIndex(t), size);
+            TestSkip(n, (Decoder d) => d.SkipUnionIndex(), (Encoder e, int t) => e.WriteUnionIndex(t), size);
+        }
+
+        [TestCase(0)]
+        [TestCase(1)]
+        [TestCase(64)]
+        public void TestFixed(int size)
+        {
+            byte[] b = new byte[size];
+            new Random().NextBytes(b);
+            TestRead(b, (Decoder d) => { byte[] t = new byte[size]; d.ReadFixed(t); return t; },
+                (Encoder e, byte[] t) => e.WriteFixed(t), size);
+            TestSkip(b, (Decoder d) => d.SkipFixed(size),
+                (Encoder e, byte[] t) => e.WriteFixed(t), size);
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/All.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/All.cs
new file mode 100644
index 0000000..f1d3648
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/All.cs
@@ -0,0 +1,95 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	/// <summary>
+	/// * Licensed to the Apache Software Foundation (ASF) under one\r\n * or more contributor license agreements.  See the NOTICE file\r\n * distributed with this work for additional information\r\n * regarding copyright ownership.  The ASF licenses this file\r\n * to you under the Apache License, Version 2.0 (the\r\n * \"License\"); you may not use this file except in compliance\r\n * with the License.  You may obtain a copy of the License at\r\n *\r\n *     http://www.apache.org/licenses [...]
+	/// </summary>
+	public abstract class All : Avro.Specific.ISpecificProtocol
+	{
+		private static readonly Avro.Protocol protocol = Avro.Protocol.Parse("{\r\n  \"protocol\": \"All\",\r\n  \"namespace\": \"org.apache.avro.test\",\r\n  \"doc\": \"* Lice" +
+				"nsed to the Apache Software Foundation (ASF) under one\\\\r\\\\n * or more contribut" +
+				"or license agreements.  See the NOTICE file\\\\r\\\\n * distributed with this work f" +
+				"or additional information\\\\r\\\\n * regarding copyright ownership.  The ASF licens" +
+				"es this file\\\\r\\\\n * to you under the Apache License, Version 2.0 (the\\\\r\\\\n * \\" +
+				"\\\\\"License\\\\\\\"); you may not use this file except in compliance\\\\r\\\\n * with the" +
+				" License.  You may obtain a copy of the License at\\\\r\\\\n *\\\\r\\\\n *     http://ww" +
+				"w.apache.org/licenses/LICENSE-2.0\\\\r\\\\n *\\\\r\\\\n * Unless required by applicable " +
+				"law or agreed to in writing, software\\\\r\\\\n * distributed under the License is d" +
+				"istributed on an \\\\\\\"AS IS\\\\\\\" BASIS,\\\\r\\\\n * WITHOUT WARRANTIES OR CONDITIONS O" +
+				"F ANY KIND, either express or implied.\\\\r\\\\n * See the License for the specific " +
+				"language governing permissions and\\\\r\\\\n * limitations under the License.\",\r\n  \"" +
+				"types\": [\r\n    {\r\n      \"type\": \"enum\",\r\n      \"name\": \"AllEnum\",\r\n      \"namesp" +
+				"ace\": \"org.apache.avro.test\",\r\n      \"symbols\": [\r\n        \"FOO\",\r\n        \"BAR\"" +
+				"\r\n      ]\r\n    },\r\n    {\r\n      \"type\": \"fixed\",\r\n      \"name\": \"FixedTest\",\r\n  " +
+				"    \"namespace\": \"org.apache.avro.test\",\r\n      \"size\": 10\r\n    },\r\n    {\r\n     " +
+				" \"type\": \"record\",\r\n      \"name\": \"AllTestRecord\",\r\n      \"namespace\": \"org.apac" +
+				"he.avro.test\",\r\n      \"fields\": [\r\n        {\r\n          \"name\": \"booleanTest\",\r\n" +
+				"          \"type\": \"boolean\"\r\n        },\r\n        {\r\n          \"name\": \"intTest\"," +
+				"\r\n          \"type\": \"int\"\r\n        },\r\n        {\r\n          \"name\": \"longTest\",\r" +
+				"\n          \"type\": \"long\"\r\n        },\r\n        {\r\n          \"name\": \"floatTest\"," +
+				"\r\n          \"type\": \"float\"\r\n        },\r\n        {\r\n          \"name\": \"doubleTes" +
+				"t\",\r\n          \"type\": \"double\"\r\n        },\r\n        {\r\n          \"name\": \"bytes" +
+				"Test\",\r\n          \"type\": \"bytes\"\r\n        },\r\n        {\r\n          \"name\": \"str" +
+				"ingTest\",\r\n          \"type\": \"string\"\r\n        },\r\n        {\r\n          \"name\": " +
+				"\"enumTest\",\r\n          \"type\": \"AllEnum\"\r\n        },\r\n        {\r\n          \"name" +
+				"\": \"fixedTest\",\r\n          \"type\": \"FixedTest\"\r\n        },\r\n        {\r\n         " +
+				" \"name\": \"arrayTest\",\r\n          \"type\": {\r\n            \"type\": \"array\",\r\n      " +
+				"      \"items\": \"long\"\r\n          }\r\n        },\r\n        {\r\n          \"name\": \"ma" +
+				"pTest\",\r\n          \"type\": {\r\n            \"type\": \"map\",\r\n            \"values\": " +
+				"\"long\"\r\n          }\r\n        },\r\n        {\r\n          \"name\": \"nestedTest\",\r\n   " +
+				"       \"type\": [\r\n            \"AllTestRecord\",\r\n            \"null\"\r\n          ]\r" +
+				"\n        }\r\n      ]\r\n    }\r\n  ],\r\n  \"messages\": {\r\n    \"echo\": {\r\n      \"request" +
+				"\": [\r\n        {\r\n          \"name\": \"allTest\",\r\n          \"type\": \"AllTestRecord\"" +
+				"\r\n        }\r\n      ],\r\n      \"response\": \"AllTestRecord\"\r\n    },\r\n    \"echoParam" +
+				"eters\": {\r\n      \"request\": [\r\n        {\r\n          \"name\": \"booleanTest\",\r\n    " +
+				"      \"type\": \"boolean\"\r\n        },\r\n        {\r\n          \"name\": \"intTest\",\r\n  " +
+				"        \"type\": \"int\"\r\n        },\r\n        {\r\n          \"name\": \"longTest\",\r\n   " +
+				"       \"type\": \"long\"\r\n        },\r\n        {\r\n          \"name\": \"floatTest\",\r\n  " +
+				"        \"type\": \"float\"\r\n        },\r\n        {\r\n          \"name\": \"doubleTest\",\r" +
+				"\n          \"type\": \"double\"\r\n        },\r\n        {\r\n          \"name\": \"bytesTest" +
+				"\",\r\n          \"type\": \"bytes\"\r\n        },\r\n        {\r\n          \"name\": \"stringT" +
+				"est\",\r\n          \"type\": \"string\"\r\n        },\r\n        {\r\n          \"name\": \"enu" +
+				"mTest\",\r\n          \"type\": \"AllEnum\"\r\n        },\r\n        {\r\n          \"name\": \"" +
+				"fixedTest\",\r\n          \"type\": \"FixedTest\"\r\n        },\r\n        {\r\n          \"na" +
+				"me\": \"arrayTest\",\r\n          \"type\": {\r\n            \"type\": \"array\",\r\n          " +
+				"  \"items\": \"long\"\r\n          }\r\n        },\r\n        {\r\n          \"name\": \"mapTes" +
+				"t\",\r\n          \"type\": {\r\n            \"type\": \"map\",\r\n            \"values\": \"lon" +
+				"g\"\r\n          }\r\n        },\r\n        {\r\n          \"name\": \"nestedTest\",\r\n       " +
+				"   \"type\": \"AllTestRecord\"\r\n        }\r\n      ],\r\n      \"response\": \"AllTestRecor" +
+				"d\"\r\n    }\r\n  }\r\n}");
+		public Avro.Protocol Protocol
+		{
+			get
+			{
+				return protocol;
+			}
+		}
+		public void Request(Avro.Specific.ICallbackRequestor requestor, string messageName, object[] args, object callback)
+		{
+			switch(messageName)
+			{
+				case "echo":
+				requestor.Request<org.apache.avro.test.AllTestRecord>(messageName, args, callback);
+				break;
+
+				case "echoParameters":
+				requestor.Request<org.apache.avro.test.AllTestRecord>(messageName, args, callback);
+				break;
+			};
+		}
+		public abstract org.apache.avro.test.AllTestRecord echo(org.apache.avro.test.AllTestRecord allTest);
+		public abstract org.apache.avro.test.AllTestRecord echoParameters(bool booleanTest, int intTest, long longTest, float floatTest, double doubleTest, byte[] bytesTest, string stringTest, org.apache.avro.test.AllEnum enumTest, org.apache.avro.test.FixedTest fixedTest, IList<System.Int64> arrayTest, IDictionary<string,System.Int64> mapTest, org.apache.avro.test.AllTestRecord nestedTest);
+	}
+}
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/AllCallback.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/AllCallback.cs
new file mode 100644
index 0000000..cd9c688
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/AllCallback.cs
@@ -0,0 +1,24 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	/// <summary>
+	/// * Licensed to the Apache Software Foundation (ASF) under one\r\n * or more contributor license agreements.  See the NOTICE file\r\n * distributed with this work for additional information\r\n * regarding copyright ownership.  The ASF licenses this file\r\n * to you under the Apache License, Version 2.0 (the\r\n * \"License\"); you may not use this file except in compliance\r\n * with the License.  You may obtain a copy of the License at\r\n *\r\n *     http://www.apache.org/licenses [...]
+	/// </summary>
+	public abstract class AllCallback : All
+	{
+		public abstract void echo(org.apache.avro.test.AllTestRecord allTest, Avro.IO.ICallback<org.apache.avro.test.AllTestRecord> callback);
+		public abstract void echoParameters(bool booleanTest, int intTest, long longTest, float floatTest, double doubleTest, byte[] bytesTest, string stringTest, org.apache.avro.test.AllEnum enumTest, org.apache.avro.test.FixedTest fixedTest, IList<System.Int64> arrayTest, IDictionary<string,System.Int64> mapTest, org.apache.avro.test.AllTestRecord nestedTest, Avro.IO.ICallback<org.apache.avro.test.AllTestRecord> callback);
+	}
+}
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/AllEnum.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/AllEnum.cs
new file mode 100644
index 0000000..3a4ae29
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/AllEnum.cs
@@ -0,0 +1,21 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public enum AllEnum
+	{
+		FOO,
+		BAR,
+	}
+}
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/AllTestRecord.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/AllTestRecord.cs
new file mode 100644
index 0000000..1b881b3
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/AllTestRecord.cs
@@ -0,0 +1,209 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class AllTestRecord : ISpecificRecord
+	{
+		private static Schema _SCHEMA = Avro.Schema.Parse(@"{""type"":""record"",""name"":""AllTestRecord"",""namespace"":""org.apache.avro.test"",""fields"":[{""name"":""booleanTest"",""type"":""boolean""},{""name"":""intTest"",""type"":""int""},{""name"":""longTest"",""type"":""long""},{""name"":""floatTest"",""type"":""float""},{""name"":""doubleTest"",""type"":""double""},{""name"":""bytesTest"",""type"":""bytes""},{""name"":""stringTest"",""type"":""string""},{""name"":""enumTest"",""type [...]
+		private bool _booleanTest;
+		private int _intTest;
+		private long _longTest;
+		private float _floatTest;
+		private double _doubleTest;
+		private byte[] _bytesTest;
+		private string _stringTest;
+		private org.apache.avro.test.AllEnum _enumTest;
+		private org.apache.avro.test.FixedTest _fixedTest;
+		private IList<System.Int64> _arrayTest;
+		private IDictionary<string,System.Int64> _mapTest;
+		private org.apache.avro.test.AllTestRecord _nestedTest;
+		public virtual Schema Schema
+		{
+			get
+			{
+				return AllTestRecord._SCHEMA;
+			}
+		}
+		public bool booleanTest
+		{
+			get
+			{
+				return this._booleanTest;
+			}
+			set
+			{
+				this._booleanTest = value;
+			}
+		}
+		public int intTest
+		{
+			get
+			{
+				return this._intTest;
+			}
+			set
+			{
+				this._intTest = value;
+			}
+		}
+		public long longTest
+		{
+			get
+			{
+				return this._longTest;
+			}
+			set
+			{
+				this._longTest = value;
+			}
+		}
+		public float floatTest
+		{
+			get
+			{
+				return this._floatTest;
+			}
+			set
+			{
+				this._floatTest = value;
+			}
+		}
+		public double doubleTest
+		{
+			get
+			{
+				return this._doubleTest;
+			}
+			set
+			{
+				this._doubleTest = value;
+			}
+		}
+		public byte[] bytesTest
+		{
+			get
+			{
+				return this._bytesTest;
+			}
+			set
+			{
+				this._bytesTest = value;
+			}
+		}
+		public string stringTest
+		{
+			get
+			{
+				return this._stringTest;
+			}
+			set
+			{
+				this._stringTest = value;
+			}
+		}
+		public org.apache.avro.test.AllEnum enumTest
+		{
+			get
+			{
+				return this._enumTest;
+			}
+			set
+			{
+				this._enumTest = value;
+			}
+		}
+		public org.apache.avro.test.FixedTest fixedTest
+		{
+			get
+			{
+				return this._fixedTest;
+			}
+			set
+			{
+				this._fixedTest = value;
+			}
+		}
+		public IList<System.Int64> arrayTest
+		{
+			get
+			{
+				return this._arrayTest;
+			}
+			set
+			{
+				this._arrayTest = value;
+			}
+		}
+		public IDictionary<string,System.Int64> mapTest
+		{
+			get
+			{
+				return this._mapTest;
+			}
+			set
+			{
+				this._mapTest = value;
+			}
+		}
+		public org.apache.avro.test.AllTestRecord nestedTest
+		{
+			get
+			{
+				return this._nestedTest;
+			}
+			set
+			{
+				this._nestedTest = value;
+			}
+		}
+		public virtual object Get(int fieldPos)
+		{
+			switch (fieldPos)
+			{
+			case 0: return this.booleanTest;
+			case 1: return this.intTest;
+			case 2: return this.longTest;
+			case 3: return this.floatTest;
+			case 4: return this.doubleTest;
+			case 5: return this.bytesTest;
+			case 6: return this.stringTest;
+			case 7: return this.enumTest;
+			case 8: return this.fixedTest;
+			case 9: return this.arrayTest;
+			case 10: return this.mapTest;
+			case 11: return this.nestedTest;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+			};
+		}
+		public virtual void Put(int fieldPos, object fieldValue)
+		{
+			switch (fieldPos)
+			{
+			case 0: this.booleanTest = (System.Boolean)fieldValue; break;
+			case 1: this.intTest = (System.Int32)fieldValue; break;
+			case 2: this.longTest = (System.Int64)fieldValue; break;
+			case 3: this.floatTest = (System.Single)fieldValue; break;
+			case 4: this.doubleTest = (System.Double)fieldValue; break;
+			case 5: this.bytesTest = (System.Byte[])fieldValue; break;
+			case 6: this.stringTest = (System.String)fieldValue; break;
+			case 7: this.enumTest = (org.apache.avro.test.AllEnum)fieldValue; break;
+			case 8: this.fixedTest = (org.apache.avro.test.FixedTest)fieldValue; break;
+			case 9: this.arrayTest = (IList<System.Int64>)fieldValue; break;
+			case 10: this.mapTest = (IDictionary<string,System.Int64>)fieldValue; break;
+			case 11: this.nestedTest = (org.apache.avro.test.AllTestRecord)fieldValue; break;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+			};
+		}
+	}
+}
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/AllTestRecordPartial.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/AllTestRecordPartial.cs
new file mode 100644
index 0000000..de387d8
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/AllTestRecordPartial.cs
@@ -0,0 +1,50 @@
+using System.Linq;
+
+namespace org.apache.avro.test
+{
+    public partial class AllTestRecord
+    {
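+        // Hand-written companion to the generated AllTestRecord: adds structural
+        // equality (element-wise comparison for the array, map and bytes fields)
+        // so round-tripped records can be compared by value in tests.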
+        protected bool Equals(AllTestRecord other)
+        {
+            bool arrayEqual = _arrayTest.SequenceEqual(other._arrayTest);
+            bool mapEqual = _mapTest.SequenceEqual(other._mapTest);
+            bool bytesEqual = _bytesTest.SequenceEqual(other._bytesTest);
+
+            return Equals(_nestedTest, other._nestedTest) && mapEqual &&
+                   arrayEqual
+                 && Equals(_fixedTest, other._fixedTest) &&
+                   _enumTest == other._enumTest && string.Equals(_stringTest, other._stringTest) &&
+                   bytesEqual && _doubleTest.Equals(other._doubleTest) &&
+                   _floatTest.Equals(other._floatTest) && _longTest == other._longTest && _intTest == other._intTest &&
+                   _booleanTest.Equals(other._booleanTest);
+        }
+
+        public override bool Equals(object obj)
+        {
+            if (ReferenceEquals(null, obj)) return false;
+            if (ReferenceEquals(this, obj)) return true;
+            if (obj.GetType() != GetType()) return false;
+            return Equals((AllTestRecord) obj);
+        }
+
+        public override int GetHashCode()
+        {
+            unchecked
+            {
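+                // Combine per-field hashes with the conventional prime multiplier
+                // 397; 'unchecked' lets the arithmetic wrap on overflow.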
+                var hashCode = (_nestedTest != null ? _nestedTest.GetHashCode() : 0);
+                hashCode = (hashCode*397) ^ (_mapTest != null ? _mapTest.GetHashCode() : 0);
+                hashCode = (hashCode*397) ^ (_arrayTest != null ? _arrayTest.GetHashCode() : 0);
+                hashCode = (hashCode*397) ^ (_fixedTest != null ? _fixedTest.GetHashCode() : 0);
+                hashCode = (hashCode*397) ^ (int) _enumTest;
+                hashCode = (hashCode*397) ^ (_stringTest != null ? _stringTest.GetHashCode() : 0);
+                hashCode = (hashCode*397) ^ (_bytesTest != null ? _bytesTest.GetHashCode() : 0);
+                hashCode = (hashCode*397) ^ _doubleTest.GetHashCode();
+                hashCode = (hashCode*397) ^ _floatTest.GetHashCode();
+                hashCode = (hashCode*397) ^ _longTest.GetHashCode();
+                hashCode = (hashCode*397) ^ _intTest;
+                hashCode = (hashCode*397) ^ _booleanTest.GetHashCode();
+                return hashCode;
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/FixedTest.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/FixedTest.cs
new file mode 100644
index 0000000..edcc444
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/FixedTest.cs
@@ -0,0 +1,39 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class FixedTest : SpecificFixed
+	{
+		private static Schema _SCHEMA = Avro.Schema.Parse("{\"type\":\"fixed\",\"name\":\"FixedTest\",\"namespace\":\"org.apache.avro.test\",\"size\":10}");
+		private static uint fixedSize = 10;
+		public FixedTest() : 
+				base(fixedSize)
+		{
+		}
+		public override Schema Schema
+		{
+			get
+			{
+				return FixedTest._SCHEMA;
+			}
+		}
+		public static uint FixedSize
+		{
+			get
+			{
+				return FixedTest.fixedSize;
+			}
+		}
+	}
+}
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/Kind.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/Kind.cs
new file mode 100644
index 0000000..9f73ad9
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/Kind.cs
@@ -0,0 +1,22 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.vshost.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public enum Kind
+	{
+		FOO,
+		BAR,
+		BAZ,
+	}
+}
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/MD5.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/MD5.cs
new file mode 100644
index 0000000..bc80a1b
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/MD5.cs
@@ -0,0 +1,40 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.vshost.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class MD5 : SpecificFixed
+	{
+		private static Schema _SCHEMA = Avro.Schema.Parse("{\"type\":\"fixed\",\"name\":\"MD5\",\"namespace\":\"org.apache.avro.test\",\"size\":16,\"javaAn" +
+				"notation\":\"org.apache.avro.TestAnnotation\"}");
+		private static uint fixedSize = 16;
+		public MD5() : 
+				base(fixedSize)
+		{
+		}
+		public override Schema Schema
+		{
+			get
+			{
+				return MD5._SCHEMA;
+			}
+		}
+		public static uint FixedSize
+		{
+			get
+			{
+				return MD5.fixedSize;
+			}
+		}
+	}
+}
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/Mail.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/Mail.cs
new file mode 100644
index 0000000..9a6fdfc
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/Mail.cs
@@ -0,0 +1,87 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public abstract class Mail : Avro.Specific.ISpecificProtocol
+	{
+		private static readonly Avro.Protocol protocol = Avro.Protocol.Parse(@"{
+  ""protocol"": ""Mail"",
+  ""namespace"": ""org.apache.avro.test"",
+  ""types"": [
+    {
+      ""type"": ""record"",
+      ""name"": ""Message"",
+      ""namespace"": ""org.apache.avro.test"",
+      ""fields"": [
+        {
+          ""name"": ""to"",
+          ""type"": ""string""
+        },
+        {
+          ""name"": ""from"",
+          ""type"": ""string""
+        },
+        {
+          ""name"": ""body"",
+          ""type"": ""string""
+        }
+      ]
+    }
+  ],
+  ""messages"": {
+    ""send"": {
+      ""request"": [
+        {
+          ""name"": ""message"",
+          ""type"": ""Message""
+        }
+      ],
+      ""response"": ""string""
+    },
+    ""fireandforget"": {
+      ""request"": [
+        {
+          ""name"": ""message"",
+          ""type"": ""Message""
+        }
+      ],
+      ""response"": ""null"",
+      ""one-way"": true
+    }
+  }
+}");
+		public Avro.Protocol Protocol
+		{
+			get
+			{
+				return protocol;
+			}
+		}
+		public void Request(Avro.Specific.ICallbackRequestor requestor, string messageName, object[] args, object callback)
+		{
+			switch(messageName)
+			{
+				case "send":
+				requestor.Request<System.String>(messageName, args, callback);
+				break;
+
+				case "fireandforget":
+				requestor.Request<System.Object>(messageName, args, callback);
+				break;
+			};
+		}
+		public abstract string send(org.apache.avro.test.Message message);
+		public abstract void fireandforget(org.apache.avro.test.Message message);
+	}
+}
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/MailCallback.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/MailCallback.cs
new file mode 100644
index 0000000..eb25503
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/MailCallback.cs
@@ -0,0 +1,20 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public abstract class MailCallback : Mail
+	{
+		public abstract void send(org.apache.avro.test.Message message, Avro.IO.ICallback<System.String> callback);
+	}
+}
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/Message.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/Message.cs
new file mode 100644
index 0000000..ed6c74d
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/Message.cs
@@ -0,0 +1,85 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class Message : ISpecificRecord
+	{
+		private static Schema _SCHEMA = Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"Message\",\"namespace\":\"org.apache.avro.test\",\"fields\":[{\"" +
+				"name\":\"to\",\"type\":\"string\"},{\"name\":\"from\",\"type\":\"string\"},{\"name\":\"body\",\"type" +
+				"\":\"string\"}]}");
+		private string _to;
+		private string _from;
+		private string _body;
+		public virtual Schema Schema
+		{
+			get
+			{
+				return Message._SCHEMA;
+			}
+		}
+		public string to
+		{
+			get
+			{
+				return this._to;
+			}
+			set
+			{
+				this._to = value;
+			}
+		}
+		public string from
+		{
+			get
+			{
+				return this._from;
+			}
+			set
+			{
+				this._from = value;
+			}
+		}
+		public string body
+		{
+			get
+			{
+				return this._body;
+			}
+			set
+			{
+				this._body = value;
+			}
+		}
+		public virtual object Get(int fieldPos)
+		{
+			switch (fieldPos)
+			{
+			case 0: return this.to;
+			case 1: return this.from;
+			case 2: return this.body;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+			};
+		}
+		public virtual void Put(int fieldPos, object fieldValue)
+		{
+			switch (fieldPos)
+			{
+			case 0: this.to = (System.String)fieldValue; break;
+			case 1: this.from = (System.String)fieldValue; break;
+			case 2: this.body = (System.String)fieldValue; break;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+			};
+		}
+	}
+}
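
ISpecificRecord is what makes this class usable without reflection: Schema describes the wire layout, and the positional Get/Put pair moves fields in schema order. A minimal round-trip sketch, assuming the library's SpecificDatumWriter/SpecificDatumReader and the binary encoder/decoder from Avro.Specific and Avro.IO:

    using System.IO;
    using Avro.IO;
    using Avro.Specific;
    using org.apache.avro.test;

    var msg = new Message { to = "wife", from = "husband", body = "I love you!" };

    var stream = new MemoryStream();
    var writer = new SpecificDatumWriter<Message>(msg.Schema);
    writer.Write(msg, new BinaryEncoder(stream));        // pulls fields via Get(0..2)

    stream.Position = 0;
    var reader = new SpecificDatumReader<Message>(msg.Schema, msg.Schema);
    Message copy = reader.Read(null, new BinaryDecoder(stream)); // fills fields via Put(0..2)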
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/Simple.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/Simple.cs
new file mode 100644
index 0000000..1902130
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/Simple.cs
@@ -0,0 +1,103 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.vshost.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	/// <summary>
+	/// Protocol used for testing.
+	/// </summary>
+	public abstract class Simple : Avro.Specific.ISpecificProtocol
+	{
+		private static readonly Avro.Protocol protocol = Avro.Protocol.Parse("{\r\n  \"protocol\": \"Simple\",\r\n  \"namespace\": \"org.apache.avro.test\",\r\n  \"doc\": \"Pro" +
+				"tocol used for testing.\",\r\n  \"types\": [\r\n    {\r\n      \"type\": \"enum\",\r\n      \"na" +
+				"me\": \"Kind\",\r\n      \"namespace\": \"org.apache.avro.test\",\r\n      \"symbols\": [\r\n  " +
+				"      \"FOO\",\r\n        \"BAR\",\r\n        \"BAZ\"\r\n      ],\r\n      \"javaAnnotation\": \"" +
+				"org.apache.avro.TestAnnotation\"\r\n    },\r\n    {\r\n      \"type\": \"fixed\",\r\n      \"n" +
+				"ame\": \"MD5\",\r\n      \"namespace\": \"org.apache.avro.test\",\r\n      \"size\": 16,\r\n   " +
+				"   \"javaAnnotation\": \"org.apache.avro.TestAnnotation\"\r\n    },\r\n    {\r\n      \"typ" +
+				"e\": \"record\",\r\n      \"name\": \"TestRecord\",\r\n      \"namespace\": \"org.apache.avro." +
+				"test\",\r\n      \"fields\": [\r\n        {\r\n          \"name\": \"name\",\r\n          \"type" +
+				"\": \"string\",\r\n          \"javaAnnotation\": \"org.apache.avro.TestAnnotation\"\r\n    " +
+				"    },\r\n        {\r\n          \"name\": \"kind\",\r\n          \"type\": \"Kind\"\r\n        " +
+				"},\r\n        {\r\n          \"name\": \"hash\",\r\n          \"type\": \"MD5\"\r\n        }\r\n  " +
+				"    ],\r\n      \"javaAnnotation\": \"org.apache.avro.TestAnnotation\"\r\n    },\r\n    {\r" +
+				"\n      \"type\": \"error\",\r\n      \"name\": \"TestError\",\r\n      \"namespace\": \"org.apa" +
+				"che.avro.test\",\r\n      \"fields\": [\r\n        {\r\n          \"name\": \"message\",\r\n   " +
+				"       \"type\": \"string\"\r\n        }\r\n      ]\r\n    },\r\n    {\r\n      \"type\": \"recor" +
+				"d\",\r\n      \"name\": \"TestRecordWithUnion\",\r\n      \"namespace\": \"org.apache.avro.t" +
+				"est\",\r\n      \"fields\": [\r\n        {\r\n          \"name\": \"kind\",\r\n          \"type\"" +
+				": [\r\n            \"null\",\r\n            \"Kind\"\r\n          ]\r\n        },\r\n        {" +
+				"\r\n          \"name\": \"value\",\r\n          \"type\": [\r\n            \"null\",\r\n        " +
+				"    \"string\"\r\n          ]\r\n        }\r\n      ]\r\n    }\r\n  ],\r\n  \"messages\": {\r\n   " +
+				" \"hello\": {\r\n      \"doc\": \"Send a greeting\",\r\n      \"request\": [\r\n        {\r\n   " +
+				"       \"name\": \"greeting\",\r\n          \"type\": \"string\"\r\n        }\r\n      ],\r\n   " +
+				"   \"response\": \"string\"\r\n    },\r\n    \"echo\": {\r\n      \"doc\": \"Pretend you\'re in " +
+				"a cave!\",\r\n      \"request\": [\r\n        {\r\n          \"name\": \"record\",\r\n         " +
+				" \"type\": \"TestRecord\"\r\n        }\r\n      ],\r\n      \"response\": \"TestRecord\"\r\n    " +
+				"},\r\n    \"add\": {\r\n      \"request\": [\r\n        {\r\n          \"name\": \"arg1\",\r\n    " +
+				"      \"type\": \"int\"\r\n        },\r\n        {\r\n          \"name\": \"arg2\",\r\n         " +
+				" \"type\": \"int\"\r\n        }\r\n      ],\r\n      \"response\": \"int\"\r\n    },\r\n    \"echoB" +
+				"ytes\": {\r\n      \"request\": [\r\n        {\r\n          \"name\": \"data\",\r\n          \"t" +
+				"ype\": \"bytes\"\r\n        }\r\n      ],\r\n      \"response\": \"bytes\"\r\n    },\r\n    \"erro" +
+				"r\": {\r\n      \"doc\": \"Always throws an error.\",\r\n      \"request\": [],\r\n      \"res" +
+				"ponse\": \"null\",\r\n      \"errors\": [\r\n        \"TestError\"\r\n      ]\r\n    },\r\n    \"a" +
+				"ck\": {\r\n      \"doc\": \"Send a one way message\",\r\n      \"request\": [],\r\n      \"res" +
+				"ponse\": \"null\",\r\n      \"one-way\": true\r\n    }\r\n  }\r\n}");
+		public Avro.Protocol Protocol
+		{
+			get
+			{
+				return protocol;
+			}
+		}
+		public void Request(Avro.Specific.ICallbackRequestor requestor, string messageName, object[] args, object callback)
+		{
+			switch(messageName)
+			{
+				case "hello":
+				requestor.Request<System.String>(messageName, args, callback);
+				break;
+
+				case "echo":
+				requestor.Request<org.apache.avro.test.TestRecord>(messageName, args, callback);
+				break;
+
+				case "add":
+				requestor.Request<System.Int32>(messageName, args, callback);
+				break;
+
+				case "echoBytes":
+				requestor.Request<System.Byte[]>(messageName, args, callback);
+				break;
+
+				case "error":
+				requestor.Request<System.Object>(messageName, args, callback);
+				break;
+
+				case "ack":
+				requestor.Request<System.Object>(messageName, args, callback);
+				break;
+			};
+		}
+		// Send a greeting
+		public abstract string hello(string greeting);
+		// Pretend you're in a cave!
+		public abstract org.apache.avro.test.TestRecord echo(org.apache.avro.test.TestRecord record);
+		public abstract int add(int arg1, int arg2);
+		public abstract byte[] echoBytes(byte[] data);
+		// Always throws an error.
+		public abstract object error();
+		// Send a one way message
+		public abstract void ack();
+	}
+}
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/SimpleCallback.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/SimpleCallback.cs
new file mode 100644
index 0000000..4e5054d
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/SimpleCallback.cs
@@ -0,0 +1,30 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.vshost.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	/// <summary>
+	/// Protocol used for testing.
+	/// </summary>
+	public abstract class SimpleCallback : Simple
+	{
+		// Send a greeting
+		public abstract void hello(string greeting, Avro.IO.ICallback<System.String> callback);
+		// Pretend you're in a cave!
+		public abstract void echo(org.apache.avro.test.TestRecord record, Avro.IO.ICallback<org.apache.avro.test.TestRecord> callback);
+		public abstract void add(int arg1, int arg2, Avro.IO.ICallback<System.Int32> callback);
+		public abstract void echoBytes(byte[] data, Avro.IO.ICallback<System.Byte[]> callback);
+		// Always throws an error.
+		public abstract void error(Avro.IO.ICallback<System.Object> callback);
+	}
+}
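
Each callback overload takes an ICallback<T> in place of a return value. CallFuture<T>, used throughout SocketServerWithCallbacksTest further down, implements that interface, so a caller can either block on the future or override its handlers. A brief sketch, assuming client is a proxy obtained from SpecificRequestor.CreateClient<SimpleCallback>(...):

    // Asynchronous RPC through the generated callback client.
    var future = new CallFuture<string>();
    client.hello("how are you?", future);

    // Option 1: block with a timeout, as the tests below do.
    string reply = future.WaitForResult(2000);

    // Option 2: subclass CallFuture<T> and override HandleResult/HandleException
    // (see NestedCallFuture in SocketServerWithCallbacksTest).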
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/TestError.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/TestError.cs
new file mode 100644
index 0000000..1ea2e68
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/TestError.cs
@@ -0,0 +1,56 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.vshost.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class TestError : SpecificException
+	{
+		private static Schema _SCHEMA = Avro.Schema.Parse("{\"type\":\"error\",\"name\":\"TestError\",\"namespace\":\"org.apache.avro.test\",\"fields\":[{" +
+				"\"name\":\"message\",\"type\":\"string\"}]}");
+		private string _message;
+		public override Schema Schema
+		{
+			get
+			{
+				return TestError._SCHEMA;
+			}
+		}
+		public string message
+		{
+			get
+			{
+				return this._message;
+			}
+			set
+			{
+				this._message = value;
+			}
+		}
+		public override object Get(int fieldPos)
+		{
+			switch (fieldPos)
+			{
+			case 0: return this.message;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+			};
+		}
+		public override void Put(int fieldPos, object fieldValue)
+		{
+			switch (fieldPos)
+			{
+			case 0: this.message = (System.String)fieldValue; break;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+			};
+		}
+	}
+}
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/TestRecord.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/TestRecord.cs
new file mode 100644
index 0000000..899b452
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/TestRecord.cs
@@ -0,0 +1,83 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.vshost.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class TestRecord : ISpecificRecord
+	{
+		private static Schema _SCHEMA = Avro.Schema.Parse(@"{""type"":""record"",""name"":""TestRecord"",""namespace"":""org.apache.avro.test"",""fields"":[{""name"":""name"",""type"":""string"",""javaAnnotation"":""org.apache.avro.TestAnnotation""},{""name"":""kind"",""type"":{""type"":""enum"",""name"":""Kind"",""namespace"":""org.apache.avro.test"",""symbols"":[""FOO"",""BAR"",""BAZ""],""javaAnnotation"":""org.apache.avro.TestAnnotation""}},{""name"":""hash"",""type"":{""type"":""fixed"","" [...]
+		private string _name;
+		private org.apache.avro.test.Kind _kind;
+		private org.apache.avro.test.MD5 _hash;
+		public virtual Schema Schema
+		{
+			get
+			{
+				return TestRecord._SCHEMA;
+			}
+		}
+		public string name
+		{
+			get
+			{
+				return this._name;
+			}
+			set
+			{
+				this._name = value;
+			}
+		}
+		public org.apache.avro.test.Kind kind
+		{
+			get
+			{
+				return this._kind;
+			}
+			set
+			{
+				this._kind = value;
+			}
+		}
+		public org.apache.avro.test.MD5 hash
+		{
+			get
+			{
+				return this._hash;
+			}
+			set
+			{
+				this._hash = value;
+			}
+		}
+		public virtual object Get(int fieldPos)
+		{
+			switch (fieldPos)
+			{
+			case 0: return this.name;
+			case 1: return this.kind;
+			case 2: return this.hash;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+			};
+		}
+		public virtual void Put(int fieldPos, object fieldValue)
+		{
+			switch (fieldPos)
+			{
+			case 0: this.name = (System.String)fieldValue; break;
+			case 1: this.kind = (org.apache.avro.test.Kind)fieldValue; break;
+			case 2: this.hash = (org.apache.avro.test.MD5)fieldValue; break;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+			};
+		}
+	}
+}
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/TestRecordExtensions.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/TestRecordExtensions.cs
new file mode 100644
index 0000000..8334623
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/TestRecordExtensions.cs
@@ -0,0 +1,29 @@
+namespace org.apache.avro.test
+{
+    public partial class TestRecord
+    {
+        protected bool Equals(TestRecord other)
+        {
+            return string.Equals(_name, other._name) && _kind == other._kind && Equals(_hash, other._hash);
+        }
+
+        public override bool Equals(object obj)
+        {
+            if (ReferenceEquals(null, obj)) return false;
+            if (ReferenceEquals(this, obj)) return true;
+            if (obj.GetType() != this.GetType()) return false;
+            return Equals((TestRecord) obj);
+        }
+
+        public override int GetHashCode()
+        {
+            unchecked
+            {
+                int hashCode = (_name != null ? _name.GetHashCode() : 0);
+                hashCode = (hashCode*397) ^ (int) _kind;
+                hashCode = (hashCode*397) ^ (_hash != null ? _hash.GetHashCode() : 0);
+                return hashCode;
+            }
+        }
+    }
+}
\ No newline at end of file
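
This hand-written half of the partial class gives TestRecord value semantics; the generated half leaves Equals/GetHashCode at their reference-equality defaults. The override is what lets SerializationTest compare an echoed record against the original with a single assertion. A small illustration (MD5 is the generated fixed type; sharing one instance sidesteps its own equality semantics):

    var md5 = new MD5();
    var a = new TestRecord { name = "x", kind = Kind.FOO, hash = md5 };
    var b = new TestRecord { name = "x", kind = Kind.FOO, hash = md5 };
    Assert.AreEqual(a, b);   // holds only because of the Equals override above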
diff --git a/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/TestRecordWithUnion.cs b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/TestRecordWithUnion.cs
new file mode 100644
index 0000000..f2b4251
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/GeneratedFiles/org/apache/avro/test/TestRecordWithUnion.cs
@@ -0,0 +1,69 @@
+// ------------------------------------------------------------------------------
+// <auto-generated>
+//    Generated by avrogen.vshost.exe, version 0.9.0.0
+//    Changes to this file may cause incorrect behavior and will be lost if code
+//    is regenerated
+// </auto-generated>
+// ------------------------------------------------------------------------------
+namespace org.apache.avro.test
+{
+	using System;
+	using System.Collections.Generic;
+	using System.Text;
+	using Avro;
+	using Avro.Specific;
+	
+	public partial class TestRecordWithUnion : ISpecificRecord
+	{
+		private static Schema _SCHEMA = Avro.Schema.Parse(@"{""type"":""record"",""name"":""TestRecordWithUnion"",""namespace"":""org.apache.avro.test"",""fields"":[{""name"":""kind"",""type"":[""null"",{""type"":""enum"",""name"":""Kind"",""namespace"":""org.apache.avro.test"",""symbols"":[""FOO"",""BAR"",""BAZ""],""javaAnnotation"":""org.apache.avro.TestAnnotation""}]},{""name"":""value"",""type"":[""null"",""string""]}]}");
+		private System.Nullable<org.apache.avro.test.Kind> _kind;
+		private string _value;
+		public virtual Schema Schema
+		{
+			get
+			{
+				return TestRecordWithUnion._SCHEMA;
+			}
+		}
+		public System.Nullable<org.apache.avro.test.Kind> kind
+		{
+			get
+			{
+				return this._kind;
+			}
+			set
+			{
+				this._kind = value;
+			}
+		}
+		public string @value
+		{
+			get
+			{
+				return this._value;
+			}
+			set
+			{
+				this._value = value;
+			}
+		}
+		public virtual object Get(int fieldPos)
+		{
+			switch (fieldPos)
+			{
+			case 0: return this.kind;
+			case 1: return this.@value;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Get()");
+			};
+		}
+		public virtual void Put(int fieldPos, object fieldValue)
+		{
+			switch (fieldPos)
+			{
+			case 0: this.kind = fieldValue == null ? (System.Nullable<org.apache.avro.test.Kind>)null : (org.apache.avro.test.Kind)fieldValue; break;
+			case 1: this.@value = (System.String)fieldValue; break;
+			default: throw new AvroRuntimeException("Bad index " + fieldPos + " in Put()");
+			};
+		}
+	}
+}
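
Worth noting in this class: avrogen maps the ["null", "Kind"] union onto System.Nullable<Kind>, while ["null", "string"] stays a plain string, since reference types already admit null; Put therefore has to unbox the enum case explicitly. In use:

    var rec = new TestRecordWithUnion();
    rec.Put(0, null);        // kind  -> (Kind?)null
    rec.Put(0, Kind.BAZ);    // boxed Kind is unboxed, then wrapped as Kind?
    rec.Put(1, "hello");     // value -> string (null is also accepted)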
diff --git a/lang/csharp/src/apache/test/Ipc/HttpClientServerTest.cs b/lang/csharp/src/apache/test/Ipc/HttpClientServerTest.cs
new file mode 100644
index 0000000..5c2ddf6
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/HttpClientServerTest.cs
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Net;
+
+using NUnit.Framework;
+//using Microsoft.VisualStudio.TestTools.UnitTesting;
+
+using Avro.ipc;
+using Avro.ipc.Generic;
+using Avro.Generic;
+
+namespace Avro.Test.Ipc
+{
+    [TestFixture]
+    //[TestClass]
+    public class HttpClientServerTest
+    {
+        private HttpListenerServer server;
+        private MailResponder mailResponder;
+        private HttpTransceiver transceiver;
+        private GenericRequestor proxy;
+
+        const string URL = @"http://localhost:18080/avro/test/ipc/mailResponder/";
+
+        [TestFixtureSetUp]
+        //[TestInitialize]
+        public void Init()
+        {
+            mailResponder = new MailResponder();
+
+            server = new HttpListenerServer(new string[] { URL }, mailResponder);
+            server.Start();
+
+            HttpWebRequest requestTemplate = (HttpWebRequest)WebRequest.Create(URL);
+            requestTemplate.Timeout = 6000;
+            requestTemplate.Proxy = null;
+            transceiver = new HttpTransceiver(requestTemplate);
+            proxy = new GenericRequestor(transceiver, MailResponder.Protocol);
+        }
+
+        [TestFixtureTearDown]
+        //[TestCleanup]
+        public void Cleanup()
+        {
+            server.Stop();
+        }
+
+        private string Send(GenericRecord message)
+        {
+            var request = new GenericRecord(MailResponder.Protocol.Messages["send"].Request);
+            request.Add("message", message);
+
+            var result = (string)proxy.Request("send", request);
+            return result;
+        }
+
+        [Test]
+        //[TestMethod]
+        public void TestRequestResponse()
+        {
+            for (int x = 0; x < 5; x++)
+            {
+                var message = SocketServerTest.CreateMessage();
+
+                var result = Send(message);
+                SocketServerTest.VerifyResponse(result);
+            }
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/test/Ipc/LocalTransceiverTest.cs b/lang/csharp/src/apache/test/Ipc/LocalTransceiverTest.cs
new file mode 100644
index 0000000..fc6a7fc
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/LocalTransceiverTest.cs
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using Avro.Generic;
+using Avro.IO;
+using Avro.ipc;
+using Avro.ipc.Generic;
+using NUnit.Framework;
+
+namespace Avro.Test.Ipc
+{
+    [TestFixture]
+    public class LocalTransceiverTest
+    {
+        [TestCase]
+        public void TestSingleRpc()
+        {
+            Transceiver t = new LocalTransceiver(new TestResponder(protocol));
+            var p = new GenericRecord(protocol.Messages["m"].Request);
+            p.Add("x", "hello");
+            var r = new GenericRequestor(t, protocol);
+
+            for (int x = 0; x < 5; x++)
+            {
+                object response = r.Request("m", p);
+                Assert.AreEqual("there", response);
+            }
+        }
+
+
+        private readonly Protocol protocol = Protocol.Parse("" + "{\"protocol\": \"Minimal\", "
+                                                            + "\"messages\": { \"m\": {"
+                                                            +
+                                                            "   \"request\": [{\"name\": \"x\", \"type\": \"string\"}], "
+                                                            + "   \"response\": \"string\"} } }");
+
+        public class TestResponder : GenericResponder
+        {
+            public TestResponder(Protocol local)
+                : base(local)
+            {
+            }
+
+            public override object Respond(Message message, object request)
+            {
+                Assert.AreEqual("hello", ((GenericRecord) request)["x"]);
+                return "there";
+            }
+
+            public override void WriteError(Schema schema, object error, Encoder output)
+            {
+                throw new NotSupportedException();
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/test/Ipc/MailResponder.cs b/lang/csharp/src/apache/test/Ipc/MailResponder.cs
new file mode 100644
index 0000000..b6502c5
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/MailResponder.cs
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.IO;
+using System.Reflection;
+using Avro.Generic;
+using Avro.IO;
+using Avro.ipc;
+using Avro.ipc.Generic;
+using NUnit.Framework;
+
+namespace Avro.Test.Ipc
+{
+    public class MailResponder : GenericResponder
+    {
+        private static Protocol protocol;
+        private CountdownLatch allMessages = new CountdownLatch(5);
+
+        public MailResponder()
+            : base(Protocol)
+        {
+        }
+
+        public static Protocol Protocol
+        {
+            get
+            {
+                if (protocol == null)
+                {
+                    string readAllLines;
+                    using (
+                        Stream stream =
+                            Assembly.GetExecutingAssembly().GetManifestResourceStream("Avro.test.Ipc.mail.avpr"))
+                    using (var reader = new StreamReader(stream))
+                    {
+                        readAllLines = reader.ReadToEnd();
+                    }
+
+                    protocol = Protocol.Parse(readAllLines);
+                }
+
+                return protocol;
+            }
+        }
+
+        public override object Respond(Message message, object request)
+        {
+            if (message.Name == "send")
+            {
+                var genericRecord = (GenericRecord) ((GenericRecord) request)["message"];
+
+                return "Sent message to [" + genericRecord["to"] +
+                       "] from [" + genericRecord["from"] + "] with body [" +
+                       genericRecord["body"] + "]";
+            }
+            if (message.Name == "fireandforget")
+            {
+                allMessages.Signal();
+                return null;
+            }
+
+            throw new NotSupportedException();
+        }
+
+        public void Reset()
+        {
+            allMessages = new CountdownLatch(5);
+        }
+
+        public void AwaitMessages()
+        {
+            allMessages.Wait(2000);
+        }
+
+        public void AssertAllMessagesReceived()
+        {
+            Assert.AreEqual(0, allMessages.CurrentCount);
+        }
+
+
+        public override void WriteError(Schema schema, object error, Encoder output)
+        {
+            Assert.Fail(error.ToString());
+        }
+    }
+}
\ No newline at end of file
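
MailResponder and the socket tests that follow coordinate through a CountdownLatch helper (Signal, Wait with a millisecond timeout, CurrentCount). Its definition sits outside this hunk; the contract the tests assume is roughly that of Java's CountDownLatch. A minimal sketch of that contract, covering only the members these tests call, not the shipped implementation:

    using System.Threading;

    // Sketch of the latch semantics the tests rely on; the real helper may differ.
    public class CountdownLatch
    {
        private readonly ManualResetEvent done = new ManualResetEvent(false);
        private int count;

        public CountdownLatch(int count) { this.count = count; }

        public int CurrentCount { get { return count; } }

        // Decrement; release all waiters once the count reaches zero.
        public void Signal()
        {
            if (Interlocked.Decrement(ref count) <= 0)
                done.Set();
        }

        public bool Wait(int timeoutMs) { return done.WaitOne(timeoutMs); }
    }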
diff --git a/lang/csharp/src/apache/test/Ipc/SerializationTest.cs b/lang/csharp/src/apache/test/Ipc/SerializationTest.cs
new file mode 100644
index 0000000..84104a0
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/SerializationTest.cs
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System.Collections.Generic;
+using Avro.ipc;
+using Avro.ipc.Generic;
+using Avro.ipc.Specific;
+using NUnit.Framework;
+using org.apache.avro.test;
+
+namespace Avro.Test.Ipc
+{
+    [TestFixture]
+    public class SerializationTest
+    {
+        class AllImpl : All
+        {
+            public override AllTestRecord echo(AllTestRecord allTest)
+            {
+                return allTest;
+            }
+
+            public override AllTestRecord echoParameters(bool booleanTest, int intTest, long longTest, float floatTest, double doubleTest,
+                                                         byte[] bytesTest, string stringTest, AllEnum enumTest, FixedTest fixedTest, IList<long> arrayTest,
+                                                         IDictionary<string, long> mapTest, AllTestRecord nestedTest)
+            {
+                return new AllTestRecord
+                           {
+                               stringTest = stringTest,
+                               booleanTest = booleanTest,
+                               intTest = intTest,
+                               arrayTest = arrayTest,
+                               bytesTest = bytesTest,
+                               doubleTest = doubleTest,
+                               enumTest = enumTest,
+                               fixedTest = fixedTest,
+                               floatTest = floatTest,
+                               longTest = longTest,
+                               mapTest = mapTest,
+                               nestedTest = nestedTest
+                           };
+            }
+        }
+        private SocketServer server;
+        private SocketTransceiver transceiver;
+        private All simpleClient;
+
+        [TestFixtureSetUp]
+        public void Init()
+        {
+            var responder = new SpecificResponder<All>(new AllImpl());
+
+            server = new SocketServer("localhost", 0, responder);
+            server.Start();
+
+            transceiver = new SocketTransceiver("localhost", server.Port);
+
+            simpleClient = SpecificRequestor.CreateClient<All>(transceiver);
+        }
+
+        [TestFixtureTearDown]
+        public void Cleanup()
+        {
+            server.Stop();
+
+            transceiver.Disconnect();
+        }
+
+        [Test]
+        public void EchoClass()
+        {
+            AllTestRecord expected = CreateExpectedTestData();
+            AllTestRecord actual = simpleClient.echo(expected);
+
+            Assert.AreEqual(expected, actual);
+        }
+
+        [Test]
+        public void EchoParameters()
+        {
+            AllTestRecord expected = CreateExpectedTestData();
+
+            AllTestRecord actual = simpleClient.echoParameters(
+                expected.booleanTest,
+                expected.intTest,
+                expected.longTest,
+                expected.floatTest,
+                expected.doubleTest,
+                expected.bytesTest,
+                expected.stringTest,
+                expected.enumTest,
+                expected.fixedTest,
+                expected.arrayTest,
+                expected.mapTest,
+                expected.nestedTest);
+
+            Assert.AreEqual(expected, actual);
+        }
+
+        private static AllTestRecord CreateExpectedTestData()
+        {
+            var fixedTestData = new FixedTest();
+            fixedTestData.Value[0] = 5;
+
+            return new AllTestRecord
+            {
+                arrayTest = new List<long> { 1, 2, 3, 4 },
+                booleanTest = true,
+                bytesTest = new byte[] { 1, 2, 3, 4 },
+                doubleTest = 5.0,
+                enumTest = AllEnum.BAR,
+                fixedTest = fixedTestData,
+                floatTest = 99.0f,
+                intTest = 3,
+                longTest = 4,
+                stringTest = "required",
+                mapTest = new Dictionary<string, long>
+                                         {
+                                             { "foo", 1},
+                                             { "bar", 2}
+                                         },
+                nestedTest = new AllTestRecord
+                {
+                    booleanTest = true,
+                    bytesTest = new byte[] { 1 },
+                    stringTest = "required",
+                    fixedTest = fixedTestData,
+                    arrayTest = new List<long> { 1, 2, 3, 4 },
+                    mapTest = new Dictionary<string, long>
+                                                              {
+                                                                  { "foo", 1},
+                                                                  { "bar", 2}
+                                                              },
+                }
+            };
+        }
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/test/Ipc/SocketServerConcurrentExecutionTest.cs b/lang/csharp/src/apache/test/Ipc/SocketServerConcurrentExecutionTest.cs
new file mode 100644
index 0000000..b0847ca
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/SocketServerConcurrentExecutionTest.cs
@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Threading;
+using Avro.Generic;
+using Avro.IO;
+using Avro.ipc;
+using Avro.ipc.Generic;
+using NUnit.Framework;
+
+namespace Avro.Test.Ipc
+{
+    [TestFixture]
+    public class SocketServerConcurrentExecutionTest
+    {
+        private SocketServer server;
+
+        private SocketTransceiver transceiver;
+        private GenericRequestor proxy;
+
+        //[TearDown]
+        public void Cleanup()
+        {
+            try
+            {
+                if (transceiver != null)
+                {
+                    transceiver.Disconnect();
+                }
+            }
+            catch
+            {
+            }
+
+            try
+            {
+                server.Stop();
+            }
+            catch
+            {
+            }
+        }
+
+        // AVRO-625 [Test] 
+        // Currently, SocketTransceiver does not permit out-of-order requests on a stateful connection.
+        public void Test()
+        {
+            var waitLatch = new CountdownLatch(1);
+            var simpleResponder = new SimpleResponder(waitLatch);
+            server = new SocketServer("localhost", 0, simpleResponder);
+
+            server.Start();
+
+            int port = server.Port;
+
+            transceiver = new SocketTransceiver("localhost", port);
+            proxy = new GenericRequestor(transceiver, SimpleResponder.Protocol);
+
+            // Step 1:
+            proxy.GetRemote(); // force handshake
+
+            new Thread(x =>
+                           {
+                               // Step 2a:
+                               waitLatch.Wait();
+
+                               var ack = new GenericRecord(SimpleResponder.Protocol.Messages["ack"].Request);
+                               // Step 2b:
+                               proxy.Request("ack", ack);
+
+                           }).Start();
+
+
+            /*
+             * 3. Execute the Client.hello("wait") RPC, which will block until the
+             *    Client.ack() call has completed in the background thread.
+             */
+
+            var request = new GenericRecord(SimpleResponder.Protocol.Messages["hello"].Request);
+            request.Add("greeting", "wait");
+
+            var response = (string)proxy.Request("hello", request);
+
+            // 4. If control reaches here, both RPCs have executed concurrently
+            Assert.AreEqual("wait", response); 
+        }
+
+        private class SimpleResponder : GenericResponder
+        {
+            private readonly CountdownLatch waitLatch;
+            private readonly CountdownLatch ackLatch = new CountdownLatch(1);
+
+            static readonly public Protocol Protocol = Protocol.Parse("{\"protocol\":\"Simple\",\"namespace\":\"org.apache.avro.test\",\"doc\":\"Protocol used for testing.\",\"version\":\"1.6.2\",\"javaAnnotation\":[\"javax.annotation.Generated(\\\"avro\\\")\",\"org.apache.avro.TestAnnotation\"],\"types\":[{\"type\":\"enum\",\"name\":\"Kind\",\"symbols\":[\"FOO\",\"BAR\",\"BAZ\"],\"javaAnnotation\":\"org.apache.avro.TestAnnotation\"},{\"type\":\"fixed\",\"name\":\"MD5\",\"size\":16,\"jav [...]
+
+            public SimpleResponder(CountdownLatch waitLatch)
+                : base(Protocol)
+            {
+                this.waitLatch = waitLatch;
+            }
+
+            public override object Respond(Message message, object request)
+            {
+                if (message.Name == "hello")
+                {
+                    string greeting = ((GenericRecord)request)["greeting"].ToString();
+                    if (greeting == "wait")
+                    {
+                        // Step 3a:
+                        waitLatch.Signal();
+
+                        // Step 3b:
+                        ackLatch.Wait();
+                    }
+                    return greeting;
+                }
+                if (message.Name == "ack")
+                {
+                    ackLatch.Signal();
+                    return null; // one-way message: no response payload, as in MailResponder
+                }
+
+                throw new NotSupportedException();
+            }
+
+            public override void WriteError(Schema schema, object error, Encoder output)
+            {
+                throw new System.NotImplementedException();
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/test/Ipc/SocketServerTest.cs b/lang/csharp/src/apache/test/Ipc/SocketServerTest.cs
new file mode 100644
index 0000000..1a1747b
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/SocketServerTest.cs
@@ -0,0 +1,187 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Net.Sockets;
+using Avro.Generic;
+using Avro.ipc;
+using Avro.ipc.Generic;
+using NUnit.Framework;
+
+namespace Avro.Test.Ipc
+{
+    [TestFixture]
+    public class SocketServerTest
+    {
+        private SocketServer server;
+        private MailResponder mailResponder;
+        private SocketTransceiver transceiver;
+        private GenericRequestor proxy;
+
+        [TestFixtureSetUp]
+        public void Init()
+        {
+            mailResponder = new MailResponder();
+
+            server = new SocketServer("localhost", 0, mailResponder);
+            server.Start();
+
+            transceiver = new SocketTransceiver("localhost", server.Port);
+            proxy = new GenericRequestor(transceiver, MailResponder.Protocol);
+        }
+
+        [TestFixtureTearDown]
+        public void Cleanup()
+        {
+            server.Stop();
+
+            transceiver.Disconnect();
+        }
+
+        public void Reset()
+        {
+            Cleanup();
+            Init();
+        }
+
+        private string Send(GenericRecord message)
+        {
+            var request = new GenericRecord(MailResponder.Protocol.Messages["send"].Request);
+            request.Add("message", message);
+
+            var result = (string) proxy.Request("send", request);
+            return result;
+        }
+
+        private static void FireAndForget(GenericRequestor proxy, GenericRecord genericRecord)
+        {
+            var request = new GenericRecord(MailResponder.Protocol.Messages["fireandforget"].Request);
+            request.Add("message", genericRecord);
+
+            proxy.Request("fireandforget", request);
+        }
+
+        private void FireAndForget(GenericRecord genericRecord)
+        {
+            FireAndForget(proxy, genericRecord);
+        }
+
+        private static byte[] GetBytes(string str)
+        {
+            var bytes = new byte[str.Length * sizeof(char)];
+            Buffer.BlockCopy(str.ToCharArray(), 0, bytes, 0, bytes.Length);
+            return bytes;
+        }
+
+        public static GenericRecord CreateMessage()
+        {
+            // The first and only type in the list is the Message type.
+            var recordSchema = (RecordSchema) MailResponder.Protocol.Types[0];
+            var record = new GenericRecord(recordSchema);
+
+            record.Add("to", "wife");
+            record.Add("from", "husband");
+            record.Add("body", "I love you!");
+
+            return record;
+        }
+
+        public static void VerifyResponse(string result)
+        {
+            Assert.AreEqual(
+                "Sent message to [wife] from [husband] with body [I love you!]",
+                result);
+        }
+
+        [Test]
+        public void TestBadRequest()
+        {
+            int port = server.Port;
+            const string msg = "GET /status HTTP/1.1\n\n";
+            var clientSocket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
+            clientSocket.Connect("localhost", port);
+
+            clientSocket.Send(GetBytes(msg));
+            var buf = new byte[2048];
+
+            try
+            {
+                clientSocket.Receive(buf);
+            }
+            catch (SocketException ex)
+            {
+                Assert.AreEqual((int)SocketError.ConnectionReset, ex.ErrorCode);
+            }
+        }
+
+        [Test]
+        public void TestMixtureOfRequests()
+        {
+            mailResponder.Reset();
+            for (int x = 0; x < 5; x++)
+            {
+                var createMessage = CreateMessage();
+                FireAndForget(createMessage);
+
+                var result = Send(createMessage);
+                VerifyResponse(result);
+            }
+            mailResponder.AwaitMessages();
+            mailResponder.AssertAllMessagesReceived();
+        }
+
+        [Test]
+        public void TestMultipleConnectionsCount()
+        {
+            Reset();
+
+            var transceiver2 = new SocketTransceiver("localhost", server.Port);
+
+            var proxy2 = new GenericRequestor(transceiver2, MailResponder.Protocol);
+
+            FireAndForget(proxy, CreateMessage());
+            FireAndForget(proxy2, CreateMessage());
+            transceiver2.Disconnect();
+        }
+
+        [Test]
+        public void TestOneway()
+        {
+            Reset();
+            for (int x = 0; x < 5; x++)
+            {
+                GenericRecord genericRecord = CreateMessage();
+                FireAndForget(genericRecord);
+            }
+            mailResponder.AwaitMessages();
+            mailResponder.AssertAllMessagesReceived();
+        }
+
+        [Test]
+        public void TestRequestResponse()
+        {
+            for (int x = 0; x < 5; x++)
+            {
+                var message = CreateMessage();
+
+                var result = Send(message);
+                VerifyResponse(result);
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/test/Ipc/SocketServerWithCallbacksTest.cs b/lang/csharp/src/apache/test/Ipc/SocketServerWithCallbacksTest.cs
new file mode 100644
index 0000000..29d2b63
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/SocketServerWithCallbacksTest.cs
@@ -0,0 +1,806 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Diagnostics;
+using System.Net.Sockets;
+using System.Threading;
+using Avro.ipc;
+using Avro.ipc.Specific;
+using NUnit.Framework;
+using org.apache.avro.test;
+
+namespace Avro.Test.Ipc
+{
+    [TestFixture]
+    public class SocketServerWithCallbacksTest
+    {
+        private static volatile bool ackFlag;
+        private static volatile CountdownLatch ackLatch = new CountdownLatch(1);
+
+        private SocketServer server;
+        private SocketTransceiver transceiver;
+        private SimpleCallback simpleClient;
+        
+        [TestFixtureSetUp]
+        public void Init()
+        {
+            var responder = new SpecificResponder<Simple>(new SimpleImpl());
+            server = new SocketServer("localhost", 0, responder);
+            server.Start();
+
+            transceiver = new SocketTransceiver("localhost", server.Port);
+            simpleClient = SpecificRequestor.CreateClient<SimpleCallback>(transceiver);
+        }
+
+        [TestFixtureTearDown]
+        public void TearDown()
+        {
+            try
+            {
+                if (transceiver != null)
+                {
+                    transceiver.Disconnect();
+                }
+            }
+            catch
+            {
+            }
+
+            try
+            {
+                server.Stop();
+            }
+            catch
+            {
+            }
+        }
+
+
+        // AVRO-625 [Test]
+        public void CancelPendingRequestsOnTransceiverClose()
+        {
+            // Start up a second server so that closing the server doesn't 
+            // interfere with the other unit tests:
+            var blockingSimpleImpl = new BlockingSimpleImpl();
+
+            var responder = new SpecificResponder<Simple>(blockingSimpleImpl);
+            var server2 = new SocketServer("localhost", 0, responder);
+
+            server2.Start();
+
+            try
+            {
+                int serverPort = server2.Port;
+
+                var transceiver2 = new SocketTransceiver("localhost", serverPort);
+
+                var addFuture = new CallFuture<int>();
+                try
+                {
+                    var simpleClient2 = SpecificRequestor.CreateClient<SimpleCallback>(transceiver2);
+
+                    // The first call has to block for the handshake:
+                    Assert.AreEqual(3, simpleClient2.add(1, 2));
+
+                    // Now acquire the semaphore so that the server will block:
+                    blockingSimpleImpl.acquireRunPermit();
+                    simpleClient2.add(1, 2, addFuture);
+                }
+                finally
+                {
+                    // When the transceiver is closed, the CallFuture should get 
+                    // an IOException
+                    transceiver2.Close();
+                }
+                bool ioeThrown = false;
+                try
+                {
+                    addFuture.WaitForResult(2000);
+                }
+                catch (Exception)
+                {
+                    // The Java original asserts the cause is an IOException;
+                    // here any failure of the pending call sets the flag.
+                    ioeThrown = true;
+                }
+                //catch (ExecutionException e) {
+                //  ioeThrown = e.getCause() instanceof IOException;
+                //  Assert.assertTrue(e.getCause() instanceof IOException);
+                //} catch (Exception e) {
+                //  e.printStackTrace();
+                //  Assert.fail("Unexpected Exception: " + e.toString());
+                //}
+                Assert.IsTrue(ioeThrown, "Expected IOException to be thrown");
+            }
+            finally
+            {
+                blockingSimpleImpl.releaseRunPermit();
+                server2.Stop();
+            }
+        }
+
+        // AVRO-625 [Test]
+        public void CancelPendingRequestsAfterChannelCloseByServerShutdown()
+        {
+            // The purpose of this test is to verify that a client doesn't stay
+            // blocked when a server is unexpectedly killed (or when for some
+            // other reason the channel is suddenly closed) while the server
+            // was in the process of handling a request (thus after it received
+            // the request, and before it returned the response).
+
+            // Start up a second server so that closing the server doesn't
+            // interfere with the other unit tests:
+            var blockingSimpleImpl = new BlockingSimpleImpl();
+
+            var responder = new SpecificResponder<Simple>(blockingSimpleImpl);
+            var server2 = new SocketServer("localhost", 0, responder);
+
+            server2.Start();
+            SocketTransceiver transceiver2 = null;
+
+            try
+            {
+                transceiver2 = new SocketTransceiver("localhost", server2.Port);
+
+                var simpleClient2 = SpecificRequestor.CreateClient<SimpleCallback>(transceiver2);
+
+                // Acquire the method-enter permit, which will be released by the
+                // server method once we call it
+                blockingSimpleImpl.acquireEnterPermit();
+
+                // Acquire the run permit, to avoid that the server method returns immediately
+                blockingSimpleImpl.acquireRunPermit();
+
+                var t = new Thread(() =>
+                                       {
+                                           try
+                                           {
+                                               simpleClient2.add(3, 4);
+                                               Assert.Fail("Expected an exception");
+                                           }
+                                           catch (Exception)
+                                           {
+                                               // expected
+                                           }
+                                       });
+                // Start client call
+                t.Start();
+
+                // Wait until method is entered on the server side
+                blockingSimpleImpl.acquireEnterPermit();
+
+                // The server side method is now blocked waiting on the run permit
+                // (= is busy handling the request)
+
+                // Stop the server
+                server2.Stop();
+
+                // With the server gone, we expect the client to get some exception and exit
+                // Wait for client thread to exit
+                t.Join(10000);
+
+                Assert.IsFalse(t.IsAlive, "Client request should not be blocked on server shutdown");
+            }
+            finally
+            {
+                blockingSimpleImpl.releaseRunPermit();
+                server2.Stop();
+                if (transceiver2 != null)
+                    transceiver2.Close();
+            }
+        }
+
+
+        private class CallbackCallFuture<T> : CallFuture<T>
+        {
+            private readonly Action<Exception> _handleException;
+            private readonly Action<T> _handleResult;
+
+            public CallbackCallFuture(Action<T> handleResult = null, Action<Exception> handleException = null)
+            {
+                _handleResult = handleResult;
+                _handleException = handleException;
+            }
+
+            public override void HandleResult(T result)
+            {
+                if (_handleResult != null)   // handlers are optional (default null)
+                    _handleResult(result);
+            }
+
+            public override void HandleException(Exception exception)
+            {
+                if (_handleException != null)
+                    _handleException(exception);
+            }
+        }
+
+        private class NestedCallFuture<T> : CallFuture<T>
+        {
+            private readonly CallFuture<T> cf;
+
+            public NestedCallFuture(CallFuture<T> cf)
+            {
+                this.cf = cf;
+            }
+
+            public override void HandleResult(T result)
+            {
+                cf.HandleResult(result);
+            }
+
+            public override void HandleException(Exception exception)
+            {
+                cf.HandleException(exception);
+            }
+        }
+
+
+        private string Hello(string howAreYou)
+        {
+            var response = new CallFuture<string>();
+
+            simpleClient.hello(howAreYou, response);
+
+            return response.WaitForResult(2000);
+        }
+
+        private void Hello(string howAreYou, CallFuture<string> future1)
+        {
+            simpleClient.hello(howAreYou, future1);
+        }
+
+        private class BlockingSimpleImpl : SimpleImpl
+        {
+            /** Semaphore that is released when the method is entered. */
+            private readonly Semaphore enterSemaphore = new Semaphore(1, 1);
+            /** Semaphore that must be acquired for the method to run and exit. */
+            private readonly Semaphore runSemaphore = new Semaphore(1, 1);
+
+
+            public override string hello(string greeting)
+            {
+                releaseEnterPermit();
+                acquireRunPermit();
+                try
+                {
+                    return base.hello(greeting);
+                }
+                finally
+                {
+                    releaseRunPermit();
+                }
+            }
+
+            public override TestRecord echo(TestRecord record)
+            {
+                releaseEnterPermit();
+                acquireRunPermit();
+                try
+                {
+                    return base.echo(record);
+                }
+                finally
+                {
+                    releaseRunPermit();
+                }
+            }
+
+            public override int add(int arg1, int arg2)
+            {
+                releaseEnterPermit();
+                acquireRunPermit();
+                try
+                {
+                    return base.add(arg1, arg2);
+                }
+                finally
+                {
+                    releaseRunPermit();
+                }
+            }
+
+            public override byte[] echoBytes(byte[] data)
+            {
+                releaseEnterPermit();
+                acquireRunPermit();
+                try
+                {
+                    return base.echoBytes(data);
+                }
+                finally
+                {
+                    releaseRunPermit();
+                }
+            }
+
+            public override object error()
+            {
+                releaseEnterPermit();
+                acquireRunPermit();
+                try
+                {
+                    return base.error();
+                }
+                finally
+                {
+                    releaseRunPermit();
+                }
+            }
+
+            public override void ack()
+            {
+                releaseEnterPermit();
+                acquireRunPermit();
+                try
+                {
+                    base.ack();
+                }
+                finally
+                {
+                    releaseRunPermit();
+                }
+            }
+
+
+            /** Acquires a single permit from the semaphore. */
+
+            public void acquireRunPermit()
+            {
+                try
+                {
+                    runSemaphore.WaitOne();
+                    //  } catch (InterruptedException e) {
+                    //    Thread.currentThread().interrupt();
+                    //    throw new RuntimeException(e);
+                    //}
+                }
+                finally
+                {
+                }
+            }
+
+            /** Releases a single permit to the semaphore. */
+
+            public void releaseRunPermit()
+            {
+                try
+                {
+                    runSemaphore.Release();
+                }
+                catch (SemaphoreFullException)
+                {
+                    // The semaphore is already at its maximum count; safe to ignore.
+                }
+            }
+
+            private void releaseEnterPermit()
+            {
+                try
+                {
+                    enterSemaphore.Release();
+                }
+                catch (SemaphoreFullException)
+                {
+                    // The semaphore is already at its maximum count; safe to ignore.
+                }
+            }
+
+            /** Acquires a single permit from the enter semaphore. */
+            public void acquireEnterPermit()
+            {
+                enterSemaphore.WaitOne();
+            }
+        }
+
+        [Test]
+        public void Ack()
+        {
+            simpleClient.ack();
+
+            Assert.IsTrue(ackLatch.Wait(2000), "Timed out waiting for ack");
+            Assert.IsTrue(ackFlag, "Expected ack flag to be set");
+
+            ackLatch = new CountdownLatch(1);
+            simpleClient.ack();
+            Assert.IsTrue(ackLatch.Wait(2000), "Timed out waiting for ack");
+            Assert.IsFalse(ackFlag, "Expected ack flag to be cleared");
+        }
+
+        [Test]
+        public void Add()
+        {
+            // Test synchronous RPC:
+            Assert.AreEqual(8, simpleClient.add(2, 6));
+
+            // Test asynchronous RPC (future):
+            var future1 = new CallFuture<int>();
+            simpleClient.add(8, 8, future1);
+            Assert.AreEqual(16, future1.WaitForResult(2000));
+            Assert.IsNull(future1.Error);
+
+            // Test asynchronous RPC (callback):
+            var future2 = new CallFuture<int>();
+            simpleClient.add(512, 256, new NestedCallFuture<int>(future2));
+
+            Assert.AreEqual(768, future2.WaitForResult(2000));
+            Assert.IsNull(future2.Error);
+        }
+
+        [Test]
+        public void ClientReconnectAfterServerRestart()
+        {
+            // Start up a second server so that closing the server doesn't 
+            // interfere with the other unit tests:
+            SimpleImpl simpleImpl = new BlockingSimpleImpl();
+
+            var responder = new SpecificResponder<Simple>(simpleImpl);
+            var server2 = new SocketServer("localhost", 0, responder);
+
+            server2.Start();
+
+            try
+            {
+                int serverPort = server2.Port;
+
+                // Initialize a client, and establish a connection to the server:
+                Transceiver transceiver2 = new SocketTransceiver("localhost", serverPort);
+
+                var simpleClient2 =
+                    SpecificRequestor.CreateClient<SimpleCallback>(transceiver2);
+
+                Assert.AreEqual(3, simpleClient2.add(1, 2));
+
+                // Restart the server:
+                server2.Stop();
+                try
+                {
+                    simpleClient2.add(2, -1);
+                    Assert.Fail("Client should not be able to invoke RPCs because server is no longer running");
+                }
+                catch (Exception)
+                {
+                    // Expected since server is no longer running
+                }
+
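+                // Pause before rebinding the same port (presumably to give the OS
+                // time to release it), then bring the server back up: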
+                Thread.Sleep(2000);
+                server2 = new SocketServer("localhost", serverPort, new SpecificResponder<Simple>(new SimpleImpl()));
+
+                server2.Start();
+
+                // Invoke an RPC using the same client, which should reestablish the 
+                // connection to the server:
+                Assert.AreEqual(3, simpleClient2.add(1, 2));
+            }
+            finally
+            {
+                server2.Stop();
+            }
+        }
+
+        [Test]
+        public void Echo()
+        {
+            var record = new TestRecord
+                             {
+                                 hash = new MD5 { Value = new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8} },
+                                 kind = Kind.FOO,
+                                 name = "My Record"
+                             };
+
+            // Test synchronous RPC:
+            TestRecord testRecord = simpleClient.echo(record);
+            Assert.AreEqual(record, testRecord);
+
+            // Test asynchronous RPC (future):
+            var future1 = new CallFuture<TestRecord>();
+            simpleClient.echo(record, future1);
+            Assert.AreEqual(record, future1.WaitForResult(2000));
+            Assert.IsNull(future1.Error);
+
+            // Test asynchronous RPC (callback):
+            var future2 = new CallFuture<TestRecord>();
+            simpleClient.echo(record, new NestedCallFuture<TestRecord>(future2));
+
+            Assert.AreEqual(record, future2.WaitForResult(2000));
+            Assert.IsNull(future2.Error);
+        }
+
+        [Test]
+        public void EchoBytes()
+        {
+            var byteBuffer = new byte[] {1, 2, 3, 4, 5, 6, 7, 8};
+
+            // Test synchronous RPC:
+            Assert.AreEqual(byteBuffer, simpleClient.echoBytes(byteBuffer));
+
+            // Test asynchronous RPC (future):
+            var future1 = new CallFuture<byte[]>();
+            simpleClient.echoBytes(byteBuffer, future1);
+            Assert.AreEqual(byteBuffer, future1.WaitForResult(2000));
+            Assert.IsNull(future1.Error);
+
+            // Test asynchronous RPC (callback):
+            var future2 = new CallFuture<byte[]>();
+            simpleClient.echoBytes(byteBuffer, new NestedCallFuture<byte[]>(future2));
+
+            Assert.AreEqual(byteBuffer, future2.WaitForResult(2000));
+            Assert.IsNull(future2.Error);
+        }
+
+        [Test, TestCase(false, TestName = "Specific error"), TestCase(true, TestName = "System error")]
+        public void Error(bool systemError)
+        {
+            Type expected;
+            
+            if (systemError)
+            {
+                expected = typeof(Exception);
+                SimpleImpl.throwSystemError = true;
+            }
+            else
+            {
+                expected = typeof(TestError);
+                SimpleImpl.throwSystemError = false;
+            }
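+            // A declared Avro error (TestError) is expected to keep its type across
+            // the wire, whereas an undeclared server exception surfaces as a plain
+            // Exception.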
+
+            // Test synchronous RPC:
+            try
+            {
+                simpleClient.error();
+                Assert.Fail("Expected " + expected.Name + " to be thrown");
+            }
+            catch (Exception e)
+            {
+                Assert.AreEqual(expected, e.GetType());
+            }
+
+            // Test asynchronous RPC (future):
+            var future = new CallFuture<object>();
+            simpleClient.error(future);
+            try
+            {
+                future.WaitForResult(2000);
+                Assert.Fail("Expected " + expected.Name + " to be thrown");
+            }
+            catch (Exception e)
+            {
+                Assert.AreEqual(expected, e.GetType());
+            }
+
+            Assert.IsNotNull(future.Error);
+            Assert.AreEqual(expected, future.Error.GetType());
+            Assert.IsNull(future.Result);
+
+            // Test asynchronous RPC (callback):
+            Exception errorRef = null;
+            var latch = new CountdownLatch(1);
+            simpleClient.error(new CallbackCallFuture<object>(
+                                   result => Assert.Fail("Expected " + expected.Name),
+                                   exception =>
+                                       {
+                                           errorRef = exception;
+                                           latch.Signal();
+                                       }));
+
+            Assert.IsTrue(latch.Wait(2000), "Timed out waiting for error");
+            Assert.IsNotNull(errorRef);
+            Assert.AreEqual(expected, errorRef.GetType());
+        }
+
+        [Test]
+        public void Greeting()
+        {
+            // Test synchronous RPC:
+            string response = Hello("how are you?");
+            Assert.AreEqual("Hello, how are you?", response);
+
+            // Test asynchronous RPC (future):
+            var future1 = new CallFuture<string>();
+            Hello("World!", future1);
+
+            string result = future1.WaitForResult();
+            Assert.AreEqual("Hello, World!", result);
+            Assert.IsNull(future1.Error);
+
+            // Test asynchronous RPC (callback):
+            var future2 = new CallFuture<string>();
+
+            Hello("what's up?", new NestedCallFuture<string>(future2));
+            Assert.AreEqual("Hello, what's up?", future2.WaitForResult());
+            Assert.IsNull(future2.Error);
+        }
+
+        //[Test]
+        public void PerformanceTest()
+        {
+            const int threadCount = 8;
+            const long runTimeMillis = 10*1000L;
+
+            long rpcCount = 0;
+            int[] runFlag = {1};
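+            // runFlag is a single-element array used as a shared cell between this
+            // method and the worker threads; Interlocked.Add(ref x, 0) below serves
+            // as an atomic read of it.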
+
+            var startLatch = new CountdownLatch(threadCount);
+            for (int ii = 0; ii < threadCount; ii++)
+            {
+                new Thread(() =>
+                               {
+                                   try
+                                   {
+                                       startLatch.Signal();
+                                       startLatch.Wait(2000);
+
+                                       while (Interlocked.Add(ref runFlag[0], 0) == 1)
+                                       {
+                                           Interlocked.Increment(ref rpcCount);
+                                           Assert.AreEqual("Hello, World!", simpleClient.hello("World!"));
+                                       }
+                                   }
+                                   catch (Exception e)
+                                   {
+                                       Console.WriteLine(e);
+                                   }
+                               }).Start();
+            }
+
+            startLatch.Wait(2000);
+            Thread.Sleep((int) runTimeMillis); // let the workers run for the full measurement window
+            Interlocked.Exchange(ref runFlag[0], 0); // the workers loop while the flag is 1, so 0 stops them
+
+            string results = "Completed " + rpcCount + " RPCs in " + runTimeMillis +
+                             "ms => " + ((rpcCount/(double) runTimeMillis)*1000) + " RPCs/sec, " +
+                             (runTimeMillis/(double) rpcCount) + " ms/RPC.";
+
+            Debug.WriteLine(results);
+        }
+
+        [Test]
+        public void TestSendAfterChannelClose()
+        {
+            // Start up a second server so that closing the server doesn't 
+            // interfere with the other unit tests:
+
+            var responder = new SpecificResponder<Simple>(new SimpleImpl());
+            var server2 = new SocketServer("localhost", 0, responder);
+
+            server2.Start();
+
+            try
+            {
+                var transceiver2 = new SocketTransceiver("localhost", server2.Port);
+
+                try
+                {
+                    var simpleClient2 = SpecificRequestor.CreateClient<SimpleCallback>(transceiver2);
+
+                    // Verify that connection works:
+                    Assert.AreEqual(3, simpleClient2.add(1, 2));
+
+                    // Try again with callbacks:
+                    var addFuture = new CallFuture<int>();
+                    simpleClient2.add(1, 2, addFuture);
+                    Assert.AreEqual(3, addFuture.WaitForResult(2000));
+
+                    // Shut down server:
+                    server2.Stop();
+
+                    // Send a new RPC, and verify that it throws an Exception that 
+                    // can be detected by the client:
+                    bool ioeCaught = false;
+                    try
+                    {
+                        simpleClient2.add(1, 2);
+                        Assert.Fail("Send after server close should have thrown Exception");
+                    }
+                    catch (SocketException)
+                    {
+                        ioeCaught = true;
+                    }
+
+                    Assert.IsTrue(ioeCaught, "Expected IOException");
+
+                    // Send a new RPC with callback, and verify that the correct Exception 
+                    // is thrown:
+                    ioeCaught = false;
+                    try
+                    {
+                        addFuture = new CallFuture<int>();
+                        simpleClient2.add(1, 2, addFuture);
+                        addFuture.WaitForResult(2000);
+
+                        Assert.Fail("Send after server close should have thrown Exception");
+                    }
+                    catch (SocketException)
+                    {
+                        ioeCaught = true;
+                    }
+
+                    Assert.IsTrue(ioeCaught, "Expected IOException");
+                }
+                finally
+                {
+                    transceiver2.Disconnect();
+                }
+            }
+            finally
+            {
+                server2.Stop();
+                Thread.Sleep(1000);
+            }
+        }
+
+        private class SimpleImpl : Simple
+        {
+            public static bool throwSystemError = false;
+
+            public override string hello(string greeting)
+            {
+                return "Hello, " + greeting;
+            }
+
+            public override TestRecord echo(TestRecord record)
+            {
+                return record;
+            }
+
+            public override int add(int arg1, int arg2)
+            {
+                return arg1 + arg2;
+            }
+
+            public override byte[] echoBytes(byte[] data)
+            {
+                return data;
+            }
+
+            public override object error()
+            {
+                if (throwSystemError)
+                    throw new SystemException("System error");
+                else
+                    throw new TestError { message = "Test Message" };
+            }
+
+            public override void ack()
+            {
+                ackFlag = !ackFlag;
+                ackLatch.Signal();
+            }
+        }
+
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/test/Ipc/SocketTransceiverWhenServerStopsTest.cs b/lang/csharp/src/apache/test/Ipc/SocketTransceiverWhenServerStopsTest.cs
new file mode 100644
index 0000000..ee1d1e5
--- /dev/null
+++ b/lang/csharp/src/apache/test/Ipc/SocketTransceiverWhenServerStopsTest.cs
@@ -0,0 +1,188 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using Avro.ipc;
+using Avro.ipc.Specific;
+using NUnit.Framework;
+using org.apache.avro.test;
+
+namespace Avro.Test.Ipc
+{
+    [TestFixture]
+    public class SocketTransceiverWhenServerStopsTest
+    {
+        private static org.apache.avro.test.Message CreateMessage()
+        {
+            var msg = new org.apache.avro.test.Message
+                          {
+                              to = "wife",
+                              from = "husband",
+                              body = "I love you!"
+                          };
+            return msg;
+        }
+
+        private static readonly DateTime Jan1st1970 =
+            new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
+
+        public static long CurrentTimeMillis()
+        {
+            return (long) (DateTime.UtcNow - Jan1st1970).TotalMilliseconds;
+        }
+
+        public class MailImpl : Mail
+        {
+            private CountdownLatch allMessages = new CountdownLatch(5);
+
+            // in this simple example just return details of the message
+            public override String send(org.apache.avro.test.Message message)
+            {
+                return "Sent message to [" + message.to +
+                       "] from [" + message.from + "] with body [" +
+                       message.body + "]";
+            }
+
+            public override void fireandforget(org.apache.avro.test.Message message)
+            {
+                allMessages.Signal();
+            }
+
+            public void reset()
+            {
+                allMessages = new CountdownLatch(5);
+            }
+        }
+
+        [Test]
+        public void TestSocketTransceiverWhenServerStops()
+        {
+            Responder responder = new SpecificResponder<Mail>(new MailImpl());
+            var server = new SocketServer("localhost", 0, responder);
+
+            server.Start();
+
+            var transceiver = new SocketTransceiver("localhost", server.Port);
+            var mail = SpecificRequestor.CreateClient<Mail>(transceiver);
+
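+            // Single-element arrays serve as shared mutable cells for the client
+            // threads below; Interlocked.Add(ref x, 0) doubles as an atomic read.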
+            int[] successes = {0};
+            int failures = 0;
+            int[] quitOnFailure = {0};
+            var threads = new List<Thread>();
+
+            // Start a bunch of client threads that use the transceiver to send messages
+            for (int i = 0; i < 100; i++)
+            {
+                var thread = new Thread(
+                    () =>
+                        {
+                            while (true)
+                            {
+                                try
+                                {
+                                    mail.send(CreateMessage());
+                                    Interlocked.Increment(ref successes[0]);
+                                }
+                                catch (Exception)
+                                {
+                                    Interlocked.Increment(ref failures);
+
+                                    if (Interlocked.Add(ref quitOnFailure[0], 0) == 1)
+                                    {
+                                        return;
+                                    }
+                                }
+                            }
+                        });
+
+                thread.Name = "Thread" + i;
+                threads.Add(thread);
+                thread.Start();
+            }
+
+            // Make sure the threads are running: wait until a good number of calls have succeeded
+            while (Interlocked.Add(ref successes[0], 0) < 10000)
+            {
+                Thread.Sleep(50);
+            }
+
+            // Now stop the server
+            server.Stop();
+
+            // The server is stopped, so successes should stop increasing; wait until they level off
+            while (true)
+            {
+                int previousSuccesses = Interlocked.Add(ref successes[0], 0);
+                Thread.Sleep(500);
+                if (previousSuccesses == Interlocked.Add(ref successes[0], 0))
+                {
+                    break;
+                }
+            }
+
+            server.Start();
+
+            long now = CurrentTimeMillis();
+
+            int previousSuccesses2 = successes[0];
+            while (true)
+            {
+                Thread.Sleep(500);
+                if (successes[0] > previousSuccesses2)
+                {
+                    break;
+                }
+                if (CurrentTimeMillis() - now > 5000)
+                {
+                    Console.WriteLine("FYI: requests don't continue immediately...");
+                    break;
+                }
+            }
+
+            // Stop our client; the waiting threads should then quit promptly
+            Console.WriteLine("Stopping transceiver");
+
+            Interlocked.Add(ref quitOnFailure[0], 1);
+            now = CurrentTimeMillis();
+            transceiver.Close();
+
+            // Wait for all threads to quit
+            while (true)
+            {
+                threads.RemoveAll(x => !x.IsAlive);
+
+                if (threads.Count > 0)
+                    Thread.Sleep(1000);
+                else 
+                    break;
+            }
+
+            if (CurrentTimeMillis() - now > 10000)
+            {
+                Assert.Fail("Stopping NettyTransceiver and waiting for client threads to quit took too long.");
+            }
+            else
+            {
+                Console.WriteLine("Stopping NettyTransceiver and waiting for client threads to quit took "
+                                  + (CurrentTimeMillis() - now) + " ms");
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/lang/csharp/src/apache/test/Properties/AssemblyInfo.cs b/lang/csharp/src/apache/test/Properties/AssemblyInfo.cs
new file mode 100644
index 0000000..8821f64
--- /dev/null
+++ b/lang/csharp/src/apache/test/Properties/AssemblyInfo.cs
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System.Reflection;
+using System.Runtime.InteropServices;
+
+[assembly: AssemblyTitle("Avro.test")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("Apache")]
+[assembly: AssemblyProduct("Avro.test")]
+[assembly: AssemblyCopyright("Copyright © Apache 2013")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+[assembly: ComVisible(false)]
+[assembly: Guid("442785CE-3633-4A04-A103-434104F63D55")]
+[assembly: AssemblyVersion("0.9.0.0")]
+[assembly: AssemblyFileVersion("0.9.0.0")]
\ No newline at end of file
diff --git a/lang/csharp/src/apache/test/Protocol/ProtocolTest.cs b/lang/csharp/src/apache/test/Protocol/ProtocolTest.cs
new file mode 100644
index 0000000..3c6c00b
--- /dev/null
+++ b/lang/csharp/src/apache/test/Protocol/ProtocolTest.cs
@@ -0,0 +1,445 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.IO;
+using NUnit.Framework;
+using Avro;
+
+namespace Avro.Test
+{
+    [TestFixture]
+    public class ProtocolTest
+    {
+        [TestCase(@"{
+  ""protocol"": ""TestProtocol"",
+  ""namespace"": ""com.acme"",
+  ""doc"": ""HelloWorld"",
+
+  ""types"": [
+    {""name"": ""Greeting"", ""type"": ""record"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]},
+    {""name"": ""Curse"", ""type"": ""error"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]},
+    {""name"": ""CurseMore"", ""type"": ""error"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]}
+  ],
+
+  ""messages"": {
+    ""hello"": {
+      ""request"": [{""name"": ""greeting"", ""type"": ""Greeting"" }],
+      ""response"": ""Greeting"",
+      ""errors"": [""Curse"", ""CurseMore""]
+    }
+  }
+}", true)]
+        [TestCase(@"{
+  ""protocol"" : ""MyProtocol"",
+  ""namespace"" : ""com.foo"",
+  ""types"" : [ 
+   {
+	""type"" : ""record"",
+	""name"" : ""A"",
+	""fields"" : [ { ""name"" : ""f1"", ""type"" : ""long"" } ]
+   },
+   {
+	""type"" : ""enum"",
+	""name"" : ""MyEnum"",
+	""symbols"" : [ ""A"", ""B"", ""C"" ]
+   },
+   {
+   ""type"": ""fixed"", 
+   ""size"": 16, 
+   ""name"": ""MyFixed""
+   },
+   {
+	""type"" : ""record"",
+	""name"" : ""Z"",
+	""fields"" : 
+			[ 	
+				{ ""name"" : ""myUInt"", ""type"" : [ ""int"", ""null"" ] },
+				{ ""name"" : ""myULong"", ""type"" : [ ""long"", ""null"" ] },
+				{ ""name"" : ""myUBool"", ""type"" : [ ""boolean"", ""null"" ] },
+				{ ""name"" : ""myUDouble"", ""type"" : [ ""double"", ""null"" ] },
+				{ ""name"" : ""myUFloat"", ""type"" : [ ""float"", ""null"" ] },
+				{ ""name"" : ""myUBytes"", ""type"" : [ ""bytes"", ""null"" ] },
+				{ ""name"" : ""myUString"", ""type"" : [ ""string"", ""null"" ] },
+				
+				{ ""name"" : ""myInt"", ""type"" : ""int"" },
+				{ ""name"" : ""myLong"", ""type"" : ""long"" },
+				{ ""name"" : ""myBool"", ""type"" : ""boolean"" },
+				{ ""name"" : ""myDouble"", ""type"" : ""double"" },
+				{ ""name"" : ""myFloat"", ""type"" : ""float"" },
+				{ ""name"" : ""myBytes"", ""type"" : ""bytes"" },
+				{ ""name"" : ""myString"", ""type"" : ""string"" },
+				{ ""name"" : ""myNull"", ""type"" : ""null"" },
+
+				{ ""name"" : ""myFixed"", ""type"" : ""MyFixed"" },								
+				{ ""name"" : ""myFixed2"", ""type"" : ""MyFixed"" },								
+				{ ""name"" : ""myA"", ""type"" : ""A"" },
+				{ ""name"" : ""myE"", ""type"" : ""MyEnum"" },
+				{ ""name"" : ""myArray"", ""type"" : { ""type"" : ""array"", ""items"" : ""bytes"" } },
+				{ ""name"" : ""myArray2"", ""type"" : { ""type"" : ""array"", ""items"" : { ""type"" : ""record"", ""name"" : ""newRec"", ""fields"" : [ { ""name"" : ""f1"", ""type"" : ""long""} ] } } },
+				{ ""name"" : ""myMap"", ""type"" : { ""type"" : ""map"", ""values"" : ""string"" } },
+				{ ""name"" : ""myMap2"", ""type"" : { ""type"" : ""map"", ""values"" : ""newRec"" } },
+				{ ""name"" : ""myObject"", ""type"" : [ ""MyEnum"", ""A"", ""null"" ] },
+				{ ""name"" : ""next"", ""type"" : [ ""A"", ""null"" ] }
+			]
+   } ,
+   {
+	""type"" : ""int""
+   }
+   ]
+}", true)]
+        [TestCase(@"{
+  ""protocol"" : ""MyProtocol"",
+  ""namespace"" : ""com.bar"",
+  ""types"" : [ 
+   {
+	""type"" : ""record"",
+	""name"" : ""A"",
+	""fields"" : 
+		[ 
+			{ ""name"" : ""f1"", ""type"" : ""long"" }
+		]
+   },
+   {
+	""type"" : ""enum"",
+	""name"" : ""MyEnum"",
+	""symbols"" : [ ""A"", ""B"", ""C"" ]
+   },
+   {
+   ""type"": ""fixed"", 
+   ""size"": 16, 
+   ""name"": ""MyFixed""
+   },
+   {
+	""type"" : ""record"",
+	""name"" : ""Z"",
+	""fields"" : 
+			[ 	
+				{ ""name"" : ""myUInt"", ""type"" : [ ""int"", ""null"" ], ""default"" : 1 },
+				{ ""name"" : ""myULong"", ""type"" : [ ""long"", ""null"" ], ""default"" : 2 },
+				{ ""name"" : ""myUBool"", ""type"" : [ ""boolean"", ""null"" ], ""default"" : true },
+				{ ""name"" : ""myUDouble"", ""type"" : [ ""double"", ""null"" ], ""default"" : 3 },
+				{ ""name"" : ""myUFloat"", ""type"" : [ ""float"", ""null"" ], ""default"" : 4.5 },
+				{ ""name"" : ""myUBytes"", ""type"" : [ ""bytes"", ""null"" ], ""default"" : ""\u00ff"" },
+				{ ""name"" : ""myUString"", ""type"" : [ ""string"", ""null"" ], ""default"" : ""foo"" },
+				
+				{ ""name"" : ""myInt"", ""type"" : ""int"", ""default"" : 10 },
+				{ ""name"" : ""myLong"", ""type"" : ""long"", ""default"" : 11 },
+				{ ""name"" : ""myBool"", ""type"" : ""boolean"", ""default"" : false },
+				{ ""name"" : ""myDouble"", ""type"" : ""double"", ""default"" : 12 },
+				{ ""name"" : ""myFloat"", ""type"" : ""float"", ""default"" : 13.14 },
+				{ ""name"" : ""myBytes"", ""type"" : ""bytes"", ""default"" : ""\u00ff"" },
+				{ ""name"" : ""myString"", ""type"" : ""string"", ""default"" : ""bar"" },
+				{ ""name"" : ""myNull"", ""type"" : ""null"", ""default"" : null },
+
+				{ ""name"" : ""myFixed"", ""type"" : ""MyFixed"", ""default"" : ""\u00FFFFFFFFFFFFFFFFA"" },
+				{ ""name"" : ""myFixed2"", ""type"" : ""MyFixed"", ""default"" : ""\u00FFFFFFFFFFFFFFFFA"" },
+				{ ""name"" : ""myA"", ""type"" : ""A"", ""default"" : {""f1"":5} },
+				{ ""name"" : ""myE"", ""type"" : ""MyEnum"", ""default"" : ""C"" },
+				{ ""name"" : ""myArray"", ""type"" : { ""type"" : ""array"", ""items"" : ""bytes"" }, ""default"" : [ ""a12b"", ""cc50"" ] },
+				{ ""name"" : ""myArray2"", ""type"" : { ""type"" : ""array"", ""items"" : { ""type"" : ""record"", ""name"" : ""newRec"", ""fields"" : [ { ""name"" : ""f2"", ""type"" : ""long""} ], ""default"" : {""f2"":5} } }, ""default"" : [ {""f2"":6}, {""f2"":7} ] },
+				{ ""name"" : ""myMap"", ""type"" : { ""type"" : ""map"", ""values"" : ""string"" }, ""default"" : {""a"":""A"", ""b"":""B""} },
+				{ ""name"" : ""myMap2"", ""type"" : { ""type"" : ""map"", ""values"" : ""newRec"" }, ""default"" : { ""key1"":{""f2"":6}, ""key2"":{""f2"":7} } },
+				{ ""name"" : ""myObject"", ""type"" : [ ""MyEnum"", ""A"", ""null"" ], ""default"" : ""A"" },
+				{ ""name"" : ""next"", ""type"" : [ ""null"" , ""A"" ], ""default"" : null }
+			]
+   } ,
+   {
+	""type"" : ""int""
+   }
+   ]
+}", true)]
+        public static void TestProtocol(string str, bool valid)
+        {
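+            // Every case above is a valid protocol, so Parse must succeed; the JSON
+            // it emits must survive a further parse/print round trip unchanged.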
+            Protocol protocol = Protocol.Parse(str);
+            Assert.IsTrue(valid);
+            string json = protocol.ToString();
+
+            Protocol protocol2 = Protocol.Parse(json);
+            string json2 = protocol2.ToString();
+
+            Assert.AreEqual(json, json2);
+        }
+
+        // Protocols match
+        [TestCase(
+@"{
+  ""protocol"": ""TestProtocol"",
+  ""namespace"": ""com.acme"",
+
+  ""types"": [
+    {""name"": ""Greeting"", ""type"": ""record"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]},
+    {""name"": ""Curse"", ""type"": ""error"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]}
+  ],
+
+  ""messages"": {
+    ""hello"": {
+      ""request"": [{""name"": ""greeting"", ""type"": ""Greeting"" }],
+      ""response"": ""Greeting"",
+      ""errors"": [""Curse""]
+    }
+  }
+}", 
+@"{
+  ""protocol"": ""TestProtocol"",
+  ""namespace"": ""com.acme"",
+
+  ""types"": [
+    {""name"": ""Greeting"", ""type"": ""record"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]},
+    {""name"": ""Curse"", ""type"": ""error"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]}
+  ],
+
+  ""messages"": {
+    ""hello"": {
+      ""request"": [{""name"": ""greeting"", ""type"": ""Greeting"" }],
+      ""response"": ""Greeting"",
+      ""errors"": [""Curse""]
+    }
+  }
+}",
+  true,true)]
+        // Protocols match, order of schemas in 'types' are different
+        [TestCase(
+@"{
+  ""protocol"": ""TestProtocol"",
+  ""namespace"": ""com.acme"",
+
+  ""types"": [
+    {""name"": ""Curse"", ""type"": ""error"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]},
+    {""name"": ""Greeting"", ""type"": ""record"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]}
+  ],
+
+  ""messages"": {
+    ""hello"": {
+      ""request"": [{""name"": ""greeting"", ""type"": ""Greeting"" }],
+      ""response"": ""Greeting"",
+      ""errors"": [""Curse""]
+    }
+  }
+}",
+@"{
+  ""protocol"": ""TestProtocol"",
+  ""namespace"": ""com.acme"",
+
+  ""types"": [
+    {""name"": ""Greeting"", ""type"": ""record"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]},
+    {""name"": ""Curse"", ""type"": ""error"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]}
+  ],
+
+  ""messages"": {
+    ""hello"": {
+      ""request"": [{""name"": ""greeting"", ""type"": ""Greeting"" }],
+      ""response"": ""Greeting"",
+      ""errors"": [""Curse""]
+    }
+  }
+}",
+  false,true)]
+        // Name of protocol is different
+        [TestCase(
+@"{
+  ""protocol"": ""TestProtocol1"",
+  ""namespace"": ""com.acme"",
+
+  ""types"": [
+    {""name"": ""Greeting"", ""type"": ""record"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]},
+    {""name"": ""Curse"", ""type"": ""error"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]}
+  ],
+
+  ""messages"": {
+    ""hello"": {
+      ""request"": [{""name"": ""greeting"", ""type"": ""Greeting"" }],
+      ""response"": ""Greeting"",
+      ""errors"": [""Curse""]
+    }
+  }
+}",
+@"{
+  ""protocol"": ""TestProtocol"",
+  ""namespace"": ""com.acme"",
+
+  ""types"": [
+    {""name"": ""Greeting"", ""type"": ""record"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]},
+    {""name"": ""Curse"", ""type"": ""error"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]}
+  ],
+
+  ""messages"": {
+    ""hello"": {
+      ""request"": [{""name"": ""greeting"", ""type"": ""Greeting"" }],
+      ""response"": ""Greeting"",
+      ""errors"": [""Curse""]
+    }
+  }
+}",
+  false,false)]
+        // Name of a message request is different: 'hi'
+        [TestCase(
+@"{
+  ""protocol"": ""TestProtocol"",
+  ""namespace"": ""com.acme"",
+
+  ""types"": [
+    {""name"": ""Greeting"", ""type"": ""record"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]},
+    {""name"": ""Curse"", ""type"": ""error"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]}
+  ],
+
+  ""messages"": {
+    ""hello"": {
+      ""request"": [{""name"": ""greeting"", ""type"": ""Greeting"" }],
+      ""response"": ""Greeting"",
+      ""errors"": [""Curse""]
+    }
+  }
+}",
+@"{
+  ""protocol"": ""TestProtocol"",
+  ""namespace"": ""com.acme"",
+
+  ""types"": [
+    {""name"": ""Greeting"", ""type"": ""record"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]},
+    {""name"": ""Curse"", ""type"": ""error"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]}
+  ],
+
+  ""messages"": {
+    ""hi"": {
+      ""request"": [{""name"": ""greeting"", ""type"": ""Greeting"" }],
+      ""response"": ""Greeting"",
+      ""errors"": [""Curse""]
+    }
+  }
+}",
+  false,false)]
+        // Name of a type is different : Curse1
+        [TestCase(
+@"{
+  ""protocol"": ""TestProtocol"",
+  ""namespace"": ""com.acme"",
+
+  ""types"": [
+    {""name"": ""Greeting"", ""type"": ""record"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]},
+    {""name"": ""Curse1"", ""type"": ""error"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]}
+  ],
+
+  ""messages"": {
+    ""hello"": {
+      ""request"": [{""name"": ""greeting"", ""type"": ""Greeting"" }],
+      ""response"": ""Greeting"",
+      ""errors"": [""Curse1""]
+    }
+  }
+}",
+@"{
+  ""protocol"": ""TestProtocol"",
+  ""namespace"": ""com.acme"",
+
+  ""types"": [
+    {""name"": ""Greeting"", ""type"": ""record"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]},
+    {""name"": ""Curse"", ""type"": ""error"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]}
+  ],
+
+  ""messages"": {
+    ""hi"": {
+      ""request"": [{""name"": ""greeting"", ""type"": ""Greeting"" }],
+      ""response"": ""Greeting"",
+      ""errors"": [""Curse""]
+    }
+  }
+}",
+  false,false)]
+        // Name of a record field is different: 'mymessage'
+        [TestCase(
+@"{
+  ""protocol"": ""TestProtocol"",
+  ""namespace"": ""com.acme"",
+
+  ""types"": [
+    {""name"": ""Greeting"", ""type"": ""record"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]},
+    {""name"": ""Curse"", ""type"": ""error"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]}
+  ],
+
+  ""messages"": {
+    ""hello"": {
+      ""request"": [{""name"": ""greeting"", ""type"": ""Greeting"" }],
+      ""response"": ""Greeting"",
+      ""errors"": [""Curse""]
+    }
+  }
+}",
+@"{
+  ""protocol"": ""TestProtocol"",
+  ""namespace"": ""com.acme"",
+
+  ""types"": [
+    {""name"": ""Greeting"", ""type"": ""record"", ""fields"": [
+      {""name"": ""message"", ""type"": ""string""}]},
+    {""name"": ""Curse"", ""type"": ""error"", ""fields"": [
+      {""name"": ""mymessage"", ""type"": ""string""}]}
+  ],
+
+  ""messages"": {
+    ""hi"": {
+      ""request"": [{""name"": ""greeting"", ""type"": ""Greeting"" }],
+      ""response"": ""Greeting"",
+      ""errors"": [""Curse""]
+    }
+  }
+}",
+  false,false)]
+        public static void TestProtocolHash(string str1, string str2, bool md5_equal, bool hash_equal)
+        {
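+            // As the cases above encode, the MD5 digest is sensitive to the textual
+            // form (e.g. the order of entries in "types"), while GetHashCode is
+            // expected to be stable under such reordering.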
+            Protocol protocol1 = Protocol.Parse(str1);
+            Protocol protocol2 = Protocol.Parse(str2);
+
+            byte[] md51 = protocol1.MD5;
+            byte[] md52 = protocol2.MD5;
+
+            int hash1 = protocol1.GetHashCode();
+            int hash2 = protocol2.GetHashCode();
+
+            Assert.AreEqual(md5_equal, md51.SequenceEqual(md52));
+            Assert.AreEqual(hash_equal, hash1 == hash2);
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/test/Schema/AliasTest.cs b/lang/csharp/src/apache/test/Schema/AliasTest.cs
new file mode 100644
index 0000000..e8aeed7
--- /dev/null
+++ b/lang/csharp/src/apache/test/Schema/AliasTest.cs
@@ -0,0 +1,239 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using NUnit.Framework;
+using Avro;
+
+namespace Avro.Test
+{
+    [TestFixture]
+    public class AliasTest
+    {
+        [TestCase(@"{""type"":""record"",""name"":""LongList"", ""namespace"":""com"", ""aliases"":[""c"",""foo.y""],
+                   ""fields"":
+                    [{""name"":""f1"",""type"":""long"", ""extraprop"":""important"", ""id"":""1029"", ""aliases"":[""a"",""b"",""c""] },
+                     {""name"":""f2"",""type"": ""int""}]}", 
+                   true)]
+        [TestCase(@"{""type"":""record"",""name"":""LongList"", ""aliases"":[""Alias1""],
+                   ""fields"":[{""name"":""f1"",""type"":""long"", ""order"":""junk"" },
+                    {""name"":""f2"",""type"": ""int""}]}", 
+                    false)]
+        [TestCase(@"{""type"":""record"",""name"":""LongList"", ""aliases"":[""Alias1""], ""customprop"":""123456"",
+                   ""fields"":[{""name"":""f1"",""type"":""long"", ""order"":""ascending"", ""fprop"":""faaa"" },
+                    {""name"":""f2"",""type"": ""int""}]}", 
+                    true)]
+        [TestCase(@"{""type"":""record"",""name"":""LongList"", ""aliases"":[""Alias1""],
+                   ""fields"":[{""name"":""f1"",""type"":""long""},
+                    {""name"":""f2"",""type"": ""int""}]}", 
+                    true)]
+        [TestCase(@"{""type"":""record"",""name"":""LongList"", ""aliases"":[""Alias1"",""Alias2""],
+                   ""fields"":[{""name"":""f1"",""type"":""long""},
+                    {""name"":""f2"",""type"": ""int""}]}", 
+                    true)]
+        [TestCase(@"{""type"":""record"",""name"":""LongList"", ""aliases"":[""Alias1"",9],
+                   ""fields"":[{""name"":""f1"",""type"":""long""},
+                    {""name"":""f2"",""type"": ""int""}]}", 
+                    false)]
+        [TestCase(@"{""type"":""record"",""name"":""LongList"", ""aliases"":[1, 2],
+                    ""fields"":[{""name"":""f1"",""type"":""long"", ""default"": ""100""},
+                    {""name"":""f2"",""type"": ""int""}]}", 
+                    false)]
+        [TestCase(@"{""type"":""record"",""name"":""LongList"", ""aliases"": ""wrong alias format"",
+                    ""fields"":[{""name"":""value"",""type"":""long"", ""default"": ""100""},
+                    {""name"":""next"",""type"":[""LongList"",""null""]}]}", 
+                    false)]
+        public void TestAliases(string s, bool valid)   // also tests properties, default, order
+        {
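+            // Schemas flagged invalid must throw from Parse (handled in the catch
+            // below); valid ones must round-trip to identical JSON.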
+            try
+            {
+                Schema sc = Schema.Parse(s);
+                Assert.IsTrue(valid);
+
+                string json = sc.ToString();
+                Schema sc2 = Schema.Parse(json);
+                string json2 = sc2.ToString();
+
+                Assert.IsTrue(json == json2);
+            }
+            catch (Exception ex)
+            {
+                Console.WriteLine(ex.Message);
+                Assert.IsFalse(valid);
+            }
+        }
+
+        // Enum
+        [TestCase(@"{""type"":""enum"",""name"":""Symbols"", ""symbols"" : [ ""A"", ""B"", ""C"" ] }",
+                  @"{""type"":""enum"",""name"":""Symbols"", ""symbols"" : [ ""A"", ""B"", ""C"" ] }",
+                  true)]
+        [TestCase(@"{""type"":""enum"",""name"":""Symbols"", ""symbols"" : [ ""A"", ""B"", ""C"" ] }",
+                  @"{""type"":""enum"",""name"":""NewSymbols"", ""symbols"" : [ ""A"", ""B"", ""C"" ] }",
+                  false)]
+        [TestCase(@"{""type"":""enum"",""name"":""Symbols"", ""aliases"" : [""NewSymbols""], ""symbols"" : [ ""A"", ""B"", ""C"" ] }",
+                  @"{""type"":""enum"",""name"":""NewSymbols"", ""symbols"" : [ ""A"", ""B"", ""C"" ] }",
+                  true)]
+        [TestCase(@"{""type"":""enum"",""name"":""Symbols"", ""aliases"" : [""DiffSymbols"", ""OtherSymbols""], ""symbols"" : [ ""A"", ""B"", ""C"" ] }",
+                  @"{""type"":""enum"",""name"":""NewSymbols"", ""symbols"" : [ ""A"", ""B"", ""C"" ] }",
+                  false)]
+        public void TestEnumAliasesResolution(string reader, string writer, bool canread)
+        {
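+            // Alias resolution is reader-driven: the reader can consume the writer's
+            // data when its own name or one of its aliases matches the writer's name.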
+            try
+            {
+                Schema rs = Schema.Parse(reader);
+                Schema ws = Schema.Parse(writer);
+                Assert.AreEqual(canread, rs.CanRead(ws));
+            }
+            catch (Exception ex)
+            {
+                Assert.Fail(ex.Message);
+            }
+        }
+
+        // Fixed
+        [TestCase(@"{""type"": ""fixed"", ""name"": ""Fixed"", ""size"": 1}",
+                  @"{""type"": ""fixed"", ""name"": ""Fixed"", ""size"": 1}",
+                  true)]
+        [TestCase(@"{""type"": ""fixed"", ""name"": ""Fixed"", ""size"": 1}",
+                  @"{""type"": ""fixed"", ""name"": ""NewFixed"", ""size"": 1}",
+                  false)]
+        [TestCase(@"{""type"": ""fixed"", ""name"": ""Fixed"",  ""aliases"" : [""NewFixed""], ""size"": 1}",
+                  @"{""type"": ""fixed"", ""name"": ""NewFixed"", ""size"": 1}",
+                  true)]
+        [TestCase(@"{""type"": ""fixed"", ""name"": ""Fixed"",  ""aliases"" : [""DiffFixed"", ""OtherFixed""], ""size"": 1}",
+                  @"{""type"": ""fixed"", ""name"": ""NewFixed"", ""size"": 1}",
+                  false)]
+        public void TestFixedAliasesResolution(string reader, string writer, bool canread)
+        {
+            try
+            {
+                Schema rs = Schema.Parse(reader);
+                Schema ws = Schema.Parse(writer);
+                Assert.AreEqual(canread, rs.CanRead(ws));
+            }
+            catch (Exception ex)
+            {
+                Assert.Fail(ex.Message);
+            }
+        }
+
+        // Records
+        [TestCase(1,@"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"":""long"" },
+                                 {""name"":""f2"",""type"": ""int""}]}",
+                  @"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"":""long"" },
+                                 {""name"":""f2"",""type"": ""int""}]}", 
+                  true)]
+        [TestCase(2,@"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"":""long"" },
+                                 {""name"":""f2"",""type"": ""int""}]}",
+                  @"{""type"":""record"",""name"":""NewRec"", 
+                     ""fields"":[{""name"":""f1"",""type"":""long"" },
+                                 {""name"":""f2"",""type"": ""int""}]}",
+                  false)]
+        [TestCase(3,@"{""type"":""record"",""name"":""Rec"", ""aliases"":[""NewRec""],
+                     ""fields"":[{""name"":""f1"",""type"":""long"" },
+                                 {""name"":""f2"",""type"": ""int""}]}",
+                  @"{""type"":""record"",""name"":""NewRec"", 
+                     ""fields"":[{""name"":""f1"",""type"":""long"" },
+                                 {""name"":""f2"",""type"": ""int""}]}",
+                  true)]
+        [TestCase(4,@"{""type"":""record"",""name"":""Rec"", ""aliases"":[""OtherRec"",""DiffRec""],
+                     ""fields"":[{""name"":""f1"",""type"":""long"" },
+                                 {""name"":""f2"",""type"": ""int""}]}",
+                  @"{""type"":""record"",""name"":""NewRec"", 
+                     ""fields"":[{""name"":""f1"",""type"":""long"" },
+                                 {""name"":""f2"",""type"": ""int""}]}",
+                  false)]
+        [TestCase(5,@"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"":""long"" },
+                                 {""name"":""f3"",""type"": ""int""}]}",
+                  @"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"":""long"" },
+                                 {""name"":""f2"",""type"": ""int""}]}",
+                  false)]
+        [TestCase(6,@"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"":""long"" },
+                                 {""name"":""f3"",""type"": ""int"", ""aliases"":[""f2""]}]}",
+                  @"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"":""long"" },
+                                 {""name"":""f2"",""type"": ""int""}]}",
+                  true)]
+        [TestCase(7,@"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"":""long"" },
+                                 {""name"":""f3"",""type"": ""int"", ""aliases"":[""f4"",""f5""]}]}",
+                  @"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"":""long"" },
+                                 {""name"":""f2"",""type"": ""int""}]}",
+                  false)]
+        [TestCase(8,@"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"": {""type"":""enum"", ""name"":""Symbol"", ""symbols"":[""A""] }}]}",
+                  @"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"": {""type"":""enum"", ""name"":""NewSymbol"", ""symbols"":[""A""] }}]}",
+                  false)]
+        [TestCase(9,@"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"": {""type"":""enum"", ""name"":""Symbol"", ""aliases"":[""NewSymbol""], ""symbols"":[""A""] }}]}",
+                  @"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"": {""type"":""enum"", ""name"":""NewSymbol"", ""symbols"":[""A""] }}]}",
+                  true)]
+        [TestCase(10,@"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"": {""type"":""enum"", ""name"":""Symbol"", ""aliases"":[""DiffSymbol""], ""symbols"":[""A""] }}]}",
+                  @"{""type"":""record"",""name"":""Rec"", 
+                     ""fields"":[{""name"":""f1"",""type"": {""type"":""enum"", ""name"":""NewSymbol"", ""symbols"":[""A""] }}]}",
+                  false)]
+        [TestCase(11,@"{""type"":""record"",""name"":""Rec"",""aliases"":[""NewRec""], 
+                    ""fields"":[{""name"":""f2"",""aliases"":[""f1""],""type"": {""type"":""enum"", ""name"":""Symbol"", ""aliases"":[""NewSymbol""], ""symbols"":[""A""] }},
+                                {""name"":""f3"",""aliases"":[""f4""],""type"": {""type"":""fixed"", ""name"":""Fixed"", ""aliases"":[""NewFixed""], ""size"": 1 }}
+                               ]}",
+                  @"{""type"":""record"",""name"":""NewRec"", 
+                     ""fields"":[{""name"":""f1"",""type"": {""type"":""enum"", ""name"":""NewSymbol"", ""symbols"":[""A""] }},
+                                 {""name"":""f4"",""type"": {""type"":""fixed"", ""name"":""NewFixed"", ""size"": 1 }}
+                                ]}",
+                  true)]
+        [TestCase(12,@"{""type"":""record"",""name"":""Rec"",""aliases"":[""NewRec""], 
+                     ""fields"":[{""name"":""f2"",""aliases"":[""f1""],""type"": {""type"":""enum"", ""name"":""Symbol"", ""aliases"":[""NewSymbol""], ""symbols"":[""A""] }},
+                                 {""name"":""f3"",""aliases"":[""f4""],""type"": {""type"":""fixed"", ""name"":""Fixed"", ""aliases"":[""NewFixed""], ""size"":1 }}
+                                ]}",
+                  @"{""type"":""record"",""name"":""NewRec"", 
+                     ""fields"":[{""name"":""f1"",""type"": {""type"":""enum"", ""name"":""NewSymbol"", ""symbols"":[""A"",""B""] }},
+                                 {""name"":""f4"",""type"": {""type"":""fixed"", ""name"":""NewFixed"", ""size"":1 }}
+                                ]}",
+                  true)]
+
+        public void TestRecordAliasesResolution(int testid, string reader, string writer, bool canread)
+        {
+            try
+            {
+                Schema rs = Schema.Parse(reader);
+                Schema ws = Schema.Parse(writer);
+                Assert.AreEqual(canread, rs.CanRead(ws));
+            }
+            catch (Exception ex)
+            {
+                Assert.Fail(ex.Message);
+            }
+        }
+
+    }
+}
diff --git a/lang/csharp/src/apache/test/Schema/SchemaNormalizationTests.cs b/lang/csharp/src/apache/test/Schema/SchemaNormalizationTests.cs
new file mode 100644
index 0000000..c3b0cd1
--- /dev/null
+++ b/lang/csharp/src/apache/test/Schema/SchemaNormalizationTests.cs
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using NUnit.Framework;
+using System.IO;
+using Avro.Test.Utils;
+using Avro;
+
+namespace Avro.Test
+{
+    [TestFixture]
+    public class SchemaNormalizationTests
+    {
+        private static readonly long One = unchecked((long) 0x8000000000000000); // only the top (sign) bit set
+        private static readonly byte[] Postfix = { 0, 0, 0, 0, 0, 0, 0, 0 };
+
+        [Test, TestCaseSource("ProvideCanonicalTestCases")]
+        public void CanonicalTest(string input, string expectedOutput)
+        {
+            Assert.AreEqual(expectedOutput, SchemaNormalization.ToParsingForm(Schema.Parse(input)));
+        }
+
+        [Test, TestCaseSource("ProvideFingerprintTestCases")]
+        public void FingerprintTest(string input, string expectedOutput)
+        {
+            Schema s = Schema.Parse(input);
+            long carefulFP = AltFingerprint(SchemaNormalization.ToParsingForm(s));
+            Assert.AreEqual(long.Parse(expectedOutput), carefulFP);
+            Assert.AreEqual(carefulFP, SchemaNormalization.ParsingFingerprint64(s));
+        }
+
+        private static List<object[]> ProvideFingerprintTestCases()
+        {
+            using (StreamReader reader = new StreamReader("../../../../../share/test/data/schema-tests.txt"))
+            {
+                return CaseFinder.Find(reader, "fingerprint", new List<object[]>());
+            }
+        }
+
+        private static List<object[]> ProvideCanonicalTestCases()
+        {
+            using (StreamReader reader = new StreamReader("../../../../../share/test/data/schema-tests.txt"))
+            {
+                return CaseFinder.Find(reader, "canonical", new List<object[]>());
+            }
+        }
+
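+        // A deliberately simple bit-serial fingerprint, used as an independent
+        // cross-check of SchemaNormalization.ParsingFingerprint64.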
+        private static long AltFingerprint(string s)
+        {
+            long tmp = AltExtended(SchemaNormalization.Empty64, 64, One, Encoding.UTF8.GetBytes(s));
+            return AltExtended(SchemaNormalization.Empty64, 64, tmp, Postfix);
+        }
+
+        private static long AltExtended(long poly, int degree, long fp, byte[] b)
+        {
+            long overflowBit = 1L << (64 - degree);
+            for (int i = 0; i < b.Length; i++)
+            {
+                for (int j = 1; j < 129; j = j << 1)
+                {
+                    bool overflow = (0 != (fp & overflowBit));
+                    fp = (long) (((ulong) fp) >> 1);
+                    if (0 != (j & b[i]))
+                    {
+                        fp |= One;
+                    }
+                    if (overflow)
+                    {
+                        fp ^= poly;
+                    }
+                }
+            }
+            return fp;
+        }
+    }
+}
diff --git a/lang/csharp/src/apache/test/Schema/SchemaTests.cs b/lang/csharp/src/apache/test/Schema/SchemaTests.cs
new file mode 100644
index 0000000..b124831
--- /dev/null
+++ b/lang/csharp/src/apache/test/Schema/SchemaTests.cs
@@ -0,0 +1,276 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections.Generic;
+using System.Text;
+using NUnit.Framework;
+using Avro;
+
+namespace Avro.Test
+{
+    [TestFixture]
+    public class SchemaTests
+    {
+        // Primitive types - shorthand
+        [TestCase("null")]
+        [TestCase("boolean")]
+        [TestCase("int")]
+        [TestCase("long")]
+        [TestCase("float")]
+        [TestCase("double")]
+        [TestCase("bytes")]
+        [TestCase("string")]
+
+        [TestCase("\"null\"")]
+        [TestCase("\"boolean\"")]
+        [TestCase("\"int\"")]
+        [TestCase("\"long\"")]
+        [TestCase("\"float\"")]
+        [TestCase("\"double\"")]
+        [TestCase("\"bytes\"")]
+        [TestCase("\"string\"")]
+
+        // Primitive types - longer
+        [TestCase("{ \"type\": \"null\" }")]
+        [TestCase("{ \"type\": \"boolean\" }")]
+        [TestCase("{ \"type\": \"int\" }")]
+        [TestCase("{ \"type\": \"long\" }")]
+        [TestCase("{ \"type\": \"float\" }")]
+        [TestCase("{ \"type\": \"double\" }")]
+        [TestCase("{ \"type\": \"bytes\" }")]
+        [TestCase("{ \"type\": \"string\" }")]
+        // Record
+        [TestCase("{\"type\": \"record\",\"name\": \"Test\",\"fields\": [{\"name\": \"f\",\"type\": \"long\"}]}")]
+        [TestCase("{\"type\": \"record\",\"name\": \"Test\",\"fields\": " +
+            "[{\"name\": \"f1\",\"type\": \"long\"},{\"name\": \"f2\", \"type\": \"int\"}]}")]
+        [TestCase("{\"type\": \"error\",\"name\": \"Test\",\"fields\": " +
+            "[{\"name\": \"f1\",\"type\": \"long\"},{\"name\": \"f2\", \"type\": \"int\"}]}")]
+        [TestCase("{\"type\":\"record\",\"name\":\"LongList\"," +
+            "\"fields\":[{\"name\":\"value\",\"type\":\"long\"},{\"name\":\"next\",\"type\":[\"LongList\",\"null\"]}]}")] // Recursive.
+        [TestCase("{\"type\":\"record\",\"name\":\"LongList\"," +
+            "\"fields\":[{\"name\":\"value\",\"type\":\"long\"},{\"name\":\"next\",\"type\":[\"LongListA\",\"null\"]}]}",
+            Description = "Unknown name", ExpectedException = typeof(SchemaParseException))]
+        [TestCase("{\"type\":\"record\",\"name\":\"LongList\"}",
+            Description = "No fields", ExpectedException = typeof(SchemaParseException))]
+        [TestCase("{\"type\":\"record\",\"name\":\"LongList\", \"fields\": \"hi\"}",
+            Description = "Fields not an array", ExpectedException = typeof(SchemaParseException))]
+
+        // Enum
+        [TestCase("{\"type\": \"enum\", \"name\": \"Test\", \"symbols\": [\"A\", \"B\"]}")]
+        [TestCase("{\"type\": \"enum\", \"name\": \"Status\", \"symbols\": \"Normal Caution Critical\"}",
+            Description = "Symbols not an array", ExpectedException = typeof(SchemaParseException))]
+        [TestCase("{\"type\": \"enum\", \"name\": [ 0, 1, 1, 2, 3, 5, 8 ], \"symbols\": [\"Golden\", \"Mean\"]}",
+            Description = "Name not a string", ExpectedException = typeof(SchemaParseException))]
+        [TestCase("{\"type\": \"enum\", \"symbols\" : [\"I\", \"will\", \"fail\", \"no\", \"name\"]}",
+            Description = "No name", ExpectedException = typeof(SchemaParseException))]
+        [TestCase("{\"type\": \"enum\", \"name\": \"Test\", \"symbols\" : [\"AA\", \"AA\"]}",
+            Description = "Duplicate symbol", ExpectedException = typeof(SchemaParseException))]
+
+        // Array
+        [TestCase("{\"type\": \"array\", \"items\": \"long\"}")]
+        [TestCase("{\"type\": \"array\",\"items\": {\"type\": \"enum\", \"name\": \"Test\", \"symbols\": [\"A\", \"B\"]}}")]
+
+        // Map
+        [TestCase("{\"type\": \"map\", \"values\": \"long\"}")]
+        [TestCase("{\"type\": \"map\",\"values\": {\"type\": \"enum\", \"name\": \"Test\", \"symbols\": [\"A\", \"B\"]}}")]
+
+        // Union
+        [TestCase("[\"string\", \"null\", \"long\"]")]
+        [TestCase("[\"string\", \"long\", \"long\"]",
+            Description = "Duplicate type", ExpectedException = typeof(SchemaParseException))]
+        [TestCase("[{\"type\": \"array\", \"items\": \"long\"}, {\"type\": \"array\", \"items\": \"string\"}]",
+            Description = "Duplicate type", ExpectedException = typeof(SchemaParseException))]
+        [TestCase("{\"type\":[\"string\", \"null\", \"long\"]}")]
+        
+        // Fixed
+        [TestCase("{ \"type\": \"fixed\", \"name\": \"Test\", \"size\": 1}")]
+        [TestCase("{\"type\": \"fixed\", \"name\": \"MyFixed\", \"namespace\": \"org.apache.hadoop.avro\", \"size\": 1}")]
+        [TestCase("{ \"type\": \"fixed\", \"name\": \"Test\", \"size\": 1}")]
+        [TestCase("{ \"type\": \"fixed\", \"name\": \"Test\", \"size\": 1}")]
+        [TestCase("{\"type\": \"fixed\", \"name\": \"Missing size\"}", ExpectedException = typeof(SchemaParseException))]
+        [TestCase("{\"type\": \"fixed\", \"size\": 314}",
+            Description = "No name", ExpectedException = typeof(SchemaParseException))]
+        public void TestBasic(string s)
+        {
+            Schema.Parse(s);
+        }
+
+        [TestCase("null", Schema.Type.Null)]
+        [TestCase("boolean", Schema.Type.Boolean)]
+        [TestCase("int", Schema.Type.Int)]
+        [TestCase("long", Schema.Type.Long)]
+        [TestCase("float", Schema.Type.Float)]
+        [TestCase("double", Schema.Type.Double)]
+        [TestCase("bytes", Schema.Type.Bytes)]
+        [TestCase("string", Schema.Type.String)]
+        
+        [TestCase("{ \"type\": \"null\" }", Schema.Type.Null)]
+        [TestCase("{ \"type\": \"boolean\" }", Schema.Type.Boolean)]
+        [TestCase("{ \"type\": \"int\" }", Schema.Type.Int)]
+        [TestCase("{ \"type\": \"long\" }", Schema.Type.Long)]
+        [TestCase("{ \"type\": \"float\" }", Schema.Type.Float)]
+        [TestCase("{ \"type\": \"double\" }", Schema.Type.Double)]
+        [TestCase("{ \"type\": \"bytes\" }", Schema.Type.Bytes)]
+        [TestCase("{ \"type\": \"string\" }", Schema.Type.String)]
+        public void TestPrimitive(string s, Schema.Type type)
+        {
+            Schema sc = Schema.Parse(s);
+            Assert.IsTrue(sc is PrimitiveSchema);
+            Assert.AreEqual(type, sc.Tag);
+
+            testEquality(s, sc);
+            testToString(sc);
+        }
+
+        private static void testEquality(string s, Schema sc)
+        {
+            Assert.IsTrue(sc.Equals(sc));
+            Schema sc2 = Schema.Parse(s);
+            Assert.IsTrue(sc.Equals(sc2));
+            Assert.AreEqual(sc.GetHashCode(), sc2.GetHashCode());
+        }
+
+        private static void testToString(Schema sc)
+        {
+            try
+            {
+                Assert.AreEqual(sc, Schema.Parse(sc.ToString()));
+            }
+            catch (Exception e)
+            {
+                throw new AvroException(e.ToString() + ": " + sc.ToString());
+            }
+        }
+
+        [TestCase("{\"type\":\"record\",\"name\":\"LongList\"," +
+            "\"fields\":[{\"name\":\"f1\",\"type\":\"long\"}," +
+            "{\"name\":\"f2\",\"type\": \"int\"}]}",
+            new string[] { "f1", "long", "100", "f2", "int", "10" })]
+        [TestCase("{\"type\":\"record\",\"name\":\"LongList\"," +
+            "\"fields\":[{\"name\":\"f1\",\"type\":\"long\", \"default\": \"100\"}," +
+            "{\"name\":\"f2\",\"type\": \"int\"}]}",
+            new string[] { "f1", "long", "100", "f2", "int", "10" })]
+        [TestCase("{\"type\":\"record\",\"name\":\"LongList\"," +
+            "\"fields\":[{\"name\":\"value\",\"type\":\"long\", \"default\": \"100\"}," +
+            "{\"name\":\"next\",\"type\":[\"LongList\",\"null\"]}]}",
+            new string[] { "value", "long", "100", "next", "union", null })]
+        public void TestRecord(string s, string[] kv)
+        {
+            Schema sc = Schema.Parse(s);
+            Assert.AreEqual(Schema.Type.Record, sc.Tag);
+            RecordSchema rs = sc as RecordSchema;
+            Assert.AreEqual(kv.Length / 3, rs.Count);
+            for (int i = 0; i < kv.Length; i += 3)
+            {
+                Field f = rs[kv[i]];
+                Assert.AreEqual(kv[i + 1], f.Schema.Name);
+                /*
+                if (kv[i + 2] != null)
+                {
+                    Assert.IsNotNull(f.DefaultValue);
+                    Assert.AreEqual(kv[i + 2], f.DefaultValue);
+                }
+                else
+                {
+                    Assert.IsNull(f.DefaultValue);
+                }
+                 */
+            }
+            testEquality(s, sc);
+            testToString(sc);
+        }
+
+        [TestCase("{\"type\": \"enum\", \"name\": \"Test\", \"symbols\": [\"A\", \"B\"]}",
+            new string[] { "A", "B" })]
+        public void TestEnum(string s, string[] symbols)
+        {
+            Schema sc = Schema.Parse(s);
+            Assert.AreEqual(Schema.Type.Enumeration, sc.Tag);
+            EnumSchema es = sc as EnumSchema;
+            Assert.AreEqual(symbols.Length, es.Count);
+
+            int i = 0;
+            foreach (String str in es)
+            {
+                Assert.AreEqual(symbols[i++], str);
+            }
+
+            testEquality(s, sc);
+            testToString(sc);
+        }
+
+        [TestCase("{\"type\": \"array\", \"items\": \"long\"}", "long")]
+        public void TestArray(string s, string item)
+        {
+            Schema sc = Schema.Parse(s);
+            Assert.AreEqual(Schema.Type.Array, sc.Tag);
+            ArraySchema ars = sc as ArraySchema;
+            Assert.AreEqual(item, ars.ItemSchema.Name);
+            
+            testEquality(s, sc);
+            testToString(sc);
+        }
+
+        [TestCase("{\"type\": \"map\", \"values\": \"long\"}", "long")]
+        public void TestMap(string s, string value)
+        {
+            Schema sc = Schema.Parse(s);
+            Assert.AreEqual(Schema.Type.Map, sc.Tag);
+            MapSchema ms = sc as MapSchema;
+            Assert.AreEqual(value, ms.ValueSchema.Name);
+
+            testEquality(s, sc);
+            testToString(sc);
+        }
+
+        [TestCase("[\"string\", \"null\", \"long\"]", new string[] { "string", "null", "long" })]
+        public void TestUnion(string s, string[] types)
+        {
+            Schema sc = Schema.Parse(s);
+            Assert.AreEqual(Schema.Type.Union, sc.Tag);
+            UnionSchema us = sc as UnionSchema;
+            Assert.AreEqual(types.Length, us.Count);
+
+            for (int i = 0; i < us.Count; i++)
+            {
+                Assert.AreEqual(types[i], us[i].Name);
+            }
+            testEquality(s, sc);
+            testToString(sc);
+        }
+
+        [TestCase("{ \"type\": \"fixed\", \"name\": \"Test\", \"size\": 1}", 1)]
+        public void TestFixed(string s, int size)
+        {
+            Schema sc = Schema.Parse(s);
+            Assert.AreEqual(Schema.Type.Fixed, sc.Tag);
+            FixedSchema fs = sc as FixedSchema;
+            Assert.AreEqual(size, fs.Size);
+            testEquality(s, sc);
+            testToString(sc);
+        }
+
+        [TestCase("a", "o.a.h", Result = "o.a.h.a")]
+        public string testFullname(string s1, string s2)
+        {
+            var name = new SchemaName(s1, s2, null);
+            return name.Fullname;
+        }
+
+    }
+}
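
The testEquality and testToString helpers above capture the contract the rest
of these cases rely on: parsing a schema, printing it, and reparsing the output
must yield an equal schema with an equal hash code. A minimal stand-alone
sketch of that round trip (Program is an illustrative name; only the Schema
API already exercised in this file is assumed):

    using System;
    using Avro;

    static class Program
    {
        static void Main()
        {
            string json = "{\"type\": \"record\", \"name\": \"Test\", " +
                          "\"fields\": [{\"name\": \"f\", \"type\": \"long\"}]}";

            // Parse, render back to JSON, and reparse: the two Schema objects
            // must compare equal and hash identically.
            Schema first = Schema.Parse(json);
            Schema second = Schema.Parse(first.ToString());

            Console.WriteLine(first.Equals(second));                        // True
            Console.WriteLine(first.GetHashCode() == second.GetHashCode()); // True
        }
    }
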
diff --git a/lang/csharp/src/apache/test/Specific/SpecificTests.cs b/lang/csharp/src/apache/test/Specific/SpecificTests.cs
new file mode 100644
index 0000000..e324945
--- /dev/null
+++ b/lang/csharp/src/apache/test/Specific/SpecificTests.cs
@@ -0,0 +1,378 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+using System;
+using System.Collections;
+using System.IO;
+using System.Linq;
+using NUnit.Framework;
+using Avro.IO;
+using System.CodeDom;
+using System.CodeDom.Compiler;
+using Avro.Specific;
+using System.Reflection;
+
+namespace Avro.Test
+{
+    [TestFixture]
+    class SpecificTests
+    {
+        // The dynamically created assembly used in the test below can only be created
+        // once; otherwise, repeated tests would fail because the same type name would
+        // exist in multiple assemblies, so the type used in the test and the type found
+        // by ObjectCreator would differ. This single CompilerResults works only as long
+        // as there is one test; each additional test would need its own CompilerResults.
+        private static CompilerResults compres;
+
+        [TestCase(@"{
+  ""protocol"" : ""MyProtocol"",
+  ""namespace"" : ""com.foo"",
+  ""types"" : [ 
+   {
+	""type"" : ""record"",
+	""name"" : ""A"",
+	""fields"" : [ { ""name"" : ""f1"", ""type"" : ""long"" } ]
+   },
+   {
+	""type"" : ""enum"",
+	""name"" : ""MyEnum"",
+	""symbols"" : [ ""A"", ""B"", ""C"" ]
+   },
+   {
+   ""type"": ""fixed"", 
+   ""size"": 16, 
+   ""name"": ""MyFixed""
+   },
+   {
+	""type"" : ""record"",
+	""name"" : ""Z"",
+	""fields"" : 
+			[ 	
+				{ ""name"" : ""myUInt"", ""type"" : [ ""int"", ""null"" ] },
+				{ ""name"" : ""myULong"", ""type"" : [ ""long"", ""null"" ] },
+				{ ""name"" : ""myUBool"", ""type"" : [ ""boolean"", ""null"" ] },
+				{ ""name"" : ""myUDouble"", ""type"" : [ ""double"", ""null"" ] },
+				{ ""name"" : ""myUFloat"", ""type"" : [ ""float"", ""null"" ] },
+				{ ""name"" : ""myUBytes"", ""type"" : [ ""bytes"", ""null"" ] },
+				{ ""name"" : ""myUString"", ""type"" : [ ""string"", ""null"" ] },
+				
+				{ ""name"" : ""myInt"", ""type"" : ""int"" },
+				{ ""name"" : ""myLong"", ""type"" : ""long"" },
+				{ ""name"" : ""myBool"", ""type"" : ""boolean"" },
+				{ ""name"" : ""myDouble"", ""type"" : ""double"" },
+				{ ""name"" : ""myFloat"", ""type"" : ""float"" },
+				{ ""name"" : ""myBytes"", ""type"" : ""bytes"" },
+				{ ""name"" : ""myString"", ""type"" : ""string"" },
+				{ ""name"" : ""myNull"", ""type"" : ""null"" },
+
+				{ ""name"" : ""myFixed"", ""type"" : ""MyFixed"" },								
+				{ ""name"" : ""myA"", ""type"" : ""A"" },
+				{ ""name"" : ""myE"", ""type"" : ""MyEnum"" },
+				{ ""name"" : ""myArray"", ""type"" : { ""type"" : ""array"", ""items"" : ""bytes"" } },
+				{ ""name"" : ""myArray2"", ""type"" : { ""type"" : ""array"", ""items"" : { ""type"" : ""record"", ""name"" : ""newRec"", ""fields"" : [ { ""name"" : ""f1"", ""type"" : ""long""} ] } } },
+				{ ""name"" : ""myMap"", ""type"" : { ""type"" : ""map"", ""values"" : ""string"" } },
+				{ ""name"" : ""myMap2"", ""type"" : { ""type"" : ""map"", ""values"" : ""newRec"" } },
+				{ ""name"" : ""myObject"", ""type"" : [ ""MyEnum"", ""A"", ""null"" ] },
+                { ""name"" : ""myArray3"", ""type"" : { ""type"" : ""array"", ""items"" : { ""type"" : ""array"", ""items"" : [ ""double"", ""string"", ""null"" ] } } }
+			]
+   } 
+   ]
+}"
+, new object[] {3, // index of the schema to serialize
+  "com.foo.Z",  // name of the schema to serialize
+@"Console.WriteLine(""Constructing com.foo.Z..."");", // Empty Constructor.
+@"
+  Console.WriteLine(""Populating com.foo.Z..."");
+  string bytes = ""bytes sample text"";
+  System.Text.UTF8Encoding encoding = new System.Text.UTF8Encoding();
+
+  myUInt=1; 
+  myULong=2; 
+  myUBool=true; 
+  myUDouble=(double)3; 
+  myUFloat=(float)4.5; 
+  myUBytes = encoding.GetBytes(bytes);
+  myUString=""Hello""; 
+
+  myInt=1; 
+  myLong=2; 
+  myBool=true; 
+  myDouble=(double)3; 
+  myFloat=(float)4.5; 
+  myBytes=encoding.GetBytes(bytes);
+  myString=""Hello"";
+  myNull=null;
+
+  string fixedstr = ""My fixed record0"";
+  myFixed=new MyFixed(); myFixed.Value = encoding.GetBytes(fixedstr);
+  myA=new A(); myA.f1 = 10;
+  myE=com.foo.MyEnum.C;
+
+  myArray=new List<byte[]>();
+  myArray.Add(encoding.GetBytes(""a""));
+
+  myArray2 = new List<com.foo.newRec>();
+  com.foo.newRec rec = new com.foo.newRec();
+  rec.f1 = 50;
+  myArray2.Add(rec);
+
+  myMap = new Dictionary<string, string>();
+  myMap.Add(""key"", ""value"");
+  myMap2 = new Dictionary<string, com.foo.newRec>();
+  com.foo.newRec newrec = new com.foo.newRec();
+  newrec.f1 = 1200;
+  myMap2.Add(""A"", newrec);
+  myObject = myA;
+
+  IList<System.Object> o1 = new List<System.Object>();
+
+    o1.Add((double)1123123121);
+    o1.Add((double)2);
+    o1.Add(null);
+    o1.Add(""fred"");
+
+    IList<System.Object> o2 = new List<System.Object>();
+
+    o2.Add((double)1);
+    o2.Add((double)32531);
+    o2.Add((double)4);
+    o2.Add((double)555);
+    o2.Add((double)0);
+
+    myArray3 = new List<IList<System.Object>>();
+    myArray3.Add(o1);
+    myArray3.Add(o2);
+
+"}
+)]
+        public void TestSpecific(string str, object[] result)
+        {
+            if (compres == null)
+            {
+                Protocol protocol = Protocol.Parse(str);
+                var codegen = new CodeGen();
+                codegen.AddProtocol(protocol);
+                var compileUnit = codegen.GenerateCode();
+
+                // add a constructor to the main class using the passed assignment statements
+                CodeTypeDeclaration ctd = compileUnit.Namespaces[0].Types[(int)result[0]];
+                CodeConstructor constructor = new CodeConstructor();
+                constructor.Attributes = MemberAttributes.Public;
+                CodeSnippetExpression snippet = new CodeSnippetExpression((string)result[2]);
+                constructor.Statements.Add(snippet);
+                ctd.Members.Add(constructor);
+
+                // Add a method to the main class that populates the data. This was
+                // moved out of the constructor because it let some tests pass that
+                // should not have, by reading into a freshly constructed (blank) object.
+                CodeMemberMethod method = new CodeMemberMethod();
+                method.Attributes = MemberAttributes.Public;
+                method.Name = "Populate";
+                CodeSnippetExpression snippet2 = new CodeSnippetExpression((string)result[3]);
+                method.Statements.Add(snippet2);
+                ctd.Members.Add(method);
+
+                // compile
+                var comparam = new CompilerParameters(new string[] { "mscorlib.dll" });
+                comparam.ReferencedAssemblies.Add("System.dll");
+                comparam.ReferencedAssemblies.Add("Avro.dll");
+                comparam.GenerateInMemory = true;
+                var ccp = new Microsoft.CSharp.CSharpCodeProvider();
+                var units = new CodeCompileUnit[] { compileUnit };
+                compres = ccp.CompileAssemblyFromDom(comparam, units);
+                Assert.IsNotNull(compres);
+                if (compres.Errors.Count > 0)
+                {
+                    for (int i = 0; i < compres.Errors.Count; i++)
+                        Console.WriteLine(compres.Errors[i]);
+                }
+                Assert.IsTrue(compres.Errors.Count == 0);
+            }
+
+            // create record
+            ISpecificRecord rec = compres.CompiledAssembly.CreateInstance((string)result[1]) as ISpecificRecord;
+
+            // Call populate to put some data in it.
+            Type recType = rec.GetType();
+            MethodInfo methodInfo = recType.GetMethod("Populate");
+            methodInfo.Invoke(rec, null);
+
+            var x1 = compres.CompiledAssembly.FullName;
+
+            Assert.IsFalse(rec == null);
+
+            // serialize
+            var stream = serialize(rec.Schema, rec);
+
+            // deserialize
+            var rec2 = deserialize<ISpecificRecord>(stream, rec.Schema, rec.Schema);
+            Assert.IsFalse(rec2 == null);
+            AssertSpecificRecordEqual(rec, rec2);
+        }
+
+        [TestCase]
+        public void TestEnumResolution()
+        {
+            Schema writerSchema = Schema.Parse("{\"type\":\"record\",\"name\":\"EnumRecord\",\"namespace\":\"Avro.Test\"," + 
+                                        "\"fields\":[{\"name\":\"enumType\",\"type\": { \"type\": \"enum\", \"name\": \"EnumType\", \"symbols\": [\"FIRST\", \"SECOND\"]} }]}");
+
+            var testRecord = new EnumRecord();
+
+            Schema readerSchema = testRecord.Schema;
+            testRecord.enumType = EnumType.SECOND;
+
+            // serialize
+            var stream = serialize(writerSchema, testRecord);
+
+            // deserialize
+            var rec2 = deserialize<EnumRecord>(stream, writerSchema, readerSchema);
+            Assert.AreEqual( EnumType.SECOND, rec2.enumType );
+        }
+
+        private static S deserialize<S>(Stream ms, Schema ws, Schema rs) where S : class, ISpecificRecord
+        {
+            long initialPos = ms.Position;
+            var r = new SpecificReader<S>(ws, rs);
+            Decoder d = new BinaryDecoder(ms);
+            S output = r.Read(null, d);
+            Assert.AreEqual(ms.Length, ms.Position); // Ensure we have read everything.
+            checkAlternateDeserializers(output, ms, initialPos, ws, rs);
+            return output;
+        }
+
+        private static void checkAlternateDeserializers<S>(S expected, Stream input, long startPos, Schema ws, Schema rs) where S : class, ISpecificRecord
+        {
+            input.Position = startPos;
+            var reader = new SpecificDatumReader<S>(ws, rs);
+            Decoder d = new BinaryDecoder(input);
+            S output = reader.Read(null, d);
+            Assert.AreEqual(input.Length, input.Position); // Ensure we have read everything.
+            AssertSpecificRecordEqual(expected, output);
+        }
+
+        private static Stream serialize<T>(Schema ws, T actual)
+        {
+            var ms = new MemoryStream();
+            Encoder e = new BinaryEncoder(ms);
+            var w = new SpecificWriter<T>(ws);
+            w.Write(actual, e);
+            ms.Flush();
+            ms.Position = 0;
+            checkAlternateSerializers(ms.ToArray(), actual, ws);
+            return ms;
+        }
+
+        private static void checkAlternateSerializers<T>(byte[] expected, T value, Schema ws)
+        {
+            var ms = new MemoryStream();
+            var writer = new SpecificDatumWriter<T>(ws);
+            var e = new BinaryEncoder(ms);
+            writer.Write(value, e);
+            var output = ms.ToArray();
+            
+            Assert.AreEqual(expected.Length, output.Length);
+            Assert.True(expected.SequenceEqual(output));
+        }
+
+        private static void AssertSpecificRecordEqual(ISpecificRecord rec1, ISpecificRecord rec2)
+        {
+            var recordSchema = (RecordSchema) rec1.Schema;
+            for (int i = 0; i < recordSchema.Count; i++)
+            {
+                var rec1Val = rec1.Get(i);
+                var rec2Val = rec2.Get(i);
+                if (rec1Val is ISpecificRecord)
+                {
+                    AssertSpecificRecordEqual((ISpecificRecord)rec1Val, (ISpecificRecord)rec2Val);
+                }
+                else if (rec1Val is IList)
+                {
+                    var rec1List = (IList) rec1Val;
+                    if( rec1List.Count > 0 && rec1List[0] is ISpecificRecord)
+                    {
+                        var rec2List = (IList) rec2Val;
+                        Assert.AreEqual(rec1List.Count, rec2List.Count);
+                        for (int j = 0; j < rec1List.Count; j++)
+                        {
+                            AssertSpecificRecordEqual((ISpecificRecord)rec1List[j], (ISpecificRecord)rec2List[j]);
+                        }
+                    }
+                    else
+                    {
+                        Assert.AreEqual(rec1Val, rec2Val);
+                    }
+                }
+                else if (rec1Val is IDictionary)
+                {
+                    var rec1Dict = (IDictionary) rec1Val;
+                    var rec2Dict = (IDictionary) rec2Val;
+                    Assert.AreEqual(rec1Dict.Count, rec2Dict.Count);
+                    foreach (var key in rec1Dict.Keys)
+                    {
+                        var val1 = rec1Dict[key];
+                        var val2 = rec2Dict[key];
+                        if (val1 is ISpecificRecord)
+                        {
+                            AssertSpecificRecordEqual((ISpecificRecord)val1, (ISpecificRecord)val2);
+                        }
+                        else
+                        {
+                            Assert.AreEqual(val1, val2);
+                        }
+                    }
+                }
+                else
+                {
+                    Assert.AreEqual(rec1Val, rec2Val);
+                }
+            }
+        }
+    }
+
+    enum EnumType
+    {
+        THIRD,
+        FIRST,
+        SECOND
+    }
+
+    class EnumRecord : ISpecificRecord
+    {
+        public EnumType enumType { get; set; }
+        public Schema Schema
+        {
+            get
+            {
+                return Schema.Parse("{\"type\":\"record\",\"name\":\"EnumRecord\",\"namespace\":\"Avro.Test\"," + 
+                                        "\"fields\":[{\"name\":\"enumType\",\"type\": { \"type\": \"enum\", \"name\":" +
+                                        " \"EnumType\", \"symbols\": [\"THIRD\", \"FIRST\", \"SECOND\"]} }]}");
+            }
+        }
+
+        public object Get(int fieldPos)
+        {
+            return enumType;
+        }
+
+        public void Put(int fieldPos, object fieldValue)
+        {
+            enumType = (EnumType)fieldValue;
+        }
+    }
+}
\ No newline at end of file
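
The serialize and deserialize helpers above reduce to the writer/reader
pattern sketched below. This is a minimal sketch for any generated
ISpecificRecord type; RoundTrip and Clone are illustrative names, and the
single-schema case stands in for the distinct writer/reader schemas that
TestEnumResolution exercises.

    using System.IO;
    using Avro;
    using Avro.IO;
    using Avro.Specific;

    static class RoundTrip
    {
        // Write with a SpecificWriter, rewind the stream, then read back with
        // a SpecificReader: the same steps as serialize<T>/deserialize<S> above.
        public static T Clone<T>(Schema schema, T value) where T : class, ISpecificRecord
        {
            var ms = new MemoryStream();
            var encoder = new BinaryEncoder(ms);
            new SpecificWriter<T>(schema).Write(value, encoder);

            ms.Position = 0; // rewind before decoding
            var decoder = new BinaryDecoder(ms);
            return new SpecificReader<T>(schema, schema).Read(null, decoder);
        }
    }

Passing a reader schema different from the writer schema to SpecificReader is
what drives the enum symbol remapping checked by TestEnumResolution.
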
diff --git a/lang/csharp/src/apache/test/Utils/CaseFinder.cs b/lang/csharp/src/apache/test/Utils/CaseFinder.cs
new file mode 100644
index 0000000..ed0d7cf
--- /dev/null
+++ b/lang/csharp/src/apache/test/Utils/CaseFinder.cs
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Text;
+using System.Text.RegularExpressions;
+
+namespace Avro.Test.Utils
+{
+    public class CaseFinder
+    {
+        private static string labelRegex = "[a-zA-Z][_a-zA-Z0-9]*";
+        private static string newCaseName = "INPUT";
+        private static string newCaseMarker = "<<" + newCaseName;
+        private static string startLinePattern = "^<<("+labelRegex+")(.*)$";
+
+        /// <summary>
+        /// Scan the test-case file <code>streamReader</code> looking for test subcases
+        /// marked with <code>label</code>. Any such cases are appended
+        /// (in order) to the <code>cases</code> parameter.
+        /// </summary>
+        /// <param name="streamReader">Reader positioned over the test-case file.</param>
+        /// <param name="label">Subcase label to look for.</param>
+        /// <param name="cases">List to which (input, expected-output) pairs are appended.</param>
+        /// <returns>The <code>cases</code> list passed in, for convenience.</returns>
+        public static List<object[]> Find(StreamReader streamReader, string label, List<object[]> cases)
+        {
+            if (!Regex.IsMatch(label, "^" + labelRegex + "$"))
+            {
+                throw new ArgumentException("Bad case subcase label: " + label);
+            }
+
+            string subcaseMarker = "<<" + label;
+
+            var line = streamReader.ReadLine();
+            while (true)
+            {
+                while (line != null && !line.StartsWith(newCaseMarker))
+                {
+                    line = streamReader.ReadLine();
+                }
+                if (line == null)
+                {
+                    break;
+                }
+                string input = ProcessHereDoc(streamReader, line);
+
+                if (label == newCaseName)
+                {
+                    cases.Add(new object[] { input, null });
+                    line = streamReader.ReadLine();
+                    continue;
+                }
+
+                do
+                {
+                    line = streamReader.ReadLine();
+                } while (line != null && (!line.StartsWith(newCaseMarker) && !line.StartsWith(subcaseMarker)));
+
+                if (line == null || line.StartsWith(newCaseMarker))
+                {
+                    continue;
+                }
+
+                string expectedOutput = ProcessHereDoc(streamReader, line);
+                cases.Add(new object[] { input, expectedOutput });
+            }
+            return cases;
+        }
+
+        private static string ProcessHereDoc(StreamReader streamReader, string docStart)
+        {
+            var match = Regex.Match(docStart, startLinePattern);
+            if (!match.Success)
+            {
+                throw new ArgumentException(string.Format("Wasn't given the start of a heredoc (\"{0}\")", docStart));
+            }
+
+            string docName = match.Groups[1].Value;
+
+            // Determine if this is a single-line heredoc, and process if it is
+            string singleLineText = match.Groups[2].Value;
+            if (singleLineText.Length != 0)
+            {
+                if (!singleLineText.StartsWith(" "))
+                {
+                    throw new IOException(string.Format("Single-line heredoc missing initial space (\"{0}\")", docStart));
+                }
+                return singleLineText.Substring(1);
+            }
+            
+            // Process multi-line heredocs
+            var sb = new StringBuilder();
+            string line = streamReader.ReadLine();
+            string prevLine = string.Empty;
+            bool firstTime = true;
+            while (line != null && line != docName)
+            {
+                if (!firstTime)
+                {
+                    sb.Append(prevLine).Append("\n");
+                }
+                else
+                {
+                    firstTime = false;
+                }
+                prevLine = line;
+                line = streamReader.ReadLine();
+            }
+            if (line == null)
+            {
+                throw new IOException(string.Format("Here document ({0}) terminated by end-of-file.", docName));
+            }
+            return sb.Append(prevLine).ToString();
+        }
+    }
+}
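
Concretely, the format consumed here is heredoc-style: a <<INPUT line opens a
case and a following <<LABEL line supplies an expected output; a payload on
the same line (after a single space) is a single-line heredoc, while a
multi-line heredoc runs until a line equal to the doc name. A small driver
sketch (CaseFinderDemo is an illustrative name):

    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Text;
    using Avro.Test.Utils;

    static class CaseFinderDemo
    {
        static void Main()
        {
            // Two subcases in single-line heredoc form: each <<INPUT opens a
            // case; the following <<canonical line carries its expected output.
            string doc = "<<INPUT \"int\"\n<<canonical \"int\"\n"
                       + "<<INPUT {\"type\":\"string\"}\n<<canonical \"string\"\n";

            var cases = new List<object[]>();
            var bytes = new MemoryStream(Encoding.UTF8.GetBytes(doc));
            using (var reader = new StreamReader(bytes))
            {
                CaseFinder.Find(reader, "canonical", cases);
            }

            foreach (var c in cases)
                Console.WriteLine("{0} => {1}", c[0], c[1]); // e.g. "int" => "int"
        }
    }
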
diff --git a/lang/csharp/src/apache/test/Utils/CaseFinderTests.cs b/lang/csharp/src/apache/test/Utils/CaseFinderTests.cs
new file mode 100644
index 0000000..1257863
--- /dev/null
+++ b/lang/csharp/src/apache/test/Utils/CaseFinderTests.cs
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using NUnit.Framework;
+using System.IO;
+
+namespace Avro.Test.Utils
+{
+    [TestFixture]
+    public class CaseFinderTests
+    {
+        [Test]
+        public void TestBadDocLabel1()
+        {
+            List<Object[]> result = new List<Object[]>();
+            Assert.Throws<ArgumentException>(
+                () => CaseFinder.Find(Mk("<<INPUT blah"), "", result)
+                );
+        }
+
+        [Test]
+        public void TestBadDocLabel2()
+        {
+            List<Object[]> result = new List<Object[]>();
+            Assert.Throws<ArgumentException>(
+                () => CaseFinder.Find(Mk("<<INPUT blah"), "kill-er", result)
+                );
+        }
+
+        [Test]
+        public void TestBadSingleLineHeredoc()
+        {
+            List<Object[]> result = new List<Object[]>();
+            Assert.Throws<IOException>(
+                () => CaseFinder.Find(Mk("<<INPUTblah"), "foo", result)
+                );
+        }
+
+        [Test]
+        public void TestUnterminatedHeredoc()
+        {
+            List<Object[]> result = new List<Object[]>();
+            Assert.Throws<IOException>(
+                () => CaseFinder.Find(Mk("<<INPUT"), "foo", result)
+                );
+        }
+
+        [Test, TestCaseSource("OutputTestCases")]
+        public void TestOutput(string input, string label, List<object[]> expectedOutput)
+        {
+            List<Object[]> result = new List<Object[]>();
+            CaseFinder.Find(Mk(input), label, result);
+            Assert.True(Eq(result, expectedOutput), Pr(result));
+        }
+
+        private List<Object[]> OutputTestCases()
+        {
+            List<Object[]> result = new List<Object[]>();
+            result.Add(new Object[] { "", "foo", new List<object[]> { } });
+            result.Add(new Object[] { "<<INPUT a\n<<OUTPUT b", "OUTPUT", new List<object[]> { new object[] {"a","b"} } });
+            result.Add(new Object[] { "<<INPUT a\n<<OUTPUT b\n", "OUTPUT", new List<object[]> { new object[] { "a", "b" } } });
+            result.Add(new Object[] { "<<INPUT a\n<<OUTPUT b\n\n", "OUTPUT", new List<object[]> { new object[] { "a", "b" } } });
+            result.Add(new Object[] { "<<INPUT a\r<<OUTPUT b", "OUTPUT", new List<object[]> { new object[] { "a", "b" } } });
+            result.Add(new Object[] { "// This is a test\n<<INPUT a\n\n\n<<OUTPUT b", "OUTPUT", new List<object[]> { new object[] { "a", "b" } } });
+            result.Add(new Object[] { "<<INPUT a\n<<OUTPUT\nb\nOUTPUT", "OUTPUT", new List<object[]> { new object[] { "a", "b" } } });
+            result.Add(new Object[] { "<<INPUT a\n<<OUTPUT\nb\nOUTPUT", "OUTPUT", new List<object[]> { new object[] { "a", "b" } } });
+            result.Add(new Object[] { "<<INPUT a\n<<OUTPUT\nb\n\nOUTPUT", "OUTPUT", new List<object[]> { new object[] { "a", "b\n" } } });
+            result.Add(new Object[] { "<<INPUT a\n<<OUTPUT\n\n  b  \n\nOUTPUT", "OUTPUT", new List<object[]> { new object[] { "a", "\n  b  \n" } } });
+            result.Add(new Object[] { "<<INPUT a\n<<O b\n<<INPUT c\n<<O d", "O", new List<object[]> { new object[] { "a", "b" }, new object[] { "c", "d" } } });
+            result.Add(new Object[] { "<<INPUT a\n<<O b\n<<F z\n<<INPUT c\n<<O d", "O", new List<object[]> { new object[] { "a", "b" }, new object[] { "c", "d" } } });
+            result.Add(new Object[] { "<<INPUT a\n<<O b\n<<F z\n<<INPUT c\n<<O d", "F", new List<object[]> { new object[] { "a", "z" } } });
+            result.Add(new Object[] { "<<INPUT a\n<<O b\n<<F z\n<<INPUT\nc\nINPUT\n<<O d\n<<INPUT e", "INPUT", new List<object[]> { new object[] { "a", null }, new object[] { "c", null }, new object[] { "e", null } } });
+            return result;
+        }
+
+        private StreamReader Mk(string s)
+        {
+            byte[] byteArray = Encoding.ASCII.GetBytes(s);
+            MemoryStream stream = new MemoryStream(byteArray);
+            return new StreamReader(stream);
+        }
+
+        private string Pr(List<object[]> t)
+        {
+            StringBuilder sb = new StringBuilder();
+            sb.Append("{ ");
+            bool firstTime = true;
+            foreach (var obj in t)
+            {
+                if (!firstTime)
+                {
+                    sb.Append(", ");
+                }
+                else
+                {
+                    firstTime = false;
+                }
+                sb.Append("{ \"").Append(obj[0]).Append("\", \"").Append(obj[1]).Append("\" }");
+            }
+            sb.Append("}");
+            return sb.ToString();
+        }
+
+        private bool Eq(List<object []> l1, List<object []> l2)
+        {
+            if (l1 == null || l2 == null)
+            {
+                return l1 == l2;
+            }
+            if (l1.Count != l2.Count)
+            {
+                return false;
+            }
+            for (int i = 0; i < l1.Count; i++)
+            {
+                if (!ArraysEqual(l1[i], l2[i]))
+                {
+                    return false;
+                }
+            }
+            return true;
+        }
+
+        static bool ArraysEqual<T>(T[] a1, T[] a2)
+        {
+            if (ReferenceEquals(a1, a2))
+                return true;
+
+            if (a1 == null || a2 == null)
+                return false;
+
+            if (a1.Length != a2.Length)
+                return false;
+
+            EqualityComparer<T> comparer = EqualityComparer<T>.Default;
+            for (int i = 0; i < a1.Length; i++)
+            {
+                if (!comparer.Equals(a1[i], a2[i])) return false;
+            }
+            return true;
+        }
+
+    }
+}
diff --git a/lang/java/.eclipse_launchers/AllTests.launch b/lang/java/.eclipse_launchers/AllTests.launch
new file mode 100644
index 0000000..f66750c
--- /dev/null
+++ b/lang/java/.eclipse_launchers/AllTests.launch
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<launchConfiguration type="org.eclipse.jdt.junit.launchconfig">
+<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS">
+<listEntry value="/avro"/>
+</listAttribute>
+<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES">
+<listEntry value="4"/>
+</listAttribute>
+<listAttribute key="org.eclipse.debug.ui.favoriteGroups">
+<listEntry value="org.eclipse.debug.ui.launchGroup.debug"/>
+<listEntry value="org.eclipse.debug.ui.launchGroup.run"/>
+</listAttribute>
+<stringAttribute key="org.eclipse.jdt.junit.CONTAINER" value="=avro"/>
+<booleanAttribute key="org.eclipse.jdt.junit.KEEPRUNNING_ATTR" value="false"/>
+<stringAttribute key="org.eclipse.jdt.junit.TESTNAME" value=""/>
+<stringAttribute key="org.eclipse.jdt.junit.TEST_KIND" value="org.eclipse.jdt.junit.loader.junit4"/>
+<stringAttribute key="org.eclipse.jdt.launching.MAIN_TYPE" value=""/>
+<stringAttribute key="org.eclipse.jdt.launching.PROJECT_ATTR" value="avro"/>
+<stringAttribute key="org.eclipse.jdt.launching.VM_ARGUMENTS" value="-Dtest.dir=${workspace_loc:avro}/build/test -Dtest.genavro.dir=${workspace_loc:avro}/src/test/genavro"/>
+</launchConfiguration>
diff --git a/lang/java/.gitignore b/lang/java/.gitignore
new file mode 100644
index 0000000..92c171c
--- /dev/null
+++ b/lang/java/.gitignore
@@ -0,0 +1,19 @@
+#   Licensed to the Apache Software Foundation (ASF) under one or more
+#   contributor license agreements.  See the NOTICE file distributed with
+#   this work for additional information regarding copyright ownership.
+#   The ASF licenses this file to You under the Apache License, Version 2.0
+#   (the "License"); you may not use this file except in compliance with
+#   the License.  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+/build
+/.classpath
+/.eclipse
+/.project
diff --git a/lang/java/archetypes/avro-service-archetype/pom.xml b/lang/java/archetypes/avro-service-archetype/pom.xml
new file mode 100644
index 0000000..96fb809
--- /dev/null
+++ b/lang/java/archetypes/avro-service-archetype/pom.xml
@@ -0,0 +1,52 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <artifactId>avro-archetypes-parent</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.8.0</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>avro-service-archetype</artifactId>
+  <packaging>maven-archetype</packaging>
+
+  <name>Apache Avro Maven Service Archetype</name>
+  <description>Archetype that generates a simple example Avro service</description>
+
+  <build>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+      </resource>
+      <resource>
+        <targetPath>archetype-resources</targetPath>
+        <directory>target/generated-resources</directory>
+        <filtering>false</filtering>
+        <includes>
+          <include>pom.xml</include>
+        </includes>
+      </resource>
+    </resources>
+  </build>
+
+</project>
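
Once built and installed, an archetype like this one is instantiated with
Maven's standard archetype:generate goal. The com.example coordinates below
are placeholders, not part of the project:

    mvn archetype:generate \
      -DarchetypeGroupId=org.apache.avro \
      -DarchetypeArtifactId=avro-service-archetype \
      -DarchetypeVersion=1.8.0 \
      -DgroupId=com.example \
      -DartifactId=ordering-service \
      -Dversion=1.0-SNAPSHOT
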
diff --git a/lang/java/archetypes/avro-service-archetype/src/main/pom/pom.xml b/lang/java/archetypes/avro-service-archetype/src/main/pom/pom.xml
new file mode 100644
index 0000000..4e6ca35
--- /dev/null
+++ b/lang/java/archetypes/avro-service-archetype/src/main/pom/pom.xml
@@ -0,0 +1,140 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <groupId>\${groupId}</groupId>
+  <artifactId>\${artifactId}</artifactId>
+  <version>\${version}</version>
+  <packaging>jar</packaging>
+
+  <name>Simple Avro Ordering Service</name>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+
+    <avro.version>${project.version}</avro.version>
+    <jackson.version>${jackson.version}</jackson.version>
+    <junit.version>${junit.version}</junit.version>
+    <logback.version>1.0.0</logback.version>
+    <slf4j.version>${slf4j.version}</slf4j.version>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.avro</groupId>
+      <artifactId>avro</artifactId>
+      <version>\${avro.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.avro</groupId>
+      <artifactId>avro-ipc</artifactId>
+      <version>\${avro.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-core-asl</artifactId>
+      <version>\${jackson.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <version>\${jackson.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>\${slf4j.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>ch.qos.logback</groupId>
+      <artifactId>logback-classic</artifactId>
+      <version>\${logback.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>\${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <defaultGoal>install</defaultGoal>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.avro</groupId>
+        <artifactId>avro-maven-plugin</artifactId>
+        <version>\${avro.version}</version>
+        <executions>
+          <execution>
+            <goals>
+              <!-- By default generates classes from all Protocol (.avpr) files found in 'src/main/avro' -->
+              <goal>protocol</goal>
+            </goals>
+          </execution>
+        </executions>
+        <dependencies>
+          <dependency>
+            <groupId>org.apache.avro</groupId>
+            <artifactId>avro-ipc</artifactId>
+            <version>\${avro.version}</version>
+          </dependency>
+        </dependencies>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <version>${surefire-plugin.version}</version>
+        <configuration>
+          <excludes>
+            <exclude>%regex[.*.integration.*]</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <plugin>
+        <artifactId>maven-failsafe-plugin</artifactId>
+        <version>${surefire-plugin.version}</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>integration-test</goal>
+              <goal>verify</goal>
+            </goals>
+            <configuration>
+              <includes>
+                <include>%regex[.*.integration.*]</include>
+              </includes>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>${compiler-plugin.version}</version>
+        <configuration>
+          <source>1.6</source>
+          <target>1.6</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>
diff --git a/lang/java/archetypes/avro-service-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml b/lang/java/archetypes/avro-service-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml
new file mode 100644
index 0000000..3ab22c4
--- /dev/null
+++ b/lang/java/archetypes/avro-service-archetype/src/main/resources/META-INF/maven/archetype-metadata.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<archetype-descriptor
+  xsi:schemaLocation="http://maven.apache.org/plugins/maven-archetype-plugin/archetype-descriptor/1.0.0 http://maven.apache.org/xsd/archetype-descriptor-1.0.0.xsd"
+  name="simple-avro-ordering-service" xmlns="http://maven.apache.org/plugins/maven-archetype-plugin/archetype-descriptor/1.0.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <fileSets>
+    <fileSet filtered="true" packaged="true" encoding="UTF-8">
+      <directory>src/main/java</directory>
+      <includes>
+        <include>**/*.java</include>
+      </includes>
+    </fileSet>
+    <fileSet filtered="true" encoding="UTF-8">
+      <directory>src/main/resources</directory>
+      <includes>
+        <include>**/*.xml</include>
+      </includes>
+    </fileSet>
+    <fileSet filtered="true" encoding="UTF-8">
+      <directory>src/main/avro</directory>
+      <includes>
+        <include>**/*.avpr</include>
+      </includes>
+    </fileSet>
+    <fileSet filtered="true" packaged="true" encoding="UTF-8">
+      <directory>src/test/java</directory>
+      <includes>
+        <include>**/*.java</include>
+      </includes>
+    </fileSet>
+  </fileSets>
+</archetype-descriptor>
diff --git a/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/avro/order-service.avpr b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/avro/order-service.avpr
new file mode 100644
index 0000000..b9ea5c5
--- /dev/null
+++ b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/avro/order-service.avpr
@@ -0,0 +1,46 @@
+{
+  "namespace": "${package}.service",
+  "protocol": "OrderProcessingService",
+  "doc": "Protocol to submit customer Orders",
+  "types": [
+    {
+      "name": "Item", "type": "record",
+      "fields": [
+        {"name": "name", "type": "string"},
+        {"name": "sku", "type": "long"},
+        {"name": "quantity", "type": "int"}
+      ]
+    },
+    {
+      "name": "Order", "type": "record",
+      "fields": [
+        {"name": "customerId", "type": "long"},
+        {"name": "orderId", "type": "long"},
+        {"name": "orderItems", "type": {"type": "array", "items": "Item"}}
+      ]
+    },
+    {
+      "name": "Confirmation", "type": "record",
+      "fields": [
+        {"name": "customerId", "type": "long"},
+        {"name": "orderId", "type": "long"},
+        {"name": "estimatedCompletion", "type": "long"}
+      ]
+    },
+    {
+      "name": "OrderFailure", "type": "error",
+      "fields": [
+        {"name": "message", "type": "string"}
+      ]
+    }
+  ],
+
+  "messages": {
+    "submitOrder": {
+      "doc": "Submit an Order",
+      "request": [{"name": "order", "type": "Order"}],
+      "response": "Confirmation",
+      "errors": ["OrderFailure"]
+    }
+  }
+}
diff --git a/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/service/SimpleOrderService.java b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/service/SimpleOrderService.java
new file mode 100644
index 0000000..161ddc3
--- /dev/null
+++ b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/service/SimpleOrderService.java
@@ -0,0 +1,45 @@
+#set( $symbol_pound = '#' )
+#set( $symbol_dollar = '$' )
+#set( $symbol_escape = '\' )
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ${package}.service;
+
+import org.apache.avro.AvroRemoteException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * {@code SimpleOrderService} is a simple example implementation of an Avro service generated from the
+ * order-service.avpr protocol definition.
+ */
+public class SimpleOrderService implements OrderProcessingService {
+
+  private final Logger log = LoggerFactory.getLogger(SimpleOrderService.class);
+
+  @Override
+  public Confirmation submitOrder(Order order) throws AvroRemoteException, OrderFailure {
+    log.info("Received order for '{}' items from customer with id '{}'",
+      new Object[] {order.getOrderItems().size(), order.getCustomerId()});
+
+    long estimatedCompletion = System.currentTimeMillis() + (5 * 60 * 60 * 1000); // five hours from now, in milliseconds
+    return Confirmation.newBuilder().setCustomerId(order.getCustomerId()).setEstimatedCompletion(estimatedCompletion)
+      .setOrderId(order.getOrderId()).build();
+  }
+}
diff --git a/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/transport/SimpleOrderServiceClient.java b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/transport/SimpleOrderServiceClient.java
new file mode 100644
index 0000000..5c3dc4f
--- /dev/null
+++ b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/transport/SimpleOrderServiceClient.java
@@ -0,0 +1,79 @@
+#set( $symbol_pound = '#' )
+#set( $symbol_dollar = '$' )
+#set( $symbol_escape = '\' )
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ${package}.transport;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import org.apache.avro.AvroRemoteException;
+import org.apache.avro.ipc.NettyTransceiver;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import ${package}.service.Confirmation;
+import ${package}.service.Order;
+import ${package}.service.OrderFailure;
+import ${package}.service.OrderProcessingService;
+
+/**
+ * {@code SimpleOrderServiceClient} is a basic client for the Netty backed {@link OrderProcessingService}
+ * implementation.
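+ * <p>
+ * A minimal usage sketch (the address and port are illustrative assumptions):
+ * <pre>{@code
+ * SimpleOrderServiceClient client =
+ *     new SimpleOrderServiceClient(new InetSocketAddress("localhost", 12345));
+ * client.start();
+ * Confirmation confirmation = client.submitOrder(order);
+ * client.stop();
+ * }</pre>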
+ */
+public class SimpleOrderServiceClient implements OrderProcessingService {
+
+  private static final Logger log = LoggerFactory.getLogger(SimpleOrderServiceClient.class);
+
+  private InetSocketAddress endpointAddress;
+
+  private Transceiver transceiver;
+
+  private OrderProcessingService service;
+
+  public SimpleOrderServiceClient(InetSocketAddress endpointAddress) {
+    this.endpointAddress = endpointAddress;
+  }
+
+  public synchronized void start() throws IOException {
+    if (log.isInfoEnabled()) {
+      log.info("Starting Simple Ordering Netty client on '{}'", endpointAddress);
+    }
+    transceiver = new NettyTransceiver(endpointAddress);
+    service = SpecificRequestor.getClient(OrderProcessingService.class, transceiver);
+  }
+
+  public void stop() throws IOException {
+    if (log.isInfoEnabled()) {
+      log.info("Stopping Simple Ordering Netty client on '{}'", endpointAddress);
+    }
+    if (transceiver != null && transceiver.isConnected()) {
+      transceiver.close();
+    }
+  }
+
+  @Override
+  public Confirmation submitOrder(Order order) throws AvroRemoteException, OrderFailure {
+    return service.submitOrder(order);
+  }
+
+}
diff --git a/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/transport/SimpleOrderServiceEndpoint.java b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/transport/SimpleOrderServiceEndpoint.java
new file mode 100644
index 0000000..6b19d00
--- /dev/null
+++ b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/transport/SimpleOrderServiceEndpoint.java
@@ -0,0 +1,67 @@
+#set( $symbol_pound = '#' )
+#set( $symbol_dollar = '$' )
+#set( $symbol_escape = '\' )
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ${package}.transport;
+
+import java.net.InetSocketAddress;
+
+import ${package}.service.SimpleOrderService;
+import org.apache.avro.ipc.NettyServer;
+import org.apache.avro.ipc.Server;
+import org.apache.avro.ipc.specific.SpecificResponder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import ${package}.service.OrderProcessingService;
+
+/**
+ * {@code SimpleOrderServiceEndpoint} provides a very basic example Netty endpoint for the
+ * {@link SimpleOrderService} implementation.
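+ * <p>
+ * A minimal usage sketch (address and port are illustrative assumptions):
+ * <pre>{@code
+ * SimpleOrderServiceEndpoint endpoint =
+ *     new SimpleOrderServiceEndpoint(new InetSocketAddress("localhost", 12345));
+ * endpoint.start();
+ * // ... serve requests ...
+ * endpoint.stop();
+ * }</pre>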
+ */
+public class SimpleOrderServiceEndpoint {
+
+  private static final Logger log = LoggerFactory.getLogger(SimpleOrderServiceEndpoint.class);
+
+  private InetSocketAddress endpointAddress;
+
+  private Server service;
+
+  public SimpleOrderServiceEndpoint(InetSocketAddress endpointAddress) {
+    this.endpointAddress = endpointAddress;
+  }
+
+  public synchronized void start() throws Exception {
+    if (log.isInfoEnabled()) {
+      log.info("Starting Simple Ordering Netty Server on '{}'", endpointAddress);
+    }
+
+    SpecificResponder responder = new SpecificResponder(OrderProcessingService.class, new SimpleOrderService());
+    service = new NettyServer(responder, endpointAddress);
+    service.start();
+  }
+
+  public synchronized void stop() throws Exception {
+    if (log.isInfoEnabled()) {
+      log.info("Stopping Simple Ordering Server on '{}'", endpointAddress);
+    }
+    service.stop();
+  }
+}
diff --git a/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/resources/logback.xml b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/resources/logback.xml
new file mode 100644
index 0000000..2822546
--- /dev/null
+++ b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/resources/logback.xml
@@ -0,0 +1,30 @@
+#set( $symbol_pound = '#' )
+#set( $symbol_dollar = '$' )
+#set( $symbol_escape = '\' )
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+  <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
+    <encoder>
+      <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
+    </encoder>
+  </appender>
+  <root level="info">
+    <appender-ref ref="console" />
+  </root>
+</configuration>
diff --git a/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/test/java/integration/SimpleOrderServiceIntegrationTest.java b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/test/java/integration/SimpleOrderServiceIntegrationTest.java
new file mode 100644
index 0000000..e751192
--- /dev/null
+++ b/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/test/java/integration/SimpleOrderServiceIntegrationTest.java
@@ -0,0 +1,87 @@
+#set( $symbol_pound = '#' )
+#set( $symbol_dollar = '$' )
+#set( $symbol_escape = '\' )
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ${package}.integration;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.List;
+
+import ${package}.transport.SimpleOrderServiceEndpoint;
+import ${package}.transport.SimpleOrderServiceClient;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import ${package}.service.Confirmation;
+import ${package}.service.Item;
+import ${package}.service.Order;
+
+/**
+ * <code>SimpleOrderServiceIntegrationTest</code> runs as part of the integration-test phase of the
+ * build and is meant for end-to-end service testing.
+ */
+public class SimpleOrderServiceIntegrationTest {
+
+	private static SimpleOrderServiceEndpoint service;
+	private static SimpleOrderServiceClient client;
+
+	@Test
+	public void simpleRoundTripTest() throws Exception {
+		Order simpleOrder = createOrder();
+		Confirmation c = client.submitOrder(simpleOrder);
+
+		assertEquals(c.getOrderId(), simpleOrder.getOrderId());
+		assertEquals(c.getCustomerId(), simpleOrder.getCustomerId());
+		assertTrue(c.getEstimatedCompletion() > 0);
+	}
+
+	@BeforeClass
+	public static void setupTransport() throws Exception {
+		InetSocketAddress endpointAddress = new InetSocketAddress("0.0.0.0", 12345);
+		service = new SimpleOrderServiceEndpoint(endpointAddress);
+		client = new SimpleOrderServiceClient(endpointAddress);
+
+		service.start();
+		client.start();
+	}
+
+	@AfterClass
+	public static void shutdownTransport() throws Exception {
+		client.stop();
+		service.stop();
+	}
+
+	public Order createOrder() {
+		return Order.newBuilder().setOrderId(1).setCustomerId(1).setOrderItems(createItems()).build();
+	}
+
+	public List<Item> createItems() {
+		List<Item> items = new ArrayList<Item>();
+		for (int x = 0; x < 5; x++)
+			items.add(Item.newBuilder().setName("Item-" + x).setQuantity(x + 1).setSku(1230 + x).build());
+		return items;
+	}
+
+}
diff --git a/lang/java/archetypes/avro-service-archetype/src/test/integration/projects/basic/archetype.properties b/lang/java/archetypes/avro-service-archetype/src/test/integration/projects/basic/archetype.properties
new file mode 100644
index 0000000..1147471
--- /dev/null
+++ b/lang/java/archetypes/avro-service-archetype/src/test/integration/projects/basic/archetype.properties
@@ -0,0 +1,6 @@
+#Mon Sep 19 23:10:41 IST 2011
+version=0.1-SNAPSHOT
+groupId=org.apache.avro.example
+artifactId=simple-service
+package=org.apache.avro.example
+
diff --git a/lang/java/archetypes/avro-service-archetype/src/test/integration/projects/basic/goal.txt b/lang/java/archetypes/avro-service-archetype/src/test/integration/projects/basic/goal.txt
new file mode 100644
index 0000000..cc578f8
--- /dev/null
+++ b/lang/java/archetypes/avro-service-archetype/src/test/integration/projects/basic/goal.txt
@@ -0,0 +1 @@
+integration-test
diff --git a/lang/java/archetypes/pom.xml b/lang/java/archetypes/pom.xml
new file mode 100644
index 0000000..194a1a6
--- /dev/null
+++ b/lang/java/archetypes/pom.xml
@@ -0,0 +1,121 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.avro</groupId>
+    <artifactId>avro-parent</artifactId>
+    <version>1.8.0</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>avro-archetypes-parent</artifactId>
+  <packaging>pom</packaging>
+
+  <name>Apache Avro Maven Archetypes</name>
+  <description>Archetypes parent defining configuration for generating archetype poms with the correct Avro version</description>
+
+  <properties>
+    <maven.archetype.version>2.1</maven.archetype.version>
+  </properties>
+
+  <modules>
+    <module>avro-service-archetype</module>
+  </modules>
+
+  <build>
+    <extensions>
+      <extension>
+        <groupId>org.apache.maven.archetype</groupId>
+        <artifactId>archetype-packaging</artifactId>
+        <version>${archetype-plugin.version}</version>
+      </extension>
+    </extensions>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <artifactId>maven-archetype-plugin</artifactId>
+          <version>${archetype-plugin.version}</version>
+          <extensions>true</extensions>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+    <plugins>
+      <!-- Creates a pom.xml for the archetype that references the current avro version -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-resources-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy-pom-with-building-version</id>
+            <goals>
+              <goal>copy-resources</goal>
+            </goals>
+            <phase>generate-resources</phase>
+            <configuration>
+              <outputDirectory>target/generated-resources</outputDirectory>
+              <escapeString>\</escapeString>
+              <resources>
+                <resource>
+                  <directory>src/main/pom/</directory>
+                  <filtering>true</filtering>
+                  <includes>
+                    <include>pom.xml</include>
+                  </includes>
+                </resource>
+              </resources>
+            </configuration>
+          </execution>
+          <execution>
+            <id>copy-archetype-integration-resources</id>
+            <goals>
+              <goal>copy-resources</goal>
+            </goals>
+            <phase>verify</phase>
+            <configuration>
+              <outputDirectory>target/test-classes/</outputDirectory>
+              <resources>
+                <resource>
+                  <directory>src/test/integration</directory>
+                  <includes>
+                    <include>**/*</include>
+                  </includes>
+                </resource>
+              </resources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-archetype-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>integration-test</goal>
+            </goals>
+            <phase>install</phase>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>
diff --git a/lang/java/avro/pom.xml b/lang/java/avro/pom.xml
new file mode 100644
index 0000000..183d3d8
--- /dev/null
+++ b/lang/java/avro/pom.xml
@@ -0,0 +1,175 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <artifactId>avro-parent</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.8.0</version>
+    <relativePath>../</relativePath>
+  </parent>
+
+  <artifactId>avro</artifactId>
+
+  <name>Apache Avro</name>
+  <url>http://avro.apache.org</url>
+  <description>Avro core components</description>
+  <packaging>bundle</packaging>
+
+  <properties>
+    <osgi.import>
+      !org.apache.avro*,
+      com.thoughtworks.paranamer,
+      org.codehaus.jackson*,
+      org.xerial.snappy;resolution:=optional,
+      *
+    </osgi.import>
+    <osgi.export>org.apache.avro*;version="${project.version}"</osgi.export>
+  </properties>
+
+  <build>
+    <resources>
+      <resource>
+        <directory>../../../share/schemas</directory>
+        <includes>
+          <include>org/apache/avro/data/Json.avsc</include>
+        </includes>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>com.thoughtworks.paranamer</groupId>
+        <artifactId>paranamer-maven-plugin</artifactId>
+        <version>${paranamer.version}</version>
+        <executions>
+          <execution>
+            <id>paranamer-test</id>
+            <configuration>
+              <sourceDirectory>${project.build.testSourceDirectory}</sourceDirectory>
+              <outputDirectory>${project.build.testOutputDirectory}</outputDirectory>
+            </configuration>
+            <phase>process-test-classes</phase>
+            <goals>
+              <goal>generate</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <profiles>
+    <profile>
+      <id>interop-data-generate</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>exec-maven-plugin</artifactId>
+            <version>${exec-plugin.version}</version>
+            <executions>
+              <!-- Generate random data for interop tests, using null codec -->
+              <execution>
+                <id>interop-generate-null-codec</id>
+                <phase>generate-resources</phase>
+                <configuration>
+                  <mainClass>org.apache.avro.RandomData</mainClass>
+                  <classpathScope>test</classpathScope>
+                  <arguments>
+                    <argument>../../../share/test/schemas/interop.avsc</argument>
+                    <argument>../../../build/interop/data/java.avro</argument>
+                    <argument>100</argument>
+                  </arguments>
+                </configuration>
+                <goals><goal>java</goal></goals>
+              </execution>
+              <!-- Generate random data for interop tests, using deflate codec -->
+              <execution>
+                <id>interop-generate-deflate-codec</id>
+                <phase>generate-resources</phase>
+                <configuration>
+                  <mainClass>org.apache.avro.RandomData</mainClass>
+                  <classpathScope>test</classpathScope>
+                  <arguments>
+                    <argument>../../../share/test/schemas/interop.avsc</argument>
+                    <argument>../../../build/interop/data/java_deflate.avro</argument>
+                    <argument>100</argument>
+                    <argument>deflate</argument>
+                  </arguments>
+                </configuration>
+                <goals><goal>java</goal></goals>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-core-asl</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.thoughtworks.paranamer</groupId>
+      <artifactId>paranamer</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.xerial.snappy</groupId>
+      <artifactId>snappy-java</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-compress</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.tukaani</groupId>
+      <artifactId>xz</artifactId>
+      <version>${tukaani.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>joda-time</groupId>
+      <artifactId>joda-time</artifactId>
+      <optional>true</optional>
+    </dependency>
+  </dependencies>
+
+</project>
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/AvroRemoteException.java b/lang/java/avro/src/main/java/org/apache/avro/AvroRemoteException.java
new file mode 100644
index 0000000..11e2125
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/AvroRemoteException.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+import java.io.IOException;
+
+/** Base class for exceptions thrown to client by server. */
+public class AvroRemoteException extends IOException {
+  private Object value;
+
+  protected AvroRemoteException() {}
+
+  public AvroRemoteException(Throwable value) {
+    this(value.toString());
+    initCause(value);
+  }
+
+  public AvroRemoteException(Object value) {
+    super(value != null ? value.toString() : null);
+    this.value = value;
+  }
+  
+  public AvroRemoteException(Object value, Throwable cause) {
+    super(value != null ? value.toString() : null, cause);
+    this.value = value;
+  }
+  
+  public Object getValue() { return value; }
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/AvroRuntimeException.java b/lang/java/avro/src/main/java/org/apache/avro/AvroRuntimeException.java
new file mode 100644
index 0000000..abbaaa0
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/AvroRuntimeException.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+/** Base Avro exception. */
+public class AvroRuntimeException extends RuntimeException {
+  public AvroRuntimeException(Throwable cause) { super(cause); }
+  public AvroRuntimeException(String message) { super(message); }
+  public AvroRuntimeException(String message, Throwable cause) {
+    super(message, cause);
+  }
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/AvroTypeException.java b/lang/java/avro/src/main/java/org/apache/avro/AvroTypeException.java
new file mode 100644
index 0000000..a4a1982
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/AvroTypeException.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+
+/** Thrown when an illegal type is used. */
+public class AvroTypeException extends AvroRuntimeException {
+  public AvroTypeException(String message) { super(message); }
+  public AvroTypeException(String message, Throwable cause) {
+    super(message, cause);
+  }
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/Conversion.java b/lang/java/avro/src/main/java/org/apache/avro/Conversion.java
new file mode 100644
index 0000000..c3baf4e
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/Conversion.java
@@ -0,0 +1,189 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+import java.nio.ByteBuffer;
+import java.util.Collection;
+import java.util.Map;
+import org.apache.avro.generic.GenericEnumSymbol;
+import org.apache.avro.generic.GenericFixed;
+import org.apache.avro.generic.IndexedRecord;
+
+/**
+ * Conversion between generic and logical type instances.
+ * <p>
+ * Instances of this class are added to GenericData to convert a logical type
+ * to a particular representation.
+ * <p>
+ * Implementations must provide:
+ * <ul>
+ *   <li>{@link #getConvertedType()}: get the Java class used for the logical type</li>
+ *   <li>{@link #getLogicalTypeName()}: get the logical type this implements</li>
+ * </ul>
+ * <p>
+ * Subclasses must also override all of the conversion methods for Avro's base
+ * types that are valid for the logical type, or else risk an
+ * {@code UnsupportedOperationException} at runtime.
+ * <p>
+ * Optionally, use {@link #getRecommendedSchema()} to provide a Schema that
+ * will be used when a Schema is generated for the class returned by
+ * {@code getConvertedType}.
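+ * <p>
+ * A minimal sketch of a conversion for a string-backed "uuid" logical type
+ * (mirroring {@code Conversions.UUIDConversion}):
+ * <pre>{@code
+ * public class UuidConversion extends Conversion<UUID> {
+ *   public Class<UUID> getConvertedType() { return UUID.class; }
+ *   public String getLogicalTypeName() { return "uuid"; }
+ *   public UUID fromCharSequence(CharSequence value, Schema schema, LogicalType type) {
+ *     return UUID.fromString(value.toString());
+ *   }
+ *   public CharSequence toCharSequence(UUID value, Schema schema, LogicalType type) {
+ *     return value.toString();
+ *   }
+ * }
+ * }</pre>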
+ *
+ * @param <T> a Java type that generic data is converted to
+ */
+public abstract class Conversion<T> {
+
+  /**
+   * Return the Java class representing the logical type.
+   *
+   * @return a Java class returned by from methods and accepted by to methods
+   */
+  public abstract Class<T> getConvertedType();
+
+  /**
+   * Return the logical type this class converts.
+   *
+   * @return a String logical type name
+   */
+  public abstract String getLogicalTypeName();
+
+  public Schema getRecommendedSchema() {
+    throw new UnsupportedOperationException(
+        "No recommended schema for " + getLogicalTypeName());
+  }
+
+  public T fromBoolean(Boolean value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "fromBoolean is not supported for " + type.getName());
+  }
+
+  public T fromInt(Integer value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "fromInt is not supported for " + type.getName());
+  }
+
+  public T fromLong(Long value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "fromLong is not supported for " + type.getName());
+  }
+
+  public T fromFloat(Float value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "fromFloat is not supported for " + type.getName());
+  }
+
+  public T fromDouble(Double value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "fromDouble is not supported for " + type.getName());
+  }
+
+  public T fromCharSequence(CharSequence value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "fromCharSequence is not supported for " + type.getName());
+  }
+
+  public T fromEnumSymbol(GenericEnumSymbol value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "fromEnumSymbol is not supported for " + type.getName());
+  }
+
+  public T fromFixed(GenericFixed value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "fromFixed is not supported for " + type.getName());
+  }
+
+  public T fromBytes(ByteBuffer value, Schema schema, LogicalType type)  {
+    throw new UnsupportedOperationException(
+        "fromBytes is not supported for " + type.getName());
+  }
+
+  public T fromArray(Collection<?> value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "fromArray is not supported for " + type.getName());
+  }
+
+  public T fromMap(Map<?, ?> value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "fromMap is not supported for " + type.getName());
+  }
+
+  public T fromRecord(IndexedRecord value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "fromRecord is not supported for " + type.getName());
+  }
+
+  public Boolean toBoolean(T value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "toBoolean is not supported for " + type.getName());
+  }
+
+  public Integer toInt(T value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "toInt is not supported for " + type.getName());
+  }
+
+  public Long toLong(T value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "toLong is not supported for " + type.getName());
+  }
+
+  public Float toFloat(T value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "toFloat is not supported for " + type.getName());
+  }
+
+  public Double toDouble(T value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "toDouble is not supported for " + type.getName());
+  }
+
+  public CharSequence toCharSequence(T value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "toCharSequence is not supported for " + type.getName());
+  }
+
+  public GenericEnumSymbol toEnumSymbol(T value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "toEnumSymbol is not supported for " + type.getName());
+  }
+
+  public GenericFixed toFixed(T value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "toFixed is not supported for " + type.getName());
+  }
+
+  public ByteBuffer toBytes(T value, Schema schema, LogicalType type)  {
+    throw new UnsupportedOperationException(
+        "toBytes is not supported for " + type.getName());
+  }
+
+  public Collection<?> toArray(T value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "toArray is not supported for " + type.getName());
+  }
+
+  public Map<?, ?> toMap(T value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "toMap is not supported for " + type.getName());
+  }
+
+  public IndexedRecord toRecord(T value, Schema schema, LogicalType type) {
+    throw new UnsupportedOperationException(
+        "toRecord is not supported for " + type.getName());
+  }
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/Conversions.java b/lang/java/avro/src/main/java/org/apache/avro/Conversions.java
new file mode 100644
index 0000000..14408d9
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/Conversions.java
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.nio.ByteBuffer;
+import java.util.UUID;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericFixed;
+
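+/**
+ * Standard {@link Conversion} implementations for Avro's built-in logical
+ * types. A minimal registration sketch (assuming {@code GenericData}'s
+ * {@code addLogicalTypeConversion} method):
+ * <pre>{@code
+ * GenericData data = new GenericData();
+ * data.addLogicalTypeConversion(new Conversions.DecimalConversion());
+ * }</pre>
+ */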
+public class Conversions {
+
+  public static class UUIDConversion extends Conversion<UUID> {
+    @Override
+    public Class<UUID> getConvertedType() {
+      return UUID.class;
+    }
+
+    @Override
+    public Schema getRecommendedSchema() {
+      return LogicalTypes.uuid().addToSchema(Schema.create(Schema.Type.STRING));
+    }
+
+    @Override
+    public String getLogicalTypeName() {
+      return "uuid";
+    }
+
+    @Override
+    public UUID fromCharSequence(CharSequence value, Schema schema, LogicalType type) {
+      return UUID.fromString(value.toString());
+    }
+
+    @Override
+    public CharSequence toCharSequence(UUID value, Schema schema, LogicalType type) {
+      return value.toString();
+    }
+  }
+
+  public static class DecimalConversion extends Conversion<BigDecimal> {
+    @Override
+    public Class<BigDecimal> getConvertedType() {
+      return BigDecimal.class;
+    }
+
+    @Override
+    public Schema getRecommendedSchema() {
+      throw new UnsupportedOperationException(
+          "No recommended schema for decimal (scale is required)");
+    }
+
+    @Override
+    public String getLogicalTypeName() {
+      return "decimal";
+    }
+
+    @Override
+    public BigDecimal fromBytes(ByteBuffer value, Schema schema, LogicalType type) {
+      int scale = ((LogicalTypes.Decimal) type).getScale();
+      // always copy the bytes out because BigInteger has no offset/length ctor;
+      // use duplicate() so the caller's buffer position is left untouched
+      byte[] bytes = new byte[value.remaining()];
+      value.duplicate().get(bytes);
+      return new BigDecimal(new BigInteger(bytes), scale);
+    }
+
+    @Override
+    public ByteBuffer toBytes(BigDecimal value, Schema schema, LogicalType type) {
+      int scale = ((LogicalTypes.Decimal) type).getScale();
+      if (scale != value.scale()) {
+        throw new AvroTypeException("Cannot encode decimal with scale " +
+            value.scale() + " as scale " + scale);
+      }
+      return ByteBuffer.wrap(value.unscaledValue().toByteArray());
+    }
+
+    @Override
+    public BigDecimal fromFixed(GenericFixed value, Schema schema, LogicalType type) {
+      int scale = ((LogicalTypes.Decimal) type).getScale();
+      return new BigDecimal(new BigInteger(value.bytes()), scale);
+    }
+
+    @Override
+    public GenericFixed toFixed(BigDecimal value, Schema schema, LogicalType type) {
+      int scale = ((LogicalTypes.Decimal) type).getScale();
+      if (scale != value.scale()) {
+        throw new AvroTypeException("Cannot encode decimal with scale " +
+            value.scale() + " as scale " + scale);
+      }
+
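+      // pad the high-order bytes with the two's-complement sign byte so the
+      // fixed-size encoding decodes to the same signed unscaled value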
+      byte fillByte = (byte) (value.signum() < 0 ? 0xFF : 0x00);
+      byte[] unscaled = value.unscaledValue().toByteArray();
+      byte[] bytes = new byte[schema.getFixedSize()];
+      int offset = bytes.length - unscaled.length;
+
+      for (int i = 0; i < bytes.length; i += 1) {
+        if (i < offset) {
+          bytes[i] = fillByte;
+        } else {
+          bytes[i] = unscaled[i - offset];
+        }
+      }
+
+      return new GenericData.Fixed(schema, bytes);
+    }
+  }
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/JsonProperties.java b/lang/java/avro/src/main/java/org/apache/avro/JsonProperties.java
new file mode 100644
index 0000000..6273036
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/JsonProperties.java
@@ -0,0 +1,233 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+import java.io.IOException;
+
+import org.apache.avro.util.internal.JacksonUtils;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.node.TextNode;
+
+/**
+ * Base class for objects that have JSON-valued properties. Avro and JSON values are
+ * represented in Java using the following mapping:
+ * 
+ * <table>
+ *   <tr>
+ *     <th>Avro type</th>
+ *     <th>JSON type</th>
+ *     <th>Java type</th>
+ *   </tr>
+ *   <tr>
+ *     <td><code>null</code></td>
+ *     <td><code>null</code></td>
+ *     <td>{@link #NULL_VALUE}</td>
+ *   </tr>
+ *   <tr>
+ *     <td><code>boolean</code></td>
+ *     <td>Boolean</td>
+ *     <td><code>boolean</code></td>
+ *   </tr>
+ *   <tr>
+ *     <td><code>int</code></td>
+ *     <td>Number</td>
+ *     <td><code>int</code></td>
+ *   </tr>
+ *   <tr>
+ *     <td><code>long</code></td>
+ *     <td>Number</td>
+ *     <td><code>long</code></td>
+ *   </tr>
+ *   <tr>
+ *     <td><code>float</code></td>
+ *     <td>Number</td>
+ *     <td><code>float</code></td>
+ *   </tr>
+ *   <tr>
+ *     <td><code>double</code></td>
+ *     <td>Number</td>
+ *     <td><code>double</code></td>
+ *   </tr>
+ *   <tr>
+ *     <td><code>bytes</code></td>
+ *     <td>String</td>
+ *     <td><code>byte[]</code></td>
+ *   </tr>
+ *   <tr>
+ *     <td><code>string</code></td>
+ *     <td>String</td>
+ *     <td>{@link java.lang.String}</td>
+ *   </tr>
+ *   <tr>
+ *     <td><code>record</code></td>
+ *     <td>Object</td>
+ *     <td>{@link java.util.Map}</td>
+ *   </tr>
+ *   <tr>
+ *     <td><code>enum</code></td>
+ *     <td>String</td>
+ *     <td>{@link java.lang.String}</td>
+ *   </tr>
+ *   <tr>
+ *     <td><code>array</code></td>
+ *     <td>Array</td>
+ *     <td>{@link java.util.Collection}</td>
+ *   </tr>
+ *   <tr>
+ *     <td><code>map</code></td>
+ *     <td>Object</td>
+ *     <td>{@link java.util.Map}</td>
+ *   </tr>
+ *   <tr>
+ *     <td><code>fixed</code></td>
+ *     <td>String</td>
+ *     <td><code>byte[]</code></td>
+ *   </tr>
+ * </table>
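+ *
+ * <p>
+ * A minimal usage sketch on a {@code Schema} (the property name is an
+ * assumption for illustration):
+ * <pre>{@code
+ * schema.addProp("docUrl", "http://example.com/docs");
+ * String url = schema.getProp("docUrl");       // string-valued properties
+ * Object obj = schema.getObjectProp("docUrl"); // any JSON value
+ * }</pre>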
+ *
+ * @see org.apache.avro.data.Json
+ */
+public abstract class JsonProperties {
+  public static class Null {
+    private Null() {}
+  }
+  /** A value representing a JSON <code>null</code>. */
+  public static final Null NULL_VALUE = new Null();
+
+  Map<String,JsonNode> props = new LinkedHashMap<String,JsonNode>(1);
+
+  private Set<String> reserved;
+
+  JsonProperties(Set<String> reserved) {
+    this.reserved = reserved;
+  }
+
+  /**
+   * Returns the value of the named, string-valued property in this schema.
+   * Returns <tt>null</tt> if there is no string-valued property with that name.
+   */
+  public String getProp(String name) {
+    JsonNode value = getJsonProp(name);
+    return value != null && value.isTextual() ? value.getTextValue() : null;
+  }
+
+  /**
+   * Returns the value of the named property in this schema.
+   * Returns <tt>null</tt> if there is no property with that name.
+   * @deprecated use {@link #getObjectProp(String)}
+   */
+  @Deprecated
+  public synchronized JsonNode getJsonProp(String name) {
+    return props.get(name);
+  }
+
+  /**
+   * Returns the value of the named property in this schema.
+   * Returns <tt>null</tt> if there is no property with that name.
+   */
+  public synchronized Object getObjectProp(String name) {
+    return JacksonUtils.toObject(props.get(name));
+  }
+
+  /**
+   * Adds a property with the given name and value. Neither <tt>name</tt>
+   * nor <tt>value</tt> may be <tt>null</tt>. It is illegal to add a
+   * property if another with the same name but a different value already
+   * exists in this schema.
+   * 
+   * @param name The name of the property to add
+   * @param value The value for the property to add
+   */
+  public void addProp(String name, String value) {
+    addProp(name, TextNode.valueOf(value));
+  }
+
+  /**
+   * Adds a property with the given name and value. Neither <tt>name</tt>
+   * nor <tt>value</tt> may be <tt>null</tt>. It is illegal to add a
+   * property if another with the same name but a different value already
+   * exists in this schema.
+   * 
+   * @param name The name of the property to add
+   * @param value The value for the property to add
+   * @deprecated use {@link #addProp(String, Object)}
+   */
+  @Deprecated
+  public synchronized void addProp(String name, JsonNode value) {
+    if (reserved.contains(name))
+      throw new AvroRuntimeException("Can't set reserved property: " + name);
+      
+    if (value == null)
+      throw new AvroRuntimeException("Can't set a property to null: " + name);
+    
+    JsonNode old = props.get(name);
+    if (old == null)
+      props.put(name, value);
+    else if (!old.equals(value))
+      throw new AvroRuntimeException("Can't overwrite property: " + name);
+  }
+
+  public synchronized void addProp(String name, Object value) {
+    addProp(name, JacksonUtils.toJsonNode(value));
+  }
+
+  /** Return the defined properties that have string values. */
+  @Deprecated public Map<String,String> getProps() {
+    Map<String,String> result = new LinkedHashMap<String,String>();
+    for (Map.Entry<String,JsonNode> e : props.entrySet())
+      if (e.getValue().isTextual())
+        result.put(e.getKey(), e.getValue().getTextValue());
+    return result;
+  }
+
+  /** Convert a map of string-valued properties to Json properties. */
+  Map<String,JsonNode> jsonProps(Map<String,String> stringProps) {
+    Map<String,JsonNode> result = new LinkedHashMap<String,JsonNode>();
+    for (Map.Entry<String,String> e : stringProps.entrySet())
+      result.put(e.getKey(), TextNode.valueOf(e.getValue()));
+    return result;
+  }
+
+  /**
+   * Return the defined properties as an unmodifiable Map.
+   * @deprecated use {@link #getObjectProps()}
+   */
+  @Deprecated
+  public Map<String,JsonNode> getJsonProps() {
+    return Collections.unmodifiableMap(props);
+  }
+
+  /** Return the defined properties as an unmodifiable Map. */
+  public Map<String,Object> getObjectProps() {
+    Map<String,Object> result = new LinkedHashMap<String,Object>();
+    for (Map.Entry<String,JsonNode> e : props.entrySet())
+      result.put(e.getKey(), JacksonUtils.toObject(e.getValue()));
+    return result;
+  }
+
+  void writeProps(JsonGenerator gen) throws IOException {
+    for (Map.Entry<String,JsonNode> e : props.entrySet())
+      gen.writeObjectField(e.getKey(), e.getValue());
+  }
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/LogicalType.java b/lang/java/avro/src/main/java/org/apache/avro/LogicalType.java
new file mode 100644
index 0000000..d701693
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/LogicalType.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.specific.SpecificData;
+
+/**
+ * Logical types provide an opt-in way to extend Avro's types. A logical type
+ * specifies a way of representing a high-level type as a base Avro type. For
+ * example, a date is specified as the number of days after the Unix epoch (or
+ * before it, using a negative value). This enables extensions to Avro's type
+ * system without breaking binary compatibility: older versions simply see the
+ * base type and ignore the logical type.
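+ * <p>
+ * A minimal sketch of attaching a logical type to a schema:
+ * <pre>{@code
+ * Schema decimalSchema =
+ *     LogicalTypes.decimal(9, 2).addToSchema(Schema.create(Schema.Type.BYTES));
+ * }</pre>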
+ */
+public class LogicalType {
+
+  public static final String LOGICAL_TYPE_PROP = "logicalType";
+
+  private static final String[] INCOMPATIBLE_PROPS = new String[] {
+      GenericData.STRING_PROP, SpecificData.CLASS_PROP,
+      SpecificData.KEY_CLASS_PROP, SpecificData.ELEMENT_PROP
+  };
+
+  private final String name;
+
+  public LogicalType(String logicalTypeName) {
+    this.name = logicalTypeName.intern();
+  }
+
+  /**
+   * Get the name of this logical type.
+   * <p>
+   * This name is set as the Schema property "logicalType".
+   *
+   * @return the String name of the logical type
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * Add this logical type to the given Schema.
+   * <p>
+   * The "logicalType" property will be set to this type's name, and other
+   * type-specific properties may be added. The Schema is first validated to
+   * ensure it is compatible.
+   *
+   * @param schema a Schema
+   * @return the modified Schema
+   * @throws IllegalArgumentException if the type and schema are incompatible
+   */
+  public Schema addToSchema(Schema schema) {
+    validate(schema);
+    schema.addProp(LOGICAL_TYPE_PROP, name);
+    schema.setLogicalType(this);
+    return schema;
+  }
+
+  /**
+   * Validate this logical type for the given Schema.
+   * <p>
+   * This will throw an exception if the Schema is incompatible with this type.
+   * For example, a date is stored as an int and is incompatible with a fixed
+   * Schema.
+   *
+   * @param schema a Schema
+   * @throws IllegalArgumentException if the type and schema are incompatible
+   */
+  public void validate(Schema schema) {
+    for (String incompatible : INCOMPATIBLE_PROPS) {
+      if (schema.getProp(incompatible) != null) {
+        throw new IllegalArgumentException(
+            LOGICAL_TYPE_PROP + " cannot be used with " + incompatible);
+      }
+    }
+  }
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/LogicalTypes.java b/lang/java/avro/src/main/java/org/apache/avro/LogicalTypes.java
new file mode 100644
index 0000000..3f96340
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/LogicalTypes.java
@@ -0,0 +1,364 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import org.apache.avro.util.WeakIdentityHashMap;
+
+public class LogicalTypes {
+
+  private static final Map<Schema, LogicalType> CACHE =
+      new WeakIdentityHashMap<Schema, LogicalType>();
+
+  public interface LogicalTypeFactory {
+    LogicalType fromSchema(Schema schema);
+  }
+
+  private static final Map<String, LogicalTypeFactory> REGISTERED_TYPES =
+      new ConcurrentHashMap<String, LogicalTypeFactory>();
+
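+  /**
+   * Register a custom logical type factory so {@link #fromSchema(Schema)} can
+   * resolve the named type. A minimal sketch, using a hypothetical
+   * "even-int" logical type:
+   * <pre>{@code
+   * LogicalTypes.register("even-int", new LogicalTypes.LogicalTypeFactory() {
+   *   public LogicalType fromSchema(Schema schema) {
+   *     return new LogicalType("even-int");
+   *   }
+   * });
+   * }</pre>
+   */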
+  public static void register(String logicalTypeName, LogicalTypeFactory factory) {
+    if (logicalTypeName == null) {
+      throw new NullPointerException("Invalid logical type name: null");
+    }
+    if (factory == null) {
+      throw new NullPointerException("Invalid logical type factory: null");
+    }
+    REGISTERED_TYPES.put(logicalTypeName, factory);
+  }
+
+  /**
+   * Returns the {@link LogicalType} from the schema, if one is present.
+   */
+  public static LogicalType fromSchema(Schema schema) {
+    return fromSchemaImpl(schema, true);
+  }
+
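+  /**
+   * Returns the {@link LogicalType} from the schema, or null if the schema
+   * declares no logical type or declares one that is invalid, rather than
+   * throwing; results (including nulls) are cached per schema.
+   */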
+  public static LogicalType fromSchemaIgnoreInvalid(Schema schema) {
+    if (CACHE.containsKey(schema)) {
+      return CACHE.get(schema);
+    }
+
+    LogicalType logicalType = fromSchemaImpl(schema, false);
+
+    // add to the cache, even if it is null
+    CACHE.put(schema, logicalType);
+
+    return logicalType;
+  }
+
+  private static LogicalType fromSchemaImpl(Schema schema, boolean throwErrors) {
+    String typeName = schema.getProp(LogicalType.LOGICAL_TYPE_PROP);
+    if (typeName == null) {
+      // no "logicalType" property, so there is nothing to resolve
+      return null;
+    }
+
+    LogicalType logicalType;
+    try {
+      if (TIMESTAMP_MILLIS.equals(typeName)) {
+        logicalType = TIMESTAMP_MILLIS_TYPE;
+      } else if (DECIMAL.equals(typeName)) {
+        logicalType = new Decimal(schema);
+      } else if (UUID.equals(typeName)) {
+        logicalType = UUID_TYPE;
+      } else if (DATE.equals(typeName)) {
+        logicalType = DATE_TYPE;
+      } else if (TIMESTAMP_MICROS.equals(typeName)) {
+        logicalType = TIMESTAMP_MICROS_TYPE;
+      } else if (TIME_MILLIS.equals(typeName)) {
+        logicalType = TIME_MILLIS_TYPE;
+      } else if (TIME_MICROS.equals(typeName)) {
+        logicalType = TIME_MICROS_TYPE;
+      } else if (REGISTERED_TYPES.containsKey(typeName)) {
+        logicalType = REGISTERED_TYPES.get(typeName).fromSchema(schema);
+      } else {
+        logicalType = null;
+      }
+
+      // make sure the type is valid before returning it
+      if (logicalType != null) {
+        logicalType.validate(schema);
+      }
+    } catch (RuntimeException e) {
+      if (throwErrors) {
+        throw e;
+      }
+      // ignore invalid types
+      logicalType = null;
+    }
+
+    return logicalType;
+  }
+
+  private static final String DECIMAL = "decimal";
+  private static final String UUID = "uuid";
+  private static final String DATE = "date";
+  private static final String TIME_MILLIS = "time-millis";
+  private static final String TIME_MICROS = "time-micros";
+  private static final String TIMESTAMP_MILLIS = "timestamp-millis";
+  private static final String TIMESTAMP_MICROS = "timestamp-micros";
+
+  /** Create a Decimal LogicalType with the given precision and a scale of zero. */
+  public static Decimal decimal(int precision) {
+    return decimal(precision, 0);
+  }
+
+  /** Create a Decimal LogicalType with the given precision and scale */
+  public static Decimal decimal(int precision, int scale) {
+    return new Decimal(precision, scale);
+  }
+
+  private static final LogicalType UUID_TYPE = new LogicalType("uuid");
+
+  public static LogicalType uuid() {
+    return UUID_TYPE;
+  }
+
+  private static final Date DATE_TYPE = new Date();
+
+  public static Date date() {
+    return DATE_TYPE;
+  }
+
+  private static final TimeMillis TIME_MILLIS_TYPE = new TimeMillis();
+
+  public static TimeMillis timeMillis() {
+    return TIME_MILLIS_TYPE;
+  }
+
+  private static final TimeMicros TIME_MICROS_TYPE = new TimeMicros();
+
+  public static TimeMicros timeMicros() {
+    return TIME_MICROS_TYPE;
+  }
+
+  private static final TimestampMillis TIMESTAMP_MILLIS_TYPE =
+      new TimestampMillis();
+
+  public static TimestampMillis timestampMillis() {
+    return TIMESTAMP_MILLIS_TYPE;
+  }
+
+  private static final TimestampMicros TIMESTAMP_MICROS_TYPE =
+      new TimestampMicros();
+
+  public static TimestampMicros timestampMicros() {
+    return TIMESTAMP_MICROS_TYPE;
+  }
+
+  /** Decimal represents arbitrary-precision fixed-scale decimal numbers. */
+  public static class Decimal extends LogicalType {
+    private static final String PRECISION_PROP = "precision";
+    private static final String SCALE_PROP = "scale";
+
+    private final int precision;
+    private final int scale;
+
+    private Decimal(int precision, int scale) {
+      super(DECIMAL);
+      this.precision = precision;
+      this.scale = scale;
+    }
+
+    private Decimal(Schema schema) {
+      super(DECIMAL);
+      if (!hasProperty(schema, PRECISION_PROP)) {
+        throw new IllegalArgumentException(
+            "Invalid decimal: missing precision");
+      }
+
+      this.precision = getInt(schema, PRECISION_PROP);
+
+      if (hasProperty(schema, SCALE_PROP)) {
+        this.scale = getInt(schema, SCALE_PROP);
+      } else {
+        this.scale = 0;
+      }
+    }
+
+    @Override
+    public Schema addToSchema(Schema schema) {
+      super.addToSchema(schema);
+      schema.addProp(PRECISION_PROP, precision);
+      schema.addProp(SCALE_PROP, scale);
+      return schema;
+    }
+
+    public int getPrecision() {
+      return precision;
+    }
+
+    public int getScale() {
+      return scale;
+    }
+
+    @Override
+    public void validate(Schema schema) {
+      super.validate(schema);
+      // validate the type
+      if (schema.getType() != Schema.Type.FIXED &&
+          schema.getType() != Schema.Type.BYTES) {
+        throw new IllegalArgumentException(
+            "Logical type decimal must be backed by fixed or bytes");
+      }
+      if (precision <= 0) {
+        throw new IllegalArgumentException("Invalid decimal precision: " +
+            precision + " (must be positive)");
+      } else if (precision > maxPrecision(schema)) {
+        throw new IllegalArgumentException(
+            "fixed(" + schema.getFixedSize() + ") cannot store " +
+                precision + " digits (max " + maxPrecision(schema) + ")");
+      }
+      if (scale < 0) {
+        throw new IllegalArgumentException("Invalid decimal scale: " +
+            scale + " (must not be negative)");
+      } else if (scale > precision) {
+        throw new IllegalArgumentException("Invalid decimal scale: " +
+            scale + " (greater than precision: " + precision + ")");
+      }
+    }
+
+    private long maxPrecision(Schema schema) {
+      if (schema.getType() == Schema.Type.BYTES) {
+        // not bounded
+        return Integer.MAX_VALUE;
+      } else if (schema.getType() == Schema.Type.FIXED) {
+        int size = schema.getFixedSize();
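+        // e.g. fixed(4) stores a signed two's-complement value of at most
+        // 2^31 - 1 = 2147483647, so floor(log10(2147483647)) = 9 digits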
+        return Math.round(          // convert double to long
+            Math.floor(Math.log10(  // number of base-10 digits
+                Math.pow(2, 8 * size - 1) - 1)  // max value stored
+            ));
+      } else {
+        // not valid for any other type
+        return 0;
+      }
+    }
+
+    private boolean hasProperty(Schema schema, String name) {
+      return (schema.getObjectProp(name) != null);
+    }
+
+    private int getInt(Schema schema, String name) {
+      Object obj = schema.getObjectProp(name);
+      if (obj instanceof Integer) {
+        return (Integer) obj;
+      }
+      throw new IllegalArgumentException("Expected int " + name + ": " +
+          (obj == null ? "null" : obj + ":" + obj.getClass().getSimpleName()));
+    }
+
+    @Override
+    public boolean equals(Object o) {
+      if (this == o) return true;
+      if (o == null || getClass() != o.getClass()) return false;
+
+      Decimal decimal = (Decimal) o;
+
+      if (precision != decimal.precision) return false;
+      if (scale != decimal.scale) return false;
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      int result = precision;
+      result = 31 * result + scale;
+      return result;
+    }
+  }
+
+  /** Date represents a date without a time */
+  public static class Date extends LogicalType {
+    private Date() {
+      super(DATE);
+    }
+
+    @Override
+    public void validate(Schema schema) {
+      super.validate(schema);
+      if (schema.getType() != Schema.Type.INT) {
+        throw new IllegalArgumentException(
+            "Date can only be used with an underlying int type");
+      }
+    }
+  }
+
+  /** TimeMillis represents a time in milliseconds without a date */
+  public static class TimeMillis extends LogicalType {
+    private TimeMillis() {
+      super(TIME_MILLIS);
+    }
+
+    @Override
+    public void validate(Schema schema) {
+      super.validate(schema);
+      if (schema.getType() != Schema.Type.INT) {
+        throw new IllegalArgumentException(
+            "Time (millis) can only be used with an underlying int type");
+      }
+    }
+  }
+
+  /** TimeMicros represents a time in microseconds without a date */
+  public static class TimeMicros extends LogicalType {
+    private TimeMicros() {
+      super(TIME_MICROS);
+    }
+
+    @Override
+    public void validate(Schema schema) {
+      super.validate(schema);
+      if (schema.getType() != Schema.Type.LONG) {
+        throw new IllegalArgumentException(
+            "Time (micros) can only be used with an underlying long type");
+      }
+    }
+  }
+
+  /** TimestampMillis represents a date and time in milliseconds */
+  public static class TimestampMillis extends LogicalType {
+    private TimestampMillis() {
+      super(TIMESTAMP_MILLIS);
+    }
+
+    @Override
+    public void validate(Schema schema) {
+      super.validate(schema);
+      if (schema.getType() != Schema.Type.LONG) {
+        throw new IllegalArgumentException(
+            "Timestamp (millis) can only be used with an underlying long type");
+      }
+    }
+  }
+
+  /** TimestampMicros represents a date and time in microseconds */
+  public static class TimestampMicros extends LogicalType {
+    private TimestampMicros() {
+      super(TIMESTAMP_MICROS);
+    }
+
+    @Override
+    public void validate(Schema schema) {
+      super.validate(schema);
+      if (schema.getType() != Schema.Type.LONG) {
+        throw new IllegalArgumentException(
+            "Timestamp (micros) can only be used with an underlying long type");
+      }
+    }
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/Protocol.java b/lang/java/avro/src/main/java/org/apache/avro/Protocol.java
new file mode 100644
index 0000000..03844f6
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/Protocol.java
@@ -0,0 +1,541 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.InputStream;
+import java.io.StringWriter;
+import java.io.IOException;
+import java.security.MessageDigest;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Set;
+import java.util.HashSet;
+
+import org.apache.avro.Schema.Field;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.node.TextNode;
+
+/** A set of messages forming an application protocol.
+ * <p> A protocol consists of:
+ * <ul>
+ * <li>a <i>name</i> for the protocol;
+ * <li>an optional <i>namespace</i>, further qualifying the name;
+ * <li>a list of <i>types</i>, or named {@link Schema schemas};
+ * <li>a list of <i>errors</i>, or named {@link Schema schemas} for exceptions;
+ * <li>a list of named <i>messages</i>, each of which specifies,
+ *   <ul>
+ *   <li><i>request</i>, the parameter schemas;
+ *   <li>either:
+ *     <ul><li>one-way</li></ul>
+ *   or
+ *     <ul>
+ *       <li><i>response</i>, the response schema;
+ *       <li><i>errors</i>, an optional list of potential error schema names.
+ *     </ul>
+ *   </ul>
+ * </ul>
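+ *
+ * <p>For illustration only (the names below are invented for the example),
+ * a minimal protocol in its JSON form looks like:
+ * <pre>
+ * {
+ *   "protocol": "HelloWorld",
+ *   "namespace": "org.example",
+ *   "types": [
+ *     {"type": "record", "name": "Greeting",
+ *      "fields": [{"name": "message", "type": "string"}]}
+ *   ],
+ *   "messages": {
+ *     "hello": {
+ *       "request": [{"name": "greeting", "type": "Greeting"}],
+ *       "response": "Greeting"
+ *     }
+ *   }
+ * }
+ * </pre>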
+ */
+public class Protocol extends JsonProperties {
+  /** The version of the protocol specification implemented here. */
+  public static final long VERSION = 1;
+
+  // Support properties for both Protocol and Message objects
+  private static final Set<String> MESSAGE_RESERVED = new HashSet<String>();
+  static {
+    Collections.addAll(MESSAGE_RESERVED,
+                       "doc", "response","request", "errors", "one-way");
+  }
+
+  /** A protocol message. */
+  public class Message extends JsonProperties {
+    private String name;
+    private String doc;
+    private Schema request;
+
+    /** Construct a message. */
+    private Message(String name, String doc,
+                    Map<String,?> propMap, Schema request) {
+      super(MESSAGE_RESERVED);
+      this.name = name;
+      this.doc = doc;
+      this.request = request;
+
+      if (propMap != null)                        // copy props
+        for (Map.Entry<String,?> prop : propMap.entrySet()) {
+          Object value = prop.getValue();
+          this.addProp(prop.getKey(),
+                       value instanceof String
+                       ? TextNode.valueOf((String)value)
+                       : (JsonNode)value);
+        }
+    }
+
+    /** The name of this message. */
+    public String getName() { return name; }
+    /** The parameters of this message. */
+    public Schema getRequest() { return request; }
+    /** The returned data. */
+    public Schema getResponse() { return Schema.create(Schema.Type.NULL); }
+    /** Errors that might be thrown. */
+    public Schema getErrors() {
+      return Schema.createUnion(new ArrayList<Schema>());
+    }
+    
+    /** Returns true if this is a one-way message, with no response or errors. */
+    public boolean isOneWay() { return true; }
+
+    public String toString() {
+      try {
+        StringWriter writer = new StringWriter();
+        JsonGenerator gen = Schema.FACTORY.createJsonGenerator(writer);
+        toJson(gen);
+        gen.flush();
+        return writer.toString();
+      } catch (IOException e) {
+        throw new AvroRuntimeException(e);
+      }
+    }
+    void toJson(JsonGenerator gen) throws IOException {
+      gen.writeStartObject();
+      if (doc != null) gen.writeStringField("doc", doc);
+      writeProps(gen);                           // write out properties
+      gen.writeFieldName("request");
+      request.fieldsToJson(types, gen);
+
+      toJson1(gen);
+      gen.writeEndObject();
+    }
+
+    void toJson1(JsonGenerator gen) throws IOException {
+      gen.writeStringField("response", "null");
+      gen.writeBooleanField("one-way", true);
+    }
+
+    public boolean equals(Object o) {
+      if (o == this) return true;
+      if (!(o instanceof Message)) return false;
+      Message that = (Message)o;
+      return this.name.equals(that.name)
+        && this.request.equals(that.request)
+        && props.equals(that.props);
+    }
+
+    public int hashCode() {
+      return name.hashCode() + request.hashCode() + props.hashCode();
+    }
+
+    public String getDoc() { return doc; }
+
+  }
+
+  private class TwoWayMessage extends Message {
+    private Schema response;
+    private Schema errors;
+    
+    /** Construct a message. */
+    private TwoWayMessage(String name, String doc, Map<String,?> propMap,
+                          Schema request, Schema response, Schema errors) {
+      super(name, doc, propMap, request);
+      this.response = response;
+      this.errors = errors;
+    }
+
+    @Override public Schema getResponse() { return response; }
+    @Override public Schema getErrors() { return errors; }
+    @Override public boolean isOneWay() { return false; }
+
+    @Override public boolean equals(Object o) {
+      if (!super.equals(o)) return false;
+      if (!(o instanceof TwoWayMessage)) return false;
+      TwoWayMessage that = (TwoWayMessage)o;
+      return this.response.equals(that.response)
+        && this.errors.equals(that.errors);
+    }
+
+    @Override public int hashCode() {
+      return super.hashCode() + response.hashCode() + errors.hashCode();
+    }
+
+    @Override void toJson1(JsonGenerator gen) throws IOException {
+      gen.writeFieldName("response");
+      response.toJson(types, gen);
+
+      List<Schema> errs = errors.getTypes();  // elide system error
+      if (errs.size() > 1) {
+        Schema union = Schema.createUnion(errs.subList(1, errs.size()));
+        gen.writeFieldName("errors");
+        union.toJson(types, gen);
+      }
+    }
+
+  }
+
+  private String name;
+  private String namespace;
+  private String doc;
+
+  private Schema.Names types = new Schema.Names();
+  private Map<String,Message> messages = new LinkedHashMap<String,Message>();
+  private byte[] md5;
+
+  /** An error that can be thrown by any message. */
+  public static final Schema SYSTEM_ERROR = Schema.create(Schema.Type.STRING);
+
+  /** Union type for generating system errors. */
+  public static final Schema SYSTEM_ERRORS;
+  static {
+    List<Schema> errors = new ArrayList<Schema>();
+    errors.add(SYSTEM_ERROR);
+    SYSTEM_ERRORS = Schema.createUnion(errors);
+  }
+
+  private static final Set<String> PROTOCOL_RESERVED = new HashSet<String>();
+  static {
+    Collections.addAll(PROTOCOL_RESERVED,
+       "namespace", "protocol", "doc",
+       "messages","types", "errors");
+  }
+
+  private Protocol() {
+    super(PROTOCOL_RESERVED);
+  }
+
+  public Protocol(String name, String doc, String namespace) {
+    super(PROTOCOL_RESERVED);
+    this.name = name;
+    this.doc = doc;
+    this.namespace = namespace;
+  }
+  public Protocol(String name, String namespace) {
+    this(name, null, namespace);
+  }
+
+  /** The name of this protocol. */
+  public String getName() { return name; }
+
+  /** The namespace of this protocol.  Qualifies its name. */
+  public String getNamespace() { return namespace; }
+  
+  /** Doc string for this protocol. */
+  public String getDoc() { return doc; }
+
+  /** The types of this protocol. */
+  public Collection<Schema> getTypes() { return types.values(); }
+
+  /** Returns the named type. */
+  public Schema getType(String name) { return types.get(name); }
+
+  /** Set the types of this protocol. */
+  public void setTypes(Collection<Schema> newTypes) {
+    types = new Schema.Names();
+    for (Schema s : newTypes)
+      types.add(s);
+  }
+
+  /** The messages of this protocol. */
+  public Map<String,Message> getMessages() { return messages; }
+
+  /** Create a one-way message. */
+  @Deprecated
+  public Message createMessage(String name, String doc, Schema request) {
+    return createMessage(name, doc, new LinkedHashMap<String,String>(), request);
+  }
+  /** Create a one-way message. */
+  public <T> Message createMessage(String name, String doc,
+                                   Map<String,T> propMap, Schema request) {
+    return new Message(name, doc, propMap, request);
+  }
+
+  /** Create a two-way message. */
+  @Deprecated
+  public Message createMessage(String name, String doc, Schema request,
+                               Schema response, Schema errors) {
+    return createMessage(name, doc, new LinkedHashMap<String,String>(),
+                         request, response, errors);
+  }
+  /** Create a two-way message. */
+  public <T> Message createMessage(String name, String doc,
+                                   Map<String,T> propMap, Schema request,
+                                   Schema response, Schema errors) {
+    return new TwoWayMessage(name, doc, propMap, request, response, errors);
+  }
+
+  public boolean equals(Object o) {
+    if (o == this) return true;
+    if (!(o instanceof Protocol)) return false;
+    Protocol that = (Protocol)o;
+    return this.name.equals(that.name)
+      && this.namespace.equals(that.namespace)
+      && this.types.equals(that.types)
+      && this.messages.equals(that.messages)
+      && this.props.equals(that.props);
+  }
+  
+  public int hashCode() {
+    return name.hashCode() + namespace.hashCode()
+      + types.hashCode() + messages.hashCode() + props.hashCode();
+  }
+
+  /** Render this as <a href="http://json.org/">JSON</a>.*/
+  @Override
+  public String toString() { return toString(false); }
+
+  /** Render this as <a href="http://json.org/">JSON</a>.
+   * @param pretty if true, pretty-print JSON.
+   */
+  public String toString(boolean pretty) {
+    try {
+      StringWriter writer = new StringWriter();
+      JsonGenerator gen = Schema.FACTORY.createJsonGenerator(writer);
+      if (pretty) gen.useDefaultPrettyPrinter();
+      toJson(gen);
+      gen.flush();
+      return writer.toString();
+    } catch (IOException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+  void toJson(JsonGenerator gen) throws IOException {
+    types.space(namespace);
+
+    gen.writeStartObject();
+    gen.writeStringField("protocol", name);
+    gen.writeStringField("namespace", namespace);
+
+    if (doc != null) gen.writeStringField("doc", doc);
+    writeProps(gen);
+    gen.writeArrayFieldStart("types");
+    Schema.Names resolved = new Schema.Names(namespace);
+    for (Schema type : types.values())
+      if (!resolved.contains(type))
+        type.toJson(resolved, gen);
+    gen.writeEndArray();
+    
+    gen.writeObjectFieldStart("messages");
+    for (Map.Entry<String,Message> e : messages.entrySet()) {
+      gen.writeFieldName(e.getKey());
+      e.getValue().toJson(gen);
+    }
+    gen.writeEndObject();
+    gen.writeEndObject();
+  }
+
+  /** Return the MD5 hash of the text of this protocol. */
+  public byte[] getMD5() {
+    if (md5 == null)
+      try {
+        md5 = MessageDigest.getInstance("MD5")
+          .digest(this.toString().getBytes("UTF-8"));
+      } catch (Exception e) {
+        throw new AvroRuntimeException(e);
+      }
+    return md5;
+  }
+
+  /** Read a protocol from a JSON file. */
+  public static Protocol parse(File file) throws IOException {
+    return parse(Schema.FACTORY.createJsonParser(file));
+  }
+
+  /** Read a protocol from a JSON stream. */
+  public static Protocol parse(InputStream stream) throws IOException {
+    return parse(Schema.FACTORY.createJsonParser(stream));
+  }
+
+  /** Read a protocol from one or more JSON strings. */
+  public static Protocol parse(String string, String... more) {
+    StringBuilder b = new StringBuilder(string);
+    for (String part : more)
+      b.append(part);
+    return parse(b.toString());
+  }
+      
+  /** Read a protocol from a JSON string. */
+  public static Protocol parse(String string) {
+    try {
+      return parse(Schema.FACTORY.createJsonParser
+                   (new ByteArrayInputStream(string.getBytes("UTF-8"))));
+    } catch (IOException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
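+
+  // Illustrative sketch (invented names): a protocol with no types or
+  // messages is accepted, so the smallest parseable input is e.g.
+  //   Protocol p = Protocol.parse(
+  //       "{\"protocol\": \"Ping\", \"namespace\": \"org.example\"}");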
+
+  private static Protocol parse(JsonParser parser) {
+    try {
+      Protocol protocol = new Protocol();
+      protocol.parse(Schema.MAPPER.readTree(parser));
+      return protocol;
+    } catch (IOException e) {
+      throw new SchemaParseException(e);
+    }
+  }
+
+  private void parse(JsonNode json) {
+    parseNamespace(json);
+    parseName(json);
+    parseTypes(json);
+    parseMessages(json);
+    parseDoc(json);
+    parseProps(json);
+  }
+
+  private void parseNamespace(JsonNode json) {
+    JsonNode nameNode = json.get("namespace");
+    if (nameNode == null) return;                 // no namespace defined
+    this.namespace = nameNode.getTextValue();
+    types.space(this.namespace);
+  }
+  
+  private void parseDoc(JsonNode json) {
+    this.doc = parseDocNode(json);
+  }
+
+  private String parseDocNode(JsonNode json) {
+    JsonNode nameNode = json.get("doc");
+    if (nameNode == null) return null;                 // no doc defined
+    return nameNode.getTextValue();
+  }
+
+  private void parseName(JsonNode json) {
+    JsonNode nameNode = json.get("protocol");
+    if (nameNode == null)
+      throw new SchemaParseException("No protocol name specified: "+json);
+    this.name = nameNode.getTextValue();
+  }
+
+  private void parseTypes(JsonNode json) {
+    JsonNode defs = json.get("types");
+    if (defs == null) return;                    // no types defined
+    if (!defs.isArray())
+      throw new SchemaParseException("Types not an array: "+defs);
+    for (JsonNode type : defs) {
+      if (!type.isObject())
+        throw new SchemaParseException("Type not an object: "+type);
+      Schema.parse(type, types);
+    }
+  }
+
+  private void parseProps(JsonNode json) {
+    for (Iterator<String> i = json.getFieldNames(); i.hasNext();) {
+      String p = i.next();                        // add non-reserved as props
+      if (!PROTOCOL_RESERVED.contains(p))
+        this.addProp(p, json.get(p));
+    }
+  }
+
+  private void parseMessages(JsonNode json) {
+    JsonNode defs = json.get("messages");
+    if (defs == null) return;                    // no messages defined
+    for (Iterator<String> i = defs.getFieldNames(); i.hasNext();) {
+      String prop = i.next();
+      this.messages.put(prop, parseMessage(prop, defs.get(prop)));
+    }
+  }
+
+  private Message parseMessage(String messageName, JsonNode json) {
+    String doc = parseDocNode(json);
+
+    Map<String,JsonNode> mProps = new LinkedHashMap<String,JsonNode>();
+    for (Iterator<String> i = json.getFieldNames(); i.hasNext();) {
+      String p = i.next();                        // add non-reserved as props
+      if (!MESSAGE_RESERVED.contains(p))
+        mProps.put(p, json.get(p));
+    }
+
+    JsonNode requestNode = json.get("request");
+    if (requestNode == null || !requestNode.isArray())
+      throw new SchemaParseException("No request specified: "+json);
+    List<Field> fields = new ArrayList<Field>();
+    for (JsonNode field : requestNode) {
+      JsonNode fieldNameNode = field.get("name");
+      if (fieldNameNode == null)
+        throw new SchemaParseException("No param name: "+field);
+      JsonNode fieldTypeNode = field.get("type");
+      if (fieldTypeNode == null)
+        throw new SchemaParseException("No param type: "+field);
+      String name = fieldNameNode.getTextValue();
+      String fieldDoc = null;
+      JsonNode fieldDocNode = field.get("doc");
+      if (fieldDocNode != null)
+        fieldDoc = fieldDocNode.getTextValue();
+      fields.add(new Field(name, Schema.parse(fieldTypeNode,types),
+                           fieldDoc,
+                           field.get("default")));
+    }
+    Schema request = Schema.createRecord(fields);
+    
+    boolean oneWay = false;
+    JsonNode oneWayNode = json.get("one-way");
+    if (oneWayNode != null) {
+      if (!oneWayNode.isBoolean())
+        throw new SchemaParseException("one-way must be boolean: "+json);
+      oneWay = oneWayNode.getBooleanValue();
+    }
+
+    JsonNode responseNode = json.get("response");
+    if (!oneWay && responseNode == null)
+      throw new SchemaParseException("No response specified: "+json);
+
+    JsonNode decls = json.get("errors");
+
+    if (oneWay) {
+      if (decls != null)
+        throw new SchemaParseException("one-way can't have errors: "+json);
+      if (responseNode != null
+          && Schema.parse(responseNode, types).getType() != Schema.Type.NULL)
+        throw new SchemaParseException("One way response must be null: "+json);
+      return new Message(messageName, doc, mProps, request);
+    }
+
+    Schema response = Schema.parse(responseNode, types);
+
+    List<Schema> errs = new ArrayList<Schema>();
+    errs.add(SYSTEM_ERROR);                       // every method can throw
+    if (decls != null) {
+      if (!decls.isArray())
+        throw new SchemaParseException("Errors not an array: "+json);
+      for (JsonNode decl : decls) {
+        String name = decl.getTextValue();
+        Schema schema = this.types.get(name);
+        if (schema == null)
+          throw new SchemaParseException("Undefined error: "+name);
+        if (!schema.isError())
+          throw new SchemaParseException("Not an error: "+name);
+        errs.add(schema);
+      }
+    }
+
+    return new TwoWayMessage(messageName, doc, mProps, request, response,
+                             Schema.createUnion(errs));
+  }
+
+  public static void main(String[] args) throws Exception {
+    System.out.println(Protocol.parse(new File(args[0])));
+  }
+
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/Schema.java b/lang/java/avro/src/main/java/org/apache/avro/Schema.java
new file mode 100644
index 0000000..75d8dd9
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/Schema.java
@@ -0,0 +1,1617 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.IdentityHashMap;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.avro.util.internal.JacksonUtils;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.JsonParseException;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.node.DoubleNode;
+
+/** An abstract data type.
+ * <p>A schema may be one of:
+ * <ul>
+ * <li>A <i>record</i>, mapping field names to field value data;
+ * <li>An <i>enum</i>, containing one of a small set of symbols;
+ * <li>An <i>array</i> of values, all of the same schema;
+ * <li>A <i>map</i>, containing string/value pairs, of a declared schema;
+ * <li>A <i>union</i> of other schemas;
+ * <li>A <i>fixed</i> sized binary object;
+ * <li>A unicode <i>string</i>;
+ * <li>A sequence of <i>bytes</i>;
+ * <li>A 32-bit signed <i>int</i>;
+ * <li>A 64-bit signed <i>long</i>;
+ * <li>A 32-bit IEEE single-precision <i>float</i>;
+ * <li>A 64-bit IEEE double-precision <i>double</i>;
+ * <li>A <i>boolean</i>; or
+ * <li><i>null</i>.
+ * </ul>
+ * 
+ * A schema can be constructed using one of its static <tt>createXXX</tt>
+ * methods, or more conveniently using {@link SchemaBuilder}. The schema objects are
+ * <i>logically</i> immutable.
+ * There are only two mutating methods, {@link #setFields(List)} and
+ * {@link #addProp(String, String)}, and the following restrictions apply to
+ * them:
+ * <ul>
+ * <li> {@link #setFields(List)} can be called at most once. This method exists
+ * in order to enable clients to build recursive schemas.
+ * <li> {@link #addProp(String, String)} can only be called with property names
+ * that are not already present. It is not possible to change or delete an
+ * existing property.
+ * </ul>
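+ *
+ * <p>As an illustrative sketch (invented names, and assuming
+ * <tt>java.util.Arrays</tt> is imported), a record schema with one field can
+ * be built either way:
+ * <pre>
+ * Schema viaCreate = Schema.createRecord("User", null, "org.example", false);
+ * viaCreate.setFields(Arrays.asList(
+ *     new Field("name", Schema.create(Type.STRING), null, (Object) null)));
+ *
+ * Schema viaBuilder = SchemaBuilder.record("User").namespace("org.example")
+ *     .fields().requiredString("name").endRecord();
+ * </pre>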
+ */
+public abstract class Schema extends JsonProperties {
+  static final JsonFactory FACTORY = new JsonFactory();
+  static final ObjectMapper MAPPER = new ObjectMapper(FACTORY);
+
+  private static final int NO_HASHCODE = Integer.MIN_VALUE;
+
+  static {
+    FACTORY.enable(JsonParser.Feature.ALLOW_COMMENTS);
+    FACTORY.setCodec(MAPPER);
+  }
+
+  /** The type of a schema. */
+  public enum Type {
+    RECORD, ENUM, ARRAY, MAP, UNION, FIXED, STRING, BYTES,
+      INT, LONG, FLOAT, DOUBLE, BOOLEAN, NULL;
+    private String name;
+    private Type() { this.name = this.name().toLowerCase(); }
+    public String getName() { return name; }
+  };
+
+  private final Type type;
+  private LogicalType logicalType = null;
+
+  Schema(Type type) {
+    super(SCHEMA_RESERVED);
+    this.type = type;
+  }
+
+  /** Create a schema for a primitive type. */
+  public static Schema create(Type type) {
+    switch (type) {
+    case STRING:  return new StringSchema();
+    case BYTES:   return new BytesSchema();
+    case INT:     return new IntSchema();
+    case LONG:    return new LongSchema();
+    case FLOAT:   return new FloatSchema();
+    case DOUBLE:  return new DoubleSchema();
+    case BOOLEAN: return new BooleanSchema();
+    case NULL:    return new NullSchema();
+    default: throw new AvroRuntimeException("Can't create a: "+type);
+    }
+  }
+
+  private static final Set<String> SCHEMA_RESERVED = new HashSet<String>();
+  static {
+    Collections.addAll(SCHEMA_RESERVED,
+                       "doc", "fields", "items", "name", "namespace",
+                       "size", "symbols", "values", "type", "aliases");
+  }
+
+  int hashCode = NO_HASHCODE;
+
+  @Override public void addProp(String name, JsonNode value) {
+    super.addProp(name, value);
+    hashCode = NO_HASHCODE;
+  }
+
+  @Override public void addProp(String name, Object value) {
+    super.addProp(name, value);
+    hashCode = NO_HASHCODE;
+  }
+
+  public LogicalType getLogicalType() {
+    return logicalType;
+  }
+
+  void setLogicalType(LogicalType logicalType) {
+    this.logicalType = logicalType;
+  }
+
+  /** Create an anonymous record schema. */
+  public static Schema createRecord(List<Field> fields) {
+    Schema result = createRecord(null, null, null, false);
+    result.setFields(fields);
+    return result;
+  }
+
+  /** Create a named record schema. */
+  public static Schema createRecord(String name, String doc, String namespace,
+                                    boolean isError) {
+    return new RecordSchema(new Name(name, namespace), doc, isError);
+  }
+
+  /** Create a named record schema with fields already set. */
+  public static Schema createRecord(String name, String doc, String namespace,
+                                    boolean isError, List<Field> fields) {
+    return new RecordSchema(new Name(name, namespace), doc, isError, fields);
+  }
+
+  /** Create an enum schema. */
+  public static Schema createEnum(String name, String doc, String namespace,
+                                  List<String> values) {
+    return new EnumSchema(new Name(name, namespace), doc,
+        new LockableArrayList<String>(values));
+  }
+
+  /** Create an array schema. */
+  public static Schema createArray(Schema elementType) {
+    return new ArraySchema(elementType);
+  }
+
+  /** Create a map schema. */
+  public static Schema createMap(Schema valueType) {
+    return new MapSchema(valueType);
+  }
+
+  /** Create a union schema. */
+  public static Schema createUnion(List<Schema> types) {
+    return new UnionSchema(new LockableArrayList<Schema>(types));
+  }
+
+  /** Create a union schema. */
+  public static Schema createUnion(Schema... types) {
+    return createUnion(new LockableArrayList<Schema>(types));
+  }
+
+  /** Create a fixed schema. */
+  public static Schema createFixed(String name, String doc, String space,
+      int size) {
+    return new FixedSchema(new Name(name, space), doc, size);
+  }
+
+  /** Return the type of this schema. */
+  public Type getType() { return type; }
+
+  /**
+   * If this is a record, returns the Field with the
+   * given name <tt>fieldname</tt>. If there is no field by that name,
+   * <tt>null</tt> is returned.
+   */
+  public Field getField(String fieldname) {
+    throw new AvroRuntimeException("Not a record: "+this);
+  }
+
+  /**
+   * If this is a record, returns the fields in it. The returned
+   * list is in the order of their positions.
+   */
+  public List<Field> getFields() {
+    throw new AvroRuntimeException("Not a record: "+this);
+  }
+
+  /**
+   * If this is a record, set its fields. The fields can be set
+   * only once in a schema.
+   */
+  public void setFields(List<Field> fields) {
+    throw new AvroRuntimeException("Not a record: "+this);
+  }
+
+  /** If this is an enum, return its symbols. */
+  public List<String> getEnumSymbols() {
+    throw new AvroRuntimeException("Not an enum: "+this);
+  }    
+
+  /** If this is an enum, return a symbol's ordinal value. */
+  public int getEnumOrdinal(String symbol) {
+    throw new AvroRuntimeException("Not an enum: "+this);
+  }    
+  
+  /** If this is an enum, returns true if it contains the given symbol. */
+  public boolean hasEnumSymbol(String symbol) {
+    throw new AvroRuntimeException("Not an enum: "+this);
+  }
+
+  /** If this is a record, enum or fixed, returns its name, otherwise the name
+   * of the primitive type. */
+  public String getName() { return type.name; }
+
+  /** If this is a record, enum, or fixed, returns its docstring,
+   * if available.  Otherwise, returns null. */
+  public String getDoc() {
+    return null;
+  }
+
+  /** If this is a record, enum or fixed, returns its namespace, if any. */
+  public String getNamespace() {
+    throw new AvroRuntimeException("Not a named type: "+this);
+  }
+
+  /** If this is a record, enum or fixed, returns its namespace-qualified name,
+   * otherwise returns the name of the primitive type. */
+  public String getFullName() {
+    return getName();
+  }
+
+  /** If this is a record, enum or fixed, add an alias. */
+  public void addAlias(String alias) {
+    throw new AvroRuntimeException("Not a named type: "+this);
+  }
+
+  /** If this is a record, enum or fixed, add an alias. */
+  public void addAlias(String alias, String space) {
+    throw new AvroRuntimeException("Not a named type: "+this);
+  }
+
+  /** If this is a record, enum or fixed, return its aliases, if any. */
+  public Set<String> getAliases() {
+    throw new AvroRuntimeException("Not a named type: "+this);
+  }
+
+  /** Returns true if this record is an error type. */
+  public boolean isError() {
+    throw new AvroRuntimeException("Not a record: "+this);
+  }
+
+  /** If this is an array, returns its element type. */
+  public Schema getElementType() {
+    throw new AvroRuntimeException("Not an array: "+this);
+  }
+
+  /** If this is a map, returns its value type. */
+  public Schema getValueType() {
+    throw new AvroRuntimeException("Not a map: "+this);
+  }
+
+  /** If this is a union, returns its types. */
+  public List<Schema> getTypes() {
+    throw new AvroRuntimeException("Not a union: "+this);
+  }
+
+  /** If this is a union, return the index of the branch with the provided
+   * full name. */
+  public Integer getIndexNamed(String name) {
+    throw new AvroRuntimeException("Not a union: "+this);
+  }
+
+  /** If this is fixed, returns its size. */
+  public int getFixedSize() {
+    throw new AvroRuntimeException("Not fixed: "+this);
+  }
+
+  /** Render this as <a href="http://json.org/">JSON</a>.*/
+  @Override
+  public String toString() { return toString(false); }
+
+  /** Render this as <a href="http://json.org/">JSON</a>.
+   * @param pretty if true, pretty-print JSON.
+   */
+  public String toString(boolean pretty) {
+    try {
+      StringWriter writer = new StringWriter();
+      JsonGenerator gen = FACTORY.createJsonGenerator(writer);
+      if (pretty) gen.useDefaultPrettyPrinter();
+      toJson(new Names(), gen);
+      gen.flush();
+      return writer.toString();
+    } catch (IOException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  void toJson(Names names, JsonGenerator gen) throws IOException {
+    if (props.size() == 0) {                      // no props defined
+      gen.writeString(getName());                 // just write name
+    } else {
+      gen.writeStartObject();
+      gen.writeStringField("type", getName());
+      writeProps(gen);
+      gen.writeEndObject();
+    }
+  }
+
+  void fieldsToJson(Names names, JsonGenerator gen) throws IOException {
+    throw new AvroRuntimeException("Not a record: "+this);
+  }
+
+  public boolean equals(Object o) {
+    if (o == this) return true;
+    if (!(o instanceof Schema)) return false;
+    Schema that = (Schema)o;
+    if (!(this.type == that.type)) return false;
+    return equalCachedHash(that) && props.equals(that.props);
+  }
+  public final int hashCode() {
+    if (hashCode == NO_HASHCODE)
+      hashCode = computeHash();
+    return hashCode;
+  }
+
+  int computeHash() { return getType().hashCode() + props.hashCode(); }
+
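+  /* Cheap inequality filter used by the equals() implementations below: if
+   * both schemas have already computed their hash codes and they differ, the
+   * schemas cannot be equal; when either side is still NO_HASHCODE the test
+   * is inconclusive and conservatively returns true. */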
+  final boolean equalCachedHash(Schema other) {
+    return (hashCode == other.hashCode)
+           || (hashCode == NO_HASHCODE)
+           || (other.hashCode == NO_HASHCODE);
+  }
+
+  private static final Set<String> FIELD_RESERVED = new HashSet<String>();
+  static {
+    Collections.addAll(FIELD_RESERVED,
+                       "default","doc","name","order","type","aliases");
+  }
+
+  /** A field within a record. */
+  public static class Field extends JsonProperties {
+
+    /** How values of this field should be ordered when sorting records. */
+    public enum Order {
+      ASCENDING, DESCENDING, IGNORE;
+      private String name;
+      private Order() { this.name = this.name().toLowerCase(); }
+    };
+
+    private final String name;    // name of the field.
+    private int position = -1;
+    private final Schema schema;
+    private final String doc;
+    private final JsonNode defaultValue;
+    private final Order order;
+    private Set<String> aliases;
+
+    /** @deprecated use {@link #Field(String, Schema, String, Object)} */
+    @Deprecated
+    public Field(String name, Schema schema, String doc,
+        JsonNode defaultValue) {
+      this(name, schema, doc, defaultValue, Order.ASCENDING);
+    }
+    /** @deprecated use {@link #Field(String, Schema, String, Object, Order)} */
+    @Deprecated
+    public Field(String name, Schema schema, String doc,
+        JsonNode defaultValue, Order order) {
+      super(FIELD_RESERVED);
+      this.name = validateName(name);
+      this.schema = schema;
+      this.doc = doc;
+      this.defaultValue = validateDefault(name, schema, defaultValue);
+      this.order = order;
+    }
+    /**
+     * @param defaultValue the default value for this field specified using the mapping
+     *  in {@link JsonProperties}
+     */
+    public Field(String name, Schema schema, String doc,
+        Object defaultValue) {
+      this(name, schema, doc, defaultValue, Order.ASCENDING);
+    }
+    /**
+     * @param defaultValue the default value for this field specified using the mapping
+     *  in {@link JsonProperties}
+     */
+    public Field(String name, Schema schema, String doc,
+        Object defaultValue, Order order) {
+      this(name, schema, doc, JacksonUtils.toJsonNode(defaultValue), order);
+    }
+    public String name() { return name; }
+    /** The position of this field within the record. */
+    public int pos() { return position; }
+    /** This field's {@link Schema}. */
+    public Schema schema() { return schema; }
+    /** Field's documentation within the record, if set. May return null. */
+    public String doc() { return doc; }
+    /** @deprecated use {@link #defaultVal() } */
+    @Deprecated public JsonNode defaultValue() { return defaultValue; }
+    /**
+     * @return the default value for this field specified using the mapping
+     *  in {@link JsonProperties}
+     */
+    public Object defaultVal() { return JacksonUtils.toObject(defaultValue); }
+    public Order order() { return order; }
+    @Deprecated public Map<String,String> props() { return getProps(); }
+    public void addAlias(String alias) {
+      if (aliases == null)
+        this.aliases = new LinkedHashSet<String>();
+      aliases.add(alias);
+    }
+    /** Return the defined aliases as an unmodifiable Set. */
+    public Set<String> aliases() {
+      if (aliases == null)
+        return Collections.emptySet();
+      return Collections.unmodifiableSet(aliases);
+    }
+    public boolean equals(Object other) {
+      if (other == this) return true;
+      if (!(other instanceof Field)) return false;
+      Field that = (Field) other;
+      return (name.equals(that.name)) &&
+        (schema.equals(that.schema)) &&
+        defaultValueEquals(that.defaultValue) &&
+        (order == that.order) &&
+        props.equals(that.props);
+    }
+    public int hashCode() { return name.hashCode() + schema.computeHash(); }
+    
+    private boolean defaultValueEquals(JsonNode thatDefaultValue) {
+      if (defaultValue == null)
+        return thatDefaultValue == null;
+      if (thatDefaultValue == null)
+        return false;
+      if (Double.isNaN(defaultValue.getDoubleValue()))
+        return Double.isNaN(thatDefaultValue.getDoubleValue());
+      return defaultValue.equals(thatDefaultValue);
+    }
+
+    @Override
+    public String toString() {
+      return name + " type:" + schema.type + " pos:" + position;
+    }
+  }
+
+  static class Name {
+    private final String name;
+    private final String space;
+    private final String full;
+    public Name(String name, String space) {
+      if (name == null) {                         // anonymous
+        this.name = this.space = this.full = null;
+        return;
+      }
+      int lastDot = name.lastIndexOf('.');
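+      // e.g. "org.example.User" yields space "org.example" and name "User"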
+      if (lastDot < 0) {                          // unqualified name
+        this.name = validateName(name);
+      } else {                                    // qualified name
+        space = name.substring(0, lastDot);       // get space from name
+        this.name = validateName(name.substring(lastDot+1, name.length()));
+      }
+      if ("".equals(space))
+        space = null;
+      this.space = space;
+      this.full = (this.space == null) ? this.name : this.space+"."+this.name;
+    }
+    public boolean equals(Object o) {
+      if (o == this) return true;
+      if (!(o instanceof Name)) return false;
+      Name that = (Name)o;
+      return full==null ? that.full==null : full.equals(that.full);
+    }
+    public int hashCode() {
+      return full==null ? 0 : full.hashCode();
+    }
+    public String toString() { return full; }
+    public void writeName(Names names, JsonGenerator gen) throws IOException {
+      if (name != null) gen.writeStringField("name", name);
+      if (space != null) {
+        if (!space.equals(names.space()))
+          gen.writeStringField("namespace", space);
+      } else if (names.space() != null) {         // null within non-null
+        gen.writeStringField("namespace", "");
+      }
+    }
+    public String getQualified(String defaultSpace) {
+      return (space == null || space.equals(defaultSpace)) ? name : full;
+    }
+  }
+
+  private static abstract class NamedSchema extends Schema {
+    final Name name;
+    final String doc;
+    Set<Name> aliases;
+    public NamedSchema(Type type, Name name, String doc) {
+      super(type);
+      this.name = name;
+      this.doc = doc;
+      if (PRIMITIVES.containsKey(name.full)) {
+        throw new AvroTypeException("Schemas may not be named after primitives: " + name.full);
+      }
+    }
+    public String getName() { return name.name; }
+    public String getDoc() { return doc; }
+    public String getNamespace() { return name.space; }
+    public String getFullName() { return name.full; }
+    public void addAlias(String alias) {
+      addAlias(alias, null);
+    }
+    public void addAlias(String name, String space) {
+      if (aliases == null)
+        this.aliases = new LinkedHashSet<Name>();
+      if (space == null)
+        space = this.name.space;
+      aliases.add(new Name(name, space));
+    }
+    public Set<String> getAliases() {
+      Set<String> result = new LinkedHashSet<String>();
+      if (aliases != null)
+        for (Name alias : aliases)
+          result.add(alias.full);
+      return result;
+    }
+    public boolean writeNameRef(Names names, JsonGenerator gen)
+      throws IOException {
+      if (this.equals(names.get(name))) {
+        gen.writeString(name.getQualified(names.space()));
+        return true;
+      } else if (name.name != null) {
+        names.put(name, this);
+      }
+      return false;
+    }
+    public void writeName(Names names, JsonGenerator gen) throws IOException {
+      name.writeName(names, gen);
+    }
+    public boolean equalNames(NamedSchema that) {
+      return this.name.equals(that.name);
+    }
+    @Override int computeHash() {
+      return super.computeHash() + name.hashCode();
+    }
+    public void aliasesToJson(JsonGenerator gen) throws IOException {
+      if (aliases == null || aliases.size() == 0) return;
+      gen.writeFieldName("aliases");
+      gen.writeStartArray();
+      for (Name alias : aliases)
+        gen.writeString(alias.getQualified(name.space));
+      gen.writeEndArray();
+    }
+
+  }
+
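+  /* SeenPair and the SEEN_EQUALS/SEEN_HASHCODE thread-locals below guard the
+   * recursive equals()/computeHash() of record schemas: a pair of schemas
+   * already under comparison on the current thread is assumed equal (and a
+   * schema already being hashed contributes 0), which terminates the
+   * recursion for self-referential record types. */
+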
+  private static class SeenPair {
+    private Object s1; private Object s2;
+    private SeenPair(Object s1, Object s2) { this.s1 = s1; this.s2 = s2; }
+    public boolean equals(Object o) {
+      return this.s1 == ((SeenPair)o).s1 && this.s2 == ((SeenPair)o).s2;
+    }
+    public int hashCode() {
+      return System.identityHashCode(s1) + System.identityHashCode(s2);
+    }
+  }
+
+  private static final ThreadLocal<Set> SEEN_EQUALS = new ThreadLocal<Set>() {
+    protected Set initialValue() { return new HashSet(); }
+  };
+  private static final ThreadLocal<Map> SEEN_HASHCODE = new ThreadLocal<Map>() {
+    protected Map initialValue() { return new IdentityHashMap(); }
+  };
+
+  @SuppressWarnings(value="unchecked")
+  private static class RecordSchema extends NamedSchema {
+    private List<Field> fields;
+    private Map<String, Field> fieldMap;
+    private final boolean isError;
+    public RecordSchema(Name name, String doc, boolean isError) {
+      super(Type.RECORD, name, doc);
+      this.isError = isError;
+    }
+
+    public RecordSchema(Name name, String doc, boolean isError,
+                        List<Field> fields) {
+      super(Type.RECORD, name, doc);
+      this.isError = isError;
+      setFields(fields);
+    }
+
+    public boolean isError() { return isError; }
+
+    @Override
+    public Field getField(String fieldname) {
+      if (fieldMap == null)
+        throw new AvroRuntimeException("Schema fields not set yet");
+      return fieldMap.get(fieldname);
+    }
+
+    @Override
+    public List<Field> getFields() {
+      if (fields == null)
+        throw new AvroRuntimeException("Schema fields not set yet");
+      return fields;
+    }
+
+    @Override
+    public void setFields(List<Field> fields) {
+      if (this.fields != null) {
+        throw new AvroRuntimeException("Fields are already set");
+      }
+      int i = 0;
+      fieldMap = new HashMap<String, Field>();
+      LockableArrayList ff = new LockableArrayList();
+      for (Field f : fields) {
+        if (f.position != -1)
+          throw new AvroRuntimeException("Field already used: " + f);
+        f.position = i++;
+        final Field existingField = fieldMap.put(f.name(), f);
+        if (existingField != null) {
+          throw new AvroRuntimeException(String.format(
+              "Duplicate field %s in record %s: %s and %s.",
+              f.name(), name, f, existingField));
+        }
+        ff.add(f);
+      }
+      this.fields = ff.lock();
+      this.hashCode = NO_HASHCODE;
+    }
+    public boolean equals(Object o) {
+      if (o == this) return true;
+      if (!(o instanceof RecordSchema)) return false;
+      RecordSchema that = (RecordSchema)o;
+      if (!equalCachedHash(that)) return false;
+      if (!equalNames(that)) return false;
+      if (!props.equals(that.props)) return false;
+      Set seen = SEEN_EQUALS.get();
+      SeenPair here = new SeenPair(this, o);
+      if (seen.contains(here)) return true;       // prevent stack overflow
+      boolean first = seen.isEmpty();
+      try {
+        seen.add(here);
+        return fields.equals(((RecordSchema)o).fields);
+      } finally {
+        if (first) seen.clear();
+      }
+    }
+    @Override int computeHash() {
+      Map seen = SEEN_HASHCODE.get();
+      if (seen.containsKey(this)) return 0;       // prevent stack overflow
+      boolean first = seen.isEmpty();
+      try {
+        seen.put(this, this);
+        return super.computeHash() + fields.hashCode();
+      } finally {
+        if (first) seen.clear();
+      }
+    }
+    void toJson(Names names, JsonGenerator gen) throws IOException {
+      if (writeNameRef(names, gen)) return;
+      String savedSpace = names.space;            // save namespace
+      gen.writeStartObject();
+      gen.writeStringField("type", isError?"error":"record");
+      writeName(names, gen);
+      names.space = name.space;                   // set default namespace
+      if (getDoc() != null)
+        gen.writeStringField("doc", getDoc());
+
+      if (fields != null) {
+        gen.writeFieldName("fields");
+        fieldsToJson(names, gen);
+      }
+
+      writeProps(gen);
+      aliasesToJson(gen);
+      gen.writeEndObject();
+      names.space = savedSpace;                   // restore namespace
+    }
+
+    void fieldsToJson(Names names, JsonGenerator gen) throws IOException {
+      gen.writeStartArray();
+      for (Field f : fields) {
+        gen.writeStartObject();
+        gen.writeStringField("name", f.name());
+        gen.writeFieldName("type");
+        f.schema().toJson(names, gen);
+        if (f.doc() != null)
+          gen.writeStringField("doc", f.doc());
+        if (f.defaultValue() != null) {
+          gen.writeFieldName("default");
+          gen.writeTree(f.defaultValue());
+        }
+        if (f.order() != Field.Order.ASCENDING)
+          gen.writeStringField("order", f.order().name);
+        if (f.aliases != null && f.aliases.size() != 0) {
+          gen.writeFieldName("aliases");
+          gen.writeStartArray();
+          for (String alias : f.aliases)
+            gen.writeString(alias);
+          gen.writeEndArray();
+        }
+        f.writeProps(gen);
+        gen.writeEndObject();
+      }
+      gen.writeEndArray();
+    }
+  }
+
+  private static class EnumSchema extends NamedSchema {
+    private final List<String> symbols;
+    private final Map<String,Integer> ordinals;
+    public EnumSchema(Name name, String doc,
+        LockableArrayList<String> symbols) {
+      super(Type.ENUM, name, doc);
+      this.symbols = symbols.lock();
+      this.ordinals = new HashMap<String,Integer>();
+      int i = 0;
+      for (String symbol : symbols)
+        if (ordinals.put(validateName(symbol), i++) != null)
+          throw new SchemaParseException("Duplicate enum symbol: "+symbol);
+    }
+    public List<String> getEnumSymbols() { return symbols; }
+    public boolean hasEnumSymbol(String symbol) {
+      return ordinals.containsKey(symbol);
+    }
+    public int getEnumOrdinal(String symbol) { return ordinals.get(symbol); }
+    public boolean equals(Object o) {
+      if (o == this) return true;
+      if (!(o instanceof EnumSchema)) return false;
+      EnumSchema that = (EnumSchema)o;
+      return equalCachedHash(that)
+        && equalNames(that)
+        && symbols.equals(that.symbols)
+        && props.equals(that.props);
+    }
+    @Override int computeHash() { return super.computeHash() + symbols.hashCode(); }
+    void toJson(Names names, JsonGenerator gen) throws IOException {
+      if (writeNameRef(names, gen)) return;
+      gen.writeStartObject();
+      gen.writeStringField("type", "enum");
+      writeName(names, gen);
+      if (getDoc() != null)
+        gen.writeStringField("doc", getDoc());
+      gen.writeArrayFieldStart("symbols");
+      for (String symbol : symbols)
+        gen.writeString(symbol);
+      gen.writeEndArray();
+      writeProps(gen);
+      aliasesToJson(gen);
+      gen.writeEndObject();
+    }
+  }
+
+  private static class ArraySchema extends Schema {
+    private final Schema elementType;
+    public ArraySchema(Schema elementType) {
+      super(Type.ARRAY);
+      this.elementType = elementType;
+    }
+    public Schema getElementType() { return elementType; }
+    public boolean equals(Object o) {
+      if (o == this) return true;
+      if (!(o instanceof ArraySchema)) return false;
+      ArraySchema that = (ArraySchema)o;
+      return equalCachedHash(that)
+        && elementType.equals(that.elementType)
+        && props.equals(that.props);
+    }
+    @Override int computeHash() {
+      return super.computeHash() + elementType.computeHash();
+    }
+    void toJson(Names names, JsonGenerator gen) throws IOException {
+      gen.writeStartObject();
+      gen.writeStringField("type", "array");
+      gen.writeFieldName("items");
+      elementType.toJson(names, gen);
+      writeProps(gen);
+      gen.writeEndObject();
+    }
+  }
+
+  private static class MapSchema extends Schema {
+    private final Schema valueType;
+    public MapSchema(Schema valueType) {
+      super(Type.MAP);
+      this.valueType = valueType;
+    }
+    public Schema getValueType() { return valueType; }
+    public boolean equals(Object o) {
+      if (o == this) return true;
+      if (!(o instanceof MapSchema)) return false;
+      MapSchema that = (MapSchema)o;
+      return equalCachedHash(that)
+        && valueType.equals(that.valueType)
+        && props.equals(that.props);
+    }
+    @Override int computeHash() {
+      return super.computeHash() + valueType.computeHash();
+    }
+    void toJson(Names names, JsonGenerator gen) throws IOException {
+      gen.writeStartObject();
+      gen.writeStringField("type", "map");
+      gen.writeFieldName("values");
+      valueType.toJson(names, gen);
+      writeProps(gen);
+      gen.writeEndObject();
+    }
+  }
+
+  private static class UnionSchema extends Schema {
+    private final List<Schema> types;
+    private final Map<String,Integer> indexByName
+      = new HashMap<String,Integer>();
+    public UnionSchema(LockableArrayList<Schema> types) {
+      super(Type.UNION);
+      this.types = types.lock();
+      int index = 0;
+      for (Schema type : types) {
+        if (type.getType() == Type.UNION)
+          throw new AvroRuntimeException("Nested union: "+this);
+        String name = type.getFullName();
+        if (name == null)
+          throw new AvroRuntimeException("Nameless in union:"+this);
+        if (indexByName.put(name, index++) != null)
+          throw new AvroRuntimeException("Duplicate in union:" + name);
+      }
+    }
+    public List<Schema> getTypes() { return types; }
+    public Integer getIndexNamed(String name) { return indexByName.get(name); }
+    public boolean equals(Object o) {
+      if (o == this) return true;
+      if (!(o instanceof UnionSchema)) return false;
+      UnionSchema that = (UnionSchema)o;
+      return equalCachedHash(that)
+        && types.equals(that.types)
+        && props.equals(that.props);
+    }
+    @Override int computeHash() {
+      int hash = super.computeHash();
+      for (Schema type : types)
+        hash += type.computeHash();
+      return hash;
+    }
+    
+    @Override
+    public void addProp(String name, String value) {
+      throw new AvroRuntimeException("Can't set properties on a union: "+this);
+    }
+    
+    void toJson(Names names, JsonGenerator gen) throws IOException {
+      gen.writeStartArray();
+      for (Schema type : types)
+        type.toJson(names, gen);
+      gen.writeEndArray();
+    }
+  }
+
+  private static class FixedSchema extends NamedSchema {
+    private final int size;
+    public FixedSchema(Name name, String doc, int size) {
+      super(Type.FIXED, name, doc);
+      if (size < 0)
+        throw new IllegalArgumentException("Invalid fixed size: "+size);
+      this.size = size;
+    }
+    public int getFixedSize() { return size; }
+    public boolean equals(Object o) {
+      if (o == this) return true;
+      if (!(o instanceof FixedSchema)) return false;
+      FixedSchema that = (FixedSchema)o;
+      return equalCachedHash(that)
+        && equalNames(that)
+        && size == that.size
+        && props.equals(that.props);
+    }
+    @Override int computeHash() { return super.computeHash() + size; }
+    void toJson(Names names, JsonGenerator gen) throws IOException {
+      if (writeNameRef(names, gen)) return;
+      gen.writeStartObject();
+      gen.writeStringField("type", "fixed");
+      writeName(names, gen);
+      if (getDoc() != null)
+        gen.writeStringField("doc", getDoc());
+      gen.writeNumberField("size", size);
+      writeProps(gen);
+      aliasesToJson(gen);
+      gen.writeEndObject();
+    }
+  }
+
+  private static class StringSchema extends Schema {
+    public StringSchema() { super(Type.STRING); }
+  }
+
+  private static class BytesSchema extends Schema {
+    public BytesSchema() { super(Type.BYTES); }
+  }
+
+  private static class IntSchema extends Schema {
+    public IntSchema() { super(Type.INT); }
+  }
+
+  private static class LongSchema extends Schema {
+    public LongSchema() { super(Type.LONG); }
+  }
+
+  private static class FloatSchema extends Schema {
+    public FloatSchema() { super(Type.FLOAT); }
+  }
+
+  private static class DoubleSchema extends Schema {
+    public DoubleSchema() { super(Type.DOUBLE); }
+  }
+
+  private static class BooleanSchema extends Schema {
+    public BooleanSchema() { super(Type.BOOLEAN); }
+  }
+  
+  private static class NullSchema extends Schema {
+    public NullSchema() { super(Type.NULL); }
+  }
+
+  /** A parser for JSON-format schemas.  Each named schema parsed with a parser
+   * is added to the names known to the parser so that subsequently parsed
+   * schemas may refer to it by name. */
+  public static class Parser {
+    private Names names = new Names();
+    private boolean validate = true;
+    private boolean validateDefaults = false;
+
+    /** Adds the provided types to the set of defined, named types known to
+     * this parser. */
+    public Parser addTypes(Map<String,Schema> types) {
+      for (Schema s : types.values())
+        names.add(s);
+      return this;
+    }
+
+    /** Returns the set of defined, named types known to this parser. */
+    public Map<String,Schema> getTypes() {
+      Map<String,Schema> result = new LinkedHashMap<String,Schema>();
+      for (Schema s : names.values())
+        result.put(s.getFullName(), s);
+      return result;
+    }
+
+    /** Enable or disable name validation. */
+    public Parser setValidate(boolean validate) {
+      this.validate = validate;
+      return this;
+    }
+
+    /** True iff names are validated.  True by default. */
+    public boolean getValidate() { return this.validate; }
+
+    /** Enable or disable default value validation. */
+    public Parser setValidateDefaults(boolean validateDefaults) {
+      this.validateDefaults = validateDefaults;
+      return this;
+    }
+
+    /** True iff default values are validated.  False by default. */
+    public boolean getValidateDefaults() { return this.validateDefaults; }
+
+    /** Parse a schema from the provided file.
+     * If named, the schema is added to the names known to this parser. */
+    public Schema parse(File file) throws IOException {
+      return parse(FACTORY.createJsonParser(file));
+    }
+
+    /** Parse a schema from the provided stream.
+     * If named, the schema is added to the names known to this parser. */
+    public Schema parse(InputStream in) throws IOException {
+      return parse(FACTORY.createJsonParser(in));
+    }
+
+    /** Read a schema from one or more JSON strings. */
+    public Schema parse(String s, String... more) {
+      StringBuilder b = new StringBuilder(s);
+      for (String part : more)
+        b.append(part);
+      return parse(b.toString());
+    }
+      
+    /** Parse a schema from the provided string.
+     * If named, the schema is added to the names known to this parser. */
+    public Schema parse(String s) {
+      try {
+        return parse(FACTORY.createJsonParser(new StringReader(s)));
+      } catch (IOException e) {
+        throw new SchemaParseException(e);
+      }
+    }
+
+    private Schema parse(JsonParser parser) throws IOException {
+      boolean saved = validateNames.get();
+      boolean savedValidateDefaults = VALIDATE_DEFAULTS.get();
+      try {
+        validateNames.set(validate);
+        VALIDATE_DEFAULTS.set(validateDefaults);
+        return Schema.parse(MAPPER.readTree(parser), names);
+      } catch (JsonParseException e) {
+        throw new SchemaParseException(e);
+      } finally {
+        validateNames.set(saved);
+        VALIDATE_DEFAULTS.set(savedValidateDefaults);
+      }
+    }
+  }
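+  // A minimal usage sketch of Schema.Parser (the schema JSON strings here are
+  // illustrative, not taken from this file). Named schemas accumulate in the
+  // parser's Names, so a later parse may refer to an earlier one by name:
+  //
+  //   Schema.Parser parser = new Schema.Parser();
+  //   Schema md5 = parser.parse(
+  //       "{\"type\": \"fixed\", \"name\": \"MD5\", \"size\": 16}");
+  //   Schema rec = parser.parse(
+  //       "{\"type\": \"record\", \"name\": \"R\", \"fields\": ["
+  //       + "{\"name\": \"hash\", \"type\": \"MD5\"}]}");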
+
+  /**
+   * Constructs a Schema object from JSON schema file <tt>file</tt>.
+   * The contents of <tt>file</tt> are expected to be in UTF-8 format.
+   * @param file  The file to read the schema from.
+   * @return  The freshly built Schema.
+   * @throws IOException if there was trouble reading the contents
+   * @throws JsonParseException if the contents are invalid
+   * @deprecated use {@link Schema.Parser} instead.
+   */
+  public static Schema parse(File file) throws IOException {
+    return new Parser().parse(file);
+  }
+
+  /**
+   * Constructs a Schema object from JSON schema stream <tt>in</tt>.
+   * The contents of <tt>in</tt> are expected to be in UTF-8 format.
+   * @param in  The input stream to read the schema from.
+   * @return  The freshly built Schema.
+   * @throws IOException if there was trouble reading the contents
+   * @throws JsonParseException if the contents are invalid
+   * @deprecated use {@link Schema.Parser} instead.
+   */
+  public static Schema parse(InputStream in) throws IOException {
+    return new Parser().parse(in);
+  }
+
+  /** Construct a schema from <a href="http://json.org/">JSON</a> text.
+   * @deprecated use {@link Schema.Parser} instead.
+   */
+  public static Schema parse(String jsonSchema) {
+    return new Parser().parse(jsonSchema);
+  }
+
+  /** Construct a schema from <a href="http://json.org/">JSON</a> text.
+   * @param validate true if names should be validated, false if not.
+   * @deprecated use {@link Schema.Parser} instead.
+   */
+  public static Schema parse(String jsonSchema, boolean validate) {
+    return new Parser().setValidate(validate).parse(jsonSchema);
+  }
+
+  static final Map<String,Type> PRIMITIVES = new HashMap<String,Type>();
+  static {
+    PRIMITIVES.put("string",  Type.STRING);
+    PRIMITIVES.put("bytes",   Type.BYTES);
+    PRIMITIVES.put("int",     Type.INT);
+    PRIMITIVES.put("long",    Type.LONG);
+    PRIMITIVES.put("float",   Type.FLOAT);
+    PRIMITIVES.put("double",  Type.DOUBLE);
+    PRIMITIVES.put("boolean", Type.BOOLEAN);
+    PRIMITIVES.put("null",    Type.NULL);
+  }
+
+  static class Names extends LinkedHashMap<Name, Schema> {
+    private String space;                         // default namespace
+
+    public Names() {}
+    public Names(String space) { this.space = space; }
+
+    public String space() { return space; }
+    public void space(String space) { this.space = space; }
+
+    @Override
+    public Schema get(Object o) {
+      Name name;
+      if (o instanceof String) {
+        Type primitive = PRIMITIVES.get((String)o);
+        if (primitive != null) return Schema.create(primitive);
+        name = new Name((String)o, space);
+        if (!containsKey(name))                   // if not in default
+          name = new Name((String)o, "");         // try anonymous
+      } else {
+        name = (Name)o;
+      }
+      return super.get(name);
+    }
+    public boolean contains(Schema schema) {
+      return get(((NamedSchema)schema).name) != null;
+    }
+    public void add(Schema schema) {
+      put(((NamedSchema)schema).name, schema);
+    }
+    @Override
+    public Schema put(Name name, Schema schema) {
+      if (containsKey(name))
+        throw new SchemaParseException("Can't redefine: "+name);
+      return super.put(name, schema);
+    }
+  }
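+  // A sketch of the lookup order implemented by Names.get(Object) above (the
+  // namespace and type name are illustrative): primitive names win, then the
+  // name is tried in the default namespace, then with no namespace at all.
+  //
+  //   Names names = new Names("com.example");
+  //   names.get("int");    // returns Schema.create(Type.INT)
+  //   names.get("Foo");    // tries com.example.Foo, then plain "Foo"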
+  
+  private static ThreadLocal<Boolean> validateNames
+    = new ThreadLocal<Boolean>() {
+    @Override protected Boolean initialValue() {
+      return true;
+    }
+  };
+    
+  private static String validateName(String name) {
+    if (!validateNames.get()) return name;        // not validating names
+    int length = name.length();
+    if (length == 0)
+      throw new SchemaParseException("Empty name");
+    char first = name.charAt(0);
+    if (!(Character.isLetter(first) || first == '_'))
+      throw new SchemaParseException("Illegal initial character: "+name);
+    for (int i = 1; i < length; i++) {
+      char c = name.charAt(i);
+      if (!(Character.isLetterOrDigit(c) || c == '_'))
+        throw new SchemaParseException("Illegal character in: "+name);
+    }
+    return name;
+  }
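+  // Illustrative cases for the rules above: "user_1" and "_tmp" pass, while
+  // "", "1st" and "bad-name" each throw SchemaParseException.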
+
+  private static final ThreadLocal<Boolean> VALIDATE_DEFAULTS
+    = new ThreadLocal<Boolean>() {
+    @Override protected Boolean initialValue() {
+      return false;
+    }
+  };
+    
+  private static JsonNode validateDefault(String fieldName, Schema schema,
+                                          JsonNode defaultValue) {
+    if (VALIDATE_DEFAULTS.get() && (defaultValue != null)
+        && !isValidDefault(schema, defaultValue)) { // invalid default
+      String message = "Invalid default for field "+fieldName
+        +": "+defaultValue+" not a "+schema;
+      throw new AvroTypeException(message);     // throw exception
+    }
+    return defaultValue;
+  }
+
+  private static boolean isValidDefault(Schema schema, JsonNode defaultValue) {
+    if (defaultValue == null)
+      return false;
+    switch (schema.getType()) {
+    case STRING:  
+    case BYTES:
+    case ENUM:
+    case FIXED:
+      return defaultValue.isTextual();
+    case INT:
+    case LONG:
+    case FLOAT:
+    case DOUBLE:
+      return defaultValue.isNumber();
+    case BOOLEAN:
+      return defaultValue.isBoolean();
+    case NULL:
+      return defaultValue.isNull();
+    case ARRAY:
+      if (!defaultValue.isArray())
+        return false;
+      for (JsonNode element : defaultValue)
+        if (!isValidDefault(schema.getElementType(), element))
+          return false;
+      return true;
+    case MAP:
+      if (!defaultValue.isObject())
+        return false;
+      for (JsonNode value : defaultValue)
+        if (!isValidDefault(schema.getValueType(), value))
+          return false;
+      return true;
+    case UNION:                                   // union default: first branch
+      return isValidDefault(schema.getTypes().get(0), defaultValue);
+    case RECORD:
+      if (!defaultValue.isObject())
+        return false;
+      for (Field field : schema.getFields())
+        if (!isValidDefault(field.schema(),
+                            defaultValue.has(field.name())
+                            ? defaultValue.get(field.name())
+                            : field.defaultValue()))
+          return false;
+      return true;
+    default:
+      return false;
+    }
+  }
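+  // Note the UNION case above: only the first branch is checked. With an
+  // illustrative field of type ["null", "string"], a default of null is
+  // valid but "abc" is not; reversing the branch order reverses that.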
+
+  /** @see #parse(String) */
+  static Schema parse(JsonNode schema, Names names) {
+    if (schema.isTextual()) {                     // name
+      Schema result = names.get(schema.getTextValue());
+      if (result == null)
+        throw new SchemaParseException("Undefined name: "+schema);
+      return result;
+    } else if (schema.isObject()) {
+      Schema result;
+      String type = getRequiredText(schema, "type", "No type");
+      Name name = null;
+      String savedSpace = names.space();
+      String doc = null;
+      if (type.equals("record") || type.equals("error")
+          || type.equals("enum") || type.equals("fixed")) {
+        String space = getOptionalText(schema, "namespace");
+        doc = getOptionalText(schema, "doc");
+        if (space == null)
+          space = names.space();
+        name = new Name(getRequiredText(schema, "name", "No name in schema"),
+                        space);
+        if (name.space != null) {                 // set default namespace
+          names.space(name.space);
+        }
+      }
+      if (PRIMITIVES.containsKey(type)) {         // primitive
+        result = create(PRIMITIVES.get(type));
+      } else if (type.equals("record") || type.equals("error")) { // record
+        List<Field> fields = new ArrayList<Field>();
+        result = new RecordSchema(name, doc, type.equals("error"));
+        if (name != null) names.add(result);
+        JsonNode fieldsNode = schema.get("fields");
+        if (fieldsNode == null || !fieldsNode.isArray())
+          throw new SchemaParseException("Record has no fields: "+schema);
+        for (JsonNode field : fieldsNode) {
+          String fieldName = getRequiredText(field, "name", "No field name");
+          String fieldDoc = getOptionalText(field, "doc");
+          JsonNode fieldTypeNode = field.get("type");
+          if (fieldTypeNode == null)
+            throw new SchemaParseException("No field type: "+field);
+          if (fieldTypeNode.isTextual()
+              && names.get(fieldTypeNode.getTextValue()) == null)
+            throw new SchemaParseException
+              (fieldTypeNode+" is not a defined name."
+               +" The type of the \""+fieldName+"\" field must be"
+               +" a defined name or a {\"type\": ...} expression.");
+          Schema fieldSchema = parse(fieldTypeNode, names);
+          Field.Order order = Field.Order.ASCENDING;
+          JsonNode orderNode = field.get("order");
+          if (orderNode != null)
+            order = Field.Order.valueOf(orderNode.getTextValue().toUpperCase());
+          JsonNode defaultValue = field.get("default");
+          if (defaultValue != null
+              && (Type.FLOAT.equals(fieldSchema.getType())
+                  || Type.DOUBLE.equals(fieldSchema.getType()))
+              && defaultValue.isTextual())
+            defaultValue =
+              new DoubleNode(Double.valueOf(defaultValue.getTextValue()));
+          Field f = new Field(fieldName, fieldSchema,
+                              fieldDoc, defaultValue, order);
+          Iterator<String> i = field.getFieldNames();
+          while (i.hasNext()) {                       // add field props
+            String prop = i.next();
+            if (!FIELD_RESERVED.contains(prop))
+              f.addProp(prop, field.get(prop));
+          }
+          f.aliases = parseAliases(field);
+          fields.add(f);
+        }
+        result.setFields(fields);
+      } else if (type.equals("enum")) {           // enum
+        JsonNode symbolsNode = schema.get("symbols");
+        if (symbolsNode == null || !symbolsNode.isArray())
+          throw new SchemaParseException("Enum has no symbols: "+schema);
+        LockableArrayList<String> symbols = new LockableArrayList<String>();
+        for (JsonNode n : symbolsNode)
+          symbols.add(n.getTextValue());
+        result = new EnumSchema(name, doc, symbols);
+        if (name != null) names.add(result);
+      } else if (type.equals("array")) {          // array
+        JsonNode itemsNode = schema.get("items");
+        if (itemsNode == null)
+          throw new SchemaParseException("Array has no items type: "+schema);
+        result = new ArraySchema(parse(itemsNode, names));
+      } else if (type.equals("map")) {            // map
+        JsonNode valuesNode = schema.get("values");
+        if (valuesNode == null)
+          throw new SchemaParseException("Map has no values type: "+schema);
+        result = new MapSchema(parse(valuesNode, names));
+      } else if (type.equals("fixed")) {          // fixed
+        JsonNode sizeNode = schema.get("size");
+        if (sizeNode == null || !sizeNode.isInt())
+          throw new SchemaParseException("Invalid or no size: "+schema);
+        result = new FixedSchema(name, doc, sizeNode.getIntValue());
+        if (name != null) names.add(result);
+      } else
+        throw new SchemaParseException("Type not supported: "+type);
+      Iterator<String> i = schema.getFieldNames();
+      while (i.hasNext()) {                       // add properties
+        String prop = i.next();
+        if (!SCHEMA_RESERVED.contains(prop))      // ignore reserved
+          result.addProp(prop, schema.get(prop));
+      }
+      // parse logical type if present
+      result.logicalType = LogicalTypes.fromSchemaIgnoreInvalid(result);
+      names.space(savedSpace);                  // restore space
+      if (result instanceof NamedSchema) {
+        Set<String> aliases = parseAliases(schema);
+        if (aliases != null)                      // add aliases
+          for (String alias : aliases)
+            result.addAlias(alias);
+      }
+      return result;
+    } else if (schema.isArray()) {                // union
+      LockableArrayList<Schema> types =
+        new LockableArrayList<Schema>(schema.size());
+      for (JsonNode typeNode : schema)
+        types.add(parse(typeNode, names));
+      return new UnionSchema(types);
+    } else {
+      throw new SchemaParseException("Schema not yet supported: "+schema);
+    }
+  }
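+  // The three JSON shapes accepted above, sketched with illustrative values:
+  // a textual node names a known schema ("MD5"), an object node carries a
+  // "type" key ({"type": "map", "values": "long"}), and an array node is a
+  // union (["null", "string"]).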
+
+  private static Set<String> parseAliases(JsonNode node) {
+    JsonNode aliasesNode = node.get("aliases");
+    if (aliasesNode == null)
+      return null;
+    if (!aliasesNode.isArray())
+      throw new SchemaParseException("aliases not an array: "+node);
+    Set<String> aliases = new LinkedHashSet<String>();
+    for (JsonNode aliasNode : aliasesNode) {
+      if (!aliasNode.isTextual())
+        throw new SchemaParseException("alias not a string: "+aliasNode);
+      aliases.add(aliasNode.getTextValue());
+    }
+    return aliases;  
+  }
+
+  /** Extracts the text value associated with key from the container JsonNode,
+   * and throws {@link SchemaParseException} if it doesn't exist.
+   *
+   * @param container Container where to find key.
+   * @param key Key to look for in container.
+   * @param error String to prepend to the SchemaParseException.
+   */
+  private static String getRequiredText(JsonNode container, String key,
+      String error) {
+    String out = getOptionalText(container, key);
+    if (null == out) {
+      throw new SchemaParseException(error + ": " + container);
+    }
+    return out;
+  }
+
+  /** Extracts the text value associated with key from the container JsonNode. */
+  private static String getOptionalText(JsonNode container, String key) {
+    JsonNode jsonNode = container.get(key);
+    return jsonNode != null ? jsonNode.getTextValue() : null;
+  }
+
+  /**
+   * Parses a string as Json.
+   * @deprecated use {@link org.apache.avro.data.Json#parseJson(String)}
+   */
+  @Deprecated
+  public static JsonNode parseJson(String s) {
+    try {
+      return MAPPER.readTree(FACTORY.createJsonParser(new StringReader(s)));
+    } catch (JsonParseException e) {
+      throw new RuntimeException(e);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  /** Rewrite a writer's schema using the aliases from a reader's schema.  This
+   * permits reading records, enums and fixed schemas whose names have changed,
+   * and records whose field names have changed.  The returned schema always
+   * contains the same data elements in the same order, but with possibly
+   * different names. */
+  public static Schema applyAliases(Schema writer, Schema reader) {
+    if (writer == reader) return writer;          // same schema
+
+    // create indexes of names
+    Map<Schema,Schema> seen = new IdentityHashMap<Schema,Schema>(1);
+    Map<Name,Name> aliases = new HashMap<Name, Name>(1);
+    Map<Name,Map<String,String>> fieldAliases =
+      new HashMap<Name, Map<String,String>>(1);
+    getAliases(reader, seen, aliases, fieldAliases);
+
+    if (aliases.size() == 0 && fieldAliases.size() == 0)
+      return writer;                              // no aliases
+    
+    seen.clear();
+    return applyAliases(writer, seen, aliases, fieldAliases);
+  }
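+  // A sketch of the effect (schema names illustrative): if the reader schema
+  // is {"type": "record", "name": "New", "aliases": ["Old"], ...} and the
+  // writer's record is named Old, the returned schema keeps the writer's
+  // fields and their order but is renamed to New, so the two resolve by name.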
+
+  private static Schema applyAliases(Schema s, Map<Schema,Schema> seen,
+                                     Map<Name,Name> aliases,
+                                     Map<Name,Map<String,String>> fieldAliases){
+
+    Name name = s instanceof NamedSchema ? ((NamedSchema)s).name : null;
+    Schema result = s;
+    switch (s.getType()) {
+    case RECORD:
+      if (seen.containsKey(s)) return seen.get(s); // break loops
+      if (aliases.containsKey(name))
+        name = aliases.get(name);
+      result = Schema.createRecord(name.full, s.getDoc(), null, s.isError());
+      seen.put(s, result);
+      List<Field> newFields = new ArrayList<Field>();
+      for (Field f : s.getFields()) {
+        Schema fSchema = applyAliases(f.schema, seen, aliases, fieldAliases);
+        String fName = getFieldAlias(name, f.name, fieldAliases);
+        Field newF = new Field(fName, fSchema, f.doc, f.defaultValue, f.order);
+        newF.props.putAll(f.props);               // copy props
+        newFields.add(newF);
+      }
+      result.setFields(newFields);
+      break;
+    case ENUM:
+      if (aliases.containsKey(name))
+        result = Schema.createEnum(aliases.get(name).full, s.getDoc(), null,
+                                   s.getEnumSymbols());
+      break;
+    case ARRAY:
+      Schema e = applyAliases(s.getElementType(), seen, aliases, fieldAliases);
+      if (e != s.getElementType())
+        result = Schema.createArray(e);
+      break;
+    case MAP:
+      Schema v = applyAliases(s.getValueType(), seen, aliases, fieldAliases);
+      if (v != s.getValueType())
+        result = Schema.createMap(v);
+      break;
+    case UNION:
+      List<Schema> types = new ArrayList<Schema>();
+      for (Schema branch : s.getTypes())
+        types.add(applyAliases(branch, seen, aliases, fieldAliases));
+      result = Schema.createUnion(types);
+      break;
+    case FIXED:
+      if (aliases.containsKey(name))
+        result = Schema.createFixed(aliases.get(name).full, s.getDoc(), null,
+                                    s.getFixedSize());
+      break;
+    }
+    if (result != s)
+      result.props.putAll(s.props);        // copy props
+    return result;
+  }
+
+
+  private static void getAliases(Schema schema,
+                                 Map<Schema,Schema> seen,
+                                 Map<Name,Name> aliases,
+                                 Map<Name,Map<String,String>> fieldAliases) {
+    if (schema instanceof NamedSchema) {
+      NamedSchema namedSchema = (NamedSchema)schema;
+      if (namedSchema.aliases != null)
+        for (Name alias : namedSchema.aliases)
+          aliases.put(alias, namedSchema.name);
+    }
+    switch (schema.getType()) {
+    case RECORD:
+      if (seen.containsKey(schema)) return;            // break loops
+      seen.put(schema, schema);
+      RecordSchema record = (RecordSchema)schema;
+      for (Field field : schema.getFields()) {
+        if (field.aliases != null)
+          for (String fieldAlias : field.aliases) {
+            Map<String,String> recordAliases = fieldAliases.get(record.name);
+            if (recordAliases == null)
+              fieldAliases.put(record.name,
+                               recordAliases = new HashMap<String,String>());
+            recordAliases.put(fieldAlias, field.name);
+          }
+        getAliases(field.schema, seen, aliases, fieldAliases);
+      }
+      if (record.aliases != null && fieldAliases.containsKey(record.name))
+        for (Name recordAlias : record.aliases)
+          fieldAliases.put(recordAlias, fieldAliases.get(record.name));
+      break;
+    case ARRAY:
+      getAliases(schema.getElementType(), seen, aliases, fieldAliases);
+      break;
+    case MAP:
+      getAliases(schema.getValueType(), seen, aliases, fieldAliases);
+      break;
+    case UNION:
+      for (Schema s : schema.getTypes())
+        getAliases(s, seen, aliases, fieldAliases);
+      break;
+    }
+  }
+
+  private static String getFieldAlias(
+      Name record, String field, Map<Name,Map<String,String>> fieldAliases) {
+    Map<String,String> recordAliases = fieldAliases.get(record);
+    if (recordAliases == null)
+      return field;
+    String alias = recordAliases.get(field);
+    if (alias == null)
+      return field;
+    return alias;
+  }
+
+  /*
+   * Implementation note: this class keeps a boolean variable <tt>locked</tt>
+   * which is set to <tt>true</tt> in the lock() method. It's legal to call
+   * lock() any number of times; any lock() other than the first is a no-op.
+   *
+   * The class throws <tt>IllegalStateException</tt> if a mutating operation
+   * is performed after being locked. Since modifications through an iterator
+   * also use the list's mutating operations, this effectively blocks all
+   * modifications.
+   */
+  /**
+   * No change is permitted on a LockableArrayList once lock() has been
+   * called on it.
+   * @param <E> the element type
+   */
+  static class LockableArrayList<E> extends ArrayList<E> {
+    private static final long serialVersionUID = 1L;
+    private boolean locked = false;
+    
+    public LockableArrayList() {
+    }
+
+    public LockableArrayList(int size) {
+      super(size);
+    }
+
+    public LockableArrayList(List<E> types) {
+      super(types);
+    }
+
+    public LockableArrayList(E... types) {
+      super(types.length);
+      Collections.addAll(this, types);
+    }
+
+    public List<E> lock() {
+      locked = true;
+      return this;
+    }
+
+    private void ensureUnlocked() {
+      if (locked) {
+        throw new IllegalStateException();
+      }
+    }
+
+    public boolean add(E e) {
+      ensureUnlocked();
+      return super.add(e);
+    }
+    
+    public boolean remove(Object o) {
+      ensureUnlocked();
+      return super.remove(o);
+    }
+    
+    public E remove(int index) {
+      ensureUnlocked();
+      return super.remove(index);
+    }
+      
+    public boolean addAll(Collection<? extends E> c) {
+      ensureUnlocked();
+      return super.addAll(c);
+    }
+    
+    public boolean addAll(int index, Collection<? extends E> c) {
+      ensureUnlocked();
+      return super.addAll(index, c);
+    }
+    
+    public boolean removeAll(Collection<?> c) {
+      ensureUnlocked();
+      return super.removeAll(c);
+    }
+    
+    public boolean retainAll(Collection<?> c) {
+      ensureUnlocked();
+      return super.retainAll(c);
+    }
+    
+    public void clear() {
+      ensureUnlocked();
+      super.clear();
+    }
+
+  }
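+  // The lock() contract above, sketched:
+  //
+  //   LockableArrayList<String> l = new LockableArrayList<String>();
+  //   l.add("a");          // fine while unlocked
+  //   l.lock();
+  //   l.add("b");          // throws IllegalStateException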
+  
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
new file mode 100644
index 0000000..5573014
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java
@@ -0,0 +1,2598 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.avro.Schema.Field;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.io.JsonStringEncoder;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.node.TextNode;
+
+/**
+ * <p>
+ * A fluent interface for building {@link Schema} instances. The flow of the API
+ * is designed to mimic the <a
+ * href="http://avro.apache.org/docs/current/spec.html#schemas">Avro Schema
+ * Specification</a>.
+ * </p>
+ * For example, the below JSON schema and the fluent builder code to create it
+ * are very similar:
+ * 
+ * <pre>
+ * {
+ *   "type": "record",
+ *   "name": "HandshakeRequest", "namespace":"org.apache.avro.ipc",
+ *   "fields": [
+ *     {"name": "clientHash",
+ *      "type": {"type": "fixed", "name": "MD5", "size": 16}},
+ *     {"name": "clientProtocol", "type": ["null", "string"]},
+ *     {"name": "serverHash", "type": "MD5"},
+ *     {"name": "meta", "type": ["null", {"type": "map", "values": "bytes"}]}
+ *   ]
+ * }
+ * </pre>
+ * 
+ * <pre>
+ *   Schema schema = SchemaBuilder
+ *   .record("HandshakeRequest").namespace("org.apache.avro.ipc)
+ *   .fields()
+ *     .name("clientHash").type().fixed("MD5").size(16).noDefault()
+ *     .name("clientProtocol").type().nullable().stringType().noDefault()
+ *     .name("serverHash").type("MD5")
+ *     .name("meta").type().nullable().map().values().bytesType().noDefault()
+ *   .endRecord();
+ * </pre>
+ * <p/>
+ * 
+ * <h5>Usage Guide</h5>
+ * SchemaBuilder chains together many smaller builders and maintains nested
+ * context in order to mimic the Avro Schema specification. Every Avro type in
+ * JSON has required and optional JSON properties, as well as user-defined
+ * properties.
+ * <p/>
+ * <h6>Selecting and Building an Avro Type</h6>
+ * The API analogy for the right-hand side of the Avro Schema JSON
+ * <pre>
+ * "type":
+ * </pre>
+ * is a {@link TypeBuilder}, {@link FieldTypeBuilder}, or
+ * {@link UnionFieldTypeBuilder}, depending on the context. These types all
+ * share a similar API for selecting and building types.
+ * <p/>
+ * <h6>Primitive Types</h6>
+ * All Avro primitive types are trivial to configure. A primitive type in
+ * Avro JSON can be declared two ways, one that supports custom properties
+ * and one that does not:
+ * <pre>
+ * {"type":"int"}
+ * {"type":{"type":"int"}}
+ * {"type":{"type":"int", "customProp":"val"}}
+ * </pre>
+ * The analogous code forms for the three JSON lines above are the three
+ * lines below:
+ * <pre>
+ *  .intType()
+ *  .intBuilder().endInt()
+ *  .intBuilder().prop("customProp", "val").endInt()
+ * </pre>
+ * Every primitive type has a shortcut to create the trivial type, and
+ * a builder when custom properties are required.  The first line above is
+ * a shortcut for the second, analogous to the JSON case.
+ * <h6>Named Types</h6>
+ * Avro named types have names, namespace, aliases, and doc.  In this API
+ * these share a common parent, {@link NamespacedBuilder}.
+ * The builders for named types require a name to be constructed, and optional
+ * configuration via:
+ * <li>{@link NamespacedBuilder#doc()}</li>
+ * <li>{@link NamespacedBuilder#namespace(String)}</li>
+ * <li>{@link NamespacedBuilder#aliases(String...)}</li>
+ * <li>{@link PropBuilder#prop(String, String)}</li>
+ * <p/>
+ * Each named type completes configuration of the optional properties
+ * with its own method:
+ * <li>{@link FixedBuilder#size(int)}</li>
+ * <li>{@link EnumBuilder#symbols(String...)}</li>
+ * <li>{@link RecordBuilder#fields()}</li>
+ * Example use of a named type with all optional parameters:
+ * <pre>
+ * .enumeration("Suit").namespace("org.apache.test")
+ *   .aliases("org.apache.test.OldSuit")
+ *   .doc("CardSuits")
+ *   .prop("customProp", "val")
+ *   .symbols("SPADES", "HEARTS", "DIAMONDS", "CLUBS")
+ * </pre>
+ * This is equivalent to the JSON:
+ * <pre>
+ * { "type":"enum",
+ *   "name":"Suit", "namespace":"org.apache.test",
+ *   "aliases":["org.apache.test.OldSuit"],
+ *   "doc":"Card Suits",
+ *   "customProp":"val",
+ *   "symbols":["SPADES", "HEARTS", "DIAMONDS", "CLUBS"]
+ * }
+ * </pre>
+ * <h6>Nested Types</h6>
+ * The Avro nested types, map and array, can have custom properties like
+ * all Avro types; they are not named, and must specify a nested type.
+ * After configuration of optional properties, an array or map 
+ * builds or selects its nested type with {@link ArrayBuilder#items()}
+ * and {@link MapBuilder#values()}, respectively.
+ * 
+ * <h6>Fields</h6>
+ * {@link RecordBuilder#fields()} returns a {@link FieldAssembler} for 
+ * defining the fields of the record and completing it.
+ * Each field must have a name, specified via {@link FieldAssembler#name(String)},
+ * which returns a {@link FieldBuilder} for defining aliases, custom properties,
+ * and documentation of the field.  After configuring these optional values for
+ * a field, the type is selected or built with {@link FieldBuilder#type()}.
+ * <p/>
+ * Fields have default values that must be specified to complete the field.
+ * {@link FieldDefault#noDefault()} is available for all field types, and
+ * a specific method is available for each type to use a default, for example
+ * {@link IntDefault#intDefault(int)}
+ * <p/>
+ * There are field shortcut methods on {@link FieldAssembler} for primitive types.
+ * These shortcuts create required, optional, and nullable fields, but do not 
+ * support field aliases, doc, or custom properties.
+ * 
+ * <h6>Unions</h6>
+ * Union types are built via {@link TypeBuilder#unionOf()} or
+ * {@link FieldTypeBuilder#unionOf()} in the context of type selection.
+ * This chains together multiple types, in union order.  For example:
+ * <pre>
+ * .unionOf()
+ *   .fixed("IPv4").size(4).and()
+ *   .fixed("IPv6").size(16).and()
+ *   .nullType().endUnion()
+ * </pre>
+ * is equivalent to the Avro schema JSON:
+ * <pre>
+ * [
+ *   {"type":"fixed", "name":"IPv4", "size":4},
+ *   {"type":"fixed", "name":"IPv6", "size":16},
+ *   "null"
+ * ]
+ * </pre>
+ * In a field context, the first type of a union defines what default type
+ * is allowed.
+ * <p/>
+ * Unions have two shortcuts for common cases.  nullable()
+ * creates a union of a type and null.  In a field type context, optional()
+ * is available and creates a union of null and a type, with a null default.
+ * The below two are equivalent:
+ * <pre>
+ *   .unionOf().intType().and().nullType().endUnion()
+ *   .nullable().intType()
+ * </pre>
+ * The below two field declarations are equivalent:
+ * <pre>
+ *   .name("f").type().unionOf().nullType().and().longType().endUnion().nullDefault()
+ *   .name("f").type().optional().longType()
+ * </pre>
+ * 
+ * <h6>Explicit Types and Types by Name</h6>
+ * Types can also be specified explicitly by passing in a Schema, or by name:
+ * <pre>
+ *   .type(Schema.create(Schema.Type.INT)) // explicitly specified
+ *   .type("MD5")                       // reference by full name or short name
+ *   .type("MD5", "org.apache.avro.test")  // reference by name and namespace
+ * </pre>
+ * When a type is specified by name, and the namespace is absent or null, the
+ * namespace is inherited from the enclosing context.  A namespace will
+ * propagate as a default to child fields, nested types, or later defined types
+ * in a union.  To specify a name that has no namespace and ignore the inherited
+ * namespace, set the namespace to "".
+ * <p/>
+ * {@link SchemaBuilder#builder(String)} returns a type builder with a default
+ * namespace.  {@link SchemaBuilder#builder()} returns a type builder with no
+ * default namespace.
+ */
+public class SchemaBuilder {
+
+  private SchemaBuilder() {
+  }
+  
+  /**
+   * Create a builder for Avro schemas.
+   */
+  public static TypeBuilder<Schema> builder() {
+    return new TypeBuilder<Schema>(new SchemaCompletion(), new NameContext());
+  }
+
+  /**
+   * Create a builder for Avro schemas with a default namespace. Types created
+   * without namespaces will inherit the namespace provided.
+   */
+  public static TypeBuilder<Schema> builder(String namespace) {
+    return new TypeBuilder<Schema>(new SchemaCompletion(),
+        new NameContext().namespace(namespace));
+  }
+  
+  /**
+   * Create a builder for an Avro record with the specified name.
+   * This is equivalent to:
+   * <pre>
+   *   builder().record(name);
+   * </pre>
+   * @param name the record name
+   */
+  public static RecordBuilder<Schema> record(String name) {
+    return builder().record(name);
+  }
+
+  /**
+   * Create a builder for an Avro enum with the specified name and symbols (values).
+   * This is equivalent to:
+   * <pre>
+   *   builder().enumeration(name);
+   * </pre>
+   * @param name the enum name
+   */
+  public static EnumBuilder<Schema> enumeration(String name) {
+    return builder().enumeration(name);
+  }
+
+  /**
+   * Create a builder for an Avro fixed type with the specified name and size.
+   * This is equivalent to:
+   * <pre>
+   *   builder().fixed(name);
+   * </pre>
+   * @param name the fixed name
+   */
+  public static FixedBuilder<Schema> fixed(String name) {
+    return builder().fixed(name);
+  }
+
+  /**
+   * Create a builder for an Avro array.
+   * This is equivalent to:
+   * <pre>
+   *   builder().array();
+   * </pre>
+   */
+  public static ArrayBuilder<Schema> array() {
+    return builder().array();
+  }
+
+  /**
+   * Create a builder for an Avro map.
+   * This is equivalent to:
+   * <pre>
+   *   builder().map();
+   * </pre>
+   */
+  public static MapBuilder<Schema> map() {
+    return builder().map();
+  }
+  
+  /**
+   * Create a builder for an Avro union.
+   * This is equivalent to:
+   * <pre>
+   *   builder().unionOf();
+   * </pre>
+   */
+  public static BaseTypeBuilder<UnionAccumulator<Schema>> unionOf() {
+    return builder().unionOf();
+  }
+  
+  /**
+   * Create a builder for a union of a type and null.
+   * This is a shortcut for:
+   * <pre>
+   *   builder().nullable();
+   * </pre>
+   * and the following two lines are equivalent:
+   * <pre>
+   *   nullable().intType();
+   * </pre>
+   * <pre>
+   *   unionOf().intType().and().nullType().endUnion();
+   * </pre>
+   */
+  public static BaseTypeBuilder<Schema> nullable() {
+    return builder().nullable();
+  }
+
+  
+  /**
+   * An abstract builder for all Avro types.  All Avro types
+   * can have arbitrary string key-value properties.
+   */
+  public static abstract class PropBuilder<S extends PropBuilder<S>> {
+    private Map<String, JsonNode> props = null;
+    protected PropBuilder() {
+    }
+    
+    /**
+     * Set name-value pair properties for this type or field.
+     */
+    public final S prop(String name, String val) {
+      return prop(name, TextNode.valueOf(val));
+    }
+    
+    // for internal use by the Parser
+    final S prop(String name, JsonNode val) {
+      if(!hasProps()) {
+        props = new HashMap<String, JsonNode>();
+      }
+      props.put(name, val);
+      return self();
+    }
+    
+    private boolean hasProps() {
+      return (props != null);
+    }
+    
+    final <T extends JsonProperties> T addPropsTo(T jsonable) {
+      if (hasProps()) {
+        for(Map.Entry<String, JsonNode> prop : props.entrySet()) {
+          jsonable.addProp(prop.getKey(), prop.getValue());
+        }
+      }
+      return jsonable;
+    }
+    /** a self-type for chaining builder subclasses.  Concrete subclasses
+     * must return 'this' **/
+    protected abstract S self();
+  }
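+  // A small sketch of PropBuilder in use (the property name is illustrative):
+  //
+  //   Schema s = SchemaBuilder.builder()
+  //       .intBuilder().prop("customProp", "val").endInt();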
+  
+  /**
+   * An abstract type that provides builder methods for configuring the name,
+   * doc, and aliases of all Avro types that have names (fields, Fixed, Record,
+   * and Enum).
+   * <p/>
+   * All Avro named types and fields have 'doc', 'aliases', and 'name'
+   * components. 'name' is required, and provided to this builder. 'doc' and
+   * 'aliases' are optional.
+   */
+  public static abstract class NamedBuilder<S extends NamedBuilder<S>> extends
+      PropBuilder<S> {
+    private final String name;
+    private final NameContext names;
+    private String doc;
+    private String[] aliases;
+
+    protected NamedBuilder(NameContext names, String name) {
+      checkRequired(name, "Type must have a name");
+      this.names = names;
+      this.name = name;
+    }
+
+    /** configure this type's optional documentation string **/
+    public final S doc(String doc) {
+      this.doc = doc;
+      return self();
+    }
+
+    /** configure this type's optional name aliases **/
+    public final S aliases(String... aliases) {
+      this.aliases = aliases;
+      return self();
+    }
+
+    final String doc() {
+      return doc;
+    }
+
+    final String name() {
+      return name;
+    }
+
+    final NameContext names() {
+      return names;
+    }
+
+    final Schema addAliasesTo(Schema schema) {
+      if (null != aliases) {
+        for (String alias : aliases) {
+          schema.addAlias(alias);
+        }
+      }
+      return schema;
+    }
+
+    final Field addAliasesTo(Field field) {
+      if (null != aliases) {
+        for (String alias : aliases) {
+          field.addAlias(alias);
+        }
+      }
+      return field;
+    }
+  }
+  
+  /**
+   * An abstract type that provides builder methods for configuring the
+   * namespace for all Avro types that have namespaces (Fixed, Record, and
+   * Enum).
+   */
+  public static abstract class NamespacedBuilder<R, S extends NamespacedBuilder<R, S>>
+      extends NamedBuilder<S> {
+    private final Completion<R> context;
+    private String namespace;
+
+    protected NamespacedBuilder(Completion<R> context, NameContext names,
+        String name) {
+      super(names, name);
+      this.context = context;
+    }
+
+    /**
+     * Set the namespace of this type. To clear the namespace, set it to the
+     * empty string.
+     * <p/>
+     * When the namespace is null or unset, the namespace of the type defaults
+     * to the namespace of the enclosing context.
+     **/
+    public final S namespace(String namespace) {
+      this.namespace = namespace;
+      return self();
+    }
+
+    final String space() {
+      if (null == namespace) {
+        return names().namespace;
+      }
+      return namespace;
+    }
+
+    final Schema completeSchema(Schema schema) {
+      addPropsTo(schema);
+      addAliasesTo(schema);
+      names().put(schema);
+      return schema;
+    }
+
+    final Completion<R> context() {
+      return context;
+    }
+  }
+  
+  /**
+   * An abstraction for sharing code amongst all primitive type builders.
+   */
+  private static abstract class PrimitiveBuilder<R, P extends PrimitiveBuilder<R, P>>
+      extends PropBuilder<P> {
+    private final Completion<R> context;
+    private final Schema immutable;
+
+    protected PrimitiveBuilder(Completion<R> context, NameContext names,
+        Schema.Type type) {
+      this.context = context;
+      this.immutable = names.getFullname(type.getName());
+    }
+
+    private R end() {
+      Schema schema = immutable;
+      if (super.hasProps()) {
+        schema = Schema.create(immutable.getType());
+        addPropsTo(schema);
+      }
+      return context.complete(schema);
+    }
+  }
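+  // Design note on end() above: the cached immutable schema is returned
+  // untouched unless properties were set, so a plain .intType() allocates
+  // nothing, while .intBuilder().prop("p", "v").endInt() creates a fresh
+  // Schema that is safe to mutate with properties.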
+
+  /**
+   * Builds an Avro boolean type with optional properties. Set properties with
+   * {@link #prop(String, String)}, and finalize with {@link #endBoolean()}
+   **/
+  public static final class BooleanBuilder<R> extends
+      PrimitiveBuilder<R, BooleanBuilder<R>> {
+    private BooleanBuilder(Completion<R> context, NameContext names) {
+      super(context, names, Schema.Type.BOOLEAN);
+    }
+
+    private static <R> BooleanBuilder<R> create(Completion<R> context,
+        NameContext names) {
+      return new BooleanBuilder<R>(context, names);
+    }
+
+    @Override
+    protected BooleanBuilder<R> self() {
+      return this;
+    }
+
+    /** complete building this type, return control to context **/
+    public R endBoolean() {
+      return super.end();
+    }
+  }
+
+  /**
+   * Builds an Avro int type with optional properties. Set properties with
+   * {@link #prop(String, String)}, and finalize with {@link #endInt()}
+   **/
+  public static final class IntBuilder<R> extends
+      PrimitiveBuilder<R, IntBuilder<R>> {
+    private IntBuilder(Completion<R> context, NameContext names) {
+      super(context, names, Schema.Type.INT);
+    }
+
+    private static <R> IntBuilder<R> create(Completion<R> context,
+        NameContext names) {
+      return new IntBuilder<R>(context, names);
+    }
+
+    @Override
+    protected IntBuilder<R> self() {
+      return this;
+    }
+
+    /** complete building this type, return control to context **/
+    public R endInt() {
+      return super.end();
+    }
+  }
+
+  /**
+   * Builds an Avro long type with optional properties. Set properties with
+   * {@link #prop(String, String)}, and finalize with {@link #endLong()}
+   **/
+  public static final class LongBuilder<R> extends
+      PrimitiveBuilder<R, LongBuilder<R>> {
+    private LongBuilder(Completion<R> context, NameContext names) {
+      super(context, names, Schema.Type.LONG);
+    }
+
+    private static <R> LongBuilder<R> create(Completion<R> context,
+        NameContext names) {
+      return new LongBuilder<R>(context, names);
+    }
+
+    @Override
+    protected LongBuilder<R> self() {
+      return this;
+    }
+
+    /** complete building this type, return control to context **/
+    public R endLong() {
+      return super.end();
+    }
+  }
+
+  /**
+   * Builds an Avro float type with optional properties. Set properties with
+   * {@link #prop(String, String)}, and finalize with {@link #endFloat()}
+   **/
+  public static final class FloatBuilder<R> extends
+      PrimitiveBuilder<R, FloatBuilder<R>> {
+    private FloatBuilder(Completion<R> context, NameContext names) {
+      super(context, names, Schema.Type.FLOAT);
+    }
+
+    private static <R> FloatBuilder<R> create(Completion<R> context,
+        NameContext names) {
+      return new FloatBuilder<R>(context, names);
+    }
+
+    @Override
+    protected FloatBuilder<R> self() {
+      return this;
+    }
+
+    /** complete building this type, return control to context **/
+    public R endFloat() {
+      return super.end();
+    }
+  }
+
+  /**
+   * Builds an Avro double type with optional properties. Set properties with
+   * {@link #prop(String, String)}, and finalize with {@link #endDouble()}
+   **/
+  public static final class DoubleBuilder<R> extends
+      PrimitiveBuilder<R, DoubleBuilder<R>> {
+    private DoubleBuilder(Completion<R> context, NameContext names) {
+      super(context, names, Schema.Type.DOUBLE);
+    }
+
+    private static <R> DoubleBuilder<R> create(Completion<R> context,
+        NameContext names) {
+      return new DoubleBuilder<R>(context, names);
+    }
+
+    @Override
+    protected DoubleBuilder<R> self() {
+      return this;
+    }
+
+    /** complete building this type, return control to context **/
+    public R endDouble() {
+      return super.end();
+    }
+  }
+
+  /**
+   * Builds an Avro string type with optional properties. Set properties with
+   * {@link #prop(String, String)}, and finalize with {@link #endString()}
+   **/
+  public static final class StringBldr<R> extends
+      PrimitiveBuilder<R, StringBldr<R>> {
+    private StringBldr(Completion<R> context, NameContext names) {
+      super(context, names, Schema.Type.STRING);
+    }
+
+    private static <R> StringBldr<R> create(Completion<R> context,
+        NameContext names) {
+      return new StringBldr<R>(context, names);
+    }
+
+    @Override
+    protected StringBldr<R> self() {
+      return this;
+    }
+
+    /** complete building this type, return control to context **/
+    public R endString() {
+      return super.end();
+    }
+  }
+
+  /**
+   * Builds an Avro bytes type with optional properties. Set properties with
+   * {@link #prop(String, String)}, and finalize with {@link #endBytes()}
+   **/
+  public static final class BytesBuilder<R> extends
+      PrimitiveBuilder<R, BytesBuilder<R>> {
+    private BytesBuilder(Completion<R> context, NameContext names) {
+      super(context, names, Schema.Type.BYTES);
+    }
+
+    private static <R> BytesBuilder<R> create(Completion<R> context,
+        NameContext names) {
+      return new BytesBuilder<R>(context, names);
+    }
+
+    @Override
+    protected BytesBuilder<R> self() {
+      return this;
+    }
+
+    /** complete building this type, return control to context **/
+    public R endBytes() {
+      return super.end();
+    }
+  }
+
+  /**
+   * Builds an Avro null type with optional properties. Set properties with
+   * {@link #prop(String, String)}, and finalize with {@link #endNull()}
+   **/
+  public static final class NullBuilder<R> extends
+      PrimitiveBuilder<R, NullBuilder<R>> {
+    private NullBuilder(Completion<R> context, NameContext names) {
+      super(context, names, Schema.Type.NULL);
+    }
+
+    private static <R> NullBuilder<R> create(Completion<R> context,
+        NameContext names) {
+      return new NullBuilder<R>(context, names);
+    }
+
+    @Override
+    protected NullBuilder<R> self() {
+      return this;
+    }
+
+    /** complete building this type, return control to context **/
+    public R endNull() {
+      return super.end();
+    }
+  }
+
+  /**
+   * Builds an Avro Fixed type with optional properties, namespace, doc, and
+   * aliases.
+   * <p/>
+   * Set properties with {@link #prop(String, String)}, namespace with
+   * {@link #namespace(String)}, doc with {@link #doc(String)}, and aliases with
+   * {@link #aliases(String[])}.
+   * <p/>
+   * The Fixed schema is finalized when its required size is set via
+   * {@link #size(int)}.
+   **/
+  public static final class FixedBuilder<R> extends
+      NamespacedBuilder<R, FixedBuilder<R>> {
+    private FixedBuilder(Completion<R> context, NameContext names, String name) {
+      super(context, names, name);
+    }
+
+    private static <R> FixedBuilder<R> create(Completion<R> context,
+        NameContext names, String name) {
+      return new FixedBuilder<R>(context, names, name);
+    }
+
+    @Override
+    protected FixedBuilder<R> self() {
+      return this;
+    }
+
+    /** Configure this fixed type's size, and end its configuration. **/
+    public R size(int size) {
+      Schema schema = Schema.createFixed(name(), super.doc(), space(), size);
+      completeSchema(schema);
+      return context().complete(schema);
+    }
+  }
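+  // For example (name and size illustrative):
+  //
+  //   Schema ipv4 = SchemaBuilder.fixed("IPv4").size(4);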
+
+  /**
+   * Builds an Avro Enum type with optional properties, namespace, doc, and
+   * aliases.
+   * <p/>
+   * Set properties with {@link #prop(String, String)}, namespace with
+   * {@link #namespace(String)}, doc with {@link #doc(String)}, and aliases with
+   * {@link #aliases(String[])}.
+   * <p/>
+   * The Enum schema is finalized when its required symbols are set via
+   * {@link #symbols(String[])}.
+   **/
+  public static final class EnumBuilder<R> extends
+      NamespacedBuilder<R, EnumBuilder<R>> {
+    private EnumBuilder(Completion<R> context, NameContext names, String name) {
+      super(context, names, name);
+    }
+
+    private static <R> EnumBuilder<R> create(Completion<R> context,
+        NameContext names, String name) {
+      return new EnumBuilder<R>(context, names, name);
+    }
+
+    @Override
+    protected EnumBuilder<R> self() {
+      return this;
+    }
+
+    /** Configure this enum type's symbols, and end its configuration. **/
+    public R symbols(String... symbols) {
+      Schema schema = Schema.createEnum(name(), doc(), space(),
+          Arrays.asList(symbols));
+      completeSchema(schema);
+      return context().complete(schema);
+    }
+
+  }
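+  // For example (name and symbols illustrative):
+  //
+  //   Schema suit = SchemaBuilder.enumeration("Suit")
+  //       .symbols("SPADES", "HEARTS", "DIAMONDS", "CLUBS");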
+  
+  /**
+   * Builds an Avro Map type with optional properties.
+   * <p/>
+   * Set properties with {@link #prop(String, String)}.
+   * <p/>
+   * The Map schema's properties are finalized when {@link #values()} or
+   * {@link #values(Schema)} is called.
+   **/
+  public static final class MapBuilder<R> extends PropBuilder<MapBuilder<R>> {
+    private final Completion<R> context;
+    private final NameContext names;
+
+    private MapBuilder(Completion<R> context, NameContext names) {
+      this.context = context;
+      this.names = names;
+    }
+
+    private static <R> MapBuilder<R> create(Completion<R> context,
+        NameContext names) {
+      return new MapBuilder<R>(context, names);
+    }
+
+    @Override
+    protected MapBuilder<R> self() {
+      return this;
+    }
+
+    /**
+     * Return a type builder for configuring the map's nested values schema.
+     * This builder will return control to the map's enclosing context when
+     * complete.
+     **/
+    public TypeBuilder<R> values() {
+      return new TypeBuilder<R>(new MapCompletion<R>(this, context), names);
+    }
+
+    /**
+     * Complete configuration of this map, setting the schema of the map values
+     * to the schema provided. Returns control to the enclosing context.
+     **/
+    public R values(Schema valueSchema) {
+      return new MapCompletion<R>(this, context).complete(valueSchema);
+    }
+  }
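+  // The two completion styles above are equivalent, sketched:
+  //
+  //   SchemaBuilder.map().values().longType();
+  //   SchemaBuilder.map().values(Schema.create(Schema.Type.LONG));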
+
+  /**
+   * Builds an Avro Array type with optional properties.
+   * <p/>
+   * Set properties with {@link #prop(String, String)}.
+   * <p/>
+   * The Array schema's properties are finalized when {@link #items()} or
+   * {@link #items(Schema)} is called.
+   **/
+  public static final class ArrayBuilder<R> extends
+      PropBuilder<ArrayBuilder<R>> {
+    private final Completion<R> context;
+    private final NameContext names;
+
+    public ArrayBuilder(Completion<R> context, NameContext names) {
+      this.context = context;
+      this.names = names;
+    }
+
+    private static <R> ArrayBuilder<R> create(Completion<R> context,
+        NameContext names) {
+      return new ArrayBuilder<R>(context, names);
+    }
+
+    @Override
+    protected ArrayBuilder<R> self() {
+      return this;
+    }
+
+    /**
+     * Return a type builder for configuring the array's nested items schema.
+     * This builder will return control to the array's enclosing context when
+     * complete.
+     **/
+    public TypeBuilder<R> items() {
+      return new TypeBuilder<R>(new ArrayCompletion<R>(this, context), names);
+    }
+
+    /**
+     * Complete configuration of this array, setting the schema of the array
+     * items to the schema provided. Returns control to the enclosing context.
+     **/
+    public R items(Schema itemsSchema) {
+      return new ArrayCompletion<R>(this, context).complete(itemsSchema);
+    }
+  }
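+  // Likewise for arrays, sketched:
+  //
+  //   SchemaBuilder.array().items().stringType();
+  //   SchemaBuilder.array().items(Schema.create(Schema.Type.STRING));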
+
+  /**
+   * internal class for passing the naming context around. This allows for the
+   * following: 
+   * <li>Cache and re-use primitive schemas when they do not set
+   * properties.</li>
+   * <li>Provide a default namespace for nested contexts (as
+   * the JSON Schema spec does).</li>
+   * <li>Allow previously defined named types or primitive types 
+   * to be referenced by name.</li>
+   **/
+  private static class NameContext {
+    private static final Set<String> PRIMITIVES = new HashSet<String>();
+    static {
+      PRIMITIVES.add("null");
+      PRIMITIVES.add("boolean");
+      PRIMITIVES.add("int");
+      PRIMITIVES.add("long");
+      PRIMITIVES.add("float");
+      PRIMITIVES.add("double");
+      PRIMITIVES.add("bytes");
+      PRIMITIVES.add("string");
+    }
+    private final HashMap<String, Schema> schemas;
+    private final String namespace;
+    
+    private NameContext() {
+      this.schemas = new HashMap<String, Schema>();
+      this.namespace = null;
+      schemas.put("null", Schema.create(Schema.Type.NULL));
+      schemas.put("boolean", Schema.create(Schema.Type.BOOLEAN));
+      schemas.put("int", Schema.create(Schema.Type.INT));
+      schemas.put("long", Schema.create(Schema.Type.LONG));
+      schemas.put("float", Schema.create(Schema.Type.FLOAT));
+      schemas.put("double", Schema.create(Schema.Type.DOUBLE));
+      schemas.put("bytes", Schema.create(Schema.Type.BYTES));
+      schemas.put("string", Schema.create(Schema.Type.STRING));
+    }
+    
+    private NameContext(HashMap<String, Schema> schemas, String namespace) {
+      this.schemas = schemas;
+      this.namespace = "".equals(namespace) ? null : namespace;
+    }
+    
+    private NameContext namespace(String namespace) {
+      return new NameContext(schemas, namespace);
+    }
+    
+    private Schema get(String name, String namespace) {
+      return getFullname(resolveName(name, namespace));
+    }
+    
+    private Schema getFullname(String fullName) {
+      Schema schema = schemas.get(fullName);
+      if(schema == null) {
+        throw new SchemaParseException("Undefined name: " + fullName);
+      }
+      return schema;
+    }
+    
+    private void put(Schema schema) {
+      String fullName = schema.getFullName();
+      if (schemas.containsKey(fullName)) {
+        throw new SchemaParseException("Can't redefine: " + fullName);
+      }
+      schemas.put(fullName, schema);
+    }
+    
+    private String resolveName(String name, String space) {
+      if (PRIMITIVES.contains(name) && space == null) {
+        return name;
+      }
+      int lastDot = name.lastIndexOf('.');
+      if (lastDot < 0) { // short name
+        if (space == null) {
+          space = namespace;
+        }
+        if (space != null && !"".equals(space)) {
+          return space + "." + name;
+        }
+      } 
+      return name;
+    }
+  }
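+  // resolveName above, sketched with an illustrative default namespace of
+  // "org.example": "Foo" resolves to "org.example.Foo", the qualified
+  // "other.Foo" is returned as-is, and a bare primitive name such as "int"
+  // (with no namespace argument) is returned unchanged.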
+ 
+  /**
+   * A common API for building types within a context. BaseTypeBuilder can build
+   * all types other than Unions. {@link TypeBuilder} can additionally build
+   * Unions.
+   * <p/>
+   * The builder has two contexts:
+   * <li>A naming context provides a default namespace and allows for previously
+   * defined named types to be referenced from {@link #type(String)}</li>
+   * <li>A completion context representing the scope that the builder was
+     * created in. A builder created in a nested context (for example, by
+     * {@link MapBuilder#values()}) will have a completion context assigned by
+     * the {@link MapBuilder}.</li>
+   **/
+  public static class BaseTypeBuilder<R> {
+    private final Completion<R> context;
+    private final NameContext names;
+    
+    private BaseTypeBuilder(Completion<R> context, NameContext names) {
+      this.context = context;
+      this.names = names;
+    }
+    
+    /** Use the schema provided as the type. **/
+    public final R type(Schema schema) {
+      return context.complete(schema);
+    }
+    
+    /**
+     * Look up the type by name. This type must be previously defined in the
+     * context of this builder.
+     * <p/>
+     * The name may be fully qualified or a short name. If it is a short name,
+     * the default namespace of the current context will additionally be
+     * searched.
+     **/
+    public final R type(String name) {
+      return type(name, null);
+    }
+    
+    /**
+     * Look up the type by name and namespace. This type must be previously
+     * defined in the context of this builder.
+     * <p/>
+     * The name may be fully qualified or a short name. If it is a fully
+     * qualified name, the namespace provided is ignored. If it is a short name,
+     * the namespace provided is used if not null, else the default namespace of
+     * the current context will be used.
+     **/
+    public final R type(String name, String namespace) {
+      return type(names.get(name, namespace));
+    }
+
+    /**
+     * A plain boolean type without custom properties. This is equivalent to:
+     * <pre>
+     * booleanBuilder().endBoolean();
+     * </pre>
+     */
+    public final R booleanType() {
+      return booleanBuilder().endBoolean();
+    }
+
+    /**
+     * Build a boolean type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #booleanType()}.
+     */
+    public final BooleanBuilder<R> booleanBuilder() {
+      return BooleanBuilder.create(context, names);
+    }
+
+    /**
+     * A plain int type without custom properties. This is equivalent to:
+     * <pre>
+     * intBuilder().endInt();
+     * </pre>
+     */
+    public final R intType() {
+      return intBuilder().endInt();
+    }
+
+    /**
+     * Build an int type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #intType()}.
+     */
+    public final IntBuilder<R> intBuilder() {
+      return IntBuilder.create(context, names);
+    }
+    
+    /**
+     * A plain long type without custom properties. This is equivalent to:
+     * <pre>
+     * longBuilder().endLong();
+     * </pre>
+     */
+    public final R longType() {
+      return longBuilder().endLong();
+    }
+
+    /**
+     * Build a long type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #longType()}.
+     */
+    public final LongBuilder<R> longBuilder() {
+      return LongBuilder.create(context, names);
+    }
+
+    /**
+     * A plain float type without custom properties. This is equivalent to:
+     * <pre>
+     * floatBuilder().endFloat();
+     * </pre>
+     */
+    public final R floatType() {
+      return floatBuilder().endFloat();
+    }
+
+    /**
+     * Build a float type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #floatType()}.
+     */
+    public final FloatBuilder<R> floatBuilder() {
+      return FloatBuilder.create(context, names);
+    }
+
+    /**
+     * A plain double type without custom properties. This is equivalent to:
+     * <pre>
+     * doubleBuilder().endDouble();
+     * </pre>
+     */
+    public final R doubleType() {
+      return doubleBuilder().endDouble();
+    }
+
+    /**
+     * Build a double type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #doubleType()}.
+     */
+    public final DoubleBuilder<R> doubleBuilder() {
+      return DoubleBuilder.create(context, names);
+    }
+
+    /**
+     * A plain string type without custom properties. This is equivalent to:
+     * <pre>
+     * stringBuilder().endString();
+     * </pre>
+     */
+    public final R stringType() {
+      return stringBuilder().endString();
+    }
+
+    /**
+     * Build a string type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #stringType()}.
+     */
+    public final StringBldr<R> stringBuilder() {
+      return StringBldr.create(context, names);
+    }
+
+    /**
+     * A plain bytes type without custom properties. This is equivalent to:
+     * <pre>
+     * bytesBuilder().endBytes();
+     * </pre>
+     */
+    public final R bytesType() {
+      return bytesBuilder().endBytes();
+    }
+
+    /**
+     * Build a bytes type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #bytesType()}.
+     */
+    public final BytesBuilder<R> bytesBuilder() {
+      return BytesBuilder.create(context, names);
+    }
+
+    /**
+     * A plain null type without custom properties. This is equivalent to:
+     * <pre>
+     * nullBuilder().endNull();
+     * </pre>
+     */
+    public final R nullType() {
+      return nullBuilder().endNull();
+    }
+
+    /**
+     * Build a null type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #nullType()}.
+     */
+    public final NullBuilder<R> nullBuilder() {
+      return NullBuilder.create(context, names);
+    }
+
+    /** Build an Avro map type. Example usage:
+     * <pre>
+     * map().values().intType()
+     * </pre>
+     * Equivalent to Avro JSON Schema:
+     * <pre>
+     * {"type":"map", "values":"int"}
+     * </pre>
+     **/ 
+    public final MapBuilder<R> map() {
+      return MapBuilder.create(context, names);
+    }
+
+    /** Build an Avro array type. Example usage:
+     * <pre>
+     * array().items().longType()
+     * </pre>
+     * Equivalent to Avro JSON Schema:
+     * <pre>
+     * {"type":"array", "items":"long"}
+     * </pre>
+     **/ 
+    public final ArrayBuilder<R> array() {
+      return ArrayBuilder.create(context, names);
+    }
+
+    /** Build an Avro fixed type. Example usage:
+     * <pre>
+     * fixed("com.foo.IPv4").size(4)
+     * </pre>
+     * Equivalent to Avro JSON Schema:
+     * <pre>
+     * {"type":"fixed", "name":"com.foo.IPv4", "size":4}
+     * </pre>
+     **/ 
+    public final FixedBuilder<R> fixed(String name) {
+      return FixedBuilder.create(context, names, name);
+    }
+    
+    /** Build an Avro enum type. Example usage: 
+     * <pre>
+     * enumeration("Suits").namespace("org.cards").doc("card suit names")
+     *   .symbols("HEART", "SPADE", "DIAMOND", "CLUB")
+     * </pre>
+     * Equivalent to Avro JSON Schema:
+     * <pre>
+     * {"type":"enum", "name":"Suits", "namespace":"org.cards",
+     *  "doc":"card suit names", "symbols":[
+     *    "HEART", "SPADE", "DIAMOND", "CLUB"]}
+     * </pre>
+     **/ 
+    public final EnumBuilder<R> enumeration(String name) {
+      return EnumBuilder.create(context, names, name);
+    }
+
+    /** Build an Avro record type. Example usage:
+     * <pre>
+     * record("com.foo.Foo").fields()
+     *   .name("field1").typeInt().intDefault(1)
+     *   .name("field2").typeString().noDefault()
+     *   .name("field3").optional().typeFixed("FooFixed").size(4)
+     *   .endRecord()
+     * </pre>
+     * Equivalent to Avro JSON Schema:
+     * <pre>
+     * {"type":"record", "name":"com.foo.Foo", "fields": [
+     *   {"name":"field1", "type":"int", "default":1},
+     *   {"name":"field2", "type":"string"},
+     *   {"name":"field3", "type":[
+     *     null, {"type":"fixed", "name":"FooFixed", "size":4}
+     *     ]}
+     *   ]}
+     * </pre>
+     **/ 
+    public final RecordBuilder<R> record(String name) {
+      return RecordBuilder.create(context, names, name); 
+    }
+    
+    /** Build an Avro union schema type. Example usage:
+     * <pre>unionOf().stringType().and().bytesType().endUnion()</pre>
+     **/ 
+    protected BaseTypeBuilder<UnionAccumulator<R>> unionOf() {
+      return UnionBuilder.create(context, names);
+    }
+    
+    /** A shortcut for building a union of a type and null.
+     * <p/>
+     * For example, the code snippets below are equivalent:
+     * <pre>nullable().booleanType()</pre>
+     * <pre>unionOf().booleanType().and().nullType().endUnion()</pre>
+     **/
+    protected BaseTypeBuilder<R> nullable() {
+      return new BaseTypeBuilder<R>(new NullableCompletion<R>(context), names);
+    }
+    
+  }
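+
+  // Example (illustrative): a few complete chains through BaseTypeBuilder,
+  // assuming a TypeBuilder<Schema> obtained from the static factory methods
+  // defined earlier in this file:
+  //
+  //   Schema mapOfInt   = SchemaBuilder.builder().map().values().intType();
+  //   Schema arrayOfStr = SchemaBuilder.builder().array().items().stringType();
+  //
+  // yielding {"type":"map","values":"int"} and {"type":"array","items":"string"}.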
+
+  /** A Builder for creating any Avro schema type. 
+   **/
+  public static final class TypeBuilder<R> extends BaseTypeBuilder<R> {
+    private TypeBuilder(Completion<R> context, NameContext names) {
+      super(context, names);
+    }
+
+    @Override
+    public BaseTypeBuilder<UnionAccumulator<R>> unionOf() {
+      return super.unionOf();
+    }
+    
+    @Override
+    public BaseTypeBuilder<R> nullable() {
+      return super.nullable();
+    }
+  }
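+
+  // Example (illustrative): TypeBuilder additionally exposes unionOf() and
+  // nullable(), so a top-level union can be built directly; a minimal sketch:
+  //
+  //   Schema u = SchemaBuilder.builder()
+  //       .unionOf().stringType().and().bytesType().endUnion();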
+
+  /** A special builder for unions.  Unions cannot nest unions directly. **/
+  private static final class UnionBuilder<R> extends
+      BaseTypeBuilder<UnionAccumulator<R>> {
+    private UnionBuilder(Completion<R> context, NameContext names) {
+      this(context, names, new ArrayList<Schema>());
+    }
+
+    private static <R> UnionBuilder<R> create(Completion<R> context, NameContext names) {
+      return new UnionBuilder<R>(context, names);
+    }
+
+    private UnionBuilder(Completion<R> context, NameContext names, List<Schema> schemas) {
+      super(new UnionCompletion<R>(context, names, schemas), names);
+    }
+  }
+
+  /**
+   * A special Builder for Record fields. The API is very similar to
+   * {@link BaseTypeBuilder}. However, fields have their own names, properties,
+   * and default values.
+   * <p/>
+   * The methods on this class create builder instances that return their
+   * control to the {@link FieldAssembler} of the enclosing record context after
+   * configuring a default for the field.
+   * <p/>
+   * For example, an int field with default value 1:
+   * <pre>
+   * intSimple().withDefault(1);
+   * </pre>
+   * or an array with items that are optional int types:
+   * <pre>
+   * array().items().optional().intType();
+   * </pre>
+   */
+  public static class BaseFieldTypeBuilder<R> {
+    protected final FieldBuilder<R> bldr;
+    protected final NameContext names;
+    private final CompletionWrapper wrapper;
+
+    protected BaseFieldTypeBuilder(FieldBuilder<R> bldr, CompletionWrapper wrapper) {
+      this.bldr = bldr;
+      this.names = bldr.names();
+      this.wrapper = wrapper;
+    }
+    
+    /**
+     * A plain boolean type without custom properties. This is equivalent to:
+     * <pre>
+     * booleanBuilder().endBoolean();
+     * </pre>
+     */
+    public final BooleanDefault<R> booleanType() {
+      return booleanBuilder().endBoolean();
+    }
+
+    /**
+     * Build a boolean type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #booleanType()}.
+     */
+    public final BooleanBuilder<BooleanDefault<R>> booleanBuilder() {
+      return BooleanBuilder.create(wrap(new BooleanDefault<R>(bldr)), names);
+    }
+
+    /**
+     * A plain int type without custom properties. This is equivalent to:
+     * <pre>
+     * intBuilder().endInt();
+     * </pre>
+     */
+    public final IntDefault<R> intType() {
+      return intBuilder().endInt();
+    }
+
+    /**
+     * Build an int type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #intType()}.
+     */
+    public final IntBuilder<IntDefault<R>> intBuilder() {
+      return IntBuilder.create(wrap(new IntDefault<R>(bldr)), names);
+    }
+    
+    /**
+     * A plain long type without custom properties. This is equivalent to:
+     * <pre>
+     * longBuilder().endLong();
+     * </pre>
+     */
+    public final LongDefault<R> longType() {
+      return longBuilder().endLong();
+    }
+
+    /**
+     * Build a long type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #longType()}.
+     */
+    public final LongBuilder<LongDefault<R>> longBuilder() {
+      return LongBuilder.create(wrap(new LongDefault<R>(bldr)), names);
+    }
+
+    /**
+     * A plain float type without custom properties. This is equivalent to:
+     * <pre>
+     * floatBuilder().endFloat();
+     * </pre>
+     */
+    public final FloatDefault<R> floatType() {
+      return floatBuilder().endFloat();
+    }
+
+    /**
+     * Build a float type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #floatType()}.
+     */
+    public final FloatBuilder<FloatDefault<R>> floatBuilder() {
+      return FloatBuilder.create(wrap(new FloatDefault<R>(bldr)), names);
+    }
+
+    /**
+     * A plain double type without custom properties. This is equivalent to:
+     * <pre>
+     * doubleBuilder().endDouble();
+     * </pre>
+     */
+    public final DoubleDefault<R> doubleType() {
+      return doubleBuilder().endDouble();
+    }
+
+    /**
+     * Build a double type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #doubleType()}.
+     */
+    public final DoubleBuilder<DoubleDefault<R>> doubleBuilder() {
+      return DoubleBuilder.create(wrap(new DoubleDefault<R>(bldr)), names);
+    }
+
+    /**
+     * A plain string type without custom properties. This is equivalent to:
+     * <pre>
+     * stringBuilder().endString();
+     * </pre>
+     */
+    public final StringDefault<R> stringType() {
+      return stringBuilder().endString();
+    }
+
+    /**
+     * Build a string type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #stringType()}.
+     */
+    public final StringBldr<StringDefault<R>> stringBuilder() {
+      return StringBldr.create(wrap(new StringDefault<R>(bldr)), names);
+    }
+
+    /**
+     * A plain bytes type without custom properties. This is equivalent to:
+     * <pre>
+     * bytesBuilder().endBytes();
+     * </pre>
+     */
+    public final BytesDefault<R> bytesType() {
+      return bytesBuilder().endBytes();
+    }
+
+    /**
+     * Build a bytes type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #bytesType()}.
+     */
+    public final BytesBuilder<BytesDefault<R>> bytesBuilder() {
+      return BytesBuilder.create(wrap(new BytesDefault<R>(bldr)), names);
+    }
+
+    /**
+     * A plain null type without custom properties. This is equivalent to:
+     * <pre>
+     * nullBuilder().endNull();
+     * </pre>
+     */
+    public final NullDefault<R> nullType() {
+      return nullBuilder().endNull();
+    }
+
+    /**
+     * Build a null type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #nullType()}.
+     */
+    public final NullBuilder<NullDefault<R>> nullBuilder() {
+      return NullBuilder.create(wrap(new NullDefault<R>(bldr)), names);
+    }
+
+    /** Build an Avro map type **/ 
+    public final MapBuilder<MapDefault<R>> map() {
+      return MapBuilder.create(wrap(new MapDefault<R>(bldr)), names);
+    }
+
+    /** Build an Avro array type **/ 
+    public final ArrayBuilder<ArrayDefault<R>> array() {
+      return ArrayBuilder.create(wrap(new ArrayDefault<R>(bldr)), names);
+    }
+
+    /** Build an Avro fixed type. **/ 
+    public final FixedBuilder<FixedDefault<R>> fixed(String name) {
+      return FixedBuilder.create(wrap(new FixedDefault<R>(bldr)), names, name);
+    }
+    
+    /** Build an Avro enum type. **/ 
+    public final EnumBuilder<EnumDefault<R>> enumeration(String name) {
+      return EnumBuilder.create(wrap(new EnumDefault<R>(bldr)), names, name);
+    }
+
+    /** Build an Avro record type. **/ 
+    public final RecordBuilder<RecordDefault<R>> record(String name) {
+      return RecordBuilder.create(wrap(new RecordDefault<R>(bldr)), names, name); 
+    }
+    
+    private <C> Completion<C> wrap(
+       Completion<C> completion) {
+      if (wrapper != null) {
+        return wrapper.wrap(completion);
+      }
+      return completion;
+    }
+  }
+  
+  /** FieldTypeBuilder adds {@link #unionOf()}, {@link #nullable()}, and {@link #optional()}
+   * to BaseFieldTypeBuilder. **/
+  public static final class FieldTypeBuilder<R> extends BaseFieldTypeBuilder<R> {
+    private FieldTypeBuilder(FieldBuilder<R> bldr) {
+      super(bldr, null);
+    }
+
+    /** Build an Avro union schema type. **/ 
+    public UnionFieldTypeBuilder<R> unionOf() {
+      return new UnionFieldTypeBuilder<R>(bldr);
+    }
+
+    /**
+     * A shortcut for building a union of a type and null, with an optional default
+     * value of the non-null type.
+     * <p/>
+     * For example, the two code snippets below are equivalent:
+     * <pre>nullable().booleanType().booleanDefault(true)</pre>
+     * <pre>unionOf().booleanType().and().nullType().endUnion().booleanDefault(true)</pre>
+     **/
+    public BaseFieldTypeBuilder<R> nullable() {
+      return new BaseFieldTypeBuilder<R>(bldr, new NullableCompletionWrapper());
+    }
+
+    /**
+     * A shortcut for building a union of null and a type, with a null default.
+     * <p/>
+     * For example, the two code snippets below are equivalent:
+     * <pre>optional().booleanType()</pre>
+     * <pre>unionOf().nullType().and().booleanType().endUnion().nullDefault()</pre>
+     */
+    public BaseTypeBuilder<FieldAssembler<R>> optional() {
+      return new BaseTypeBuilder<FieldAssembler<R>>(
+          new OptionalCompletion<R>(bldr), names);
+    }
+  }
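+
+  // Example (illustrative): the difference between optional() and nullable()
+  // for a field, following the equivalences documented above:
+  //
+  //   .name("a").type().optional().intType()               // ["null","int"], default null
+  //   .name("b").type().nullable().intType().intDefault(1) // ["int","null"], default 1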
+
+  /** Builder for a union field.  The first type in the union corresponds
+   * to the possible default value type.
+   */
+  public static final class UnionFieldTypeBuilder<R> {
+    private final FieldBuilder<R> bldr;
+    private final NameContext names;
+
+    private UnionFieldTypeBuilder(FieldBuilder<R> bldr) {
+      this.bldr = bldr;
+      this.names = bldr.names();
+    }
+    
+    /**
+     * A plain boolean type without custom properties. This is equivalent to:
+     * <pre>
+     * booleanBuilder().endBoolean();
+     * </pre>
+     */
+    public UnionAccumulator<BooleanDefault<R>> booleanType() {
+      return booleanBuilder().endBoolean();
+    }
+
+    /**
+     * Build a boolean type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #booleanType()}.
+     */
+    public BooleanBuilder<UnionAccumulator<BooleanDefault<R>>> booleanBuilder() {
+      return BooleanBuilder.create(completion(new BooleanDefault<R>(bldr)), names);
+    }
+
+    /**
+     * A plain int type without custom properties. This is equivalent to:
+     * <pre>
+     * intBuilder().endInt();
+     * </pre>
+     */
+    public UnionAccumulator<IntDefault<R>> intType() {
+      return intBuilder().endInt();
+    }
+
+    /**
+     * Build an int type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #intType()}.
+     */
+    public IntBuilder<UnionAccumulator<IntDefault<R>>> intBuilder() {
+      return IntBuilder.create(completion(new IntDefault<R>(bldr)), names);
+    }
+    
+    /**
+     * A plain long type without custom properties. This is equivalent to:
+     * <pre>
+     * longBuilder().endLong();
+     * </pre>
+     */
+    public UnionAccumulator<LongDefault<R>> longType() {
+      return longBuilder().endLong();
+    }
+
+    /**
+     * Build a long type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #longType()}.
+     */
+    public LongBuilder<UnionAccumulator<LongDefault<R>>> longBuilder() {
+      return LongBuilder.create(completion(new LongDefault<R>(bldr)), names);
+    }
+
+    /**
+     * A plain float type without custom properties. This is equivalent to:
+     * <pre>
+     * floatBuilder().endFloat();
+     * </pre>
+     */
+    public UnionAccumulator<FloatDefault<R>> floatType() {
+      return floatBuilder().endFloat();
+    }
+
+    /**
+     * Build a float type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #floatType()}.
+     */
+    public FloatBuilder<UnionAccumulator<FloatDefault<R>>> floatBuilder() {
+      return FloatBuilder.create(completion(new FloatDefault<R>(bldr)), names);
+    }
+
+    /**
+     * A plain double type without custom properties. This is equivalent to:
+     * <pre>
+     * doubleBuilder().endDouble();
+     * </pre>
+     */
+    public UnionAccumulator<DoubleDefault<R>> doubleType() {
+      return doubleBuilder().endDouble();
+    }
+
+    /**
+     * Build a double type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #doubleType()}.
+     */
+    public DoubleBuilder<UnionAccumulator<DoubleDefault<R>>> doubleBuilder() {
+      return DoubleBuilder.create(completion(new DoubleDefault<R>(bldr)), names);
+    }
+
+    /**
+     * A plain string type without custom properties. This is equivalent to:
+     * <pre>
+     * stringBuilder().endString();
+     * </pre>
+     */
+    public UnionAccumulator<StringDefault<R>> stringType() {
+      return stringBuilder().endString();
+    }
+
+    /**
+     * Build a string type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #stringType()}.
+     */
+    public StringBldr<UnionAccumulator<StringDefault<R>>> stringBuilder() {
+      return StringBldr.create(completion(new StringDefault<R>(bldr)), names);
+    }
+
+    /**
+     * A plain bytes type without custom properties. This is equivalent to:
+     * <pre>
+     * bytesBuilder().endBytes();
+     * </pre>
+     */
+    public UnionAccumulator<BytesDefault<R>> bytesType() {
+      return bytesBuilder().endBytes();
+    }
+
+    /**
+     * Build a bytes type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #bytesType()}.
+     */
+    public BytesBuilder<UnionAccumulator<BytesDefault<R>>> bytesBuilder() {
+      return BytesBuilder.create(completion(new BytesDefault<R>(bldr)), names);
+    }
+
+    /**
+     * A plain null type without custom properties. This is equivalent to:
+     * <pre>
+     * nullBuilder().endNull();
+     * </pre>
+     */
+    public UnionAccumulator<NullDefault<R>> nullType() {
+      return nullBuilder().endNull();
+    }
+
+    /**
+     * Build a null type that can set custom properties. If custom properties
+     * are not needed it is simpler to use {@link #nullType()}.
+     */
+    public NullBuilder<UnionAccumulator<NullDefault<R>>> nullBuilder() {
+      return NullBuilder.create(completion(new NullDefault<R>(bldr)), names);
+    }
+
+    /** Build an Avro map type **/ 
+    public MapBuilder<UnionAccumulator<MapDefault<R>>> map() {
+      return MapBuilder.create(completion(new MapDefault<R>(bldr)), names);
+    }
+
+    /** Build an Avro array type **/ 
+    public ArrayBuilder<UnionAccumulator<ArrayDefault<R>>> array() {
+      return ArrayBuilder.create(completion(new ArrayDefault<R>(bldr)), names);
+    }
+
+    /** Build an Avro fixed type. **/ 
+    public FixedBuilder<UnionAccumulator<FixedDefault<R>>> fixed(String name) {
+      return FixedBuilder.create(completion(new FixedDefault<R>(bldr)), names, name);
+    }
+    
+    /** Build an Avro enum type. **/ 
+    public EnumBuilder<UnionAccumulator<EnumDefault<R>>> enumeration(String name) {
+      return EnumBuilder.create(completion(new EnumDefault<R>(bldr)), names, name);
+    }
+
+    /** Build an Avro record type. **/ 
+    public RecordBuilder<UnionAccumulator<RecordDefault<R>>> record(String name) {
+      return RecordBuilder.create(completion(new RecordDefault<R>(bldr)), names, name); 
+    }
+    
+    private <C> UnionCompletion<C> completion(Completion<C> context) {
+      return new UnionCompletion<C>(context, names, new ArrayList<Schema>());
+    }
+  }
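+
+  // Example (illustrative): a union field whose first branch carries the
+  // default value, per the class comment above:
+  //
+  //   .name("id").type().unionOf().intType().and().nullType().endUnion().intDefault(0)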
+
+  public final static class RecordBuilder<R> extends
+      NamespacedBuilder<R, RecordBuilder<R>> {
+    private RecordBuilder(Completion<R> context, NameContext names, String name) {
+      super(context, names, name);
+    }
+
+    private static <R> RecordBuilder<R> create(Completion<R> context,
+        NameContext names, String name) {
+      return new RecordBuilder<R>(context, names, name);
+    }
+
+    @Override
+    protected RecordBuilder<R> self() {
+      return this;
+    }
+
+    public FieldAssembler<R> fields() {
+      Schema record = Schema.createRecord(name(), doc(), space(), false);
+      // place the record in the name context, fields yet to be set.
+      completeSchema(record);
+      return new FieldAssembler<R>(
+          context(), names().namespace(record.getNamespace()), record);
+    }
+  }
+
+  public final static class FieldAssembler<R> {
+    private final List<Field> fields = new ArrayList<Field>();
+    private final Completion<R> context;
+    private final NameContext names;
+    private final Schema record;
+
+    private FieldAssembler(Completion<R> context, NameContext names, Schema record) {
+      this.context = context;
+      this.names = names;
+      this.record = record;
+    }
+
+    /**
+     * Add a field with the given name.
+     * @return A {@link FieldBuilder} for the given name.
+     */
+    public FieldBuilder<R> name(String fieldName) {
+      return new FieldBuilder<R>(this, names, fieldName);
+    }
+    
+    /**
+     * Shortcut for creating a boolean field with the given name and no default.
+     * <p/>This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().booleanType().noDefault()
+     * </pre>
+     */
+    public FieldAssembler<R> requiredBoolean(String fieldName) {
+      return name(fieldName).type().booleanType().noDefault();
+    }
+    
+    /**
+     * Shortcut for creating an optional boolean field: a union of null and 
+     * boolean with null default.<p/>
+     * This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().optional().booleanType()
+     * </pre>
+     */
+    public FieldAssembler<R> optionalBoolean(String fieldName) {
+      return name(fieldName).type().optional().booleanType();
+    }
+    
+    /**
+     * Shortcut for creating a nullable boolean field: a union of boolean and
+     * null with a boolean default.
+     * <p/>
+     * This is equivalent to:
+     * 
+     * <pre>
+     * name(fieldName).type().nullable().booleanType().booleanDefault(defaultVal)
+     * </pre>
+     */
+    public FieldAssembler<R> nullableBoolean(String fieldName, boolean defaultVal) {
+      return name(fieldName)
+          .type().nullable().booleanType().booleanDefault(defaultVal);
+    }
+
+    /**
+     * Shortcut for creating an int field with the given name and no default.
+     * <p/>This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().intType().noDefault()
+     * </pre>
+     */
+    public FieldAssembler<R> requiredInt(String fieldName) {
+      return name(fieldName).type().intType().noDefault();
+    }
+    
+    /**
+     * Shortcut for creating an optional int field: a union of null and int
+     * with null default.<p/>
+     * This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().optional().intType()
+     * </pre>
+     */
+    public FieldAssembler<R> optionalInt(String fieldName) {
+      return name(fieldName).type().optional().intType();
+    }
+    
+    /**
+     * Shortcut for creating a nullable int field: a union of int and null
+     * with an int default.<p/>
+     * This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().nullable().intType().intDefault(defaultVal)
+     * </pre>
+     */
+    public FieldAssembler<R> nullableInt(String fieldName, int defaultVal) {
+      return name(fieldName).type().nullable().intType().intDefault(defaultVal);
+    }
+
+    /**
+     * Shortcut for creating a long field with the given name and no default.
+     * <p/>This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().longType().noDefault()
+     * </pre>
+     */
+    public FieldAssembler<R> requiredLong(String fieldName) {
+      return name(fieldName).type().longType().noDefault();
+    }
+    
+    /**
+     * Shortcut for creating an optional long field: a union of null and long
+     * with null default.<p/>
+     * This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().optional().longType()
+     * </pre>
+     */
+    public FieldAssembler<R> optionalLong(String fieldName) {
+      return name(fieldName).type().optional().longType();
+    }
+    
+    /**
+     * Shortcut for creating a nullable long field: a union of long and null
+     * with a long default.<p/>
+     * This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().nullable().longType().longDefault(defaultVal)
+     * </pre>
+     */
+    public FieldAssembler<R> nullableLong(String fieldName, long defaultVal) {
+      return name(fieldName).type().nullable().longType().longDefault(defaultVal);
+    }
+    
+    /**
+     * Shortcut for creating a float field with the given name and no default.
+     * <p/>This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().floatType().noDefault()
+     * </pre>
+     */
+    public FieldAssembler<R> requiredFloat(String fieldName) {
+      return name(fieldName).type().floatType().noDefault();
+    }
+    
+    /**
+     * Shortcut for creating an optional float field: a union of null and float
+     * with null default.<p/>
+     * This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().optional().floatType()
+     * </pre>
+     */
+    public FieldAssembler<R> optionalFloat(String fieldName) {
+      return name(fieldName).type().optional().floatType();
+    }
+    
+    /**
+     * Shortcut for creating a nullable float field: a union of float and null
+     * with a float default.<p/>
+     * This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().nullable().floatType().floatDefault(defaultVal)
+     * </pre>
+     */
+    public FieldAssembler<R> nullableFloat(String fieldName, float defaultVal) {
+      return name(fieldName).type().nullable().floatType().floatDefault(defaultVal);
+    }
+
+    /**
+     * Shortcut for creating a double field with the given name and no default.
+     * <p/>This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().doubleType().noDefault()
+     * </pre>
+     */
+    public FieldAssembler<R> requiredDouble(String fieldName) {
+      return name(fieldName).type().doubleType().noDefault();
+    }
+    
+    /**
+     * Shortcut for creating an optional double field: a union of null and double
+     * with null default.<p/>
+     * This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().optional().doubleType()
+     * </pre>
+     */
+    public FieldAssembler<R> optionalDouble(String fieldName) {
+      return name(fieldName).type().optional().doubleType();
+    }
+    
+    /**
+     * Shortcut for creating a nullable double field: a union of double and null
+     * with a double default.<p/>
+     * This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().nullable().doubleType().doubleDefault(defaultVal)
+     * </pre>
+     */
+    public FieldAssembler<R> nullableDouble(String fieldName, double defaultVal) {
+      return name(fieldName).type().nullable().doubleType().doubleDefault(defaultVal);
+    }
+    
+    /**
+     * Shortcut for creating a string field with the given name and no default.
+     * <p/>This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().stringType().noDefault()
+     * </pre>
+     */
+    public FieldAssembler<R> requiredString(String fieldName) {
+      return name(fieldName).type().stringType().noDefault();
+    }
+    
+    /**
+     * Shortcut for creating an optional string field: a union of null and string
+     * with null default.<p/>
+     * This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().optional().stringType()
+     * </pre>
+     */
+    public FieldAssembler<R> optionalString(String fieldName) {
+      return name(fieldName).type().optional().stringType();
+    }
+    
+    /**
+     * Shortcut for creating a nullable string field: a union of string and null
+     * with a string default.<p/>
+     * This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().nullable().stringType().stringDefault(defaultVal)
+     * </pre>
+     */
+    public FieldAssembler<R> nullableString(String fieldName, String defaultVal) {
+      return name(fieldName).type().nullable().stringType().stringDefault(defaultVal);
+    }
+
+    /**
+     * Shortcut for creating a bytes field with the given name and no default.
+     * <p/>This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().bytesType().noDefault()
+     * </pre>
+     */
+    public FieldAssembler<R> requiredBytes(String fieldName) {
+      return name(fieldName).type().bytesType().noDefault();
+    }
+    
+    /**
+     * Shortcut for creating an optional bytes field: a union of null and bytes
+     * with null default.<p/>
+     * This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().optional().bytesType()
+     * </pre>
+     */
+    public FieldAssembler<R> optionalBytes(String fieldName) {
+      return name(fieldName).type().optional().bytesType();
+    }
+    
+    /**
+     * Shortcut for creating a nullable bytes field: a union of bytes and null
+     * with a bytes default.<p/>
+     * This is equivalent to:
+     * <pre>
+     *   name(fieldName).type().nullable().bytesType().bytesDefault(defaultVal)
+     * </pre>
+     */
+    public FieldAssembler<R> nullableBytes(String fieldName, byte[] defaultVal) {
+      return name(fieldName).type().nullable().bytesType().bytesDefault(defaultVal);
+    }
+
+    /**
+     * End adding fields to this record, returning control
+     * to the context that this record builder was created in.
+     */
+    public R endRecord() {
+      record.setFields(fields);
+      return context.complete(record);
+    }
+
+    private FieldAssembler<R> addField(Field field) {
+      fields.add(field);
+      return this;
+    }
+    
+  }
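+
+  // Example (illustrative): assembling a record with the shortcut methods above,
+  // assuming the static record(...) factory defined earlier in this file:
+  //
+  //   Schema user = SchemaBuilder.record("User").fields()
+  //       .requiredString("name")
+  //       .optionalInt("age")
+  //       .nullableLong("visits", 0L)
+  //       .endRecord();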
+  
+  /**
+   * Builds a Field in the context of a {@link FieldAssembler}.
+   * 
+   * Usage is to first configure any of the optional parameters and then to call one
+   * of the type methods to complete the field.  For example:
+   * <pre>
+   *   .doc("some documentation").orderDescending().type()
+   * </pre>
+   * Optional parameters for a field are doc, order, and aliases.
+   */
+  public final static class FieldBuilder<R> extends
+      NamedBuilder<FieldBuilder<R>> {
+    private final FieldAssembler<R> fields;
+    private Schema.Field.Order order = Schema.Field.Order.ASCENDING;
+
+    private FieldBuilder(FieldAssembler<R> fields, NameContext names, String name) {
+      super(names, name);
+      this.fields = fields;
+    }
+    
+    /** Set this field to have ascending order.  Ascending is the default **/
+    public FieldBuilder<R> orderAscending() {
+      order = Schema.Field.Order.ASCENDING;
+      return self();
+    }
+    
+    /** Set this field to have descending order. **/
+    public FieldBuilder<R> orderDescending() {
+      order = Schema.Field.Order.DESCENDING;
+      return self();
+    }
+
+    /** Set this field to ignore order.  **/
+    public FieldBuilder<R> orderIgnore() {
+      order = Schema.Field.Order.IGNORE;
+      return self();
+    }
+    
+    /**
+     * Final step in configuring this field, finalizing name, namespace, alias,
+     * and order.
+     * @return A builder for the field's type and default value.
+     */
+    public FieldTypeBuilder<R> type() {
+      return new FieldTypeBuilder<R>(this);
+    }
+
+    /**
+     * Final step in configuring this field, finalizing name, namespace, alias,
+     * and order.  Sets the field's type to the provided schema, returns a 
+     * {@link GenericDefault}.
+     */
+    public GenericDefault<R> type(Schema type) {
+      return new GenericDefault<R>(this, type);
+    }
+
+    /**
+     * Final step in configuring this field, finalizing name, namespace, alias,
+     * and order. Sets the field's type to the schema by name reference.
+     * <p/>
+     * The name must correspond with a named schema that has already been
+     * created in the context of this builder. The name may be a fully qualified
+     * name, or a short name. If it is a short name, the namespace context of
+     * this builder will be used.
+     * <p/>
+     * The name and namespace context rules are the same as the Avro schema JSON
+     * specification.
+     */
+    public GenericDefault<R> type(String name) {
+      return type(name, null);
+    }
+
+    /**
+     * Final step in configuring this field, finalizing name, namespace, alias,
+     * and order. Sets the field's type to the schema by name reference.
+     * <p/>
+     * The name must correspond with a named schema that has already been
+     * created in the context of this builder. The name may be a fully qualified
+     * name, or a short name. If it is a full name, the namespace is ignored. If
+     * it is a short name, the namespace provided is used. If the namespace
+     * provided is null, the namespace context of this builder will be used.
+     * <p/>
+     * The name and namespace context rules are the same as the Avro schema JSON
+     * specification.
+     */
+    public GenericDefault<R> type(String name, String namespace) {
+      Schema schema = names().get(name, namespace);
+      return type(schema);
+    }
+    
+    private FieldAssembler<R> completeField(Schema schema, Object defaultVal) {
+      JsonNode defaultNode = toJsonNode(defaultVal);
+      return completeField(schema, defaultNode);
+    }
+    
+    private FieldAssembler<R> completeField(Schema schema) {
+      return completeField(schema, null);
+    }
+    
+    private FieldAssembler<R> completeField(Schema schema, JsonNode defaultVal) {
+      Field field = new Field(name(), schema, doc(), defaultVal, order);
+      addPropsTo(field);
+      addAliasesTo(field);
+      return fields.addField(field);
+    }
+
+    @Override
+    protected FieldBuilder<R> self() {
+      return this;
+    }
+  }
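+
+  // Example (illustrative): configuring optional field parameters before the
+  // terminal type() call; doc(...) here assumes the setter inherited from
+  // NamedBuilder, defined earlier in this file:
+  //
+  //   .name("ts").doc("event time").orderDescending().type().longType().noDefault()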
+    
+  /** Abstract base class for field defaults. **/
+  public static abstract class FieldDefault<R, S extends FieldDefault<R, S>> extends Completion<S> {
+    private final FieldBuilder<R> field;
+    private Schema schema;
+    FieldDefault(FieldBuilder<R> field) {
+      this.field = field;
+    }
+    
+    /** Completes this field with no default value **/
+    public final FieldAssembler<R> noDefault() {
+      return field.completeField(schema);
+    }
+    
+    private FieldAssembler<R> usingDefault(Object defaultVal) {
+      return field.completeField(schema, defaultVal);
+    }
+    
+    @Override
+    final S complete(Schema schema) {
+      this.schema = schema;
+      return self();
+    }
+    
+    abstract S self();
+  }
+  
+  /** Choose whether to use a default value for the field or not. **/
+  public static class BooleanDefault<R> extends FieldDefault<R, BooleanDefault<R>> {
+    private BooleanDefault(FieldBuilder<R> field) {
+      super(field);
+    }
+
+    /** Completes this field with the default value provided **/
+    public final FieldAssembler<R> booleanDefault(boolean defaultVal) {
+      return super.usingDefault(defaultVal);
+    }
+    
+    @Override
+    final BooleanDefault<R> self() {
+      return this;
+    }
+  }
+  
+  /** Choose whether to use a default value for the field or not. **/
+  public static class IntDefault<R> extends FieldDefault<R, IntDefault<R>> {
+    private IntDefault(FieldBuilder<R> field) {
+      super(field);
+    }
+
+    /** Completes this field with the default value provided **/
+    public final FieldAssembler<R> intDefault(int defaultVal) {
+      return super.usingDefault(defaultVal);
+    }
+    
+    @Override
+    final IntDefault<R> self() {
+      return this;
+    }
+  }
+  
+  /** Choose whether to use a default value for the field or not. **/
+  public static class LongDefault<R> extends FieldDefault<R, LongDefault<R>> {
+    private LongDefault(FieldBuilder<R> field) {
+      super(field);
+    }
+
+    /** Completes this field with the default value provided **/
+    public final FieldAssembler<R> longDefault(long defaultVal) {
+      return super.usingDefault(defaultVal);
+    }
+    
+    @Override
+    final LongDefault<R> self() {
+      return this;
+    }
+  }
+
+  /** Choose whether to use a default value for the field or not. **/
+  public static class FloatDefault<R> extends FieldDefault<R, FloatDefault<R>> {
+    private FloatDefault(FieldBuilder<R> field) {
+      super(field);
+    }
+
+    /** Completes this field with the default value provided **/
+    public final FieldAssembler<R> floatDefault(float defaultVal) {
+      return super.usingDefault(defaultVal);
+    }
+    
+    @Override
+    final FloatDefault<R> self() {
+      return this;
+    }
+  }
+
+  /** Choose whether to use a default value for the field or not. **/
+  public static class DoubleDefault<R> extends FieldDefault<R, DoubleDefault<R>> {
+    private DoubleDefault(FieldBuilder<R> field) {
+      super(field);
+    }
+
+    /** Completes this field with the default value provided **/
+    public final FieldAssembler<R> doubleDefault(double defaultVal) {
+      return super.usingDefault(defaultVal);
+    }
+    
+    @Override
+    final DoubleDefault<R> self() {
+      return this;
+    }
+  }
+
+  /** Choose whether to use a default value for the field or not. **/
+  public static class StringDefault<R> extends FieldDefault<R, StringDefault<R>> {
+    private StringDefault(FieldBuilder<R> field) {
+      super(field);
+    }
+
+    /** Completes this field with the default value provided. Cannot be null. **/
+    public final FieldAssembler<R> stringDefault(String defaultVal) {
+      return super.usingDefault(defaultVal);
+    }
+    
+    @Override
+    final StringDefault<R> self() {
+      return this;
+    }
+  }
+
+  /** Choose whether to use a default value for the field or not. **/
+  public static class BytesDefault<R> extends FieldDefault<R, BytesDefault<R>> {
+    private BytesDefault(FieldBuilder<R> field) {
+      super(field);
+    }
+    
+    /** Completes this field with the default value provided, cannot be null **/
+    public final FieldAssembler<R> bytesDefault(byte[] defaultVal) {
+      return super.usingDefault(ByteBuffer.wrap(defaultVal));
+    }
+    
+    /** Completes this field with the default value provided, cannot be null **/
+    public final FieldAssembler<R> bytesDefault(ByteBuffer defaultVal) {
+      return super.usingDefault(defaultVal);
+    }
+    
+    /** Completes this field with the default value provided, cannot be null.
+     * The string is interpreted as a byte[], with each character code point
+     * value equalling the byte value, as in the Avro spec JSON default. **/
+    public final FieldAssembler<R> bytesDefault(String defaultVal) {
+      return super.usingDefault(defaultVal);  
+    }
+    
+    @Override
+    final BytesDefault<R> self() {
+      return this;
+    }
+  }
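+
+  // Note (illustrative): bytesDefault(new byte[] { 0, (byte) 0xFF }) and a
+  // two-character string default whose code points are 0x00 and 0xFF denote the
+  // same bytes; string defaults rely on the ISO-8859-1 byte mapping applied in
+  // toJsonNode() below.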
+
+  /** Choose whether to use a default value for the field or not. **/
+  public static class NullDefault<R> extends FieldDefault<R, NullDefault<R>> {
+    private NullDefault(FieldBuilder<R> field) {
+      super(field);
+    }
+
+    /** Completes this field with a default value of null **/
+    public final FieldAssembler<R> nullDefault() {
+      return super.usingDefault(null);
+    }
+    
+    @Override
+    final NullDefault<R> self() {
+      return this;
+    }
+  }
+  
+  /** Choose whether to use a default value for the field or not. **/
+  public static class MapDefault<R> extends FieldDefault<R, MapDefault<R>> {
+    private MapDefault(FieldBuilder<R> field) {
+      super(field);
+    }
+    
+    /** Completes this field with the default value provided, cannot be null **/
+    public final <K, V> FieldAssembler<R> mapDefault(Map<K, V> defaultVal) {
+      return super.usingDefault(defaultVal);
+    }
+    
+    @Override
+    final MapDefault<R> self() {
+      return this;
+    }
+  }
+  
+  /** Choose whether to use a default value for the field or not. **/
+  public static class ArrayDefault<R> extends FieldDefault<R, ArrayDefault<R>> {
+    private ArrayDefault(FieldBuilder<R> field) {
+      super(field);
+    }
+    
+    /** Completes this field with the default value provided, cannot be null **/
+    public final <V> FieldAssembler<R> arrayDefault(List<V> defaultVal) {
+      return super.usingDefault(defaultVal);
+    }
+    
+    @Override
+    final ArrayDefault<R> self() {
+      return this;
+    }
+  }
+  
+  /** Choose whether to use a default value for the field or not. **/
+  public static class FixedDefault<R> extends FieldDefault<R, FixedDefault<R>> {
+    private FixedDefault(FieldBuilder<R> field) {
+      super(field);
+    }
+    
+    /** Completes this field with the default value provided, cannot be null **/
+    public final FieldAssembler<R> fixedDefault(byte[] defaultVal) {
+      return super.usingDefault(ByteBuffer.wrap(defaultVal));
+    }
+    
+    /** Completes this field with the default value provided, cannot be null **/
+    public final FieldAssembler<R> fixedDefault(ByteBuffer defaultVal) {
+      return super.usingDefault(defaultVal);
+    }
+    
+    /** Completes this field with the default value provided, cannot be null.
+     * The string is interpreted as a byte[], with each character code point
+     * value equalling the byte value, as in the Avro spec JSON default. **/
+    public final FieldAssembler<R> fixedDefault(String defaultVal) {
+      return super.usingDefault(defaultVal);  
+    }
+    
+    @Override
+    final FixedDefault<R> self() {
+      return this;
+    }
+  }
+  
+  /** Choose whether to use a default value for the field or not. **/
+  public static class EnumDefault<R> extends FieldDefault<R, EnumDefault<R>> {
+    private EnumDefault(FieldBuilder<R> field) {
+      super(field);
+    }
+    
+    /** Completes this field with the default value provided, cannot be null **/
+    public final FieldAssembler<R> enumDefault(String defaultVal) {
+      return super.usingDefault(defaultVal);
+    }
+    
+    @Override
+    final EnumDefault<R> self() {
+      return this;
+    }
+  }
+  
+  /** Choose whether to use a default value for the field or not. **/
+  public static class RecordDefault<R> extends FieldDefault<R, RecordDefault<R>> {
+    private RecordDefault(FieldBuilder<R> field) {
+      super(field);
+    }
+    
+    /** Completes this field with the default value provided, cannot be null **/
+    public final FieldAssembler<R> recordDefault(GenericRecord defaultVal) {
+      return super.usingDefault(defaultVal);
+    }
+    
+    @Override
+    final RecordDefault<R> self() {
+      return this;
+    }
+  }
+  
+  public final static class GenericDefault<R> {
+    private final FieldBuilder<R> field;
+    private final Schema schema;
+    private GenericDefault(FieldBuilder<R> field, Schema schema) {
+      this.field = field;
+      this.schema = schema;
+    }
+    
+    /** Do not use a default value for this field. **/
+    public FieldAssembler<R> noDefault() {
+      return field.completeField(schema);
+    }
+    
+    /** Completes this field with the default value provided.
+     * The value must conform to the schema of the field. **/
+    public FieldAssembler<R> withDefault(Object defaultVal) {
+      return field.completeField(schema, defaultVal);
+    }
+  }
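+
+  // Example (illustrative): attaching a pre-built Schema to a field via
+  // type(Schema), which hands back a GenericDefault:
+  //
+  //   Schema suit = ...; // e.g. a previously built enum schema
+  //   .name("suit").type(suit).withDefault("SPADE")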
+  
+  /** 
+   * Completion<R> is for internal builder use; all subclasses are private.
+   * 
+   * Completion is an object that takes a Schema and returns some result.
+   */
+  private abstract static class Completion<R> {
+    abstract R complete(Schema schema);
+  }
+  
+  private static class SchemaCompletion extends Completion<Schema> {
+    @Override
+    protected Schema complete(Schema schema) {
+      return schema;
+    }
+  }
+  
+  private static final Schema NULL_SCHEMA = Schema.create(Schema.Type.NULL);
+  
+  private static class NullableCompletion<R> extends Completion<R> {
+    private final Completion<R> context;
+    private NullableCompletion(Completion<R> context) {
+      this.context = context;
+    }
+    @Override
+    protected R complete(Schema schema) {
+      // wrap the schema as a union of the schema and null
+      Schema nullable = Schema.createUnion(Arrays.asList(schema, NULL_SCHEMA));
+      return context.complete(nullable);
+    }
+  }
+  
+  private static class OptionalCompletion<R> extends Completion<FieldAssembler<R>> {
+    private final FieldBuilder<R> bldr;
+    public OptionalCompletion(FieldBuilder<R> bldr) {
+      this.bldr = bldr;
+    }
+    @Override
+    protected FieldAssembler<R> complete(Schema schema) {
+      // wrap the schema as a union of null and the schema
+      Schema optional = Schema.createUnion(Arrays.asList(NULL_SCHEMA, schema));
+      return bldr.completeField(optional, (Object)null);
+    }
+  }
+  
+  private abstract static class CompletionWrapper {
+    abstract <R> Completion<R> wrap(Completion<R> completion);
+  }
+  
+  private static final class NullableCompletionWrapper extends CompletionWrapper {
+    @Override
+    <R> Completion<R> wrap(Completion<R> completion) {
+      return new NullableCompletion<R>(completion);
+    }
+  }
+
+  private static abstract class NestedCompletion<R> extends Completion<R> {
+    private final Completion<R> context;
+    private final PropBuilder<?> assembler;
+
+    private NestedCompletion(PropBuilder<?> assembler, Completion<R> context) {
+      this.context = context;
+      this.assembler = assembler;
+    }
+
+    @Override
+    protected final R complete(Schema schema) {
+      Schema outer = outerSchema(schema);
+      assembler.addPropsTo(outer);
+      return context.complete(outer);
+    }
+
+    protected abstract Schema outerSchema(Schema inner);
+  }
+
+  private static class MapCompletion<R> extends NestedCompletion<R> {
+    private MapCompletion(MapBuilder<R> assembler, Completion<R> context) {
+      super(assembler, context);
+    }
+
+    @Override
+    protected Schema outerSchema(Schema inner) {
+      return Schema.createMap(inner);
+    }
+  }
+
+  private static class ArrayCompletion<R> extends NestedCompletion<R> {
+    private ArrayCompletion(ArrayBuilder<R> assembler, Completion<R> context) {
+      super(assembler, context);
+    }
+
+    @Override
+    protected Schema outerSchema(Schema inner) {
+      return Schema.createArray(inner);
+    }
+  }
+
+  private static class UnionCompletion<R> extends
+      Completion<UnionAccumulator<R>> {
+    private final Completion<R> context;
+    private final NameContext names;
+    private final List<Schema> schemas;
+
+    private UnionCompletion(Completion<R> context, NameContext names, List<Schema> schemas) {
+      this.context = context;
+      this.names = names;
+      this.schemas = schemas;
+    }
+
+    @Override
+    protected UnionAccumulator<R> complete(Schema schema) {
+      List<Schema> updated = new ArrayList<Schema>(this.schemas);
+      updated.add(schema);
+      return new UnionAccumulator<R>(context, names, updated);
+    }
+  }
+  
+  /** Accumulates all of the types in a union.  Add an additional type with
+   * {@link #and()}.  Complete the union with {@link #endUnion()}.
+   */
+  public static final class UnionAccumulator<R> {
+    private final Completion<R> context;
+    private final NameContext names;
+    private final List<Schema> schemas;
+
+    private UnionAccumulator(Completion<R> context, NameContext names, List<Schema> schemas) {
+      this.context = context;
+      this.names = names;
+      this.schemas = schemas;
+    }
+
+    /** Add an additional type to this union **/
+    public BaseTypeBuilder<UnionAccumulator<R>> and() {
+      return new UnionBuilder<R>(context, names, schemas);
+    }
+
+    /** Complete this union **/
+    public R endUnion() {
+      Schema schema = Schema.createUnion(schemas);
+      return context.complete(schema);
+    }
+  }
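+
+  // Example (illustrative): UnionAccumulator in action; each and() adds a
+  // branch and endUnion() materializes the union schema:
+  //
+  //   unionOf().nullType().and().intType().and().stringType().endUnion()
+  //   // -> ["null","int","string"]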
+
+  private static void checkRequired(Object reference, String errorMessage) {
+    if (reference == null) {
+      throw new NullPointerException(errorMessage);
+    }
+  }
+
+  // create default value JsonNodes from objects
+  private static JsonNode toJsonNode(Object o) {
+    try {
+      String s;
+      if (o instanceof ByteBuffer) {
+        // special case since GenericData.toString() is incorrect for bytes
+        // note that this does not handle the case of a default value with nested bytes
+        ByteBuffer bytes = ((ByteBuffer) o);
+        bytes.mark();
+        byte[] data = new byte[bytes.remaining()];
+        bytes.get(data);
+        bytes.reset(); // put the buffer back the way we got it
+        s = new String(data, "ISO-8859-1");
+        char[] quoted = JsonStringEncoder.getInstance().quoteAsString(s);
+        s = "\"" + new String(quoted) + "\"";
+      } else {
+        s = GenericData.get().toString(o);
+      }
+      return new ObjectMapper().readTree(s);
+    } catch (IOException e) {
+      throw new SchemaBuilderException(e);
+    }
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilderException.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilderException.java
new file mode 100644
index 0000000..abf81ec
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaBuilderException.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+/** Thrown for errors building schemas. */
+public class SchemaBuilderException extends AvroRuntimeException {
+  public SchemaBuilderException(Throwable cause) { super(cause); }
+  public SchemaBuilderException(String message) { super(message); }
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaCompatibility.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaCompatibility.java
new file mode 100644
index 0000000..e62ea0b
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaCompatibility.java
@@ -0,0 +1,526 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.avro.Schema.Field;
+import org.apache.avro.Schema.Type;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Evaluate the compatibility between a reader schema and a writer schema.
+ * A reader and a writer schema are declared compatible if all datum instances of the writer
+ * schema can be successfully decoded using the specified reader schema.
+ */
+public class SchemaCompatibility {
+  private static final Logger LOG = LoggerFactory.getLogger(SchemaCompatibility.class);
+
+  /** Utility class cannot be instantiated. */
+  private SchemaCompatibility() {
+  }
+
+  /** Message to annotate reader/writer schema pairs that are compatible. */
+  public static final String READER_WRITER_COMPATIBLE_MESSAGE =
+      "Reader schema can always successfully decode data written using the writer schema.";
+
+  /**
+   * Validates that the provided reader schema can be used to decode Avro data written with the
+   * provided writer schema.
+   *
+   * @param reader schema to check.
+   * @param writer schema to check.
+   * @return a result object identifying any compatibility errors.
+   */
+  public static SchemaPairCompatibility checkReaderWriterCompatibility(
+      final Schema reader,
+      final Schema writer
+  ) {
+    final SchemaCompatibilityType compatibility =
+        new ReaderWriterCompatibilityChecker()
+            .getCompatibility(reader, writer);
+
+    final String message;
+    switch (compatibility) {
+      case INCOMPATIBLE: {
+        message = String.format(
+            "Data encoded using writer schema:%n%s%n"
+            + "will or may fail to decode using reader schema:%n%s%n",
+            writer.toString(true),
+            reader.toString(true));
+        break;
+      }
+      case COMPATIBLE: {
+        message = READER_WRITER_COMPATIBLE_MESSAGE;
+        break;
+      }
+      default: throw new AvroRuntimeException("Unknown compatibility: " + compatibility);
+    }
+
+    return new SchemaPairCompatibility(
+        compatibility,
+        reader,
+        writer,
+        message);
+  }
+
+  // -----------------------------------------------------------------------------------------------
+
+  /**
+   * Tests whether the names of two Avro named schemas match.
+   *
+   * <p> Matching includes reader name aliases. </p>
+   *
+   * @param reader Named reader schema.
+   * @param writer Named writer schema.
+   * @return whether the names of the named schemas match or not.
+   */
+  public static boolean schemaNameEquals(final Schema reader, final Schema writer) {
+    final String writerFullName = writer.getFullName();
+    if (objectsEqual(reader.getFullName(), writerFullName)) {
+      return true;
+    }
+    // Apply reader aliases:
+    if (reader.getAliases().contains(writerFullName)) {
+      return true;
+    }
+    return false;
+  }
+
+  /**
+   * Identifies the writer field that corresponds to the specified reader field.
+   *
+   * <p> Matching includes reader name aliases. </p>
+   *
+   * @param writerSchema Schema of the record where to look for the writer field.
+   * @param readerField Reader field to identify the corresponding writer field of.
+   * @return the corresponding writer field, or null if none exists.
+   */
+  public static Field lookupWriterField(final Schema writerSchema, final Field readerField) {
+    assert (writerSchema.getType() == Type.RECORD);
+    final List<Field> writerFields = new ArrayList<Field>();
+    final Field direct = writerSchema.getField(readerField.name());
+    if (direct != null) {
+      writerFields.add(direct);
+    }
+    for (final String readerFieldAliasName : readerField.aliases()) {
+      final Field writerField = writerSchema.getField(readerFieldAliasName);
+      if (writerField != null) {
+        writerFields.add(writerField);
+      }
+    }
+    switch (writerFields.size()) {
+      case 0: return null;
+      case 1: return writerFields.get(0);
+      default: {
+        throw new AvroRuntimeException(String.format(
+            "Reader record field %s matches multiple fields in writer record schema %s",
+            readerField, writerSchema));
+      }
+    }
+  }
+
+  /**
+   * Reader/writer schema pair that can be used as a key in a hash map.
+   *
+   * This pair distinguishes Schema objects by reference identity (system identity hash code), not by value equality.
+   */
+  private static final class ReaderWriter {
+    private final Schema mReader;
+    private final Schema mWriter;
+
+    /**
+     * Initializes a new reader/writer pair.
+     *
+     * @param reader Reader schema.
+     * @param writer Writer schema.
+     */
+    public ReaderWriter(final Schema reader, final Schema writer) {
+      mReader = reader;
+      mWriter = writer;
+    }
+
+    /**
+     * Returns the reader schema in this pair.
+     * @return the reader schema in this pair.
+     */
+    public Schema getReader() {
+      return mReader;
+    }
+
+    /**
+     * Returns the writer schema in this pair.
+     * @return the writer schema in this pair.
+     */
+    public Schema getWriter() {
+      return mWriter;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public int hashCode() {
+      return System.identityHashCode(mReader) ^ System.identityHashCode(mWriter);
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public boolean equals(Object obj) {
+      if (!(obj instanceof ReaderWriter)) {
+        return false;
+      }
+      final ReaderWriter that = (ReaderWriter) obj;
+      // Use pointer comparison here:
+      return (this.mReader == that.mReader)
+          && (this.mWriter == that.mWriter);
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public String toString() {
+      return String.format("ReaderWriter{reader:%s, writer:%s}", mReader, mWriter);
+    }
+  }
+
+  /**
+   * Determines the compatibility of a reader/writer schema pair.
+   *
+   * <p> Provides memoization to handle recursive schemas. </p>
+   */
+  private static final class ReaderWriterCompatibilityChecker {
+    private final Map<ReaderWriter, SchemaCompatibilityType> mMemoizeMap =
+        new HashMap<ReaderWriter, SchemaCompatibilityType>();
+
+    /**
+     * Reports the compatibility of a reader/writer schema pair.
+     *
+     * <p> Memoizes the compatibility results. </p>
+     *
+     * @param reader Reader schema to test.
+     * @param writer Writer schema to test.
+     * @return the compatibility of the reader/writer schema pair.
+     */
+    public SchemaCompatibilityType getCompatibility(
+        final Schema reader,
+        final Schema writer
+    ) {
+      LOG.debug("Checking compatibility of reader {} with writer {}", reader, writer);
+      final ReaderWriter pair = new ReaderWriter(reader, writer);
+      final SchemaCompatibilityType existing = mMemoizeMap.get(pair);
+      if (existing != null) {
+        if (existing == SchemaCompatibilityType.RECURSION_IN_PROGRESS) {
+          // Break the recursion here.
+          // schemas are compatible unless proven incompatible:
+          return SchemaCompatibilityType.COMPATIBLE;
+        }
+        return existing;
+      }
+      // Mark this reader/writer pair as "in progress":
+      mMemoizeMap.put(pair, SchemaCompatibilityType.RECURSION_IN_PROGRESS);
+      final SchemaCompatibilityType calculated = calculateCompatibility(reader, writer);
+      mMemoizeMap.put(pair, calculated);
+      return calculated;
+    }
+
+    /**
+     * Calculates the compatibility of a reader/writer schema pair.
+     *
+     * <p>
+     * Relies on external memoization performed by {@link #getCompatibility(Schema, Schema)}.
+     * </p>
+     *
+     * @param reader Reader schema to test.
+     * @param writer Writer schema to test.
+     * @return the compatibility of the reader/writer schema pair.
+     */
+    private SchemaCompatibilityType calculateCompatibility(
+        final Schema reader,
+        final Schema writer
+    ) {
+      assert (reader != null);
+      assert (writer != null);
+
+      if (reader.getType() == writer.getType()) {
+        switch (reader.getType()) {
+          case NULL:
+          case BOOLEAN:
+          case INT:
+          case LONG:
+          case FLOAT:
+          case DOUBLE:
+          case BYTES:
+          case STRING: {
+            return SchemaCompatibilityType.COMPATIBLE;
+          }
+          case ARRAY: {
+            return getCompatibility(reader.getElementType(), writer.getElementType());
+          }
+          case MAP: {
+            return getCompatibility(reader.getValueType(), writer.getValueType());
+          }
+          case FIXED: {
+            // fixed size and name must match:
+            if (!schemaNameEquals(reader, writer)) {
+              return SchemaCompatibilityType.INCOMPATIBLE;
+            }
+            if (reader.getFixedSize() != writer.getFixedSize()) {
+              return SchemaCompatibilityType.INCOMPATIBLE;
+            }
+            return SchemaCompatibilityType.COMPATIBLE;
+          }
+          case ENUM: {
+            // enum names must match:
+            if (!schemaNameEquals(reader, writer)) {
+              return SchemaCompatibilityType.INCOMPATIBLE;
+            }
+            // reader symbols must contain all writer symbols:
+            final Set<String> symbols = new HashSet<String>(writer.getEnumSymbols());
+            symbols.removeAll(reader.getEnumSymbols());
+            // TODO: Report a human-readable error.
+            // if (!symbols.isEmpty()) {
+            // }
+            return symbols.isEmpty()
+                ? SchemaCompatibilityType.COMPATIBLE
+                : SchemaCompatibilityType.INCOMPATIBLE;
+          }
+          case RECORD: {
+            // record names must match:
+            if (!schemaNameEquals(reader, writer)) {
+              return SchemaCompatibilityType.INCOMPATIBLE;
+            }
+
+            // Check that each field in the reader record can be populated from the writer record:
+            for (final Field readerField : reader.getFields()) {
+              final Field writerField = lookupWriterField(writer, readerField);
+              if (writerField == null) {
+                // Reader field does not correspond to any field in the writer record schema,
+                // reader field must have a default value.
+                if (readerField.defaultValue() == null) {
+                  // reader field has no default value
+                  return SchemaCompatibilityType.INCOMPATIBLE;
+                }
+              } else {
+                if (getCompatibility(readerField.schema(), writerField.schema())
+                    == SchemaCompatibilityType.INCOMPATIBLE) {
+                  return SchemaCompatibilityType.INCOMPATIBLE;
+                }
+              }
+            }
+
+            // All fields in the reader record can be populated from the writer record:
+            return SchemaCompatibilityType.COMPATIBLE;
+          }
+          case UNION: {
+            // Check that each individual branch of the writer union can be decoded:
+            for (final Schema writerBranch : writer.getTypes()) {
+              if (getCompatibility(reader, writerBranch) == SchemaCompatibilityType.INCOMPATIBLE) {
+                return SchemaCompatibilityType.INCOMPATIBLE;
+              }
+            }
+            // Each schema in the writer union can be decoded with the reader:
+            return SchemaCompatibilityType.COMPATIBLE;
+          }
+
+          default: {
+            throw new AvroRuntimeException("Unknown schema type: " + reader.getType());
+          }
+        }
+
+      } else {
+        // Reader and writer have different schema types:
+
+        // Handle the corner case where writer is a union of a singleton branch: { X } === X
+        if ((writer.getType() == Schema.Type.UNION)
+            && writer.getTypes().size() == 1) {
+          return getCompatibility(reader, writer.getTypes().get(0));
+        }
+
+        switch (reader.getType()) {
+          case NULL: return SchemaCompatibilityType.INCOMPATIBLE;
+          case BOOLEAN: return SchemaCompatibilityType.INCOMPATIBLE;
+          case INT: return SchemaCompatibilityType.INCOMPATIBLE;
+          case LONG: {
+            return (writer.getType() == Type.INT)
+                ? SchemaCompatibilityType.COMPATIBLE
+                : SchemaCompatibilityType.INCOMPATIBLE;
+          }
+          case FLOAT: {
+            return ((writer.getType() == Type.INT)
+                || (writer.getType() == Type.LONG))
+                ? SchemaCompatibilityType.COMPATIBLE
+                : SchemaCompatibilityType.INCOMPATIBLE;
+
+          }
+          case DOUBLE: {
+            return ((writer.getType() == Type.INT)
+                || (writer.getType() == Type.LONG)
+                || (writer.getType() == Type.FLOAT))
+                ? SchemaCompatibilityType.COMPATIBLE
+                : SchemaCompatibilityType.INCOMPATIBLE;
+          }
+          case BYTES: return SchemaCompatibilityType.INCOMPATIBLE;
+          case STRING: return SchemaCompatibilityType.INCOMPATIBLE;
+          case ARRAY: return SchemaCompatibilityType.INCOMPATIBLE;
+          case MAP: return SchemaCompatibilityType.INCOMPATIBLE;
+          case FIXED: return SchemaCompatibilityType.INCOMPATIBLE;
+          case ENUM: return SchemaCompatibilityType.INCOMPATIBLE;
+          case RECORD: return SchemaCompatibilityType.INCOMPATIBLE;
+          case UNION: {
+            for (final Schema readerBranch : reader.getTypes()) {
+              if (getCompatibility(readerBranch, writer) == SchemaCompatibilityType.COMPATIBLE) {
+                return SchemaCompatibilityType.COMPATIBLE;
+              }
+            }
+            // No branch in the reader union has been found compatible with the writer schema:
+            return SchemaCompatibilityType.INCOMPATIBLE;
+          }
+
+          default: {
+            throw new AvroRuntimeException("Unknown schema type: " + reader.getType());
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * Identifies the type of a schema compatibility result.
+   */
+  public static enum SchemaCompatibilityType {
+    COMPATIBLE,
+    INCOMPATIBLE,
+
+    /** Used internally to tag a reader/writer schema pair and prevent recursion. */
+    RECURSION_IN_PROGRESS;
+  }
+
+  // -----------------------------------------------------------------------------------------------
+
+  /**
+   * Provides information about the compatibility of a single reader and writer schema pair.
+   *
+   * Note: This class represents a one-way relationship from the reader to the writer schema.
+   */
+  public static final class SchemaPairCompatibility {
+    /** The type of this result. */
+    private final SchemaCompatibilityType mType;
+
+    /** Validated reader schema. */
+    private final Schema mReader;
+
+    /** Validated writer schema. */
+    private final Schema mWriter;
+
+    /** Human readable description of this result. */
+    private final String mDescription;
+
+    /**
+     * Constructs a new instance.
+     *
+     * @param type of the schema compatibility.
+     * @param reader schema that was validated.
+     * @param writer schema that was validated.
+     * @param description of this compatibility result.
+     */
+    public SchemaPairCompatibility(
+        SchemaCompatibilityType type,
+        Schema reader,
+        Schema writer,
+        String description) {
+      mType = type;
+      mReader = reader;
+      mWriter = writer;
+      mDescription = description;
+    }
+
+    /**
+     * Gets the type of this result.
+     *
+     * @return the type of this result.
+     */
+    public SchemaCompatibilityType getType() {
+      return mType;
+    }
+
+    /**
+     * Gets the reader schema that was validated.
+     *
+     * @return reader schema that was validated.
+     */
+    public Schema getReader() {
+      return mReader;
+    }
+
+    /**
+     * Gets the writer schema that was validated.
+     *
+     * @return writer schema that was validated.
+     */
+    public Schema getWriter() {
+      return mWriter;
+    }
+
+    /**
+     * Gets a human readable description of this validation result.
+     *
+     * @return a human readable description of this validation result.
+     */
+    public String getDescription() {
+      return mDescription;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public String toString() {
+      return String.format(
+          "SchemaPairCompatibility{type:%s, readerSchema:%s, writerSchema:%s, description:%s}",
+          mType, mReader, mWriter, mDescription);
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public boolean equals(Object other) {
+      if ((null != other) && (other instanceof SchemaPairCompatibility)) {
+        final SchemaPairCompatibility result = (SchemaPairCompatibility) other;
+        return objectsEqual(result.mType, mType)
+            && objectsEqual(result.mReader, mReader)
+            && objectsEqual(result.mWriter, mWriter)
+            && objectsEqual(result.mDescription, mDescription);
+      } else {
+        return false;
+      }
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public int hashCode() {
+      return Arrays.hashCode(new Object[]{mType, mReader, mWriter, mDescription});
+    }
+  }
+
+  /** Borrowed from Guava's Objects.equal(a, b) */
+  private static boolean objectsEqual(Object obj1, Object obj2) {
+    return (obj1 == obj2) || ((obj1 != null) && obj1.equals(obj2));
+  }
+}
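
A minimal usage sketch (not part of this commit; the schema literals are
illustrative, assuming org.apache.avro.Schema and
org.apache.avro.SchemaCompatibility are imported):

    Schema writer = new Schema.Parser().parse("\"int\"");
    Schema reader = new Schema.Parser().parse("\"long\"");  // long can read int
    SchemaCompatibility.SchemaPairCompatibility result =
        SchemaCompatibility.checkReaderWriterCompatibility(reader, writer);
    // Promotion from int to long is allowed, so this prints COMPATIBLE:
    System.out.println(result.getType());
    System.out.println(result.getDescription());
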
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaNormalization.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaNormalization.java
new file mode 100644
index 0000000..16d2642
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaNormalization.java
@@ -0,0 +1,174 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+
+/** Collection of static methods for generating the canonical form of
+ * schemas (see {@link #toParsingForm}) -- and fingerprints of canonical
+ * forms ({@link #fingerprint}).
+ */
+public class SchemaNormalization {
+
+  private SchemaNormalization() {}
+
+  /** Returns "Parsing Canonical Form" of a schema as defined by Avro
+    * spec. */
+  public static String toParsingForm(Schema s) {
+    try {
+      Map<String,String> env = new HashMap<String,String>();
+      return build(env, s, new StringBuilder()).toString();
+    } catch (IOException e) {
+      // Shouldn't happen, b/c StringBuilder can't throw IOException
+      throw new RuntimeException(e);
+    }
+  }
+
+  /** Returns a fingerprint of a string of bytes.  This string is
+    * presumed to contain a canonical form of a schema.  The
+    * algorithm used to compute the fingerprint is selected by the
+    * argument <i>fpName</i>.  If <i>fpName</i> equals the string
+    * <code>"CRC-64-AVRO"</code>, then the result of {@link #fingerprint64} is
+    * returned in little-endian format.  Otherwise, <i>fpName</i> is
+    * used as an algorithm name for {@link
+    * MessageDigest#getInstance(String)}, which will throw
+    * <code>NoSuchAlgorithmException</code> if it doesn't recognize
+    * the name.
+    * <p> Recommended Avro practice dictates that
+    * <code>"CRC-64-AVRO"</code> is used for 64-bit fingerprints,
+    * <code>"MD5"</code> is used for 128-bit fingerprints, and
+    * <code>"SHA-256"</code> is used for 256-bit fingerprints. */
+  public static byte[] fingerprint(String fpName, byte[] data)
+    throws NoSuchAlgorithmException
+  {
+    if (fpName.equals("CRC-64-AVRO")) {
+      long fp = fingerprint64(data);
+      byte[] result = new byte[8];
+      for (int i = 0; i < 8; i++) {
+        result[i] = (byte)fp;
+        fp >>= 8;
+      }
+      return result;
+    }
+
+    MessageDigest md = MessageDigest.getInstance(fpName);
+    return md.digest(data);
+  }
+
+  /** Returns the 64-bit Rabin Fingerprint (as recommended in the Avro
+    * spec) of a byte string. */
+  public static long fingerprint64(byte[] data) {
+    long result = EMPTY64;
+    for (byte b: data)
+      result = (result >>> 8) ^ FP64.FP_TABLE[(int)(result ^ b) & 0xff];
+    return result;
+  }
+
+  /** Returns {@link #fingerprint} applied to the parsing canonical form
+    * of the supplied schema. */
+  public static byte[] parsingFingerprint(String fpName, Schema s)
+    throws NoSuchAlgorithmException
+  {
+    try {
+      return fingerprint(fpName, toParsingForm(s).getBytes("UTF-8"));
+    } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); }
+  }
+
+  /** Returns {@link #fingerprint64} applied to the parsing canonical form
+    * of the supplied schema. */
+  public static long parsingFingerprint64(Schema s) {
+    try {
+      return fingerprint64(toParsingForm(s).getBytes("UTF-8"));
+    } catch (java.io.UnsupportedEncodingException e)
+      { throw new RuntimeException(e); }
+  }
+
+  private static Appendable build(Map<String,String> env, Schema s,
+                                  Appendable o) throws IOException {
+    boolean firstTime = true;
+    Schema.Type st = s.getType();
+    switch (st) {
+    default: // boolean, bytes, double, float, int, long, null, string
+      return o.append('"').append(st.getName()).append('"');
+
+    case UNION:
+      o.append('[');
+      for (Schema b: s.getTypes()) {
+        if (! firstTime) o.append(','); else firstTime = false;
+        build(env, b, o);
+      }
+      return o.append(']');
+
+    case ARRAY:  case MAP:
+      o.append("{\"type\":\"").append(st.getName()).append("\"");
+      if (st == Schema.Type.ARRAY)
+        build(env, s.getElementType(), o.append(",\"items\":"));
+      else build(env, s.getValueType(), o.append(",\"values\":"));
+      return o.append("}");
+
+    case ENUM: case FIXED: case RECORD:
+      String name = s.getFullName();
+      if (env.get(name) != null) return o.append(env.get(name));
+      String qname = "\""+name+"\"";
+      env.put(name, qname);
+      o.append("{\"name\":").append(qname);
+      o.append(",\"type\":\"").append(st.getName()).append("\"");
+      if (st == Schema.Type.ENUM) {
+        o.append(",\"symbols\":[");
+        for (String enumSymbol: s.getEnumSymbols()) {
+          if (! firstTime) o.append(','); else firstTime = false;
+          o.append('"').append(enumSymbol).append('"');
+        }
+        o.append("]");
+      } else if (st == Schema.Type.FIXED) {
+        o.append(",\"size\":").append(Integer.toString(s.getFixedSize()));
+      } else { // st == Schema.Type.RECORD
+        o.append(",\"fields\":[");
+        for (Schema.Field f: s.getFields()) {
+          if (! firstTime) o.append(','); else firstTime = false;
+          o.append("{\"name\":\"").append(f.name()).append("\"");
+          build(env, f.schema(), o.append(",\"type\":")).append("}");
+        }
+        o.append("]");
+      }
+      return o.append("}");
+    }
+  }
+
+  final static long EMPTY64 = 0xc15d213aa4d7a795L;
+
+/* An inner class ensures that FP_TABLE is initialized only when needed. */
+  private static class FP64 {
+    private static final long[] FP_TABLE = new long[256];
+    static {
+      for (int i = 0; i < 256; i++) {
+        long fp = i;
+        for (int j = 0; j < 8; j++) {
+          long mask = -(fp & 1L);
+          fp = (fp >>> 1) ^ (EMPTY64 & mask);
+        }
+        FP_TABLE[i] = fp;
+      }
+    }
+  }
+}
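
A short illustrative sketch of the canonical-form and fingerprint entry
points above (assumed usage, inside a method that may throw
NoSuchAlgorithmException):

    Schema s = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"R\",\"fields\":"
        + "[{\"name\":\"f\",\"type\":\"int\"}]}");
    String canonical = SchemaNormalization.toParsingForm(s);
    // => {"name":"R","type":"record","fields":[{"name":"f","type":"int"}]}
    long rabin = SchemaNormalization.parsingFingerprint64(s);       // CRC-64-AVRO
    byte[] md5 = SchemaNormalization.parsingFingerprint("MD5", s);  // 128-bit
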
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaParseException.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaParseException.java
new file mode 100644
index 0000000..60a413d
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaParseException.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+/** Thrown for errors parsing schemas and protocols. */
+public class SchemaParseException extends AvroRuntimeException {
+  public SchemaParseException(Throwable cause) { super(cause); }
+  public SchemaParseException(String message) { super(message); }
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaValidationException.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaValidationException.java
new file mode 100644
index 0000000..4a1af80
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaValidationException.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro;
+
+/**
+ * Thrown when {@link SchemaValidator} fails to validate a schema.
+ */
+public class SchemaValidationException extends Exception {
+
+  public SchemaValidationException(Schema reader, Schema writer) {
+    super(getMessage(reader, writer));
+  }
+
+  public SchemaValidationException(Schema reader, Schema writer,
+      Throwable cause) {
+    super(getMessage(reader, writer), cause);
+  }
+
+  private static String getMessage(Schema reader, Schema writer) {
+    return "Unable to read schema: \n"
+        + writer.toString(true) + "\nusing schema:\n" + reader.toString(true);
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaValidationStrategy.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaValidationStrategy.java
new file mode 100644
index 0000000..dc1c9cc
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaValidationStrategy.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+/**
+ * An interface for validating the compatibility of a single schema against
+ * another.
+ * <p>
+ * What makes one schema compatible with another is not defined by the contract.
+ * </p>
+ */
+public interface SchemaValidationStrategy {
+
+  /**
+   * Validates that one schema is compatible with another.
+   * 
+   * @throws SchemaValidationException if the schemas are not compatible.
+   */
+  void validate(Schema toValidate, Schema existing)
+      throws SchemaValidationException;
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaValidator.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaValidator.java
new file mode 100644
index 0000000..197c5c0
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaValidator.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+/**
+ * <p>
+ * A SchemaValidator has one method, which validates that a {@link Schema} is
+ * <b>compatible</b> with the other schemas provided.
+ * </p>
+ * <p>
+ * What makes one Schema compatible with another is not part of the interface
+ * contract.
+ * </p>
+ */
+public interface SchemaValidator {
+
+  /**
+   * Validate one schema against others. The order of the schemas to validate
+   * against is chronological from most recent to oldest, if there is a natural
+   * chronological order. This allows some validators to identify which schemas
+   * are the most "recent" in order to validate only against the most recent
+   * schema(s).
+   *
+   * @param toValidate The schema to validate
+   * @param existing The schemas to validate against, in order from most recent to oldest if applicable
+   * @throws SchemaValidationException if the schema fails to validate.
+   */
+  void validate(Schema toValidate, Iterable<Schema> existing) throws SchemaValidationException;
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/SchemaValidatorBuilder.java b/lang/java/avro/src/main/java/org/apache/avro/SchemaValidatorBuilder.java
new file mode 100644
index 0000000..e1563d2
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/SchemaValidatorBuilder.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+/**
+ * <p>
+ * A Builder for creating SchemaValidators.
+ * </p>
+ */
+public final class SchemaValidatorBuilder {
+  private SchemaValidationStrategy strategy;
+
+  public SchemaValidatorBuilder strategy(SchemaValidationStrategy strategy) {
+    this.strategy = strategy;
+    return this;
+  }
+
+  /**
+   * Use a strategy that validates that a schema can be used to read existing
+   * schema(s) according to the Avro default schema resolution.
+   */
+  public SchemaValidatorBuilder canReadStrategy() {
+    this.strategy = new ValidateCanRead();
+    return this;
+  }
+
+  /**
+   * Use a strategy that validates that a schema can be read by existing
+   * schema(s) according to the Avro default schema resolution.
+   */
+  public SchemaValidatorBuilder canBeReadStrategy() {
+    this.strategy = new ValidateCanBeRead();
+    return this;
+  }
+
+  /**
+   * Use a strategy that validates that a schema can read existing schema(s),
+   * and vice-versa, according to the Avro default schema resolution.
+   */
+  public SchemaValidatorBuilder mutualReadStrategy() {
+    this.strategy = new ValidateMutualRead();
+    return this;
+  }
+  
+  public SchemaValidator validateLatest() {
+    valid();
+    return new ValidateLatest(strategy);
+  }
+  
+  public SchemaValidator validateAll() {
+    valid();
+    return new ValidateAll(strategy);
+  }
+  
+  private void valid() {
+    if(null == strategy) {
+      throw new AvroRuntimeException("SchemaValidationStrategy not specified in builder");
+    }
+  }
+
+}
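
A hedged sketch of the intended builder usage (newSchema and olderSchema are
hypothetical, in a method declared to throw SchemaValidationException):

    SchemaValidator validator = new SchemaValidatorBuilder()
        .canReadStrategy()   // new schema must read data written with old ones
        .validateAll();      // check against every schema in the history
    try {
      validator.validate(newSchema, java.util.Arrays.asList(olderSchema));
    } catch (SchemaValidationException e) {
      // newSchema cannot decode data written with olderSchema
    }
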
diff --git a/lang/java/avro/src/main/java/org/apache/avro/UnresolvedUnionException.java b/lang/java/avro/src/main/java/org/apache/avro/UnresolvedUnionException.java
new file mode 100644
index 0000000..ed66aca
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/UnresolvedUnionException.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+/** Thrown when the expected contents of a union cannot be resolved. */
+public class UnresolvedUnionException extends AvroRuntimeException {
+  private Object unresolvedDatum;
+  private Schema unionSchema;
+  
+  public UnresolvedUnionException(Schema unionSchema, Object unresolvedDatum) {
+    super("Not in union "+unionSchema+": "+unresolvedDatum);
+    this.unionSchema = unionSchema;
+    this.unresolvedDatum = unresolvedDatum;
+  }
+
+  public Object getUnresolvedDatum() {
+    return unresolvedDatum;
+  }
+
+  public Schema getUnionSchema() {
+    return unionSchema;
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/ValidateAll.java b/lang/java/avro/src/main/java/org/apache/avro/ValidateAll.java
new file mode 100644
index 0000000..a6d5a31
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/ValidateAll.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro;
+
+import java.util.Iterator;
+
+/**
+ * <p>
+ * A {@link SchemaValidator} for validating the provided schema against all
+ * schemas in the Iterable in {@link #validate(Schema, Iterable)}.
+ * </p>
+ * <p>
+ * Uses the {@link SchemaValidationStrategy} provided in the constructor to
+ * validate the {@link Schema} against each Schema in the Iterable, in Iterator
+ * order, via {@link SchemaValidationStrategy#validate(Schema, Schema)}.
+ * </p>
+ */
+public final class ValidateAll implements SchemaValidator {
+  private final SchemaValidationStrategy strategy;
+
+  /**
+   * @param strategy
+   *          The strategy to use for validation of pairwise schemas.
+   */
+  public ValidateAll(SchemaValidationStrategy strategy) {
+    this.strategy = strategy;
+  }
+
+  @Override
+  public void validate(Schema toValidate, Iterable<Schema> schemasInOrder)
+      throws SchemaValidationException {
+    Iterator<Schema> schemas = schemasInOrder.iterator();
+    while (schemas.hasNext()) {
+      Schema existing = schemas.next();
+      strategy.validate(toValidate, existing);
+    }
+  }
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/ValidateCanBeRead.java b/lang/java/avro/src/main/java/org/apache/avro/ValidateCanBeRead.java
new file mode 100644
index 0000000..60d4b04
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/ValidateCanBeRead.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro;
+
+/**
+ * A {@link SchemaValidationStrategy} that checks that the data written with the
+ * {@link Schema} to validate can be read by the existing schema according to
+ * the default Avro schema resolution rules.
+ * 
+ */
+class ValidateCanBeRead implements SchemaValidationStrategy {
+
+  /**
+   * Validate that data written with first schema provided can be read using the
+   * second schema, according to the default Avro schema resolution rules.
+   * 
+   * @throws SchemaValidationException
+   *           if the second schema cannot read data written by the first.
+   */
+  @Override
+  public void validate(Schema toValidate, Schema existing)
+      throws SchemaValidationException {
+    ValidateMutualRead.canRead(toValidate, existing);
+  }
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/ValidateCanRead.java b/lang/java/avro/src/main/java/org/apache/avro/ValidateCanRead.java
new file mode 100644
index 0000000..bbf0c1e
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/ValidateCanRead.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro;
+
+/**
+ * A {@link SchemaValidationStrategy} that checks that the {@link Schema} to
+ * validate can read the existing schema according to the default Avro schema
+ * resolution rules.
+ * 
+ */
+class ValidateCanRead implements SchemaValidationStrategy {
+
+  /**
+   * Validate that the first schema provided can be used to read data written
+   * with the second schema, according to the default Avro schema resolution
+   * rules.
+   * 
+   * @throws SchemaValidationException
+   *           if the first schema cannot read data written by the second.
+   */
+  @Override
+  public void validate(Schema toValidate, Schema existing)
+      throws SchemaValidationException {
+    ValidateMutualRead.canRead(existing, toValidate);
+  }
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/ValidateLatest.java b/lang/java/avro/src/main/java/org/apache/avro/ValidateLatest.java
new file mode 100644
index 0000000..e068706
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/ValidateLatest.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro;
+
+import java.util.Iterator;
+
+/**
+ * <p>
+ * A {@link SchemaValidator} for validating the provided schema against the
+ * first {@link Schema} in the iterable in {@link #validate(Schema, Iterable)}.
+ * </p>
+ * <p>
+ * Uses the {@link SchemaValidationStrategy} provided in the constructor to
+ * validate the schema against the first Schema in the iterable, if it exists,
+ * via {@link SchemaValidationStrategy#validate(Schema, Schema)}.
+ * </p>
+ */
+public final class ValidateLatest implements SchemaValidator {
+  private final SchemaValidationStrategy strategy;
+
+  /**
+   * @param strategy
+   *          The strategy to use for validation of pairwise schemas.
+   */
+  public ValidateLatest(SchemaValidationStrategy strategy) {
+    this.strategy = strategy;
+  }
+
+  @Override
+  public void validate(Schema toValidate, Iterable<Schema> schemasInOrder)
+      throws SchemaValidationException {
+    Iterator<Schema> schemas = schemasInOrder.iterator();
+    if (schemas.hasNext()) {
+      Schema existing = schemas.next();
+      strategy.validate(toValidate, existing);
+    }
+  }
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/ValidateMutualRead.java b/lang/java/avro/src/main/java/org/apache/avro/ValidateMutualRead.java
new file mode 100644
index 0000000..5f8861e
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/ValidateMutualRead.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro;
+
+import java.io.IOException;
+
+import org.apache.avro.io.parsing.ResolvingGrammarGenerator;
+import org.apache.avro.io.parsing.Symbol;
+
+/**
+ * A {@link SchemaValidationStrategy} that checks that the {@link Schema} to
+ * validate and the existing schema can mutually read each other according to
+ * the default Avro schema resolution rules.
+ * 
+ */
+class ValidateMutualRead implements SchemaValidationStrategy {
+
+  /**
+   * Validate that the schemas provided can mutually read data written by each
+   * other according to the default Avro schema resolution rules.
+   * 
+   * @throws SchemaValidationException if the schemas are not mutually compatible.
+   */
+  @Override
+  public void validate(Schema toValidate, Schema existing)
+      throws SchemaValidationException {
+    canRead(toValidate, existing);
+    canRead(existing, toValidate);
+  }
+
+  /**
+   * Validates that data written with one schema can be read using another,
+   * based on the default Avro schema resolution rules.
+   * 
+   * @param writtenWith
+   *          The "writer's" schema, representing data to be read.
+   * @param readUsing
+   *          The "reader's" schema, representing how the reader will interpret
+   *          data.
+   * @throws SchemaValidationException
+   *           if the schema <b>readUsing</b> cannot be used to read data
+   *           written with <b>writtenWith</b>.
+   */
+  static void canRead(Schema writtenWith, Schema readUsing)
+      throws SchemaValidationException {
+    boolean error;
+    try {
+      error = Symbol.hasErrors(new ResolvingGrammarGenerator().generate(
+          writtenWith, readUsing));
+    } catch (IOException e) {
+      throw new SchemaValidationException(readUsing, writtenWith, e);
+    }
+    if (error) {
+      throw new SchemaValidationException(readUsing, writtenWith);
+    }
+  }
+
+}
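
To illustrate the asymmetry these strategies capture (a sketch; v1/v2 are
hypothetical schemas, in a method declared to throw
SchemaValidationException): adding a field without a default is one-way
compatible. Readers of the old schema simply skip the new field, but a
reader of the new schema has no value to supply when decoding old data:

    Schema v1 = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"R\",\"fields\":[]}");
    Schema v2 = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"R\",\"fields\":"
        + "[{\"name\":\"f\",\"type\":\"int\"}]}");
    // canBeReadStrategy passes: v1 readers skip the unknown field f.
    new SchemaValidatorBuilder().canBeReadStrategy().validateLatest()
        .validate(v2, java.util.Collections.singletonList(v1));
    try {
      new SchemaValidatorBuilder().mutualReadStrategy().validateLatest()
          .validate(v2, java.util.Collections.singletonList(v1));
    } catch (SchemaValidationException expected) {
      // v2 cannot read v1 data: field f has no default value
    }
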
diff --git a/lang/java/avro/src/main/java/org/apache/avro/data/ErrorBuilder.java b/lang/java/avro/src/main/java/org/apache/avro/data/ErrorBuilder.java
new file mode 100644
index 0000000..b55cfd0
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/data/ErrorBuilder.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.data;
+
+/** Interface for error builders */
+public interface ErrorBuilder<T> extends RecordBuilder<T> {
+  
+  /** Gets the value */
+  Object getValue();
+  
+  /** Sets the value */
+  ErrorBuilder<T> setValue(Object value);
+  
+  /** Checks whether the value has been set */
+  boolean hasValue();
+  
+  /** Clears the value */
+  ErrorBuilder<T> clearValue();
+  
+  /** Gets the error cause */
+  Throwable getCause();
+  
+  /** Sets the error cause */
+  ErrorBuilder<T> setCause(Throwable cause);
+  
+  /** Checks whether the cause has been set */
+  boolean hasCause();
+  
+  /** Clears the cause */
+  ErrorBuilder<T> clearCause();
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/data/Json.java b/lang/java/avro/src/main/java/org/apache/avro/data/Json.java
new file mode 100644
index 0000000..73a57c2
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/data/Json.java
@@ -0,0 +1,279 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.data;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Iterator;
+
+import org.apache.avro.util.internal.JacksonUtils;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.JsonParseException;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.node.JsonNodeFactory;
+import org.codehaus.jackson.node.LongNode;
+import org.codehaus.jackson.node.DoubleNode;
+import org.codehaus.jackson.node.TextNode;
+import org.codehaus.jackson.node.BooleanNode;
+import org.codehaus.jackson.node.NullNode;
+import org.codehaus.jackson.node.ArrayNode;
+import org.codehaus.jackson.node.ObjectNode;
+
+import org.apache.avro.Schema;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.ResolvingDecoder;
+
+/** Utilities for reading and writing arbitrary Json data in Avro format. */
+public class Json {
+  private Json() {}                               // singleton: no public ctor
+
+  static final JsonFactory FACTORY = new JsonFactory();
+  static final ObjectMapper MAPPER = new ObjectMapper(FACTORY);
+
+  /** The schema for Json data. */
+  public static final Schema SCHEMA;
+  static {
+    try {
+      SCHEMA = Schema.parse
+        (Json.class.getResourceAsStream("/org/apache/avro/data/Json.avsc"));
+    } catch (IOException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  /**
+   * {@link DatumWriter} for arbitrary Json data.
+   * @deprecated use {@link ObjectWriter}
+   */
+  @Deprecated
+  public static class Writer implements DatumWriter<JsonNode> {
+
+    @Override public void setSchema(Schema schema) {
+      if (!SCHEMA.equals(schema))
+        throw new RuntimeException("Not the Json schema: "+schema);
+    }
+    
+    @Override
+    public void write(JsonNode datum, Encoder out) throws IOException {
+      Json.write(datum, out);
+    }
+  }
+
+  /**
+   * {@link DatumReader} for arbitrary Json data.
+   * @deprecated use {@link ObjectReader}
+   */
+  @Deprecated
+  public static class Reader implements DatumReader<JsonNode> {
+    private Schema written;
+    private ResolvingDecoder resolver;
+
+    @Override public void setSchema(Schema schema) {
+      this.written = SCHEMA.equals(schema) ? null : schema; // null means no resolution needed
+    }
+
+    @Override
+    public JsonNode read(JsonNode reuse, Decoder in) throws IOException {
+      if (written == null)                        // same schema
+        return Json.read(in);
+
+      // use a resolver to adapt alternate version of Json schema
+      if (resolver == null)
+        resolver = DecoderFactory.get().resolvingDecoder(written, SCHEMA, null);
+      resolver.configure(in);
+      JsonNode result = Json.read(resolver);
+      resolver.drain();
+      return result;
+    }
+  }
+
+  /** {@link DatumWriter} for arbitrary Json data using the object model described
+   *  in {@link org.apache.avro.JsonProperties}. */
+  public static class ObjectWriter implements DatumWriter<Object> {
+
+    @Override public void setSchema(Schema schema) {
+      if (!SCHEMA.equals(schema))
+        throw new RuntimeException("Not the Json schema: "+schema);
+    }
+
+    @Override
+    public void write(Object datum, Encoder out) throws IOException {
+      Json.writeObject(datum, out);
+    }
+  }
+
+  /** {@link DatumReader} for arbitrary Json data using the object model described
+   *  in {@link org.apache.avro.JsonProperties}. */
+  public static class ObjectReader implements DatumReader<Object> {
+    private Schema written;
+    private ResolvingDecoder resolver;
+
+    @Override public void setSchema(Schema schema) {
+      this.written = SCHEMA.equals(schema) ? null : schema; // null means no resolution needed
+    }
+
+    @Override
+    public Object read(Object reuse, Decoder in) throws IOException {
+      if (written == null)                        // same schema
+        return Json.readObject(in);
+
+      // use a resolver to adapt alternate version of Json schema
+      if (resolver == null)
+        resolver = DecoderFactory.get().resolvingDecoder(written, SCHEMA, null);
+      resolver.configure(in);
+      Object result = Json.readObject(resolver);
+      resolver.drain();
+      return result;
+    }
+  }
+
+  /**
+   * Parses a JSON string and converts it to the object model described in
+   * {@link org.apache.avro.JsonProperties}.
+   */
+  public static Object parseJson(String s) {
+    try {
+      return JacksonUtils.toObject(MAPPER.readTree(FACTORY.createJsonParser(
+          new StringReader(s))));
+    } catch (JsonParseException e) {
+      throw new RuntimeException(e);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  /**
+   * Converts an instance of the object model described in
+   * {@link org.apache.avro.JsonProperties} to a JSON string.
+   */
+  public static String toString(Object datum) {
+    return JacksonUtils.toJsonNode(datum).toString();
+  }
+
+  /** Note: this enum must be kept aligned with the union in Json.avsc. */
+  private enum JsonType { LONG, DOUBLE, STRING, BOOLEAN, NULL, ARRAY, OBJECT }
+  
+  /**
+   * Write Json data as Avro data.
+   * @deprecated internal method
+   */
+  @Deprecated
+  public static void write(JsonNode node, Encoder out) throws IOException {
+    switch(node.asToken()) {
+    case VALUE_NUMBER_INT:
+      out.writeIndex(JsonType.LONG.ordinal());
+      out.writeLong(node.getLongValue());
+      break;
+    case VALUE_NUMBER_FLOAT:
+      out.writeIndex(JsonType.DOUBLE.ordinal());
+      out.writeDouble(node.getDoubleValue());
+      break;
+    case VALUE_STRING:
+      out.writeIndex(JsonType.STRING.ordinal());
+      out.writeString(node.getTextValue());
+      break;
+    case VALUE_TRUE:
+      out.writeIndex(JsonType.BOOLEAN.ordinal());
+      out.writeBoolean(true);
+      break;
+    case VALUE_FALSE:
+      out.writeIndex(JsonType.BOOLEAN.ordinal());
+      out.writeBoolean(false);
+      break;
+    case VALUE_NULL:
+      out.writeIndex(JsonType.NULL.ordinal());
+      out.writeNull();
+      break;
+    case START_ARRAY:
+      out.writeIndex(JsonType.ARRAY.ordinal());
+      out.writeArrayStart();
+      out.setItemCount(node.size());
+      for (JsonNode element : node) {
+        out.startItem();
+        write(element, out);
+      }
+      out.writeArrayEnd();
+      break;
+    case START_OBJECT:
+      out.writeIndex(JsonType.OBJECT.ordinal());
+      out.writeMapStart();
+      out.setItemCount(node.size());
+      Iterator<String> i = node.getFieldNames();
+      while (i.hasNext()) {
+        out.startItem();
+        String name = i.next();
+        out.writeString(name);
+        write(node.get(name), out);
+      }
+      out.writeMapEnd();
+      break;
+    default:
+      throw new AvroRuntimeException(node.asToken()+" unexpected: "+node);
+    }
+  }
+
+  /**
+   * Read Json data from Avro data.
+   * @deprecated internal method
+   */
+  @Deprecated
+  public static JsonNode read(Decoder in) throws IOException {
+    switch (JsonType.values()[in.readIndex()]) {
+    case LONG:
+      return new LongNode(in.readLong());
+    case DOUBLE:
+      return new DoubleNode(in.readDouble());
+    case STRING:
+      return new TextNode(in.readString());
+    case BOOLEAN:
+      return in.readBoolean() ? BooleanNode.TRUE : BooleanNode.FALSE;
+    case NULL:
+      in.readNull();
+      return NullNode.getInstance();
+    case ARRAY:
+      ArrayNode array = JsonNodeFactory.instance.arrayNode();
+      for (long l = in.readArrayStart(); l > 0; l = in.arrayNext())
+        for (long i = 0; i < l; i++)
+          array.add(read(in));
+      return array;
+    case OBJECT:
+      ObjectNode object = JsonNodeFactory.instance.objectNode();
+      for (long l = in.readMapStart(); l > 0; l = in.mapNext())
+        for (long i = 0; i < l; i++)
+          object.put(in.readString(), read(in));
+      return object;
+    default:
+      throw new AvroRuntimeException("Unexpected Json node type");
+    }
+  }
+
+  private static void writeObject(Object datum, Encoder out) throws IOException {
+    write(JacksonUtils.toJsonNode(datum), out);
+  }
+
+  private static Object readObject(Decoder in) throws IOException {
+    return JacksonUtils.toObject(read(in));
+  }
+
+}
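
A minimal usage sketch of the Json helpers above (illustrative only, not
part of the patch; the JSON literal is arbitrary):

    // Parse arbitrary JSON into the JsonProperties object model...
    Object datum = Json.parseJson("{\"name\":\"alice\",\"tags\":[1,2,3]}");
    // ...and render the object model back to a JSON string.
    System.out.println(Json.toString(datum));
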
diff --git a/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilder.java b/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilder.java
new file mode 100644
index 0000000..8c7a660
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilder.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.data;
+
+
+/** Interface for record builders */
+public interface RecordBuilder<T> {
+  /**
+   * Constructs a new instance using the values set in the RecordBuilder.
+   * If a particular value was not set and the schema defines a default 
+   * value, the default value will be used.
+   * @return a new instance using values set in the RecordBuilder.
+   */
+  T build();
+}
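
As an illustration of the interface, a sketch using
org.apache.avro.generic.GenericRecordBuilder, which implements
RecordBuilder<GenericData.Record> elsewhere in this import:

    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
        + "{\"name\":\"name\",\"type\":\"string\"},"
        + "{\"name\":\"age\",\"type\":\"int\",\"default\":0}]}");
    GenericData.Record user = new GenericRecordBuilder(schema)
        .set("name", "alice")
        .build();                 // unset "age" falls back to its default, 0
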
diff --git a/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilderBase.java b/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilderBase.java
new file mode 100644
index 0000000..ca73b70
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/data/RecordBuilderBase.java
@@ -0,0 +1,166 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.data;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.IndexedRecord;
+
+/** Abstract base class for RecordBuilder implementations.  Not thread-safe. */
+public abstract class RecordBuilderBase<T extends IndexedRecord> 
+  implements RecordBuilder<T> {
+  private static final Field[] EMPTY_FIELDS = new Field[0];
+  private final Schema schema;
+  private final Field[] fields;
+  private final boolean[] fieldSetFlags;
+  private final GenericData data;
+  
+  protected final Schema schema() { return schema; }
+  protected final Field[] fields() { return fields; }
+  protected final boolean[] fieldSetFlags() { return fieldSetFlags; }
+  protected final GenericData data() { return data; }
+
+  /**
+   * Creates a RecordBuilderBase for building records of the given type.
+   * @param schema the schema associated with the record class.
+   */
+  protected RecordBuilderBase(Schema schema, GenericData data) {
+    this.schema = schema;
+    this.data = data;
+    fields = (Field[]) schema.getFields().toArray(EMPTY_FIELDS);
+    fieldSetFlags = new boolean[fields.length];
+  }
+  
+  /**
+   * RecordBuilderBase copy constructor.
+   * Makes a deep copy of the values in the other builder.
+   * @param other RecordBuilderBase instance to copy.
+   */
+  protected RecordBuilderBase(RecordBuilderBase<T> other, GenericData data) {
+    this.schema = other.schema;
+    this.data = data;
+    fields = (Field[]) schema.getFields().toArray(EMPTY_FIELDS);
+    fieldSetFlags = new boolean[other.fieldSetFlags.length];
+    System.arraycopy(
+        other.fieldSetFlags, 0, fieldSetFlags, 0, fieldSetFlags.length);
+  }
+  
+  /**
+   * Validates that a particular value for a given field is valid according to 
+   * the following algorithm:
+   * 1. If the value is not null, or the field type is null, or the field type 
+   * is a union which accepts nulls, returns.
+   * 2. Else, if the field has a default value, returns.
+   * 3. Otherwise throws AvroRuntimeException. 
+   * @param field the field to validate.
+   * @param value the value to validate.
+   * @throws AvroRuntimeException if value is null and the given field does
+   * not accept null values.
+   */
+  protected void validate(Field field, Object value) {
+    if (isValidValue(field, value)) {
+      return;
+    }
+    else if (field.defaultValue() != null) {
+      return;
+    }
+    else {
+      throw new AvroRuntimeException(
+          "Field " + field + " does not accept null values");
+    }
+  }
+
+  /**
+   * Tests whether a value is valid for a specified field. 
+   * @param f the field for which to test the value.
+   * @param value the value to test.
+   * @return true if the value is valid for the given field; false otherwise.
+   */
+  protected static boolean isValidValue(Field f, Object value) {
+    if (value != null) {
+      return true;
+    }
+    
+    Schema schema = f.schema();
+    Type type = schema.getType();
+    
+    // If the type is null, any value is valid
+    if (type == Type.NULL) {
+      return true;
+    }
+
+    // If the type is a union that allows nulls, any value is valid
+    if (type == Type.UNION) {
+      for (Schema s : schema.getTypes()) {
+        if (s.getType() == Type.NULL) {
+          return true;
+        }
+      }
+    }
+    
+    // The value is null but the type does not allow nulls
+    return false;
+  }
+  
+  /**
+   * Gets the default value of the given field, if any.
+   * @param field the field whose default value should be retrieved.
+   * @return the default value associated with the given field, 
+   * or null if none is specified in the schema.
+   * @throws IOException 
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  protected Object defaultValue(Field field) throws IOException {    
+    return data.deepCopy(field.schema(), data.getDefaultValue(field));
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + Arrays.hashCode(fieldSetFlags);
+    result = prime * result + ((schema == null) ? 0 : schema.hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    @SuppressWarnings("rawtypes")
+    RecordBuilderBase other = (RecordBuilderBase) obj;
+    if (!Arrays.equals(fieldSetFlags, other.fieldSetFlags))
+      return false;
+    if (schema == null) {
+      if (other.schema != null)
+        return false;
+    } else if (!schema.equals(other.schema))
+      return false;
+    return true;
+  }
+}
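
The null-validity rule that isValidValue encodes can be restated as a
standalone sketch (illustrative; acceptsNull is a hypothetical helper):

    // null is only a valid value when the field's schema is NULL or a
    // union with a NULL branch.
    static boolean acceptsNull(Schema s) {
      if (s.getType() == Schema.Type.NULL)
        return true;
      if (s.getType() == Schema.Type.UNION)
        for (Schema branch : s.getTypes())
          if (branch.getType() == Schema.Type.NULL)
            return true;
      return false;
    }
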
diff --git a/lang/java/avro/src/main/java/org/apache/avro/data/TimeConversions.java b/lang/java/avro/src/main/java/org/apache/avro/data/TimeConversions.java
new file mode 100644
index 0000000..b53bb1f
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/data/TimeConversions.java
@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.data;
+
+import org.apache.avro.Conversion;
+import org.apache.avro.LogicalType;
+import org.apache.avro.Schema;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+import org.joda.time.Days;
+import org.joda.time.LocalDate;
+import org.joda.time.LocalTime;
+
+public class TimeConversions {
+  public static class DateConversion extends Conversion<LocalDate> {
+    private static final LocalDate EPOCH_DATE = new LocalDate(1970, 1, 1);
+
+    @Override
+    public Class<LocalDate> getConvertedType() {
+      return LocalDate.class;
+    }
+
+    @Override
+    public String getLogicalTypeName() {
+      return "date";
+    }
+
+    @Override
+    public LocalDate fromInt(Integer daysFromEpoch, Schema schema, LogicalType type) {
+      return EPOCH_DATE.plusDays(daysFromEpoch);
+    }
+
+    @Override
+    public Integer toInt(LocalDate date, Schema schema, LogicalType type) {
+      return Days.daysBetween(EPOCH_DATE, date).getDays();
+    }
+  }
+
+  public static class TimeConversion extends Conversion<LocalTime> {
+    @Override
+    public Class<LocalTime> getConvertedType() {
+      return LocalTime.class;
+    }
+
+    @Override
+    public String getLogicalTypeName() {
+      return "time-millis";
+    }
+
+    @Override
+    public LocalTime fromInt(Integer millisFromMidnight, Schema schema, LogicalType type) {
+      return LocalTime.fromMillisOfDay(millisFromMidnight);
+    }
+
+    @Override
+    public Integer toInt(LocalTime time, Schema schema, LogicalType type) {
+      return time.millisOfDay().get();
+    }
+  }
+
+  public static class TimeMicrosConversion extends Conversion<LocalTime> {
+    @Override
+    public Class<LocalTime> getConvertedType() {
+      return LocalTime.class;
+    }
+
+    @Override
+    public String getLogicalTypeName() {
+      return "time-micros";
+    }
+
+    @Override
+    public LocalTime fromLong(Long microsFromMidnight, Schema schema, LogicalType type) {
+      return LocalTime.fromMillisOfDay(microsFromMidnight / 1000);
+    }
+  }
+
+  public static class LossyTimeMicrosConversion extends TimeMicrosConversion {
+    @Override
+    public Long toLong(LocalTime time, Schema schema, LogicalType type) {
+      return 1000 * (long) time.millisOfDay().get();
+    }
+  }
+
+  public static class TimestampConversion extends Conversion<DateTime> {
+    @Override
+    public Class<DateTime> getConvertedType() {
+      return DateTime.class;
+    }
+
+    @Override
+    public String getLogicalTypeName() {
+      return "timestamp-millis";
+    }
+
+    @Override
+    public DateTime fromLong(Long millisFromEpoch, Schema schema, LogicalType type) {
+      return new DateTime(millisFromEpoch, DateTimeZone.UTC);
+    }
+
+    @Override
+    public Long toLong(DateTime timestamp, Schema schema, LogicalType type) {
+      return timestamp.getMillis();
+    }
+  }
+
+  public static class TimestampMicrosConversion extends Conversion<DateTime> {
+    @Override
+    public Class<DateTime> getConvertedType() {
+      return DateTime.class;
+    }
+
+    @Override
+    public String getLogicalTypeName() {
+      return "timestamp-micros";
+    }
+
+    @Override
+    public DateTime fromLong(Long microsFromEpoch, Schema schema, LogicalType type) {
+      return new DateTime(microsFromEpoch / 1000, DateTimeZone.UTC);
+    }
+  }
+
+  public static class LossyTimestampMicrosConversion extends TimestampMicrosConversion {
+    @Override
+    public Long toLong(DateTime timestamp, Schema schema, LogicalType type) {
+      return 1000 * timestamp.getMillis();
+    }
+  }
+}
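
Exercising one of the conversions above directly (sketch; the schema and
logical-type arguments are unused by these implementations, so null is
passed):

    TimeConversions.DateConversion dates = new TimeConversions.DateConversion();
    LocalDate epoch = dates.fromInt(0, null, null);                  // 1970-01-01
    int oneDay = dates.toInt(new LocalDate(1970, 1, 2), null, null); // 1
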
diff --git a/lang/java/avro/src/main/java/org/apache/avro/data/package.html b/lang/java/avro/src/main/java/org/apache/avro/data/package.html
new file mode 100644
index 0000000..792c93a
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/data/package.html
@@ -0,0 +1,22 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>Interfaces and base classes shared by generic, specific and reflect.
+</body>
+</html>
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/BZip2Codec.java b/lang/java/avro/src/main/java/org/apache/avro/file/BZip2Codec.java
new file mode 100644
index 0000000..09cf623
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/BZip2Codec.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
+import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
+
+/** Implements bzip2 compression and decompression. */
+public class BZip2Codec extends Codec {
+
+  public static final int DEFAULT_BUFFER_SIZE = 64 * 1024;
+  private ByteArrayOutputStream outputBuffer;
+
+  static class Option extends CodecFactory {
+    @Override
+    protected Codec createInstance() {
+      return new BZip2Codec();
+    }
+  }
+
+  @Override
+  public String getName() { return DataFileConstants.BZIP2_CODEC; }
+
+  @Override
+  public ByteBuffer compress(ByteBuffer uncompressedData) throws IOException {
+
+    ByteArrayOutputStream baos = getOutputBuffer(uncompressedData.remaining());
+    BZip2CompressorOutputStream outputStream = new BZip2CompressorOutputStream(baos);
+
+    try {
+      outputStream.write(uncompressedData.array(),
+                         uncompressedData.position(),
+                         uncompressedData.remaining());
+    } finally {
+      outputStream.close();
+    }
+
+    ByteBuffer result = ByteBuffer.wrap(baos.toByteArray());
+    return result;
+  }
+
+  @Override
+  public ByteBuffer decompress(ByteBuffer compressedData) throws IOException {
+    ByteArrayInputStream bais = new ByteArrayInputStream(
+        compressedData.array(),
+        compressedData.arrayOffset() + compressedData.position(),
+        compressedData.remaining());
+    BZip2CompressorInputStream inputStream = new BZip2CompressorInputStream(bais);
+    try {
+      ByteArrayOutputStream baos = new ByteArrayOutputStream();
+
+      byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
+
+      int readCount;
+
+      while ((readCount = inputStream.read(buffer, 0, buffer.length)) > 0) {
+        baos.write(buffer, 0, readCount);
+      }
+      
+      ByteBuffer result = ByteBuffer.wrap(baos.toByteArray());
+      return result;
+    } finally {
+      inputStream.close();
+    }
+  }
+
+  @Override public int hashCode() { return getName().hashCode(); }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null || getClass() != obj.getClass())
+      return false;
+    return true;
+  }
+
+  //get and initialize the output buffer for use.
+  private ByteArrayOutputStream getOutputBuffer(int suggestedLength) {
+    if (null == outputBuffer) {
+      outputBuffer = new ByteArrayOutputStream(suggestedLength);
+    }
+    outputBuffer.reset();
+    return outputBuffer;
+  }
+
+
+}
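
A round trip through the codec above (sketch; exception handling omitted):

    BZip2Codec codec = new BZip2Codec();
    ByteBuffer raw = ByteBuffer.wrap("hello avro".getBytes("UTF-8"));
    ByteBuffer packed = codec.compress(raw);
    ByteBuffer restored = codec.decompress(packed);   // original bytes again
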
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/Codec.java b/lang/java/avro/src/main/java/org/apache/avro/file/Codec.java
new file mode 100644
index 0000000..af5e013
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/Codec.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/** 
+ * Interface for Avro-supported compression codecs for data files.
+ */
+public abstract class Codec {
+  /** Name of the codec; written to the file's metadata. */
+  public abstract String getName();
+  /** Compresses the input data. */
+  public abstract ByteBuffer compress(ByteBuffer uncompressedData) throws IOException;
+  /** Decompresses the data. */
+  public abstract ByteBuffer decompress(ByteBuffer compressedData) throws IOException;
+  /**
+   * Codecs must implement an equals() method.  Two codecs, A and B, are equal
+   * if: the result of A and B decompressing content compressed by A is the same
+   * AND the result of A and B decompressing content compressed by B is the same.
+   **/
+  @Override
+  public abstract boolean equals(Object other);
+  /** 
+   * Codecs must implement a hashCode() method that is consistent with equals().*/
+  @Override
+  public abstract int hashCode();
+  @Override
+  public String toString() {
+    return getName();
+  }
+}
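
A minimal pass-through implementation satisfying the contract above (sketch;
the IdentityCodec name is hypothetical and not registered anywhere):

    class IdentityCodec extends Codec {
      @Override public String getName() { return "identity"; }
      @Override public ByteBuffer compress(ByteBuffer data) { return data; }
      @Override public ByteBuffer decompress(ByteBuffer data) { return data; }
      // any two IdentityCodec instances decompress each other's output alike
      @Override public boolean equals(Object other) {
        return other != null && getClass() == other.getClass();
      }
      @Override public int hashCode() { return getName().hashCode(); }
    }
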
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/CodecFactory.java b/lang/java/avro/src/main/java/org/apache/avro/file/CodecFactory.java
new file mode 100644
index 0000000..6f25ea2
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/CodecFactory.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.zip.Deflater;
+
+import org.apache.avro.AvroRuntimeException;
+import org.tukaani.xz.LZMA2Options;
+
+/**  Encapsulates the ability to specify and configure a compression codec.
+ *
+ * Currently there are five codecs registered by default:
+ * <ul>
+ *   <li>{@code null}</li>
+ *   <li>{@code deflate}</li>
+ *   <li>{@code snappy}</li>
+ *   <li>{@code bzip2}</li>
+ *   <li>{@code xz}</li>
+ * </ul>
+ *
+ * New and custom codecs can be registered using {@link #addCodec(String,
+ * CodecFactory)}.
+ */
+public abstract class CodecFactory {
+  /** Null codec, for no compression. */
+  public static CodecFactory nullCodec() {
+    return NullCodec.OPTION;
+  }
+
+  /** Deflate codec, with specific compression.
+   * compressionLevel should be between 1 and 9, inclusive. */
+  public static CodecFactory deflateCodec(int compressionLevel) {
+    return new DeflateCodec.Option(compressionLevel);
+  }
+
+  /** XZ codec, with specific compression.
+   * compressionLevel should be between 1 and 9, inclusive. */
+  public static CodecFactory xzCodec(int compressionLevel) {
+    return new XZCodec.Option(compressionLevel);
+  }
+
+  /** Snappy codec.*/
+  public static CodecFactory snappyCodec() {
+    return new SnappyCodec.Option();
+  }
+
+  /** bzip2 codec.*/
+  public static CodecFactory bzip2Codec() {
+    return new BZip2Codec.Option();
+  }
+
+  /** Creates internal Codec. */
+  protected abstract Codec createInstance();
+  
+  /** Mapping of string names (stored as metas) and codecs. 
+   * Note that currently options (like compression level)
+   * are not recoverable. */
+  private static final Map<String, CodecFactory> REGISTERED = 
+    new HashMap<String, CodecFactory>();
+
+  public static final int DEFAULT_DEFLATE_LEVEL = Deflater.DEFAULT_COMPRESSION;
+  public static final int DEFAULT_XZ_LEVEL = LZMA2Options.PRESET_DEFAULT;
+
+  static {
+    addCodec("null", nullCodec());
+    addCodec("deflate", deflateCodec(DEFAULT_DEFLATE_LEVEL));
+    addCodec("snappy", snappyCodec());
+    addCodec("bzip2", bzip2Codec());
+    addCodec("xz", xzCodec(DEFAULT_XZ_LEVEL));
+  }
+
+  /** Maps a codec name into a CodecFactory.
+   *
+   * Currently there are five codecs registered by default:
+   * <ul>
+   *   <li>{@code null}</li>
+   *   <li>{@code deflate}</li>
+   *   <li>{@code snappy}</li>
+   *   <li>{@code bzip2}</li>
+   *   <li>{@code xz}</li>
+   * </ul>
+   */
+  public static CodecFactory fromString(String s) {
+    CodecFactory o = REGISTERED.get(s);
+    if (o == null) {
+      throw new AvroRuntimeException("Unrecognized codec: " + s);
+    }
+    return o;
+  }
+
+  /** Adds a new codec implementation.  If name already had
+   * a codec associated with it, returns the previous codec. */
+  public static CodecFactory addCodec(String name, CodecFactory c) {
+    return REGISTERED.put(name, c);
+  }
+  
+  @Override
+  public String toString() {
+    Codec instance = this.createInstance();
+    return instance.toString();
+  }
+  
+}
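
Registering and resolving codecs, sketched (IdentityCodec is the
hypothetical pass-through codec from the note above):

    CodecFactory.addCodec("identity", new CodecFactory() {
      @Override protected Codec createInstance() { return new IdentityCodec(); }
    });
    CodecFactory deflate = CodecFactory.fromString("deflate");  // built-in
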
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileConstants.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileConstants.java
new file mode 100644
index 0000000..4061962
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileConstants.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.file;
+
+/**
+ * Constants used in data files.
+ */
+public class DataFileConstants {
+  private DataFileConstants() {}                  // no public ctor
+
+  public static final byte VERSION = 1;
+  public static final byte[] MAGIC = new byte[] {
+    (byte)'O', (byte)'b', (byte)'j', VERSION
+  };
+  public static final long FOOTER_BLOCK = -1;
+  public static final int SYNC_SIZE = 16;
+  public static final int DEFAULT_SYNC_INTERVAL = 4000*SYNC_SIZE; 
+
+  public static final String SCHEMA = "avro.schema";
+  public static final String CODEC = "avro.codec";
+  public static final String NULL_CODEC = "null";
+  public static final String DEFLATE_CODEC = "deflate";
+  public static final String SNAPPY_CODEC = "snappy";
+  public static final String BZIP2_CODEC = "bzip2";
+  public static final String XZ_CODEC = "xz";
+
+}
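
The MAGIC constant makes it easy to sniff a container file (sketch;
"data.avro" is a hypothetical path):

    byte[] head = new byte[DataFileConstants.MAGIC.length];
    InputStream in = new FileInputStream("data.avro");
    try {
      boolean isAvro = in.read(head) == head.length
          && Arrays.equals(head, DataFileConstants.MAGIC);
      System.out.println(isAvro ? "Avro data file" : "not an Avro data file");
    } finally {
      in.close();
    }
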
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader.java
new file mode 100644
index 0000000..be12574
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader.java
@@ -0,0 +1,251 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.IOException;
+import java.io.EOFException;
+import java.io.InputStream;
+import java.io.File;
+import java.util.Arrays;
+
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.DatumReader;
+import static org.apache.avro.file.DataFileConstants.SYNC_SIZE;
+import static org.apache.avro.file.DataFileConstants.MAGIC;
+
+/** Random access to files written with {@link DataFileWriter}.
+ * @see DataFileWriter
+ */
+public class DataFileReader<D>
+  extends DataFileStream<D> implements FileReader<D> {
+  private SeekableInputStream sin;
+  private long blockStart;
+
+  /** Open a reader for a file. */
+  public static <D> FileReader<D> openReader(File file, DatumReader<D> reader)
+    throws IOException {
+    return openReader(new SeekableFileInput(file), reader);
+  }
+
+  /** Open a reader for a file. */
+  public static <D> FileReader<D> openReader(SeekableInput in,
+                                             DatumReader<D> reader)
+    throws IOException {
+    if (in.length() < MAGIC.length)
+      throw new IOException("Not an Avro data file");
+
+    // read magic header
+    byte[] magic = new byte[MAGIC.length];
+    in.seek(0);
+    for (int c = 0, n; c < magic.length; c += n) { // loop until magic is full
+      n = in.read(magic, c, magic.length - c);
+      if (n < 0) throw new IOException("Not an Avro data file");
+    }
+    in.seek(0);
+
+    if (Arrays.equals(MAGIC, magic))              // current format
+      return new DataFileReader<D>(in, reader);
+    if (Arrays.equals(DataFileReader12.MAGIC, magic)) // 1.2 format
+      return new DataFileReader12<D>(in, reader);
+    
+    throw new IOException("Not an Avro data file");
+  }
+
+  /**
+   * Construct a reader for a file at the current position of the input,
+   * without reading the header.
+   * @param sync True to read forward to the next sync point after opening,
+   *             false to assume that the input is already at a valid sync
+   *             point.
+   */
+  public static <D> DataFileReader<D> openReader(SeekableInput in,
+                                                 DatumReader<D> reader,
+                                                 Header header,
+                                                 boolean sync)
+      throws IOException {
+    DataFileReader<D> dreader = new DataFileReader<D>(in, reader, header);
+    // seek/sync to an (assumed) valid position
+    if (sync)
+      dreader.sync(in.tell());
+    else
+      dreader.seek(in.tell());
+    return dreader;
+  }
+
+  /** Construct a reader for a file. */
+  public DataFileReader(File file, DatumReader<D> reader) throws IOException {
+    this(new SeekableFileInput(file), reader);
+  }
+
+  /** Construct a reader for a file. */
+  public DataFileReader(SeekableInput sin, DatumReader<D> reader)
+    throws IOException {
+    super(reader);
+    this.sin = new SeekableInputStream(sin);
+    initialize(this.sin);
+    blockFinished();
+  }
+
+  /** Construct using a {@link DataFileStream.Header}.  Does not call {@link
+      #sync(long)} or {@link #seek(long)}. */
+  protected DataFileReader(SeekableInput sin, DatumReader<D> reader,
+                           Header header) throws IOException {
+    super(reader);
+    this.sin = new SeekableInputStream(sin);
+    initialize(this.sin, header);
+  }
+
+  /** Move to a specific, known synchronization point, one returned from {@link
+   * DataFileWriter#sync()} while writing.  If synchronization points were not
+   * saved while writing a file, use {@link #sync(long)} instead. */
+  public void seek(long position) throws IOException {
+    sin.seek(position);
+    vin = DecoderFactory.get().binaryDecoder(this.sin, vin);
+    datumIn = null;
+    blockRemaining = 0;
+    blockStart = position;
+  }
+
+  /** Move to the next synchronization point after a position. To process a
+   * range of file entries, call this with the starting position, then check
+   * {@link #pastSync(long)} with the end point before each call to {@link
+   * #next()}. */
+  @Override
+  public void sync(long position) throws IOException {
+    seek(position);
+    // work around an issue where 1.5.4 C stored sync in metadata
+    if ((position == 0) && (getMeta("avro.sync") != null)) {
+      initialize(sin);                            // re-init to skip header
+      return;
+    }
+    try {
+      int i = 0, b;
+      InputStream in = vin.inputStream();
+      vin.readFixed(syncBuffer);
+      do {
+        int j = 0;
+        for (; j < SYNC_SIZE; j++) {
+          if (getHeader().sync[j] != syncBuffer[(i+j)%SYNC_SIZE])
+            break;
+        }
+        if (j == SYNC_SIZE) {                     // matched a complete sync
+          blockStart = position + i + SYNC_SIZE;
+          return;
+        }
+        b = in.read();
+        syncBuffer[i++%SYNC_SIZE] = (byte)b;
+      } while (b != -1);
+    } catch (EOFException e) {
+      // fall through
+    }
+    // no match or EOF: set the block start to the end position
+    blockStart = sin.tell();
+  }
+
+  @Override
+  protected void blockFinished() throws IOException {
+    blockStart = sin.tell() - vin.inputStream().available();
+  }
+
+  /** Return the last synchronization point before our current position. */
+  public long previousSync() {
+    return blockStart;
+  }
+
+  /** Return true if past the next synchronization point after a position. */ 
+  @Override
+  public boolean pastSync(long position) throws IOException {
+    return ((blockStart >= position+SYNC_SIZE)||(blockStart >= sin.length()));
+  }
+
+  @Override public long tell() throws IOException { return sin.tell(); }
+
+  static class SeekableInputStream extends InputStream
+      implements SeekableInput {
+    private final byte[] oneByte = new byte[1];
+    private SeekableInput in;
+
+    SeekableInputStream(SeekableInput in) throws IOException {
+      this.in = in;
+    }
+
+    @Override
+    public void seek(long p) throws IOException {
+      if (p < 0)
+        throw new IOException("Illegal seek: " + p);
+      in.seek(p);
+    }
+
+    @Override
+    public long tell() throws IOException {
+      return in.tell();
+    }
+
+    @Override
+    public long length() throws IOException {
+      return in.length();
+    }
+
+    @Override
+    public int read(byte[] b) throws IOException {
+      return in.read(b, 0, b.length);
+    }
+
+    @Override
+    public int read(byte[] b, int off, int len) throws IOException {
+      return in.read(b, off, len);
+    }
+
+    @Override
+    public int read() throws IOException {
+      int n = read(oneByte, 0, 1);
+      if (n == 1) {
+        return oneByte[0] & 0xff;
+      } else {
+        return n;
+      }
+    }
+
+    @Override
+    public long skip(long skip) throws IOException {
+      long position = in.tell();
+      long length = in.length();
+      long remaining = length - position;
+      if (remaining > skip) {
+        in.seek(position + skip);                 // advance by skip bytes
+      } else {
+        in.seek(length);                          // skip to the end of input
+      }
+      return in.tell() - position;
+    }
+
+    @Override
+    public void close() throws IOException {
+      in.close();
+      super.close();
+    }
+
+    @Override
+    public int available() throws IOException {
+      long remaining = (in.length() - in.tell());
+      return (remaining > Integer.MAX_VALUE) ? Integer.MAX_VALUE
+          : (int) remaining;
+    }
+  }
+}
+
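
The typical read path through DataFileReader (sketch; "users.avro" is a
hypothetical path):

    DatumReader<GenericRecord> datumReader =
        new GenericDatumReader<GenericRecord>();
    DataFileReader<GenericRecord> fileReader =
        new DataFileReader<GenericRecord>(new File("users.avro"), datumReader);
    try {
      for (GenericRecord record : fileReader)   // the reader is an Iterable
        System.out.println(record);
    } finally {
      fileReader.close();
    }
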
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader12.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader12.java
new file mode 100644
index 0000000..0194de0
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileReader12.java
@@ -0,0 +1,227 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.io.Closeable;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.BinaryDecoder;
+
+/** Read files written by Avro version 1.2. */
+public class DataFileReader12<D> implements FileReader<D>, Closeable {
+  private static final byte VERSION = 0;
+  static final byte[] MAGIC = new byte[] {
+    (byte)'O', (byte)'b', (byte)'j', VERSION
+  };
+  private static final long FOOTER_BLOCK = -1;
+  private static final int SYNC_SIZE = 16;
+  private static final int SYNC_INTERVAL = 1000*SYNC_SIZE; 
+
+  private static final String SCHEMA = "schema";
+  private static final String SYNC = "sync";
+  private static final String COUNT = "count";
+  private static final String CODEC = "codec";
+  private static final String NULL_CODEC = "null";
+
+  private Schema schema;
+  private DatumReader<D> reader;
+  private DataFileReader.SeekableInputStream in;
+  private BinaryDecoder vin;
+
+  private Map<String,byte[]> meta = new HashMap<String,byte[]>();
+
+  private long count;                           // # entries in file
+  private long blockCount;                      // # entries in block
+  private long blockStart;
+  private byte[] sync = new byte[SYNC_SIZE];
+  private byte[] syncBuffer = new byte[SYNC_SIZE];
+
+  /** Construct a reader for a file. */
+  public DataFileReader12(SeekableInput sin, DatumReader<D> reader)
+    throws IOException {
+    this.in = new DataFileReader.SeekableInputStream(sin);
+
+    byte[] magic = new byte[4];
+    in.read(magic);
+    if (!Arrays.equals(MAGIC, magic))
+      throw new IOException("Not a data file.");
+
+    long length = in.length();
+    in.seek(length-4);
+    int footerSize=(in.read()<<24)+(in.read()<<16)+(in.read()<<8)+in.read();
+    seek(length-footerSize);
+    long l = vin.readMapStart();
+    if (l > 0) {
+      do {
+        for (long i = 0; i < l; i++) {
+          String key = vin.readString(null).toString();
+          ByteBuffer value = vin.readBytes(null);
+          byte[] bb = new byte[value.remaining()];
+          value.get(bb);
+          meta.put(key, bb);
+        }
+      } while ((l = vin.mapNext()) != 0);
+    }
+
+    this.sync = getMeta(SYNC);
+    this.count = getMetaLong(COUNT);
+    String codec = getMetaString(CODEC);
+    if (codec != null && ! codec.equals(NULL_CODEC)) {
+      throw new IOException("Unknown codec: " + codec);
+    }
+    this.schema = Schema.parse(getMetaString(SCHEMA));
+    this.reader = reader;
+
+    reader.setSchema(schema);
+
+    seek(MAGIC.length);         // seek to start
+  }
+
+  /** Return the value of a metadata property. */
+  public synchronized byte[] getMeta(String key) {
+    return meta.get(key);
+  }
+  /** Return the value of a metadata property. */
+  public synchronized String getMetaString(String key) {
+    byte[] value = getMeta(key);
+    if (value == null) {
+      return null;
+    }
+    try {
+      return new String(value, "UTF-8");
+    } catch (UnsupportedEncodingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+  /** Return the value of a metadata property. */
+  public synchronized long getMetaLong(String key) {
+    return Long.parseLong(getMetaString(key));
+  }
+
+  /** Return the schema used in this file. */
+  @Override
+  public Schema getSchema() { return schema; }
+
+  // Iterator and Iterable implementation
+  private D peek;
+  @Override public Iterator<D> iterator() { return this; }
+  @Override public boolean hasNext() {
+    if (peek != null || blockCount != 0)
+      return true;
+    this.peek = next();
+    return peek != null;
+  }
+  @Override public D next() {
+    if (peek != null) {
+      D result = peek;
+      peek = null;
+      return result;
+    }
+    try {
+      return next(null);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+  @Override public void remove() { throw new UnsupportedOperationException(); }
+
+  /** Return the next datum in the file. */
+  @Override
+  public synchronized D next(D reuse) throws IOException {
+    while (blockCount == 0) {                     // at start of block
+
+      if (in.tell() == in.length())               // at eof
+        return null;
+
+      skipSync();                                 // skip a sync
+
+      blockCount = vin.readLong();                // read blockCount
+         
+      if (blockCount == FOOTER_BLOCK) { 
+        seek(vin.readLong()+in.tell());           // skip a footer
+      }
+    }
+    blockCount--;
+    return reader.read(reuse, vin);
+  }
+
+  private void skipSync() throws IOException {
+    vin.readFixed(syncBuffer);
+    if (!Arrays.equals(syncBuffer, sync))
+      throw new IOException("Invalid sync!");
+  }
+
+  /** Move to the specified synchronization point, as returned by {@link
+   * DataFileWriter#sync()}. */
+  public synchronized void seek(long position) throws IOException {
+    in.seek(position);
+    blockCount = 0;
+    blockStart = position;
+    vin = DecoderFactory.get().binaryDecoder(in, vin);
+  }
+
+  /** Move to the next synchronization point after a position. */
+  @Override
+  public synchronized void sync(long position) throws IOException {
+    if (in.tell()+SYNC_SIZE >= in.length()) {
+      seek(in.length());
+      return;
+    }
+    in.seek(position);
+    vin.readFixed(syncBuffer);
+    for (int i = 0; in.tell() < in.length(); i++) {
+      int j = 0;
+      for (; j < sync.length; j++) {
+        if (sync[j] != syncBuffer[(i+j)%sync.length])
+          break;
+      }
+      if (j == sync.length) {                     // position before sync
+        seek(in.tell() - SYNC_SIZE);
+        return;
+      }
+      syncBuffer[i%sync.length] = (byte)in.read();
+    }
+    seek(in.length());
+  }
+
+  /** Return true if past the next synchronization point after a position. */ 
+  @Override
+  public boolean pastSync(long position) throws IOException {
+    return ((blockStart >= position+SYNC_SIZE)||(blockStart >= in.length()));
+  }
+
+  /** Return the current position in the input. */
+  @Override
+  public long tell() throws IOException { return in.tell(); }
+
+  /** Close this reader. */
+  @Override
+  public synchronized void close() throws IOException {
+    in.close();
+  }
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileStream.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileStream.java
new file mode 100644
index 0000000..a19762b
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileStream.java
@@ -0,0 +1,378 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Closeable;
+import java.io.UnsupportedEncodingException;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Schema;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.BinaryDecoder;
+import org.apache.avro.io.DatumReader;
+
+/** Streaming access to files written by {@link DataFileWriter}.  Use {@link
+ * DataFileReader} for file-based input.
+ * @see DataFileWriter
+ */
+public class DataFileStream<D> implements Iterator<D>, Iterable<D>, Closeable {
+
+  /**
+   * A handle that can be used to reopen a DataFile without re-reading the
+   * header of the stream.
+   */
+  public static final class Header {
+    Schema schema;
+    Map<String,byte[]> meta = new HashMap<String,byte[]>();
+    private transient List<String> metaKeyList = new ArrayList<String>();
+    byte[] sync = new byte[DataFileConstants.SYNC_SIZE];
+    private Header() {}
+  }
+
+  private DatumReader<D> reader;
+  private long blockSize;
+  private boolean availableBlock = false;
+  private Header header;
+
+  /** Decoder on raw input stream.  (Used for metadata.) */
+  BinaryDecoder vin;
+  /** Secondary decoder, for datums.
+   *  (Different from vin for block segments.) */
+  BinaryDecoder datumIn = null;
+
+  ByteBuffer blockBuffer;
+  long blockCount;                              // # entries in block
+  long blockRemaining;                          // # entries remaining in block
+  byte[] syncBuffer = new byte[DataFileConstants.SYNC_SIZE];
+  private Codec codec;
+
+  /** Construct a reader for an input stream.  For file-based input, use
+   * {@link DataFileReader}.  This buffers internally, so wrapping with a
+   * {@link java.io.BufferedInputStream} is not necessary. */
+  public DataFileStream(InputStream in, DatumReader<D> reader)
+    throws IOException {
+    this.reader = reader;
+    initialize(in);
+  }
+
+  /**
+   * Creates an uninitialized DataFileStream.
+   */
+  protected DataFileStream(DatumReader<D> reader) throws IOException {
+    this.reader = reader;
+  }
+  
+  /** Initialize the stream by reading from its head. */
+  void initialize(InputStream in) throws IOException {
+    this.header = new Header();
+    this.vin = DecoderFactory.get().binaryDecoder(in, vin);
+    byte[] magic = new byte[DataFileConstants.MAGIC.length];
+    try {
+      vin.readFixed(magic);                         // read magic
+    } catch (IOException e) {
+      throw new IOException("Not a data file.");
+    }
+    if (!Arrays.equals(DataFileConstants.MAGIC, magic))
+      throw new IOException("Not a data file.");
+
+    long l = vin.readMapStart();                  // read meta data
+    if (l > 0) {
+      do {
+        for (long i = 0; i < l; i++) {
+          String key = vin.readString(null).toString();
+          ByteBuffer value = vin.readBytes(null);
+          byte[] bb = new byte[value.remaining()];
+          value.get(bb);
+          header.meta.put(key, bb);
+          header.metaKeyList.add(key);
+        }
+      } while ((l = vin.mapNext()) != 0);
+    }
+    vin.readFixed(header.sync);                          // read sync
+    
+    // finalize the header
+    header.metaKeyList = Collections.unmodifiableList(header.metaKeyList);
+    header.schema = Schema.parse(getMetaString(DataFileConstants.SCHEMA),false);
+    this.codec = resolveCodec();
+    reader.setSchema(header.schema);
+  }
+
+  /** Initialize the stream without reading from it. */
+  void initialize(InputStream in, Header header) throws IOException {
+    this.header = header;
+    this.codec = resolveCodec();
+    reader.setSchema(header.schema);
+  }
+
+  Codec resolveCodec() {
+    String codecStr = getMetaString(DataFileConstants.CODEC);
+    if (codecStr != null) {
+      return CodecFactory.fromString(codecStr).createInstance();
+    } else {
+      return CodecFactory.nullCodec().createInstance();
+    }
+  }
+
+  /** A handle that can be used to reopen this stream without rereading the
+   * head. */
+  public Header getHeader() { return header; }
+
+  /** Return the schema used in this file. */
+  public Schema getSchema() { return header.schema; }
+
+  /** Return the list of keys in the metadata */
+  public List<String> getMetaKeys() {
+    return header.metaKeyList;
+  }
+
+  /** Return the value of a metadata property. */
+  public byte[] getMeta(String key) {
+    return header.meta.get(key);
+  }
+  /** Return the value of a metadata property. */
+  public String getMetaString(String key) {
+    byte[] value = getMeta(key);
+    if (value == null) {
+      return null;
+    }
+    try {
+      return new String(value, "UTF-8");
+    } catch (UnsupportedEncodingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+  /** Return the value of a metadata property. */
+  public long getMetaLong(String key) {
+    return Long.parseLong(getMetaString(key));
+  }
+
+  /** Returns an iterator over entries in this file.  Note that this iterator
+   * is shared with other users of the file: it does not contain a separate
+   * pointer into the file. */
+  @Override
+  public Iterator<D> iterator() { return this; }
+
+  private DataBlock block = null;
+  /** True if more entries remain in this file. */
+  @Override
+  public boolean hasNext() {
+    try {
+      if (blockRemaining == 0) {
+        // check that the previous block was finished
+        if (null != datumIn) {
+          boolean atEnd = datumIn.isEnd();
+          if (!atEnd) {
+            throw new IOException("Block read partially, the data may be corrupt");
+          }
+        }
+        if (hasNextBlock()) {
+          block = nextRawBlock(block);
+          block.decompressUsing(codec);
+          blockBuffer = block.getAsByteBuffer();
+          datumIn = DecoderFactory.get().binaryDecoder(
+              blockBuffer.array(), blockBuffer.arrayOffset() +
+              blockBuffer.position(), blockBuffer.remaining(), datumIn);
+        }
+      }
+      return blockRemaining != 0;
+    } catch (EOFException e) {                    // at EOF
+      return false;
+    } catch (IOException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  /** Read the next datum in the file.
+   * @throws NoSuchElementException if no more remain in the file.
+   */
+  @Override
+  public D next() {
+    try {
+      return next(null);
+    } catch (IOException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  /** Read the next datum from the file.
+   * @param reuse an instance to reuse.
+   * @throws NoSuchElementException if no more remain in the file.
+   */
+  public D next(D reuse) throws IOException {
+    if (!hasNext())
+      throw new NoSuchElementException();
+    D result = reader.read(reuse, datumIn);
+    if (0 == --blockRemaining) {
+      blockFinished();
+    }
+    return result;
+  }
+
+  /** Expert: Return the next block in the file, as binary-encoded data. */
+  public ByteBuffer nextBlock() throws IOException {
+    if (!hasNext())
+      throw new NoSuchElementException();
+    if (blockRemaining != blockCount)
+      throw new IllegalStateException("Not at block start.");
+    blockRemaining = 0;
+    datumIn = null;
+    return blockBuffer;
+  }
+
+  /** Expert: Return the count of items in the current block. */
+  public long getBlockCount() { return blockCount; }
+
+  /** Expert: Return the size in bytes (uncompressed) of the current block. */
+  public long getBlockSize() { return blockSize; }
+
+  protected void blockFinished() throws IOException {
+    // nothing for the stream impl
+  }
+
+  boolean hasNextBlock() {
+    try {
+      if (availableBlock) return true;
+      if (vin.isEnd()) return false;
+      blockRemaining = vin.readLong();      // read block count
+      blockSize = vin.readLong();           // read block size
+      if (blockSize > Integer.MAX_VALUE ||
+          blockSize < 0) {
+        throw new IOException("Block size invalid or too large for this " +
+          "implementation: " + blockSize);
+      }
+      blockCount = blockRemaining;
+      availableBlock = true;
+      return true;
+    } catch (EOFException eof) {
+      return false;
+    } catch (IOException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  DataBlock nextRawBlock(DataBlock reuse) throws IOException {
+    if (!hasNextBlock()) {
+      throw new NoSuchElementException();
+    }
+    if (reuse == null || reuse.data.length < (int) blockSize) {
+      reuse = new DataBlock(blockRemaining, (int) blockSize);
+    } else {
+      reuse.numEntries = blockRemaining;
+      reuse.blockSize = (int)blockSize;
+    }
+    // throws if it can't read the size requested
+    vin.readFixed(reuse.data, 0, reuse.blockSize);
+    vin.readFixed(syncBuffer);
+    availableBlock = false;
+    if (!Arrays.equals(syncBuffer, header.sync))
+      throw new IOException("Invalid sync!");
+    return reuse;
+  }
+
+  /** Not supported. */
+  @Override
+  public void remove() { throw new UnsupportedOperationException(); }
+
+  /** Close this reader. */
+  @Override
+  public void close() throws IOException {
+    vin.inputStream().close();
+  }
+
+  static class DataBlock {
+    private byte[] data;
+    private long numEntries;
+    private int blockSize;
+    private int offset = 0;
+    private boolean flushOnWrite = true;
+    private DataBlock(long numEntries, int blockSize) {
+      this.data = new byte[blockSize];
+      this.numEntries = numEntries;
+      this.blockSize = blockSize;
+    }
+    
+    DataBlock(ByteBuffer block, long numEntries) {
+      this.data = block.array();
+      this.blockSize = block.remaining();
+      this.offset = block.arrayOffset() + block.position();
+      this.numEntries = numEntries;
+    }
+    
+    byte[] getData() {
+      return data;
+    }
+    
+    long getNumEntries() {
+      return numEntries;
+    }
+    
+    int getBlockSize() {
+      return blockSize;
+    }
+
+    boolean isFlushOnWrite() {
+      return flushOnWrite;
+    }
+
+    void setFlushOnWrite(boolean flushOnWrite) {
+      this.flushOnWrite = flushOnWrite;
+    }
+    
+    ByteBuffer getAsByteBuffer() {
+      return ByteBuffer.wrap(data, offset, blockSize);
+    }
+    
+    void decompressUsing(Codec c) throws IOException {
+      ByteBuffer result = c.decompress(getAsByteBuffer());
+      data = result.array();
+      blockSize = result.remaining();
+    }
+    
+    void compressUsing(Codec c) throws IOException {
+      ByteBuffer result = c.compress(getAsByteBuffer());
+      data = result.array();
+      blockSize = result.remaining();
+    }
+    
+    void writeBlockTo(BinaryEncoder e, byte[] sync) throws IOException {
+      e.writeLong(this.numEntries);
+      e.writeLong(this.blockSize);
+      e.writeFixed(this.data, offset, this.blockSize);
+      e.writeFixed(sync);
+      if (flushOnWrite) {
+        e.flush();
+      }
+    }
+    
+  }
+}
+
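
The streaming counterpart for non-seekable input (sketch; reads container
data from standard input):

    DataFileStream<GenericRecord> stream = new DataFileStream<GenericRecord>(
        System.in, new GenericDatumReader<GenericRecord>());
    try {
      while (stream.hasNext())
        System.out.println(stream.next());
    } finally {
      stream.close();
    }
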
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java
new file mode 100644
index 0000000..52fb895
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java
@@ -0,0 +1,480 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.BufferedOutputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.Closeable;
+import java.io.File;
+import java.io.FilterOutputStream;
+import java.io.Flushable;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.UnsupportedEncodingException;
+import java.nio.ByteBuffer;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileStream.DataBlock;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.EncoderFactory;
+
+/** Stores in a file a sequence of data conforming to a schema.  The schema is
+ * stored in the file with the data.  Each datum in a file is of the same
+ * schema.  Data is written with a {@link DatumWriter}.  Data is grouped into
+ * <i>blocks</i>.  A synchronization marker is written between blocks, so that
+ * files may be split.  Blocks may be compressed.  Extensible metadata is
+ * stored at the end of the file.  Files may be appended to.
+ * @see DataFileReader
+ */
+public class DataFileWriter<D> implements Closeable, Flushable {
+  private Schema schema;
+  private DatumWriter<D> dout;
+
+  private OutputStream underlyingStream;
+
+  private BufferedFileOutputStream out;
+  private BinaryEncoder vout;
+
+  private final Map<String,byte[]> meta = new HashMap<String,byte[]>();
+
+  private long blockCount;                       // # entries in current block
+
+  private NonCopyingByteArrayOutputStream buffer;
+  private BinaryEncoder bufOut;
+
+  private byte[] sync;                          // 16 random bytes
+  private int syncInterval = DataFileConstants.DEFAULT_SYNC_INTERVAL;
+
+  private boolean isOpen;
+  private Codec codec;
+
+  private boolean flushOnEveryBlock = true;
+
+  /** Construct a writer, not yet open. */
+  public DataFileWriter(DatumWriter<D> dout) {
+    this.dout = dout;
+  }
+  
+  private void assertOpen() {
+    if (!isOpen) throw new AvroRuntimeException("not open");
+  }
+  private void assertNotOpen() {
+    if (isOpen) throw new AvroRuntimeException("already open");
+  }
+  
+  /** 
+   * Configures this writer to use the given codec. 
+   * May not be reset after writes have begun.
+   */
+  public DataFileWriter<D> setCodec(CodecFactory c) {
+    assertNotOpen();
+    this.codec = c.createInstance();
+    setMetaInternal(DataFileConstants.CODEC, codec.getName());
+    return this;
+  }
+
+  /**
+   * Set the synchronization interval for this file, in bytes. 
+   * Valid values range from 32 to 2^30.
+   * Suggested values are between 2K and 2M.
+   *
+   * The stream is flushed by default at the end of each synchronization
+   * interval.
+   *
+   * If {@linkplain #setFlushOnEveryBlock(boolean)} is
+   * called with its parameter set to false, then the block may not be flushed
+   * to the stream after the sync marker is written. In this case,
+   * {@linkplain #flush()} must be called to flush the stream.
+   * 
+   * Invalid values throw IllegalArgumentException.
+   * 
+   * @param syncInterval 
+   *   the approximate number of uncompressed bytes to write in each block
+   * @return 
+   *   this DataFileWriter
+   */
+  public DataFileWriter<D> setSyncInterval(int syncInterval) {
+    if (syncInterval < 32 || syncInterval > (1 << 30)) {
+      throw new IllegalArgumentException("Invalid syncInterval value: " + syncInterval);
+    }
+    this.syncInterval = syncInterval;
+    return this;
+  }
+
+  /** Open a new file for data matching a schema with a random sync. */
+  public DataFileWriter<D> create(Schema schema, File file) throws IOException {
+    return create(schema, new SyncableFileOutputStream(file), null);
+  }
+
+  /** Open a new file for data matching a schema with a random sync. */
+  public DataFileWriter<D> create(Schema schema, OutputStream outs)
+    throws IOException {
+    return create(schema, outs, null);
+  }
+
+  /** Open a new file for data matching a schema with an explicit sync. */
+  public DataFileWriter<D> create(Schema schema, OutputStream outs, byte[] sync)
+    throws IOException {
+    assertNotOpen();
+
+    this.schema = schema;
+    setMetaInternal(DataFileConstants.SCHEMA, schema.toString());
+    if (sync == null) {
+      this.sync = generateSync();
+    } else if (sync.length == 16) {
+      this.sync = sync;
+    } else {
+      throw new IOException("sync must be exactly 16 bytes");
+    }
+
+    init(outs);
+
+    vout.writeFixed(DataFileConstants.MAGIC);           // write magic
+
+    vout.writeMapStart();                         // write metadata
+    vout.setItemCount(meta.size());
+    for (Map.Entry<String,byte[]> entry : meta.entrySet()) {
+      vout.startItem();
+      vout.writeString(entry.getKey());
+      vout.writeBytes(entry.getValue());
+    }
+    vout.writeMapEnd();
+    vout.writeFixed(this.sync); // write initial sync
+    vout.flush(); //vout may be buffered, flush before writing to out
+    return this;
+  }
+
+  /**
+   * Set whether this writer should flush the block to the stream every time
+   * a sync marker is written. By default, the writer will flush the buffer
+   * each time a sync marker is written (when the block size limit is reached
+   * or {@linkplain #sync()} is called).
+   * @param flushOnEveryBlock - If set to false, this writer will not flush
+   *                          the block to the stream until {@linkplain
+   *                          #flush()} is explicitly called.
+   */
+  public void setFlushOnEveryBlock(boolean flushOnEveryBlock) {
+    this.flushOnEveryBlock = flushOnEveryBlock;
+  }
+
+  /**
+   * @return - true if this writer flushes the block to the stream every time
+   * a sync marker is written. Else returns false.
+   */
+  public boolean isFlushOnEveryBlock() {
+    return this.flushOnEveryBlock;
+  }
+
+  /** Open a writer appending to an existing file. */
+  public DataFileWriter<D> appendTo(File file) throws IOException {
+    return appendTo(new SeekableFileInput(file),
+                    new SyncableFileOutputStream(file, true));
+  }
+
+  /** Open a writer appending to an existing file.
+   * @param in reading the existing file.
+   * @param out positioned at the end of the existing file.
+   */
+  public DataFileWriter<D> appendTo(SeekableInput in, OutputStream out)
+    throws IOException {
+    assertNotOpen();
+    DataFileReader<D> reader =
+      new DataFileReader<D>(in, new GenericDatumReader<D>());
+    this.schema = reader.getSchema();
+    this.sync = reader.getHeader().sync;
+    this.meta.putAll(reader.getHeader().meta);
+    byte[] codecBytes = this.meta.get(DataFileConstants.CODEC);
+    if (codecBytes != null) {
+      String strCodec = new String(codecBytes, "UTF-8");
+      this.codec = CodecFactory.fromString(strCodec).createInstance();
+    } else {
+      this.codec = CodecFactory.nullCodec().createInstance();
+    }
+    reader.close();
+
+    init(out);
+
+    return this;
+  }
+
+  private void init(OutputStream outs) throws IOException {
+    this.underlyingStream = outs;
+    this.out = new BufferedFileOutputStream(outs);
+    EncoderFactory efactory = new EncoderFactory();
+    this.vout = efactory.binaryEncoder(out, null);
+    dout.setSchema(schema);
+    buffer = new NonCopyingByteArrayOutputStream(
+        Math.min((int)(syncInterval * 1.25), Integer.MAX_VALUE/2 -1));
+    this.bufOut = efactory.binaryEncoder(buffer, null);
+    if (this.codec == null) {
+      this.codec = CodecFactory.nullCodec().createInstance();
+    }
+    this.isOpen = true;
+  }
+
+  private static byte[] generateSync() {
+    try {
+      MessageDigest digester = MessageDigest.getInstance("MD5");
+      long time = System.currentTimeMillis();
+      digester.update((UUID.randomUUID()+"@"+time).getBytes());
+      return digester.digest();
+    } catch (NoSuchAlgorithmException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  private DataFileWriter<D> setMetaInternal(String key, byte[] value) {
+    assertNotOpen();
+    meta.put(key, value);
+    return this;
+  }
+  
+  private DataFileWriter<D> setMetaInternal(String key, String value) {
+    try {
+      return setMetaInternal(key, value.getBytes("UTF-8"));
+    } catch (UnsupportedEncodingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  /** Set a metadata property. */
+  public DataFileWriter<D> setMeta(String key, byte[] value) {
+    if (isReservedMeta(key)) {
+      throw new AvroRuntimeException("Cannot set reserved meta key: " + key);
+    }
+    return setMetaInternal(key, value);
+  }
+  
+  public static boolean isReservedMeta(String key) {
+    return key.startsWith("avro.");
+  }
+
+  /** Set a metadata property. */
+  public DataFileWriter<D> setMeta(String key, String value) {
+    try {
+      return setMeta(key, value.getBytes("UTF-8"));
+    } catch (UnsupportedEncodingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+  /** Set a metadata property. */
+  public DataFileWriter<D> setMeta(String key, long value) {
+    return setMeta(key, Long.toString(value));
+  }
+
+  /** Thrown by {@link #append(Object)} when an exception occurs while writing a
+   * datum to the buffer.  When this is thrown, the file is unaltered and may
+   * continue to be appended to. */
+  public static class AppendWriteException extends RuntimeException {
+    public AppendWriteException(Exception e) { super(e); }
+  }
+
+  /** Append a datum to the file.
+   * @see AppendWriteException
+   */
+  public void append(D datum) throws IOException {
+    assertOpen();
+    int usedBuffer = bufferInUse();
+    try {
+      dout.write(datum, bufOut);
+    } catch (IOException e) {
+      resetBufferTo(usedBuffer);
+      throw new AppendWriteException(e);
+    } catch (RuntimeException re) {
+      resetBufferTo(usedBuffer);
+      throw new AppendWriteException(re);
+    }
+    blockCount++;
+    writeIfBlockFull();
+  }
+  
+  // if there is an error encoding, flush the encoder and then
+  // reset the buffer position to contain size bytes, discarding the rest.
+  // Otherwise the file will be corrupt with a partial record.
+  private void resetBufferTo(int size) throws IOException {
+    bufOut.flush();
+    byte[] data = buffer.toByteArray();
+    buffer.reset();
+    buffer.write(data, 0, size);
+  }
+
+  /** Expert: Append a pre-encoded datum to the file.  No validation is
+   * performed to check that the encoding conforms to the file's schema.
+   * Appending non-conforming data may result in an unreadable file. */
+  public void appendEncoded(ByteBuffer datum) throws IOException {
+    assertOpen();
+    bufOut.writeFixed(datum);
+    blockCount++;
+    writeIfBlockFull();
+  }
+  
+  private int bufferInUse() {
+    return (buffer.size() + bufOut.bytesBuffered());
+  }
+
+  private void writeIfBlockFull() throws IOException {
+    if (bufferInUse() >= syncInterval)
+      writeBlock();
+  }
+
+  /**
+   * Appends data from another file.  otherFile must have the same schema.
+   * Data blocks will be copied without de-serializing data.  If the codecs
+   * of the two files are compatible, data blocks are copied directly without
+   * decompression.  If the codecs are not compatible, blocks from otherFile
+   * are uncompressed and then compressed using this file's codec.
+   * <p/>
+   * If the recompress flag is set all blocks are decompressed and then compressed
+   * using this file's codec.  This is useful when the two files have compatible
+   * compression codecs but different codec options.  For example, one might
+   * append a file compressed with deflate at compression level 1 to a file with
+   * deflate at compression level 7.  If <i>recompress</i> is false, blocks
+   * will be copied without changing the compression level.  If true, they will
+   * be converted to the new compression level.
+   * @param otherFile the file to copy data blocks from
+   * @param recompress if true, decompress and recompress every copied block
+   * @throws IOException if the schemas do not match or an I/O error occurs
+   */
+  public void appendAllFrom(DataFileStream<D> otherFile, boolean recompress) throws IOException {
+    assertOpen();
+    // make sure other file has same schema
+    Schema otherSchema = otherFile.getSchema();
+    if (!this.schema.equals(otherSchema)) {
+      throw new IOException("Schema from file " + otherFile + " does not match");
+    }
+    // flush anything written so far
+    writeBlock();
+    Codec otherCodec = otherFile.resolveCodec();
+    DataBlock nextBlockRaw = null;
+    if (codec.equals(otherCodec) && !recompress) {
+      // copy raw bytes
+      while(otherFile.hasNextBlock()) {
+        nextBlockRaw = otherFile.nextRawBlock(nextBlockRaw);
+        nextBlockRaw.writeBlockTo(vout, sync);
+      }
+    } else {
+      while(otherFile.hasNextBlock()) {
+        nextBlockRaw = otherFile.nextRawBlock(nextBlockRaw);
+        nextBlockRaw.decompressUsing(otherCodec);
+        nextBlockRaw.compressUsing(codec);
+        nextBlockRaw.writeBlockTo(vout, sync);
+      }
+    }
+  }
+  
+  private void writeBlock() throws IOException {
+    if (blockCount > 0) {
+      bufOut.flush();
+      ByteBuffer uncompressed = buffer.getByteArrayAsByteBuffer();
+      DataBlock block = new DataBlock(uncompressed, blockCount);
+      block.setFlushOnWrite(flushOnEveryBlock);
+      block.compressUsing(codec);
+      block.writeBlockTo(vout, sync);
+      buffer.reset();
+      blockCount = 0;
+    }
+  }
+
+  /** Return the current position as a value that may be passed to {@link
+   * DataFileReader#seek(long)}.  Forces the end of the current block,
+   * emitting a synchronization marker. By default, this will also flush the
+   * block to the stream.
+   *
+   * If {@linkplain #setFlushOnEveryBlock(boolean)} is
+   * called with its parameter set to false, then this method may not flush
+   * the block. In this case, {@linkplain #flush()} must be called to
+   * flush the stream.
+   */
+  public long sync() throws IOException {
+    assertOpen();
+    writeBlock();
+    return out.tell();
+  }
+
+  /** Calls {@linkplain #sync()} and then flushes the current state of the
+   * file.
+   */
+  @Override
+  public void flush() throws IOException {
+    sync();
+    vout.flush();
+  }
+
+  /**
+   * If this writer was instantiated using a File or using an
+   * {@linkplain Syncable} instance, this method flushes all buffers for this
+   * writer to disk. In other cases, this method behaves exactly
+   * like {@linkplain #flush()}.
+   *
+   * @throws IOException
+   */
+  public void fSync() throws IOException {
+    flush();
+    if (underlyingStream instanceof Syncable) {
+      ((Syncable) underlyingStream).sync();
+    }
+  }
+
+  /** Flush and close the file. */
+  @Override
+  public void close() throws IOException {
+    if (isOpen) {
+      flush();
+      out.close();
+      isOpen = false;
+    }
+  }
+
+  private class BufferedFileOutputStream extends BufferedOutputStream {
+    private long position;                         // start of buffer
+
+    private class PositionFilter extends FilterOutputStream {
+      public PositionFilter(OutputStream out) throws IOException { super(out); }
+      @Override
+      public void write(byte[] b, int off, int len) throws IOException {
+        out.write(b, off, len);
+        position += len;                           // update on write
+      }
+    }
+
+    public BufferedFileOutputStream(OutputStream out) throws IOException {
+      super(null);
+      this.out = new PositionFilter(out);
+    }
+
+    public long tell() { return position+count; }
+  }
+
+  private static class NonCopyingByteArrayOutputStream extends ByteArrayOutputStream {
+    NonCopyingByteArrayOutputStream(int initialSize) {
+      super(initialSize);
+    }
+    ByteBuffer getByteArrayAsByteBuffer() {
+      return ByteBuffer.wrap(buf, 0, count);
+    }
+  }
+
+}
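
A minimal usage sketch for the writer above. The schema literal and file name are
illustrative, and CodecFactory.deflateCodec is assumed from the codec factory API (not
shown in this hunk); note that setCodec must precede create():

    import java.io.File;
    import org.apache.avro.Schema;
    import org.apache.avro.file.CodecFactory;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecord;

    public class WriteExample {
      public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"User\",\"fields\":"
            + "[{\"name\":\"name\",\"type\":\"string\"}]}");
        DataFileWriter<GenericRecord> writer = new DataFileWriter<GenericRecord>(
            new GenericDatumWriter<GenericRecord>(schema));
        writer.setCodec(CodecFactory.deflateCodec(6)); // may not be reset once open
        writer.setSyncInterval(64 * 1024);             // ~64 KB uncompressed blocks
        writer.create(schema, new File("users.avro"));
        GenericRecord user = new GenericData.Record(schema);
        user.put("name", "alice");
        writer.append(user);                           // buffered until the block fills
        writer.close();                                // flushes the final block
      }
    }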
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DeflateCodec.java b/lang/java/avro/src/main/java/org/apache/avro/file/DeflateCodec.java
new file mode 100644
index 0000000..2a0c8c5
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DeflateCodec.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.zip.Deflater;
+import java.util.zip.DeflaterOutputStream;
+import java.util.zip.Inflater;
+import java.util.zip.InflaterOutputStream;
+
+/** 
+ * Implements DEFLATE (RFC1951) compression and decompression. 
+ *
+ * Note that there is a distinction between RFC1951 (deflate)
+ * and RFC1950 (zlib).  zlib adds an extra 2-byte header
+ * at the front, and a 4-byte checksum at the end.  The
+ * code here, by passing "true" as the "nowrap" option to
+ * {@link Inflater} and {@link Deflater}, is using
+ * RFC1951.
+ */
+class DeflateCodec extends Codec {
+  
+  static class Option extends CodecFactory {
+    private int compressionLevel;
+
+    Option(int compressionLevel) {
+      this.compressionLevel = compressionLevel;
+    }
+
+    @Override
+    protected Codec createInstance() {
+      return new DeflateCodec(compressionLevel);
+    }
+  }
+
+  private ByteArrayOutputStream outputBuffer;
+  private Deflater deflater;
+  private Inflater inflater;
+  //currently only do 'nowrap' -- RFC 1951, not zlib
+  private boolean nowrap = true; 
+  private int compressionLevel;
+
+  public DeflateCodec(int compressionLevel) {
+    this.compressionLevel = compressionLevel;
+  }
+
+  @Override
+  public String getName() {
+    return DataFileConstants.DEFLATE_CODEC;
+  }
+
+  @Override
+  public ByteBuffer compress(ByteBuffer data) throws IOException {
+    ByteArrayOutputStream baos = getOutputBuffer(data.remaining());
+    DeflaterOutputStream ios = new DeflaterOutputStream(baos, getDeflater());
+    writeAndClose(data, ios);
+    ByteBuffer result = ByteBuffer.wrap(baos.toByteArray());
+    return result;
+  }
+
+  @Override
+  public ByteBuffer decompress(ByteBuffer data) throws IOException {
+    ByteArrayOutputStream baos = getOutputBuffer(data.remaining());
+    InflaterOutputStream ios = new InflaterOutputStream(baos, getInflater());
+    writeAndClose(data, ios);
+    ByteBuffer result = ByteBuffer.wrap(baos.toByteArray());
+    return result;
+  }
+  
+  private void writeAndClose(ByteBuffer data, OutputStream to) throws IOException {
+    byte[] input = data.array();
+    int offset = data.arrayOffset() + data.position();
+    int length = data.remaining();
+    try {
+      to.write(input, offset, length);
+    } finally {
+      to.close();
+    }
+  }
+  
+  // get and initialize the inflater for use.
+  private Inflater getInflater() {
+    if (null == inflater) {
+      inflater = new Inflater(nowrap);
+    }
+    inflater.reset();
+    return inflater;
+  }
+
+  // get and initialize the deflater for use.
+  private Deflater getDeflater() {
+    if (null == deflater) {
+      deflater = new Deflater(compressionLevel, nowrap);
+    }
+    deflater.reset();
+    return deflater;
+  }
+  
+  // get and initialize the output buffer for use.
+  private ByteArrayOutputStream getOutputBuffer(int suggestedLength) {
+    if (null == outputBuffer) {
+      outputBuffer = new ByteArrayOutputStream(suggestedLength);
+    }
+    outputBuffer.reset();
+    return outputBuffer;
+  }
+  
+  @Override
+  public int hashCode() {
+    return nowrap ? 0 : 1;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null || getClass() != obj.getClass())
+      return false;
+    DeflateCodec other = (DeflateCodec)obj;
+    return (this.nowrap == other.nowrap);
+  }
+
+  @Override
+  public String toString() {
+    return getName() + "-" + compressionLevel;
+  }
+}
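
To make the RFC 1951 vs. RFC 1950 distinction above concrete, here is a standalone
java.util.zip round trip using the same nowrap=true setting (a sketch; the buffer size is
chosen for a tiny input):

    import java.util.zip.DataFormatException;
    import java.util.zip.Deflater;
    import java.util.zip.Inflater;

    public class NowrapDemo {
      public static void main(String[] args) throws DataFormatException {
        byte[] input = "hello, avro".getBytes();

        Deflater deflater = new Deflater(Deflater.DEFAULT_COMPRESSION, true); // nowrap: raw RFC 1951
        deflater.setInput(input);
        deflater.finish();
        byte[] compressed = new byte[64];
        int clen = deflater.deflate(compressed);  // tiny input compresses in one call
        deflater.end();

        Inflater inflater = new Inflater(true);   // must match the deflater's nowrap flag
        inflater.setInput(compressed, 0, clen);
        byte[] restored = new byte[input.length];
        int rlen = inflater.inflate(restored);
        inflater.end();

        System.out.println(new String(restored, 0, rlen)); // prints "hello, avro"
      }
    }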
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/FileReader.java b/lang/java/avro/src/main/java/org/apache/avro/file/FileReader.java
new file mode 100644
index 0000000..19de11c
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/FileReader.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.IOException;
+import java.io.Closeable;
+import java.util.Iterator;
+
+import org.apache.avro.Schema;
+
+/** Interface for reading data from a file. */
+public interface FileReader<D> extends Iterator<D>, Iterable<D>, Closeable {
+  /** Return the schema for data in this file. */
+  Schema getSchema();
+
+  /** Read the next datum from the file.
+   * @param reuse an instance to reuse.
+   * @throws NoSuchElementException if no more remain in the file.
+   */
+  D next(D reuse) throws IOException;
+
+  /** Move to the next synchronization point after a position. To process a
+   * range of file entries, call this with the starting position, then check
+   * {@link #pastSync(long)} with the end point before each call to {@link
+   * #next()}. */
+  void sync(long position) throws IOException;
+
+  /** Return true if past the next synchronization point after a position. */ 
+  boolean pastSync(long position) throws IOException;
+
+  /** Return the current position in the input. */
+  long tell() throws IOException;
+
+}
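
The sync()/pastSync() contract above supports split processing: each worker handles only
the entries that begin between its start and end byte offsets. A sketch of that loop
(class and method names hypothetical):

    import java.io.IOException;
    import org.apache.avro.file.FileReader;

    public class SplitProcessor {
      /** Process only the entries that begin between byte offsets start and end. */
      static <D> void processSplit(FileReader<D> reader, long start, long end)
          throws IOException {
        reader.sync(start);  // skip to the first sync point at or after start
        D datum = null;
        while (reader.hasNext() && !reader.pastSync(end)) {
          datum = reader.next(datum);
          // ... handle datum ...
        }
      }
    }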
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/LengthLimitedInputStream.java b/lang/java/avro/src/main/java/org/apache/avro/file/LengthLimitedInputStream.java
new file mode 100644
index 0000000..a4b4199
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/LengthLimitedInputStream.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.FilterInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+/** Represents a substream of a certain length. */
+class LengthLimitedInputStream extends FilterInputStream {
+
+  /** Bytes remaining. */
+  private long remaining;
+
+  protected LengthLimitedInputStream(InputStream in, long maxLength) {
+    super(in);
+    remaining = maxLength;
+  }
+
+  @Override
+  public int read() throws IOException {
+    if (remaining > 0) {
+      int v = super.read();
+      if (v != -1) {
+        remaining--;
+      }
+      return v;
+    }
+    return -1;
+  }
+
+  @Override
+  public int read(byte[] b) throws IOException {
+    return read(b, 0, b.length);
+  }
+
+  /**
+   * Returns at most Integer.MAX_VALUE.
+   */
+  private int remainingInt() {
+    return (int)Math.min(remaining, Integer.MAX_VALUE);
+  }
+
+  @Override
+  public int read(byte[] b, int off, int len) throws IOException {
+    if (remaining == 0) {
+      return -1;
+    }
+    if (len > remaining) {
+      len = remainingInt();
+    }
+    int v = super.read(b, off, len);
+    if (v != -1) {
+      remaining -= v;
+    }
+    return v;
+  }
+
+  @Override
+  public int available() throws IOException {
+    return Math.min(super.available(), remainingInt());
+  }
+
+  @Override
+  public long skip(long n) throws IOException {
+    long v = super.skip(Math.min(remaining, n));
+    remaining -= v;
+    return v;
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/NullCodec.java b/lang/java/avro/src/main/java/org/apache/avro/file/NullCodec.java
new file mode 100644
index 0000000..fd82d9b
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/NullCodec.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/** Implements "null" (pass through) codec. */
+final class NullCodec extends Codec {
+  
+  private static final NullCodec INSTANCE = new NullCodec();
+
+  static class Option extends CodecFactory {
+    @Override
+    protected Codec createInstance() {
+      return INSTANCE;
+    }
+  }
+
+  /** No options available for NullCodec. */
+  public static final CodecFactory OPTION = new Option();
+
+  @Override
+  public String getName() {
+    return DataFileConstants.NULL_CODEC;
+  }
+
+  @Override
+  public ByteBuffer compress(ByteBuffer buffer) throws IOException {
+    return buffer;
+  }
+
+  @Override
+  public ByteBuffer decompress(ByteBuffer data) throws IOException {
+    return data;
+  }
+
+  @Override
+  public boolean equals(Object other) {
+    if (this == other)
+      return true;
+    return (other != null && this.getClass() == other.getClass());
+  }
+
+  @Override
+  public int hashCode() {
+    return 2;
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/SeekableByteArrayInput.java b/lang/java/avro/src/main/java/org/apache/avro/file/SeekableByteArrayInput.java
new file mode 100644
index 0000000..166ef83
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/SeekableByteArrayInput.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+
+/** A {@link SeekableInput} backed with data in a byte array. */
+public class SeekableByteArrayInput extends ByteArrayInputStream implements SeekableInput {
+
+    public SeekableByteArrayInput(byte[] data) {
+        super(data);
+    }
+
+    public long length() throws IOException {
+        return this.count;
+    }
+
+    public void seek(long p) throws IOException {
+        this.reset();
+        this.skip(p);
+    }
+
+    public long tell() throws IOException {
+        return this.pos;
+    }
+}
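
This class makes in-memory round trips straightforward, e.g. in tests. A sketch pairing
it with the generic writer shown earlier (the caller is assumed to supply the schema and
record; class and method names hypothetical):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import org.apache.avro.Schema;
    import org.apache.avro.file.DataFileReader;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.file.SeekableByteArrayInput;
    import org.apache.avro.generic.GenericDatumReader;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecord;

    public class InMemoryRoundTrip {
      static GenericRecord roundTrip(Schema schema, GenericRecord record)
          throws IOException {
        // Write a container file into memory...
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataFileWriter<GenericRecord> writer = new DataFileWriter<GenericRecord>(
            new GenericDatumWriter<GenericRecord>(schema));
        writer.create(schema, baos);
        writer.append(record);
        writer.close();
        // ...then read it back without touching the filesystem.
        DataFileReader<GenericRecord> reader = new DataFileReader<GenericRecord>(
            new SeekableByteArrayInput(baos.toByteArray()),
            new GenericDatumReader<GenericRecord>());
        try {
          return reader.next();
        } finally {
          reader.close();
        }
      }
    }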
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/SeekableFileInput.java b/lang/java/avro/src/main/java/org/apache/avro/file/SeekableFileInput.java
new file mode 100644
index 0000000..afa0675
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/SeekableFileInput.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileDescriptor;
+import java.io.IOException;
+
+/** A {@link FileInputStream} that implements {@link SeekableInput}. */
+public class SeekableFileInput
+  extends FileInputStream implements SeekableInput {
+
+  public SeekableFileInput(File file) throws IOException { super(file); }
+  public SeekableFileInput(FileDescriptor fd) throws IOException { super(fd); }
+
+  public void seek(long p) throws IOException { getChannel().position(p); }
+  public long tell() throws IOException { return getChannel().position(); }
+  public long length() throws IOException { return getChannel().size(); }
+
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/SeekableInput.java b/lang/java/avro/src/main/java/org/apache/avro/file/SeekableInput.java
new file mode 100644
index 0000000..7e8a0a4
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/SeekableInput.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.IOException;
+import java.io.Closeable;
+
+/** An InputStream that supports seek and tell. */
+public interface SeekableInput extends Closeable {
+
+  /** Set the position for the next {@link java.io.InputStream#read(byte[],int,int) read()}. */
+  void seek(long p) throws IOException;
+
+  /** Return the position of the next {@link java.io.InputStream#read(byte[],int,int) read()}. */
+  long tell() throws IOException;
+
+  /** Return the length of the file. */
+  long length() throws IOException;
+
+  /** Equivalent to {@link java.io.InputStream#read(byte[],int,int)}. */
+  int read(byte[] b, int off, int len) throws IOException;
+}
+
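Since the interface is small, adapters over other random-access sources are easy to
write. A sketch backed by java.io.RandomAccessFile (the adapter class name is
hypothetical):

    import java.io.IOException;
    import java.io.RandomAccessFile;
    import org.apache.avro.file.SeekableInput;

    public class RandomAccessFileInput implements SeekableInput {
      private final RandomAccessFile raf;
      public RandomAccessFileInput(RandomAccessFile raf) { this.raf = raf; }
      @Override public void seek(long p) throws IOException { raf.seek(p); }
      @Override public long tell() throws IOException { return raf.getFilePointer(); }
      @Override public long length() throws IOException { return raf.length(); }
      @Override public int read(byte[] b, int off, int len) throws IOException {
        return raf.read(b, off, len);
      }
      @Override public void close() throws IOException { raf.close(); }
    }
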
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/SnappyCodec.java b/lang/java/avro/src/main/java/org/apache/avro/file/SnappyCodec.java
new file mode 100644
index 0000000..0787050
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/SnappyCodec.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.zip.CRC32;
+
+import org.xerial.snappy.Snappy;
+
+/** Implements Snappy compression and decompression. */
+class SnappyCodec extends Codec {
+  private CRC32 crc32 = new CRC32();
+
+  static class Option extends CodecFactory {
+    @Override
+    protected Codec createInstance() {
+      return new SnappyCodec();
+    }
+  }
+
+  private SnappyCodec() {}
+
+  @Override public String getName() { return DataFileConstants.SNAPPY_CODEC; }
+
+  @Override
+  public ByteBuffer compress(ByteBuffer in) throws IOException {
+    ByteBuffer out =
+      ByteBuffer.allocate(Snappy.maxCompressedLength(in.remaining())+4);
+    // Include arrayOffset() so buffers wrapping a slice of a larger array work too.
+    int pos = in.arrayOffset() + in.position();
+    int size = Snappy.compress(in.array(), pos, in.remaining(),
+                               out.array(), 0);
+    crc32.reset();
+    crc32.update(in.array(), pos, in.remaining());
+    out.putInt(size, (int)crc32.getValue());
+
+    out.limit(size+4);
+
+    return out;
+  }
+
+  @Override
+  public ByteBuffer decompress(ByteBuffer in) throws IOException {
+    int pos = in.arrayOffset() + in.position();  // honor any backing-array offset
+    ByteBuffer out = ByteBuffer.allocate(
+      Snappy.uncompressedLength(in.array(), pos, in.remaining() - 4));
+    int size = Snappy.uncompress(in.array(), pos, in.remaining() - 4,
+                                 out.array(), 0);
+    out.limit(size);
+    
+    crc32.reset();
+    crc32.update(out.array(), 0, size);
+    if (in.getInt(in.limit()-4) != (int)crc32.getValue())
+      throw new IOException("Checksum failure");
+    
+    return out;
+  }
+  
+  @Override public int hashCode() { return getName().hashCode(); }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null || getClass() != obj.getClass())
+      return false;
+    return true;
+  }
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/Syncable.java b/lang/java/avro/src/main/java/org/apache/avro/file/Syncable.java
new file mode 100644
index 0000000..1510100
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/Syncable.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.IOException;
+
+public interface Syncable {
+
+  /**
+   * Sync the file to disk. On supported platforms, this method behaves like
+   * POSIX <code>fsync</code> and syncs all underlying OS buffers for this
+   * file descriptor to disk. On these platforms, if this method returns,
+   * the data written to this instance is guaranteed to be persisted on disk.
+   *
+   * @throws IOException - if an error occurred while attempting to sync the
+   *                     data to disk.
+   */
+  void sync() throws IOException;
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/SyncableFileOutputStream.java b/lang/java/avro/src/main/java/org/apache/avro/file/SyncableFileOutputStream.java
new file mode 100644
index 0000000..736a5d4
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/SyncableFileOutputStream.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+/**
+ * An implementation of {@linkplain Syncable} which writes to a file.
+ * An instance of this class can be used with {@linkplain DataFileWriter} to
+ * guarantee that Avro Container Files are persisted to disk on supported
+ * platforms using the
+ * {@linkplain org.apache.avro.file.DataFileWriter#fSync()} method.
+ *
+ * @see FileOutputStream
+ */
+public class SyncableFileOutputStream
+  extends FileOutputStream implements Syncable {
+
+  /**
+   * Creates an instance of {@linkplain SyncableFileOutputStream} with the
+   * given name.
+   *
+   * @param name - the full file name.
+   * @throws FileNotFoundException - if the file cannot be created or opened.
+   */
+  public SyncableFileOutputStream(String name) throws FileNotFoundException {
+    super(name);
+  }
+
+  /**
+   * Creates an instance of {@linkplain SyncableFileOutputStream} using the
+   * given {@linkplain File} instance.
+   *
+   * @param file - The file to use to create the output stream.
+   *
+   * @throws FileNotFoundException - if the file cannot be created or opened.
+   */
+  public SyncableFileOutputStream(File file)
+    throws FileNotFoundException {
+    super(file);
+  }
+
+  /**
+   * Creates an instance of {@linkplain SyncableFileOutputStream} with the
+   * given name, optionally appending to the file if it already exists.
+   *
+   * @param name - the full file name.
+   * @param append - true if the file is to be appended to
+   *
+   * @throws FileNotFoundException - if the file cannot be created or opened.
+   */
+  public SyncableFileOutputStream(String name, boolean append)
+    throws FileNotFoundException {
+    super(name, append);
+  }
+
+  /**
+   * Creates an instance of {@linkplain SyncableFileOutputStream}
+   * that writes to the file represented by the given {@linkplain File}
+   * instance, optionally appending to the file if it already exists.
+   *
+   * @param file - the file instance to use to create the stream.
+   * @param append - true if the file is to be appended to
+   *
+   * @throws FileNotFoundException - if the file cannot be created or opened.
+   */
+  public SyncableFileOutputStream(File file, boolean append)
+    throws FileNotFoundException {
+    super(file, append);
+  }
+
+  /**
+   * Creates an instance of {@linkplain SyncableFileOutputStream}
+   * using the given {@linkplain FileDescriptor} instance.
+   */
+  public SyncableFileOutputStream(FileDescriptor fdObj) {
+    super(fdObj);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void sync() throws IOException {
+    getFD().sync();
+  }
+}
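
When a DataFileWriter is created from a File, its output is routed through this class, so
DataFileWriter.fSync() can force the container file to disk. A short sketch (file name
illustrative; imports and writer setup as in the earlier write example):

    static void writeDurably(Schema schema, GenericRecord record) throws IOException {
      DataFileWriter<GenericRecord> writer = new DataFileWriter<GenericRecord>(
          new GenericDatumWriter<GenericRecord>(schema));
      writer.create(schema, new File("durable.avro")); // wraps a SyncableFileOutputStream
      writer.append(record);
      writer.fSync();  // flush(), then fsync the underlying file descriptor
      writer.close();
    }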
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/XZCodec.java b/lang/java/avro/src/main/java/org/apache/avro/file/XZCodec.java
new file mode 100644
index 0000000..7677b3f
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/XZCodec.java
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+
+import org.apache.commons.compress.compressors.xz.XZCompressorInputStream;
+import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream;
+import org.apache.commons.compress.utils.IOUtils;
+
+/** Implements XZ compression and decompression. */
+public class XZCodec extends Codec {
+
+  static class Option extends CodecFactory {
+    private int compressionLevel;
+
+    Option(int compressionLevel) {
+      this.compressionLevel = compressionLevel;
+    }
+
+    @Override
+    protected Codec createInstance() {
+      return new XZCodec(compressionLevel);
+    }
+  }
+
+  private ByteArrayOutputStream outputBuffer;
+  private int compressionLevel;
+
+  public XZCodec(int compressionLevel) {
+    this.compressionLevel = compressionLevel;
+  }
+
+  @Override
+  public String getName() {
+    return DataFileConstants.XZ_CODEC;
+  }
+
+  @Override
+  public ByteBuffer compress(ByteBuffer data) throws IOException {
+    ByteArrayOutputStream baos = getOutputBuffer(data.remaining());
+    OutputStream ios = new XZCompressorOutputStream(baos, compressionLevel);
+    writeAndClose(data, ios);
+    return ByteBuffer.wrap(baos.toByteArray());
+  }
+
+  @Override
+  public ByteBuffer decompress(ByteBuffer data) throws IOException {
+    ByteArrayOutputStream baos = getOutputBuffer(data.remaining());
+    InputStream bytesIn = new ByteArrayInputStream(
+      data.array(),
+      data.arrayOffset() + data.position(),
+      data.remaining());
+    InputStream ios = new XZCompressorInputStream(bytesIn);
+    try {
+      IOUtils.copy(ios, baos);
+    } finally {
+      ios.close();
+    }
+    return ByteBuffer.wrap(baos.toByteArray());
+  }
+
+  private void writeAndClose(ByteBuffer data, OutputStream to) throws IOException {
+    byte[] input = data.array();
+    int offset = data.arrayOffset() + data.position();
+    int length = data.remaining();
+    try {
+      to.write(input, offset, length);
+    } finally {
+      to.close();
+    }
+  }
+
+  // get and initialize the output buffer for use.
+  private ByteArrayOutputStream getOutputBuffer(int suggestedLength) {
+    if (null == outputBuffer) {
+      outputBuffer = new ByteArrayOutputStream(suggestedLength);
+    }
+    outputBuffer.reset();
+    return outputBuffer;
+  }
+
+  @Override
+  public int hashCode() {
+    return compressionLevel;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null || getClass() != obj.getClass())
+      return false;
+    XZCodec other = (XZCodec)obj;
+    return (this.compressionLevel == other.compressionLevel);
+  }
+
+  @Override
+  public String toString() {
+    return getName() + "-" + compressionLevel;
+  }
+}
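
Codec choice is a write-time decision made through a CodecFactory before create();
readers detect the codec from the name recorded in the file's metadata, so no reader-side
configuration is needed. A sketch of the usual factory methods (assumed from
CodecFactory, which is not shown in this hunk):

    import org.apache.avro.file.CodecFactory;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericRecord;

    public class CodecChoice {
      static void choose(DataFileWriter<GenericRecord> writer) {
        // Pick exactly one before create(); alternatives shown for comparison.
        writer.setCodec(CodecFactory.xzCodec(6));       // XZ, presets 0-9
        // writer.setCodec(CodecFactory.nullCodec());      no compression
        // writer.setCodec(CodecFactory.deflateCodec(6));  raw DEFLATE (RFC 1951)
        // writer.setCodec(CodecFactory.snappyCodec());    Snappy + per-block CRC32
      }
    }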
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/package.html b/lang/java/avro/src/main/java/org/apache/avro/file/package.html
new file mode 100644
index 0000000..be67e66
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/package.html
@@ -0,0 +1,23 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+A container file for Avro data.
+</body>
+</html>
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericArray.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericArray.java
new file mode 100644
index 0000000..40b7e0f
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericArray.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.generic;
+
+import java.util.List;
+
+/** Array that permits reuse of contained elements. */
+public interface GenericArray<T> extends List<T>, GenericContainer {
+  /** The current content of the location where {@link #add(Object)} would next
+   * store an element, if any.  This permits reuse of arrays and their elements
+   * without allocating new objects. */
+  T peek();
+  
+  /** Reverses the order of the elements in this array. */
+  void reverse();
+}
+
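A small sketch of the interface in use, via GenericData.Array (the standard
implementation, defined later in this commit); the schema literal is illustrative:

    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericArray;
    import org.apache.avro.generic.GenericData;

    public class ArrayExample {
      public static void main(String[] args) {
        Schema arraySchema = new Schema.Parser().parse(
            "{\"type\":\"array\",\"items\":\"string\"}");
        GenericArray<String> names =
            new GenericData.Array<String>(2, arraySchema); // initial capacity 2
        names.add("alice");
        names.add("bob");
        names.reverse();                                   // now [bob, alice]
        System.out.println(names.getSchema().getType());   // ARRAY
      }
    }
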
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericContainer.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericContainer.java
new file mode 100644
index 0000000..e1c2472
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericContainer.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.generic;
+
+import org.apache.avro.Schema;
+
+/** Contains data of other types. */
+public interface GenericContainer {
+  /** The schema of this instance. */
+  Schema getSchema();
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java
new file mode 100644
index 0000000..fe2128a
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericData.java
@@ -0,0 +1,1149 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.generic;
+
+import java.nio.ByteBuffer;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.AbstractList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.IdentityHashMap;
+import java.util.LinkedHashMap;
+import java.util.WeakHashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.Conversion;
+import org.apache.avro.LogicalType;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.UnresolvedUnionException;
+import org.apache.avro.io.BinaryData;
+import org.apache.avro.io.BinaryDecoder;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.parsing.ResolvingGrammarGenerator;
+import org.apache.avro.util.Utf8;
+
+import org.codehaus.jackson.JsonNode;
+
+/** Utilities for generic Java data. See {@link GenericRecordBuilder} for a convenient
+ * way to build {@link GenericRecord} instances.
+ * @see GenericRecordBuilder
+ */
+public class GenericData {
+
+  private static final GenericData INSTANCE = new GenericData();
+  
+  /** Used to specify the Java type for a string schema. */
+  public enum StringType { CharSequence, String, Utf8 };
+
+  public static final String STRING_PROP = "avro.java.string";
+  protected static final String STRING_TYPE_STRING = "String";
+
+  private final ClassLoader classLoader;
+
+  /** Set the Java type to be used when reading this schema.  Meaningful only
+   * for string schemas and map schemas (for the keys). */
+  public static void setStringType(Schema s, StringType stringType) {
+    // Utf8 is the default and implements CharSequence, so we only need to add
+    // a property when the type is String
+    if (stringType == StringType.String)
+      s.addProp(GenericData.STRING_PROP, GenericData.STRING_TYPE_STRING);
+  }
+
+  /** Return the singleton instance. */
+  public static GenericData get() { return INSTANCE; }
+
+  /** For subclasses.  Applications normally use {@link GenericData#get()}. */
+  public GenericData() {
+    this(null);
+  }
+
+  /** For subclasses.  GenericData does not use a ClassLoader. */
+  public GenericData(ClassLoader classLoader) {
+    this.classLoader = (classLoader != null)
+      ? classLoader
+      : getClass().getClassLoader();
+  }
+
+  /** Return the class loader that's used (by subclasses). */
+  public ClassLoader getClassLoader() { return classLoader; }
+
+  private Map<String, Conversion<?>> conversions =
+      new HashMap<String, Conversion<?>>();
+
+  private Map<Class<?>, Map<String, Conversion<?>>> conversionsByClass =
+      new IdentityHashMap<Class<?>, Map<String, Conversion<?>>>();
+
+  /**
+   * Registers the given conversion to be used when reading and writing with
+   * this data model.
+   *
+   * @param conversion a logical type Conversion.
+   */
+  public void addLogicalTypeConversion(Conversion<?> conversion) {
+    conversions.put(conversion.getLogicalTypeName(), conversion);
+    Class<?> type = conversion.getConvertedType();
+    if (conversionsByClass.containsKey(type)) {
+      conversionsByClass.get(type).put(
+          conversion.getLogicalTypeName(), conversion);
+    } else {
+      Map<String, Conversion<?>> conversions = new LinkedHashMap<String, Conversion<?>>();
+      conversions.put(conversion.getLogicalTypeName(), conversion);
+      conversionsByClass.put(type, conversions);
+    }
+  }
+
+  /**
+   * Returns the first conversion found for the given class.
+   *
+   * @param datumClass a Class
+   * @return the first registered conversion for the class, or null
+   */
+  @SuppressWarnings("unchecked")
+  public <T> Conversion<T> getConversionByClass(Class<T> datumClass) {
+    Map<String, Conversion<?>> conversions = conversionsByClass.get(datumClass);
+    if (conversions != null) {
+      return (Conversion<T>) conversions.values().iterator().next();
+    }
+    return null;
+  }
+
+  /**
+   * Returns the conversion for the given class and logical type.
+   *
+   * @param datumClass a Class
+   * @param logicalType a LogicalType
+   * @return the conversion for the class and logical type, or null
+   */
+  @SuppressWarnings("unchecked")
+  public <T> Conversion<T> getConversionByClass(Class<T> datumClass,
+                                                LogicalType logicalType) {
+    Map<String, Conversion<?>> conversions = conversionsByClass.get(datumClass);
+    if (conversions != null) {
+      return (Conversion<T>) conversions.get(logicalType.getName());
+    }
+    return null;
+  }
+
+  /**
+   * Returns the Conversion for the given logical type.
+   *
+   * @param logicalType a logical type
+   * @return the conversion for the logical type, or null
+   */
+  @SuppressWarnings("unchecked")
+  public Conversion<Object> getConversionFor(LogicalType logicalType) {
+    if (logicalType == null) {
+      return null;
+    }
+    return (Conversion<Object>) conversions.get(logicalType.getName());
+  }
+
+  /** Default implementation of {@link GenericRecord}. Note that this implementation
+   * does not fill in default values for fields if they are not specified; use {@link
+   * GenericRecordBuilder} in that case.
+   * @see GenericRecordBuilder
+   */
+  public static class Record implements GenericRecord, Comparable<Record> {
+    private final Schema schema;
+    private final Object[] values;
+    public Record(Schema schema) {
+      if (schema == null || !Type.RECORD.equals(schema.getType()))
+        throw new AvroRuntimeException("Not a record schema: "+schema);
+      this.schema = schema;
+      this.values = new Object[schema.getFields().size()];
+    }
+    public Record(Record other, boolean deepCopy) {
+      schema = other.schema;
+      values = new Object[schema.getFields().size()];
+      if (deepCopy) {
+        for (int ii = 0; ii < values.length; ii++) {
+          values[ii] = INSTANCE.deepCopy(
+              schema.getFields().get(ii).schema(), other.values[ii]);
+        }
+      }
+      else {
+        System.arraycopy(other.values, 0, values, 0, other.values.length);
+      }
+    }
+    @Override public Schema getSchema() { return schema; }
+    @Override public void put(String key, Object value) {
+      Schema.Field field = schema.getField(key);
+      if (field == null)
+        throw new AvroRuntimeException("Not a valid schema field: "+key);
+
+      values[field.pos()] = value;
+    }
+    @Override public void put(int i, Object v) { values[i] = v; }
+    @Override public Object get(String key) {
+      Field field = schema.getField(key);
+      if (field == null) return null;
+      return values[field.pos()];
+    }
+    @Override public Object get(int i) { return values[i]; }
+    @Override public boolean equals(Object o) {
+      if (o == this) return true;                 // identical object
+      if (!(o instanceof Record)) return false;   // not a record
+      Record that = (Record)o;
+      if (!this.schema.equals(that.schema))
+        return false;                             // not the same schema
+      return GenericData.get().compare(this, that, schema, true) == 0;
+    }
+    @Override public int hashCode() {
+      return GenericData.get().hashCode(this, schema);
+    }
+    @Override public int compareTo(Record that) {
+      return GenericData.get().compare(this, that, schema);
+    }
+    @Override public String toString() {
+      return GenericData.get().toString(this);
+    }
+  }
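+
+  // Illustrative sketch: constructing and populating a Record.  The "User"
+  // schema and its single field are hypothetical, not defined elsewhere.
+  private static GenericRecord exampleRecord() {
+    Schema schema = new Schema.Parser().parse(
+        "{\"type\":\"record\",\"name\":\"User\",\"fields\":"
+        + "[{\"name\":\"name\",\"type\":\"string\"}]}");
+    GenericRecord user = new GenericData.Record(schema);
+    user.put("name", "Ada");                  // by field name
+    user.put(0, "Ada");                       // or by field position
+    return user;
+  }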
+
+  /** Default implementation of an array. */
+  @SuppressWarnings(value="unchecked")
+  public static class Array<T> extends AbstractList<T>
+    implements GenericArray<T>, Comparable<GenericArray<T>> {
+    private static final Object[] EMPTY = new Object[0];
+    private final Schema schema;
+    private int size;
+    private Object[] elements = EMPTY;
+    public Array(int capacity, Schema schema) {
+      if (schema == null || !Type.ARRAY.equals(schema.getType()))
+        throw new AvroRuntimeException("Not an array schema: "+schema);
+      this.schema = schema;
+      if (capacity != 0)
+        elements = new Object[capacity];
+    }
+    public Array(Schema schema, Collection<T> c) {
+      if (schema == null || !Type.ARRAY.equals(schema.getType()))
+        throw new AvroRuntimeException("Not an array schema: "+schema);
+      this.schema = schema;
+      if (c != null) {
+        elements = new Object[c.size()];
+        addAll(c);
+      }
+    }
+    @Override
+    public Schema getSchema() { return schema; }
+    @Override public int size() { return size; }
+    @Override public void clear() { size = 0; }
+    @Override public Iterator<T> iterator() {
+      return new Iterator<T>() {
+        private int position = 0;
+        @Override
+        public boolean hasNext() { return position < size; }
+        @Override
+        public T next() { return (T)elements[position++]; }
+        @Override
+        public void remove() { throw new UnsupportedOperationException(); }
+      };
+    }
+    @Override public T get(int i) {
+      if (i >= size)
+        throw new IndexOutOfBoundsException("Index " + i + " out of bounds.");
+      return (T)elements[i];
+    }
+    @Override public boolean add(T o) {
+      if (size == elements.length) {
+        Object[] newElements = new Object[(size * 3)/2 + 1];
+        System.arraycopy(elements, 0, newElements, 0, size);
+        elements = newElements;
+      }
+      elements[size++] = o;
+      return true;
+    }
+    @Override public void add(int location, T o) {
+      if (location > size || location < 0) {
+        throw new IndexOutOfBoundsException("Index " + location + " out of bounds.");
+      }
+      if (size == elements.length) {
+        Object[] newElements = new Object[(size * 3)/2 + 1];
+        System.arraycopy(elements, 0, newElements, 0, size);
+        elements = newElements;
+      }
+      System.arraycopy(elements, location, elements, location + 1, size - location);
+      elements[location] = o;
+      size++;
+    }
+    @Override public T set(int i, T o) {
+      if (i >= size)
+        throw new IndexOutOfBoundsException("Index " + i + " out of bounds.");
+      T response = (T)elements[i];
+      elements[i] = o;
+      return response;
+    }
+    @Override public T remove(int i) {
+      if (i >= size)
+        throw new IndexOutOfBoundsException("Index " + i + " out of bounds.");
+      T result = (T)elements[i];
+      --size;
+      System.arraycopy(elements, i+1, elements, i, (size-i));
+      elements[size] = null;
+      return result;
+    }
+    @Override
+    public T peek() {
+      return (size < elements.length) ? (T)elements[size] : null;
+    }
+    @Override
+    public int compareTo(GenericArray<T> that) {
+      return GenericData.get().compare(this, that, this.getSchema());
+    }
+    @Override
+    public void reverse() {
+      int left = 0;
+      int right = elements.length - 1;
+      
+      while (left < right) {
+        Object tmp = elements[left];
+        elements[left] = elements[right];
+        elements[right] = tmp;
+        
+        left++;
+        right--;
+      }
+    }
+    @Override
+    public String toString() {
+      StringBuilder buffer = new StringBuilder();
+      buffer.append("[");
+      int count = 0;
+      for (T e : this) {
+        buffer.append(e==null ? "null" : e.toString());
+        if (++count < size())
+          buffer.append(", ");
+      }
+      buffer.append("]");
+      return buffer.toString();
+    }
+  }
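+
+  // Illustrative sketch: Array is a schema-aware List.  The element schema
+  // is built with the same public Schema factory methods used in this file.
+  private static GenericArray<Integer> exampleArray() {
+    Schema ints = Schema.createArray(Schema.create(Type.INT));
+    GenericArray<Integer> a = new GenericData.Array<Integer>(2, ints);
+    a.add(1);
+    a.add(2);
+    a.reverse();                              // a is now [2, 1]
+    return a;
+  }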
+
+  /** Default implementation of {@link GenericFixed}. */
+  public static class Fixed implements GenericFixed, Comparable<Fixed> {
+    private Schema schema;
+    private byte[] bytes;
+
+    public Fixed(Schema schema) { setSchema(schema); }
+
+    public Fixed(Schema schema, byte[] bytes) {
+      this.schema = schema;
+      this.bytes = bytes;
+    }
+
+    protected Fixed() {}
+
+    protected void setSchema(Schema schema) {
+      this.schema = schema;
+      this.bytes = new byte[schema.getFixedSize()];
+    }
+
+    @Override public Schema getSchema() { return schema; }
+
+    public void bytes(byte[] bytes) { this.bytes = bytes; }
+
+    @Override
+    public byte[] bytes() { return bytes; }
+
+    @Override
+    public boolean equals(Object o) {
+      if (o == this) return true;
+      return o instanceof GenericFixed
+        && Arrays.equals(bytes, ((GenericFixed)o).bytes());
+    }
+
+    @Override
+    public int hashCode() { return Arrays.hashCode(bytes); }
+
+    @Override
+    public String toString() { return Arrays.toString(bytes); }
+
+    @Override
+    public int compareTo(Fixed that) {
+      return BinaryData.compareBytes(this.bytes, 0, this.bytes.length,
+                                     that.bytes, 0, that.bytes.length);
+    }
+  }
+
+  /** Default implementation of {@link GenericEnumSymbol}. */
+  public static class EnumSymbol
+      implements GenericEnumSymbol, Comparable<GenericEnumSymbol>  {
+    private Schema schema;
+    private String symbol;
+
+    public EnumSymbol(Schema schema, String symbol) {
+      this.schema = schema;
+      this.symbol = symbol;
+    }
+
+    /**
+     * Maps an existing Object into an Avro enum
+     * by calling toString(), e.g. for Java Enums.
+     */
+    public EnumSymbol(Schema schema, Object symbol) {
+      this(schema, symbol.toString());
+    }
+
+    @Override public Schema getSchema() { return schema; }
+
+    @Override
+    public boolean equals(Object o) {
+      if (o == this) return true;
+      return o instanceof GenericEnumSymbol
+        && symbol.equals(o.toString());
+    }
+
+    @Override
+    public int hashCode() { return symbol.hashCode(); }
+
+    @Override
+    public String toString() { return symbol; }
+
+    @Override
+    public int compareTo(GenericEnumSymbol that) {
+      return GenericData.get().compare(this, that, schema);
+    }
+  }
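+
+  // Illustrative sketch: Fixed wraps a fixed-size byte array and EnumSymbol
+  // wraps a symbol string.  Both schemas here are hypothetical.
+  private static void exampleFixedAndEnum() {
+    Schema md5 = Schema.createFixed("MD5", null, null, 16);
+    GenericFixed hash = new GenericData.Fixed(md5, new byte[16]);
+    Schema suit = Schema.createEnum("Suit", null, null,
+        Arrays.asList("SPADES", "HEARTS", "DIAMONDS", "CLUBS"));
+    GenericEnumSymbol hearts = new GenericData.EnumSymbol(suit, "HEARTS");
+  }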
+
+  /** Returns a {@link DatumReader} for this kind of data. */
+  public DatumReader createDatumReader(Schema schema) {
+    return new GenericDatumReader(schema, schema, this);
+  }
+
+  /** Returns a {@link DatumReader} for this kind of data. */
+  public DatumReader createDatumReader(Schema writer, Schema reader) {
+    return new GenericDatumReader(writer, reader, this);
+  }
+
+  /** Returns a {@link DatumWriter} for this kind of data. */
+  public DatumWriter createDatumWriter(Schema schema) {
+    return new GenericDatumWriter(schema, this);
+  }
+
+  /** Returns true if a Java datum matches a schema. */
+  public boolean validate(Schema schema, Object datum) {
+    switch (schema.getType()) {
+    case RECORD:
+      if (!isRecord(datum)) return false;
+      for (Field f : schema.getFields()) {
+        if (!validate(f.schema(), getField(datum, f.name(), f.pos())))
+          return false;
+      }
+      return true;
+    case ENUM:
+      if (!isEnum(datum)) return false;
+      return schema.getEnumSymbols().contains(datum.toString());
+    case ARRAY:
+      if (!(isArray(datum))) return false;
+      for (Object element : getArrayAsCollection(datum))
+        if (!validate(schema.getElementType(), element))
+          return false;
+      return true;
+    case MAP:
+      if (!(isMap(datum))) return false;
+      @SuppressWarnings(value="unchecked")
+      Map<Object,Object> map = (Map<Object,Object>)datum;
+      for (Map.Entry<Object,Object> entry : map.entrySet())
+        if (!validate(schema.getValueType(), entry.getValue()))
+          return false;
+      return true;
+    case UNION:
+      try {
+        int i = resolveUnion(schema, datum);
+        return validate(schema.getTypes().get(i), datum);
+      } catch (UnresolvedUnionException e) {
+        return false;
+      }
+    case FIXED:
+      return datum instanceof GenericFixed
+        && ((GenericFixed)datum).bytes().length==schema.getFixedSize();
+    case STRING:  return isString(datum);
+    case BYTES:   return isBytes(datum);
+    case INT:     return isInteger(datum);
+    case LONG:    return isLong(datum);
+    case FLOAT:   return isFloat(datum);
+    case DOUBLE:  return isDouble(datum);
+    case BOOLEAN: return isBoolean(datum);
+    case NULL:    return datum == null;
+    default: return false;
+    }
+  }
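+
+  // Illustrative sketch: validate() checks one datum against one schema; the
+  // record schema here is hypothetical.
+  private static boolean exampleValidate() {
+    Schema schema = new Schema.Parser().parse(
+        "{\"type\":\"record\",\"name\":\"Point\",\"fields\":"
+        + "[{\"name\":\"x\",\"type\":\"int\"}]}");
+    GenericRecord p = new GenericData.Record(schema);
+    p.put("x", 1);
+    return GenericData.get().validate(schema, p);   // true
+  }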
+
+  /** Renders a Java datum as <a href="http://www.json.org/">JSON</a>. */
+  public String toString(Object datum) {
+    StringBuilder buffer = new StringBuilder();
+    toString(datum, buffer);
+    return buffer.toString();
+  }
+  /** Renders a Java datum as <a href="http://www.json.org/">JSON</a>. */
+  protected void toString(Object datum, StringBuilder buffer) {
+    if (isRecord(datum)) {
+      buffer.append("{");
+      int count = 0;
+      Schema schema = getRecordSchema(datum);
+      for (Field f : schema.getFields()) {
+        toString(f.name(), buffer);
+        buffer.append(": ");
+        toString(getField(datum, f.name(), f.pos()), buffer);
+        if (++count < schema.getFields().size())
+          buffer.append(", ");
+      }
+      buffer.append("}");
+    } else if (isArray(datum)) {
+      Collection<?> array = getArrayAsCollection(datum);
+      buffer.append("[");
+      long last = array.size()-1;
+      int i = 0;
+      for (Object element : array) {
+        toString(element, buffer);
+        if (i++ < last)
+          buffer.append(", ");
+      }        
+      buffer.append("]");
+    } else if (isMap(datum)) {
+      buffer.append("{");
+      int count = 0;
+      @SuppressWarnings(value="unchecked")
+      Map<Object,Object> map = (Map<Object,Object>)datum;
+      for (Map.Entry<Object,Object> entry : map.entrySet()) {
+        toString(entry.getKey(), buffer);
+        buffer.append(": ");
+        toString(entry.getValue(), buffer);
+        if (++count < map.size())
+          buffer.append(", ");
+      }
+      buffer.append("}");
+    } else if (isString(datum) || isEnum(datum)) {
+      buffer.append("\"");
+      writeEscapedString(datum.toString(), buffer);
+      buffer.append("\"");
+    } else if (isBytes(datum)) {
+      buffer.append("{\"bytes\": \"");
+      ByteBuffer bytes = (ByteBuffer)datum;
+      for (int i = bytes.position(); i < bytes.limit(); i++)
+        buffer.append((char)bytes.get(i));
+      buffer.append("\"}");
+    } else if (((datum instanceof Float) &&       // quote NaN & Infinity
+                (((Float)datum).isInfinite() || ((Float)datum).isNaN()))
+               || ((datum instanceof Double) &&
+                   (((Double)datum).isInfinite() || ((Double)datum).isNaN()))) {
+      buffer.append("\"");
+      buffer.append(datum);
+      buffer.append("\"");
+    } else {
+      buffer.append(datum);
+    }
+  }
+  
+  /* Adapted from http://code.google.com/p/json-simple */
+  private void writeEscapedString(String string, StringBuilder builder) {
+    for(int i = 0; i < string.length(); i++){
+      char ch = string.charAt(i);
+      switch(ch){
+        case '"':
+          builder.append("\\\"");
+          break;
+        case '\\':
+          builder.append("\\\\");
+          break;
+        case '\b':
+          builder.append("\\b");
+          break;
+        case '\f':
+          builder.append("\\f");
+          break;
+        case '\n':
+          builder.append("\\n");
+          break;
+        case '\r':
+          builder.append("\\r");
+          break;
+        case '\t':
+          builder.append("\\t");
+          break;
+        default:
+          // Reference: http://www.unicode.org/versions/Unicode5.1.0/
+          if((ch>='\u0000' && ch<='\u001F') || (ch>='\u007F' && ch<='\u009F') || (ch>='\u2000' && ch<='\u20FF')){
+            String hex = Integer.toHexString(ch);
+            builder.append("\\u");
+            for(int j = 0; j < 4 - hex.length(); j++)
+              builder.append('0');
+            builder.append(hex.toUpperCase());
+          } else {
+            builder.append(ch);
+          }
+        }
+    }
+  }
+
+  /** Create a schema given an example datum. */
+  public Schema induce(Object datum) {
+    if (isRecord(datum)) {
+      return getRecordSchema(datum);
+    } else if (isArray(datum)) {
+      Schema elementType = null;
+      for (Object element : getArrayAsCollection(datum)) {
+        if (elementType == null) {
+          elementType = induce(element);
+        } else if (!elementType.equals(induce(element))) {
+          throw new AvroTypeException("No mixed type arrays.");
+        }
+      }
+      if (elementType == null) {
+        throw new AvroTypeException("Empty array: "+datum);
+      }
+      return Schema.createArray(elementType);
+
+    } else if (isMap(datum)) {
+      @SuppressWarnings(value="unchecked")
+      Map<Object,Object> map = (Map<Object,Object>)datum;
+      Schema value = null;
+      for (Map.Entry<Object,Object> entry : map.entrySet()) {
+        if (value == null) {
+          value = induce(entry.getValue());
+        } else if (!value.equals(induce(entry.getValue()))) {
+          throw new AvroTypeException("No mixed type map values.");
+        }
+      }
+      if (value == null) {
+        throw new AvroTypeException("Empty map: "+datum);
+      }
+      return Schema.createMap(value);
+    } else if (datum instanceof GenericFixed) {
+      return Schema.createFixed(null, null, null,
+                                ((GenericFixed)datum).bytes().length);
+    }
+    else if (isString(datum))  return Schema.create(Type.STRING);
+    else if (isBytes(datum))   return Schema.create(Type.BYTES);
+    else if (isInteger(datum)) return Schema.create(Type.INT);
+    else if (isLong(datum))    return Schema.create(Type.LONG);
+    else if (isFloat(datum))   return Schema.create(Type.FLOAT);
+    else if (isDouble(datum))  return Schema.create(Type.DOUBLE);
+    else if (isBoolean(datum)) return Schema.create(Type.BOOLEAN);
+    else if (datum == null)    return Schema.create(Type.NULL);
+
+    else throw new AvroTypeException("Can't create schema for: "+datum);
+  }
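+
+  // Illustrative sketch: induce() derives a schema from uniform data; mixed
+  // element or value types throw AvroTypeException.
+  private static Schema exampleInduce() {
+    Map<String,Object> m = new HashMap<String,Object>();
+    m.put("k", new Utf8("v"));
+    return GenericData.get().induce(m);   // {"type":"map","values":"string"}
+  }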
+
+  /** Called by {@link GenericDatumReader#readRecord} to set a record field's
+   * value on a record instance.  The default implementation is for {@link
+   * IndexedRecord}.*/
+  public void setField(Object record, String name, int position, Object o) {
+    ((IndexedRecord)record).put(position, o);
+  }
+  
+  /** Called by {@link GenericDatumReader#readRecord} to retrieve a record
+   * field value from a reused instance.  The default implementation is for
+   * {@link IndexedRecord}.*/
+  public Object getField(Object record, String name, int position) {
+    return ((IndexedRecord)record).get(position);
+  }
+
+  /** Produce state for repeated calls to {@link
+   * #getField(Object,String,int,Object)} and {@link
+   * #setField(Object,String,int,Object,Object)} on the same record.*/
+  protected Object getRecordState(Object record, Schema schema) { return null; }
+
+  /** Version of {@link #setField} that has state. */
+  protected void setField(Object r, String n, int p, Object o, Object state) {
+    setField(r, n, p, o);
+  }
+  
+  /** Version of {@link #getField} that has state. */
+  protected Object getField(Object record, String name, int pos, Object state) {
+    return getField(record, name, pos);
+  }
+
+  /** Return the index for a datum within a union.  Implemented with {@link
+   * Schema#getIndexNamed(String)} and {@link #getSchemaName(Object)}.*/
+  public int resolveUnion(Schema union, Object datum) {
+    // if there is a logical type that works, use it first
+    // this allows logical type concrete classes to overlap with supported ones
+    // for example, a conversion could return a map
+    if (datum != null) {
+      Map<String, Conversion<?>> conversions = conversionsByClass.get(datum.getClass());
+      if (conversions != null) {
+        List<Schema> candidates = union.getTypes();
+        for (int i = 0; i < candidates.size(); i += 1) {
+          LogicalType candidateType = candidates.get(i).getLogicalType();
+          if (candidateType != null) {
+            Conversion<?> conversion = conversions.get(candidateType.getName());
+            if (conversion != null) {
+              return i;
+            }
+          }
+        }
+      }
+    }
+
+    Integer i = union.getIndexNamed(getSchemaName(datum));
+    if (i != null)
+      return i;
+    throw new UnresolvedUnionException(union, datum);
+  }
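+
+  // Illustrative sketch: for a ["null","string"] union, null resolves to
+  // branch 0 and any CharSequence to branch 1.
+  private static int exampleResolveUnion() {
+    Schema union = Schema.createUnion(Arrays.asList(
+        Schema.create(Type.NULL), Schema.create(Type.STRING)));
+    return GenericData.get().resolveUnion(union, new Utf8("s"));   // 1
+  }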
+
+  /** Return the schema full name for a datum.  Called by {@link
+   * #resolveUnion(Schema,Object)}. */
+  protected String getSchemaName(Object datum) {
+    if (datum == null)
+      return Type.NULL.getName();
+    if (isRecord(datum))
+      return getRecordSchema(datum).getFullName();
+    if (isEnum(datum))
+      return getEnumSchema(datum).getFullName();
+    if (isArray(datum))
+      return Type.ARRAY.getName();
+    if (isMap(datum))
+      return Type.MAP.getName();
+    if (isFixed(datum))
+      return getFixedSchema(datum).getFullName();
+    if (isString(datum))
+      return Type.STRING.getName();
+    if (isBytes(datum))
+      return Type.BYTES.getName();
+    if (isInteger(datum))
+      return Type.INT.getName();
+    if (isLong(datum))
+      return Type.LONG.getName();
+    if (isFloat(datum))
+      return Type.FLOAT.getName();
+    if (isDouble(datum))
+      return Type.DOUBLE.getName();
+    if (isBoolean(datum))
+      return Type.BOOLEAN.getName();
+    throw new AvroRuntimeException
+      (String.format("Unknown datum type %s: %s",
+                     datum.getClass().getName(), datum));
+  }
+
+  /** Called by {@link #resolveUnion(Schema,Object)}.  May be overridden for
+      alternate data representations.*/
+  protected boolean instanceOf(Schema schema, Object datum) {
+    switch (schema.getType()) {
+    case RECORD:
+      if (!isRecord(datum)) return false;
+      return (schema.getFullName() == null)
+        ? getRecordSchema(datum).getFullName() == null
+        : schema.getFullName().equals(getRecordSchema(datum).getFullName());
+    case ENUM:
+      if (!isEnum(datum)) return false;
+      return schema.getFullName().equals(getEnumSchema(datum).getFullName());
+    case ARRAY:   return isArray(datum);
+    case MAP:     return isMap(datum);
+    case FIXED:
+      if (!isFixed(datum)) return false;
+      return schema.getFullName().equals(getFixedSchema(datum).getFullName());
+    case STRING:  return isString(datum);
+    case BYTES:   return isBytes(datum);
+    case INT:     return isInteger(datum);
+    case LONG:    return isLong(datum);
+    case FLOAT:   return isFloat(datum);
+    case DOUBLE:  return isDouble(datum);
+    case BOOLEAN: return isBoolean(datum);
+    case NULL:    return datum == null;
+    default: throw new AvroRuntimeException("Unexpected type: " +schema);
+    }
+  }
+
+  /** Called by the default implementation of {@link #instanceOf}.*/
+  protected boolean isArray(Object datum) {
+    return datum instanceof Collection;
+  }
+
+  /** Called to access an array as a collection. */
+  protected Collection getArrayAsCollection(Object datum) {
+    return (Collection)datum;
+  }
+
+  /** Called by the default implementation of {@link #instanceOf}.*/
+  protected boolean isRecord(Object datum) {
+    return datum instanceof IndexedRecord;
+  }
+
+  /** Called to obtain the schema of a record.  By default calls
+   * {@link GenericContainer#getSchema()}.  May be overridden for alternate
+   * record representations. */
+  protected Schema getRecordSchema(Object record) {
+    return ((GenericContainer)record).getSchema();
+  }
+
+  /** Called by the default implementation of {@link #instanceOf}.*/
+  protected boolean isEnum(Object datum) {
+    return datum instanceof GenericEnumSymbol;
+  }
+  
+  /** Called to obtain the schema of an enum.  By default calls
+   * {@link GenericContainer#getSchema()}.  May be overridden for alternate
+   * enum representations. */
+  protected Schema getEnumSchema(Object enu) {
+    return ((GenericContainer)enu).getSchema();
+  }
+
+  /** Called by the default implementation of {@link #instanceOf}.*/
+  protected boolean isMap(Object datum) {
+    return datum instanceof Map;
+  }
+  
+  /** Called by the default implementation of {@link #instanceOf}.*/
+  protected boolean isFixed(Object datum) {
+    return datum instanceof GenericFixed;
+  }
+
+  /** Called to obtain the schema of a fixed.  By default calls
+   * {@link GenericContainer#getSchema()}.  May be overridden for alternate
+   * fixed representations. */
+  protected Schema getFixedSchema(Object fixed) {
+    return ((GenericContainer)fixed).getSchema();
+  }
+
+  /** Called by the default implementation of {@link #instanceOf}.*/
+  protected boolean isString(Object datum) {
+    return datum instanceof CharSequence;
+  }
+
+  /** Called by the default implementation of {@link #instanceOf}.*/
+  protected boolean isBytes(Object datum) {
+    return datum instanceof ByteBuffer;
+  }
+
+  /**
+   * Called by the default implementation of {@link #instanceOf}.
+   */
+  protected boolean isInteger(Object datum) {
+    return datum instanceof Integer;
+  }
+
+  /**
+   * Called by the default implementation of {@link #instanceOf}.
+   */
+  protected boolean isLong(Object datum) {
+    return datum instanceof Long;
+  }
+
+  /**
+   * Called by the default implementation of {@link #instanceOf}.
+   */
+  protected boolean isFloat(Object datum) {
+    return datum instanceof Float;
+  }
+
+  /**
+   * Called by the default implementation of {@link #instanceOf}.
+   */
+  protected boolean isDouble(Object datum) {
+    return datum instanceof Double;
+  }
+
+  /**
+   * Called by the default implementation of {@link #instanceOf}.
+   */
+  protected boolean isBoolean(Object datum) {
+    return datum instanceof Boolean;
+  }
+
+  /** Compute a hash code according to a schema, consistent with {@link
+   * #compare(Object,Object,Schema)}. */
+  public int hashCode(Object o, Schema s) {
+    if (o == null) return 0;                      // incomplete datum
+    int hashCode = 1;
+    switch (s.getType()) {
+    case RECORD:
+      for (Field f : s.getFields()) {
+        if (f.order() == Field.Order.IGNORE)
+          continue;
+        hashCode = hashCodeAdd(hashCode,
+                               getField(o, f.name(), f.pos()), f.schema());
+      }
+      return hashCode;
+    case ARRAY:
+      Collection<?> a = (Collection<?>)o;
+      Schema elementType = s.getElementType();
+      for (Object e : a)
+        hashCode = hashCodeAdd(hashCode, e, elementType);
+      return hashCode;
+    case UNION:
+      return hashCode(o, s.getTypes().get(resolveUnion(s, o)));
+    case ENUM:
+      return s.getEnumOrdinal(o.toString());
+    case NULL:
+      return 0;
+    case STRING:
+      return (o instanceof Utf8 ? o : new Utf8(o.toString())).hashCode();
+    default:
+      return o.hashCode();
+    }
+  }
+
+  /** Add the hash code for an object into an accumulated hash code. */
+  protected int hashCodeAdd(int hashCode, Object o, Schema s) {
+    return 31*hashCode + hashCode(o, s);
+  }
+
+  /** Compare objects according to their schema.  If equal, return zero; if
+   * greater, return a positive number; if less, return a negative number.
+   * Order is consistent with that of
+   * {@link BinaryData#compare(byte[], int, byte[], int, Schema)}.
+   */
+  public int compare(Object o1, Object o2, Schema s) {
+    return compare(o1, o2, s, false);
+  }
+
+  /** Comparison implementation.  When equals is true, only checks for equality,
+   * not for order. */
+  @SuppressWarnings(value="unchecked")
+  protected int compare(Object o1, Object o2, Schema s, boolean equals) {
+    if (o1 == o2) return 0;
+    switch (s.getType()) {
+    case RECORD:
+      for (Field f : s.getFields()) {
+        if (f.order() == Field.Order.IGNORE)
+          continue;                               // ignore this field
+        int pos = f.pos();
+        String name = f.name();
+        int compare =
+          compare(getField(o1, name, pos), getField(o2, name, pos),
+                  f.schema(), equals);
+        if (compare != 0)                         // not equal
+          return f.order() == Field.Order.DESCENDING ? -compare : compare;
+      }
+      return 0;
+    case ENUM:
+      return s.getEnumOrdinal(o1.toString()) - s.getEnumOrdinal(o2.toString());
+    case ARRAY:
+      Collection a1 = (Collection)o1;
+      Collection a2 = (Collection)o2;
+      Iterator e1 = a1.iterator();
+      Iterator e2 = a2.iterator();
+      Schema elementType = s.getElementType();
+      while(e1.hasNext() && e2.hasNext()) {
+        int compare = compare(e1.next(), e2.next(), elementType, equals);
+        if (compare != 0) return compare;
+      }
+      return e1.hasNext() ? 1 : (e2.hasNext() ? -1 : 0);
+    case MAP:
+      if (equals)
+        return ((Map)o1).equals(o2) ? 0 : 1;
+      throw new AvroRuntimeException("Can't compare maps!");
+    case UNION:
+      int i1 = resolveUnion(s, o1);
+      int i2 = resolveUnion(s, o2);
+      return (i1 == i2)
+        ? compare(o1, o2, s.getTypes().get(i1), equals)
+        : i1 - i2;
+    case NULL:
+      return 0;
+    case STRING:
+      Utf8 u1 = o1 instanceof Utf8 ? (Utf8)o1 : new Utf8(o1.toString());
+      Utf8 u2 = o2 instanceof Utf8 ? (Utf8)o2 : new Utf8(o2.toString());
+      return u1.compareTo(u2);
+    default:
+      return ((Comparable)o1).compareTo(o2);
+    }
+  }
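+
+  // Illustrative sketch: compare() orders datums by schema; for an int
+  // schema this reduces to Integer.compareTo.
+  private static int exampleCompare() {
+    Schema ints = Schema.create(Type.INT);
+    return GenericData.get().compare(1, 2, ints);   // negative
+  }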
+
+  private final Map<Field, Object> defaultValueCache
+    = Collections.synchronizedMap(new WeakHashMap<Field, Object>());
+
+  /**
+   * Gets the default value of the given field, if any.
+   * @param field the field whose default value should be retrieved.
+   * @return the default value associated with the given field, 
+   * or null if none is specified in the schema.
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  public Object getDefaultValue(Field field) {    
+    JsonNode json = field.defaultValue();
+    if (json == null)
+      throw new AvroRuntimeException("Field " + field
+                                     + " not set and has no default value");
+    if (json.isNull()
+        && (field.schema().getType() == Type.NULL
+            || (field.schema().getType() == Type.UNION
+                && field.schema().getTypes().get(0).getType() == Type.NULL))) {
+      return null;
+    }
+    
+    // Check the cache
+    Object defaultValue = defaultValueCache.get(field);
+    
+    // If not cached, get the default Java value by encoding the default JSON
+    // value and then decoding it:
+    if (defaultValue == null)
+      try {
+        ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(baos, null);
+        ResolvingGrammarGenerator.encode(encoder, field.schema(), json);
+        encoder.flush();
+        BinaryDecoder decoder =
+          DecoderFactory.get().binaryDecoder(baos.toByteArray(), null);
+        defaultValue =
+          createDatumReader(field.schema()).read(null, decoder);
+
+        defaultValueCache.put(field, defaultValue);
+      } catch (IOException e) {
+        throw new AvroRuntimeException(e);
+      }
+
+    return defaultValue;
+  }
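+
+  // Illustrative sketch: getDefaultValue() decodes the JSON default of a
+  // field into a Java value; the schema text is hypothetical.
+  private static Object exampleDefaultValue() {
+    Schema schema = new Schema.Parser().parse(
+        "{\"type\":\"record\",\"name\":\"R\",\"fields\":"
+        + "[{\"name\":\"n\",\"type\":\"int\",\"default\":7}]}");
+    return GenericData.get().getDefaultValue(schema.getField("n"));   // 7
+  }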
+
+  private static final Schema STRINGS = Schema.create(Type.STRING);
+
+  /**
+   * Makes a deep copy of a value given its schema.
+   * @param schema the schema of the value to deep copy.
+   * @param value the value to deep copy.
+   * @return a deep copy of the given value.
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  public <T> T deepCopy(Schema schema, T value) {
+    if (value == null) {
+      return null;
+    }
+    switch (schema.getType()) {
+      case ARRAY:
+        List<Object> arrayValue = (List) value;
+        List<Object> arrayCopy = new GenericData.Array<Object>(
+            arrayValue.size(), schema);
+        for (Object obj : arrayValue) {
+          arrayCopy.add(deepCopy(schema.getElementType(), obj));
+        }
+        return (T)arrayCopy;
+      case BOOLEAN:
+        return value; // immutable
+      case BYTES:
+        ByteBuffer byteBufferValue = (ByteBuffer) value;
+        int start = byteBufferValue.position();
+        int length = byteBufferValue.limit() - start;
+        byte[] bytesCopy = new byte[length];
+        byteBufferValue.get(bytesCopy, 0, length);
+        byteBufferValue.position(start);
+        return (T)ByteBuffer.wrap(bytesCopy, 0, length);
+      case DOUBLE:
+        return value; // immutable
+      case ENUM:
+        // Enums are immutable; shallow copy will suffice
+        return value;
+      case FIXED:
+        return (T)createFixed(null, ((GenericFixed) value).bytes(), schema);
+      case FLOAT:
+        return value; // immutable
+      case INT:
+        return value; // immutable
+      case LONG:
+        return value; // immutable
+      case MAP:
+        Map<CharSequence, Object> mapValue = (Map) value;
+        Map<CharSequence, Object> mapCopy = 
+          new HashMap<CharSequence, Object>(mapValue.size());
+        for (Map.Entry<CharSequence, Object> entry : mapValue.entrySet()) {
+          mapCopy.put((CharSequence)(deepCopy(STRINGS, entry.getKey())),
+              deepCopy(schema.getValueType(), entry.getValue()));
+        }
+        return (T)mapCopy;
+      case NULL:
+        return null;
+      case RECORD:
+        Object oldState = getRecordState(value, schema);
+        Object newRecord = newRecord(null, schema);
+        Object newState = getRecordState(newRecord, schema);
+        for (Field f : schema.getFields()) {
+          int pos = f.pos();
+          String name = f.name();
+          Object newValue = deepCopy(f.schema(),
+                                     getField(value, name, pos, oldState));
+          setField(newRecord, name, pos, newValue, newState);
+        }
+        return (T)newRecord;
+      case STRING:
+        // Strings are immutable
+        if (value instanceof String) {
+          return (T)value;
+        }
+        
+        // Some CharSequence subclasses are mutable, so we still need to make 
+        // a copy
+        else if (value instanceof Utf8) {
+          // Utf8 copy constructor is more efficient than converting 
+          // to string and then back to Utf8
+          return (T)new Utf8((Utf8)value);
+        }
+        return (T)new Utf8(value.toString());
+      case UNION:
+        return deepCopy(
+            schema.getTypes().get(resolveUnion(schema, value)), value);
+      default:
+        throw new AvroRuntimeException(
+            "Deep copy failed for schema \"" + schema + "\" and value \"" +
+            value + "\"");
+    }
+  }
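+
+  // Illustrative sketch: deepCopy() returns a structurally equal but
+  // independent value; the record and schema are supplied by the caller.
+  private static GenericRecord exampleDeepCopy(Schema schema, GenericRecord r) {
+    GenericRecord copy = GenericData.get().deepCopy(schema, r);
+    assert copy != r && copy.equals(r);
+    return copy;
+  }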
+  
+  /** Called to create a fixed value. May be overridden for alternate fixed
+   * representations.  By default, returns {@link GenericFixed}. */
+  public Object createFixed(Object old, Schema schema) {
+    if ((old instanceof GenericFixed)
+        && ((GenericFixed)old).bytes().length == schema.getFixedSize())
+      return old;
+    return new GenericData.Fixed(schema);
+  }
+  
+  /** Called to create a fixed value. May be overridden for alternate fixed
+   * representations.  By default, returns {@link GenericFixed}. */
+  public Object createFixed(Object old, byte[] bytes, Schema schema) {
+    GenericFixed fixed = (GenericFixed)createFixed(old, schema);
+    System.arraycopy(bytes, 0, fixed.bytes(), 0, schema.getFixedSize());
+    return fixed;
+  }
+  
+  /** Called to create an enum value. May be overridden for alternate enum
+   * representations.  By default, returns a GenericEnumSymbol. */
+  public Object createEnum(String symbol, Schema schema) {
+    return new EnumSymbol(schema, symbol);
+  }
+
+  /**
+   * Called to create new record instances. Subclasses may override to use a
+   * different record implementation. The returned instance must conform to the
+   * schema provided. If the old object contains fields not present in the
+   * schema, they should either be removed from the old object, or it should
+   * create a new instance that conforms to the schema. By default, this returns
+   * a {@link GenericData.Record}.
+   */
+  public Object newRecord(Object old, Schema schema) {
+    if (old instanceof IndexedRecord) {
+      IndexedRecord record = (IndexedRecord)old;
+      if (record.getSchema() == schema)
+        return record;
+    }
+    return new GenericData.Record(schema);
+  }
+  
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumReader.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumReader.java
new file mode 100644
index 0000000..9417b22
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumReader.java
@@ -0,0 +1,569 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.generic;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.IdentityHashMap;
+import java.util.Map;
+import java.util.Collection;
+import java.nio.ByteBuffer;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Conversion;
+import org.apache.avro.LogicalType;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.ResolvingDecoder;
+import org.apache.avro.util.Utf8;
+import org.apache.avro.util.WeakIdentityHashMap;
+
+/** {@link DatumReader} for generic Java objects. */
+public class GenericDatumReader<D> implements DatumReader<D> {
+  private final GenericData data;
+  private Schema actual;
+  private Schema expected;
+  
+  private ResolvingDecoder creatorResolver = null;
+  private final Thread creator;
+
+  public GenericDatumReader() {
+    this(null, null, GenericData.get());
+  }
+
+  /** Construct where the writer's and reader's schemas are the same. */
+  public GenericDatumReader(Schema schema) {
+    this(schema, schema, GenericData.get());
+  }
+
+  /** Construct given writer's and reader's schema. */
+  public GenericDatumReader(Schema writer, Schema reader) {
+    this(writer, reader, GenericData.get());
+  }
+
+  public GenericDatumReader(Schema writer, Schema reader, GenericData data) {
+    this(data);
+    this.actual = writer;
+    this.expected = reader;
+  }
+
+  protected GenericDatumReader(GenericData data) {
+    this.data = data;
+    this.creator = Thread.currentThread();
+  }
+
+  /** Return the {@link GenericData} implementation. */
+  public GenericData getData() { return data; }
+
+  /** Return the writer's schema. */
+  public Schema getSchema() { return actual; }
+
+  @Override
+  public void setSchema(Schema writer) {
+    this.actual = writer;
+    if (expected == null) {
+      expected = actual;
+    }
+    creatorResolver = null;
+  }
+
+  /** Get the reader's schema. */
+  public Schema getExpected() { return expected; }
+
+  /** Set the reader's schema. */
+  public void setExpected(Schema reader) {
+    this.expected = reader;
+    creatorResolver = null;
+  }
+
+  private static final ThreadLocal<Map<Schema,Map<Schema,ResolvingDecoder>>>
+    RESOLVER_CACHE =
+    new ThreadLocal<Map<Schema,Map<Schema,ResolvingDecoder>>>() {
+    protected Map<Schema,Map<Schema,ResolvingDecoder>> initialValue() {
+      return new WeakIdentityHashMap<Schema,Map<Schema,ResolvingDecoder>>();
+    }
+  };
+
+  /** Gets a resolving decoder for use by this GenericDatumReader.
+   *  Unstable API.
+   *  Currently uses a thread-local cache so that resolvers are not
+   *  rebuilt on every read, because constructing them is very expensive.
+   */
+  protected final ResolvingDecoder getResolver(Schema actual, Schema expected)
+    throws IOException {
+    Thread currThread = Thread.currentThread();
+    ResolvingDecoder resolver;
+    if (currThread == creator && creatorResolver != null) {
+      return creatorResolver;
+    } 
+
+    Map<Schema,ResolvingDecoder> cache = RESOLVER_CACHE.get().get(actual);
+    if (cache == null) {
+      cache = new WeakIdentityHashMap<Schema,ResolvingDecoder>();
+      RESOLVER_CACHE.get().put(actual, cache);
+    }
+    resolver = cache.get(expected);
+    if (resolver == null) {
+      resolver = DecoderFactory.get().resolvingDecoder(
+          Schema.applyAliases(actual, expected), expected, null);
+      cache.put(expected, resolver);
+    }
+    
+    if (currThread == creator){
+      creatorResolver = resolver;
+    }
+
+    return resolver;
+  }
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public D read(D reuse, Decoder in) throws IOException {
+    ResolvingDecoder resolver = getResolver(actual, expected);
+    resolver.configure(in);
+    D result = (D) read(reuse, expected, resolver);
+    resolver.drain();
+    return result;
+  }
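+
+  // Illustrative sketch: the usual read path.  The byte array is assumed to
+  // hold one datum encoded with the same writer's schema.
+  private static Object exampleRead(Schema schema, byte[] bytes)
+      throws IOException {
+    DatumReader<Object> reader = new GenericDatumReader<Object>(schema);
+    Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
+    return reader.read(null, decoder);
+  }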
+  
+  /** Called to read data.*/
+  protected Object read(Object old, Schema expected,
+      ResolvingDecoder in) throws IOException {
+    Object datum = readWithoutConversion(old, expected, in);
+    LogicalType logicalType = expected.getLogicalType();
+    if (logicalType != null) {
+      Conversion<?> conversion = getData().getConversionFor(logicalType);
+      if (conversion != null) {
+        return convert(datum, expected, logicalType, conversion);
+      }
+    }
+    return datum;
+  }
+
+  protected Object readWithConversion(Object old, Schema expected,
+                                      LogicalType logicalType,
+                                      Conversion<?> conversion,
+                                      ResolvingDecoder in) throws IOException {
+    return convert(readWithoutConversion(old, expected, in),
+        expected, logicalType, conversion);
+  }
+
+  protected Object readWithoutConversion(Object old, Schema expected,
+      ResolvingDecoder in) throws IOException {
+    switch (expected.getType()) {
+    case RECORD:  return readRecord(old, expected, in);
+    case ENUM:    return readEnum(expected, in);
+    case ARRAY:   return readArray(old, expected, in);
+    case MAP:     return readMap(old, expected, in);
+    case UNION:   return read(old, expected.getTypes().get(in.readIndex()), in);
+    case FIXED:   return readFixed(old, expected, in);
+    case STRING:  return readString(old, expected, in);
+    case BYTES:   return readBytes(old, expected, in);
+    case INT:     return readInt(old, expected, in);
+    case LONG:    return in.readLong();
+    case FLOAT:   return in.readFloat();
+    case DOUBLE:  return in.readDouble();
+    case BOOLEAN: return in.readBoolean();
+    case NULL:    in.readNull(); return null;
+    default: throw new AvroRuntimeException("Unknown type: " + expected);
+    }
+  }
+
+  protected Object convert(Object datum, Schema schema, LogicalType type,
+                           Conversion<?> conversion) {
+    try {
+      switch (schema.getType()) {
+      case RECORD:  return conversion.fromRecord((IndexedRecord) datum, schema, type);
+      case ENUM:    return conversion.fromEnumSymbol((GenericEnumSymbol) datum, schema, type);
+      case ARRAY:   return conversion.fromArray(getData().getArrayAsCollection(datum), schema, type);
+      case MAP:     return conversion.fromMap((Map<?, ?>) datum, schema, type);
+      case FIXED:   return conversion.fromFixed((GenericFixed) datum, schema, type);
+      case STRING:  return conversion.fromCharSequence((CharSequence) datum, schema, type);
+      case BYTES:   return conversion.fromBytes((ByteBuffer) datum, schema, type);
+      case INT:     return conversion.fromInt((Integer) datum, schema, type);
+      case LONG:    return conversion.fromLong((Long) datum, schema, type);
+      case FLOAT:   return conversion.fromFloat((Float) datum, schema, type);
+      case DOUBLE:  return conversion.fromDouble((Double) datum, schema, type);
+      case BOOLEAN: return conversion.fromBoolean((Boolean) datum, schema, type);
+      }
+      return datum;
+    } catch (ClassCastException e) {
+      throw new AvroRuntimeException("Cannot convert " + datum + ":" +
+          datum.getClass().getSimpleName() + ": expected generic type", e);
+    }
+  }
+
+  /** Called to read a record instance. May be overridden for alternate record
+   * representations.*/
+  protected Object readRecord(Object old, Schema expected, 
+      ResolvingDecoder in) throws IOException {
+    Object r = data.newRecord(old, expected);
+    Object state = data.getRecordState(r, expected);
+    
+    for (Field f : in.readFieldOrder()) {
+      int pos = f.pos();
+      String name = f.name();
+      Object oldDatum = null;
+      if (old!=null) {
+        oldDatum = data.getField(r, name, pos, state);
+      }
+      readField(r, f, oldDatum, in, state);
+    }
+
+    return r;
+  }
+  
+  /** Called to read a single field of a record. May be overridden for more 
+   * efficient or alternate implementations.*/
+  protected void readField(Object r, Field f, Object oldDatum,
+    ResolvingDecoder in, Object state) throws IOException {
+    data.setField(r, f.name(), f.pos(), read(oldDatum, f.schema(), in), state);
+  }
+  
+  /** Called to read an enum value. May be overridden for alternate enum
+   * representations.  By default, returns a GenericEnumSymbol. */
+  protected Object readEnum(Schema expected, Decoder in) throws IOException {
+    return createEnum(expected.getEnumSymbols().get(in.readEnum()), expected);
+  }
+
+  /** Called to create an enum value. May be overridden for alternate enum
+   * representations.  By default, returns a GenericEnumSymbol. */
+  protected Object createEnum(String symbol, Schema schema) {
+    return data.createEnum(symbol, schema);
+  }
+
+  /** Called to read an array instance.  May be overridden for alternate array
+   * representations.*/
+  protected Object readArray(Object old, Schema expected,
+      ResolvingDecoder in) throws IOException {
+    Schema expectedType = expected.getElementType();
+    long l = in.readArrayStart();
+    long base = 0;
+    if (l > 0) {
+      LogicalType logicalType = expectedType.getLogicalType();
+      Conversion<?> conversion = getData().getConversionFor(logicalType);
+      Object array = newArray(old, (int) l, expected);
+      do {
+        if (logicalType != null && conversion != null) {
+          for (long i = 0; i < l; i++) {
+            addToArray(array, base + i, readWithConversion(
+                peekArray(array), expectedType, logicalType, conversion, in));
+          }
+        } else {
+          for (long i = 0; i < l; i++) {
+            addToArray(array, base + i, readWithoutConversion(
+                peekArray(array), expectedType, in));
+          }
+        }
+        base += l;
+      } while ((l = in.arrayNext()) > 0);
+      return array;
+    } else {
+      return newArray(old, 0, expected);
+    }
+  }
+
+  /** Called by the default implementation of {@link #readArray} to retrieve a
+   * value from a reused instance.  The default implementation is for {@link
+   * GenericArray}.*/
+  @SuppressWarnings("unchecked")
+  protected Object peekArray(Object array) {
+    return (array instanceof GenericArray)
+      ? ((GenericArray)array).peek()
+      : null;
+  }
+
+  /** Called by the default implementation of {@link #readArray} to add a
+   * value.  The default implementation is for {@link Collection}.*/
+  @SuppressWarnings("unchecked")
+  protected void addToArray(Object array, long pos, Object e) {
+    ((Collection) array).add(e);
+  }
+  
+  /** Called to read a map instance.  May be overridden for alternate map
+   * representations.*/
+  protected Object readMap(Object old, Schema expected,
+      ResolvingDecoder in) throws IOException {
+    Schema eValue = expected.getValueType();
+    long l = in.readMapStart();
+    LogicalType logicalType = eValue.getLogicalType();
+    Conversion<?> conversion = getData().getConversionFor(logicalType);
+    Object map = newMap(old, (int) l);
+    if (l > 0) {
+      do {
+        if (logicalType != null && conversion != null) {
+          for (int i = 0; i < l; i++) {
+            addToMap(map, readMapKey(null, expected, in),
+                readWithConversion(null, eValue, logicalType, conversion, in));
+          }
+        } else {
+          for (int i = 0; i < l; i++) {
+            addToMap(map, readMapKey(null, expected, in),
+                readWithoutConversion(null, eValue, in));
+          }
+        }
+      } while ((l = in.mapNext()) > 0);
+    }
+    return map;
+  }
+
+  /** Called by the default implementation of {@link #readMap} to read a
+   * key value.  The default implementation delegates to
+   * {@link #readString(Object, org.apache.avro.io.Decoder)}.*/
+  protected Object readMapKey(Object old, Schema expected, Decoder in)
+    throws IOException{
+    return readString(old, expected, in);
+  }
+
+  /** Called by the default implementation of {@link #readMap} to add a
+   * key/value pair.  The default implementation is for {@link Map}.*/
+  @SuppressWarnings("unchecked")
+  protected void addToMap(Object map, Object key, Object value) {
+    ((Map) map).put(key, value);
+  }
+  
+  /** Called to read a fixed value. May be overridden for alternate fixed
+   * representations.  By default, returns {@link GenericFixed}. */
+  protected Object readFixed(Object old, Schema expected, Decoder in)
+    throws IOException {
+    GenericFixed fixed = (GenericFixed)data.createFixed(old, expected);
+    in.readFixed(fixed.bytes(), 0, expected.getFixedSize());
+    return fixed;
+  }
+  
+  /**
+   * Called to create a fixed value. May be overridden for alternate fixed
+   * representations.  By default, returns {@link GenericFixed}.
+   * @deprecated As of Avro 1.6.0 this method has been moved to
+   * {@link GenericData#createFixed(Object, Schema)}
+   */
+  @Deprecated
+  protected Object createFixed(Object old, Schema schema) {
+    return data.createFixed(old, schema);
+  }
+
+  /**
+   * Called to create a fixed value. May be overridden for alternate fixed
+   * representations.  By default, returns {@link GenericFixed}.
+   * @deprecated As of Avro 1.6.0 this method has been moved to
+   * {@link GenericData#createFixed(Object, byte[], Schema)}
+   */
+  @Deprecated
+  protected Object createFixed(Object old, byte[] bytes, Schema schema) {
+    return data.createFixed(old, bytes, schema);
+  }
+  
+  /**
+   * Called to create new record instances. Subclasses may override to use a
+   * different record implementation. The returned instance must conform to the
+   * schema provided. If the old object contains fields not present in the
+   * schema, they should either be removed from the old object, or it should
+   * create a new instance that conforms to the schema. By default, this returns
+   * a {@link GenericData.Record}.
+   * @deprecated As of Avro 1.6.0 this method has been moved to 
+   * {@link GenericData#newRecord(Object, Schema)}
+   */
+  @Deprecated
+  protected Object newRecord(Object old, Schema schema) {
+    return data.newRecord(old, schema);
+  }
+
+  /** Called to create new array instances.  Subclasses may override to use a
+   * different array implementation.  By default, this returns a {@link
+   * GenericData.Array}.*/
+  @SuppressWarnings("unchecked")
+  protected Object newArray(Object old, int size, Schema schema) {
+    if (old instanceof Collection) {
+      ((Collection) old).clear();
+      return old;
+    } else return new GenericData.Array(size, schema);
+  }
+
+  /** Called to create new map instances.  Subclasses may override to use a
+   * different map implementation.  By default, this returns a {@link
+   * HashMap}.*/
+  @SuppressWarnings("unchecked")
+  protected Object newMap(Object old, int size) {
+    if (old instanceof Map) {
+      ((Map) old).clear();
+      return old;
+    } else return new HashMap<Object, Object>(size);
+  }
+
+  /** Called to read strings.  Subclasses may override to use a different
+   * string representation.  By default, this calls {@link
+   * #readString(Object,Decoder)}.*/
+  protected Object readString(Object old, Schema expected,
+                              Decoder in) throws IOException {
+    Class stringClass = getStringClass(expected);
+    if (stringClass == String.class)
+      return in.readString();
+    if (stringClass == CharSequence.class)
+      return readString(old, in);
+    return newInstanceFromString(stringClass, in.readString());
+  }                  
+
+  /** Called to read strings.  Subclasses may override to use a different
+   * string representation.  By default, this calls {@link
+   * Decoder#readString(Utf8)}.*/
+  protected Object readString(Object old, Decoder in) throws IOException {
+    return in.readString(old instanceof Utf8 ? (Utf8)old : null);
+  }
+
+  /** Called to create a string from a default value.  Subclasses may override
+   * to use a different string representation.  By default, this calls {@link
+   * Utf8#Utf8(String)}.*/
+  protected Object createString(String value) { return new Utf8(value); }
+
+  /** Determines the class used to represent a string Schema.  By default
+   * uses {@link GenericData#STRING_PROP} to determine whether {@link Utf8} or
+   * {@link String} is used.  Subclasses may override for alternate
+   * representations.
+   */
+  protected Class findStringClass(Schema schema) {
+    String name = schema.getProp(GenericData.STRING_PROP);
+    if (name == null) return CharSequence.class;
+
+    switch (GenericData.StringType.valueOf(name)) {
+      case String:
+        return String.class;
+      default:
+        return CharSequence.class;
+    }
+  }
+
+  private Map<Schema,Class> stringClassCache =
+    new IdentityHashMap<Schema,Class>();
+
+  private Class getStringClass(Schema s) {
+    Class c = stringClassCache.get(s);
+    if (c == null) {
+      c = findStringClass(s);
+      stringClassCache.put(s, c);
+    }
+    return c;
+  }
+
+  private final Map<Class,Constructor> stringCtorCache =
+    new HashMap<Class,Constructor>();
+
+  @SuppressWarnings("unchecked")
+  protected Object newInstanceFromString(Class c, String s) {
+    try {
+      Constructor ctor = stringCtorCache.get(c);
+      if (ctor == null) {
+        ctor = c.getDeclaredConstructor(String.class);
+        ctor.setAccessible(true);
+        stringCtorCache.put(c, ctor);
+      }
+      return ctor.newInstance(s);
+    } catch (NoSuchMethodException e) {
+      throw new AvroRuntimeException(e);
+    } catch (InstantiationException e) {
+      throw new AvroRuntimeException(e);
+    } catch (IllegalAccessException e) {
+      throw new AvroRuntimeException(e);
+    } catch (InvocationTargetException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  /** Called to read byte arrays.  Subclasses may override to use a different
+   * byte array representation.  By default, this calls {@link
+   * Decoder#readBytes(ByteBuffer)}.*/
+  protected Object readBytes(Object old, Schema s, Decoder in)
+    throws IOException {
+    return readBytes(old, in);
+  }
+
+  /** Called to read byte arrays.  Subclasses may override to use a different
+   * byte array representation.  By default, this calls {@link
+   * Decoder#readBytes(ByteBuffer)}.*/
+  protected Object readBytes(Object old, Decoder in) throws IOException {
+    return in.readBytes(old instanceof ByteBuffer ? (ByteBuffer) old : null);
+  }
+
+  /** Called to read integers.  Subclasses may override to use a different
+   * integer representation.  By default, this calls {@link
+   * Decoder#readInt()}.*/
+  protected Object readInt(Object old, Schema expected, Decoder in)
+    throws IOException {
+    return in.readInt();
+  }
+
+  /** Called to create byte arrays from default values.  Subclasses may
+   * override to use a different byte array representation.  By default, this
+   * calls {@link ByteBuffer#wrap(byte[])}.*/
+  protected Object createBytes(byte[] value) { return ByteBuffer.wrap(value); }
+
+  /** Skip an instance of a schema. */
+  public static void skip(Schema schema, Decoder in) throws IOException {
+    switch (schema.getType()) {
+    case RECORD:
+      for (Field field : schema.getFields())
+        skip(field.schema(), in);
+      break;
+    case ENUM:
+      in.readInt();
+      break;
+    case ARRAY:
+      Schema elementType = schema.getElementType();
+      for (long l = in.skipArray(); l > 0; l = in.skipArray()) {
+        for (long i = 0; i < l; i++) {
+          skip(elementType, in);
+        }
+      }
+      break;
+    case MAP:
+      Schema value = schema.getValueType();
+      for (long l = in.skipMap(); l > 0; l = in.skipMap()) {
+        for (long i = 0; i < l; i++) {
+          in.skipString();
+          skip(value, in);
+        }
+      }
+      break;
+    case UNION:
+      skip(schema.getTypes().get((int)in.readIndex()), in);
+      break;
+    case FIXED:
+      in.skipFixed(schema.getFixedSize());
+      break;
+    case STRING:
+      in.skipString();
+      break;
+    case BYTES:
+      in.skipBytes();
+      break;
+    case INT:     in.readInt();           break;
+    case LONG:    in.readLong();          break;
+    case FLOAT:   in.readFloat();         break;
+    case DOUBLE:  in.readDouble();        break;
+    case BOOLEAN: in.readBoolean();       break;
+    case NULL:                            break;
+    default: throw new RuntimeException("Unknown type: "+schema);
+    }
+  }
+
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumWriter.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumWriter.java
new file mode 100644
index 0000000..7cfa022
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericDatumWriter.java
@@ -0,0 +1,275 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.generic;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ConcurrentModificationException;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Collection;
+
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.Conversion;
+import org.apache.avro.LogicalType;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.Encoder;
+
+/** {@link DatumWriter} for generic Java objects. */
+public class GenericDatumWriter<D> implements DatumWriter<D> {
+  private final GenericData data;
+  private Schema root;
+
+  public GenericDatumWriter() { this(GenericData.get()); }
+
+  protected GenericDatumWriter(GenericData data) { this.data = data; }
+
+  public GenericDatumWriter(Schema root) {
+    this();
+    setSchema(root);
+  }
+
+  public GenericDatumWriter(Schema root, GenericData data) {
+    this(data);
+    setSchema(root);
+  }
+
+  /** Return the {@link GenericData} implementation. */
+  public GenericData getData() { return data; }
+
+  public void setSchema(Schema root) { this.root = root; }
+
+  public void write(D datum, Encoder out) throws IOException {
+    write(root, datum, out);
+  }
+
+  /** Called to write data.*/
+  protected void write(Schema schema, Object datum, Encoder out)
+      throws IOException {
+    LogicalType logicalType = schema.getLogicalType();
+    if (datum != null && logicalType != null) {
+      Conversion<?> conversion = getData()
+          .getConversionByClass(datum.getClass(), logicalType);
+      writeWithoutConversion(schema,
+          convert(schema, logicalType, conversion, datum), out);
+    } else {
+      writeWithoutConversion(schema, datum, out);
+    }
+  }
+
+  private <T> Object convert(Schema schema, LogicalType logicalType,
+                             Conversion<T> conversion, Object datum) {
+    if (conversion == null) {
+      return datum;
+    }
+    Class<T> fromClass = conversion.getConvertedType();
+    switch (schema.getType()) {
+    case RECORD:  return conversion.toRecord(fromClass.cast(datum), schema, logicalType);
+    case ENUM:    return conversion.toEnumSymbol(fromClass.cast(datum), schema, logicalType);
+    case ARRAY:   return conversion.toArray(fromClass.cast(datum), schema, logicalType);
+    case MAP:     return conversion.toMap(fromClass.cast(datum), schema, logicalType);
+    case FIXED:   return conversion.toFixed(fromClass.cast(datum), schema, logicalType);
+    case STRING:  return conversion.toCharSequence(fromClass.cast(datum), schema, logicalType);
+    case BYTES:   return conversion.toBytes(fromClass.cast(datum), schema, logicalType);
+    case INT:     return conversion.toInt(fromClass.cast(datum), schema, logicalType);
+    case LONG:    return conversion.toLong(fromClass.cast(datum), schema, logicalType);
+    case FLOAT:   return conversion.toFloat(fromClass.cast(datum), schema, logicalType);
+    case DOUBLE:  return conversion.toDouble(fromClass.cast(datum), schema, logicalType);
+    case BOOLEAN: return conversion.toBoolean(fromClass.cast(datum), schema, logicalType);
+    }
+    return datum;
+  }
+
+  /** Called to write data.*/
+  protected void writeWithoutConversion(Schema schema, Object datum, Encoder out)
+    throws IOException {
+    try {
+      switch (schema.getType()) {
+      case RECORD: writeRecord(schema, datum, out); break;
+      case ENUM:   writeEnum(schema, datum, out);   break;
+      case ARRAY:  writeArray(schema, datum, out);  break;
+      case MAP:    writeMap(schema, datum, out);    break;
+      case UNION:
+        int index = resolveUnion(schema, datum);
+        out.writeIndex(index);
+        write(schema.getTypes().get(index), datum, out);
+        break;
+      case FIXED:   writeFixed(schema, datum, out);   break;
+      case STRING:  writeString(schema, datum, out);  break;
+      case BYTES:   writeBytes(datum, out);           break;
+      case INT:     out.writeInt(((Number)datum).intValue()); break;
+      case LONG:    out.writeLong((Long)datum);       break;
+      case FLOAT:   out.writeFloat((Float)datum);     break;
+      case DOUBLE:  out.writeDouble((Double)datum);   break;
+      case BOOLEAN: out.writeBoolean((Boolean)datum); break;
+      case NULL:    out.writeNull();                  break;
+      default: error(schema,datum);
+      }
+    } catch (NullPointerException e) {
+      throw npe(e, " of "+schema.getFullName());
+    }
+  }
+
+  /** Helper method for adding a message to an NPE. */
+  protected NullPointerException npe(NullPointerException e, String s) {
+    NullPointerException result = new NullPointerException(e.getMessage()+s);
+    result.initCause(e.getCause() == null ? e : e.getCause());
+    return result;
+  }
+
+  /** Called to write a record.  May be overridden for alternate record
+   * representations.*/
+  protected void writeRecord(Schema schema, Object datum, Encoder out)
+    throws IOException {
+    Object state = data.getRecordState(datum, schema);
+    for (Field f : schema.getFields()) {
+      writeField(datum, f, out, state);
+    }
+  }
+  
+  /** Called to write a single field of a record. May be overridden for more 
+   * efficient or alternate implementations.*/
+  protected void writeField(Object datum, Field f, Encoder out, Object state) 
+      throws IOException {
+    Object value = data.getField(datum, f.name(), f.pos(), state);
+    try {
+      write(f.schema(), value, out);
+    } catch (NullPointerException e) {
+      throw npe(e, " in field " + f.name());
+    }
+  }
+  
+  /** Called to write an enum value.  May be overridden for alternate enum
+   * representations.*/
+  protected void writeEnum(Schema schema, Object datum, Encoder out)
+    throws IOException {
+    if (!data.isEnum(datum))
+      throw new AvroTypeException("Not an enum: "+datum);
+    out.writeEnum(schema.getEnumOrdinal(datum.toString()));
+  }
+  
+  /** Called to write an array.  May be overridden for alternate array
+   * representations.*/
+  protected void writeArray(Schema schema, Object datum, Encoder out)
+    throws IOException {
+    Schema element = schema.getElementType();
+    long size = getArraySize(datum);
+    long actualSize = 0;
+    out.writeArrayStart();
+    out.setItemCount(size);
+    for (Iterator<? extends Object> it = getArrayElements(datum); it.hasNext();) {
+      out.startItem();
+      write(element, it.next(), out);
+      actualSize++;
+    }
+    out.writeArrayEnd();
+    if (actualSize != size) {
+      throw new ConcurrentModificationException("Size of array written was " +
+          size + ", but number of elements written was " + actualSize + ". ");
+    }
+  }
+
+  /** Called to find the index for a datum within a union.  By default calls
+   * {@link GenericData#resolveUnion(Schema,Object)}.*/
+  protected int resolveUnion(Schema union, Object datum) {
+    return data.resolveUnion(union, datum);
+  }
+
+  /** Called by the default implementation of {@link #writeArray} to get the
+   * size of an array.  The default implementation is for {@link Collection}.*/
+  @SuppressWarnings("unchecked")
+  protected long getArraySize(Object array) {
+    return ((Collection) array).size();
+  }
+
+  /** Called by the default implementation of {@link #writeArray} to enumerate
+   * array elements.  The default implementation is for {@link Collection}.*/
+  @SuppressWarnings("unchecked")
+  protected Iterator<? extends Object> getArrayElements(Object array) {
+    return ((Collection) array).iterator();
+  }
+  
+  /** Called to write a map.  May be overridden for alternate map
+   * representations.*/
+  protected void writeMap(Schema schema, Object datum, Encoder out)
+    throws IOException {
+    Schema value = schema.getValueType();
+    int size = getMapSize(datum);
+    int actualSize = 0;
+    out.writeMapStart();
+    out.setItemCount(size);
+    for (Map.Entry<Object,Object> entry : getMapEntries(datum)) {
+      out.startItem();
+      writeString(entry.getKey().toString(), out);
+      write(value, entry.getValue(), out);
+      actualSize++;
+    }
+    out.writeMapEnd();
+    if (actualSize != size) {
+      throw new ConcurrentModificationException("Size of map written was " +
+          size + ", but number of entries written was " + actualSize + ". ");
+    }
+  }
+
+  /** Called by the default implementation of {@link #writeMap} to get the size
+   * of a map.  The default implementation is for {@link Map}.*/
+  @SuppressWarnings("unchecked")
+  protected int getMapSize(Object map) {
+    return ((Map) map).size();
+  }
+
+  /** Called by the default implementation of {@link #writeMap} to enumerate
+   * map elements.  The default implementation is for {@link Map}.*/
+  @SuppressWarnings("unchecked")
+  protected Iterable<Map.Entry<Object,Object>> getMapEntries(Object map) {
+    return ((Map) map).entrySet();
+  }
+  
+  /** Called to write a string.  May be overridden for alternate string
+   * representations.*/
+  protected void writeString(Schema schema, Object datum, Encoder out)
+    throws IOException {
+    writeString(datum, out);
+  }
+  /** Called to write a string.  May be overridden for alternate string
+   * representations.*/
+  protected void writeString(Object datum, Encoder out) throws IOException {
+    out.writeString((CharSequence) datum);
+  }
+
+  /** Called to write bytes.  May be overridden for alternate bytes
+   * representations.*/
+  protected void writeBytes(Object datum, Encoder out) throws IOException {
+    out.writeBytes((ByteBuffer)datum);
+  }
+
+  /** Called to write a fixed value.  May be overridden for alternate fixed
+   * representations.*/
+  protected void writeFixed(Schema schema, Object datum, Encoder out)
+    throws IOException {
+    out.writeFixed(((GenericFixed)datum).bytes(), 0, schema.getFixedSize());
+  }
+  
+  private void error(Schema schema, Object datum) {
+    throw new AvroTypeException("Not a "+schema+": "+datum);
+  }
+
+}
+
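A minimal usage sketch for the writer above (not part of the imported sources; the "User" schema literal and the class name WriteSketch are illustrative assumptions). It serializes one generic record to Avro binary through an EncoderFactory:

    import java.io.ByteArrayOutputStream;
    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.EncoderFactory;

    public class WriteSketch {
      public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
            + "{\"name\":\"name\",\"type\":\"string\"},"
            + "{\"name\":\"age\",\"type\":\"int\"}]}");
        GenericRecord user = new GenericData.Record(schema);
        user.put("name", "alice");
        user.put("age", 30);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder enc = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericRecord>(schema).write(user, enc);
        enc.flush();
        System.out.println(out.size() + " bytes");
      }
    }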
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericEnumSymbol.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericEnumSymbol.java
new file mode 100644
index 0000000..2b6619f
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericEnumSymbol.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.generic;
+
+/** An enum symbol. */
+public interface GenericEnumSymbol
+    extends GenericContainer, Comparable<GenericEnumSymbol> {
+  /** Return the symbol. */
+  String toString();
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericFixed.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericFixed.java
new file mode 100644
index 0000000..d9b697f
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericFixed.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.generic;
+
+/** Fixed-size data. */
+public interface GenericFixed extends GenericContainer {
+  /** Return the data. */
+  byte[] bytes();
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericRecord.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericRecord.java
new file mode 100644
index 0000000..aa8c193
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericRecord.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.generic;
+
+/** A generic instance of a record schema.  Fields are accessible by name as
+ * well as by index. */
+public interface GenericRecord extends IndexedRecord {
+  /** Set the value of a field given its name. */
+  void put(String key, Object v);
+  /** Return the value of a field given its name. */
+  Object get(String key);
+}
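For illustration (not part of the imported sources; the schema literal and the class name RecordSketch are assumptions), a GenericData.Record can be accessed through either interface, by name via GenericRecord or by position via IndexedRecord:

    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericRecord;

    public class RecordSketch {
      public static void main(String[] args) {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"P\",\"fields\":["
            + "{\"name\":\"name\",\"type\":\"string\"}]}");
        GenericRecord r = new GenericData.Record(schema);
        r.put("name", "alice");              // by field name (GenericRecord)
        r.put(0, "alice");                   // by position (IndexedRecord)
        System.out.println(r.get("name"));   // prints alice
      }
    }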
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/GenericRecordBuilder.java b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericRecordBuilder.java
new file mode 100644
index 0000000..2137104
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/GenericRecordBuilder.java
@@ -0,0 +1,260 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.generic;
+
+import java.io.IOException;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.data.RecordBuilderBase;
+import org.apache.avro.generic.GenericData.Record;
+
+/** A RecordBuilder for generic records. GenericRecordBuilder fills in default values
+ * for fields if they are not specified.  */
+public class GenericRecordBuilder extends RecordBuilderBase<Record> {
+  private final GenericData.Record record;
+  
+  /**
+   * Creates a GenericRecordBuilder for building Record instances.
+   * @param schema the schema associated with the record class.
+   */
+  public GenericRecordBuilder(Schema schema) {
+    super(schema, GenericData.get());
+    record = new GenericData.Record(schema);
+  }
+  
+  /**
+   * Creates a GenericRecordBuilder by copying an existing GenericRecordBuilder.
+   * @param other the GenericRecordBuilder to copy.
+   */
+  public GenericRecordBuilder(GenericRecordBuilder other) {
+    super(other, GenericData.get());
+    record = new GenericData.Record(other.record, /* deepCopy = */ true);
+  }
+  
+  /**
+   * Creates a GenericRecordBuilder by copying an existing record instance.
+   * @param other the record instance to copy.
+   */
+  public GenericRecordBuilder(Record other) {
+    super(other.getSchema(), GenericData.get());
+    record = new GenericData.Record(other, /* deepCopy = */ true);
+    
+    // Set all fields in the RecordBuilder that are set in the record
+    for (Field f : schema().getFields()) {
+      Object value = other.get(f.pos());
+      // Only set the value if it is not null, if the schema type is null, 
+      // or if the schema type is a union that accepts nulls.
+      if (isValidValue(f, value)) {
+        set(f, data().deepCopy(f.schema(), value));
+      }
+    }
+  }
+  
+  /**
+   * Gets the value of a field.
+   * @param fieldName the name of the field to get.
+   * @return the value of the field with the given name, or null if not set.
+   */
+  public Object get(String fieldName) {
+    return get(schema().getField(fieldName));
+  }
+  
+  /**
+   * Gets the value of a field.
+   * @param field the field to get.
+   * @return the value of the given field, or null if not set.
+   */
+  public Object get(Field field) {
+    return get(field.pos());
+  }
+  
+  /**
+   * Gets the value of a field.
+   * @param pos the position of the field to get.
+   * @return the value of the field with the given position, or null if not set.
+   */
+  protected Object get(int pos) {
+    return record.get(pos);
+  }
+  
+  /**
+   * Sets the value of a field.
+   * @param fieldName the name of the field to set.
+   * @param value the value to set.
+   * @return a reference to the RecordBuilder.
+   */
+  public GenericRecordBuilder set(String fieldName, Object value) {
+    return set(schema().getField(fieldName), value);
+  }
+  
+  /**
+   * Sets the value of a field.
+   * @param field the field to set.
+   * @param value the value to set.
+   * @return a reference to the RecordBuilder.
+   */
+  public GenericRecordBuilder set(Field field, Object value) {
+    return set(field, field.pos(), value);
+  }
+  
+  /**
+   * Sets the value of a field.
+   * @param pos the field to set.
+   * @param value the value to set.
+   * @return a reference to the RecordBuilder.
+   */
+  protected GenericRecordBuilder set(int pos, Object value) {
+    return set(fields()[pos], pos, value);
+  }
+  
+  /**
+   * Sets the value of a field.
+   * @param field the field to set.
+   * @param pos the position of the field.
+   * @param value the value to set.
+   * @return a reference to the RecordBuilder.
+   */
+  private GenericRecordBuilder set(Field field, int pos, Object value) {
+    validate(field, value);
+    record.put(pos, value);
+    fieldSetFlags()[pos] = true;
+    return this;
+  }
+  
+  /**
+   * Checks whether a field has been set.
+   * @param fieldName the name of the field to check.
+   * @return true if the given field has been set; false otherwise.
+   */
+  public boolean has(String fieldName) {
+    return has(schema().getField(fieldName));
+  }
+  
+  /**
+   * Checks whether a field has been set.
+   * @param field the field to check.
+   * @return true if the given field has been set; false otherwise.
+   */
+  public boolean has(Field field) {
+    return has(field.pos());
+  }
+  
+  /**
+   * Checks whether a field has been set.
+   * @param pos the position of the field to check.
+   * @return true if the given field has been set; false otherwise.
+   */
+  protected boolean has(int pos) {
+    return fieldSetFlags()[pos];
+  }
+  
+  /**
+   * Clears the value of the given field.
+   * @param fieldName the name of the field to clear.
+   * @return a reference to the RecordBuilder.
+   */
+  public GenericRecordBuilder clear(String fieldName) {
+    return clear(schema().getField(fieldName));
+  }
+  
+  /**
+   * Clears the value of the given field.
+   * @param field the field to clear.
+   * @return a reference to the RecordBuilder.
+   */
+  public GenericRecordBuilder clear(Field field) {
+    return clear(field.pos());
+  }
+  
+  /**
+   * Clears the value of the given field.
+   * @param pos the position of the field to clear.
+   * @return a reference to the RecordBuilder.
+   */
+  protected GenericRecordBuilder clear(int pos) {
+    record.put(pos, null);
+    fieldSetFlags()[pos] = false;
+    return this;
+  }
+  
+  @Override
+  public Record build() {
+    Record record;
+    try {
+      record = new GenericData.Record(schema());
+    } catch (Exception e) {
+      throw new AvroRuntimeException(e);
+    }
+    
+    for (Field field : fields()) {
+      Object value;
+      try {
+        value = getWithDefault(field);
+      } catch(IOException e) {
+        throw new AvroRuntimeException(e);
+      }
+      if (value != null) {
+        record.put(field.pos(), value);
+      }
+    }
+    
+    return record;
+  }
+  
+  /**
+   * Gets the value of the given field.
+   * If the field has been set, the set value is returned (even if it's null).
+   * If the field hasn't been set and has a default value, the default value 
+   * is returned.
+   * @param field the field whose value should be retrieved.
+   * @return the value set for the given field, the field's default value, 
+   * or null.
+   * @throws IOException
+   */
+  private Object getWithDefault(Field field) throws IOException {
+    return fieldSetFlags()[field.pos()] ? 
+        record.get(field.pos()) : defaultValue(field);
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = super.hashCode();
+    result = prime * result + ((record == null) ? 0 : record.hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (!super.equals(obj))
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    GenericRecordBuilder other = (GenericRecordBuilder) obj;
+    if (record == null) {
+      if (other.record != null)
+        return false;
+    } else if (!record.equals(other.record))
+      return false;
+    return true;
+  }
+}
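A usage sketch for the builder above (not part of the imported sources; the schema literal and the class name BuilderSketch are assumptions), showing the default-value behavior of build():

    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericRecordBuilder;

    public class BuilderSketch {
      public static void main(String[] args) {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
            + "{\"name\":\"name\",\"type\":\"string\"},"
            + "{\"name\":\"age\",\"type\":\"int\",\"default\":0}]}");
        GenericData.Record user = new GenericRecordBuilder(schema)
            .set("name", "alice")            // "age" is deliberately left unset
            .build();                        // build() falls back to the default
        System.out.println(user.get("age")); // prints 0
      }
    }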
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/IndexedRecord.java b/lang/java/avro/src/main/java/org/apache/avro/generic/IndexedRecord.java
new file mode 100644
index 0000000..47d769e
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/IndexedRecord.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.generic;
+
+/** A record implementation that permits field access by integer index.*/
+public interface IndexedRecord extends GenericContainer {
+  /** Set the value of a field given its position in the schema.
+   * <p>This method is not meant to be called by user code, but only by {@link
+   * org.apache.avro.io.DatumReader} implementations. */
+  void put(int i, Object v);
+  /** Return the value of a field given its position in the schema.
+   * <p>This method is not meant to be called by user code, but only by {@link
+   * org.apache.avro.io.DatumWriter} implementations. */
+  Object get(int i);
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/generic/package.html b/lang/java/avro/src/main/java/org/apache/avro/generic/package.html
new file mode 100644
index 0000000..a879a3b
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/generic/package.html
@@ -0,0 +1,43 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+A generic representation for Avro data.
+
+<p>This representation is best for applications which deal with
+  dynamic data, whose schemas are not known until runtime.
+
+<p>Avro schemas are mapped to Java types as follows:
+<ul>
+<li>Schema records are implemented as {@link org.apache.avro.generic.GenericRecord}.
+<li>Schema enums are implemented as {@link org.apache.avro.generic.GenericEnumSymbol}.
+<li>Schema arrays are implemented as {@link java.util.Collection}.
+<li>Schema maps are implemented as {@link java.util.Map}.
+<li>Schema fixed are implemented as {@link org.apache.avro.generic.GenericFixed}.
+<li>Schema strings are implemented as {@link java.lang.CharSequence}.
+<li>Schema bytes are implemented as {@link java.nio.ByteBuffer}.
+<li>Schema ints are implemented as {@link java.lang.Integer}.
+<li>Schema longs are implemented as {@link java.lang.Long}.
+<li>Schema floats are implemented as {@link java.lang.Float}.
+<li>Schema doubles are implemented as {@link java.lang.Double}.
+<li>Schema booleans are implemented as {@link java.lang.Boolean}.
+</ul>
+
+</body>
+</html>
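A short sketch of the mapping above in practice (not part of the imported sources; the schema literal and the class name MappingSketch are assumptions):

    import java.nio.ByteBuffer;
    import java.util.Arrays;
    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericRecord;

    public class MappingSketch {
      public static void main(String[] args) {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"Blob\",\"fields\":["
            + "{\"name\":\"payload\",\"type\":\"bytes\"},"
            + "{\"name\":\"tags\",\"type\":{\"type\":\"array\",\"items\":\"string\"}}]}");
        GenericRecord blob = new GenericData.Record(schema);
        blob.put("payload", ByteBuffer.wrap(new byte[] {1, 2, 3})); // bytes -> ByteBuffer
        blob.put("tags", Arrays.asList("a", "b"));                  // array -> Collection
      }
    }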
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/BinaryData.java b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryData.java
new file mode 100644
index 0000000..18cbf8a
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryData.java
@@ -0,0 +1,428 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.generic.GenericDatumReader;
+
+/** Utilities for binary-encoded data. */
+public class BinaryData {
+
+  private BinaryData() {}                      // no public ctor
+
+  private static class Decoders {
+    private final BinaryDecoder d1, d2;
+    public Decoders() {
+       this.d1 = new BinaryDecoder(new byte[0], 0, 0);
+       this.d2 = new BinaryDecoder(new byte[0], 0, 0);
+    }
+    public void set(byte[] data1, int off1, int len1, 
+                    byte[] data2, int off2, int len2) {
+      d1.setBuf(data1, off1, len1);
+      d2.setBuf(data2, off2, len2);
+    }
+    public void clear() {
+      d1.clearBuf();
+      d2.clearBuf();
+    }
+  }
+
+  private static final ThreadLocal<Decoders> DECODERS
+    = new ThreadLocal<Decoders>() {
+    @Override protected Decoders initialValue() { return new Decoders(); }
+  };
+
+  /** Compare binary encoded data.  If equal, return zero; if greater-than,
+   * return 1; if less-than, return -1.  Order is consistent with that of {@link
+   * org.apache.avro.generic.GenericData#compare(Object, Object, Schema)}.*/
+  public static int compare(byte[] b1, int s1,
+                            byte[] b2, int s2,
+                            Schema schema) {
+    return compare(b1, s1, b1.length - s1, b2, s2, b2.length - s2, schema);
+  }
+
+  /** Compare binary encoded data.  If equal, return zero; if greater-than,
+   * return 1; if less-than, return -1.  Order is consistent with that of {@link
+   * org.apache.avro.generic.GenericData#compare(Object, Object, Schema)}.*/
+  public static int compare(byte[] b1, int s1, int l1,
+                            byte[] b2, int s2, int l2,
+                            Schema schema) {
+    Decoders decoders = DECODERS.get();
+    decoders.set(b1, s1, l1, b2, s2, l2);
+    try {
+      return compare(decoders, schema);
+    } catch (IOException e) {
+      throw new AvroRuntimeException(e);
+    } finally {
+      decoders.clear();
+    }
+  }
+
+  /** Compare the encoded data in the two decoders.  If equal, return zero;
+   * if greater-than, return a positive value; if less-than, return a negative
+   * value. */
+  private static int compare(Decoders d, Schema schema) throws IOException {
+    Decoder d1 = d.d1; Decoder d2 = d.d2;
+    switch (schema.getType()) {
+    case RECORD: {
+      for (Field field : schema.getFields()) {
+        if (field.order() == Field.Order.IGNORE) {
+          GenericDatumReader.skip(field.schema(), d1);
+          GenericDatumReader.skip(field.schema(), d2);
+          continue;
+        }
+        int c = compare(d, field.schema());
+        if (c != 0)
+          return (field.order() != Field.Order.DESCENDING) ? c : -c;
+      }
+      return 0;
+    }
+    case ENUM: case INT: {
+      int i1 = d1.readInt();
+      int i2 = d2.readInt();
+      return i1 == i2 ? 0 : (i1 > i2 ? 1 : -1);
+    }
+    case LONG: {
+      long l1 = d1.readLong();
+      long l2 = d2.readLong();
+      return l1 == l2 ? 0 : (l1 > l2 ? 1 : -1);
+    }
+    case ARRAY: {
+      long i = 0;                                 // position in array
+      long r1 = 0, r2 = 0;                        // remaining in current block
+      long l1 = 0, l2 = 0;                        // total array length
+      while (true) {
+        if (r1 == 0) {                            // refill block(s)
+          r1 = d1.readLong();
+          if (r1 < 0) { r1 = -r1; d1.readLong(); }
+          l1 += r1;
+        }
+        if (r2 == 0) {
+          r2 = d2.readLong();
+          if (r2 < 0) { r2 = -r2; d2.readLong(); }
+          l2 += r2;
+        }
+        if (r1 == 0 || r2 == 0)                   // empty block: done
+          return (l1 == l2) ? 0 : ((l1 > l2) ? 1 : -1);
+        long l = Math.min(l1, l2);
+        while (i < l) {                           // compare to end of block
+          int c = compare(d, schema.getElementType());
+          if (c != 0) return c;
+          i++; r1--; r2--;
+        }
+      }
+    }
+    case MAP:
+      throw new AvroRuntimeException("Can't compare maps!");
+    case UNION: {
+      int i1 = d1.readInt();
+      int i2 = d2.readInt();
+      if (i1 == i2) {
+        return compare(d, schema.getTypes().get(i1));
+      } else {
+        return i1 - i2;
+      }
+    }
+    case FIXED: {
+      int size = schema.getFixedSize();
+      int c = compareBytes(d.d1.getBuf(), d.d1.getPos(), size,
+                           d.d2.getBuf(), d.d2.getPos(), size);
+      d.d1.skipFixed(size);
+      d.d2.skipFixed(size);
+      return c;
+    }
+    case STRING: case BYTES: {
+      int l1 = d1.readInt();
+      int l2 = d2.readInt();
+      int c = compareBytes(d.d1.getBuf(), d.d1.getPos(), l1,
+                           d.d2.getBuf(), d.d2.getPos(), l2);
+      d.d1.skipFixed(l1);
+      d.d2.skipFixed(l2);
+      return c;
+    }
+    case FLOAT: {
+      float f1 = d1.readFloat();
+      float f2 = d2.readFloat();
+      return (f1 == f2) ? 0 : ((f1 > f2) ? 1 : -1);
+    }
+    case DOUBLE: {
+      double f1 = d1.readDouble();
+      double f2 = d2.readDouble();
+      return (f1 == f2) ? 0 : ((f1 > f2) ? 1 : -1);
+    }
+    case BOOLEAN:
+      boolean b1 = d1.readBoolean();
+      boolean b2 = d2.readBoolean();
+      return (b1 == b2) ? 0 : (b1 ? 1 : -1);
+    case NULL:
+      return 0;
+    default:
+      throw new AvroRuntimeException("Unexpected schema to compare!");
+    }
+  }
+
+  /** Lexicographically compare bytes.  If equal, return zero; if greater-than,
+   * return a positive value; if less-than, return a negative value. */
+  public static int compareBytes(byte[] b1, int s1, int l1,
+                                 byte[] b2, int s2, int l2) {
+    int end1 = s1 + l1;
+    int end2 = s2 + l2;
+    for (int i = s1, j = s2; i < end1 && j < end2; i++, j++) {
+      int a = (b1[i] & 0xff);
+      int b = (b2[j] & 0xff);
+      if (a != b) {
+        return a - b;
+      }
+    }
+    return l1 - l2;
+  }
+
+  private static class HashData {
+    private final BinaryDecoder decoder;
+    public HashData() {
+      this.decoder = new BinaryDecoder(new byte[0], 0, 0);
+    }
+    public void set(byte[] bytes, int start, int len) {
+      this.decoder.setBuf(bytes, start, len);
+    }
+  }
+
+  private static final ThreadLocal<HashData> HASH_DATA
+    = new ThreadLocal<HashData>() {
+    @Override protected HashData initialValue() { return new HashData(); }
+  };
+
+  /** Hash binary encoded data. Consistent with {@link
+   * org.apache.avro.generic.GenericData#hashCode(Object, Schema)}.*/
+  public static int hashCode(byte[] bytes, int start, int length,
+                             Schema schema) {
+    HashData data = HASH_DATA.get();
+    data.set(bytes, start, length);
+    try {
+      return hashCode(data, schema);
+    } catch (IOException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  private static int hashCode(HashData data, Schema schema)
+    throws IOException {
+    Decoder decoder = data.decoder;
+    switch (schema.getType()) {
+    case RECORD: {
+      int hashCode = 1;
+      for (Field field : schema.getFields()) {
+        if (field.order() == Field.Order.IGNORE) {
+          GenericDatumReader.skip(field.schema(), decoder);
+          continue;
+        }
+        hashCode = hashCode*31 + hashCode(data, field.schema());
+      }
+      return hashCode;
+    }
+    case ENUM: case INT:
+      return decoder.readInt();
+    case FLOAT:
+      return Float.floatToIntBits(decoder.readFloat());
+    case LONG: {
+      long l = decoder.readLong();
+      return (int)(l^(l>>>32));
+    }
+    case DOUBLE: {
+      long l = Double.doubleToLongBits(decoder.readDouble());
+      return (int)(l^(l>>>32));
+    }
+    case ARRAY: {
+      Schema elementType = schema.getElementType();
+      int hashCode = 1;
+      for (long l = decoder.readArrayStart(); l != 0; l = decoder.arrayNext())
+        for (long i = 0; i < l; i++)
+          hashCode = hashCode*31 + hashCode(data, elementType);
+      return hashCode;
+    }
+    case MAP:
+      throw new AvroRuntimeException("Can't hashCode maps!");
+    case UNION:
+      return hashCode(data, schema.getTypes().get(decoder.readInt()));
+    case FIXED:
+      return hashBytes(1, data, schema.getFixedSize(), false);
+    case STRING:
+      return hashBytes(0, data, decoder.readInt(), false);
+    case BYTES:
+      return hashBytes(1, data, decoder.readInt(), true);
+    case BOOLEAN:
+      return decoder.readBoolean() ? 1231 : 1237;
+    case NULL:
+      return 0;
+    default:
+      throw new AvroRuntimeException("Unexpected schema to hashCode!");
+    }
+  }
+
+  private static int hashBytes(int init, HashData data, int len, boolean rev)
+    throws IOException {
+    int hashCode = init;
+    byte[] bytes = data.decoder.getBuf();
+    int start = data.decoder.getPos();
+    int end = start+len;
+    if (rev) 
+      for (int i = end-1; i >= start; i--)
+        hashCode = hashCode*31 + bytes[i];
+    else
+      for (int i = start; i < end; i++)
+        hashCode = hashCode*31 + bytes[i];
+    data.decoder.skipFixed(len);
+    return hashCode;
+  }
+
+  /** Skip a binary-encoded long, returning the position after it. */
+  public static int skipLong(byte[] bytes, int start) {
+    int i = start;
+    for (int b = bytes[i++]; ((b & 0x80) != 0); b = bytes[i++]) {}
+    return i;
+  }
+
+  /** Encode a boolean to the byte array at the given position. Will throw
+   * IndexOutOfBounds if the position is not valid.
+   * @return The number of bytes written to the buffer, 1.
+   */
+  public static int encodeBoolean(boolean b, byte[] buf, int pos) {
+    buf[pos] = b ? (byte) 1 : (byte) 0;
+    return 1;
+  }
+
+  /** Encode an integer to the byte array at the given position. Will throw
+   * IndexOutOfBounds if it overflows. Users should ensure that there are at
+   * least 5 bytes left in the buffer before calling this method.
+   * @return The number of bytes written to the buffer, between 1 and 5.
+   */
+  public static int encodeInt(int n, byte[] buf, int pos) {
+    // move sign to low-order bit, and flip others if negative
+    n = (n << 1) ^ (n >> 31);
+    int start = pos;
+    if ((n & ~0x7F) != 0) {
+      buf[pos++] = (byte)((n | 0x80) & 0xFF);
+      n >>>= 7;
+      if (n > 0x7F) {
+        buf[pos++] = (byte)((n | 0x80) & 0xFF);
+        n >>>= 7;
+        if (n > 0x7F) {
+          buf[pos++] = (byte)((n | 0x80) & 0xFF);
+          n >>>= 7;
+          if (n > 0x7F) {
+            buf[pos++] = (byte)((n | 0x80) & 0xFF);
+            n >>>= 7;
+          }
+        }
+      }
+    } 
+    buf[pos++] = (byte) n;
+    return pos - start;
+  }
+
+  /** Encode a long to the byte array at the given position. Will throw
+   * IndexOutOfBounds if it overflows. Users should ensure that there are at
+   * least 10 bytes left in the buffer before calling this method.
+   * @return The number of bytes written to the buffer, between 1 and 10.
+   */
+  public static int encodeLong(long n, byte[] buf, int pos) {
+    // move sign to low-order bit, and flip others if negative
+    n = (n << 1) ^ (n >> 63);
+    int start = pos;
+    if ((n & ~0x7FL) != 0) {
+      buf[pos++] = (byte)((n | 0x80) & 0xFF);
+      n >>>= 7;
+      if (n > 0x7F) {
+        buf[pos++] = (byte)((n | 0x80) & 0xFF);
+        n >>>= 7;
+        if (n > 0x7F) {
+          buf[pos++] = (byte)((n | 0x80) & 0xFF);
+          n >>>= 7;
+          if (n > 0x7F) {
+            buf[pos++] = (byte)((n | 0x80) & 0xFF);
+            n >>>= 7;
+            if (n > 0x7F) {
+              buf[pos++] = (byte)((n | 0x80) & 0xFF);
+              n >>>= 7;
+              if (n > 0x7F) {
+                buf[pos++] = (byte)((n | 0x80) & 0xFF);
+                n >>>= 7;
+                if (n > 0x7F) {
+                  buf[pos++] = (byte)((n | 0x80) & 0xFF);
+                  n >>>= 7;
+                  if (n > 0x7F) {
+                    buf[pos++] = (byte)((n | 0x80) & 0xFF);
+                    n >>>= 7;
+                    if (n > 0x7F) {
+                      buf[pos++] = (byte)((n | 0x80) & 0xFF);
+                      n >>>= 7;
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+    buf[pos++] = (byte) n;
+    return pos - start;
+  }
+
+  /** Encode a float to the byte array at the given position. Will throw
+   * IndexOutOfBounds if it overflows. Users should ensure that there are at
+   * least 4 bytes left in the buffer before calling this method.
+   * @return The number of bytes written to the buffer, 4.
+   */
+  public static int encodeFloat(float f, byte[] buf, int pos) {
+    int len = 1;
+    int bits = Float.floatToRawIntBits(f);
+    // hotspot compiler works well with this variant 
+    buf[pos]         = (byte)((bits       ) & 0xFF);
+    buf[pos + len++] = (byte)((bits >>>  8) & 0xFF);
+    buf[pos + len++] = (byte)((bits >>> 16) & 0xFF);
+    buf[pos + len++] = (byte)((bits >>> 24) & 0xFF);
+    return 4;
+  }
+
+  /** Encode a double to the byte array at the given position. Will throw
+   * IndexOutOfBounds if it overflows. Users should ensure that there are at
+   * least 8 bytes left in the buffer before calling this method.
+   * @return The number of bytes written to the buffer, 8.
+   */
+  public static int encodeDouble(double d, byte[] buf, int pos) {
+    long bits = Double.doubleToRawLongBits(d);
+    int first = (int)(bits & 0xFFFFFFFF);
+    int second = (int)((bits >>> 32) & 0xFFFFFFFF);
+    // the compiler seems to execute this ordering best, likely due to
+    // register allocation -- the lifetime of constants is minimized.
+    buf[pos]     = (byte)((first        ) & 0xFF);
+    buf[pos + 4] = (byte)((second       ) & 0xFF);
+    buf[pos + 5] = (byte)((second >>>  8) & 0xFF);
+    buf[pos + 1] = (byte)((first >>>   8) & 0xFF);
+    buf[pos + 2] = (byte)((first >>>  16) & 0xFF);
+    buf[pos + 6] = (byte)((second >>> 16) & 0xFF);
+    buf[pos + 7] = (byte)((second >>> 24) & 0xFF);
+    buf[pos + 3] = (byte)((first >>>  24) & 0xFF);
+    return 8;
+  }
+
+}
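A sketch of the schema-aware comparison above (not part of the imported sources; the class name CompareSketch is an assumption). Because compare() decodes values rather than comparing raw bytes, zig-zag encoded integers order numerically:

    import org.apache.avro.Schema;
    import org.apache.avro.io.BinaryData;

    public class CompareSketch {
      public static void main(String[] args) {
        Schema intSchema = Schema.create(Schema.Type.INT);
        byte[] a = new byte[5], b = new byte[5];   // 5 bytes is the max for an int
        int lenA = BinaryData.encodeInt(-2, a, 0); // zig-zag: -2 encodes as 0x03
        int lenB = BinaryData.encodeInt(1, b, 0);  // zig-zag:  1 encodes as 0x02
        // Raw bytes would order 1 before -2 (0x02 < 0x03); decoding first
        // restores numeric order:
        int c = BinaryData.compare(a, 0, lenA, b, 0, lenB, intSchema);
        System.out.println(c < 0);                 // true: -2 < 1
      }
    }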
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/BinaryDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryDecoder.java
new file mode 100644
index 0000000..7877002
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryDecoder.java
@@ -0,0 +1,992 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.util.Utf8;
+
+/** An {@link Decoder} for binary-format data.
+ * <p/>
+ * Instances are created using {@link DecoderFactory}.
+ * <p/>
+ * This class may read-ahead and buffer bytes from the source beyond what is
+ * required to serve its read methods.
+ * The number of unused bytes in the buffer can be accessed by
+ * inputStream().available(), if the BinaryDecoder is not 'direct'.
+ * 
+ * @see Encoder
+ */
+
+public class BinaryDecoder extends Decoder {
+  private ByteSource source = null;
+  // we keep the buffer and its state variables in this class and not in a
+  // container class for performance reasons. This improves performance
+  // over a container object by about 5% to 15%
+  // for example, we could have a FastBuffer class with these state variables
+  // and keep a private FastBuffer member here. This simplifies the
+  // "detach source" code and source access to the buffer, but
+  // hurts performance.
+  private byte[] buf = null;
+  private int minPos = 0;
+  private int pos = 0;
+  private int limit = 0;
+
+  byte[] getBuf() { return buf; }
+  int getPos() { return pos; }
+  int getLimit() { return limit; }
+
+  void setBuf(byte[] buf, int pos, int len) {
+    this.buf = buf;
+    this.pos = pos;
+    this.limit = pos+len;
+  }
+  void clearBuf() { this.buf = null; }
+
+  /** protected constructor for child classes */
+  protected BinaryDecoder() {
+  }
+
+  BinaryDecoder(InputStream in, int bufferSize) {
+    super();
+    configure(in, bufferSize);
+  }
+
+  BinaryDecoder(byte[] data, int offset, int length) {
+    super();
+    configure(data, offset, length);
+  }
+
+  BinaryDecoder configure(InputStream in, int bufferSize) {
+    configureSource(bufferSize, new InputStreamByteSource(in));
+    return this;
+  }
+
+  BinaryDecoder configure(byte[] data, int offset, int length) {
+    configureSource(DecoderFactory.DEFAULT_BUFFER_SIZE, new ByteArrayByteSource(
+        data, offset, length));
+    return this;
+  }
+
+  /**
+   * Initializes this decoder with a new ByteSource. Detaches the old source (if
+   * it exists) from this Decoder. The old source's state no longer depends on
+   * this Decoder and its InputStream interface will continue to drain the
+   * remaining buffer and source data.
+   * <p/>
+   * The decoder will read from the new source. The source will generally
+   * replace the buffer with its own. If the source allocates a new buffer, it
+   * will create it with size bufferSize.
+   */
+  private void configureSource(int bufferSize, ByteSource source) {
+    if (null != this.source) {
+      this.source.detach();
+    }
+    source.attach(bufferSize, this);
+    this.source = source;
+  }
+
+  @Override
+  public void readNull() throws IOException {
+  }
+
+  @Override
+  public boolean readBoolean() throws IOException {
+    // inlined, shorter version of ensureBounds
+    if (limit == pos) {
+      limit = source.tryReadRaw(buf, 0, buf.length);
+      pos = 0;
+      if (limit == 0) {
+        throw new EOFException();
+      }
+    }
+    int n = buf[pos++] & 0xff;
+    return n == 1;
+  }
+
+  @Override
+  public int readInt() throws IOException {
+    ensureBounds(5); // won't throw index out of bounds
+    int len = 1;
+    int b = buf[pos] & 0xff;
+    int n = b & 0x7f;
+    if (b > 0x7f) {
+      b = buf[pos + len++] & 0xff;
+      n ^= (b & 0x7f) << 7;
+      if (b > 0x7f) {
+        b = buf[pos + len++] & 0xff;
+        n ^= (b & 0x7f) << 14;
+        if (b > 0x7f) {
+          b = buf[pos + len++] & 0xff;
+          n ^= (b & 0x7f) << 21;
+          if (b > 0x7f) {
+            b = buf[pos + len++] & 0xff;
+            n ^= (b & 0x7f) << 28;
+            if (b > 0x7f) {
+              throw new IOException("Invalid int encoding");
+            }
+          }
+        }
+      }
+    }
+    pos += len;
+    if (pos > limit) {
+      throw new EOFException();
+    }
+    return (n >>> 1) ^ -(n & 1); // back to two's-complement
+  }
+
+  @Override
+  public long readLong() throws IOException {
+    ensureBounds(10);
+    int b = buf[pos++] & 0xff;
+    int n = b & 0x7f;
+    long l;
+    if (b > 0x7f) {
+      b = buf[pos++] & 0xff;
+      n ^= (b & 0x7f) << 7;
+      if (b > 0x7f) {
+        b = buf[pos++] & 0xff;
+        n ^= (b & 0x7f) << 14;
+        if (b > 0x7f) {
+          b = buf[pos++] & 0xff;
+          n ^= (b & 0x7f) << 21;
+          if (b > 0x7f) {
+            // only the low 28 bits can be set, so this won't carry
+            // the sign bit to the long
+            l = innerLongDecode((long)n);
+          } else {
+            l = n;
+          }
+        } else {
+          l = n;
+        }
+      } else {
+        l = n;
+      }
+    } else {
+      l = n;
+    }
+    if (pos > limit) {
+      throw new EOFException();
+    }
+    return (l >>> 1) ^ -(l & 1); // back to two's-complement
+  }
+  
+  // splitting readLong up makes it faster because the JVM does more
+  // optimizations on small methods
+  private long innerLongDecode(long l) throws IOException {
+    int len = 1;
+    int b = buf[pos] & 0xff;
+    l ^= (b & 0x7fL) << 28;
+    if (b > 0x7f) {
+      b = buf[pos + len++] & 0xff;
+      l ^= (b & 0x7fL) << 35;
+      if (b > 0x7f) {
+        b = buf[pos + len++] & 0xff;
+        l ^= (b & 0x7fL) << 42;
+        if (b > 0x7f) {
+          b = buf[pos + len++] & 0xff;
+          l ^= (b & 0x7fL) << 49;
+          if (b > 0x7f) {
+            b = buf[pos + len++] & 0xff;
+            l ^= (b & 0x7fL) << 56;
+            if (b > 0x7f) {
+              b = buf[pos + len++] & 0xff;
+              l ^= (b & 0x7fL) << 63;
+              if (b > 0x7f) {
+                throw new IOException("Invalid long encoding");
+              }
+            }
+          }
+        }
+      }
+    }
+    pos += len;
+    return l;
+  }
+
+  @Override
+  public float readFloat() throws IOException {
+    ensureBounds(4);
+    int len = 1;
+    int n = (buf[pos] & 0xff) | ((buf[pos + len++] & 0xff) << 8)
+        | ((buf[pos + len++] & 0xff) << 16) | ((buf[pos + len++] & 0xff) << 24);
+    if ((pos + 4) > limit) {
+      throw new EOFException();
+    }
+    pos += 4;
+    return Float.intBitsToFloat(n);
+  }
+
+  @Override
+  public double readDouble() throws IOException {
+    ensureBounds(8);
+    int len = 1;
+    int n1 = (buf[pos] & 0xff) | ((buf[pos + len++] & 0xff) << 8)
+        | ((buf[pos + len++] & 0xff) << 16) | ((buf[pos + len++] & 0xff) << 24);
+    int n2 = (buf[pos + len++] & 0xff) | ((buf[pos + len++] & 0xff) << 8)
+        | ((buf[pos + len++] & 0xff) << 16) | ((buf[pos + len++] & 0xff) << 24);
+    if ((pos + 8) > limit) {
+      throw new EOFException();
+    }
+    pos += 8;
+    return Double.longBitsToDouble((((long) n1) & 0xffffffffL)
+        | (((long) n2) << 32));
+  }
+
+  @Override
+  public Utf8 readString(Utf8 old) throws IOException {
+    int length = readInt();
+    Utf8 result = (old != null ? old : new Utf8());
+    result.setByteLength(length);
+    if (0 != length) {
+      doReadBytes(result.getBytes(), 0, length);
+    }
+    return result;
+  }
+  
+  private final Utf8 scratchUtf8 = new Utf8();
+
+  @Override
+  public String readString() throws IOException {
+    return readString(scratchUtf8).toString();
+  }
+
+  @Override
+  public void skipString() throws IOException {
+    doSkipBytes(readInt());
+  }
+
+  @Override
+  public ByteBuffer readBytes(ByteBuffer old) throws IOException {
+    int length = readInt();
+    ByteBuffer result;
+    if (old != null && length <= old.capacity()) {
+      result = old;
+      result.clear();
+    } else {
+      result = ByteBuffer.allocate(length);
+    }
+    doReadBytes(result.array(), result.position(), length);
+    result.limit(length);
+    return result;
+  }
+
+  @Override
+  public void skipBytes() throws IOException {
+    doSkipBytes(readInt());
+  }
+
+  @Override
+  public void readFixed(byte[] bytes, int start, int length) throws IOException {
+    doReadBytes(bytes, start, length);
+  }
+
+  @Override
+  public void skipFixed(int length) throws IOException {
+    doSkipBytes(length);
+  }
+
+  @Override
+  public int readEnum() throws IOException {
+    return readInt();
+  }
+
+  protected void doSkipBytes(long length) throws IOException {
+    int remaining = limit - pos;
+    if (length <= remaining) {
+      pos += length;
+    } else {
+      limit = pos = 0;
+      length -= remaining;
+      source.skipSourceBytes(length);
+    }
+  }
+
+  /**
+   * Reads <tt>length</tt> bytes into <tt>bytes</tt> starting at <tt>start</tt>.
+   * 
+   * @throws EOFException
+   *           If there are not enough bytes in the source.
+   * @throws IOException
+   */
+  protected void doReadBytes(byte[] bytes, int start, int length)
+      throws IOException {
+    if (length < 0)
+      throw new AvroRuntimeException("Malformed data. Length is negative: "
+                                     + length);
+    int remaining = limit - pos;
+    if (length <= remaining) {
+      System.arraycopy(buf, pos, bytes, start, length);
+      pos += length;
+    } else {
+      // read the rest of the buffer
+      System.arraycopy(buf, pos, bytes, start, remaining);
+      start += remaining;
+      length -= remaining;
+      pos = limit;
+      // finish from the byte source
+      source.readRaw(bytes, start, length);
+    }
+  }
+
+  /**
+   * Returns the number of items to follow in the current array or map. Returns
+   * 0 if there are no more items in the current array and the array/map has
+   * ended.
+   * 
+   * @throws IOException
+   */
+  protected long doReadItemCount() throws IOException {
+    long result = readLong();
+    if (result < 0) {
+      readLong(); // Consume byte-count if present
+      result = -result;
+    }
+    return result;
+  }
+
+  /**
+   * Reads the count of items in the current array or map and skip those items,
+   * if possible. If it could skip the items, keep repeating until there are no
+   * more items left in the array or map. If items cannot be skipped (because
+   * byte count to skip is not found in the stream) return the count of the
+   * items found. The client needs to skip the items individually.
+   * 
+   * @return Zero if there are no more items to skip and end of array/map is
+   *         reached. Positive number if some items are found that cannot be
+   *         skipped and the client needs to skip them individually.
+   * @throws IOException
+   */
+  private long doSkipItems() throws IOException {
+    long result = readInt();
+    while (result < 0) {
+      long bytecount = readLong();
+      doSkipBytes(bytecount);
+      result = readInt();
+    }
+    return result;
+  }
+
+  @Override
+  public long readArrayStart() throws IOException {
+    return doReadItemCount();
+  }
+
+  @Override
+  public long arrayNext() throws IOException {
+    return doReadItemCount();
+  }
+
+  @Override
+  public long skipArray() throws IOException {
+    return doSkipItems();
+  }
+
+  @Override
+  public long readMapStart() throws IOException {
+    return doReadItemCount();
+  }
+
+  @Override
+  public long mapNext() throws IOException {
+    return doReadItemCount();
+  }
+
+  @Override
+  public long skipMap() throws IOException {
+    return doSkipItems();
+  }
+
+  @Override
+  public int readIndex() throws IOException {
+    return readInt();
+  }
+
+  /**
+   * Returns true if the current BinaryDecoder is at the end of its source data and
+   * cannot read any further without throwing an EOFException or other
+   * IOException.
+   * <p/>
+   * Not all implementations of BinaryDecoder support isEnd(). Implementations that do
+   * not support isEnd() will throw a
+   * {@link java.lang.UnsupportedOperationException}.
+   */
+  public boolean isEnd() throws IOException {
+    if (limit - pos > 0) {
+      // buffer not empty, not at end.
+      return false;
+    } else {
+      if (source.isEof()) {
+        return true;
+      }
+      // read from source.
+      int read = source.tryReadRaw(buf, 0, buf.length);
+      pos = 0;
+      limit = read;
+      if (0 == read) {
+        // nothing left
+        return true;
+      }
+      return false;
+    }
+  }
+
+  /**
+   * Ensures that buf[pos + num - 1] is not out of the buffer array bounds.
+   * However, buf[pos + num - 1] may be >= limit if there is not enough data left
+   * in the source to fill the array with num bytes.
+   * <p/>
+   * This method allows readers to read ahead by num bytes safely without
+   * checking for EOF at each byte. However, readers must ensure that their
+   * reads are valid by checking that their read did not advance past the limit
+   * before adjusting pos.
+   * <p/>
+   * num must be less than the buffer size and greater than 0.
+   */
+  private void ensureBounds(int num) throws IOException {
+    int remaining = limit - pos;
+    if (remaining < num) {
+      // move remaining to front
+      source.compactAndFill(buf, pos, minPos, remaining);
+      if (pos >= limit)
+        throw new EOFException();
+    }
+  }
+
+  /**
+   * Returns an {@link java.io.InputStream} that is aware of any buffering that
+   * may occur in this BinaryDecoder. Readers that need to interleave decoding 
+   * Avro data with other reads must access this InputStream to do so unless
+   * the implementation is 'direct' and does not read beyond the minimum bytes
+   * necessary from the source.  
+   */
+  public InputStream inputStream() {
+    return source;
+  }
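+
+  /* Interleaving sketch (a hedged example, not part of the upstream
+   * sources): because this decoder may buffer ahead of what it has decoded,
+   * raw reads must go through inputStream() rather than the original stream:
+   * <pre>
+   *   BinaryDecoder in = DecoderFactory.get().binaryDecoder(stream, null);
+   *   long size = in.readLong();                 // decoded via the buffer
+   *   byte[] raw = new byte[(int) size];
+   *   new DataInputStream(in.inputStream()).readFully(raw); // stays in sync
+   * </pre>
+   */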
+
+  /**
+   * BufferAccessor is used by BinaryDecoder to enable {@link ByteSource}s and
+   * the InputStream returned by {@link BinaryDecoder#inputStream} to access the
+   * BinaryDecoder's buffer. When a BufferAccessor is created, it is attached to
+   * a BinaryDecoder and its buffer. Its accessors directly reference the
+   * BinaryDecoder's buffer until detach() is called, at which point it takes
+   * its own copy of the buffer state. The BinaryDecoder only detaches a
+   * BufferAccessor when it is initializing to a new ByteSource. Therefore, a
+   * client that is using the InputStream returned by BinaryDecoder.inputStream
+   * can continue to use that stream after the BinaryDecoder has been
+   * reinitialized to read from new data.
+   */
+  static class BufferAccessor {
+    private final BinaryDecoder decoder;
+    private byte[] buf;
+    private int pos;
+    private int limit;
+    boolean detached = false;
+
+    private BufferAccessor(BinaryDecoder decoder) {
+      this.decoder = decoder;
+    }
+
+    void detach() {
+      this.buf = decoder.buf;
+      this.pos = decoder.pos;
+      this.limit = decoder.limit;
+      detached = true;
+    }
+
+    int getPos() {
+      if (detached)
+        return this.pos;
+      else
+        return decoder.pos;
+    }
+
+    int getLim() {
+      if (detached)
+        return this.limit;
+      else
+        return decoder.limit;
+    }
+
+    byte[] getBuf() {
+      if (detached)
+        return this.buf;
+      else
+        return decoder.buf;
+    }
+
+    void setPos(int pos) {
+      if (detached)
+        this.pos = pos;
+      else
+        decoder.pos = pos;
+    }
+
+    void setLimit(int limit) {
+      if (detached)
+        this.limit = limit;
+      else
+        decoder.limit = limit;
+    }
+
+    void setBuf(byte[] buf, int offset, int length) {
+      if (detached) {
+        this.buf = buf;
+        this.limit = offset + length;
+        this.pos = offset;
+      } else {
+        decoder.buf = buf;
+        decoder.limit = offset + length;
+        decoder.pos = offset;
+        decoder.minPos = offset;
+      }
+    }
+  }
+
+  /**
+   * ByteSource abstracts the source of data from the core workings of
+   * BinaryDecoder. This is very important for performance reasons because
+   * InputStream's API is a barrier to performance due to several quirks:
+   * InputStream does not in general require that as many bytes as possible have
+   * been read when filling a buffer.
+   * <p/>
+   * InputStream's terminating conditions for a read are two-fold: EOFException
+   * and '-1' on the return from read(). Implementations are supposed to return
+   * '-1' on EOF but often do not. The extra terminating conditions cause extra
+   * conditionals on both sides of the API, and slow performance significantly.
+   * <p/>
+   * ByteSource implementations provide read() and skip() variants that have
+   * stronger guarantees than InputStream, freeing client code to be simplified
+   * and faster.
+   * <p/>
+   * {@link #skipSourceBytes} and {@link #readRaw} are guaranteed to have read
+   * or skipped as many bytes as possible, or throw EOFException.
+   * {@link #trySkipBytes} and {@link #tryReadRaw} are guaranteed to attempt to
+   * read or skip as many bytes as possible and never throw EOFException, while
+   * returning the exact number of bytes skipped or read. {@link #isEof} returns
+   * true if all the source bytes have been read or skipped. This condition can
+   * also be detected by a client if an EOFException is thrown from
+   * {@link #skipSourceBytes} or {@link #readRaw}, or if {@link #trySkipBytes}
+   * or {@link #tryReadRaw} return 0.
+   * <p/>
+   * A ByteSource also implements the InputStream contract for use by APIs that
+   * require it. The InputStream interface must take into account buffering in
+   * any decoder that this ByteSource is attached to. The other methods do not
+   * account for buffering.
+   */
+  abstract static class ByteSource extends InputStream {
+    // maintain a reference to the buffer, so that if this
+    // source is detached from the Decoder, and a client still
+    // has a reference to it via inputStream(), bytes are not
+    // lost
+    protected BufferAccessor ba;
+
+    protected ByteSource() {
+    }
+
+    abstract boolean isEof();
+
+    protected void attach(int bufferSize, BinaryDecoder decoder) {
+      decoder.buf = new byte[bufferSize];
+      decoder.pos = 0;
+      decoder.minPos = 0;
+      decoder.limit = 0;
+      this.ba = new BufferAccessor(decoder);
+      return;
+    }
+
+    protected void detach() {
+      ba.detach();
+    }
+
+    /**
+     * Skips length bytes from the source. If length bytes cannot be skipped due
+     * to end of file/stream/channel/etc an EOFException is thrown
+     * 
+     * @param length
+     *          the number of bytes to attempt to skip
+     * @throws IOException
+     *           if an error occurs
+     * @throws EOFException
+     *           if length bytes cannot be skipped
+     */
+    protected abstract void skipSourceBytes(long length) throws IOException;
+
+    /**
+     * Attempts to skip <i>skipLength</i> bytes from the source. Returns the
+     * actual number of bytes skipped. This method must attempt to skip as many
+     * bytes as possible up to <i>skipLength</i> bytes. Skipping 0 bytes signals
+     * end of stream/channel/file/etc
+     * 
+     * @param skipLength
+     *          the number of bytes to attempt to skip
+     * @return the count of actual bytes skipped.
+     */
+    protected abstract long trySkipBytes(long skipLength) throws IOException;
+
+    /**
+     * Reads raw bytes from the source into a byte[]. Used for reads that are
+     * larger than the buffer, or otherwise unbuffered. This is a mandatory
+     * read -- if there are not enough bytes in the source, EOFException is
+     * thrown.
+     * 
+     * @throws IOException
+     *           if an error occurs
+     * @throws EOFException
+     *           if len bytes cannot be read
+     * */
+    protected abstract void readRaw(byte[] data, int off, int len)
+        throws IOException;
+
+    /**
+     * Attempts to copy up to <i>len</i> bytes from the source into data,
+     * starting at index <i>off</i>. Returns the actual number of bytes copied
+     * which may be between 0 and <i>len</i>.
+     * <p/>
+     * This method must attempt to read as much as possible from the source.
+     * Returns 0 when at the end of stream/channel/file/etc.
+     * 
+     * @throws IOException
+     *           if an error occurs reading
+     **/
+    protected abstract int tryReadRaw(byte[] data, int off, int len)
+        throws IOException;
+
+    /**
+     * If this source buffers, compacts the buffer by placing the
+     * <i>remaining</i> bytes starting at <i>pos</i> at <i>minPos</i>. This may
+     * be done in the current buffer, or may replace the buffer with a new one.
+     * 
+     * The end result must be a buffer with at least 16 bytes of remaining space.
+     * 
+     * @param pos
+     * @param minPos
+     * @param remaining
+     * @throws IOException
+     */
+    protected void compactAndFill(byte[] buf, int pos, int minPos, int remaining)
+        throws IOException {
+      System.arraycopy(buf, pos, buf, minPos, remaining);
+      ba.setPos(minPos);
+      int newLimit = remaining
+          + tryReadRaw(buf, minPos + remaining, buf.length - remaining);
+      ba.setLimit(newLimit);
+    }
+
+    @Override
+    public int read(byte[] b, int off, int len) throws IOException {
+      int lim = ba.getLim();
+      int pos = ba.getPos();
+      byte[] buf = ba.getBuf();
+      int remaining = (lim - pos);
+      if (remaining >= len) {
+        System.arraycopy(buf, pos, b, off, len);
+        pos = pos + len;
+        ba.setPos(pos);
+        return len;
+      } else {
+        // flush buffer to array
+        System.arraycopy(buf, pos, b, off, remaining);
+        pos = pos + remaining;
+        ba.setPos(pos);
+        // get the rest from the stream (skip array)
+        int inputRead = remaining
+            + tryReadRaw(b, off + remaining, len - remaining);
+        if (inputRead == 0) {
+          return -1;
+        } else {
+          return inputRead;
+        }
+      }
+    }
+
+    @Override
+    public long skip(long n) throws IOException {
+      int lim = ba.getLim();
+      int pos = ba.getPos();
+      int remaining = lim - pos;
+      if (remaining > n) {
+        pos += n;
+        ba.setPos(pos);
+        return n;
+      } else {
+        pos = lim;
+        ba.setPos(pos);
+        long isSkipCount = trySkipBytes(n - remaining);
+        return isSkipCount + remaining;
+      }
+    }
+
+    /**
+     * returns the number of bytes remaining that this BinaryDecoder has
+     * buffered from its source
+     */
+    @Override
+    public int available() throws IOException {
+      return (ba.getLim() - ba.getPos());
+    }
+  }
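+
+  /* Contract sketch (illustrative; not part of the upstream sources): the
+   * "try" variants never throw EOFException and report exactly how far they
+   * got, so callers need only one terminating condition:
+   * <pre>
+   *   int n = source.tryReadRaw(buf, 0, buf.length);
+   *   if (n == 0) {
+   *     // end of data; source.isEof() is now true
+   *   }
+   *   source.readRaw(buf, 0, 16); // throws EOFException if 16 bytes are unavailable
+   * </pre>
+   */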
+
+  private static class InputStreamByteSource extends ByteSource {
+    private InputStream in;
+    protected boolean isEof = false;
+
+    private InputStreamByteSource(InputStream in) {
+      super();
+      this.in = in;
+    }
+
+    @Override
+    protected void skipSourceBytes(long length) throws IOException {
+      boolean readZero = false;
+      while (length > 0) {
+        long n = in.skip(length);
+        if (n > 0) {
+          length -= n;
+          continue;
+        }
+        // The InputStream contract is evil: zero "might" mean EOF, so
+        // check for two zeros in a row. Waiting for -1 would loop forever
+        // with some classes, while others spuriously return 0 on occasion
+        // without being at EOF.
+        if (n == 0) {
+          if (readZero) {
+            isEof = true;
+            throw new EOFException();
+          }
+          readZero = true;
+          continue;
+        }
+        // read negative
+        isEof = true;
+        throw new EOFException();
+      }
+    }
+
+    @Override
+    protected long trySkipBytes(long length) throws IOException {
+      long leftToSkip = length;
+      try {
+        boolean readZero = false;
+        while (leftToSkip > 0) {
+          long n = in.skip(leftToSkip);
+          if (n > 0) {
+            leftToSkip -= n;
+            continue;
+          }
+          // The InputStream contract is evil: zero "might" mean EOF, so
+          // check for two zeros in a row. Waiting for -1 would loop forever
+          // with some classes, while others spuriously return 0 on occasion
+          // without being at EOF.
+          if (n == 0) {
+            if (readZero) {
+              isEof = true;
+              break;
+            }
+            readZero = true;
+            continue;
+          }
+          // read negative
+          isEof = true;
+          break;
+
+        }
+      } catch (EOFException eof) {
+        isEof = true;
+      }
+      return length - leftToSkip;
+    }
+
+    @Override
+    protected void readRaw(byte[] data, int off, int len) throws IOException {
+      while (len > 0) {
+        int read = in.read(data, off, len);
+        if (read < 0) {
+          isEof = true;
+          throw new EOFException();
+        }
+        len -= read;
+        off += read;
+      }
+    }
+
+    @Override
+    protected int tryReadRaw(byte[] data, int off, int len) throws IOException {
+      int leftToCopy = len;
+      try {
+        while (leftToCopy > 0) {
+          int read = in.read(data, off, leftToCopy);
+          if (read < 0) {
+            isEof = true;
+            break;
+          }
+          leftToCopy -= read;
+          off += read;
+        }
+      } catch (EOFException eof) {
+        isEof = true;
+      }
+      return len - leftToCopy;
+    }
+
+    @Override
+    public int read() throws IOException {
+      if (ba.getLim() - ba.getPos() == 0) {
+        return in.read();
+      } else {
+        int position = ba.getPos();
+        int result = ba.getBuf()[position] & 0xff;
+        ba.setPos(position + 1);
+        return result;
+      }
+    }
+
+    @Override
+    public boolean isEof() {
+      return isEof;
+    }
+
+    @Override
+    public void close() throws IOException {
+      in.close();
+    }
+  }
+
+  /**
+   * This byte source is special: it avoids copying data by using the
+   * source's byte[] directly as the decoder's buffer.
+   */
+  private static class ByteArrayByteSource extends ByteSource {
+    private byte[] data;
+    private int position;
+    private int max;
+    private boolean compacted = false;
+
+    private ByteArrayByteSource(byte[] data, int start, int len) {
+      super();
+      // make sure data is not too small, otherwise readLong may try to
+      // read 10 bytes and get an index out of bounds.
+      if (data.length < 16 || len < 16) {
+        this.data = new byte[16];
+        System.arraycopy(data, start, this.data, 0, len);
+        this.position = 0;
+        this.max = len;
+      } else {
+        // use the array passed in
+        this.data = data;
+        this.position = start;
+        this.max = start + len;
+      }
+    }
+
+    @Override
+    protected void attach(int bufferSize, BinaryDecoder decoder) {
+      // buffer size is not used here; the byte[] source is the buffer.
+      decoder.buf = this.data;
+      decoder.pos = this.position;
+      decoder.minPos = this.position;
+      decoder.limit = this.max;
+      this.ba = new BufferAccessor(decoder);
+      return;
+    }
+
+    @Override
+    protected void skipSourceBytes(long length) throws IOException {
+      long skipped = trySkipBytes(length);
+      if (skipped < length) {
+        throw new EOFException();
+      }
+    }
+
+    @Override
+    protected long trySkipBytes(long length) throws IOException {
+      // the buffer is shared with the decoder, so refresh our view of it
+      max = ba.getLim();
+      position = ba.getPos();
+      long remaining = max - position;
+      if (remaining >= length) {
+        position += length;
+        ba.setPos(position);
+        return length;
+      } else {
+        position += remaining;
+        ba.setPos(position);
+        return remaining;
+      }
+    }
+
+    @Override
+    protected void readRaw(byte[] data, int off, int len) throws IOException {
+      int read = tryReadRaw(data, off, len);
+      if (read < len) {
+        throw new EOFException();
+      }
+    }
+
+    @Override
+    protected int tryReadRaw(byte[] data, int off, int len) throws IOException {
+      // the buffer is shared, nothing to read
+      return 0;
+    }
+
+    @Override
+    protected void compactAndFill(byte[] buf, int pos, int minPos, int remaining)
+        throws IOException {
+      // this implementation does not want to mutate the array passed in,
+      // so it makes a new tiny buffer unless it has been compacted once before
+      if (!compacted) {
+        // assumes ensureBounds is never called with a size greater than 16
+        byte[] tinybuf = new byte[remaining + 16];
+        System.arraycopy(buf, pos, tinybuf, 0, remaining);
+        ba.setBuf(tinybuf, 0, remaining);
+        compacted = true;
+      }
+    }
+
+    @Override
+    public int read() throws IOException {
+      max = ba.getLim();
+      position = ba.getPos();
+      if (position >= max) {
+        return -1;
+      } else {
+        int result = ba.getBuf()[position++] & 0xff;
+        ba.setPos(position);
+        return result;
+      }
+    }
+
+    @Override
+    public void close() throws IOException {
+      ba.setPos(ba.getLim()); // effectively sets isEof to true
+    }
+
+    @Override
+    public boolean isEof() {
+      int remaining = ba.getLim() - ba.getPos();
+      return (remaining == 0);
+    }
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/BinaryEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryEncoder.java
new file mode 100644
index 0000000..7be0390
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/BinaryEncoder.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.util.Utf8;
+
+/**
+ * An abstract {@link Encoder} for Avro's binary encoding.
+ * <p/>
+ * To construct and configure instances, use {@link EncoderFactory}
+ * 
+ * @see EncoderFactory
+ * @see BufferedBinaryEncoder
+ * @see DirectBinaryEncoder
+ * @see BlockingBinaryEncoder
+ * @see Encoder
+ * @see Decoder
+ */
+public abstract class BinaryEncoder extends Encoder {
+  
+  @Override
+  public void writeNull() throws IOException {}
+  
+  @Override
+  public void writeString(Utf8 utf8) throws IOException {
+    this.writeBytes(utf8.getBytes(), 0, utf8.getByteLength());
+  }
+  
+  @Override
+  public void writeString(String string) throws IOException {
+    if (0 == string.length()) {
+      writeZero();
+      return;
+    }
+    byte[] bytes = string.getBytes("UTF-8");
+    writeInt(bytes.length);
+    writeFixed(bytes, 0, bytes.length);
+  }
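+
+  /* Wire-format note (illustrative; not part of the upstream sources):
+   * a string is a zigzag-encoded length followed by its UTF-8 bytes, so
+   * <pre>
+   *   encoder.writeString("abc"); // emits 0x06 0x61 0x62 0x63
+   * </pre>
+   * where 0x06 is the zigzag varint encoding of the length 3.
+   */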
+
+  @Override
+  public void writeBytes(ByteBuffer bytes) throws IOException {
+    int len = bytes.limit() - bytes.position();
+    if (0 == len) {
+      writeZero();
+    } else {
+      writeInt(len);
+      writeFixed(bytes);
+    }
+  }
+  
+  @Override
+  public void writeBytes(byte[] bytes, int start, int len) throws IOException {
+    if (0 == len) {
+      writeZero();
+      return;
+    }
+    this.writeInt(len);
+    this.writeFixed(bytes, start, len);
+  }
+  
+  @Override
+  public void writeEnum(int e) throws IOException {
+    this.writeInt(e);
+  }
+
+  @Override
+  public void writeArrayStart() throws IOException {}
+
+  @Override
+  public void setItemCount(long itemCount) throws IOException {
+    if (itemCount > 0) {
+      this.writeLong(itemCount);
+    }
+  }
+  
+  @Override
+  public void startItem() throws IOException {}
+
+  @Override
+  public void writeArrayEnd() throws IOException {
+    writeZero();
+  }
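+
+  /* Call-sequence sketch (a hedged example, not part of the upstream
+   * sources; items is an assumed List<Long>): an array is written as a
+   * counted block terminated by writeArrayEnd(), which emits the closing
+   * zero byte:
+   * <pre>
+   *   encoder.writeArrayStart();
+   *   encoder.setItemCount(items.size());
+   *   for (Long item : items) {
+   *     encoder.startItem();
+   *     encoder.writeLong(item);
+   *   }
+   *   encoder.writeArrayEnd();
+   * </pre>
+   */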
+
+  @Override
+  public void writeMapStart() throws IOException {}
+
+  @Override
+  public void writeMapEnd() throws IOException {
+    writeZero();
+  }
+
+  @Override
+  public void writeIndex(int unionIndex) throws IOException {
+    writeInt(unionIndex);
+  }
+  
+  /** Write a zero byte to the underlying output. **/
+  protected abstract void writeZero() throws IOException;
+  
+  /**
+   * Returns the number of bytes currently buffered by this encoder. If this
+   * Encoder does not buffer, this will always return zero.
+   * <p/>
+   * Call {@link #flush()} to empty the buffer to the underlying output.
+   */
+  public abstract int bytesBuffered();
+  
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/BlockingBinaryEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/BlockingBinaryEncoder.java
new file mode 100644
index 0000000..e8b6c33
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/BlockingBinaryEncoder.java
@@ -0,0 +1,562 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Arrays;
+
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.Schema;
+
+/** A {@link BinaryEncoder} implementation that writes large arrays and maps as a
+ * sequence of blocks. So long as individual primitive values fit in memory,
+ * arbitrarily long arrays and maps may be written and subsequently read without
+ * exhausting memory. Values are buffered until the specified block size would
+ * be exceeded, minimizing block overhead.
+ * <p/>
+ * Use {@link EncoderFactory#blockingBinaryEncoder(OutputStream, BinaryEncoder)}
+ * to construct and configure.
+ * <p/>
+ * BlockingBinaryEncoder buffers writes; data may not appear on the output
+ * until {@link #flush()} is called.
+ * <p/>
+ * BlockingBinaryEncoder is not thread-safe.
+ * 
+ * @see BinaryEncoder
+ * @see EncoderFactory
+ * @see Encoder
+ */
+public class BlockingBinaryEncoder extends BufferedBinaryEncoder {
+
+ /* Implementation note:
+  *
+  * Blocking is complicated because of nesting.  If a large, nested
+  * value overflows your buffer, you've got to do a lot of dancing
+  * around to output the blocks correctly.
+  *
+  * To handle this complexity, this class keeps a stack of blocked
+  * values: each time a new block is started (e.g., by a call to
+  * {@link #writeArrayStart}), an entry is pushed onto this stack.
+  *
+  * In this stack, we keep track of the state of a block.  Blocks can
+  * be in two states.  "Regular" blocks have a non-zero byte count.
+  * "Overflow" blocks help us deal with the case where a block
+  * contains a value that's too big to buffer.  In this case, the
+  * block contains only one item, and we give it an unknown
+  * byte-count.  Because these values (1,unknown) are fixed, we're
+  * able to write the header for these overflow blocks to the
+  * underlying stream without seeing the entire block.  After writing
+  * this header, we've freed our buffer space to be fully devoted to
+  * blocking the large, inner value.
+  */
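+
+ /* Wire-layout sketch (illustrative; not part of the upstream sources):
+  * a "regular" block of n buffered items is emitted as zigzag(-n), a
+  * zigzag byte count, and the item data, while an overflow block header is
+  * just the positive count zigzag(1) with no byte count. Every blocked
+  * value ends with a zero count, so an array of three small longs might
+  * appear as:
+  *
+  *   zigzag(-3) zigzag(byteCount) item1 item2 item3 0x00
+  */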
+
+  private static class BlockedValue {
+    public enum State {
+      /**
+       * The bottom element of our stack represents being _outside_
+       * of a blocked value.
+       */
+      ROOT,
+
+      /**
+       * Represents the "regular" case, i.e., a blocked-value whose
+       * current block is fully contained in the buffer.  In this
+       * case, {@link BlockedValue#start} points to the start of the
+       * block's _data_ -- but no room has been left for a header!
+       * When this block is terminated, its data will have to be
+       * moved over a bit to make room for the header. */
+      REGULAR,
+
+      /**
+       * Represents a blocked-value whose current block is in the
+       * overflow state.  In this case, {@link BlockedValue#start} is zero. The
+       * header for such a block has _already been written_ (we've
+       * written out a header indicating that the block has a single
+       * item, and we put a "zero" down for the byte-count to indicate
+       * that we don't know the physical length of the buffer).  Any blocks
+       * _containing_ this block must be in the {@link #OVERFLOW}
+       * state. */
+      OVERFLOW
+    }
+
+    /** The type of this blocked value (ARRAY or MAP). */
+    public Schema.Type type;
+
+    /** The state of this BlockedValue */
+    public State state;
+    
+    /** The location in the buffer where this blocked value starts */
+    public int start;
+
+    /**
+     * The index one past the last byte for the previous item. If this
+     * is the first item, this is same as {@link #start}.
+     */
+    public int lastFullItem;
+    
+    /**
+     * Number of items in this blocked value that are stored
+     * in the buffer.
+     */
+    public int items;
+
+    /** Number of items left to write*/
+    public long itemsLeftToWrite;
+
+    /** Create a ROOT instance. */
+    public BlockedValue() {
+      this.type = null;
+      this.state = BlockedValue.State.ROOT;
+      this.start = this.lastFullItem = 0;
+      this.items = 1; // Makes various assertions work out
+    }
+    
+    /**
+     * Check invariants of <code>this</code> and also the
+     * <code>BlockedValue</code> containing <code>this</code>.
+     */
+    public boolean check(BlockedValue prev, int pos) {
+      assert state != State.ROOT || type == null;
+      assert (state == State.ROOT ||
+              type == Schema.Type.ARRAY || type == Schema.Type.MAP);
+
+      assert 0 <= items;
+      assert 0 != items || start == pos;         // 0==itms ==> start==pos
+      assert 1 < items || start == lastFullItem; // itms<=1 ==> start==lFI
+      assert items <= 1 || start <= lastFullItem; // 1<itms ==> start<=lFI
+      assert lastFullItem <= pos;
+
+      switch (state) {
+      case ROOT:
+          assert start == 0;
+          assert prev == null;
+          break;
+      case REGULAR:
+          assert start >= 0;
+          assert prev.lastFullItem <= start;
+          assert 1 <= prev.items;
+          break;
+      case OVERFLOW:
+          assert start == 0;
+          assert items == 1;
+          assert prev.state == State.ROOT || prev.state == State.OVERFLOW;
+          break;
+      }
+      return false;
+    }
+  }
+
+  /**
+   * The buffer to hold the bytes before being written into the underlying
+   * stream.
+   */
+  private byte[] buf;
+  
+  /**
+   * Index into the location in {@link #buf}, where next byte can be written.
+   */
+  private int pos;
+  
+  /**
+   * The state stack.
+   */
+  private BlockedValue[] blockStack;
+  private int stackTop = -1;
+  private static final int STACK_STEP = 10;
+
+  // buffer large enough for up to two ints for a block header,
+  // rounded up to a multiple of 4 bytes.
+  private byte[] headerBuffer = new byte[12];
+
+  private boolean check() {
+    assert buf != null;
+    assert 0 <= pos;
+    assert pos <= buf.length : pos + " " + buf.length;
+
+    assert blockStack != null;
+    BlockedValue prev = null;
+    for (int i = 0; i <= stackTop; i++) {
+      BlockedValue v = blockStack[i];
+      v.check(prev, pos);
+      prev = v;
+    }
+    return true;
+  }
+
+  BlockingBinaryEncoder(OutputStream out,
+      int blockBufferSize, int binaryEncoderBufferSize) {
+    super(out, binaryEncoderBufferSize);
+    this.buf = new byte[blockBufferSize];
+    this.pos = 0;
+    blockStack = new BlockedValue[0];
+    expandStack();
+    BlockedValue bv = blockStack[++stackTop];
+    bv.type = null;
+    bv.state = BlockedValue.State.ROOT;
+    bv.start = bv.lastFullItem = 0;
+    bv.items = 1;
+
+    assert check();
+  }
+
+  private void expandStack() {
+    int oldLength = blockStack.length;
+    blockStack = Arrays.copyOf(blockStack,
+        blockStack.length + STACK_STEP);
+    for (int i = oldLength; i < blockStack.length; i++) {
+      blockStack[i] = new BlockedValue();
+    }
+  }
+
+  BlockingBinaryEncoder configure(OutputStream out, int blockBufferSize,
+      int binaryEncoderBufferSize) {
+    super.configure(out, binaryEncoderBufferSize);
+    pos = 0;
+    stackTop = 0;
+    if (null == buf || buf.length != blockBufferSize) {
+      buf = new byte[blockBufferSize];
+    }
+    
+    assert check();
+    return this;
+  }
+  
+  @Override
+  public void flush() throws IOException {
+    BlockedValue bv = blockStack[stackTop];
+    if (bv.state == BlockedValue.State.ROOT) {
+      super.writeFixed(buf, 0, pos);
+      pos = 0;
+    } else {
+      while (bv.state != BlockedValue.State.OVERFLOW) {
+        compact();
+      }
+    }
+    super.flush();
+
+    assert check();
+  }
+
+  @Override
+  public void writeBoolean(boolean b) throws IOException {
+    ensureBounds(1);
+    pos += BinaryData.encodeBoolean(b, buf, pos);
+  }
+
+  @Override
+  public void writeInt(int n) throws IOException {
+    ensureBounds(5);
+    pos += BinaryData.encodeInt(n, buf, pos);
+  }
+
+  @Override
+  public void writeLong(long n) throws IOException {
+    ensureBounds(10);
+    pos += BinaryData.encodeLong(n, buf, pos);
+  }
+    
+  @Override
+  public void writeFloat(float f) throws IOException {
+    ensureBounds(4);
+    pos += BinaryData.encodeFloat(f, buf, pos);
+  }
+
+  @Override
+  public void writeDouble(double d) throws IOException {
+    ensureBounds(8);
+    pos += BinaryData.encodeDouble(d, buf, pos);
+  }
+
+  @Override
+  public void writeFixed(byte[] bytes, int start, int len) throws IOException {
+    doWriteBytes(bytes, start, len);
+  }
+  
+  @Override
+  protected void writeZero() throws IOException {
+    ensureBounds(1);
+    buf[pos++] = (byte) 0;
+  }
+
+  @Override
+  public void writeArrayStart() throws IOException {
+    if (stackTop + 1 == blockStack.length) {
+      expandStack();
+    }
+
+    BlockedValue bv = blockStack[++stackTop];
+    bv.type = Schema.Type.ARRAY;
+    bv.state = BlockedValue.State.REGULAR;
+    bv.start = bv.lastFullItem = pos;
+    bv.items = 0;
+
+    assert check();
+  }
+
+  @Override
+  public void setItemCount(long itemCount) throws IOException {
+    BlockedValue v = blockStack[stackTop];
+    assert v.type == Schema.Type.ARRAY || v.type == Schema.Type.MAP;
+    assert v.itemsLeftToWrite == 0;
+    v.itemsLeftToWrite = itemCount;
+
+    assert check();
+  }
+  
+  @Override
+  public void startItem() throws IOException {
+    if (blockStack[stackTop].state == BlockedValue.State.OVERFLOW) {
+      finishOverflow();
+    }
+    BlockedValue t = blockStack[stackTop];
+    t.items++;
+    t.lastFullItem = pos;
+    t.itemsLeftToWrite--;
+
+    assert check();
+  }
+
+  @Override
+  public void writeArrayEnd() throws IOException {
+    BlockedValue top = blockStack[stackTop];
+    if (top.type != Schema.Type.ARRAY) {
+      throw new AvroTypeException("Called writeArrayEnd outside of an array.");
+    }
+    if (top.itemsLeftToWrite != 0) {
+      throw new AvroTypeException("Failed to write expected number of array elements.");
+    }
+    endBlockedValue();
+
+    assert check();
+  }
+
+  @Override
+  public void writeMapStart() throws IOException {
+    if (stackTop + 1 == blockStack.length) {
+      expandStack();
+    }
+
+    BlockedValue bv = blockStack[++stackTop];
+    bv.type = Schema.Type.MAP;
+    bv.state = BlockedValue.State.REGULAR;
+    bv.start = bv.lastFullItem = pos;
+    bv.items = 0;
+
+    assert check();
+  }
+
+  @Override
+  public void writeMapEnd() throws IOException {
+    BlockedValue top = blockStack[stackTop];
+    if (top.type != Schema.Type.MAP) {
+      throw new AvroTypeException("Called writeMapEnd outside of a map.");
+    }
+    if (top.itemsLeftToWrite != 0) {
+      throw new AvroTypeException("Failed to read write expected number of array elements.");
+    }
+    endBlockedValue();
+    
+    assert check();
+  }
+
+  @Override
+  public void writeIndex(int unionIndex) throws IOException {
+    ensureBounds(5);
+    pos += BinaryData.encodeInt(unionIndex, buf, pos);
+  }
+
+  @Override
+  public int bytesBuffered() {
+    return pos + super.bytesBuffered();
+  }
+  
+  private void endBlockedValue() throws IOException {
+    for (; ;) {
+      assert check();
+      BlockedValue t = blockStack[stackTop];
+      assert t.state != BlockedValue.State.ROOT;
+      if (t.state == BlockedValue.State.OVERFLOW) {
+        finishOverflow();
+      }
+      assert t.state == BlockedValue.State.REGULAR;
+      if (0 < t.items) {
+        int byteCount = pos - t.start;
+        if (t.start == 0 &&
+          blockStack[stackTop - 1].state
+            != BlockedValue.State.REGULAR) { // Lucky us -- don't have to move
+          super.writeInt(-t.items);
+          super.writeInt(byteCount);
+        } else {
+          int headerSize = 0;
+          headerSize += BinaryData.encodeInt(-t.items, headerBuffer, headerSize);
+          headerSize += BinaryData.encodeInt(byteCount, headerBuffer, headerSize);
+          if (buf.length >= pos + headerSize) {
+            pos += headerSize;
+            final int m = t.start;
+            System.arraycopy(buf, m, buf, m + headerSize, byteCount);
+            System.arraycopy(headerBuffer, 0, buf, m, headerSize);
+          } else {
+            compact();
+            continue;
+          }
+        }
+      }
+      stackTop--;
+      ensureBounds(1);
+      buf[pos++] = 0;   // Sentinel for last block in a blocked value
+      assert check();
+      if (blockStack[stackTop].state == BlockedValue.State.ROOT) {
+        flush();
+      }
+      return;
+    }
+  }
+
+  /**
+   * Called when we've finished writing the last item in an overflow
+   * buffer.  When this is finished, the top of the stack will be
+   * an empty block in the "regular" state.
+   * @throws IOException
+   */
+  private void finishOverflow() throws IOException {
+    BlockedValue s = blockStack[stackTop];
+    if (s.state != BlockedValue.State.OVERFLOW) {
+      throw new IllegalStateException("Not an overflow block");
+    }
+    assert check();
+
+    // Flush any remaining data for this block
+    super.writeFixed(buf, 0, pos);
+    pos = 0;
+
+    // Reset top of stack to be in REGULAR mode
+    s.state = BlockedValue.State.REGULAR;
+    s.start = s.lastFullItem = 0;
+    s.items = 0;
+    assert check();
+  }
+
+  private void ensureBounds(int l) throws IOException {
+    while (buf.length < (pos + l)) {
+      if (blockStack[stackTop].state == BlockedValue.State.REGULAR) {
+        compact();
+      } else {
+        super.writeFixed(buf, 0, pos);
+        pos = 0;
+      }
+    }
+  }
+
+  private void doWriteBytes(byte[] bytes, int start, int len)
+    throws IOException {
+    if (len < buf.length) {
+      ensureBounds(len);
+      System.arraycopy(bytes, start, buf, pos, len);
+      pos += len;
+    } else {
+      ensureBounds(buf.length);
+      assert blockStack[stackTop].state == BlockedValue.State.ROOT ||
+        blockStack[stackTop].state == BlockedValue.State.OVERFLOW;
+      write(bytes, start, len);
+    }
+  }
+
+  private void write(byte[] b, int off, int len) throws IOException {
+    if (blockStack[stackTop].state == BlockedValue.State.ROOT) {
+      super.writeFixed(b, off, len);
+    } else {
+      assert check();
+      while (buf.length < (pos + len)) {
+        if (blockStack[stackTop].state == BlockedValue.State.REGULAR) {
+          compact();
+        } else {
+          super.writeFixed(buf, 0, pos);
+          pos = 0;
+          if (buf.length <= len) {
+            super.writeFixed(b, off, len);
+            len = 0;
+          }
+        }
+      }
+      System.arraycopy(b, off, buf, pos, len);
+      pos += len;
+    }
+    assert check();
+  }
+
+  /** Only call if there are REGULAR-state values on the stack. */
+  private void compact() throws IOException {
+    assert check();
+
+    // Find first REGULAR-state value
+    BlockedValue s = null;
+    int i;
+    for (i = 1; i <= stackTop; i++) {
+      s = blockStack[i];
+      if (s.state == BlockedValue.State.REGULAR) break;
+    }
+    assert s != null;
+
+    // We're going to transition "s" into the overflow state.  To do
+    // this, we're going to flush any bytes prior to "s", then write
+    // any full items of "s" into a block, start an overflow
+    // block, write any remaining bytes of "s" up to the start of the
+    // next more deeply-nested blocked-value, and finally move over
+    // any remaining bytes (which will be from more deeply-nested
+    // blocked values).
+
+    // Flush any bytes prior to "s"
+    super.writeFixed(buf, 0, s.start);
+
+    // Write any full items of "s"
+    if (1 < s.items) {
+      super.writeInt(-(s.items - 1));
+      super.writeInt(s.lastFullItem - s.start);
+      super.writeFixed(buf, s.start, s.lastFullItem - s.start);
+      s.start = s.lastFullItem;
+      s.items = 1;
+    }
+
+    // Start an overflow block for s
+    super.writeInt(1);
+
+    // Write any remaining bytes for "s", up to the next-most
+    // deeply-nested value
+    BlockedValue n = ((i + 1) <= stackTop ?
+        blockStack[i + 1] : null);
+    int end = (n == null ? pos : n.start);
+    super.writeFixed(buf, s.lastFullItem, end - s.lastFullItem);
+
+    // Move over any bytes that remain (and adjust indices)
+    System.arraycopy(buf, end, buf, 0, pos - end);
+    for (int j = i + 1; j <= stackTop; j++) {
+        n = blockStack[j];
+        n.start -= end;
+        n.lastFullItem -= end;
+    }
+    pos -= end;
+
+    assert s.items == 1;
+    s.start = s.lastFullItem = 0;
+    s.state = BlockedValue.State.OVERFLOW;
+
+    assert check();
+  }
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/BufferedBinaryEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/BufferedBinaryEncoder.java
new file mode 100644
index 0000000..cb0758c
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/BufferedBinaryEncoder.java
@@ -0,0 +1,227 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.nio.channels.Channels;
+import java.nio.channels.WritableByteChannel;
+
+import org.apache.avro.AvroRuntimeException;
+
+/**
+ * An {@link Encoder} for Avro's binary encoding.
+ * <p/>
+ * This implementation buffers output to enhance performance.
+ * Output may not appear on the underlying output until flush() is called.
+ * <p/>
+ * {@link DirectBinaryEncoder} can be used in place of this implementation if
+ * the buffering semantics are not desired, and the performance difference
+ * is acceptable.
+ * <p/>
+ * To construct or reconfigure, use
+ * {@link EncoderFactory#binaryEncoder(OutputStream, BinaryEncoder)}.
+ * <p/>
+ * To change the buffer size, configure the factory instance used to
+ * create instances with {@link EncoderFactory#configureBufferSize(int)}.
+ *  @see Encoder
+ *  @see EncoderFactory
+ *  @see BlockingBinaryEncoder
+ *  @see DirectBinaryEncoder
+ */
+public class BufferedBinaryEncoder extends BinaryEncoder {
+  private byte[] buf;
+  private int pos;
+  private ByteSink sink;
+  private int bulkLimit;
+
+  BufferedBinaryEncoder(OutputStream out, int bufferSize) {
+    configure(out, bufferSize);
+  }
+  
+  BufferedBinaryEncoder configure(OutputStream out, int bufferSize) {
+    if (null == out)
+      throw new NullPointerException("OutputStream cannot be null!");
+    if (null != this.sink) {
+      if ( pos > 0) {
+        try {
+          flushBuffer();
+        } catch (IOException e) {
+          throw new AvroRuntimeException("Failure flushing old output", e);
+        }
+      }
+    }
+    this.sink = new OutputStreamSink(out);
+    pos = 0;
+    if (null == buf || buf.length != bufferSize) {
+      buf = new byte[bufferSize];
+    }
+    bulkLimit = buf.length >>> 1;
+    if (bulkLimit > 512) {
+      bulkLimit = 512;
+    }
+    return this;
+  }
+
+  @Override
+  public void flush() throws IOException {
+    flushBuffer();
+    sink.innerFlush();
+  }
+
+  /** Flushes the internal buffer to the underlying output. 
+   * Does not flush the underlying output.
+   */
+  private void flushBuffer() throws IOException {
+    if (pos > 0) {
+      sink.innerWrite(buf, 0, pos);
+      pos = 0;
+    }
+  }
+
+  /** Ensures that the buffer has at least num bytes free to write to between
+   * its current position and the end. This will not expand the buffer larger
+   * than its current size; for writes larger than or near the size of the
+   * buffer, the buffer is flushed and the data is written directly to the
+   * output, bypassing the buffer.
+   * @param num
+   * @throws IOException
+   */
+  private void ensureBounds(int num) throws IOException {
+    int remaining = buf.length - pos;
+    if (remaining < num) {
+      flushBuffer();
+    }
+  }
+
+  @Override
+  public void writeBoolean(boolean b) throws IOException {
+    // inlined, shorter version of ensureBounds
+    if (buf.length == pos) {
+      flushBuffer();
+    }
+    pos += BinaryData.encodeBoolean(b, buf, pos);
+  }
+
+  @Override
+  public void writeInt(int n) throws IOException {
+    ensureBounds(5);
+    pos += BinaryData.encodeInt(n, buf, pos);
+  }
+
+  @Override
+  public void writeLong(long n) throws IOException {
+    ensureBounds(10);
+    pos += BinaryData.encodeLong(n, buf, pos);
+  }
+
+  @Override
+  public void writeFloat(float f) throws IOException {
+    ensureBounds(4);
+    pos += BinaryData.encodeFloat(f, buf, pos);
+  }
+
+  @Override
+  public void writeDouble(double d) throws IOException {
+    ensureBounds(8);
+    pos += BinaryData.encodeDouble(d, buf, pos);
+  }
+
+  @Override
+  public void writeFixed(byte[] bytes, int start, int len) throws IOException {
+    if (len > bulkLimit) {
+      // too big, write directly
+      flushBuffer();
+      sink.innerWrite(bytes, start, len);
+      return;
+    }
+    ensureBounds(len);
+    System.arraycopy(bytes, start, buf, pos, len);
+    pos += len;
+  }
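+
+  /* Sizing note (illustrative; not part of the upstream sources): bulkLimit
+   * is half the buffer size, capped at 512 bytes. With a 2048-byte buffer,
+   * a 4000-byte writeFixed() flushes the buffer and writes straight to the
+   * sink, while a 100-byte write is simply buffered.
+   */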
+  
+  @Override
+  public void writeFixed(ByteBuffer bytes) throws IOException {
+    if (!bytes.hasArray() && bytes.remaining() > bulkLimit) {
+      flushBuffer();
+      sink.innerWrite(bytes);                     // bypass the buffer
+    } else {
+      super.writeFixed(bytes);
+    }
+  }
+
+  @Override
+  protected void writeZero() throws IOException {
+    writeByte(0);
+  }
+  
+  private void writeByte(int b) throws IOException {
+    if (pos == buf.length) {
+      flushBuffer();
+    }
+    buf[pos++] = (byte) (b & 0xFF);
+  }
+
+  @Override
+  public int bytesBuffered() {
+    return pos;
+  }
+
+  /**
+   * ByteSink abstracts the destination of written data from the core workings
+   * of BinaryEncoder.
+   * <p/>
+   * Currently the only destination option is an OutputStream, but we may later
+   * want to handle other constructs or specialize for certain OutputStream
+   * Implementations such as ByteBufferOutputStream.
+   * <p/>
+   */
+  private abstract static class ByteSink {
+    protected ByteSink() {}
+    /** Write data from bytes, starting at off, for len bytes **/
+    protected abstract void innerWrite(byte[] bytes, int off, int len) throws IOException;
+    
+    protected abstract void innerWrite(ByteBuffer buff) throws IOException;
+    
+    /** Flush the underlying output, if supported **/
+    protected abstract void innerFlush() throws IOException;
+  }
+  
+  static class OutputStreamSink extends ByteSink {
+    private final OutputStream out;
+    private final WritableByteChannel channel;
+    private OutputStreamSink(OutputStream out) {
+      super();
+      this.out = out;
+      channel = Channels.newChannel(out);
+    }
+    @Override
+    protected void innerWrite(byte[] bytes, int off, int len)
+        throws IOException {
+      out.write(bytes, off, len);
+    }
+    @Override
+    protected void innerFlush() throws IOException {
+      out.flush();
+    }
+    @Override
+    protected void innerWrite(ByteBuffer buff) throws IOException {
+      channel.write(buff);
+    }
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/DatumReader.java b/lang/java/avro/src/main/java/org/apache/avro/io/DatumReader.java
new file mode 100644
index 0000000..a968651
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/DatumReader.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+
+/** Read data of a schema.
+ * <p>Determines the in-memory data representation.
+ */
+public interface DatumReader<D> {
+
+  /** Set the writer's schema. */
+  void setSchema(Schema schema);
+
+  /** Read a datum.  Traverse the schema, depth-first, reading all leaf values
+   * in the schema into a datum that is returned.  If the provided datum is
+   * non-null it may be reused and returned. */
+  D read(D reuse, Decoder in) throws IOException;
+
+}
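+
+/* Usage sketch (a hedged example, not part of the upstream sources; assumes
+ * a parsed Schema and Avro's generic representation in
+ * org.apache.avro.generic):
+ * <pre>
+ *   DatumReader&lt;GenericRecord&gt; reader =
+ *     new GenericDatumReader&lt;GenericRecord&gt;(schema);
+ *   Decoder in = DecoderFactory.get().binaryDecoder(bytes, null);
+ *   GenericRecord record = reader.read(null, in);
+ * </pre>
+ */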
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/DatumWriter.java b/lang/java/avro/src/main/java/org/apache/avro/io/DatumWriter.java
new file mode 100644
index 0000000..c30bf82
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/DatumWriter.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+
+/** Write data of a schema.
+ * <p>Implemented for different in-memory data representations.
+ */
+public interface DatumWriter<D> {
+
+  /** Set the schema. */
+  void setSchema(Schema schema);
+
+  /** Write a datum.  Traverse the schema, depth first, writing each leaf value
+   * in the schema from the datum to the output. */
+  void write(D datum, Encoder out) throws IOException;
+}
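+
+/* Usage sketch (a hedged example, not part of the upstream sources; mirrors
+ * the DatumReader sketch using the generic representation):
+ * <pre>
+ *   DatumWriter&lt;GenericRecord&gt; writer =
+ *     new GenericDatumWriter&lt;GenericRecord&gt;(schema);
+ *   BinaryEncoder out = EncoderFactory.get().binaryEncoder(outputStream, null);
+ *   writer.write(record, out);
+ *   out.flush();
+ * </pre>
+ */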
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/Decoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/Decoder.java
new file mode 100644
index 0000000..e924de5
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/Decoder.java
@@ -0,0 +1,284 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.util.Utf8;
+
+/**
+ * Low-level support for de-serializing Avro values.
+ * <p/>
+ *  This class has two types of methods.  One type supports
+ *  the reading of leaf values (for example, {@link #readLong} and
+ *  {@link #readString}).
+ *  <p/>
+ *  The other type supports the reading of maps and arrays.
+ *  These methods are {@link #readArrayStart}, {@link #arrayNext},
+ *  and similar methods for maps.  See {@link #readArrayStart} for
+ *  details on these methods.
+ *  <p/>
+ *  {@link DecoderFactory} contains Decoder construction and configuration
+ *  facilities.
+ *  @see DecoderFactory
+ *  @see Encoder
+ */
+public abstract class Decoder {
+
+  /**
+   * "Reads" a null value.  (Doesn't actually read anything, but
+   * advances the state of the parser if the implementation is
+   * stateful.)
+   *  @throws AvroTypeException If this is a stateful reader and
+   *          null is not the type of the next value to be read
+   */
+  public abstract void readNull() throws IOException;
+
+  /**
+   * Reads a boolean value written by {@link Encoder#writeBoolean}.
+   * @throws AvroTypeException If this is a stateful reader and
+   * boolean is not the type of the next value to be read
+   */
+  public abstract boolean readBoolean() throws IOException;
+
+  /**
+   * Reads an integer written by {@link Encoder#writeInt}.
+   * @throws AvroTypeException If encoded value is larger than
+   *          32-bits
+   * @throws AvroTypeException If this is a stateful reader and
+   *          int is not the type of the next value to be read
+   */
+  public abstract int readInt() throws IOException;
+
+  /**
+   * Reads a long written by {@link Encoder#writeLong}.
+   * @throws AvroTypeException If this is a stateful reader and
+   *          long is not the type of the next value to be read
+   */
+  public abstract long readLong() throws IOException;
+
+  /**
+   * Reads a float written by {@link Encoder#writeFloat}.
+   * @throws AvroTypeException If this is a stateful reader and
+   * is not the type of the next value to be read
+   */
+  public abstract float readFloat() throws IOException;
+
+  /**
+   * Reads a double written by {@link Encoder#writeDouble}.
+   * @throws AvroTypeException If this is a stateful reader and
+   *           is not the type of the next value to be read
+   */
+  public abstract double readDouble() throws IOException;
+    
+  /**
+   * Reads a char-string written by {@link Encoder#writeString}.
+   * @throws AvroTypeException If this is a stateful reader and
+   * char-string is not the type of the next value to be read
+   */
+  public abstract Utf8 readString(Utf8 old) throws IOException;
+    
+  /**
+   * Reads a char-string written by {@link Encoder#writeString}.
+   * @throws AvroTypeException If this is a stateful reader and
+   * char-string is not the type of the next value to be read
+   */
+  public abstract String readString() throws IOException;
+
+  /**
+   * Discards a char-string written by {@link Encoder#writeString}.
+   *  @throws AvroTypeException If this is a stateful reader and
+   *          char-string is not the type of the next value to be read
+   */
+  public abstract void skipString() throws IOException;
+
+  /**
+   * Reads a byte-string written by {@link Encoder#writeBytes}.
+   * If <tt>old</tt> is not null and has sufficient capacity to take in
+   * the bytes being read, the bytes are returned in <tt>old</tt>.
+   * @throws AvroTypeException If this is a stateful reader and
+   *          byte-string is not the type of the next value to be read
+   */
+  public abstract ByteBuffer readBytes(ByteBuffer old) throws IOException;
+
+  /**
+   * Discards a byte-string written by {@link Encoder#writeBytes}.
+   *  @throws AvroTypeException If this is a stateful reader and
+   *          byte-string is not the type of the next value to be read
+   */
+  public abstract void skipBytes() throws IOException;
+  
+  /**
+   * Reads fixed sized binary object.
+   * @param bytes The buffer to store the contents being read.
+   * @param start The position where the data needs to be written.
+   * @param length  The size of the binary object.
+   * @throws AvroTypeException If this is a stateful reader and
+   *          fixed sized binary object is not the type of the next
+   *          value to be read or the length is incorrect.
+   * @throws IOException
+   */
+  public abstract void readFixed(byte[] bytes, int start, int length)
+    throws IOException;
+
+  /**
+   * A shorthand for <tt>readFixed(bytes, 0, bytes.length)</tt>.
+   * @throws AvroTypeException If this is a stateful reader and
+   *          fixed sized binary object is not the type of the next
+   *          value to be read or the length is incorrect.
+   * @throws IOException
+   */
+  public void readFixed(byte[] bytes) throws IOException {
+    readFixed(bytes, 0, bytes.length);
+  }
+  
+  /**
+   * Discards fixed sized binary object.
+   * @param length  The size of the binary object to be skipped.
+   * @throws AvroTypeException If this is a stateful reader and
+   *          fixed sized binary object is not the type of the next
+   *          value to be read or the length is incorrect.
+   * @throws IOException
+   */
+  public abstract void skipFixed(int length) throws IOException;
+
+  /**
+   * Reads an enumeration.
+   * @return The enumeration's value.
+   * @throws AvroTypeException If this is a stateful reader and
+   *          enumeration is not the type of the next value to be read.
+   * @throws IOException
+   */
+  public abstract int readEnum() throws IOException;
+  
+  /**
+   * Reads and returns the size of the first block of an array.  If
+   * this method returns non-zero, then the caller should read the
+   * indicated number of items, and then call {@link
+   * #arrayNext} to find out the number of items in the next
+   * block.  The typical pattern for consuming an array looks like:
+   * <pre>
+   *   for(long i = in.readArrayStart(); i != 0; i = in.arrayNext()) {
+   *     for (long j = 0; j < i; j++) {
+   *       read next element of the array;
+   *     }
+   *   }
+   * </pre>
+   *  @throws AvroTypeException If this is a stateful reader and
+   *          array is not the type of the next value to be read */
+  public abstract long readArrayStart() throws IOException;
+
+  /**
+   * Processes the next block of an array and returns the number of items in
+   * the block, letting the caller read those items.
+   * @throws AvroTypeException When called outside of an
+   *         array context
+   */
+  public abstract long arrayNext() throws IOException;
+
+  /**
+   * Used for quickly skipping through an array.  Note you can
+   * either skip the entire array, or read the entire array (with
+   * {@link #readArrayStart}), but you can't mix the two on the
+   * same array.
+   *
+   * This method will skip through as many items as it can, all of
+   * them if possible.  It will return zero if there are no more
+   * items to skip through, or an item count if it needs the client's
+   * help in skipping.  The typical usage pattern is:
+   * <pre>
+   *   for(long i = in.skipArray(); i != 0; i = in.skipArray()) {
+   *     for (long j = 0; j < i; j++) {
+   *       read and discard the next element of the array;
+   *     }
+   *   }
+   * </pre>
+   * Note that this method can automatically skip through items if a
+   * byte-count is found in the underlying data, or if a schema has
+   * been provided to the implementation, but
+   * otherwise the client will have to skip through items itself.
+   *
+   *  @throws AvroTypeException If this is a stateful reader and
+   *          array is not the type of the next value to be read
+   */
+  public abstract long skipArray() throws IOException;
+
+  /**
+   * Reads and returns the size of the next block of map-entries.
+   * Similar to {@link #readArrayStart}.
+   *
+   *  As an example, let's say you want to read a map of records,
+   *  the record consisting of an int field and a boolean field.
+   *  Your code would look something like this:
+   * <pre>
+   *   Map<String,Record> m = new HashMap<String,Record>();
+   *   Record reuse = new Record();
+   *   for(long i = in.readMapStart(); i != 0; i = in.mapNext()) {
+   *     for (long j = 0; j < i; j++) {
+   *       String key = in.readString();
+   *       reuse.intField = in.readInt();
+   *       reuse.boolField = in.readBoolean();
+   *       m.put(key, reuse); // note: copy reuse if entries must stay distinct
+   *     }
+   *   }
+   * </pre>
+   * @throws AvroTypeException If this is a stateful reader and
+   *         map is not the type of the next value to be read
+   */
+  public abstract long readMapStart() throws IOException;
+
+  /**
+   * Processes the next block of map entries and returns the number of entries in it.
+   * Similar to {@link #arrayNext}.  See {@link #readMapStart} for details.
+   * @throws AvroTypeException When called outside of a
+   *         map context
+   */
+  public abstract long mapNext() throws IOException;
+
+  /**
+   * Support for quickly skipping through a map similar to {@link #skipArray}.
+   *
+   * As an example, let's say you want to skip a map of records,
+   * the record consisting of an int field and a boolean field.
+   * Your code would look something like this:
+   * <pre>
+   *   for(long i = in.skipMap(); i != 0; i = in.skipMap()) {
+   *     for (long j = 0; j < i; j++) {
+   *       in.skipString();  // Discard key
+   *       in.readInt(); // Discard int-field of value
+   *       in.readBoolean(); // Discard boolean-field of value
+   *     }
+   *   }
+   * </pre>
+   *  @throws AvroTypeException If this is a stateful reader and
+   *          map is not the type of the next value to be read */
+
+  public abstract long skipMap() throws IOException;
+
+  /**
+   * Reads the tag of a union written by {@link Encoder#writeIndex}.
+   * @throws AvroTypeException If this is a stateful reader and
+   *         union is not the type of the next value to be read
+   */
+  public abstract int readIndex() throws IOException;
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/DecoderFactory.java b/lang/java/avro/src/main/java/org/apache/avro/io/DecoderFactory.java
new file mode 100644
index 0000000..c777614
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/DecoderFactory.java
@@ -0,0 +1,316 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.avro.Schema;
+
+/**
+ * A factory for creating and configuring {@link Decoder}s.
+ * <p/>
+ * Factories are thread-safe, and are generally cached by applications for
+ * performance reasons. Multiple instances are only required if multiple
+ * concurrent configurations are needed.
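+ * <p/>
+ * A minimal usage sketch (<i>bytes</i> is assumed to hold Avro binary data):
+ * <pre>
+ * BinaryDecoder d = DecoderFactory.get().binaryDecoder(bytes, null);
+ * long value = d.readLong();
+ * </pre>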
+ * 
+ * @see Decoder
+ */
+
+public class DecoderFactory {
+  private static final DecoderFactory DEFAULT_FACTORY = new DefaultDecoderFactory();
+  static final int DEFAULT_BUFFER_SIZE = 8192;
+
+  int binaryDecoderBufferSize = DEFAULT_BUFFER_SIZE;
+
+  /** Constructor for factory instances */
+  public DecoderFactory() {
+    super();
+  }
+
+  /**
+   * @deprecated use the equivalent {@link #get()} instead
+   */
+  @Deprecated
+  public static DecoderFactory defaultFactory() {
+    return get();
+  }
+  
+  /**
+   * Returns an immutable static DecoderFactory configured with default settings.
+   * All mutating methods throw IllegalArgumentExceptions. All creator methods
+   * create objects with default settings.
+   */
+  public static DecoderFactory get() {
+    return DEFAULT_FACTORY;
+  }
+
+  /**
+   * Configures this factory to use the specified buffer size when creating
+   * Decoder instances that buffer their input. The default buffer size is
+   * 8192 bytes.
+   * 
+   * @param size The preferred buffer size. Valid values are in the range [32,
+   *          16*1024*1024]. Values outside this range are rounded to the nearest
+   *          value in the range. Values less than 512 or greater than 1024*1024
+   *          are not recommended.
+   * @return This factory, to enable method chaining:
+   * <pre>
+   * DecoderFactory myFactory = new DecoderFactory().configureDecoderBufferSize(4096);
+   * </pre>
+   */
+  public DecoderFactory configureDecoderBufferSize(int size) {
+    if (size < 32)
+      size = 32;
+    if (size > 16 * 1024 * 1024)
+      size = 16 * 1024 * 1024;
+    this.binaryDecoderBufferSize = size;
+    return this;
+  }
+  
+  /**
+   * Returns this factory's configured preferred buffer size.  Used when creating
+   * Decoder instances that buffer. See {@link #configureDecoderBufferSize}.
+   * @return The preferred buffer size, in bytes.
+   */
+  public int getConfiguredBufferSize() {
+    return this.binaryDecoderBufferSize;
+  }
+  
+  /** @deprecated use the equivalent
+   *  {@link #binaryDecoder(InputStream, BinaryDecoder)} instead */
+  @Deprecated
+  public BinaryDecoder createBinaryDecoder(InputStream in, BinaryDecoder reuse) {
+    return binaryDecoder(in, reuse);
+  }
+  
+  /**
+   * Creates or reinitializes a {@link BinaryDecoder} with the input stream
+   * provided as the source of data. If <i>reuse</i> is provided, it will be
+   * reinitialized to the given input stream.
+   * <p/>
+   * {@link BinaryDecoder} instances returned by this method buffer their input,
+   * reading up to {@link #getConfiguredBufferSize()} bytes past the minimum
+   * required to satisfy read requests in order to achieve better performance.
+   * If the buffering is not desired, use
+   * {@link #directBinaryDecoder(InputStream, BinaryDecoder)}.
+   * <p/>
+   * {@link BinaryDecoder#inputStream()} provides a view on the data that is
+   * buffer-aware, for users that need to interleave access to data
+   * with the Decoder API.
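+   * <p/>
+   * A sketch of the typical reuse pattern (names here are illustrative):
+   * <pre>
+   * BinaryDecoder d = null;  // kept across calls for reuse
+   * d = DecoderFactory.get().binaryDecoder(in, d);
+   * boolean flag = d.readBoolean();
+   * </pre>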
+   * 
+   * @param in
+   *          The InputStream to initialize to
+   * @param reuse
+   *          The BinaryDecoder to <i>attempt</i> to reuse given the factory
+   *          configuration. A BinaryDecoder implementation may not be
+   *          compatible with reuse, causing a new instance to be returned. If
+   *          null, a new instance is returned.
+   * @return A BinaryDecoder that uses <i>in</i> as its source of data. If
+   *         <i>reuse</i> is null, this will be a new instance. If <i>reuse</i>
+   *         is not null, then it may be reinitialized if compatible, otherwise
+   *         a new instance will be returned.
+   * @see BinaryDecoder
+   * @see Decoder
+   */
+  public BinaryDecoder binaryDecoder(InputStream in, BinaryDecoder reuse) {
+    if (null == reuse || !reuse.getClass().equals(BinaryDecoder.class)) {
+      return new BinaryDecoder(in, binaryDecoderBufferSize);
+    } else {
+      return ((BinaryDecoder)reuse).configure(in, binaryDecoderBufferSize);
+    }
+  }
+  
+  /**
+   * Creates or reinitializes a {@link BinaryDecoder} with the input stream
+   * provided as the source of data. If <i>reuse</i> is provided, it will be
+   * reinitialized to the given input stream.
+   * <p/>
+   * {@link BinaryDecoder} instances returned by this method do not buffer their input.
+   * In most cases a buffering BinaryDecoder is sufficient in combination with
+   * {@link BinaryDecoder#inputStream()} which provides a buffer-aware view on
+   * the data.
+   * <p/>
+   * A "direct" BinaryDecoder does not read ahead from an InputStream or other data source
+   * that cannot be rewound.  From the perspective of a client, a "direct" decoder
+   * must never read beyond the minimum necessary bytes to service a {@link BinaryDecoder}
+   * API read request.  
+   * <p/>
+   * In the case that the improved performance of a buffering implementation does not outweigh the
+   * inconvenience of its buffering semantics, a "direct" decoder can be
+   * used.
+   * @param in
+   *          The InputStream to initialize to
+   * @param reuse
+   *          The BinaryDecoder to <i>attempt</i> to reuse given the factory
+   *          configuration. A BinaryDecoder implementation may not be
+   *          compatible with reuse, causing a new instance to be returned. If
+   *          null, a new instance is returned.
+   * @return A BinaryDecoder that uses <i>in</i> as its source of data. If
+   *         <i>reuse</i> is null, this will be a new instance. If <i>reuse</i>
+   *         is not null, then it may be reinitialized if compatible, otherwise
+   *         a new instance will be returned.
+   * @see DirectBinaryDecoder
+   * @see Decoder
+   */
+  public BinaryDecoder directBinaryDecoder(InputStream in, BinaryDecoder reuse) {
+    if (null == reuse || !reuse.getClass().equals(DirectBinaryDecoder.class)) {
+      return new DirectBinaryDecoder(in);
+    } else {
+      return ((DirectBinaryDecoder)reuse).configure(in);
+    }
+  }
+
+  /** @deprecated use {@link #binaryDecoder(byte[], int, int, BinaryDecoder)}
+   * instead */
+  @Deprecated
+  public BinaryDecoder createBinaryDecoder(byte[] bytes, int offset,
+      int length, BinaryDecoder reuse) {
+    if (null == reuse || !reuse.getClass().equals(BinaryDecoder.class)) {
+      return new BinaryDecoder(bytes, offset, length);
+    } else {
+      return reuse.configure(bytes, offset, length);
+    }
+  }
+  
+  /**
+   * Creates or reinitializes a {@link BinaryDecoder} with the byte array
+   * provided as the source of data. If <i>reuse</i> is provided, it will
+   * attempt to reinitialize <i>reuse</i> to the new byte array. This instance
+   * will use the provided byte array as its buffer.
+   * <p/>
+   * {@link BinaryDecoder#inputStream()} provides a view on the data that is
+   * buffer-aware and can provide a view of the data not yet read by Decoder API
+   * methods.
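+   * <p/>
+   * For example, assuming <i>bytes</i> holds a single Avro-encoded int:
+   * <pre>
+   * BinaryDecoder d = DecoderFactory.get().binaryDecoder(bytes, 0, bytes.length, null);
+   * int n = d.readInt();
+   * </pre>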
+   * 
+   * @param bytes The byte array to initialize to
+   * @param offset The offset to start reading from
+   * @param length The maximum number of bytes to read from the byte array
+   * @param reuse The BinaryDecoder to attempt to reinitialize. if null a new
+   *          BinaryDecoder is created.
+   * @return A BinaryDecoder that uses <i>bytes</i> as its source of data. If
+   *         <i>reuse</i> is null, this will be a new instance. <i>reuse</i> may
+   *         be reinitialized if appropriate, otherwise a new instance is
+   *         returned. Clients must not assume that <i>reuse</i> is
+   *         reinitialized and returned.
+   */
+  public BinaryDecoder binaryDecoder(byte[] bytes, int offset,
+      int length, BinaryDecoder reuse) {
+    if (null == reuse || !reuse.getClass().equals(BinaryDecoder.class)) {
+      return new BinaryDecoder(bytes, offset, length);
+    } else {
+      return reuse.configure(bytes, offset, length);
+    }
+  }
+
+  /** @deprecated use {@link #binaryDecoder(byte[], BinaryDecoder)} instead */
+  @Deprecated
+  public BinaryDecoder createBinaryDecoder(byte[] bytes, BinaryDecoder reuse) {
+    return binaryDecoder(bytes, 0, bytes.length, reuse);
+  }
+  
+  /**
+   * This method is shorthand for
+   * <pre>
+   * binaryDecoder(bytes, 0, bytes.length, reuse);
+   * </pre>
+   * @see #binaryDecoder(byte[], int, int, BinaryDecoder)
+   */
+  public BinaryDecoder binaryDecoder(byte[] bytes, BinaryDecoder reuse) {
+    return binaryDecoder(bytes, 0, bytes.length, reuse);
+  }
+
+  /**
+   * Creates a {@link JsonDecoder} using the InputStream provided for reading
+   * data that conforms to the Schema provided.
+   * <p/>
+   * 
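+   * For example, to decode JSON-encoded data from standard input, with a
+   * <i>schema</i> assumed to be defined elsewhere:
+   * <pre>
+   * JsonDecoder d = DecoderFactory.get().jsonDecoder(schema, System.in);
+   * </pre>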
+   * @param schema
+   *          The Schema for data read from this JsonDecoder. Cannot be null.
+   * @param input
+   *          The InputStream to read from. Cannot be null.
+   * @return A JsonDecoder configured with <i>input</i> and <i>schema</i>
+   * @throws IOException
+   */
+  public JsonDecoder jsonDecoder(Schema schema, InputStream input)
+      throws IOException {
+    return new JsonDecoder(schema, input);
+  }
+  
+  /**
+   * Creates a {@link JsonDecoder} using the String provided for reading data
+   * that conforms to the Schema provided.
+   * <p/>
+   * 
+   * @param schema
+   *          The Schema for data read from this JsonDecoder. Cannot be null.
+   * @param input
+   *          The String to read from. Cannot be null.
+   * @return A JsonDecoder configured with <i>input</i> and <i>schema</i>
+   * @throws IOException
+   */
+  public JsonDecoder jsonDecoder(Schema schema, String input)
+      throws IOException {
+    return new JsonDecoder(schema, input);
+  }
+
+  /**
+   * Creates a {@link ValidatingDecoder} wrapping the Decoder provided. This
+   * ValidatingDecoder will ensure that operations against it conform to the
+   * schema provided.
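+   * <p/>
+   * A sketch of typical use, with <i>schema</i> and <i>data</i> assumed to
+   * be defined elsewhere:
+   * <pre>
+   * Decoder d = DecoderFactory.get().binaryDecoder(data, null);
+   * ValidatingDecoder v = DecoderFactory.get().validatingDecoder(schema, d);
+   * long x = v.readLong(); // AvroTypeException if the schema expects another type
+   * </pre>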
+   * 
+   * @param schema
+   *          The Schema to validate against. Cannot be null.
+   * @param wrapped
+   *          The Decoder to wrap.
+   * @return A ValidatingDecoder configured with <i>wrapped</i> and
+   *         <i>schema</i>
+   * @throws IOException
+   */
+  public ValidatingDecoder validatingDecoder(Schema schema, Decoder wrapped)
+      throws IOException {
+    return new ValidatingDecoder(schema, wrapped);
+  }
+
+  /**
+   * Creates a {@link ResolvingDecoder} wrapping the Decoder provided. This
+   * ResolvingDecoder will resolve input conforming to the <i>writer</i> schema
+   * from the wrapped Decoder, and present it as the <i>reader</i> schema.
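+   * <p/>
+   * For example, to read data written with <i>writer</i> as <i>reader</i>
+   * (schemas and <i>data</i> assumed to be defined elsewhere):
+   * <pre>
+   * Decoder d = DecoderFactory.get().binaryDecoder(data, null);
+   * ResolvingDecoder r = DecoderFactory.get().resolvingDecoder(writer, reader, d);
+   * </pre>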
+   * 
+   * @param writer
+   *          The Schema that the source data is in. Cannot be null.
+   * @param reader
+   *          The Schema that the reader wishes to read the data as. Cannot be
+   *          null.
+   * @param wrapped
+   *          The Decoder to wrap.
+   * @return A ResolvingDecoder configured to resolve <i>writer</i> to
+   *         <i>reader</i> from <i>in</i>
+   * @throws IOException
+   */
+  public ResolvingDecoder resolvingDecoder(Schema writer, Schema reader,
+      Decoder wrapped) throws IOException {
+    return new ResolvingDecoder(writer, reader, wrapped);
+  }
+  
+  private static class DefaultDecoderFactory extends DecoderFactory {
+    @Override
+    public DecoderFactory configureDecoderBufferSize(int bufferSize) {
+      throw new IllegalArgumentException("This Factory instance is Immutable");
+    }
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryDecoder.java
new file mode 100644
index 0000000..5bc1760
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryDecoder.java
@@ -0,0 +1,204 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.util.ByteBufferInputStream;
+
+
+/**
+ *  A non-buffering version of {@link BinaryDecoder}.
+ *  <p/>
+ *  This implementation will not read-ahead from the provided InputStream
+ *  beyond the minimum required to service the API requests.
+ *
+ *  @see Encoder
+ */
+
+class DirectBinaryDecoder extends BinaryDecoder {
+  private InputStream in;
+
+  private class ByteReader {
+    public ByteBuffer read(ByteBuffer old, int length) throws IOException {
+      ByteBuffer result;
+      if (old != null && length <= old.capacity()) {
+        result = old;
+        result.clear();
+      } else {
+        result = ByteBuffer.allocate(length);
+      }
+      doReadBytes(result.array(), result.position(), length);
+      result.limit(length);
+      return result;
+    }
+  }
+
+  private class ReuseByteReader extends ByteReader {
+    private final ByteBufferInputStream bbi;
+    
+    public ReuseByteReader(ByteBufferInputStream bbi) {
+      this.bbi = bbi;
+    }
+    
+    @Override
+    public ByteBuffer read(ByteBuffer old, int length) throws IOException {
+      if (old != null) {
+        return super.read(old, length);
+      } else {
+        return bbi.readBuffer(length);
+      }
+    }
+    
+  }
+
+  private ByteReader byteReader;
+
+  DirectBinaryDecoder(InputStream in) {
+    super();
+    configure(in);
+  }
+
+  DirectBinaryDecoder configure(InputStream in) {
+    this.in = in;
+    byteReader = (in instanceof ByteBufferInputStream) ?
+            new ReuseByteReader((ByteBufferInputStream) in) : new ByteReader();
+    return this;
+  }
+
+  @Override
+  public boolean readBoolean() throws IOException {
+    int n = in.read();
+    if (n < 0) {
+      throw new EOFException();
+    }
+    return n == 1;
+  }
+
+  @Override
+  public int readInt() throws IOException {
+    int n = 0;
+    int b;
+    int shift = 0;
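+    // Avro ints are zig-zag encoded varints: each byte carries 7 payload
+    // bits, with the high bit set on every byte except the last.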
+    do {
+      b = in.read();
+      if (b >= 0) {
+         n |= (b & 0x7F) << shift;
+         if ((b & 0x80) == 0) {
+           return (n >>> 1) ^ -(n & 1); // back to two's-complement
+         }
+      } else {
+        throw new EOFException();
+      }
+      shift += 7;
+    } while (shift < 32);
+    throw new IOException("Invalid int encoding");
+    
+  }
+
+  @Override
+  public long readLong() throws IOException {
+    long n = 0;
+    int b;
+    int shift = 0;
+    do { 
+      b = in.read();
+      if (b >= 0) {
+         n |= (b & 0x7FL) << shift;
+         if ((b & 0x80) == 0) {
+           return (n >>> 1) ^ -(n & 1); // back to two's-complement
+         }
+      } else {
+        throw new EOFException();
+      }
+      shift += 7;
+    } while (shift < 64);
+    throw new IOException("Invalid long encoding");
+  }
+
+  private final byte[] buf = new byte[8];
+
+  @Override
+  public float readFloat() throws IOException {
+    doReadBytes(buf, 0, 4);
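+    // assemble the 4 bytes little-endian, per the Avro binary spec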
+    int n = (((int) buf[0]) & 0xff)
+      |  ((((int) buf[1]) & 0xff) << 8)
+      |  ((((int) buf[2]) & 0xff) << 16)
+      |  ((((int) buf[3]) & 0xff) << 24);
+    return Float.intBitsToFloat(n);
+  }
+
+  @Override
+  public double readDouble() throws IOException {
+    doReadBytes(buf, 0, 8);
+    long n = (((long) buf[0]) & 0xff)
+      |  ((((long) buf[1]) & 0xff) << 8)
+      |  ((((long) buf[2]) & 0xff) << 16)
+      |  ((((long) buf[3]) & 0xff) << 24)
+      |  ((((long) buf[4]) & 0xff) << 32)
+      |  ((((long) buf[5]) & 0xff) << 40)
+      |  ((((long) buf[6]) & 0xff) << 48)
+      |  ((((long) buf[7]) & 0xff) << 56);
+    return Double.longBitsToDouble(n);
+  }
+
+  @Override
+  public ByteBuffer readBytes(ByteBuffer old) throws IOException {
+    int length = readInt();
+    return byteReader.read(old, length);
+  }
+
+  @Override
+  protected void doSkipBytes(long length) throws IOException {
+    while (length > 0) {
+      long n = in.skip(length);
+      if (n <= 0) {
+        throw new EOFException();
+      }
+      length -= n;
+    }
+  }
+
+  @Override
+  protected void doReadBytes(byte[] bytes, int start, int length)
+    throws IOException {
+    for (; ;) {
+      int n = in.read(bytes, start, length);
+      if (n == length || length == 0) {
+        return;
+      } else if (n < 0) {
+        throw new EOFException();
+      }
+      start += n;
+      length -= n;
+    }
+  }
+
+  @Override
+  public InputStream inputStream() {
+    return in;
+  }
+
+  @Override
+  public boolean isEnd() throws IOException {
+    throw new UnsupportedOperationException();
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryEncoder.java
new file mode 100644
index 0000000..1126742
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryEncoder.java
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+/**
+ * An {@link Encoder} for Avro's binary encoding that does not buffer output.
+ * <p/>
+ * This encoder does not buffer writes, and as a result is slower than
+ * {@link BufferedBinaryEncoder}. However, it is lighter-weight and useful when the
+ * buffering in BufferedBinaryEncoder is not desired and/or the Encoder is
+ * very short-lived.
+ * <p/>
+ * To construct, use
+ * {@link EncoderFactory#directBinaryEncoder(OutputStream, BinaryEncoder)}
+ *  <p/>
+ * DirectBinaryEncoder is not thread-safe.
+ * @see BinaryEncoder
+ * @see EncoderFactory
+ * @see Encoder
+ * @see Decoder
+ */
+public class DirectBinaryEncoder extends BinaryEncoder {
+  private OutputStream out;
+  // the buffer is used for writing floats, doubles, and large ints and longs.
+  private final byte[] buf = new byte[12];
+
+  /** Create a writer that sends its output to the underlying stream
+   *  <code>out</code>. 
+   **/
+  DirectBinaryEncoder(OutputStream out) {
+    configure(out);
+  }
+
+  DirectBinaryEncoder configure(OutputStream out) {
+    if (null == out) throw new NullPointerException("OutputStream cannot be null!");
+    this.out = out;
+    return this;
+  }
+
+  @Override
+  public void flush() throws IOException {
+    out.flush();
+  }
+
+  @Override
+  public void writeBoolean(boolean b) throws IOException {
+    out.write(b ? 1 : 0);
+  }
+
+  /* buffering is slower for ints that encode to just one or
+   * two bytes, and faster for large ones.
+   * (Sun JRE 1.6u22, x64 -server) */
+  @Override
+  public void writeInt(int n) throws IOException {
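+    // zig-zag encode: move sign to low-order bit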
+    int val = (n << 1) ^ (n >> 31);
+    if ((val & ~0x7F) == 0) {
+      out.write(val);
+      return;
+    } else if ((val & ~0x3FFF) == 0) {
+      out.write(0x80 | val);
+      out.write(val >>> 7);
+      return;
+    }
+    int len = BinaryData.encodeInt(n, buf, 0);
+    out.write(buf, 0, len);
+  }
+
+  /* buffering is slower for writeLong when the number is small enough to
+   * fit in an int. 
+   * (Sun JRE 1.6u22, x64 -server) */
+  @Override
+  public void writeLong(long n) throws IOException {
+    long val = (n << 1) ^ (n >> 63); // move sign to low-order bit
+    if ((val & ~0x7FFFFFFFL) == 0) {
+      int i = (int) val;
+      while ((i & ~0x7F) != 0) {
+        out.write((byte)((0x80 | i) & 0xFF));
+        i >>>= 7;
+      }
+      out.write((byte)i);
+      return;
+    }
+    int len = BinaryData.encodeLong(n, buf, 0);
+    out.write(buf, 0, len);
+  }
+  
+  @Override
+  public void writeFloat(float f) throws IOException {
+    int len = BinaryData.encodeFloat(f, buf, 0);
+    out.write(buf, 0, len);
+  }
+
+  @Override
+  public void writeDouble(double d) throws IOException {
+    int len = BinaryData.encodeDouble(d, buf, 0);
+    out.write(buf, 0, len);
+  }
+
+  @Override
+  public void writeFixed(byte[] bytes, int start, int len) throws IOException {
+    out.write(bytes, start, len);
+  }
+
+  @Override
+  protected void writeZero() throws IOException {
+    out.write(0);
+  }
+  
+  @Override
+  public int bytesBuffered() {
+    return 0;
+  }
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/Encoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/Encoder.java
new file mode 100644
index 0000000..c3647c0
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/Encoder.java
@@ -0,0 +1,299 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.Flushable;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.util.Utf8;
+
+/**
+ * Low-level support for serializing Avro values.
+ * <p/>
+ * This class has two types of methods.  One type of method supports
+ * the writing of leaf values (for example, {@link #writeLong} and
+ * {@link #writeString}).  These methods have analogs in {@link
+ * Decoder}.
+ * <p/>
+ * The other type of methods support the writing of maps and arrays.
+ * These methods are {@link #writeArrayStart}, {@link
+ * #startItem}, and {@link #writeArrayEnd} (and similar methods for
+ * maps).  Some implementations of {@link Encoder} handle the
+ * buffering required to break large maps and arrays into blocks,
+ * which is necessary for applications that want to do streaming.
+ * (See {@link #writeArrayStart} for details on these methods.)
+ * <p/>
+ * {@link EncoderFactory} contains Encoder construction and configuration
+ * facilities.  
+ *  @see EncoderFactory
+ *  @see Decoder
+ */
+public abstract class Encoder implements Flushable {
+
+  /**
+   * "Writes" a null value.  (Doesn't actually write anything, but
+   * advances the state of the parser if this class is stateful.)
+   * @throws AvroTypeException If this is a stateful writer and a
+   *         null is not expected
+   */
+  public abstract void writeNull() throws IOException;
+  
+  /**
+   * Write a boolean value.
+   * @throws AvroTypeException If this is a stateful writer and a
+   * boolean is not expected
+   */
+  public abstract void writeBoolean(boolean b) throws IOException;
+
+  /**
+   * Writes a 32-bit integer.
+   * @throws AvroTypeException If this is a stateful writer and an
+   * integer is not expected
+   */
+  public abstract void writeInt(int n) throws IOException;
+
+  /**
+   * Write a 64-bit integer.
+   * @throws AvroTypeException If this is a stateful writer and a
+   * long is not expected
+   */
+  public abstract void writeLong(long n) throws IOException;
+  
+  /** Write a float.
+   * @throws IOException 
+   * @throws AvroTypeException If this is a stateful writer and a
+   * float is not expected
+   */
+  public abstract void writeFloat(float f) throws IOException;
+
+  /**
+   * Write a double.
+   * @throws AvroTypeException If this is a stateful writer and a
+   * double is not expected
+   */
+  public abstract void writeDouble(double d) throws IOException;
+
+  /**
+   * Write a Unicode character string.
+   * @throws AvroTypeException If this is a stateful writer and a
+   * char-string is not expected
+   */
+  public abstract void writeString(Utf8 utf8) throws IOException;
+
+  /**
+   * Write a Unicode character string.  The default implementation converts
+   * the String to a {@link org.apache.avro.util.Utf8}.  Some Encoder 
+   * implementations may want to do something different as a performance optimization.
+   * @throws AvroTypeException If this is a stateful writer and a
+   * char-string is not expected
+   */
+  public void writeString(String str) throws IOException {
+    writeString(new Utf8(str));
+  }
+
+  /**
+   * Write a Unicode character string.  If the CharSequence is an
+   * {@link org.apache.avro.util.Utf8} it writes this directly, otherwise
+   * the CharSequence is converted to a String via toString() and written.
+   * @throws AvroTypeException If this is a stateful writer and a
+   * char-string is not expected
+   */
+  public void writeString(CharSequence charSequence) throws IOException {
+    if (charSequence instanceof Utf8)
+      writeString((Utf8)charSequence);
+    else
+      writeString(charSequence.toString());
+  }
+  
+  /**
+   * Write a byte string.
+   * @throws AvroTypeException If this is a stateful writer and a
+   *         byte-string is not expected
+   */
+  public abstract void writeBytes(ByteBuffer bytes) throws IOException;
+  
+  /**
+   * Write a byte string.
+   * @throws AvroTypeException If this is a stateful writer and a
+   * byte-string is not expected
+   */
+  public abstract void writeBytes(byte[] bytes, int start, int len) throws IOException;
+  
+  /**
+   * Writes a byte string.
+   * Equivalent to <tt>writeBytes(bytes, 0, bytes.length)</tt>
+   * @throws IOException 
+   * @throws AvroTypeException If this is a stateful writer and a
+   * byte-string is not expected
+   */
+  public void writeBytes(byte[] bytes) throws IOException {
+    writeBytes(bytes, 0, bytes.length);
+  }
+
+  /**
+   * Writes a fixed size binary object.
+   * @param bytes The contents to write
+   * @param start The position within <tt>bytes</tt> where the contents
+   * start.
+   * @param len The number of bytes to write.
+   * @throws AvroTypeException If this is a stateful writer and a
+   * byte-string is not expected
+   * @throws IOException
+   */
+  public abstract void writeFixed(byte[] bytes, int start, int len) throws IOException;
+
+  /**
+   * A shorthand for <tt>writeFixed(bytes, 0, bytes.length)</tt>
+   * @param bytes
+   */
+  public void writeFixed(byte[] bytes) throws IOException {
+    writeFixed(bytes, 0, bytes.length);
+  }
+  
+  /** Writes fixed-size binary data from a ByteBuffer. */
+  public void writeFixed(ByteBuffer bytes) throws IOException {
+    int pos = bytes.position();
+    int len = bytes.limit() - pos;
+    if (bytes.hasArray()) {
+      writeFixed(bytes.array(), bytes.arrayOffset() + pos, len);
+    } else {
+      byte[] b = new byte[len];
+      bytes.duplicate().get(b, 0, len);
+      writeFixed(b, 0, len);
+    }
+  }
+
+  /**
+   * Writes an enumeration.
+   * @param e
+   * @throws AvroTypeException If this is a stateful writer and an enumeration
+   * is not expected or the <tt>e</tt> is out of range.
+   * @throws IOException
+   */
+  public abstract void writeEnum(int e) throws IOException;
+
+  /** Call this method to start writing an array.
+   *
+   *  When starting to serialize an array, call {@link
+   *  #writeArrayStart}. Then, before writing any data for any item
+   *  call {@link #setItemCount} followed by a sequence of
+   *  {@link #startItem()} and the item itself. The number of
+   *  {@link #startItem()} calls should match the number specified in
+   *  {@link #setItemCount}.
+   *  When actually writing the data of the item, you can call any {@link
+   *  Encoder} method (e.g., {@link #writeLong}).  When all items
+   *  of the array have been written, call {@link #writeArrayEnd}.
+   *
+   *  As an example, let's say you want to write an array of records,
+   *  the record consisting of a long field and a boolean field.
+   *  Your code would look something like this:
+   *  <pre>
+   *  out.writeArrayStart();
+   *  out.setItemCount(list.size());
+   *  for (Record r : list) {
+   *    out.startItem();
+   *    out.writeLong(r.longField);
+   *    out.writeBoolean(r.boolField);
+   *  }
+   *  out.writeArrayEnd();
+   *  </pre>
+   *  @throws AvroTypeException If this is a stateful writer and an
+   *          array is not expected
+   */
+  public abstract void writeArrayStart() throws IOException;
+
+  /**
+   * Call this method before writing a batch of items in an array or a map.
+   * Then for each item, call {@link #startItem()} followed by any of the
+   * other write methods of {@link Encoder}. The number of calls
+   * to {@link #startItem()} must be equal to the count specified
+   * in {@link #setItemCount}. Once a batch is completed you
+   * can start another batch with {@link #setItemCount}.
+   * 
+   * @param itemCount The number of {@link #startItem()} calls to follow.
+   * @throws IOException
+   */
+  public abstract void setItemCount(long itemCount) throws IOException;
+  
+  /**
+   * Start a new item of an array or map.
+   * See {@link #writeArrayStart} for usage information.
+   * @throws AvroTypeException If called outside of an array or map context
+   */
+  public abstract void startItem() throws IOException;
+
+  /**
+   * Call this method to finish writing an array.
+   * See {@link #writeArrayStart} for usage information.
+   *
+   * @throws AvroTypeException If items written does not match count
+   *          provided to {@link #writeArrayStart}
+   * @throws AvroTypeException If not currently inside an array
+   */
+  public abstract void writeArrayEnd() throws IOException;
+
+  /**
+   * Call this to start a new map.  See
+   * {@link #writeArrayStart} for details on usage.
+   *
+   * As an example of usage, let's say you want to write a map of
+   * records, the record consisting of a long field and a boolean
+   * field.  Your code would look something like this:
+   * <pre>
+   * out.writeMapStart();
+   * out.setItemCount(map.size());
+   * for (Map.Entry<String,Record> entry : map.entrySet()) {
+   *   out.startItem();
+   *   out.writeString(entry.getKey());
+   *   out.writeLong(entry.getValue().longField);
+   *   out.writeBoolean(entry.getValue().boolField);
+   * }
+   * out.writeMapEnd();
+   * </pre>
+   * @throws AvroTypeException If this is a stateful writer and a
+   * map is not expected
+   */
+  public abstract void writeMapStart() throws IOException;
+
+  /**
+   * Call this method to terminate the inner-most, currently-opened
+   * map.  See {@link #writeArrayStart} for more details.
+   *
+   * @throws AvroTypeException If items written does not match count
+   *          provided to {@link #writeMapStart}
+   * @throws AvroTypeException If not currently inside a map
+   */
+  public abstract void writeMapEnd() throws IOException;
+
+  /** Call this method to write the tag of a union.
+   *
+   * As an example of usage, let's say you want to write a union,
+   * whose second branch is a record consisting of a long field and
+   * a Boolean field.  Your code would look something like this:
+   * <pre>
+   * out.writeIndex(1);
+   * out.writeLong(record.longField);
+   * out.writeBoolean(record.boolField);
+   * </pre>
+   * @throws AvroTypeException If this is a stateful writer and a
+   * union is not expected
+   */
+  public abstract void writeIndex(int unionIndex) throws IOException;
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/EncoderFactory.java b/lang/java/avro/src/main/java/org/apache/avro/io/EncoderFactory.java
new file mode 100644
index 0000000..679daa7
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/EncoderFactory.java
@@ -0,0 +1,365 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Schema;
+import org.codehaus.jackson.JsonGenerator;
+
+/**
+ * A factory for creating and configuring {@link Encoder} instances.
+ * <p/>
+ * Factory methods that create Encoder instances are thread-safe.
+ * Multiple instances with different configurations can be cached
+ * by an application.
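+ * <p/>
+ * A minimal usage sketch (<i>out</i> here is any OutputStream):
+ * <pre>
+ * BinaryEncoder e = EncoderFactory.get().binaryEncoder(out, null);
+ * e.writeLong(42L);
+ * e.flush();
+ * </pre>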
+ * 
+ * @see Encoder
+ * @see BinaryEncoder
+ * @see JsonEncoder
+ * @see ValidatingEncoder
+ * @see BufferedBinaryEncoder
+ * @see BlockingBinaryEncoder
+ * @see DirectBinaryEncoder
+ */
+
+public class EncoderFactory {
+  private static final int DEFAULT_BUFFER_SIZE = 2048;
+  private static final int DEFAULT_BLOCK_BUFFER_SIZE = 64 * 1024;
+  private static final int MIN_BLOCK_BUFFER_SIZE = 64;
+  private static final int MAX_BLOCK_BUFFER_SIZE = 1024 * 1024 * 1024;
+
+  private static final EncoderFactory DEFAULT_FACTORY = 
+    new DefaultEncoderFactory();
+  
+  protected int binaryBufferSize = DEFAULT_BUFFER_SIZE;
+  protected int binaryBlockSize = DEFAULT_BLOCK_BUFFER_SIZE;
+
+  /**
+   * Returns an immutable static EncoderFactory with default configuration.
+   * All configuration methods throw AvroRuntimeExceptions if called.
+   */
+  public static EncoderFactory get() {
+    return DEFAULT_FACTORY;
+  }
+  
+  /**
+   * Configures this factory to use the specified buffer size when creating
+   * Encoder instances that buffer their output. The default buffer size is 2048
+   * bytes.
+   * 
+   * @param size
+   *          The buffer size to configure new instances with. Valid values are
+   *          in the range [32, 16*1024*1024]. Values outside this range are set
+   *          to the nearest value in the range. Values less than 256 will limit
+   *          performance but will consume less memory if the BinaryEncoder is
+   *          short-lived, values greater than 8*1024 are not likely to improve
+   *          performance but may be useful for the downstream OutputStream.
+   * @return This factory, to enable method chaining:
+   * <pre>
+   * EncoderFactory factory = new EncoderFactory().configureBufferSize(4096);
+   * </pre>
+   * @see #binaryEncoder(OutputStream, BinaryEncoder)
+   */
+  public EncoderFactory configureBufferSize(int size) {
+    if (size < 32)
+      size = 32;
+    if (size > 16 * 1024 * 1024)
+      size = 16 * 1024 * 1024;
+    this.binaryBufferSize = size;
+    return this;
+  }
+  
+  /**
+   * Returns this factory's configured default buffer size.  Used when creating
+   * Encoder instances that buffer writes.
+   * @see #configureBufferSize(int)
+   * @see #binaryEncoder(OutputStream, BinaryEncoder)
+   * @return The preferred buffer size, in bytes.
+   */
+  public int getBufferSize() {
+    return this.binaryBufferSize;
+  }
+
+  /**
+   * Configures this factory to construct blocking BinaryEncoders with the
+   * specified block buffer size. The default buffer size is 64 * 1024 bytes.
+   * 
+   * @param size
+   *          The preferred block size for array blocking. Arrays larger than
+   *          this size will be segmented into blocks according to the Avro
+   *          spec. Valid values are in the range [64, 1024*1024*1024] Values
+   *          outside this range are set to the nearest value in the range. The
+   *          encoder will require at least this amount of memory.
+   * @return This factory, to enable method chaining:
+   * <pre>
+   * EncoderFactory factory = new EncoderFactory().configureBlockSize(8000);
+   * </pre>
+   * @see #blockingBinaryEncoder(OutputStream, BinaryEncoder)
+   */
+  public EncoderFactory configureBlockSize(int size) {
+    if (size < MIN_BLOCK_BUFFER_SIZE) 
+      size = MIN_BLOCK_BUFFER_SIZE;
+    if (size > MAX_BLOCK_BUFFER_SIZE)
+      size = MAX_BLOCK_BUFFER_SIZE;
+    this.binaryBlockSize = size;
+    return this;
+  }
+
+  /**
+   * Returns this factory's configured default block buffer size.  
+   * {@link BinaryEncoder} instances created with
+   * {@link #blockingBinaryEncoder(OutputStream, BinaryEncoder)}
+   * will have block buffers of this size.
+   * <p/>
+   * @see #configureBlockSize(int)
+   * @see #blockingBinaryEncoder(OutputStream, BinaryEncoder)
+   * @return The preferred block size, in bytes.
+   */
+  public int getBlockSize() {
+    return this.binaryBlockSize;
+  }
+  
+  /**
+   * Creates or reinitializes a {@link BinaryEncoder} with the OutputStream
+   * provided as the destination for written data. If <i>reuse</i> is provided,
+   * an attempt will be made to reconfigure <i>reuse</i> rather than construct a
+   * new instance, but this is not guaranteed; a new instance may be returned.
+   * <p/>
+   * The {@link BinaryEncoder} implementation returned may buffer its output.
+   * Data may not appear on the underlying OutputStream until
+   * {@link Encoder#flush()} is called.  The buffer size is configured with
+   * {@link #configureBufferSize(int)}.
+   * <p/>
+   * If buffering is not desired, and lower performance is acceptable, use
+   * {@link #directBinaryEncoder(OutputStream, BinaryEncoder)}
+   * <p/>
+   * {@link BinaryEncoder} instances returned by this method are not thread-safe.
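+   * <p/>
+   * A sketch of the common reuse pattern (names here are illustrative):
+   * <pre>
+   * BinaryEncoder e = null;  // reused across invocations
+   * e = EncoderFactory.get().binaryEncoder(out, e);
+   * e.writeInt(1);
+   * e.flush();
+   * </pre>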
+   * 
+   * @param out
+   *          The OutputStream to write to.  Cannot be null.
+   * @param reuse
+   *          The BinaryEncoder to <i>attempt</i> to reuse given the factory
+   *          configuration. A BinaryEncoder implementation may not be
+   *          compatible with reuse, causing a new instance to be returned.
+   *          If null, a new instance is returned.
+   * @return A BinaryEncoder that uses <i>out</i> as its data output. If
+   *         <i>reuse</i> is null, this will be a new instance. If <i>reuse</i>
+   *         is not null, then the returned instance may be a new instance or
+   *         <i>reuse</i> reconfigured to use <i>out</i>.
+   * @throws IOException 
+   * @see BufferedBinaryEncoder
+   * @see Encoder
+   */
+  public BinaryEncoder binaryEncoder(OutputStream out, BinaryEncoder reuse) {
+    if (null == reuse || !reuse.getClass().equals(BufferedBinaryEncoder.class)) {
+      return new BufferedBinaryEncoder(out, this.binaryBufferSize);
+    }  else {
+      return ((BufferedBinaryEncoder)reuse).configure(out, this.binaryBufferSize);
+    }
+  }
+
+  /**
+   * Creates or reinitializes a {@link BinaryEncoder} with the OutputStream
+   * provided as the destination for written data. If <i>reuse</i> is provided,
+   * an attempt will be made to reconfigure <i>reuse</i> rather than construct a
+   * new instance, but this is not guaranteed; a new instance may be returned.
+   * <p/>
+   * The {@link BinaryEncoder} implementation returned does not buffer its
+   * output, calling {@link Encoder#flush()} will simply cause the wrapped
+   * OutputStream to be flushed.
+   * <p/>
+   * Performance of unbuffered writes can be significantly slower than buffered
+   * writes.  {@link #binaryEncoder(OutputStream, BinaryEncoder)} returns
+   * BinaryEncoder instances that are tuned for performance but may buffer output.
+   * The unbuffered, 'direct' encoder may be desired when buffering semantics are
+   * problematic, or if the lifetime of the encoder is so short that the buffer
+   * would not be useful.
+   * <p/>
+   * {@link BinaryEncoder} instances returned by this method are not thread-safe.
+   * 
+   * @param out
+   *          The OutputStream to initialize to. Cannot be null.
+   * @param reuse
+   *          The BinaryEncoder to <i>attempt</i> to reuse given the factory
+   *          configuration. A BinaryEncoder implementation may not be
+   *          compatible with reuse, causing a new instance to be returned. If
+   *          null, a new instance is returned.
+   * @return A BinaryEncoder that uses <i>out</i> as its data output. If
+   *         <i>reuse</i> is null, this will be a new instance. If <i>reuse</i>
+   *         is not null, then the returned instance may be a new instance or
+   *         <i>reuse</i> reconfigured to use <i>out</i>.
+   * @see DirectBinaryEncoder
+   * @see Encoder
+   */
+  public BinaryEncoder directBinaryEncoder(OutputStream out, BinaryEncoder reuse) {
+    if (null == reuse || !reuse.getClass().equals(DirectBinaryEncoder.class)) {
+      return new DirectBinaryEncoder(out);
+    } else {
+      return ((DirectBinaryEncoder)reuse).configure(out);
+    }
+  }
+  
+  /**
+   * Creates or reinitializes a {@link BinaryEncoder} with the OutputStream
+   * provided as the destination for written data. If <i>reuse</i> is provided,
+   * an attempt will be made to reconfigure <i>reuse</i> rather than construct a
+   * new instance, but this is not guaranteed; a new instance may be returned.
+   * <p/>
+   * The {@link BinaryEncoder} implementation returned buffers its output,
+   * calling {@link Encoder#flush()} is required for output to appear on the underlying
+   * OutputStream.
+   * <p/>
+   * The returned BinaryEncoder implements the Avro binary encoding using blocks
+   * delimited with byte sizes for Arrays and Maps.  This allows for some decoders
+   * to skip over large Arrays or Maps without decoding the contents, but adds
+   * some overhead.  The default block size is configured with
+   * {@link #configureBlockSize(int)} 
+   * <p/>
+   * {@link BinaryEncoder} instances returned by this method are not thread-safe.
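+   * <p/>
+   * A sketch of writing a large array with blocking (names here are
+   * illustrative; <i>items</i> is assumed to be a List&lt;Long&gt;):
+   * <pre>
+   * BinaryEncoder e = EncoderFactory.get().blockingBinaryEncoder(out, null);
+   * e.writeArrayStart();
+   * e.setItemCount(items.size());
+   * for (long item : items) { e.startItem(); e.writeLong(item); }
+   * e.writeArrayEnd();
+   * e.flush();
+   * </pre>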
+   * 
+   * @param out
+   *          The OutputStream to initialize to. Cannot be null.
+   * @param reuse
+   *          The BinaryEncoder to <i>attempt</i> to reuse given the factory
+   *          configuration. A BinaryEncoder implementation may not be
+   *          compatible with reuse, causing a new instance to be returned. If
+   *          null, a new instance is returned.
+   * @return A BinaryEncoder that uses <i>out</i> as its data output. If
+   *         <i>reuse</i> is null, this will be a new instance. If <i>reuse</i>
+   *         is not null, then the returned instance may be a new instance or
+   *         <i>reuse</i> reconfigured to use <i>out</i>.
+   * @throws IOException
+   * @see BlockingBinaryEncoder
+   * @see Encoder
+   */
+  public BinaryEncoder blockingBinaryEncoder(OutputStream out,
+      BinaryEncoder reuse) {
+    int blockSize = this.binaryBlockSize;
+    int bufferSize = (blockSize * 2 >= this.binaryBufferSize) ? 32
+        : this.binaryBufferSize;
+    if (null == reuse || !reuse.getClass().equals(BlockingBinaryEncoder.class)) {
+      return new BlockingBinaryEncoder(out, blockSize, bufferSize);
+    } else {
+      return ((BlockingBinaryEncoder) reuse).configure(out, blockSize, bufferSize);
+    }
+  }
+
+  /**
+   * Creates a {@link JsonEncoder} using the OutputStream provided for writing
+   * data conforming to the Schema provided.
+   * <p/>
+   * {@link JsonEncoder} buffers its output. Data may not appear on the
+   * underlying OutputStream until {@link Encoder#flush()} is called.
+   * <p/>
+   * {@link JsonEncoder} is not thread-safe.
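+   * <p/>
+   * For example, with a <i>schema</i> assumed to be defined elsewhere:
+   * <pre>
+   * JsonEncoder e = EncoderFactory.get().jsonEncoder(schema, System.out);
+   * </pre>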
+   * 
+   * @param schema
+   *          The Schema for data written to this JsonEncoder. Cannot be null.
+   * @param out
+   *          The OutputStream to write to. Cannot be null.
+   * @return A JsonEncoder configured with <i>out</i> and <i>schema</i>
+   * @throws IOException
+   */
+  public JsonEncoder jsonEncoder(Schema schema, OutputStream out)
+      throws IOException {
+    return new JsonEncoder(schema, out);
+  }
+
+  /**
+   * Creates a {@link JsonEncoder} using the OutputStream provided for writing
+   * data conforming to the Schema provided with optional pretty printing.
+   * <p/>
+   * {@link JsonEncoder} buffers its output. Data may not appear on the
+   * underlying OutputStream until {@link Encoder#flush()} is called.
+   * <p/>
+   * {@link JsonEncoder} is not thread-safe.
+   * 
+   * @param schema
+   *          The Schema for data written to this JsonEncoder. Cannot be null.
+   * @param out
+   *          The OutputStream to write to. Cannot be null.
+   * @param pretty
+   *          Whether to pretty-print the JSON output.
+   * @return A JsonEncoder configured with <i>out</i>, <i>schema</i> and <i>pretty</i>
+   * @throws IOException
+   */
+  public JsonEncoder jsonEncoder(Schema schema, OutputStream out, boolean pretty)
+      throws IOException {
+    return new JsonEncoder(schema, out, pretty);
+  }
+
+  /**
+   * Creates a {@link JsonEncoder} using the {@link JsonGenerator} provided for
+   * output of data conforming to the Schema provided.
+   * <p/>
+   * {@link JsonEncoder} buffers its output. Data may not appear on the
+   * underlying output until {@link Encoder#flush()} is called.
+   * <p/>
+   * {@link JsonEncoder} is not thread-safe.
+   * 
+   * @param schema
+   *          The Schema for data written to this JsonEncoder. Cannot be null.
+   * @param gen
+   *          The JsonGenerator to write with. Cannot be null.
+   * @return A JsonEncoder configured with <i>gen</i> and <i>schema</i>
+   * @throws IOException
+   * @deprecated internal method
+   */
+  @Deprecated
+  public JsonEncoder jsonEncoder(Schema schema, JsonGenerator gen)
+      throws IOException {
+    return new JsonEncoder(schema, gen);
+  }
+  
+  /**
+   * Creates a {@link ValidatingEncoder} that wraps the Encoder provided.
+   * This ValidatingEncoder will ensure that operations against it conform
+   * to the schema provided.
+   * <p/>
+   * Many {@link Encoder}s buffer their output. Data may not appear on the
+   * underlying output until {@link Encoder#flush()} is called.
+   * <p/>
+   * {@link ValidatingEncoder} is not thread-safe.
+   * 
+   * @param schema
+   *          The Schema to validate operations against. Cannot be null.
+   * @param encoder
+   *          The Encoder to wrap.  Cannot be be null.
+   * @return A ValidatingEncoder configured to wrap <i>encoder</i> and validate
+   *  against <i>schema</i>
+   * @throws IOException
+   */
+  public ValidatingEncoder validatingEncoder(Schema schema, Encoder encoder)
+      throws IOException {
+    return new ValidatingEncoder(schema, encoder);
+  }
+  
+  // default encoder is not mutable
+  private static class DefaultEncoderFactory extends EncoderFactory {
+    @Override
+    public EncoderFactory configureBlockSize(int size) {
+      throw new AvroRuntimeException("Default EncoderFactory cannot be configured");
+    }
+    @Override
+    public EncoderFactory configureBufferSize(int size) {
+      throw new AvroRuntimeException("Default EncoderFactory cannot be configured");
+    }
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java
new file mode 100644
index 0000000..8206181
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/JsonDecoder.java
@@ -0,0 +1,702 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Stack;
+
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.Schema;
+import org.apache.avro.io.parsing.JsonGrammarGenerator;
+import org.apache.avro.io.parsing.Parser;
+import org.apache.avro.io.parsing.Symbol;
+import org.apache.avro.util.Utf8;
+import org.codehaus.jackson.Base64Variant;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonLocation;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonStreamContext;
+import org.codehaus.jackson.JsonToken;
+import org.codehaus.jackson.ObjectCodec;
+
+/** A {@link Decoder} for Avro's JSON data encoding.
+ * <p/>
+ * Construct using {@link DecoderFactory}.
+ * <p/>
+ * JsonDecoder is not thread-safe.
+ */
+public class JsonDecoder extends ParsingDecoder
+  implements Parser.ActionHandler {
+  private JsonParser in;
+  private static JsonFactory jsonFactory = new JsonFactory();
+  Stack<ReorderBuffer> reorderBuffers = new Stack<ReorderBuffer>();
+  ReorderBuffer currentReorderBuffer; 
+  
+  private static class ReorderBuffer {
+    public Map<String, List<JsonElement>> savedFields = new HashMap<String, List<JsonElement>>();
+    public JsonParser origParser = null; 
+  }
+  
+  static final String CHARSET = "ISO-8859-1";
+
+  private JsonDecoder(Symbol root, InputStream in) throws IOException {
+    super(root);
+    configure(in);
+  }
+  
+  private JsonDecoder(Symbol root, String in) throws IOException {
+    super(root);
+    configure(in);
+  }
+
+  JsonDecoder(Schema schema, InputStream in) throws IOException {
+    this(getSymbol(schema), in);
+  }
+  
+  JsonDecoder(Schema schema, String in) throws IOException {
+    this(getSymbol(schema), in);
+  }
+  
+  private static Symbol getSymbol(Schema schema) {
+    if (null == schema) {
+      throw new NullPointerException("Schema cannot be null!");
+    }
+    return new JsonGrammarGenerator().generate(schema);
+  }
+
+  /**
+   * Reconfigures this JsonDecoder to use the InputStream provided.
+   * <p/>
+   * If the InputStream provided is null, a NullPointerException is thrown.
+   * <p/>
+   * Otherwise, this JsonDecoder will reset its state and then
+   * reconfigure its input.
+   * @param in
+   *   The InputStream to read from. Cannot be null.
+   * @throws IOException
+   * @return this JsonDecoder
+   */
+  public JsonDecoder configure(InputStream in) throws IOException {
+    if (null == in) {
+      throw new NullPointerException("InputStream to read from cannot be null!");
+    }
+    parser.reset();
+    this.in = jsonFactory.createJsonParser(in);
+    this.in.nextToken();
+    return this;
+  }
+  
+  /**
+   * Reconfigures this JsonDecoder to use the String provided for input.
+   * <p/>
+   * If the String provided is null, a NullPointerException is thrown.
+   * <p/>
+   * Otherwise, this JsonDecoder will reset its state and then
+   * reconfigure its input.
+   * @param in
+   *   The String to read from. Cannot be null.
+   * @throws IOException
+   * @return this JsonDecoder
+   */
+  public JsonDecoder configure(String in) throws IOException {
+    if (null == in) {
+      throw new NullPointerException("String to read from cannot be null!");
+    }
+    parser.reset();
+    this.in = jsonFactory.createJsonParser(in);
+    this.in.nextToken();
+    return this;
+  }
+
+  private void advance(Symbol symbol) throws IOException {
+    this.parser.processTrailingImplicitActions();
+    if (in.getCurrentToken() == null && this.parser.depth() == 1)
+      throw new EOFException();
+    parser.advance(symbol);
+  }
+
+  @Override
+  public void readNull() throws IOException {
+    advance(Symbol.NULL);
+    if (in.getCurrentToken() == JsonToken.VALUE_NULL) {
+      in.nextToken();
+    } else {
+      throw error("null");
+    }
+  }
+
+  @Override
+  public boolean readBoolean() throws IOException {
+    advance(Symbol.BOOLEAN);
+    JsonToken t = in.getCurrentToken(); 
+    if (t == JsonToken.VALUE_TRUE || t == JsonToken.VALUE_FALSE) {
+      in.nextToken();
+      return t == JsonToken.VALUE_TRUE;
+    } else {
+      throw error("boolean");
+    }
+  }
+
+  @Override
+  public int readInt() throws IOException {
+    advance(Symbol.INT);
+    if (in.getCurrentToken().isNumeric()) {
+      int result = in.getIntValue();
+      in.nextToken();
+      return result;
+    } else {
+      throw error("int");
+    }
+  }
+    
+  @Override
+  public long readLong() throws IOException {
+    advance(Symbol.LONG);
+    if (in.getCurrentToken().isNumeric()) {
+      long result = in.getLongValue();
+      in.nextToken();
+      return result;
+    } else {
+      throw error("long");
+    }
+  }
+
+  @Override
+  public float readFloat() throws IOException {
+    advance(Symbol.FLOAT);
+    if (in.getCurrentToken().isNumeric()) {
+      float result = in.getFloatValue();
+      in.nextToken();
+      return result;
+    } else {
+      throw error("float");
+    }
+  }
+
+  @Override
+  public double readDouble() throws IOException {
+    advance(Symbol.DOUBLE);
+    if (in.getCurrentToken().isNumeric()) {
+      double result = in.getDoubleValue();
+      in.nextToken();
+      return result;
+    } else {
+      throw error("double");
+    }
+  }
+    
+  @Override
+  public Utf8 readString(Utf8 old) throws IOException {
+    return new Utf8(readString());
+  }
+
+  @Override
+  public String readString() throws IOException {
+    advance(Symbol.STRING);
+    if (parser.topSymbol() == Symbol.MAP_KEY_MARKER) {
+      parser.advance(Symbol.MAP_KEY_MARKER);
+      if (in.getCurrentToken() != JsonToken.FIELD_NAME) {
+        throw error("map-key");
+      }
+    } else {
+      if (in.getCurrentToken() != JsonToken.VALUE_STRING) {
+        throw error("string");
+      }
+    }
+    String result = in.getText();
+    in.nextToken();
+    return result;
+  }
+
+  @Override
+  public void skipString() throws IOException {
+    advance(Symbol.STRING);
+    if (parser.topSymbol() == Symbol.MAP_KEY_MARKER) {
+      parser.advance(Symbol.MAP_KEY_MARKER);
+      if (in.getCurrentToken() != JsonToken.FIELD_NAME) {
+        throw error("map-key");
+      }
+    } else {
+      if (in.getCurrentToken() != JsonToken.VALUE_STRING) {
+        throw error("string");
+      }
+    }
+    in.nextToken();
+  }
+
+  @Override
+  public ByteBuffer readBytes(ByteBuffer old) throws IOException {
+    advance(Symbol.BYTES);
+    if (in.getCurrentToken() == JsonToken.VALUE_STRING) {
+      byte[] result = readByteArray();
+      in.nextToken();
+      return ByteBuffer.wrap(result);
+    } else {
+      throw error("bytes");
+    }
+  }
+
+  private byte[] readByteArray() throws IOException {
+    byte[] result = in.getText().getBytes(CHARSET);
+    return result;
+  }
+
+  @Override
+  public void skipBytes() throws IOException {
+    advance(Symbol.BYTES);
+    if (in.getCurrentToken() == JsonToken.VALUE_STRING) {
+      in.nextToken();
+    } else {
+      throw error("bytes");
+    }
+  }
+
+  private void checkFixed(int size) throws IOException {
+    advance(Symbol.FIXED);
+    Symbol.IntCheckAction top = (Symbol.IntCheckAction) parser.popSymbol();
+    if (size != top.size) {
+      throw new AvroTypeException(
+        "Incorrect length for fixed binary: expected " +
+        top.size + " but received " + size + " bytes.");
+    }
+  }
+    
+  @Override
+  public void readFixed(byte[] bytes, int start, int len) throws IOException {
+    checkFixed(len);
+    if (in.getCurrentToken() == JsonToken.VALUE_STRING) {
+      byte[] result = readByteArray();
+      in.nextToken();
+      if (result.length != len) {
+        throw new AvroTypeException("Expected fixed length " + len
+            + ", but got" + result.length);
+      }
+      System.arraycopy(result, 0, bytes, start, len);
+    } else {
+      throw error("fixed");
+    }
+  }
+
+  @Override
+  public void skipFixed(int length) throws IOException {
+    checkFixed(length);
+    doSkipFixed(length);
+  }
+
+  private void doSkipFixed(int length) throws IOException {
+    if (in.getCurrentToken() == JsonToken.VALUE_STRING) {
+      byte[] result = readByteArray();
+      in.nextToken();
+      if (result.length != length) {
+        throw new AvroTypeException("Expected fixed length " + length
+            + ", but got" + result.length);
+      }
+    } else {
+      throw error("fixed");
+    }
+  }
+
+  @Override
+  protected void skipFixed() throws IOException {
+    advance(Symbol.FIXED);
+    Symbol.IntCheckAction top = (Symbol.IntCheckAction) parser.popSymbol();
+    doSkipFixed(top.size);
+  }
+
+  @Override
+  public int readEnum() throws IOException {
+    advance(Symbol.ENUM);
+    Symbol.EnumLabelsAction top = (Symbol.EnumLabelsAction) parser.popSymbol();
+    if (in.getCurrentToken() == JsonToken.VALUE_STRING) {
+      int n = top.findLabel(in.getText());
+      if (n >= 0) {
+        in.nextToken();
+        return n;
+      }
+      throw new AvroTypeException("Unknown symbol in enum " + in.getText());
+    } else {
+      throw error("fixed");
+    }
+  }
+
+  @Override
+  public long readArrayStart() throws IOException {
+    advance(Symbol.ARRAY_START);
+    if (in.getCurrentToken() == JsonToken.START_ARRAY) {
+      in.nextToken();
+      return doArrayNext();
+    } else {
+      throw error("array-start");
+    }
+  }
+
+  @Override
+  public long arrayNext() throws IOException {
+    advance(Symbol.ITEM_END);
+    return doArrayNext();
+  }
+
+  private long doArrayNext() throws IOException {
+    if (in.getCurrentToken() == JsonToken.END_ARRAY) {
+      parser.advance(Symbol.ARRAY_END);
+      in.nextToken();
+      return 0;
+    } else {
+      return 1;
+    }
+  }
+
+  @Override
+  public long skipArray() throws IOException {
+    advance(Symbol.ARRAY_START);
+    if (in.getCurrentToken() == JsonToken.START_ARRAY) {
+      in.skipChildren();
+      in.nextToken();
+      advance(Symbol.ARRAY_END);    
+    } else {
+      throw error("array-start");
+    }
+    return 0;
+  }
+
+  @Override
+  public long readMapStart() throws IOException {
+    advance(Symbol.MAP_START);
+    if (in.getCurrentToken() == JsonToken.START_OBJECT) {
+      in.nextToken();
+      return doMapNext();
+    } else {
+      throw error("map-start");
+    }
+  }
+
+  @Override
+  public long mapNext() throws IOException {
+    advance(Symbol.ITEM_END);
+    return doMapNext();
+  }
+
+  private long doMapNext() throws IOException {
+    if (in.getCurrentToken() == JsonToken.END_OBJECT) {
+      in.nextToken();
+      advance(Symbol.MAP_END);
+      return 0;
+    } else {
+      return 1;
+    }
+  }
+
+  @Override
+  public long skipMap() throws IOException {
+    advance(Symbol.MAP_START);
+    if (in.getCurrentToken() == JsonToken.START_OBJECT) {
+      in.skipChildren();
+      in.nextToken();
+      advance(Symbol.MAP_END);    
+    } else {
+      throw error("map-start");
+    }
+    return 0;
+  }
+
+  @Override
+  public int readIndex() throws IOException {
+    advance(Symbol.UNION);
+    Symbol.Alternative a = (Symbol.Alternative) parser.popSymbol();
+    
+    String label;
+    if (in.getCurrentToken() == JsonToken.VALUE_NULL) {
+      label = "null";
+    } else if (in.getCurrentToken() == JsonToken.START_OBJECT &&
+               in.nextToken() == JsonToken.FIELD_NAME) {
+      label = in.getText();
+      in.nextToken();
+      parser.pushSymbol(Symbol.UNION_END);
+    } else {
+      throw error("start-union");
+    }
+    int n = a.findLabel(label);
+    if (n < 0)
+      throw new AvroTypeException("Unknown union branch " + label);
+    parser.pushSymbol(a.getSymbol(n));
+    return n;
+  }
+
+  @Override
+  public Symbol doAction(Symbol input, Symbol top) throws IOException {
+    if (top instanceof Symbol.FieldAdjustAction) {
+      Symbol.FieldAdjustAction fa = (Symbol.FieldAdjustAction) top;
+      String name = fa.fname;
+      if (currentReorderBuffer != null) {
+        List<JsonElement> node = currentReorderBuffer.savedFields.get(name);
+        if (node != null) {
+          currentReorderBuffer.savedFields.remove(name);
+          currentReorderBuffer.origParser = in;
+          in = makeParser(node);
+          return null;
+        }
+      }
+      if (in.getCurrentToken() == JsonToken.FIELD_NAME) {
+        do {
+          String fn = in.getText();
+          in.nextToken();
+          if (name.equals(fn)) {
+            return null;
+          } else {
+            if (currentReorderBuffer == null) {
+              currentReorderBuffer = new ReorderBuffer();
+            }
+            currentReorderBuffer.savedFields.put(fn, getValueAsTree(in));
+          }
+        } while (in.getCurrentToken() == JsonToken.FIELD_NAME);
+        throw new AvroTypeException("Expected field name not found: " + fa.fname);
+      }
+    } else if (top == Symbol.FIELD_END) {
+      if (currentReorderBuffer != null && currentReorderBuffer.origParser != null) {
+        in = currentReorderBuffer.origParser;
+        currentReorderBuffer.origParser = null;
+      }
+    } else if (top == Symbol.RECORD_START) {
+      if (in.getCurrentToken() == JsonToken.START_OBJECT) {
+        in.nextToken();
+        reorderBuffers.push(currentReorderBuffer);
+        currentReorderBuffer = null;
+      } else {
+        throw error("record-start");
+      }
+    } else if (top == Symbol.RECORD_END || top == Symbol.UNION_END) {
+      if (in.getCurrentToken() == JsonToken.END_OBJECT) {
+        in.nextToken();
+        if (top == Symbol.RECORD_END) {
+          if (currentReorderBuffer != null && !currentReorderBuffer.savedFields.isEmpty()) {
+            throw error("Unknown fields: " + currentReorderBuffer.savedFields.keySet());
+          }
+          currentReorderBuffer = reorderBuffers.pop();
+        }
+      } else {
+        throw error(top == Symbol.RECORD_END ? "record-end" : "union-end");
+      }
+    } else {
+      throw new AvroTypeException("Unknown action symbol " + top);
+    }
+    return null;
+  }
+
+  private static class JsonElement {
+    public final JsonToken token;
+    public final String value;
+    public JsonElement(JsonToken t, String value) {
+      this.token = t;
+      this.value = value;
+    }
+    
+    public JsonElement(JsonToken t) {
+      this(t, null);
+    }
+  }
+  
+  private static List<JsonElement> getValueAsTree(JsonParser in) throws IOException {
+    int level = 0;
+    List<JsonElement> result = new ArrayList<JsonElement>();
+    do {
+      JsonToken t = in.getCurrentToken();
+      switch (t) {
+      case START_OBJECT:
+      case START_ARRAY:
+        level++;
+        result.add(new JsonElement(t));
+        break;
+      case END_OBJECT:
+      case END_ARRAY:
+        level--;
+        result.add(new JsonElement(t));
+        break;
+      case FIELD_NAME:
+      case VALUE_STRING:
+      case VALUE_NUMBER_INT:
+      case VALUE_NUMBER_FLOAT:
+      case VALUE_TRUE:
+      case VALUE_FALSE:
+      case VALUE_NULL:
+        result.add(new JsonElement(t, in.getText()));
+        break;
+      }
+      in.nextToken();
+    } while (level != 0);
+    result.add(new JsonElement(null));
+    return result;
+  }
+
+  private JsonParser makeParser(final List<JsonElement> elements) throws IOException {
+    return new JsonParser() {
+      int pos = 0;
+
+      @Override
+      public ObjectCodec getCodec() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void setCodec(ObjectCodec c) {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void close() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public JsonToken nextToken() throws IOException {
+        pos++;
+        return elements.get(pos).token;
+      }
+
+      @Override
+      public JsonParser skipChildren() throws IOException {
+        int level = 0;
+        do {
+          switch(elements.get(pos++).token) {
+          case START_ARRAY:
+          case START_OBJECT:
+            level++;
+            break;
+          case END_ARRAY:
+          case END_OBJECT:
+            level--;
+            break;
+          }
+        } while (level > 0);
+        return this;
+      }
+
+      @Override
+      public boolean isClosed() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public String getCurrentName() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public JsonStreamContext getParsingContext() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public JsonLocation getTokenLocation() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public JsonLocation getCurrentLocation() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public String getText() throws IOException {
+        return elements.get(pos).value;
+      }
+
+      @Override
+      public char[] getTextCharacters() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public int getTextLength() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public int getTextOffset() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public Number getNumberValue() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public NumberType getNumberType() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public int getIntValue() throws IOException {
+        return Integer.parseInt(getText());
+      }
+
+      @Override
+      public long getLongValue() throws IOException {
+        return Long.parseLong(getText());
+      }
+
+      @Override
+      public BigInteger getBigIntegerValue() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public float getFloatValue() throws IOException {
+        return Float.parseFloat(getText());
+      }
+
+      @Override
+      public double getDoubleValue() throws IOException {
+        return Double.parseDouble(getText());
+      }
+
+      @Override
+      public BigDecimal getDecimalValue() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public byte[] getBinaryValue(Base64Variant b64variant)
+        throws IOException {
+        throw new UnsupportedOperationException();
+      }
+      
+      @Override
+      public JsonToken getCurrentToken() {
+        return elements.get(pos).token;
+      }
+    };
+  }
+
+  private AvroTypeException error(String type) {
+    return new AvroTypeException("Expected " + type +
+        ". Got " + in.getCurrentToken());
+  }
+
+}
+
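Usage sketch (editorial, not part of the imported sources): the class comment
above directs construction through DecoderFactory. A minimal end-to-end decode
might look like the following; the User schema and JSON payload are invented
for illustration.

    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericDatumReader;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.avro.io.Decoder;
    import org.apache.avro.io.DecoderFactory;

    public class JsonDecoderSketch {
      public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
            + "{\"name\":\"name\",\"type\":\"string\"},"
            + "{\"name\":\"age\",\"type\":\"int\"}]}");
        // JsonDecoder's constructors are package-private; the factory is the
        // supported way to obtain one.
        Decoder decoder = DecoderFactory.get()
            .jsonDecoder(schema, "{\"name\": \"Ann\", \"age\": 42}");
        GenericRecord user =
            new GenericDatumReader<GenericRecord>(schema).read(null, decoder);
        System.out.println(user); // {"name": "Ann", "age": 42}
      }
    }
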
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java
new file mode 100644
index 0000000..9d413e6
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/JsonEncoder.java
@@ -0,0 +1,321 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.BitSet;
+
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.Schema;
+import org.apache.avro.io.parsing.JsonGrammarGenerator;
+import org.apache.avro.io.parsing.Parser;
+import org.apache.avro.io.parsing.Symbol;
+import org.apache.avro.util.Utf8;
+import org.codehaus.jackson.JsonEncoding;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.util.DefaultPrettyPrinter;
+import org.codehaus.jackson.util.MinimalPrettyPrinter;
+
+/** An {@link Encoder} for Avro's JSON data encoding.
+ * <p/>
+ * Construct using {@link EncoderFactory}.
+ * <p/>
+ * JsonEncoder buffers output, and data may not appear on the output
+ * until {@link Encoder#flush()} is called.
+ * <p/>
+ * JsonEncoder is not thread-safe.
+ */
+public class JsonEncoder extends ParsingEncoder implements Parser.ActionHandler {
+  private static final String LINE_SEPARATOR = System.getProperty("line.separator");
+  final Parser parser;
+  private JsonGenerator out;
+  /**
+   * Bit n is set while the collection at nesting depth n is still
+   * empty (nothing has been written into it yet).
+   */
+  protected BitSet isEmpty = new BitSet();
+
+  JsonEncoder(Schema sc, OutputStream out) throws IOException {
+    this(sc, getJsonGenerator(out, false));
+  }
+
+  JsonEncoder(Schema sc, OutputStream out, boolean pretty) throws IOException {
+    this(sc, getJsonGenerator(out, pretty));
+  }
+
+  JsonEncoder(Schema sc, JsonGenerator out) throws IOException {
+    configure(out);
+    this.parser =
+      new Parser(new JsonGrammarGenerator().generate(sc), this);
+  }
+
+  @Override
+  public void flush() throws IOException {
+    parser.processImplicitActions();
+    if (out != null) {
+      out.flush();
+    }
+  }
+
+  // By default, one object per line; with the pretty option, use the
+  // default pretty printer with a root-value line separator.
+  private static JsonGenerator getJsonGenerator(OutputStream out, boolean pretty)
+      throws IOException {
+    if (null == out)
+      throw new NullPointerException("OutputStream cannot be null"); 
+    JsonGenerator g
+      = new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
+    if (pretty) {
+      DefaultPrettyPrinter pp = new DefaultPrettyPrinter() {
+        //@Override
+        public void writeRootValueSeparator(JsonGenerator jg)
+            throws IOException
+        {
+          jg.writeRaw(LINE_SEPARATOR);
+        }
+      };
+      g.setPrettyPrinter(pp);
+    } else {
+      MinimalPrettyPrinter pp = new MinimalPrettyPrinter();
+      pp.setRootValueSeparator(LINE_SEPARATOR);
+      g.setPrettyPrinter(pp);
+    }
+    return g;
+  }
+  
+  /**
+   * Reconfigures this JsonEncoder to use the output stream provided.
+   * <p/>
+   * If the OutputStream provided is null, a NullPointerException is thrown.
+   * <p/>
+   * Otherwise, this JsonEncoder will flush its current output and then
+   * reconfigure its output to use a default UTF8 JsonGenerator that writes
+   * to the provided OutputStream.
+   * 
+   * @param out
+   *          The OutputStream to direct output to. Cannot be null.
+   * @throws IOException
+   * @return this JsonEncoder
+   */
+  public JsonEncoder configure(OutputStream out) throws IOException {
+    this.configure(getJsonGenerator(out, false));
+    return this;
+  }
+  
+  /**
+   * Reconfigures this JsonEncoder to output to the JsonGenerator provided.
+   * <p/>
+   * If the JsonGenerator provided is null, a NullPointerException is thrown.
+   * <p/>
+   * Otherwise, this JsonEncoder will flush its current output and then
+   * reconfigure its output to use the provided JsonGenerator.
+   * 
+   * @param generator
+   *          The JsonGenerator to direct output to. Cannot be null.
+   * @throws IOException
+   * @return this JsonEncoder
+   * @deprecated internal method
+   */
+  @Deprecated
+  public JsonEncoder configure(JsonGenerator generator) throws IOException {
+    if (null == generator)
+      throw new NullPointerException("JsonGenerator cannot be null");
+    if (null != parser) {
+      flush();
+    }
+    this.out = generator;
+    return this;
+  }
+
+  @Override
+  public void writeNull() throws IOException {
+    parser.advance(Symbol.NULL);
+    out.writeNull();
+  }
+
+  @Override
+  public void writeBoolean(boolean b) throws IOException {
+    parser.advance(Symbol.BOOLEAN);
+    out.writeBoolean(b);
+  }
+
+  @Override
+  public void writeInt(int n) throws IOException {
+    parser.advance(Symbol.INT);
+    out.writeNumber(n);
+  }
+
+  @Override
+  public void writeLong(long n) throws IOException {
+    parser.advance(Symbol.LONG);
+    out.writeNumber(n);
+  }
+
+  @Override
+  public void writeFloat(float f) throws IOException {
+    parser.advance(Symbol.FLOAT);
+    out.writeNumber(f);
+  }
+
+  @Override
+  public void writeDouble(double d) throws IOException {
+    parser.advance(Symbol.DOUBLE);
+    out.writeNumber(d);
+  }
+
+  @Override
+  public void writeString(Utf8 utf8) throws IOException {
+    writeString(utf8.toString());
+  }
+  
+  @Override 
+  public void writeString(String str) throws IOException {
+    parser.advance(Symbol.STRING);
+    if (parser.topSymbol() == Symbol.MAP_KEY_MARKER) {
+      parser.advance(Symbol.MAP_KEY_MARKER);
+      out.writeFieldName(str);
+    } else {
+      out.writeString(str);
+    }
+  }
+
+  @Override
+  public void writeBytes(ByteBuffer bytes) throws IOException {
+    if (bytes.hasArray()) {
+      writeBytes(bytes.array(), bytes.position(), bytes.remaining());
+    } else {
+      byte[] b = new byte[bytes.remaining()];
+      bytes.duplicate().get(b);
+      writeBytes(b);
+    }
+  }
+
+  @Override
+  public void writeBytes(byte[] bytes, int start, int len) throws IOException {
+    parser.advance(Symbol.BYTES);
+    writeByteArray(bytes, start, len);
+  }
+
+  private void writeByteArray(byte[] bytes, int start, int len)
+    throws IOException {
+    out.writeString(
+        new String(bytes, start, len, JsonDecoder.CHARSET));
+  }
+
+  @Override
+  public void writeFixed(byte[] bytes, int start, int len) throws IOException {
+    parser.advance(Symbol.FIXED);
+    Symbol.IntCheckAction top = (Symbol.IntCheckAction) parser.popSymbol();
+    if (len != top.size) {
+      throw new AvroTypeException(
+        "Incorrect length for fixed binary: expected " +
+        top.size + " but received " + len + " bytes.");
+    }
+    writeByteArray(bytes, start, len);
+  }
+
+  @Override
+  public void writeEnum(int e) throws IOException {
+    parser.advance(Symbol.ENUM);
+    Symbol.EnumLabelsAction top = (Symbol.EnumLabelsAction) parser.popSymbol();
+    if (e < 0 || e >= top.size) {
+      throw new AvroTypeException(
+          "Enumeration out of range: max is " +
+          top.size + " but received " + e);
+    }
+    out.writeString(top.getLabel(e));
+  }
+
+  @Override
+  public void writeArrayStart() throws IOException {
+    parser.advance(Symbol.ARRAY_START);
+    out.writeStartArray();
+    push();
+    isEmpty.set(depth());
+  }
+
+  @Override
+  public void writeArrayEnd() throws IOException {
+    if (! isEmpty.get(pos)) {
+      parser.advance(Symbol.ITEM_END);
+    }
+    pop();
+    parser.advance(Symbol.ARRAY_END);
+    out.writeEndArray();
+  }
+
+  @Override
+  public void writeMapStart() throws IOException {
+    push();
+    isEmpty.set(depth());
+
+    parser.advance(Symbol.MAP_START);
+    out.writeStartObject();
+  }
+
+  @Override
+  public void writeMapEnd() throws IOException {
+    if (! isEmpty.get(pos)) {
+      parser.advance(Symbol.ITEM_END);
+    }
+    pop();
+
+    parser.advance(Symbol.MAP_END);
+    out.writeEndObject();
+  }
+
+  @Override
+  public void startItem() throws IOException {
+    if (! isEmpty.get(pos)) {
+      parser.advance(Symbol.ITEM_END);
+    }
+    super.startItem();
+    isEmpty.clear(depth());
+  }
+
+  @Override
+  public void writeIndex(int unionIndex) throws IOException {
+    parser.advance(Symbol.UNION);
+    Symbol.Alternative top = (Symbol.Alternative) parser.popSymbol();
+    Symbol symbol = top.getSymbol(unionIndex);
+    if (symbol != Symbol.NULL) {
+      out.writeStartObject();
+      out.writeFieldName(top.getLabel(unionIndex));
+      parser.pushSymbol(Symbol.UNION_END);
+    }
+    parser.pushSymbol(symbol);
+  }
+
+  @Override
+  public Symbol doAction(Symbol input, Symbol top) throws IOException {
+    if (top instanceof Symbol.FieldAdjustAction) {
+      Symbol.FieldAdjustAction fa = (Symbol.FieldAdjustAction) top;
+      out.writeFieldName(fa.fname);
+    } else if (top == Symbol.RECORD_START) {
+      out.writeStartObject();
+    } else if (top == Symbol.RECORD_END || top == Symbol.UNION_END) {
+      out.writeEndObject();
+    } else if (top != Symbol.FIELD_END) {
+      throw new AvroTypeException("Unknown action symbol " + top);
+    }
+    return null;
+  }
+}
+
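Editorial sketch for the encoder side (again, the schema and record contents
are invented): note the explicit flush(), since JsonEncoder buffers its
output as described in the class comment above.

    import java.io.ByteArrayOutputStream;

    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.avro.io.EncoderFactory;
    import org.apache.avro.io.JsonEncoder;

    public class JsonEncoderSketch {
      public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
            + "{\"name\":\"name\",\"type\":\"string\"},"
            + "{\"name\":\"age\",\"type\":\"int\"}]}");
        GenericRecord user = new GenericData.Record(schema);
        user.put("name", "Ann");
        user.put("age", 42);

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, out);
        new GenericDatumWriter<GenericRecord>(schema).write(user, encoder);
        encoder.flush(); // output is buffered until flush()
        System.out.println(out.toString("UTF-8")); // {"name":"Ann","age":42}
      }
    }
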
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/ParsingDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/ParsingDecoder.java
new file mode 100644
index 0000000..db1790e
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/ParsingDecoder.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+
+import org.apache.avro.io.parsing.SkipParser;
+import org.apache.avro.io.parsing.Symbol;
+import org.apache.avro.io.parsing.Parser.ActionHandler;
+import org.apache.avro.io.parsing.SkipParser.SkipHandler;
+
+/** Base class for <a href="parsing/package-summary.html">parser</a>-based
+ * {@link Decoder}s. */
+public abstract class ParsingDecoder extends Decoder
+  implements ActionHandler, SkipHandler {
+  protected final SkipParser parser;
+
+  protected ParsingDecoder(Symbol root) throws IOException {
+    this.parser = new SkipParser(root, this, this);
+  }
+
+  protected abstract void skipFixed() throws IOException;
+  
+  @Override
+  public void skipAction() throws IOException {
+    parser.popSymbol();
+  }
+
+  @Override
+  public void skipTopSymbol() throws IOException {
+    Symbol top = parser.topSymbol();
+    if (top == Symbol.NULL) {
+      readNull();
+    } else if (top == Symbol.BOOLEAN) {
+      readBoolean();
+    } else if (top == Symbol.INT) {
+      readInt();
+    } else if (top == Symbol.LONG) {
+      readLong();
+    } else if (top == Symbol.FLOAT) {
+      readFloat();
+    } else if (top == Symbol.DOUBLE) {
+      readDouble();
+    } else if (top == Symbol.STRING) {
+      skipString();
+    } else if (top == Symbol.BYTES) {
+      skipBytes();
+    } else if (top == Symbol.ENUM)  {
+      readEnum();
+    } else if (top == Symbol.FIXED) {
+      skipFixed();
+    } else if (top == Symbol.UNION) {
+      readIndex();
+    } else if (top == Symbol.ARRAY_START) {
+      skipArray();
+    } else if (top == Symbol.MAP_START) {
+      skipMap();
+    }
+  }
+  
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/ParsingEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/ParsingEncoder.java
new file mode 100644
index 0000000..b6fe369
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/ParsingEncoder.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.avro.AvroTypeException;
+
+/** Base class for <a href="parsing/package-summary.html">parser</a>-based
+ * {@link Encoder}s. */
+public abstract class ParsingEncoder extends Encoder {
+  /**
+   * Tracks, at each level of the current nesting of collections
+   * (arrays or maps), the number of items that remain to be written.
+   */
+  private long[] counts = new long[10];
+  
+  protected int pos = -1;
+
+  @Override
+  public void setItemCount(long itemCount) throws IOException {
+    if (counts[pos] != 0) {
+      throw new AvroTypeException("Incorrect number of items written. " +
+          counts[pos] + " more required.");
+    }
+    counts[pos] = itemCount;
+  }
+
+  @Override
+  public void startItem() throws IOException {
+    counts[pos]--;
+  }
+
+  /** Push a new collection on to the stack. */
+  protected final void push() {
+    if (++pos == counts.length) {
+      counts = Arrays.copyOf(counts, pos + 10);
+    }
+    counts[pos] = 0;
+  }
+  
+  protected final void pop() {
+    if (counts[pos] != 0) {
+      throw new AvroTypeException("Incorrect number of items written. " +
+          counts[pos] + " more required.");
+    }
+    pos--;
+  }
+  
+  protected final int depth() {
+    return pos;
+  }
+}
+
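Editorial sketch: the count bookkeeping above is what enforces the standard
Encoder collection protocol: declare the item count, mark each item with
startItem(), then close the collection. Any ParsingEncoder subclass exhibits
it; JsonEncoder is used here, and the array schema and values are invented.

    import java.io.ByteArrayOutputStream;

    import org.apache.avro.Schema;
    import org.apache.avro.io.Encoder;
    import org.apache.avro.io.EncoderFactory;

    public class CollectionProtocolSketch {
      public static void main(String[] args) throws Exception {
        Schema arrayOfLong = new Schema.Parser().parse(
            "{\"type\":\"array\",\"items\":\"long\"}");
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // JsonEncoder extends ParsingEncoder, so the count checks apply.
        Encoder encoder = EncoderFactory.get().jsonEncoder(arrayOfLong, out);
        encoder.writeArrayStart();
        encoder.setItemCount(3);            // declare how many items follow
        for (long v : new long[] {1, 2, 3}) {
          encoder.startItem();              // decrements the pending count
          encoder.writeLong(v);
        }
        encoder.writeArrayEnd();            // pop() verifies the count hit zero
        encoder.flush();
        System.out.println(out.toString("UTF-8")); // [1,2,3]
      }
    }
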
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java
new file mode 100644
index 0000000..2d7eba2
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/ResolvingDecoder.java
@@ -0,0 +1,328 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.Schema;
+import org.apache.avro.io.parsing.ResolvingGrammarGenerator;
+import org.apache.avro.io.parsing.Symbol;
+import org.apache.avro.util.Utf8;
+
+/**
+ * {@link Decoder} that performs type-resolution between the reader's and
+ * writer's schemas.
+ *
+ * <p>When resolving schemas, this class will return the values of fields in
+ * _writer's_ order, not the reader's order.  (However, it returns _only_ the
+ * reader's fields, not any extra fields the writer may have written.)  To help
+ * clients handle fields that appear to be coming out of order, this class
+ * defines the method {@link #readFieldOrder}.
+ *
+ * <p>See the <a href="doc-files/parsing.html">parser documentation</a> for
+ *  information on how this works.
+ */
+public class ResolvingDecoder extends ValidatingDecoder {
+
+  private Decoder backup;
+  
+  ResolvingDecoder(Schema writer, Schema reader, Decoder in)
+    throws IOException {
+    this(resolve(writer, reader), in);
+  }
+  
+  /**
+   * Constructs a <tt>ResolvingDecoder</tt> using the given resolver.
+   * The resolver must have been returned by a previous call to
+   * {@link #resolve(Schema, Schema)}.
+   * @param resolver  The resolver to use.
+   * @param in  The underlying decoder.
+   * @throws IOException
+   */
+  private ResolvingDecoder(Object resolver, Decoder in)
+    throws IOException {
+    super((Symbol) resolver, in);
+  }
+
+  /**
+   * Produces an opaque resolver that can be used to construct a new
+   * {@link ResolvingDecoder#ResolvingDecoder(Object, Decoder)}. The
+   * returned Object is immutable and hence can be simultaneously used
+ * in many ResolvingDecoders. This method is reasonably expensive, so
+ * users are encouraged to cache the result.
+   * 
+   * @param writer  The writer's schema. Cannot be null.
+   * @param reader  The reader's schema. Cannot be null.
+   * @return  The opaque resolver.
+   * @throws IOException
+   */
+  public static Object resolve(Schema writer, Schema reader)
+    throws IOException {
+    if (null == writer) {
+      throw new NullPointerException("writer cannot be null!");
+    }
+    if (null == reader) {
+      throw new NullPointerException("reader cannot be null!");
+    }
+    return new ResolvingGrammarGenerator().generate(writer, reader);
+  }
+
+  /** Returns the actual order in which the reader's fields will be
+   * returned to the reader.
+   *
+   * This method is useful because {@link ResolvingDecoder}
+   * returns values in the order written by the writer, rather than
+   * the order expected by the reader.  This method allows readers
+   * to figure out what fields to expect.  Let's say the reader is
+   * expecting a three-field record, the first field is a long, the
+   * second a string, and the third an array.  In this case, a
+   * typical usage might be as follows:
+   * <pre>
+   *   Schema.Field[] fieldOrder = in.readFieldOrder();
+   *   for (int i = 0; i &lt; 3; i++) {
+   *     switch (fieldOrder[i].pos()) {
+   *     case 0:
+   *       foo(in.readLong());
+   *       break;
+   *     case 1:
+   *       someVariable = in.readString();
+   *       break;
+   *     case 2:
+   *       bar(in); // The code of "bar" will read an array-of-int
+   *       break;
+   *     }
+   *   }
+   * </pre>
+   * Note that {@link ResolvingDecoder} will return only the
+   * fields expected by the reader, not other fields that may have
+   * been written by the writer.  Thus, the iteration-count of "3" in
+   * the above loop will always be correct.
+   *
+   * Throws a runtime exception if we're not just about to read the
+   * field of a record.  Also, this method will consume the field
+   * information, and thus may only be called <em>once</em> before
+   * reading the field value.  (However, if the client knows the
+   * order of incoming fields, then the client does <em>not</em>
+   * need to call this method but rather can just start reading the
+   * field values.)
+   *
+   * @throws AvroTypeException If we're not starting a new record
+   *                               
+   */
+  public final Schema.Field[] readFieldOrder() throws IOException {
+    return ((Symbol.FieldOrderAction) parser.advance(Symbol.FIELD_ACTION)).
+      fields;
+  }
+  
+  /**
+   * Consume any more data that has been written by the writer but not
+   * needed by the reader so that the underlying decoder is in proper
+   * shape for the next record. This situation happens when, for example,
+   * the writer writes a record with two fields and the reader needs only the
+   * first field.
+   * 
+   * This function should be called after completely decoding an object but
+   * before the next object can be decoded from the same underlying decoder,
+   * either directly or through another resolving decoder. If the same resolving
+   * decoder is used for the next object as well, calling this method is
+   * optional; the state of this resolving decoder ensures that any leftover
+   * portions are consumed before the next object is decoded.
+   * @throws IOException
+   */
+  public final void drain() throws IOException {
+    parser.processImplicitActions();
+  }
+
+  @Override
+  public long readLong() throws IOException {
+    Symbol actual = parser.advance(Symbol.LONG);
+    if (actual == Symbol.INT) {
+      return in.readInt();
+    } else if (actual == Symbol.DOUBLE) {
+      return (long) in.readDouble();
+    } else {
+      assert actual == Symbol.LONG;
+      return in.readLong();
+    }
+  }
+
+  @Override
+  public float readFloat() throws IOException {
+    Symbol actual = parser.advance(Symbol.FLOAT);
+    if (actual == Symbol.INT) {
+      return (float) in.readInt();
+    } else if (actual == Symbol.LONG) {
+      return (float) in.readLong();
+    } else {
+      assert actual == Symbol.FLOAT;
+      return in.readFloat();
+    }
+  }
+  
+  @Override
+  public double readDouble() throws IOException {
+    Symbol actual = parser.advance(Symbol.DOUBLE);
+    if (actual == Symbol.INT) {
+      return (double) in.readInt();
+    } else if (actual == Symbol.LONG) {
+      return (double) in.readLong();
+    } else if (actual == Symbol.FLOAT) {
+      return (double) in.readFloat();
+    } else {
+      assert actual == Symbol.DOUBLE;
+      return in.readDouble();
+    }
+  }
+  
+  @Override
+  public Utf8 readString(Utf8 old) throws IOException {
+    Symbol actual = parser.advance(Symbol.STRING);
+    if (actual == Symbol.BYTES) {
+      return new Utf8(in.readBytes(null).array());
+    } else {
+      assert actual == Symbol.STRING;
+      return in.readString(old);
+    }
+  }
+
+  private static final Charset UTF8 = Charset.forName("UTF-8");
+
+  @Override
+  public String readString() throws IOException {
+    Symbol actual = parser.advance(Symbol.STRING);
+    if (actual == Symbol.BYTES) {
+      return new String(in.readBytes(null).array(), UTF8);
+    } else {
+      assert actual == Symbol.STRING;
+      return in.readString();
+    }
+  }
+
+  @Override
+  public void skipString() throws IOException {
+    Symbol actual = parser.advance(Symbol.STRING);
+    if (actual == Symbol.BYTES) {
+      in.skipBytes();
+    } else {
+      assert actual == Symbol.STRING;
+      in.skipString();
+    }
+  }
+
+  @Override
+  public ByteBuffer readBytes(ByteBuffer old) throws IOException {
+    Symbol actual = parser.advance(Symbol.BYTES);
+    if (actual == Symbol.STRING) {
+      Utf8 s = in.readString(null);
+      return ByteBuffer.wrap(s.getBytes(), 0, s.getByteLength());
+    } else {
+      assert actual == Symbol.BYTES;
+      return in.readBytes(old);
+    }
+  }
+
+  @Override
+  public void skipBytes() throws IOException {
+    Symbol actual = parser.advance(Symbol.BYTES);
+    if (actual == Symbol.STRING) {
+      in.skipString();
+    } else {
+      assert actual == Symbol.BYTES;
+      in.skipBytes();
+    }
+  }
+
+  @Override
+  public int readEnum() throws IOException {
+    parser.advance(Symbol.ENUM);
+    Symbol.EnumAdjustAction top = (Symbol.EnumAdjustAction) parser.popSymbol();
+    int n = in.readEnum();
+    Object o = top.adjustments[n];
+    if (o instanceof Integer) {
+      return ((Integer) o).intValue();
+    } else {
+      throw new AvroTypeException((String) o);
+    }
+  }
+    
+  @Override
+  public int readIndex() throws IOException {
+    parser.advance(Symbol.UNION);
+    Symbol.UnionAdjustAction top = (Symbol.UnionAdjustAction) parser.popSymbol();
+    parser.pushSymbol(top.symToParse);
+    return top.rindex;
+  }
+
+  @Override
+  public Symbol doAction(Symbol input, Symbol top) throws IOException {
+    if (top instanceof Symbol.FieldOrderAction) {
+      return input == Symbol.FIELD_ACTION ? top : null;
+    } else if (top instanceof Symbol.ResolvingAction) {
+      Symbol.ResolvingAction t = (Symbol.ResolvingAction) top;
+      if (t.reader != input) {
+        throw new AvroTypeException("Found " + t.reader + " while looking for "
+                                    + input);
+      } else {
+        return t.writer;
+      }
+    } else if (top instanceof Symbol.SkipAction) {
+      Symbol symToSkip = ((Symbol.SkipAction) top).symToSkip;
+      parser.skipSymbol(symToSkip);
+    } else if (top instanceof Symbol.WriterUnionAction) {
+      Symbol.Alternative branches = (Symbol.Alternative) parser.popSymbol();
+      parser.pushSymbol(branches.getSymbol(in.readIndex()));
+    } else if (top instanceof Symbol.ErrorAction) {
+      throw new AvroTypeException(((Symbol.ErrorAction) top).msg);
+    } else if (top instanceof Symbol.DefaultStartAction) {
+      Symbol.DefaultStartAction dsa = (Symbol.DefaultStartAction) top;
+      backup = in;
+      in = DecoderFactory.get()
+        .binaryDecoder(dsa.contents, null);
+    } else if (top == Symbol.DEFAULT_END_ACTION) {
+      in = backup;
+    } else {
+      throw new AvroTypeException("Unknown action: " + top);
+    }
+    return null;
+  }
+
+  @Override
+  public void skipAction() throws IOException {
+    Symbol top = parser.popSymbol();
+    if (top instanceof Symbol.ResolvingAction) {
+      parser.pushSymbol(((Symbol.ResolvingAction) top).writer);
+    } else if (top instanceof Symbol.SkipAction) {
+      parser.pushSymbol(((Symbol.SkipAction) top).symToSkip);
+    } else if (top instanceof Symbol.WriterUnionAction) {
+      Symbol.Alternative branches = (Symbol.Alternative) parser.popSymbol();
+      parser.pushSymbol(branches.getSymbol(in.readIndex()));
+    } else if (top instanceof Symbol.ErrorAction) {
+      throw new AvroTypeException(((Symbol.ErrorAction) top).msg);
+    } else if (top instanceof Symbol.DefaultStartAction) {
+      Symbol.DefaultStartAction dsa = (Symbol.DefaultStartAction) top;
+      backup = in;
+      in = DecoderFactory.get()
+        .binaryDecoder(dsa.contents, null);
+    } else if (top == Symbol.DEFAULT_END_ACTION) {
+      in = backup;
+    }
+  }
+}
+
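Editorial sketch of resolution in action (both schemas and the record are
invented): the writer wrote a field the reader does not want,
readFieldOrder() reports the reader's fields in the writer's order, and
drain() consumes the leftover field, exactly as the Javadoc above describes.

    import java.io.ByteArrayOutputStream;

    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.DecoderFactory;
    import org.apache.avro.io.EncoderFactory;
    import org.apache.avro.io.ResolvingDecoder;

    public class ResolvingSketch {
      public static void main(String[] args) throws Exception {
        Schema writer = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"R\",\"fields\":["
            + "{\"name\":\"a\",\"type\":\"long\"},"
            + "{\"name\":\"b\",\"type\":\"string\"}]}");
        Schema reader = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"R\",\"fields\":["
            + "{\"name\":\"a\",\"type\":\"long\"}]}");

        // Encode one record with the writer's schema.
        GenericRecord rec = new GenericData.Record(writer);
        rec.put("a", 1L);
        rec.put("b", "extra");
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder enc = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericRecord>(writer).write(rec, enc);
        enc.flush();

        // Decode with resolution: only the reader's field "a" comes back.
        ResolvingDecoder rd = DecoderFactory.get().resolvingDecoder(writer,
            reader, DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
        for (Schema.Field f : rd.readFieldOrder()) {
          if (f.pos() == 0) {
            System.out.println(rd.readLong()); // 1
          }
        }
        rd.drain(); // consume the writer's trailing field "b"
      }
    }
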
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/ValidatingDecoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/ValidatingDecoder.java
new file mode 100644
index 0000000..ec4275c
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/ValidatingDecoder.java
@@ -0,0 +1,254 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.Schema;
+import org.apache.avro.io.parsing.Parser;
+import org.apache.avro.io.parsing.Symbol;
+import org.apache.avro.io.parsing.ValidatingGrammarGenerator;
+import org.apache.avro.util.Utf8;
+
+/**
+ * An implementation of {@link Decoder} that ensures that the sequence
+ * of operations conforms to a schema.
+ * <p/>
+ * Use {@link DecoderFactory#validatingDecoder(Schema, Decoder)} to construct
+ * and configure.
+ * <p/>
+ * ValidatingDecoder is not thread-safe.
+ * @see Decoder
+ * @see DecoderFactory
+ */
+public class ValidatingDecoder extends ParsingDecoder
+  implements Parser.ActionHandler {
+  protected Decoder in;
+
+  ValidatingDecoder(Symbol root, Decoder in) throws IOException {
+    super(root);
+    this.configure(in);
+  }
+
+  ValidatingDecoder(Schema schema, Decoder in) throws IOException {
+    this(getSymbol(schema), in);
+  }
+  
+  private static Symbol getSymbol(Schema schema) {
+    if (null == schema) {
+      throw new NullPointerException("Schema cannot be null");
+    }
+    return new ValidatingGrammarGenerator().generate(schema);
+  }
+
+  /** Re-initialize, reading from a new underlying Decoder. */
+  public ValidatingDecoder configure(Decoder in) throws IOException {
+    this.parser.reset();
+    this.in = in;
+    return this;
+  }
+
+  @Override
+  public void readNull() throws IOException {
+    parser.advance(Symbol.NULL);
+    in.readNull();
+  }
+    
+  @Override
+  public boolean readBoolean() throws IOException {
+    parser.advance(Symbol.BOOLEAN);
+    return in.readBoolean();
+  }
+
+  @Override
+  public int readInt() throws IOException {
+    parser.advance(Symbol.INT);
+    return in.readInt();
+  }
+    
+  @Override
+  public long readLong() throws IOException {
+    parser.advance(Symbol.LONG);
+    return in.readLong();
+  }
+
+  @Override
+  public float readFloat() throws IOException {
+    parser.advance(Symbol.FLOAT);
+    return in.readFloat();
+  }
+
+  @Override
+  public double readDouble() throws IOException {
+    parser.advance(Symbol.DOUBLE);
+    return in.readDouble();
+  }
+    
+  @Override
+  public Utf8 readString(Utf8 old) throws IOException {
+    parser.advance(Symbol.STRING);
+    return in.readString(old);
+  }
+
+  @Override
+  public String readString() throws IOException {
+    parser.advance(Symbol.STRING);
+    return in.readString();
+  }
+
+  @Override
+  public void skipString() throws IOException {
+    parser.advance(Symbol.STRING);
+    in.skipString();
+  }
+
+  @Override
+  public ByteBuffer readBytes(ByteBuffer old) throws IOException {
+    parser.advance(Symbol.BYTES);
+    return in.readBytes(old);
+  }
+
+  @Override
+  public void skipBytes() throws IOException {
+    parser.advance(Symbol.BYTES);
+    in.skipBytes();
+  }
+
+  private void checkFixed(int size) throws IOException {
+    parser.advance(Symbol.FIXED);
+    Symbol.IntCheckAction top = (Symbol.IntCheckAction) parser.popSymbol();
+    if (size != top.size) {
+      throw new AvroTypeException(
+        "Incorrect length for fixed binary: expected " +
+        top.size + " but received " + size + " bytes.");
+    }
+  }
+    
+  @Override
+  public void readFixed(byte[] bytes, int start, int len) throws IOException {
+    checkFixed(len);
+    in.readFixed(bytes, start, len);
+  }
+
+  @Override
+  public void skipFixed(int length) throws IOException {
+    checkFixed(length);
+    in.skipFixed(length);
+  }
+
+  @Override
+  protected void skipFixed() throws IOException {
+    parser.advance(Symbol.FIXED);
+    Symbol.IntCheckAction top = (Symbol.IntCheckAction) parser.popSymbol();
+    in.skipFixed(top.size);
+  }
+
+  @Override
+  public int readEnum() throws IOException {
+    parser.advance(Symbol.ENUM);
+    Symbol.IntCheckAction top = (Symbol.IntCheckAction) parser.popSymbol();
+    int result = in.readEnum();
+    if (result < 0 || result >= top.size) {
+      throw new AvroTypeException(
+          "Enumeration out of range: max is " +
+          top.size + " but received " + result);
+    }
+    return result;
+  }
+
+  @Override
+  public long readArrayStart() throws IOException {
+    parser.advance(Symbol.ARRAY_START);
+    long result = in.readArrayStart();
+    if (result == 0) {
+      parser.advance(Symbol.ARRAY_END);
+    }
+    return result;
+  }
+
+  @Override
+  public long arrayNext() throws IOException {
+    parser.processTrailingImplicitActions();
+    long result = in.arrayNext();
+    if (result == 0) {
+      parser.advance(Symbol.ARRAY_END);
+    }
+    return result;
+  }
+
+  @Override
+  public long skipArray() throws IOException {
+    parser.advance(Symbol.ARRAY_START);
+    for (long c = in.skipArray(); c != 0; c = in.skipArray()) {
+      while (c-- > 0) {
+        parser.skipRepeater();
+      }
+    }
+    parser.advance(Symbol.ARRAY_END);    
+    return 0;
+  }
+
+  @Override
+  public long readMapStart() throws IOException {
+    parser.advance(Symbol.MAP_START);
+    long result = in.readMapStart();
+    if (result == 0) {
+      parser.advance(Symbol.MAP_END);
+    }
+    return result;
+  }
+
+  @Override
+  public long mapNext() throws IOException {
+    parser.processTrailingImplicitActions();
+    long result = in.mapNext();
+    if (result == 0) {
+      parser.advance(Symbol.MAP_END);
+    }
+    return result;
+  }
+
+  @Override
+  public long skipMap() throws IOException {
+    parser.advance(Symbol.MAP_START);
+    for (long c = in.skipMap(); c != 0; c = in.skipMap()) {
+      while (c-- > 0) {
+        parser.skipRepeater();
+      }
+    }
+    parser.advance(Symbol.MAP_END);
+    return 0;
+  }
+
+  @Override
+  public int readIndex() throws IOException {
+    parser.advance(Symbol.UNION);
+    Symbol.Alternative top = (Symbol.Alternative) parser.popSymbol();
+    int result = in.readIndex();
+    parser.pushSymbol(top.getSymbol(result));
+    return result;
+  }
+  
+  @Override
+  public Symbol doAction(Symbol input, Symbol top) throws IOException {
+    return null;
+  }
+}
+
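Editorial sketch (schema and bytes invented): wrapping a binary decoder with
DecoderFactory#validatingDecoder, as the class comment above directs. Reads
that match the schema pass through; a mismatched read fails with
AvroTypeException.

    import org.apache.avro.Schema;
    import org.apache.avro.io.Decoder;
    import org.apache.avro.io.DecoderFactory;

    public class ValidatingDecodeSketch {
      public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse("\"long\"");
        byte[] bytes = {2}; // zig-zag varint encoding of 1L
        Decoder validating = DecoderFactory.get().validatingDecoder(
            schema, DecoderFactory.get().binaryDecoder(bytes, null));
        System.out.println(validating.readLong()); // 1
        // Calling validating.readString() here would throw AvroTypeException:
        // the grammar for "long" does not permit a string read at this point.
      }
    }
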
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/ValidatingEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/ValidatingEncoder.java
new file mode 100644
index 0000000..275df18
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/ValidatingEncoder.java
@@ -0,0 +1,218 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.io;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.Schema;
+import org.apache.avro.io.parsing.ValidatingGrammarGenerator;
+import org.apache.avro.io.parsing.Parser;
+import org.apache.avro.io.parsing.Symbol;
+import org.apache.avro.util.Utf8;
+
+/**
+ * An implementation of {@link Encoder} that wraps another Encoder and
+ * ensures that the sequence of operations conforms to the provided schema.
+ * <p/>
+ * Use {@link EncoderFactory#validatingEncoder(Schema, Encoder)} to construct
+ * and configure.
+ * <p/>
+ * ValidatingEncoder is not thread-safe.
+ * @see Encoder
+ * @see EncoderFactory
+ */
+public class ValidatingEncoder extends ParsingEncoder 
+  implements Parser.ActionHandler {
+  protected Encoder out;
+  protected final Parser parser;
+
+  ValidatingEncoder(Symbol root, Encoder out) throws IOException {
+    this.out = out;
+    this.parser = new Parser(root, this);
+  }
+
+  ValidatingEncoder(Schema schema, Encoder out) throws IOException {
+    this(new ValidatingGrammarGenerator().generate(schema), out);
+  }
+
+  @Override
+  public void flush() throws IOException {
+    out.flush();
+  }
+
+  /**
+   * Reconfigures this ValidatingEncoder to wrap the encoder provided.
+   * @param encoder
+   *   The Encoder to wrap for validation.
+   * @return
+   *   This ValidatingEncoder.
+   */
+  public ValidatingEncoder configure(Encoder encoder) {
+    this.parser.reset();
+    this.out = encoder;
+    return this;
+  }
+  
+  @Override
+  public void writeNull() throws IOException {
+    parser.advance(Symbol.NULL);
+    out.writeNull();
+  }
+
+  @Override
+  public void writeBoolean(boolean b) throws IOException {
+    parser.advance(Symbol.BOOLEAN);
+    out.writeBoolean(b);
+  }
+
+  @Override
+  public void writeInt(int n) throws IOException {
+    parser.advance(Symbol.INT);
+    out.writeInt(n);
+  }
+
+  @Override
+  public void writeLong(long n) throws IOException {
+    parser.advance(Symbol.LONG);
+    out.writeLong(n);
+  }
+
+  @Override
+  public void writeFloat(float f) throws IOException {
+    parser.advance(Symbol.FLOAT);
+    out.writeFloat(f);
+  }
+
+  @Override
+  public void writeDouble(double d) throws IOException {
+    parser.advance(Symbol.DOUBLE);
+    out.writeDouble(d);
+  }
+
+  @Override
+  public void writeString(Utf8 utf8) throws IOException {
+    parser.advance(Symbol.STRING);
+    out.writeString(utf8);
+  }
+  
+  @Override
+  public void writeString(String str) throws IOException {
+    parser.advance(Symbol.STRING);
+    out.writeString(str);
+  }
+  
+  @Override
+  public void writeString(CharSequence charSequence) throws IOException {
+    parser.advance(Symbol.STRING);
+    out.writeString(charSequence);
+  }
+
+  @Override
+  public void writeBytes(ByteBuffer bytes) throws IOException {
+    parser.advance(Symbol.BYTES);
+    out.writeBytes(bytes);
+  }
+
+  @Override
+  public void writeBytes(byte[] bytes, int start, int len) throws IOException {
+    parser.advance(Symbol.BYTES);
+    out.writeBytes(bytes, start, len);
+  }
+
+  @Override
+  public void writeFixed(byte[] bytes, int start, int len) throws IOException {
+    parser.advance(Symbol.FIXED);
+    Symbol.IntCheckAction top = (Symbol.IntCheckAction) parser.popSymbol();
+    if (len != top.size) {
+      throw new AvroTypeException(
+        "Incorrect length for fixed binary: expected " +
+        top.size + " but received " + len + " bytes.");
+    }
+    out.writeFixed(bytes, start, len);
+  }
+
+  @Override
+  public void writeEnum(int e) throws IOException {
+    parser.advance(Symbol.ENUM);
+    Symbol.IntCheckAction top = (Symbol.IntCheckAction) parser.popSymbol();
+    if (e < 0 || e >= top.size) {
+      throw new AvroTypeException(
+          "Enumeration out of range: max is " +
+          top.size + " but received " + e);
+    }
+    out.writeEnum(e);
+  }
+
+  @Override
+  public void writeArrayStart() throws IOException {
+    push();
+    parser.advance(Symbol.ARRAY_START);
+    out.writeArrayStart();
+  }
+
+  @Override
+  public void writeArrayEnd() throws IOException {
+    parser.advance(Symbol.ARRAY_END);
+    out.writeArrayEnd();
+    pop();
+  }
+
+  @Override
+  public void writeMapStart() throws IOException {
+    push();
+    parser.advance(Symbol.MAP_START);
+    out.writeMapStart();
+  }
+
+  @Override
+  public void writeMapEnd() throws IOException {
+    parser.advance(Symbol.MAP_END);
+    out.writeMapEnd();
+    pop();
+  }
+
+  @Override
+  public void setItemCount(long itemCount) throws IOException {
+    super.setItemCount(itemCount);
+    out.setItemCount(itemCount);
+  }
+
+  @Override
+  public void startItem() throws IOException {
+    super.startItem();
+    out.startItem();
+  }
+
+  @Override
+  public void writeIndex(int unionIndex) throws IOException {
+    parser.advance(Symbol.UNION);
+    Symbol.Alternative top = (Symbol.Alternative) parser.popSymbol();
+    parser.pushSymbol(top.getSymbol(unionIndex));
+    out.writeIndex(unionIndex);
+  }
+
+  @Override
+  public Symbol doAction(Symbol input, Symbol top) throws IOException {
+    return null;
+  }
+
+}
+
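For illustration, a minimal sketch of how this class is reached through the
public API: EncoderFactory.validatingEncoder wraps another Encoder, and every
write is checked against the schema's grammar. The schema literal here is
illustrative:

    import java.io.ByteArrayOutputStream;

    import org.apache.avro.Schema;
    import org.apache.avro.io.Encoder;
    import org.apache.avro.io.EncoderFactory;

    public class ValidatingEncoderSketch {
      public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"Pair\",\"fields\":["
            + "{\"name\":\"x\",\"type\":\"int\"},{\"name\":\"y\",\"type\":\"int\"}]}");
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // Wrap a binary encoder; each write advances the validating parser.
        Encoder e = EncoderFactory.get().validatingEncoder(
            schema, EncoderFactory.get().binaryEncoder(out, null));
        e.writeInt(1);   // field x
        e.writeInt(2);   // field y
        e.flush();
        // Calling e.writeString("oops") in place of a writeInt above would
        // throw AvroTypeException: STRING does not match the expected INT.
      }
    }
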
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/package.html b/lang/java/avro/src/main/java/org/apache/avro/io/package.html
new file mode 100644
index 0000000..a72c61e
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/package.html
@@ -0,0 +1,31 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+Utilities for Encoding and Decoding Avro data.
+<p>
+{@link org.apache.avro.io.Encoder} is the abstract type encapsulating the
+encoding of Avro data.  Encoder instances are created and configured
+with {@link org.apache.avro.io.EncoderFactory}.
+<p>
+{@link org.apache.avro.io.Decoder} is the abstract type encapsulating the
+decoding of Avro data.  Decoder instances are created and configured
+with {@link org.apache.avro.io.DecoderFactory}.
+</body>
+</html>
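A minimal round-trip sketch with these two factories (the null arguments
mean no encoder or decoder instance is being reused):

    import java.io.ByteArrayOutputStream;

    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.DecoderFactory;
    import org.apache.avro.io.EncoderFactory;

    public class IoRoundTrip {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder enc = EncoderFactory.get().binaryEncoder(out, null);
        enc.writeLong(42L);
        enc.writeString("hello");
        enc.flush();

        BinaryDecoder dec =
            DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
        System.out.println(dec.readLong());    // 42
        System.out.println(dec.readString());  // hello
      }
    }
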
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/JsonGrammarGenerator.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/JsonGrammarGenerator.java
new file mode 100644
index 0000000..1620038
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/JsonGrammarGenerator.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io.parsing;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+
+/**
+ * The class that generates a grammar suitable for parsing Avro data
+ * in JSON format.
+ */
+public class JsonGrammarGenerator extends ValidatingGrammarGenerator {
+  /**
+   * Returns the non-terminal that is the start symbol
+   * for the grammar for the given schema <tt>schema</tt>.
+   */
+  public Symbol generate(Schema schema) {
+    return Symbol.root(generate(schema, new HashMap<LitS, Symbol>()));
+  }
+
+  /**
+   * Returns the non-terminal that is the start symbol
+   * for the grammar of the given schema <tt>sc</tt>. If there is already an entry
+   * for the given schema in the given map <tt>seen</tt> then
+   * that entry is returned. Otherwise a new symbol is generated and
+   * an entry is inserted into the map.
+   * @param sc    The schema for which the start symbol is required
+   * @param seen  A map of schema to symbol mapping done so far.
+   * @return      The start symbol for the schema
+   */
+  public Symbol generate(Schema sc, Map<LitS, Symbol> seen) {
+    switch (sc.getType()) {
+    case NULL:
+    case BOOLEAN:
+    case INT:
+    case LONG:
+    case FLOAT:
+    case DOUBLE:
+    case STRING:
+    case BYTES:
+    case FIXED:
+    case UNION:
+      return super.generate(sc, seen);
+    case ENUM:
+      return Symbol.seq(Symbol.enumLabelsAction(sc.getEnumSymbols()),
+          Symbol.ENUM);
+    case ARRAY:
+      return Symbol.seq(Symbol.repeat(Symbol.ARRAY_END,
+              Symbol.ITEM_END, generate(sc.getElementType(), seen)),
+          Symbol.ARRAY_START);
+    case MAP:
+      return Symbol.seq(Symbol.repeat(Symbol.MAP_END,
+              Symbol.ITEM_END, generate(sc.getValueType(), seen),
+              Symbol.MAP_KEY_MARKER, Symbol.STRING),
+          Symbol.MAP_START);
+    case RECORD: {
+      LitS wsc = new LitS(sc);
+      Symbol rresult = seen.get(wsc);
+      if (rresult == null) {
+        Symbol[] production = new Symbol[sc.getFields().size() * 3 + 2];
+        rresult = Symbol.seq(production);
+        seen.put(wsc, rresult);
+
+        int i = production.length;
+        int n = 0;
+        production[--i] = Symbol.RECORD_START;
+        for (Field f : sc.getFields()) {
+          production[--i] = Symbol.fieldAdjustAction(n, f.name());
+          production[--i] = generate(f.schema(), seen);
+          production[--i] = Symbol.FIELD_END;
+          n++;
+        }
+        production[--i] = Symbol.RECORD_END;
+      }
+      return rresult;
+    }
+    default:
+      throw new RuntimeException("Unexpected schema type");
+    }
+  }
+}
+
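A small sketch of what this generator produces, using a map schema for
illustration. The JSON grammar mirrors the validating grammar but interleaves
JSON-only symbols: each map entry expects the key as STRING, then
MAP_KEY_MARKER, the value grammar, and ITEM_END:

    import org.apache.avro.Schema;
    import org.apache.avro.io.parsing.JsonGrammarGenerator;
    import org.apache.avro.io.parsing.Symbol;

    public class JsonGrammarSketch {
      public static void main(String[] args) {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"map\",\"values\":\"int\"}");
        // Start symbol for the JSON grammar; its flattened production ends
        // in MAP_START followed by a repeater for the entries.
        Symbol root = new JsonGrammarGenerator().generate(schema);
        System.out.println(Symbol.hasErrors(root));  // false
      }
    }
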
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Parser.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Parser.java
new file mode 100644
index 0000000..796de3e
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Parser.java
@@ -0,0 +1,192 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io.parsing;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.avro.AvroTypeException;
+
+/**
+ * Parser is the class that maintains the stack for parsing. This class
+ * is used by encoders, which are not required to skip.
+ */
+public class Parser {
+  /**
+   * The parser knows how to handle the terminal and non-terminal
+   * symbols. But it needs help from outside to handle implicit
+   * and explicit actions. The clients implement this interface to
+   * provide this help.
+   */
+  public interface ActionHandler {
+    /**
+     * Handle the action symbol <tt>top</tt> encountered while the
+     * <tt>input</tt> symbol is being taken off the stack.
+     * @param input The input symbol from the caller of advance
+     * @param top The symbol at the top the stack.
+     * @return  <tt>null</tt> if advance() is to continue processing the
+     * stack. If not <tt>null</tt> the return value will be returned
+     * by advance().
+     * @throws IOException
+     */
+    Symbol doAction(Symbol input, Symbol top) throws IOException;
+  }
+
+  protected final ActionHandler symbolHandler;
+  protected Symbol[] stack;
+  protected int pos;
+
+  public Parser(Symbol root, ActionHandler symbolHandler)
+    throws IOException {
+    this.symbolHandler = symbolHandler;
+    this.stack = new Symbol[5]; // Start small to make sure expansion code works
+    this.stack[0] = root;
+    this.pos = 1;
+  }
+
+  /**
+   * If there is insufficient room in the stack, use this to expand it.
+   */
+  private void expandStack() {
+    stack = Arrays.copyOf(stack, stack.length+Math.max(stack.length,1024));
+  }
+
+  /**
+   * Recursively replaces the symbol at the top of the stack with its
+   * production, until the top is a terminal. Then checks whether the
+   * top symbol matches the terminal symbol supplied as <tt>input</tt>.
+   * @param input The symbol to match against the terminal at the
+   * top of the stack.
+   * @return The terminal symbol at the top of the stack unless an
+   * implicit action resulted in another symbol, in which case that
+   * symbol is returned.
+   */
+  public final Symbol advance(Symbol input) throws IOException {
+    for (; ;) {
+      Symbol top = stack[--pos];
+      if (top == input) {
+        return top; // A common case
+      }
+
+      Symbol.Kind k = top.kind;
+      if (k == Symbol.Kind.IMPLICIT_ACTION) {
+        Symbol result = symbolHandler.doAction(input, top);
+        if (result != null) {
+          return result;
+        }
+      } else if (k == Symbol.Kind.TERMINAL) {
+        throw new AvroTypeException("Attempt to process a "
+                + input + " when a "
+                + top + " was expected.");
+      } else if (k == Symbol.Kind.REPEATER
+          && input == ((Symbol.Repeater) top).end) {
+        return input;
+      } else {
+        pushProduction(top);
+      }
+    }
+  }
+  
+  /**
+   * Performs any implicit actions at the top of the stack, expanding any
+   * production (other than the root) that may be encountered.
+   * This method will fail if there are any repeaters on the stack.
+   * @throws IOException
+   */
+  public final void processImplicitActions() throws IOException {
+     while (pos > 1) {
+      Symbol top = stack[pos - 1];
+      if (top.kind == Symbol.Kind.IMPLICIT_ACTION) {
+        pos--;
+        symbolHandler.doAction(null, top);
+      } else if (top.kind != Symbol.Kind.TERMINAL) {
+        pos--;
+        pushProduction(top);
+      } else {
+        break;
+      }
+    }
+  }
+
+  /**
+   * Performs any "trailing" implicit actions at the top the stack. 
+   */
+  public final void processTrailingImplicitActions() throws IOException {
+    while (pos >= 1) {
+      Symbol top = stack[pos - 1];
+      if (top.kind == Symbol.Kind.IMPLICIT_ACTION 
+        && ((Symbol.ImplicitAction) top).isTrailing) {
+        pos--;
+        symbolHandler.doAction(null, top);
+      } else {
+        break;
+      }
+    }
+  }
+
+  /**
+   * Pushes the production for the given symbol <tt>sym</tt>
+   * onto the parsing stack, growing the stack if needed.
+   * @param sym The symbol whose production is to be pushed.
+   */
+  public final void pushProduction(Symbol sym) {
+    Symbol[] p = sym.production;
+    while (pos + p.length > stack.length) {
+      expandStack();
+    }
+    System.arraycopy(p, 0, stack, pos, p.length);
+    pos += p.length;
+  }
+
+  /**
+   * Pops and returns the top symbol from the stack.
+   */
+  public Symbol popSymbol() {
+    return stack[--pos];
+  }
+  
+  /**
+   * Returns the top symbol from the stack.
+   */
+  public Symbol topSymbol() {
+    return stack[pos - 1];
+  }
+  
+  /**
+   * Pushes <tt>sym</tt> on to the stack.
+   */
+  public void pushSymbol(Symbol sym) {
+    if (pos == stack.length) {
+      expandStack();
+    }
+    stack[pos++] = sym;
+  }
+  
+  /**
+   * Returns the depth of the stack.
+   */
+  public int depth() {
+    return pos;
+  }
+
+  public void reset() {
+    pos = 1;
+  }
+}
+
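A minimal sketch of driving a Parser directly with a no-op ActionHandler,
much as the validating encoder and decoder do; the grammar comes from
ValidatingGrammarGenerator and the schema is illustrative:

    import java.io.IOException;

    import org.apache.avro.Schema;
    import org.apache.avro.io.parsing.Parser;
    import org.apache.avro.io.parsing.Symbol;
    import org.apache.avro.io.parsing.ValidatingGrammarGenerator;

    public class ParserSketch {
      public static void main(String[] args) throws IOException {
        Symbol root = new ValidatingGrammarGenerator().generate(
            new Schema.Parser().parse("{\"type\":\"array\",\"items\":\"long\"}"));
        Parser parser = new Parser(root, new Parser.ActionHandler() {
          public Symbol doAction(Symbol input, Symbol top) { return null; }
        });
        parser.advance(Symbol.ARRAY_START);
        parser.advance(Symbol.LONG);       // one array item
        parser.advance(Symbol.ARRAY_END);  // the repeater matches its end symbol
        // parser.advance(Symbol.STRING) at the item position would instead
        // have thrown AvroTypeException.
      }
    }
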
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
new file mode 100644
index 0000000..31f38de
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ResolvingGrammarGenerator.java
@@ -0,0 +1,544 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io.parsing;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.codehaus.jackson.JsonNode;
+
+/**
+ * The class that generates a resolving grammar to resolve between two
+ * schemas.
+ */
+public class ResolvingGrammarGenerator extends ValidatingGrammarGenerator {
+  /**
+   * Resolves the writer schema <tt>writer</tt> and the reader schema
+   * <tt>reader</tt> and returns the start symbol for the grammar generated. 
+   * @param writer    The schema used by the writer
+   * @param reader    The schema used by the reader
+   * @return          The start symbol for the resolving grammar
+   * @throws IOException 
+   */
+  public final Symbol generate(Schema writer, Schema reader)
+    throws IOException {
+    return Symbol.root(generate(writer, reader, new HashMap<LitS, Symbol>()));
+  }
+  
+  /**
+   * Resolves the writer schema <tt>writer</tt> and the reader schema
+   * <tt>reader</tt> and returns the start symbol for the grammar generated.
+   * If there is already a symbol in the map <tt>seen</tt> for resolving the
+   * two schemas, then that symbol is returned. Otherwise a new symbol is
+   * generated and returned.
+   * @param writer    The schema used by the writer
+   * @param reader    The schema used by the reader
+   * @param seen      The map from (writer schema, reader schema) pairs to
+   * the start symbols of the resolving grammars generated so far.
+   * @return          The start symbol for the resolving grammar
+   * @throws IOException 
+   */
+  public Symbol generate(Schema writer, Schema reader,
+                                Map<LitS, Symbol> seen) throws IOException
+  {
+    final Schema.Type writerType = writer.getType();
+    final Schema.Type readerType = reader.getType();
+
+    if (writerType == readerType) {
+      switch (writerType) {
+      case NULL:
+        return Symbol.NULL;
+      case BOOLEAN:
+        return Symbol.BOOLEAN;
+      case INT:
+        return Symbol.INT;
+      case LONG:
+        return Symbol.LONG;
+      case FLOAT:
+        return Symbol.FLOAT;
+      case DOUBLE:
+        return Symbol.DOUBLE;
+      case STRING:
+        return Symbol.STRING;
+      case BYTES:
+        return Symbol.BYTES;
+      case FIXED:
+        if (writer.getFullName().equals(reader.getFullName())
+            && writer.getFixedSize() == reader.getFixedSize()) {
+          return Symbol.seq(Symbol.intCheckAction(writer.getFixedSize()),
+              Symbol.FIXED);
+        }
+        break;
+
+      case ENUM:
+        if (writer.getFullName() == null
+                || writer.getFullName().equals(reader.getFullName())) {
+          return Symbol.seq(mkEnumAdjust(writer.getEnumSymbols(),
+                  reader.getEnumSymbols()), Symbol.ENUM);
+        }
+        break;
+
+      case ARRAY:
+        return Symbol.seq(Symbol.repeat(Symbol.ARRAY_END,
+                generate(writer.getElementType(),
+                reader.getElementType(), seen)),
+            Symbol.ARRAY_START);
+      
+      case MAP:
+        return Symbol.seq(Symbol.repeat(Symbol.MAP_END,
+                generate(writer.getValueType(),
+                reader.getValueType(), seen), Symbol.STRING),
+            Symbol.MAP_START);
+      case RECORD:
+        return resolveRecords(writer, reader, seen);
+      case UNION:
+        return resolveUnion(writer, reader, seen);
+      default:
+        throw new AvroTypeException("Unkown type for schema: " + writerType);
+      }
+    } else {  // writer and reader are of different types
+      if (writerType == Schema.Type.UNION) {
+        return resolveUnion(writer, reader, seen);
+      }
+  
+      switch (readerType) {
+      case LONG:
+        switch (writerType) {
+        case INT:
+          return Symbol.resolve(super.generate(writer, seen), Symbol.LONG);
+        }
+        break;
+  
+      case FLOAT:
+        switch (writerType) {
+        case INT:
+        case LONG:
+          return Symbol.resolve(super.generate(writer, seen), Symbol.FLOAT);
+        }
+        break;
+  
+      case DOUBLE:
+        switch (writerType) {
+        case INT:
+        case LONG:
+        case FLOAT:
+          return Symbol.resolve(super.generate(writer, seen), Symbol.DOUBLE);
+        }
+        break;
+  
+      case BYTES:
+        switch (writerType) {
+        case STRING:
+          return Symbol.resolve(super.generate(writer, seen), Symbol.BYTES);
+        }
+        break;
+  
+      case STRING:
+        switch (writerType) {
+        case BYTES:
+          return Symbol.resolve(super.generate(writer, seen), Symbol.STRING);
+        }
+        break;
+  
+      case UNION:
+        int j = bestBranch(reader, writer, seen);
+        if (j >= 0) {
+          Symbol s = generate(writer, reader.getTypes().get(j), seen);
+          return Symbol.seq(Symbol.unionAdjustAction(j, s), Symbol.UNION);
+        }
+        break;
+      case NULL:
+      case BOOLEAN:
+      case INT:
+      case ENUM:
+      case ARRAY:
+      case MAP:
+      case RECORD:
+      case FIXED:
+        break;
+      default:
+        throw new RuntimeException("Unexpected schema type: " + readerType);
+      }
+    }
+    return Symbol.error("Found " + writer.getFullName()
+                        + ", expecting " + reader.getFullName());
+  }
+
+  private Symbol resolveUnion(Schema writer, Schema reader,
+      Map<LitS, Symbol> seen) throws IOException {
+    List<Schema> alts = writer.getTypes();
+    final int size = alts.size();
+    Symbol[] symbols = new Symbol[size];
+    String[] labels = new String[size];
+
+    /**
+     * We construct a symbol without filling the arrays. Please see
+     * {@link Symbol#production} for the reason.
+     */
+    int i = 0;
+    for (Schema w : alts) {
+      symbols[i] = generate(w, reader, seen);
+      labels[i] = w.getFullName();
+      i++;
+    }
+    return Symbol.seq(Symbol.alt(symbols, labels),
+                      Symbol.writerUnionAction());
+  }
+
+  private Symbol resolveRecords(Schema writer, Schema reader,
+      Map<LitS, Symbol> seen) throws IOException {
+    LitS wsc = new LitS2(writer, reader);
+    Symbol result = seen.get(wsc);
+    if (result == null) {
+      List<Field> wfields = writer.getFields();
+      List<Field> rfields = reader.getFields();
+
+      // First, compute reordering of reader fields, plus
+      // number elements in the result's production
+      Field[] reordered = new Field[rfields.size()];
+      int ridx = 0;
+      int count = 1 + wfields.size();
+
+      for (Field f : wfields) {
+        Field rdrField = reader.getField(f.name());
+        if (rdrField != null) {
+          reordered[ridx++] = rdrField;
+        }
+      }
+
+      for (Field rf : rfields) {
+        String fname = rf.name();
+        if (writer.getField(fname) == null) {
+          if (rf.defaultValue() == null) {
+            result = Symbol.error("Found " + writer.getFullName()
+                                  + ", expecting " + reader.getFullName()
+                                  + ", missing required field " + fname);
+            seen.put(wsc, result);
+            return result;
+          } else {
+            reordered[ridx++] = rf;
+            count += 3;
+          }
+        }
+      }
+
+      Symbol[] production = new Symbol[count];
+      production[--count] = Symbol.fieldOrderAction(reordered);
+
+      /**
+       * We construct a symbol without filling the array. Please see
+       * {@link Symbol#production} for the reason.
+       */
+      result = Symbol.seq(production);
+      seen.put(wsc, result);
+
+      /*
+       * For now every field in read-record with no default value
+       * must be in write-record.
+       * Write record may have additional fields, which will be
+       * skipped during read.
+       */
+
+      // Handle all the writer's fields
+      for (Field wf : wfields) {
+        String fname = wf.name();
+        Field rf = reader.getField(fname);
+        if (rf == null) {
+          production[--count] =
+            Symbol.skipAction(generate(wf.schema(), wf.schema(), seen));
+        } else {
+          production[--count] =
+            generate(wf.schema(), rf.schema(), seen);
+        }
+      }
+
+      // Add default values for fields missing from Writer
+      for (Field rf : rfields) {
+        String fname = rf.name();
+        Field wf = writer.getField(fname);
+        if (wf == null) {
+          byte[] bb = getBinary(rf.schema(), rf.defaultValue());
+          production[--count] = Symbol.defaultStartAction(bb);
+          production[--count] = generate(rf.schema(), rf.schema(), seen);
+          production[--count] = Symbol.DEFAULT_END_ACTION;
+        }
+      }
+    }
+    return result;
+  }
+
+  private static EncoderFactory factory = new EncoderFactory().configureBufferSize(32);
+  /**
+   * Returns the Avro binary encoded version of <tt>n</tt> according to
+   * the schema <tt>s</tt>.
+   * @param s The schema for encoding
+   * @param n The Json node that has the value to be encoded.
+   * @return  The binary encoded version of <tt>n</tt>.
+   * @throws IOException
+   */
+  private static byte[] getBinary(Schema s, JsonNode n) throws IOException {
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    Encoder e = factory.binaryEncoder(out, null);
+    encode(e, s, n);
+    e.flush();
+    return out.toByteArray();
+  }
+  
+  /**
+   * Encodes the given Json node <tt>n</tt> on to the encoder <tt>e</tt>
+   * according to the schema <tt>s</tt>.
+   * @param e The encoder to encode into.
+   * @param s The schema for the object being encoded.
+   * @param n The Json node to encode.
+   * @throws IOException
+   * @deprecated internal method
+   */
+  @Deprecated
+  public static void encode(Encoder e, Schema s, JsonNode n)
+    throws IOException {
+    switch (s.getType()) {
+    case RECORD:
+      for (Field f : s.getFields()) {
+        String name = f.name();
+        JsonNode v = n.get(name);
+        if (v == null) {
+          v = f.defaultValue();
+        }
+        if (v == null) {
+          throw new AvroTypeException("No default value for: " + name);
+        }
+        encode(e, f.schema(), v);
+      }
+      break;
+    case ENUM:
+      e.writeEnum(s.getEnumOrdinal(n.getTextValue()));
+      break;
+    case ARRAY:
+      e.writeArrayStart();
+      e.setItemCount(n.size());
+      Schema i = s.getElementType();
+      for (JsonNode node : n) {
+        e.startItem();
+        encode(e, i, node);
+      }
+      e.writeArrayEnd();
+      break;
+    case MAP:
+      e.writeMapStart();
+      e.setItemCount(n.size());
+      Schema v = s.getValueType();
+      for (Iterator<String> it = n.getFieldNames(); it.hasNext();) {
+        e.startItem();
+        String key = it.next();
+        e.writeString(key);
+        encode(e, v, n.get(key));
+      }
+      e.writeMapEnd();
+      break;
+    case UNION:
+      e.writeIndex(0);
+      encode(e, s.getTypes().get(0), n);
+      break;
+    case FIXED:
+      if (!n.isTextual())
+        throw new AvroTypeException("Non-string default value for fixed: "+n);
+      byte[] bb = n.getTextValue().getBytes("ISO-8859-1");
+      if (bb.length != s.getFixedSize()) {
+        bb = Arrays.copyOf(bb, s.getFixedSize());
+      }
+      e.writeFixed(bb);
+      break;
+    case STRING:
+      if (!n.isTextual())
+        throw new AvroTypeException("Non-string default value for string: "+n);
+      e.writeString(n.getTextValue());
+      break;
+    case BYTES:
+      if (!n.isTextual())
+        throw new AvroTypeException("Non-string default value for bytes: "+n);
+      e.writeBytes(n.getTextValue().getBytes("ISO-8859-1"));
+      break;
+    case INT:
+      if (!n.isNumber())
+        throw new AvroTypeException("Non-numeric default value for int: "+n);
+      e.writeInt(n.getIntValue());
+      break;
+    case LONG:
+      if (!n.isNumber())
+        throw new AvroTypeException("Non-numeric default value for long: "+n);
+      e.writeLong(n.getLongValue());
+      break;
+    case FLOAT:
+      if (!n.isNumber())
+        throw new AvroTypeException("Non-numeric default value for float: "+n);
+      e.writeFloat((float) n.getDoubleValue());
+      break;
+    case DOUBLE:
+      if (!n.isNumber())
+        throw new AvroTypeException("Non-numeric default value for double: "+n);
+      e.writeDouble(n.getDoubleValue());
+      break;
+    case BOOLEAN:
+      if (!n.isBoolean())
+        throw new AvroTypeException("Non-boolean default for boolean: "+n);
+      e.writeBoolean(n.getBooleanValue());
+      break;
+    case NULL:
+      if (!n.isNull())
+        throw new AvroTypeException("Non-null default value for null type: "+n);
+      e.writeNull();
+      break;
+    }
+  }
+
+  private static Symbol mkEnumAdjust(List<String> wsymbols,
+      List<String> rsymbols){
+    Object[] adjustments = new Object[wsymbols.size()];
+    for (int i = 0; i < adjustments.length; i++) {
+      int j = rsymbols.indexOf(wsymbols.get(i));
+      adjustments[i] = (j == -1 ? "No match for " + wsymbols.get(i)
+                                : Integer.valueOf(j));
+    }
+    return Symbol.enumAdjustAction(rsymbols.size(), adjustments);
+  }
+
+  /**
+   * This checks if the symbol itself is an error or if there is an error in
+   * its production.
+   *
+   * When the symbol is created for a record, this checks whether the record
+   * fields are present (the symbol is not an error action) and that all of the
+   * fields have a non-error action. Record fields may have nested error
+   * actions.
+   *
+   * @return true if the symbol is an error or if its production has an error
+   */
+  private boolean hasMatchError(Symbol sym) {
+    if (sym instanceof Symbol.ErrorAction) {
+      return true;
+    } else {
+      for (int i = 0; i < sym.production.length; i += 1) {
+        if (sym.production[i] instanceof Symbol.ErrorAction) {
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+
+  private int bestBranch(Schema r, Schema w, Map<LitS, Symbol> seen) throws IOException {
+    Schema.Type vt = w.getType();
+    // first scan for exact match
+    int j = 0;
+    int structureMatch = -1;
+    for (Schema b : r.getTypes()) {
+      if (vt == b.getType())
+        if (vt == Schema.Type.RECORD || vt == Schema.Type.ENUM ||
+            vt == Schema.Type.FIXED) {
+          String vname = w.getFullName();
+          String bname = b.getFullName();
+          // return immediately if the name matches exactly according to spec
+          if (vname != null && vname.equals(bname))
+            return j;
+
+          if (vt == Schema.Type.RECORD &&
+              !hasMatchError(resolveRecords(w, b, seen))) {
+            String vShortName = w.getName();
+            String bShortName = b.getName();
+            // use the first structure match or one where the name matches
+            if ((structureMatch < 0) ||
+                (vShortName != null && vShortName.equals(bShortName))) {
+              structureMatch = j;
+            }
+          }
+        } else
+          return j;
+      j++;
+    }
+
+    // if there is a record structure match, return it
+    if (structureMatch >= 0)
+      return structureMatch;
+
+    // then scan for a match via numeric promotion
+    j = 0;
+    for (Schema b : r.getTypes()) {
+      switch (vt) {
+      case INT:
+        switch (b.getType()) {
+        case LONG: case DOUBLE:
+          return j;
+        }
+        break;
+      case LONG:
+      case FLOAT:
+        switch (b.getType()) {
+        case DOUBLE:
+          return j;
+        }
+        break;
+      case STRING:
+        switch (b.getType()) {
+        case BYTES:
+          return j;
+        }
+        break;
+      case BYTES:
+        switch (b.getType()) {
+        case STRING:
+          return j;
+        }
+        break;
+      }
+      j++;
+    }
+    return -1;
+  }
+
+  /**
+   * Clever trick which differentiates items put into <code>seen</code> by
+   * {@link ValidatingGrammarGenerator#generate(Schema, Map)} from those put
+   * in by {@link ResolvingGrammarGenerator#generate(Schema, Schema, Map)}.
+   */
+   static class LitS2 extends ValidatingGrammarGenerator.LitS {
+     public Schema expected;
+     public LitS2(Schema actual, Schema expected) {
+       super(actual);
+       this.expected = expected;
+     }
+     public boolean equals(Object o) {
+       if (! (o instanceof LitS2)) return false;
+       LitS2 other = (LitS2) o;
+       return actual == other.actual && expected == other.expected;
+     }
+     public int hashCode() {
+       return super.hashCode() + expected.hashCode();
+     }
+   }
+}
+
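This generator is normally used behind ResolvingDecoder, but the grammar it
produces can also be inspected directly. A sketch with illustrative
writer/reader pairs, one resolvable by int-to-long promotion and one not:

    import java.io.IOException;

    import org.apache.avro.Schema;
    import org.apache.avro.io.parsing.ResolvingGrammarGenerator;
    import org.apache.avro.io.parsing.Symbol;

    public class ResolvingSketch {
      public static void main(String[] args) throws IOException {
        // int promotes to long, so the grammar contains no error actions.
        Symbol ok = new ResolvingGrammarGenerator().generate(
            new Schema.Parser().parse("\"int\""),
            new Schema.Parser().parse("\"long\""));
        System.out.println(Symbol.hasErrors(ok));   // false

        // An incompatible pair yields a grammar with an embedded ErrorAction
        // that fires only if that branch is reached while decoding.
        Symbol bad = new ResolvingGrammarGenerator().generate(
            new Schema.Parser().parse("\"string\""),
            new Schema.Parser().parse("\"int\""));
        System.out.println(Symbol.hasErrors(bad));  // true
      }
    }
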
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/SkipParser.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/SkipParser.java
new file mode 100644
index 0000000..5afbe60
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/SkipParser.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io.parsing;
+
+import java.io.IOException;
+
+/**
+ * A parser that is capable of skipping as well as reading and writing. This
+ * class is used by decoders which (unlike encoders) are required to implement
+ * methods to skip.
+ */
+public class SkipParser extends Parser {
+  /**
+   * The clients implement this interface to skip symbols and actions.
+   */
+  public interface SkipHandler {
+    /**
+     * Skips the action at the top of the stack.
+     */
+    void skipAction() throws IOException;
+
+    /**
+     * Skips the symbol at the top of the stack.
+     */
+    void skipTopSymbol() throws IOException;
+  }
+  
+  private final SkipHandler skipHandler;
+
+  public SkipParser(Symbol root, ActionHandler symbolHandler,
+      SkipHandler skipHandler)
+    throws IOException {
+    super(root, symbolHandler);
+    this.skipHandler = skipHandler;
+  }
+
+  /**
+   * Skips data by calling <code>skipXyz</code> or
+   * <code>readXyz</code> methods on <code>this</code>, until the
+   * parser stack reaches the target level.
+   */
+  public final void skipTo(int target) throws IOException {
+    outer:
+    while (target < pos) {
+      Symbol top = stack[pos - 1];
+      while (top.kind != Symbol.Kind.TERMINAL) {
+        if (top.kind == Symbol.Kind.IMPLICIT_ACTION
+            || top.kind == Symbol.Kind.EXPLICIT_ACTION) {
+          skipHandler.skipAction();
+        } else {
+          --pos;
+          pushProduction(top);
+        }
+        continue outer;
+      }
+      skipHandler.skipTopSymbol();
+    }
+  }
+
+  /**
+   * Skips the repeater at the top of the stack.
+   */
+  public final void skipRepeater() throws IOException {
+    int target = pos;
+    Symbol repeater = stack[--pos];
+    assert repeater.kind == Symbol.Kind.REPEATER;
+    pushProduction(repeater);
+    skipTo(target);
+  }
+
+  /**
+   * Pushes the given symbol onto the stack and skips it.
+   * @param symToSkip The symbol that should be skipped.
+   */
+  public final void skipSymbol(Symbol symToSkip) throws IOException {
+    int target = pos;
+    pushSymbol(symToSkip);
+    skipTo(target);
+  }
+}
+
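A rough sketch of how a decoder wires this up. Avro's own decoders implement
ActionHandler and SkipHandler themselves and pass this for both; the class
below is hypothetical and the terminal dispatch is abbreviated:

    import java.io.IOException;

    import org.apache.avro.Schema;
    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.parsing.Parser;
    import org.apache.avro.io.parsing.SkipParser;
    import org.apache.avro.io.parsing.Symbol;
    import org.apache.avro.io.parsing.ValidatingGrammarGenerator;

    class SkippingReader implements Parser.ActionHandler, SkipParser.SkipHandler {
      private final BinaryDecoder in;
      private final SkipParser parser;

      SkippingReader(Schema schema, BinaryDecoder in) throws IOException {
        this.in = in;
        this.parser = new SkipParser(
            new ValidatingGrammarGenerator().generate(schema), this, this);
      }

      public Symbol doAction(Symbol input, Symbol top) { return null; }

      public void skipAction() throws IOException {
        parser.popSymbol();  // consume the action symbol (abbreviated)
      }

      public void skipTopSymbol() throws IOException {
        Symbol top = parser.popSymbol();
        if (top == Symbol.INT) in.readInt();          // varints must be decoded
        else if (top == Symbol.LONG) in.readLong();
        else if (top == Symbol.STRING) in.skipString();
        else if (top == Symbol.BYTES) in.skipBytes();
        // ... one case per terminal, as the real decoders do
      }
    }
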
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Symbol.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Symbol.java
new file mode 100644
index 0000000..08a9d14
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/Symbol.java
@@ -0,0 +1,627 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io.parsing;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+
+import org.apache.avro.Schema;
+
+/**
+ * Symbol is the base of all symbols (terminals and non-terminals) of
+ * the grammar.
+ */
+public abstract class Symbol {
+  /*
+   * The type of symbol.
+   */
+  public enum Kind {
+    /** terminal symbols which have no productions */
+    TERMINAL,
+    /** Start symbol for some grammar */
+    ROOT,
+    /** non-terminal symbol which is a sequence of one or more other symbols */
+    SEQUENCE,
+    /** non-terminal to represent the contents of an array or map */
+    REPEATER,
+    /** non-terminal to represent the union */
+    ALTERNATIVE,
+    /** non-terminal action symbol which is automatically consumed */
+    IMPLICIT_ACTION,
+    /** non-terminal action symbol which is explicitly consumed */
+    EXPLICIT_ACTION
+  };
+
+  /** The kind of this symbol. */
+  public final Kind kind;
+
+  /**
+   * The production for this symbol. If this symbol is a terminal
+   * this is <tt>null</tt>. Otherwise this holds the sequence of
+   * symbols that forms the production for this symbol. The
+   * sequence is in the reverse order of production, which makes it
+   * easy to copy onto the parsing stack.
+   *
+   * Please note that this field is final, so the production for a symbol
+   * must be known before that symbol is constructed. This requirement
+   * cannot be met for symbols which are recursive (e.g. a record that
+   * holds a union, a branch of which is the record itself). To resolve this
+   * problem, we initialize the symbol with an array of nulls and fill in
+   * the symbols later. Not clean, but it works. The other option is to make
+   * this field non-final, but keeping it final, and thus keeping the symbol
+   * immutable, gives some comfort. See the various generators for how
+   * records are generated.
+   */
+  public final Symbol[] production;
+  /**
+   * Constructs a new symbol of the given kind <tt>kind</tt>.
+   */
+  protected Symbol(Kind kind) {
+    this(kind, null);
+  }
+
+  protected Symbol(Kind kind, Symbol[] production) {
+    this.production = production;
+    this.kind = kind;
+  }
+
+  /**
+   * A convenience method to construct a root symbol.
+   */
+  static Symbol root(Symbol... symbols) {
+    return new Root(symbols);
+  }
+  /**
+   * A convenience method to construct a sequence.
+   * @param production  The constituent symbols of the sequence.
+   */
+  static Symbol seq(Symbol... production) {
+    return new Sequence(production);
+  }
+
+  /**
+   * A convenience method to construct a repeater.
+   * @param endSymbol    The symbol that marks the end of the repetition.
+   * @param symsToRepeat The symbols to repeat in the repeater.
+   */
+  static Symbol repeat(Symbol endSymbol, Symbol... symsToRepeat) {
+    return new Repeater(endSymbol, symsToRepeat);
+  }
+
+  /**
+   *  A convenience method to construct a union.
+   */
+  static Symbol alt(Symbol[] symbols, String[] labels) {
+    return new Alternative(symbols, labels);
+  }
+
+  /**
+   * A convenience method to construct an ErrorAction.
+   * @param e The error message.
+   */
+  static Symbol error(String e) {
+    return new ErrorAction(e);
+  }
+  
+  /**
+   * A convenience method to construct a ResolvingAction.
+   * @param w The writer symbol
+   * @param r The reader symbol
+   */
+  static Symbol resolve(Symbol w, Symbol r) {
+    return new ResolvingAction(w, r);
+  }
+  
+  private static class Fixup {
+    public final Symbol[] symbols;
+    public final int pos;
+    
+    public Fixup(Symbol[] symbols, int pos) {
+      this.symbols = symbols;
+      this.pos = pos;
+    }
+  }
+  
+  public Symbol flatten(Map<Sequence, Sequence> map,
+      Map<Sequence, List<Fixup>> map2) {
+    return this;
+  }
+  
+  public int flattenedSize() {
+    return 1;
+  }
+  
+  /**
+   * Flattens the given sub-array of symbols into an output sub-array of
+   * symbols. Every <tt>Sequence</tt> in the input is recursively replaced by
+   * its production, and for non-<tt>Sequence</tt> symbols that internally
+   * hold other symbols, those internal symbols are flattened as well.
+   *
+   * The algorithm does a few tricks to handle recursive symbol definitions.
+   * In order to avoid infinite recursion with recursive symbols, we keep a map
+   * of Symbol->Symbol. Before fully constructing a flattened symbol for a
+   * <tt>Sequence</tt> we insert an empty output symbol into the map and then
+   * start filling the production for the <tt>Sequence</tt>. If the same
+   * <tt>Sequence</tt> is encountered due to recursion, we simply return the
+   * (empty) output <tt>Sequence</tt> from the map. Then we actually fill out
+   * the production for the <tt>Sequence</tt>.
+   * As part of the flattening process we copy the production of
+   * <tt>Sequence</tt>s into larger arrays. If the original <tt>Sequence</tt>
+   * has not been fully constructed yet, we copy a bunch of <tt>null</tt>s.
+   * A fix-up remembers each of those <tt>null</tt> patches; the fix-ups are
+   * finally filled in once the symbols to occupy those patches are known.
+   *
+   * @param in  The array of input symbols to flatten
+   * @param start The position where the input sub-array starts.
+   * @param out The output that receives the flattened list of symbols. The
+   * output array should have sufficient space to receive the expanded sub-array
+   * of symbols.
+   * @param skip  The position where the output sub-array starts.
+   * @param map A map of symbols which have already been expanded. Useful for
+   * handling recursive definitions and for caching.
+   * @param map2  A map used to store the list of fix-ups.
+   */
+  static void flatten(Symbol[] in, int start,
+      Symbol[] out, int skip,
+      Map<Sequence, Sequence> map,
+      Map<Sequence, List<Fixup>> map2) {
+    for (int i = start, j = skip; i < in.length; i++) {
+      Symbol s = in[i].flatten(map, map2);
+      if (s instanceof Sequence) {
+        Symbol[] p = s.production;
+        List<Fixup> l = map2.get(s);
+        if (l == null) {
+          System.arraycopy(p, 0, out, j, p.length);
+        } else {
+          l.add(new Fixup(out, j));
+        }
+        j += p.length;
+      } else {
+        out[j++] = s;
+      }
+    }
+  }
+
+  /**
+   * Returns the amount of space required to flatten the given
+   * sub-array of symbols.
+   * @param symbols The array of input symbols.
+   * @param start The index where the subarray starts.
+   * @return  The number of symbols that will be produced if one expands
+   * the given input.
+   */
+  protected static int flattenedSize(Symbol[] symbols, int start) {
+    int result = 0;
+    for (int i = start; i < symbols.length; i++) {
+      if (symbols[i] instanceof Sequence) {
+        Sequence s = (Sequence) symbols[i];
+        result += s.flattenedSize();
+      } else {
+        result += 1;
+      }
+    }
+    return result;
+  }
+
+  private static class Terminal extends Symbol {
+    private final String printName;
+    public Terminal(String printName) {
+      super(Kind.TERMINAL);
+      this.printName = printName;
+    }
+    public String toString() { return printName; }
+  }
+
+  public static class ImplicitAction extends Symbol {
+    /**
+     * Set to <tt>true</tt> if and only if this implicit action is 
+     * a trailing action, that is, an action that follows a
+     * real symbol, e.g. {@link Symbol#DEFAULT_END_ACTION}.
+     */
+    public final boolean isTrailing;
+
+    private ImplicitAction() {
+      this(false);
+    }
+    
+    private ImplicitAction(boolean isTrailing) {
+      super(Kind.IMPLICIT_ACTION);
+      this.isTrailing = isTrailing;
+    }
+  }
+  
+  protected static class Root extends Symbol {
+    private Root(Symbol... symbols) {
+      super(Kind.ROOT, makeProduction(symbols));
+      production[0] = this;
+    }
+
+    private static Symbol[] makeProduction(Symbol[] symbols) {
+      Symbol[] result = new Symbol[flattenedSize(symbols, 0) + 1];
+      flatten(symbols, 0, result, 1,
+          new HashMap<Sequence, Sequence>(),
+          new HashMap<Sequence, List<Fixup>>());
+      return result;
+    }
+  }
+  
+  protected static class Sequence extends Symbol implements Iterable<Symbol> {
+    private Sequence(Symbol[] productions) {
+      super(Kind.SEQUENCE, productions);
+    }
+
+    public Symbol get(int index) {
+      return production[index];
+    }
+    
+    public int size() {
+      return production.length;
+    }
+    
+    public Iterator<Symbol> iterator() {
+      return new Iterator<Symbol>() {
+        private int pos = production.length;
+        
+        public boolean hasNext() {
+          return 0 < pos;
+        }
+        
+        public Symbol next() {
+          if (0 < pos) {
+            return production[--pos];
+          } else {
+            throw new NoSuchElementException();
+          }
+        }
+        
+        public void remove() {
+          throw new UnsupportedOperationException();
+        }
+      };
+    }
+    @Override
+    public Sequence flatten(Map<Sequence, Sequence> map,
+        Map<Sequence, List<Fixup>> map2) {
+      Sequence result = map.get(this);
+      if (result == null) {
+        result = new Sequence(new Symbol[flattenedSize()]);
+        map.put(this, result);
+        List<Fixup> l = new ArrayList<Fixup>();
+        map2.put(result, l);
+        
+        flatten(production, 0,
+            result.production, 0, map, map2);
+        for (Fixup f : l) {
+          System.arraycopy(result.production, 0, f.symbols, f.pos,
+              result.production.length);
+        }
+        map2.remove(result);
+      }
+      return result;
+    }
+
+    @Override
+    public final int flattenedSize() {
+      return flattenedSize(production, 0);
+    }
+  }
+
+  public static class Repeater extends Symbol {
+    public final Symbol end;
+   
+    private Repeater(Symbol end, Symbol... sequenceToRepeat) {
+      super(Kind.REPEATER, makeProduction(sequenceToRepeat));
+      this.end = end;
+      production[0] = this;
+    }
+    
+    private static Symbol[] makeProduction(Symbol[] p) {
+      Symbol[] result = new Symbol[p.length + 1];
+      System.arraycopy(p, 0, result, 1, p.length);
+      return result;
+    }
+    
+    @Override
+    public Repeater flatten(Map<Sequence, Sequence> map,
+        Map<Sequence, List<Fixup>> map2) {
+      Repeater result =
+        new Repeater(end, new Symbol[flattenedSize(production, 1)]);
+      flatten(production, 1, result.production, 1, map, map2);
+      return result;
+    }
+
+  }
+  
+  /**
+   * Returns true if the grammar rooted at <tt>symbol</tt> contains any error
+   * symbol, indicating that parsing may fail for some inputs.
+   */
+  public static boolean hasErrors(Symbol symbol) {
+    switch(symbol.kind) {
+    case ALTERNATIVE:
+      return hasErrors(symbol, ((Alternative) symbol).symbols);
+    case EXPLICIT_ACTION:
+      return false;
+    case IMPLICIT_ACTION:
+      return symbol instanceof ErrorAction;
+    case REPEATER:
+      Repeater r = (Repeater) symbol;
+      return hasErrors(r.end) || hasErrors(symbol, r.production);
+    case ROOT:
+    case SEQUENCE:
+      return hasErrors(symbol, symbol.production);
+    case TERMINAL:
+      return false;
+    default:
+      throw new RuntimeException("unknown symbol kind: " + symbol.kind);
+    }
+  }
+  
+  private static boolean hasErrors(Symbol root, Symbol[] symbols) {
+    if(null != symbols) {
+      for(Symbol s: symbols) {
+        if (s == root) {
+          continue;
+        }
+        if (hasErrors(s)) {
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+    
+  public static class Alternative extends Symbol {
+    public final Symbol[] symbols;
+    public final String[] labels;
+    private Alternative(Symbol[] symbols, String[] labels) {
+      super(Kind.ALTERNATIVE);
+      this.symbols = symbols;
+      this.labels = labels;
+    }
+    
+    public Symbol getSymbol(int index) {
+      return symbols[index];
+    }
+    
+    public String getLabel(int index) {
+      return labels[index];
+    }
+    
+    public int size() {
+      return symbols.length;
+    }
+
+    public int findLabel(String label) {
+      if (label != null) {
+        for (int i = 0; i < labels.length; i++) {
+          if (label.equals(labels[i])) {
+            return i;
+          }
+        }
+      }
+      return -1;
+    }
+
+    @Override
+    public Alternative flatten(Map<Sequence, Sequence> map,
+        Map<Sequence, List<Fixup>> map2) {
+      Symbol[] ss = new Symbol[symbols.length];
+      for (int i = 0; i < ss.length; i++) {
+        ss[i] = symbols[i].flatten(map, map2);
+      }
+      return new Alternative(ss, labels);
+    }
+  }
+
+  public static class ErrorAction extends ImplicitAction {
+    public final String msg;
+    private ErrorAction(String msg) {
+      this.msg = msg;
+    }
+  }
+
+  public static IntCheckAction intCheckAction(int size) {
+    return new IntCheckAction(size);
+  }
+
+  public static class IntCheckAction extends Symbol {
+    public final int size;
+    @Deprecated public IntCheckAction(int size) {
+      super(Kind.EXPLICIT_ACTION);
+      this.size = size;
+    }
+  }
+
+  public static EnumAdjustAction enumAdjustAction(int rsymCount, Object[] adj) {
+    return new EnumAdjustAction(rsymCount, adj);
+  }
+  
+  public static class EnumAdjustAction extends IntCheckAction {
+    public final Object[] adjustments;
+    @Deprecated public EnumAdjustAction(int rsymCount, Object[] adjustments) {
+      super(rsymCount);
+      this.adjustments = adjustments;
+    }
+  }
+
+  public static WriterUnionAction writerUnionAction() {
+    return new WriterUnionAction();
+  }
+
+  public static class WriterUnionAction extends ImplicitAction {
+    private WriterUnionAction() {}
+  }
+
+  public static class ResolvingAction extends ImplicitAction {
+    public final Symbol writer;
+    public final Symbol reader;
+    private ResolvingAction(Symbol writer, Symbol reader) {
+      this.writer = writer;
+      this.reader = reader;
+    }
+    
+    @Override
+    public ResolvingAction flatten(Map<Sequence, Sequence> map,
+        Map<Sequence, List<Fixup>> map2) {
+      return new ResolvingAction(writer.flatten(map, map2),
+          reader.flatten(map, map2));
+    }
+
+  }
+  
+  public static SkipAction skipAction(Symbol symToSkip) {
+    return new SkipAction(symToSkip);
+  }
+
+  public static class SkipAction extends ImplicitAction {
+    public final Symbol symToSkip;
+    @Deprecated public SkipAction(Symbol symToSkip) {
+      super(true);
+      this.symToSkip = symToSkip;
+    }
+    
+    @Override
+    public SkipAction flatten(Map<Sequence, Sequence> map,
+        Map<Sequence, List<Fixup>> map2) {
+      return new SkipAction(symToSkip.flatten(map, map2));
+    }
+
+  }
+
+  public static FieldAdjustAction fieldAdjustAction(int rindex, String fname) {
+    return new FieldAdjustAction(rindex, fname);
+  }
+  
+  public static class FieldAdjustAction extends ImplicitAction {
+    public final int rindex;
+    public final String fname;
+    @Deprecated public FieldAdjustAction(int rindex, String fname) {
+      this.rindex = rindex;
+      this.fname = fname;
+    }
+  }
+  
+  public static FieldOrderAction fieldOrderAction(Schema.Field[] fields) {
+    return new FieldOrderAction(fields);
+  }
+
+  public static final class FieldOrderAction extends ImplicitAction {
+    public final Schema.Field[] fields;
+    @Deprecated public FieldOrderAction(Schema.Field[] fields) {
+      this.fields = fields;
+    }
+  }
+
+  public static DefaultStartAction defaultStartAction(byte[] contents) {
+    return new DefaultStartAction(contents);
+  }
+
+  public static class DefaultStartAction extends ImplicitAction {
+    public final byte[] contents;
+    @Deprecated public DefaultStartAction(byte[] contents) {
+      this.contents = contents;
+    }
+  }
+
+  public static UnionAdjustAction unionAdjustAction(int rindex, Symbol sym) {
+    return new UnionAdjustAction(rindex, sym);
+  }
+
+  public static class UnionAdjustAction extends ImplicitAction {
+    public final int rindex;
+    public final Symbol symToParse;
+    @Deprecated public UnionAdjustAction(int rindex, Symbol symToParse) {
+      this.rindex = rindex;
+      this.symToParse = symToParse;
+    }
+    
+    @Override
+    public UnionAdjustAction flatten(Map<Sequence, Sequence> map,
+        Map<Sequence, List<Fixup>> map2) {
+      return new UnionAdjustAction(rindex, symToParse.flatten(map, map2));
+    }
+    
+  }
+
+  /** For JSON. */
+  public static EnumLabelsAction enumLabelsAction(List<String> symbols) {
+    return new EnumLabelsAction(symbols);
+  }
+
+  public static class EnumLabelsAction extends IntCheckAction {
+    public final List<String> symbols;
+    @Deprecated public EnumLabelsAction(List<String> symbols) {
+      super(symbols.size());
+      this.symbols = symbols;
+    }
+    
+    public String getLabel(int n) {
+      return symbols.get(n);
+    }
+    
+    public int findLabel(String l) {
+      if (l != null) {
+        for (int i = 0; i < symbols.size(); i++) {
+          if (l.equals(symbols.get(i))) {
+            return i;
+          }
+        }
+      }
+      return -1;
+    }
+  }
+
+  /**
+   * The terminal symbols for the grammar.
+   */
+  public static final Symbol NULL = new Symbol.Terminal("null");
+  public static final Symbol BOOLEAN = new Symbol.Terminal("boolean");
+  public static final Symbol INT = new Symbol.Terminal("int");
+  public static final Symbol LONG = new Symbol.Terminal("long");
+  public static final Symbol FLOAT = new Symbol.Terminal("float");
+  public static final Symbol DOUBLE = new Symbol.Terminal("double");
+  public static final Symbol STRING = new Symbol.Terminal("string");
+  public static final Symbol BYTES = new Symbol.Terminal("bytes");
+  public static final Symbol FIXED = new Symbol.Terminal("fixed");
+  public static final Symbol ENUM = new Symbol.Terminal("enum");
+  public static final Symbol UNION = new Symbol.Terminal("union");
+
+  public static final Symbol ARRAY_START = new Symbol.Terminal("array-start");
+  public static final Symbol ARRAY_END = new Symbol.Terminal("array-end");
+  public static final Symbol MAP_START = new Symbol.Terminal("map-start");
+  public static final Symbol MAP_END = new Symbol.Terminal("map-end");
+  public static final Symbol ITEM_END = new Symbol.Terminal("item-end");
+
+  /* a pseudo terminal used by parsers */
+  public static final Symbol FIELD_ACTION =
+    new Symbol.Terminal("field-action");
+
+  public static final Symbol RECORD_START = new ImplicitAction(false);
+  public static final Symbol RECORD_END = new ImplicitAction(true);
+  public static final Symbol UNION_END = new ImplicitAction(true);
+  public static final Symbol FIELD_END = new ImplicitAction(true);
+  
+  public static final Symbol DEFAULT_END_ACTION = new ImplicitAction(true);
+  public static final Symbol MAP_KEY_MARKER =
+    new Symbol.Terminal("map-key-marker");
+}
+
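A sketch of how the UNION terminal and the Alternative symbol cooperate,
mirroring what ValidatingEncoder.writeIndex does: advance past UNION, pop the
Alternative, and push the chosen branch. The union schema is illustrative:

    import java.io.IOException;

    import org.apache.avro.Schema;
    import org.apache.avro.io.parsing.Parser;
    import org.apache.avro.io.parsing.Symbol;
    import org.apache.avro.io.parsing.ValidatingGrammarGenerator;

    public class UnionSymbolSketch {
      public static void main(String[] args) throws IOException {
        Symbol root = new ValidatingGrammarGenerator().generate(
            new Schema.Parser().parse("[\"null\",\"string\"]"));
        Parser parser = new Parser(root, new Parser.ActionHandler() {
          public Symbol doAction(Symbol input, Symbol top) { return null; }
        });
        parser.advance(Symbol.UNION);
        Symbol.Alternative alt = (Symbol.Alternative) parser.popSymbol();
        int branch = alt.findLabel("string");  // 1; labels are full names
        parser.pushSymbol(alt.getSymbol(branch));
        parser.advance(Symbol.STRING);         // the chosen branch
      }
    }
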
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ValidatingGrammarGenerator.java b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ValidatingGrammarGenerator.java
new file mode 100644
index 0000000..6bd1b26
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/ValidatingGrammarGenerator.java
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io.parsing;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+
+/**
+ * The class that generates a validating grammar.
+ */
+public class ValidatingGrammarGenerator {
+  /**
+   * Returns the non-terminal that is the start symbol
+   * for the grammar for the given schema <tt>schema</tt>.
+   */
+  public Symbol generate(Schema schema) {
+    return Symbol.root(generate(schema, new HashMap<LitS, Symbol>()));
+  }
+
+  /**
+   * Returns the non-terminal that is the start symbol
+   * for the grammar for the given schema <tt>sc</tt>. If there is already an entry
+   * for the given schema in the given map <tt>seen</tt> then
+   * that entry is returned. Otherwise a new symbol is generated and
+   * an entry is inserted into the map.
+   * @param sc    The schema for which the start symbol is required
+   * @param seen  A map of schema to symbol mapping done so far.
+   * @return      The start symbol for the schema
+   */
+  public Symbol generate(Schema sc, Map<LitS, Symbol> seen) {
+    switch (sc.getType()) {
+    case NULL:
+      return Symbol.NULL;
+    case BOOLEAN:
+      return Symbol.BOOLEAN;
+    case INT:
+      return Symbol.INT;
+    case LONG:
+      return Symbol.LONG;
+    case FLOAT:
+      return Symbol.FLOAT;
+    case DOUBLE:
+      return Symbol.DOUBLE;
+    case STRING:
+      return Symbol.STRING;
+    case BYTES:
+      return Symbol.BYTES;
+    case FIXED:
+      return Symbol.seq(Symbol.intCheckAction(sc.getFixedSize()),
+          Symbol.FIXED);
+    case ENUM:
+      return Symbol.seq(Symbol.intCheckAction(sc.getEnumSymbols().size()),
+          Symbol.ENUM);
+    case ARRAY:
+      return Symbol.seq(Symbol.repeat(Symbol.ARRAY_END, generate(sc.getElementType(), seen)),
+          Symbol.ARRAY_START);
+    case MAP:
+      return Symbol.seq(Symbol.repeat(Symbol.MAP_END,
+              generate(sc.getValueType(), seen), Symbol.STRING),
+          Symbol.MAP_START);
+    case RECORD: {
+      LitS wsc = new LitS(sc);
+      Symbol rresult = seen.get(wsc);
+      if (rresult == null) {
+        Symbol[] production = new Symbol[sc.getFields().size()];
+
+        /**
+         * We construct a symbol without filling the array. Please see
+         * {@link Symbol#production} for the reason.
+         */
+        rresult = Symbol.seq(production);
+        seen.put(wsc, rresult);
+
+        int i = production.length;
+        for (Field f : sc.getFields()) {
+          production[--i] = generate(f.schema(), seen);
+        }
+      }
+      return rresult;
+    }
+    case UNION:
+      List<Schema> subs = sc.getTypes();
+      Symbol[] symbols = new Symbol[subs.size()];
+      String[] labels = new String[subs.size()];
+      
+      int i = 0;
+      for (Schema b : sc.getTypes()) {
+        symbols[i] = generate(b, seen);
+        labels[i] = b.getFullName();
+        i++;
+      }
+      return Symbol.seq(Symbol.alt(symbols, labels), Symbol.UNION);
+
+    default:
+      throw new RuntimeException("Unexpected schema type");
+    }
+  }
+
+  /** A wrapper around Schema that does "==" equality. */
+  static class LitS {
+    public final Schema actual;
+    public LitS(Schema actual) { this.actual = actual; }
+    
+    /**
+     * Two LitS are equal if and only if their underlying schema is
+     * the same (not merely equal).
+     */
+    public boolean equals(Object o) {
+      if (! (o instanceof LitS)) return false;
+      return actual == ((LitS)o).actual;
+    }
+    
+    public int hashCode() {
+      return actual.hashCode();
+    }
+  }
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/doc-files/parsing.html b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/doc-files/parsing.html
new file mode 100644
index 0000000..050f533
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/doc-files/parsing.html
@@ -0,0 +1,615 @@
+<html>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<head>
+<title>Avro schemas as LL(1) CFG definitions</title>
+</head>
+<body>
+
+<center><h1>Avro schemas as LL(1) CFG definitions</h1></center>
+
+This document shows how an Avro schema can be interpreted as the definition of an LL(1) context-free grammar.  We use such an interpretation for two use-cases.  In the first use-case, we use the grammar to validate readers and writers of data against a single Avro schema.  Specifically, sequences of <code>Encoder.writeXyz</code> calls can be validated against a schema, and similarly sequences of <code>Decoder.readXyz</code> calls can be validated against a schema.
+
+The second use-case is using grammars to perform schema resolution.  For this use-case, we've developed a subclass of <code>Decoder</code> which takes two Avro schemas as input -- a reader and a writer schema.  This subclass accepts an input stream written according to the writer schema, and presents it to a client expecting the reader schema.  If the writer writes a long, for example, where the reader expects a double, then the <code>Decoder.readDouble</code> method will convert the writer's long into a double.
+
+This document looks at grammars in the context of these two use-cases.  We first look at the single-schema case, then the double-schema case.  In the future, we believe the interpretation of Avro schemas as CFGs will find other uses (for example, to determine whether or not a schema admits finite-sized values).
+
+
+<h1>The interpretation</h1>
+
+<p> We parse a schema into a set of JSON objects.  For each record, map, array, or union schema inside this set, the parse generates a unique identifier "n<sub>i</sub>" (the "pointer" to the schema).  By convention, n<sub>0</sub> is the identifier for the "top-level" schema (i.e., the schema we want to read or write).  In addition, where n<sub>i</sub> is a union, the parse will generate a unique identifier "b<sub>ij</sub>" for each branch of the union.
+
+<p> A context-free grammar (CFG) consists of a set of terminal symbols, a set of non-terminal symbols, a set of productions, and a start symbol.  Here's how we interpret an Avro schema as a CFG:
+
+<p> <b>Terminal symbols:</b> The terminal symbols of the CFG consist of <code>null</code>, <code>bool</code>, <code>int</code>, <code>long</code>, <code>float</code>, <code>double</code>, <code>string</code>, <code>bytes</code>, <code>enum</code>, <code>fixed</code>, <code>arraystart</code>, <code>arrayend</code>, <code>mapstart</code>, <code>mapend</code>, and <code>union</code>.  In addition, we define the special terminals <code>"1"</code>, <code>"2"</code>, <code>"3"</code>, and so on, one for each possible branch-index of a union; these "tag" terminals indicate which branch of a union was selected.
+
+<p> Below, we use the variable <i>P</i> to represent any one of <code>null</code>, <code>bool</code>, <code>int</code>, <code>long</code>, <code>float</code>, <code>double</code>, <code>string</code>, <code>bytes</code> (i.e., the "primitives").
+
+<p><b>Non-terminal symbols:</b> The non-terminal symbols of the CFG consist of the identifiers n<sub>i</sub>, u<sub>i</sub>, r<sub>i</sub>, e<sub>i</sub>, f<sub>i</sub> and r<sub>p</sub> (there is a non-terminal r<sub>p</sub> for each symbol in <i>P</i>).
+
+<p><b>Productions:</b> The productions of the CFG are as follows:
+
+<p><i>Records:</i> If n<sub>i</sub> is a record-schema, then it defines the following production:
+<br>   n<sub>i</sub> ::= sym(f<sub>i1</sub>) sym(f<sub>i2</sub>) .. sym(f<sub>im</sub>)
+<br>where f<sub>ij</sub> is field "j" of record n<sub>i</sub>, and sym(f<sub>ij</sub>) is the appropriate member of <i>P</i> if f<sub>ij</sub> is a primitive type, or the appropriate n<sub>k</sub> for some k if f<sub>ij</sub> is a map, array, union, or record schema.
+
+<p><i>Arrays:</i> If n<sub>i</sub> is an array schema, then it defines the following productions:
+<br>    n<sub>i</sub> ::= <code>arraystart</code> r<sub>i</sub> <code>arrayend</code>
+<br>    r<sub>i</sub> ::= sym(n<sub>i</sub>) r<sub>i</sub> | ε
+<br> where "sym(n<sub>i</sub>)" is either some <i>P</i>, if this is an array of primitives, or the non-terminal associated with the schema of the element-type of n<sub>k</sub>.
+
+<p><i>Maps:</i> If n<sub>i</sub> is a map schema, then it defines the following productions:
+<br>   n<sub>i</sub> ::= <code>mapstart</code> r<sub>i</sub> <code>mapend</code>
+<br>   r<sub>i</sub> ::= <code>string</code> sym(n<sub>i</sub>) r<sub>i</sub> | ε
+<br> where "sym(n<sub>i</sub>)" is either some <i>P</i>, if the value-type is a primitive, or the non-terminal associated with the schema of the value-type of n<sub>k</sub>.
+
+<p><i>Unions:</i> If n<sub>i</sub> is a union schema, then it defines the following productions:
+<br>   n<sub>i</sub> ::= <code>union</code> u<sub>i</sub>
+<br>   u<sub>i</sub> ::= 1 sym(b<sub>i1</sub>) | 2 sym(b<sub>i2</sub>) | ... | j sym(b<sub>ij</sub>)
+<br> where the "1", "2", "3" are the tags for the union, and the b<sub>ij</sub> is branch "j" of union "n<sub>i</sub>", and sym(b<sub>ij</sub>) is the appropriate member of <i>P</i> if b<sub>ij</sub> is a primitive type, or the appropriate n<sub>k</sub> if b<sub>ij</sub> is a map, array, union, or record schema.  (The introduction of the terminal symbol "UNION" plus the introduction of the additional non-terminal "u<sub>i</sub>" is a convenience to our parsing implementation.)
+
+<p><i>Enums:</i> If n<sub>i</sub> is an enum schema, then it defines the following productions:
+<br>   n<sub>i</sub> ::= <code>enum</code> e<sub>i</sub>
+<br>   e<sub>i</sub> ::= ε
+
+<br> Here there is no real production for e<sub>i</sub>. The symbol is used to associate some meta information such as the number of values in the enumeration.
+
+<p><i>Fixed:</i> If n<sub>i</sub> is a fixed binary schema, then it defines the following productions:
+<br>   n<sub>i</sub> ::= <code>fixed</code> f<sub>i</sub>
+<br>   f<sub>i</sub> ::= ε
+
+<br> Here there is no real production for f<sub>i</sub>. The symbol is used to associate some meta information such as the size of the fixed binary.
+
+<p><b>Start symbol:</b> the starting symbol of the grammar is n<sub>0</sub>.
+
+<p>
+The grammar defined by the above transformation is LL(1).  (Proof: The only alternatives in these grammars are for the u<sub>i</sub> ("union") symbols and the r<sub>i</sub> ("repeating") symbols.  For "union" the alternative productions correspond to each one of the branches of the union. Each alternative production for a union starts with a unique tag-terminal, so by looking at the very first terminal one can decide which of the productions to select. In the case of the r<sub>i</sub>, there are two alternatives: the empty production, and the production that repeats the element.  The lookahead symbol <code>arrayend</code> or <code>mapend</code> selects the empty production, and any other terminal selects the repeating production.  So here, too, a single terminal of lookahead suffices.)
+
+Here's an example.  Consider the schema:
+<pre>
+{
+  "type":"record", "name":"foo",
+  "fields":[
+    {"name":"bar","type":"double"},
+    {"name":"baz","type":{"type":"array", "items":"string"}},
+    {"name":"zip",
+     "type":{"type":"map",
+     "values":["null",{"type":"array", "items":"bytes"},"foo"]}},
+  ]
+}
+</pre>
+This schema generates the following grammar:
+<pre>
+  n0 ::= double n1 n2
+  r1 ::= string r1 | ε
+  n1 ::= arraystart r1 arrayend
+  r2 ::= string n3 r2 | ε
+  n2 ::= mapstart r2 mapend
+  u3 ::= 1 null | 2 n4 | 3 n0
+  n3 ::= union u3
+  r4 ::= bytes r4 | ε
+  n4 ::= arraystart r4 arrayend
+</pre>
+The symbol "n0" is the start-symbol for this grammar.
+
+<h1>Reminder on LL(1) parsing</h1>
+
+While there's lots of material on the Web on table-driven LL(1) parsing, it all tends to overcomplicate things.  The best discussion I've found is in <i><a href=http://www.amazon.com/Crafting-Compiler-C-Charles-Fischer/dp/0805321667>Crafting a compiler</a></i>, by Fischer and LeBlanc (my copy is from 1988 -- I hope the quality hasn't slid since then).  Here's a quick summary.
+
+Parsing is the process of attempting to prove that a string can be derived from a grammar.  Top-down parsing attempts this proof in a top-down manner.  You start with the start symbol of the grammar and you ask yourself "Hey, given the input string, how can I derive this start symbol?"
+
+Now, in general, the start-symbol can be derived from one of a finite number of alternative productions:
+<br>   S ::= A<sub>11</sub> A<sub>12</sub> .. A<sub>1n<sub>1</sub></sub> | A<sub>21</sub> .. A<sub>2n<sub>2</sub></sub> | ... | A<sub>m1</sub> .. A<sub>mn<sub>m</sub></sub>
+<br>So the question of deriving the symbol "S" comes down to asking "Hey, given the input I'm looking at, which of these productions for S could have produced that input?"  The key property of LL(1) grammars is that this question is easy to answer.  All you need to do is look at the first token in your input, and that token tells you which of these alternatives could've produced that input.  (This token is sometimes called the "lookahead symbol.")
+
+<p>So the idea is that you put your start symbol on the stack to initialize things.  You pop that symbol off the stack, ask which production for S could've produced the input you're looking at, push that production back on the stack, and repeat.  Let's fill in the details.
+
+<p>The parsing table for this parsing procedure is a function of two inputs and one output:
+<pre>
+   T: Non-terminal x Terminal --> Production
+</pre>
+Remember, a "production" is a sequence of symbols -- a mix of terminals and non-terminals -- that derive a non-terminal in the grammar.
+
+<p>This function <code>T</code> takes a non-terminal and a terminal, and returns a production for the non-terminal.  The non-terminal is the symbol you're trying to derive (from the top of the parsing stack); the terminal is the current symbol in your input stream (the lookahead symbol).  If <code>X</code> is the non-terminal on top of the stack and <code>a</code> is the lookahead symbol, then the output is the unique production for <code>X</code> that can produce the input symbol <code>a</code>.  (This function can also return an error, indicating that no production for <code>X</code> can start with <code>a</code>.)
+
+<p>If you have such a table, then your parsing code looks like this:
+<pre>
+parse(Table T, TokenStream in):
+  Stack stack = new Stack(T.startSymbol);
+  for (Token t = in.next(); t != EOF; t = in.next())
+    advance(stack, T, t);
+
+advance(Stack stack, Table T, Token t):
+  X = stack.pop();
+  while (! isTerminal(X)):
+    if T(X,t) yields production Y<sub>1</sub> Y<sub>2</sub> ... Y<sub>n</sub>:
+      // push production in reverse order, so we leave looking for
+      // the first symbol of the production
+      stack.push(Y<sub>n</sub>);
+      ...;
+      stack.push(Y<sub>2</sub>);
+      stack.push(Y<sub>1</sub>);
+    else, T(X,t) is undefined, so throw an error;
+    X = stack.pop(); // Repeat until we find a terminal
+
+  if X == t then return
+  else throw an error;
+</pre>
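+
+<p>For concreteness, here is a direct Java transliteration of the loop above.
+The <code>Table</code>, <code>Symbol</code>, <code>Token</code> and
+<code>TokenStream</code> types are hypothetical stand-ins for this sketch,
+not Avro's actual parsing classes:
+<pre>
+  void parse(Table t, TokenStream in) {
+    Stack stack = new Stack();               // stack of grammar symbols
+    stack.push(t.startSymbol());
+    for (Token tok = in.next(); tok != Token.EOF; tok = in.next())
+      advance(stack, t, tok);
+  }
+
+  void advance(Stack stack, Table t, Token tok) {
+    Symbol x = (Symbol) stack.pop();
+    while (!x.isTerminal()) {
+      Symbol[] production = t.lookup(x, tok);  // T(X,t)
+      if (production == null)
+        throw new IllegalStateException("parse error at " + tok);
+      for (int i = production.length - 1; i >= 0; i--)
+        stack.push(production[i]);             // reverse order, so we leave
+                                               // with the first symbol on top
+      x = (Symbol) stack.pop();
+    }
+    if (!x.matches(tok))                       // terminal must match input
+      throw new IllegalStateException("parse error at " + tok);
+  }
+</pre>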
+
+
+
+<h1>Parsing tables for Avro</h1>
+
+Traditionally, the parsing table for an LL(1) grammar is defined as follows:
+<pre>
+  T(A,y) = A ::= X<sub>1</sub> ... X<sub>n</sub>  -- if y is in Predict(A ::= X<sub>1</sub> ... X<sub>n</sub>)
+  T(A,y) = Error              -- otherwise
+</pre>
+where <code>Predict(A ::= X<sub>1</sub> ... X<sub>n</sub>)</code> returns the set of terminal symbols that predict this particular production for <code>A</code>.
+
+<p>But in our case, almost all productions have a single alternative.  If a non-terminal symbol <code>A</code> is on the top of the stack, then we don't even have to look at the input to figure out which production could derive <code>A</code> because there's only one such production!  Thus, we can define a special parsing table for Avro-induced grammars as follows:
+<pre>
+  T(A,y) = A ::= sym(f<sub>i1</sub>) sym(f<sub>i2</sub>) .. sym(f<sub>im</sub>) -- if A is a record schema
+  T(A,y) = A ::= <code>arraystart</code> r<sub>i</sub> <code>arrayend</code>       -- if A is the non-terminal for an array schema
+  T(A,y) = A ::= <code>mapstart</code> r<sub>i</sub> <code>mapend</code>           -- if A is the non-terminal for an map schema
+  T(A,y) = A ::= <code>union</code> u<sub>i</sub>                     -- if A is a union schema
+  T(A,y) = A ::= y sym(b<sub>ij</sub>)                   -- if A is the u<sub>i</sub> of a union schema (note the "y" inside this production)
+  T(A,y) = Error                              -- if A is such a u<sub>i</sub> and "y" doesn't match the tag of
+                                              --   any of the branches of the corresponding union
+  T(A,y) = A ::= n<sub>i</sub> r<sub>i</sub>    -- if A is r<sub>i</sub> and y is neither <code>arrayend</code> nor <code>mapend</code>
+  T(A,y) = A ::= ε  -- if A is r<sub>i</sub> and y is either <code>arrayend</code> or <code>mapend</code>
+</pre>
+Note that only the last three rules for <code>T(A,y)</code> consider the lookahead symbol (i.e., only the last three rules actually look at the value of <code>y</code>).  These are the rules for dealing with productions that have alternatives, i.e., the rules for unions (where there is an alternative for each branch) and the rules for repeaters (where there is one alternative for the "repeat" case and another alternative for the "end" case).
+
+<p>The nice thing about this alternative formulation of the parsing table is that we don't actually have to compute the predict set, which is not super complicated, but would be a pile of code to test and maintain.
+
+<p>It should be noted that the resulting parsing table catches errors in different states than the traditional LL(1) parsing table.  For example, let's say our schema is simply an array of ints, which induces the following grammar:
+<pre>
+  n<sub>0</sub> ::= <code>arraystart</code> r<sub>int</sub> <code>arrayend</code>
+  r<sub>int</sub> ::= int r<sub>int</sub> | ε
+</pre>
+The traditional LL(1) table would be:
+<pre>
+  T(n<sub>0</sub>,<code>arraystart</code>) = n<sub>0</sub> ::= <code>arraystart</code> r<sub>int</sub> <code>arrayend</code>
+  T(r<sub>int</sub>,int) = r<sub>int</sub> ::= int r<sub>int</sub>
+  T(r<sub>int</sub>,<code>arrayend</code>) = ε
+  T(A,y) = Error -- if (A,y) is none of the above
+</pre>
+while our parser table would be:
+<pre>
+  T'(n<sub>0</sub>,y) = n<sub>0</sub> ::= <code>arraystart</code> r<sub>int</sub> <code>arrayend</code> -- for all y
+  T'(r<sub>int</sub>,y) = r<sub>int</sub> ::= int r<sub>int</sub>             -- for all y other than <code>arrayend</code>
+  T'(r<sub>int</sub>,<code>arrayend</code>) = ε
+</pre>
+Note that <code>T</code> is defined as <code>Error</code> for a lot of <code>(A,y)</code> pairs, but <code>T'</code> is defined as <code>Error</code> for <i>none</i> of them.  How can this be?
+
+<p>The difference is that <code>T</code> catches many errors when terminals fail to appear in Predict sets, while <code>T'</code> catches the errors when terminals fail to match corresponding terminals on the parser stack.  For example, let's say <code>r<sub>int</sub></code> is on the top of the parser stack, and the symbol <code>double</code> arrives (which means, in practice, that a <code>writeDouble</code> call is encountered).  In this case, a parser with the standard table will catch the error immediately, since <code>T(r<sub>int</sub>,double)</code> is <code>Error</code>; our parser will instead expand <code>r<sub>int</sub></code> to <code>int r<sub>int</sub></code>, and catch the error one step later, when the terminal <code>int</code> on the top of the stack fails to match the incoming <code>double</code>.
+
+<p>However, we believe that our modified parser will accept exactly the same set of strings as the standard parser.
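+
+<p>The deferred error-catching described above is easy to observe through the
+validating encoder that is built on these tables.  A minimal sketch, assuming
+<code>out</code> is some <code>OutputStream</code>:
+<pre>
+  Schema s = new Schema.Parser().parse(
+      "{\"type\":\"array\",\"items\":\"int\"}");
+  EncoderFactory f = EncoderFactory.get();
+  Encoder e = f.validatingEncoder(s, f.binaryEncoder(out, null));
+  e.writeArrayStart();
+  e.setItemCount(1);
+  e.startItem();
+  e.writeDouble(1.0);   // rejected: the grammar expects the terminal "int"
+</pre>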
+
+
+<h1>Induction rules</h1>
+
+<p>The first section ("The interpretation") informally describes the grammar generated by an Avro schema.  This section provides a more formal description using a set of induction rules.  The earlier description in section one is fine for describing how a single Avro schema generates a grammar.  But soon we're going to describe how two schemas together define a "resolving" grammar, and for that description we'll need the more formal mechanism described here.
+
+<p>The terminal and non-terminal symbols in our grammar are as described in the first section.  Our induction rules will define a function "C(S)=<G,a>", which takes an Avro schema "S" and returns a pair consisting of a set of productions "G" and a symbol "a".  This symbol "a" -- which is either a terminal, or a non-terminal defined by G -- generates the values described by schema S.
+
+<p>The first rule applies to all Avro primitive types:
+
+<table align=center>
+  <tr align=center><td><i>p</i> in {<code>null</code>, <code>boolean</code>, <code>int</code>, <code>long</code>, <code>float</code>, <code>double</code>, <code>string</code>, <code>bytes</code>}</td></tr>
+  <tr><td><hr></td></tr>
+  <tr align=center><td>C(<i>p</i>)=<{}, <i>p</i>></td></tr>
+</table>
+
+<p>This first rule does not generate any productions, and simply returns the terminal symbol corresponding to the primitive types of the schema.
+
+<p>The next rule applies to record schemas:
+
+<table align=center>
+  <tr><td align=center>
+  <table cellspacing=0 cellpadding=0><tr><td>S=</td><td><code>{"type":"record", "name":</code>a<code>,</code></td></tr>
+         <tr><td></td><td><code>"fields":[{"name":</code>F<sub>1</sub><code>, "type":</code>S<sub>1</sub><code>}, ..., {"name":</code>F<sub>n</sub><code>, "type":</code>S<sub>n</sub><code>}]}</code></td></tr></table></td></tr>
+  <tr align=center><td>C(S<sub>j</sub>)=<G<sub>j</sub>, f<sub>j</sub>></td></tr>
+  <tr align=center><td><hr></td></tr>
+  <tr align=center><td>C(S)=<G<sub>1</sub> ∪ ... ∪ G<sub>n</sub> ∪ {a::=f<sub>1</sub> f<sub>2</sub> ... f<sub>n</sub>}, a></td></tr>
+</table>
+
+<p>In this case, the set of output-productions consists of all the productions generated by the field-types of the record, plus a production that defines the non-terminal "a" to be the sequence of field-types.  We return "a" as the grammar symbol representing this record-schema.
+
+<p>Next, we define the rule for arrays:
+
+<table align=center>
+  <tr align=center><td>S=<code>{"type":"array", "items":S<sub>e</sub>}</code></td></tr>
+  <tr align=center><td>C(S<sub>e</sub>)=<G<sub>e</sub>,e></td></tr>
+  <tr><td><hr></td></tr>
+  <tr align=center><td>C(S)=<G<sub>e</sub> ∪ {r ::= e r, r ::= ε, a ::= <code>arraystart</code> r <code>arrayend</code>}, a></td></tr>
+</table>
+
+<p>For arrays, the set of output productions again contains all productions generated by the element-type.  In addition, we define <em>two</em> productions for "r", which represents the repetition of this element type.  The first production is the recursive case, which consists of the element-type followed by "r" all over again.  The second is the base case, which is the empty production.  Having defined this repetition, we can then define "a" as this repetition bracketed by the terminals <code>arraystart</code> and <code>arrayend</code>.
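+
+<p>For instance, applying this rule (together with the rule for primitives)
+to the schema <code>{"type":"array", "items":"int"}</code> gives:
+<pre>
+  C(int) = <{}, int>
+  C({"type":"array", "items":"int"})
+         = <{r ::= int r, r ::= ε, a ::= arraystart r arrayend}, a>
+</pre>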
+
+<p>The rule for maps is almost identical to that for arrays:
+
+<table align=center>
+  <tr align=center><td>S=<code>{"type":"map", "values":S<sub>e</sub>}</code></td></tr>
+  <tr align=center><td>C(S<sub>e</sub>)=<G<sub>e</sub>,e></td></tr>
+  <tr><td><hr></td></tr>
+  <tr align=center><td>C(S)=<G<sub>e</sub> ∪ {r ::= <code>string</code> e r, r ::= ε, a ::= <code>mapstart</code> r <code>mapend</code>}, a></td></tr>
+</table>
+
+<p>The only difference from arrays is that map-elements consists of a <code>string</code> together with an element-type (vs. just an element type).
+
+<p>The rule for unions:
+<table align=center>
+<tr align=center>
+<td>S=<code>[S<sub>1</sub>, S<sub>2</sub>, ..., S<sub>n</sub>]</code></td>
+</tr>
+<tr align=center>
+ <td>C(S<sub>j</sub>)=<G<sub>j</sub>, b<sub>j</sub>></td>
+</tr>
+<tr align=center><td><hr></td></tr>
+<tr align=center><td>C(S)=<G<sub>1</sub> ∪ ... ∪ G<sub>n</sub> ∪ {u::=1 b<sub>1</sub>, u::=2 b<sub>2</sub>, ..., u::=n b<sub>n</sub>, a::=<code>union</code> u}, a></td></tr>
+</table>
+
+<p>In this rule, we again accumulate productions (G<sub>j</sub>) generated by each of the sub-schemas for each branch of the union.  If there are "k" branches, we define "k" different productions for the non-terminal symbol "u", one for each branch in the union.  These per-branch productions consist of the index of the branch (1 for the first branch, 2 for the second, and so forth), followed by the symbol representing the schema of that branch.  With these productions for "u" defined, we can then define "a" as the terminal <code>union</code> followed by "u", and return "a" as the symbol for the union schema.
+
+
+<p>The rule for fixed size binaries:
+<table align=center>
+<tr align=center>
+ <td>S=<code>{"type":"fixed", "name":a, "size":s}</code></td>
+</tr>
+<tr align=center><td><hr></td></tr>
+<tr align=center><td>C(S)=<{a::=<code>fixed</code> f, f::=ε}, a></td></tr>
+</table>
+
+<p>In this rule, we define a new non-terminal f, which has the size of the fixed binary associated with it.
+
+<p>The rule for enums:
+<table align=center>
+<tr align=center>
+ <td>S=<code>{"type":"enum", "name":a, "symbols":["s1", "s2", "s3", ...]}</code></td>
+</tr>
+<tr align=center><td><hr></td></tr>
+<tr align=center><td>C(S)=<{a::=<code>enum</code> e, e::=ε}, a></td></tr>
+</table>
+
+<p>In this rule, we define a new non-terminal e, which has the enumeration's range of values associated with it.
+
+<h1>Resolution using action symbols</h1>
+
+We want to use grammars to represent Avro's rules for schema resolution.  To do this, we need a way to encode certain actions that the parser should perform as part of the resolution.  In particular:
+
+<ul>
+<li> <b>Resolver action:</b> when the writer writes a primitive type that can be promoted into the reader's type, we use a "resolver action" to aid in this process.  This is used only for the promotions Avro allows: int->long, int->float, int->double, long->float, long->double, and float->double.
+
+<p> <li> <b>Skip action:</b> when the writer's schema for a record contains fields that are not in the reader's schema, we need to skip them.  "Skip actions" are used for this purpose.
+
+<p> <li> <b>Field action:</b> the fields of a record can appear in different orders in the reader's and writer's schemas.  In the API we're designing, to support streaming, fields will be returned to the reader in the order generated by the writer; we need to help the reader map this back to its own field-order.  Field actions support this requirement.
+
+<p> <li> <b>Reader union actions:</b> the reader's schema can have a union where the writer's schema did not.  For example, the writer's schema might call for simply a long, while the reader's schema calls for a union that contains a long among other things.  The reader should experience the writer's long values as if they came from a union.  Reader union actions support this requirement.
+
+<p> <li> <b>Writer union actions</b> are the dual of the previous case: the writer may write a union where the reader expects just one branch of the union.  Writer union actions help bridge such situations.
+
+<p> <li> <b>Enum actions:</b> when both the reader's and the writer's schemas are enumerations, enum actions are used to map the writer's numeric value to the reader's numeric value.
+
+<p> <li> <b>Error actions:</b> in general, errors in schema-resolution can only be detected when data is being read.  For example, if the writer writes a <code>[long, string]</code> union, and the reader is expecting just a <code>long</code>, an error is only reported when the writer sends a string rather than a long.  Further, the Avro spec recommends that <em>all</em> errors be detected at reading-time, even if they could be detected earlier.  Error actions support the deferral of such errors to reading-time.
+</ul>
+
+<p>These actions will become "action symbols" in our grammar.  Action symbols are symbols that cause our parser to perform special activities when they appear on the top of the parsing stack.  For example, when the skip-action makes it to the top of the stack, the parser will automatically skip the next value in the input stream.  (Again, Fischer and LeBlanc has a nice description of action symbols.)
+
+<p>We're going to use induction rules to define a grammar.  This time, our induction rules will define a two-argument function "C(W,R)=<G,a>", which takes two schema, the writer's and reader's schemas respectively.  The results of this function are the same as they were for the single-schema case.
+
+<p>The first rule applies to all Avro primitive types:
+
+<table align=center>
+  <tr align=center><td><i>p</i> in {<code>null</code>, <code>boolean</code>, <code>int</code>, <code>long</code>, <code>float</code>, <code>double</code>, <code>string</code>, <code>bytes</code>}</td></tr>
+  <tr><td><hr></td></tr>
+  <tr align=center><td>C(<i>p</i>,<i>p</i>)=<{}, <i>p</i>></td></tr>
+</table>
+
+<p> In this case, the writer and reader schemas agree, so the resulting grammar should just expect the agreed-upon primitive type.
+
+<p>The next rule deals with resolution of primitive types:
+
+<table align=center>
+  <tr align=center><td>(w,r) in {(<code>int</code>,<code>long</code>), (<code>int</code>,<code>float</code>), (<code>int</code>,<code>double</code>),</td></tr>
+  <tr align=center><td>(<code>long</code>,<code>float</code>), (<code>long</code>,<code>double</code>), (<code>float</code>,<code>double</code>)}</td></tr>
+  <tr><td><hr></td></tr>
+  <tr align=center><td>C(w,r)=<{}, ResolverAction(w,r)></td></tr>
+</table>
+
+<p> When this parameterized action is encountered, the parser will resolve the writer's value into the reader's expected-type for that value.  In the parsing loop, when we encounter this symbol, we use the "r" parameter of this symbol to check that the reader is asking for the right type of value, and we use the "w" parameter to figure out how to parse the data in the input stream.
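+
+<p>At the API level, this is the behavior one sees through
+<code>ResolvingDecoder</code>.  A minimal sketch of the long-to-double
+promotion, assuming <code>bytes</code> holds a value written as a long:
+<pre>
+  Schema writer = new Schema.Parser().parse("\"long\"");
+  Schema reader = new Schema.Parser().parse("\"double\"");
+  Decoder in = DecoderFactory.get().binaryDecoder(bytes, null);
+  ResolvingDecoder rd =
+      DecoderFactory.get().resolvingDecoder(writer, reader, in);
+  double d = rd.readDouble();   // a long on the wire, promoted on read
+</pre>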
+
+<p>One final possibility for primitive types is that they are incompatible types:
+
+<table align=center>
+  <tr align=center><td>The w,r pair does not fit the previous two rules, AND neither</td></tr>
+  <tr align=center><td>of the pair is a union, AND the pair aren't both compounds</td></tr>
+  <tr align=center><td>of the same type (i.e., two arrays, two records, or two maps)</td></tr>
+  <tr><td><hr></td></tr>
+  <tr align=center><td>C(w,r)=<{}, ErrorAction></td></tr>
+</table>
+
+<p> When this parameterized action is encountered, the parser will throw an error.  Keep in mind that this symbol might be generated in the middle of a recursive call to "C."  For example, if the reader's schema is long, and the writer's is [long, string], we'll generate an error symbol for the string-branch of the union; if this branch occurs in the actual input, an error will then be generated.
+
+<p>The next rule deals with resolution of fixed size binaries:
+
+<table align=center>
+  <tr align=center><td>w = <code>{"type":"fixed", "name":"n1", "size":s1}</code></td></tr>
+  <tr align=center><td>r = <code>{"type":"fixed", "name":"n2", "size":s2}</code></td></tr>
+  <tr align=center><td>n1 != n2 or s1 != s2</td></tr>
+  <tr><td><hr></td></tr>
+  <tr align=center><td>C(w,r)=<{}, ErrorAction></td></tr>
+</table>
+
+<table align=center>
+  <tr align=center><td>w = <code>{"type":"fixed", "name":"n1", "size":s1}</code></td></tr>
+  <tr align=center><td>r = <code>{"type":"fixed", "name":"n2", "size":s2}</code></td></tr>
+  <tr align=center><td>n1 == n2 and s1 == s2</td></tr>
+  <tr><td><hr></td></tr>
+  <tr align=center><td>C(w,r)=<{ a::=<code>fixed</code> f, f::=ε}, a></td></tr>
+</table>
+
+If the names and sizes are identical, the two fixed schemas match; otherwise an error is generated.
+
+<p>The next rule deals with resolution of enums:
+
+<table align=center>
+  <tr align=center><td>w = <code>{"type":"enum", "symbols":[sw<sub>1</sub>, sw<sub>2</sub>, ..., sw<sub>m</sub>] }</code></td></tr>
+  <tr align=center><td>r = <code>{"type":"enum", "symbols":[sr<sub>1</sub>, sr<sub>2</sub>, ..., sr<sub>n</sub>] }</code></td></tr>
+  <tr align=center><td>f<sub>i</sub> = EnumAction(i, j) if sw<sub>i</sub> == sr<sub>j</sub></td></tr>
+  <tr align=center><td>f<sub>i</sub> = ErrorAction if sw<sub>i</sub> does not match any sr<sub>j</sub></td></tr>
+  <tr><td><hr></td></tr>
+  <tr align=center><td>C(w,r)=<{ a::=<code>enum</code> e, e::=ε}, a></td></tr>
+</table>
+
+The symbol e has the set of actions f<sub>i</sub> associated with it. It chooses the right action based on the runtime data.
+
+<p>Now that we have rules for primitive types, we can define rules for compound types.  First, let's look at records:
+
+<table align=center>
+<tr>
+ <td align=center>
+  <table cellspacing=0 cellpadding=0>
+  <tr>
+   <td>W=</td>
+   <td><code>{"type":"record","name":</code>w<code>,</code></td>
+  </tr>
+  <tr>
+    <td></td>
+    <td><code>"fields":[{"name":</code>E<sub>1</sub><code>,
+                         "type":</code>S<sub>1</sub><code>},</code>...<code>,
+                         {"name":</code>E<sub>n</sub><code>,
+                         "type":</code>S<sub>n</sub><code>}]}</code></td>
+  </tr>
+  </table>
+ </td>
+</tr>
+<tr>
+ <td align=center>
+  <table cellspacing=0 cellpadding=0>
+  <tr>
+   <td>R=</td>
+   <td><code>{"type":"record", "name":</code>r<code>,</code></td>
+  </tr>
+  <tr>
+   <td></td>
+   <td><code>"fields":[{"name":</code>F<sub>1</sub><code>,
+                        "type":</code>T<sub>1</sub><code>},</code>...<code>,
+                       {"name":</code>F<sub>m</sub><code>,
+                        "type":</code>T<sub>m</sub><code>}]}</code>
+   </td>
+  </tr>
+  </table>
+ </td>
+</tr>
+<tr align=center>
+ <td>{F<sub>1</sub>, ..., F<sub>m</sub>} is a
+     subset of {E<sub>1</sub>, ..., E<sub>n</sub>}</td>
+</tr>
+<tr>
+ <td align=center>
+  C(S<sub>j</sub>, T<sub>i</sub>)
+   = <G<sub>j</sub>, f<sub>j</sub>>
+   -- for all E<sub>j</sub>=F<sub>i</sub></td>
+</tr>
+<tr>
+ <td align=center>
+  C(S<sub>j</sub>)
+   = <G<sub>j</sub>, f<sub>j</sub>>
+   -- for all E<sub>j</sub> not in {F<sub>1</sub>, ..., F<sub>m</sub>}</td>
+</tr>
+<tr>
+ <td align=center>
+  <table>
+  <tr>
+   <td rowspan=2 valign=middle>f'<sub>j</sub>=</td>
+   <td><sub>/ </sub>FieldAction(i, E<sub>i</sub>) f<sub>j</sub>
+          -- if E<sub>j</sub>=F<sub>i</sub></td>
+  </tr>
+  <tr>
+   <td><sup>\ </sup>SkipAction(f<sub>j</sub>)
+             -- if E<sub>j</sub> not in {F<sub>1</sub>, ..., F<sub>m</sub>}</td>
+  </tr>
+  </table>
+ </td>
+</tr>
+<tr align=center><td><hr></td></tr>
+<tr align=center><td>C(W,R)=<G<sub>1</sub> ∪ G<sub>2</sub> ∪ ... ∪ G<sub>n</sub> ∪ { w::=f'<sub>1</sub> f'<sub>2</sub> ... f'<sub>n</sub> }, w></td></tr>
+</table>
+
+<p>The substance of this rule lies in the definition of the "f'<sub>j</sub>".  If the writer's field E<sub>j</sub> is not a member of the reader's schema, then a skip-action is generated, which will cause the parser to automatically skip over the field without the reader knowing.  (In this case, note that we use the <em>single</em>-argument version of "C", i.e., the version defined in the previous section!)
+
+If the writer's field E<sub>j</sub> <em>is</em> a member of the reader's schema, then "f'<sub>j</sub>" is a two-symbol sequence: the first symbol is a (parameterized) field-action which is used to tell the reader which of its own fields is coming next, followed by the symbol for parsing the value written by the writer.
+
+<p>The above rule for records works only when the reader and writer have the same name, and the reader's fields are a subset of the writer's.  In other cases, an error is produced.
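+
+<p>On the reader's side, field actions surface through
+<code>ResolvingDecoder.readFieldOrder()</code>, which returns the reader's
+fields in the order the writer wrote them.  A sketch, where
+<code>readValue</code> is a hypothetical helper that reads one value of a
+given schema:
+<pre>
+  ResolvingDecoder rd =
+      DecoderFactory.get().resolvingDecoder(writer, reader, in);
+  for (Schema.Field f : rd.readFieldOrder()) {
+    // f.pos() is the field's position in the *reader's* schema
+    values[f.pos()] = readValue(f.schema(), rd);
+  }
+</pre>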
+
+<p>The rule for arrays is straightforward:
+
+<table align=center>
+<tr align=center>
+ <td>W=<code>{"type":"array", "items":S<sub>w</sub>}</code></td>
+</tr>
+<tr align=center>
+ <td>R=<code>{"type":"array", "items":S<sub>r</sub>}</code></td>
+</tr>
+<tr align=center>
+ <td>C(S<sub>w</sub>, S<sub>r</sub>)=<G<sub>e</sub>,e></td>
+</tr>
+<tr><td><hr></td></tr>
+<tr align=center><td>C(W,R)=<G<sub>e</sub> ∪ {r ::= e r, r ::= ε, a ::= <code>arraystart</code> r <code>arrayend</code>}, a></td></tr>
+</table>
+
+<p>Here the rule is largely the same as for the single-schema case, although the recursive use of G may result in productions that are very different.  The rule for maps changes in a similarly-small way, so we don't bother to detail that case in this document.
+
+<p>The final rules are for unions.  Let's first look at the case where the writer is a union but the reader is not:
+
+<table align=center>
+<tr align=center>
+ <td>W=[S<sub>1</sub>, ..., S<sub>n</sub>]</td>
+</tr>
+<tr align=center>
+ <td>R is not a union schema</td>
+</tr>
+<tr align=center>
+ <td>C(S<sub>j</sub>,R)=<G<sub>j</sub>, b<sub>j</sub>></td>
+</tr>
+<tr><td><hr></td></tr>
+<tr align=center><td>C(W,R)=<G<sub>1</sub> ∪ G<sub>2</sub> ∪ ... ∪ G<sub>n</sub> ∪ {a::=WriterUnionAction(b<sub>1</sub>, b<sub>2</sub>, ..., b<sub>n</sub>)}, a></td></tr>
+</table>
+
+<p>Here, a writer-union action is generated that looks much like a union did in the single-schema case.  However, unlike in that case, the writer-union action will cause the parser to automatically interpret the writer's union value.
+
+<p> Now let's look when the reader expects a union.  The first of these cases is an error case:
+
+<table align=center>
+<tr align=center>
+ <td>W is not a union schema</td>
+</tr>
+<tr align=center>
+ <td>R=[R<sub>1</sub>, ..., R<sub>n</sub>]</td>
+</tr>
+<tr><td>W does not resolve to any of the branches of R</td></tr>
+<tr><td><hr></td></tr>
+<tr><td align=center>C(W,R)=<{}, ErrorAction></td></tr>
+</table>
+
+<p>In this case, there's no way to resolve the two schemas, so we generate an error action to remind us of this fact at run-time.  (Again, this error action might be under a branch of a containing union, and thus might never be triggered at run-time, so it wouldn't be correct to signal an error at "compile" time.)
+
+<p> Here's the non-error case:
+
+<table align=center>
+<tr align=center>
+ <td>W is not a union schema</td>
+</tr>
+<tr align=center>
+ <td>R=[R<sub>1</sub>, ..., R<sub>n</sub>]</td>
+</tr>
+<tr><td align=center>Branch "j" of R is the best match for W</td></tr>
+<tr><td align=center>C(W,R<sub>j</sub>)=<G,w></td></tr>
+<tr><td><hr></td></tr>
+<tr><td align=center>C(W,R)=<G, ReaderUnionAction(j,w)></td></tr>
+</table>
+
+<p> In this case, we can decide at "compile time" which of the branches of the reader will be the best match for the value that's going to be written by the writer.  We then generate a reader union action, which tells the parser, first, which branch-number to report to the reader, and, second, which symbol to use to parse the writer's actual value.
+
+<p> The interesting case is when the writer's and reader's schemas are both unions:
+
+<table align=center>
+<tr align=center>
+ <td>W=[W<sub>1</sub>, ..., W<sub>n</sub>]</td>
+</tr>
+<tr align=center>
+ <td>R=[R<sub>1</sub>, ..., R<sub>m</sub>]</td>
+</tr>
+<tr align=center>
+ <td>C(W<sub>j</sub>, R)=<G<sub>j</sub>, b<sub>j</sub>></td>
+</tr>
+<tr><td><hr></td></tr>
+<tr align=center><td>C(W,R)=<G<sub>1</sub> ∪ ... ∪ G<sub>n</sub> ∪ {u::=1 b<sub>1</sub>, u::=2 b<sub>2</sub>, ..., u::=n b<sub>n</sub>, a::=<code>union</code> u}, a></td></tr>
+</table>
+
+<p> Note that in the inductive case ("C(W<sub>j</sub>, R)"), each <i>branch</i> of the writer ("W<sub>j</sub>") is compared to the <em>entire union</em> of the reader ("R").  Thus, one of the two previous cases (the error case or the reader-union case) gets generated for each branch of the writer's union.
+
+
+<h1>Resolving parser</h1>
+
+Here's a stylized version of the actual parsing code, with comments, to illustrate how a resolving-grammar is actually used.  To better understand this code, compare it to the simple code for "advance" given earlier in this document.
+
+<pre>
+  Symbol advance(Stack stack, Table T, Symbol t, TokenStream in):
+    Symbol X = stack.pop();
+    while (! isTerminal(X)):
+      case X:
+        FieldAction:
+          // In this case, the main parsing loop can "ask" for the
+          // field information by passing a FieldAction symbol as
+          // "t".  If it does, it'll get the (parameterized) symbol
+          // from the parsing table.  If it doesn't ask for this
+          // information, then the information will be ignored.
+          if (isFieldAction(t)) return X;
+
+        SkipAction(productionToSkip):
+          // In this case we automatically skip the production we've
+          // been asked to skip
+          in.skip(productionToSkip);
+
+        WriterUnionAction(b_1, b_2, ..., b_n):
+          // In this case, we read from the token input-stream to
+          // determine the actual branch written by the writer.
+          // We then push this branch on the parsing stack, to tell
+          // the parser what type of value to look for
+          int i = in.readIndex();
+          stack.push(b_i);
+
+        NonTerminal:
+          if T(X,t) yields production Y<sub>1</sub> Y<sub>2</sub> ... Y<sub>n</sub>:
+            // push production in reverse order, so we leave looking for
+            // the first symbol of the production
+            stack.push(Y<sub>n</sub>);
+            ...;
+            stack.push(Y<sub>2</sub>);
+            stack.push(Y<sub>1</sub>);
+          else, T(X,t) is undefined, so throw an error;
+
+      X = stack.pop();
+
+    // We've left the loop, so X is a terminal symbol:
+    case X:
+      ResolverAction(w,r):
+        // If reader is looking for an "r", then the reader's
+        // looking for the right thing according to the reader's
+        // schema, but return the type actually written so the
+        // proper conversion can happen.
+        if (r == t) return w;
+
+      ReaderUnionAction(index,writerSym):
+        // Reader-union actions are allowed where the reader
+        // is expecting a union.  In this case, we return the
+        // (parameterized!) reader-union-action symbol and 
+        // the code above figures out what to do
+        if (t == union) return X;
+
+      ErrorAction:
+        throw the deferred error;
+      
+    // Fall-through case:
+    if (X == t) then return X
+    else throw an error
+</pre>
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/parsing/package.html b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/package.html
new file mode 100644
index 0000000..2a879ab
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/parsing/package.html
@@ -0,0 +1,40 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+Implementation of Avro schemas as LL(1) grammars.
+
+<p>See
+<a href="doc-files/parsing.html">parser documentation</a> for details on how
+this is achieved.
+
+<p>
+The classes in this package are used by
+{@link org.apache.avro.io.ValidatingEncoder},
+{@link org.apache.avro.io.ValidatingDecoder},
+{@link org.apache.avro.io.ResolvingDecoder},
+{@link org.apache.avro.io.JsonEncoder} and
+{@link org.apache.avro.io.JsonDecoder}.
+
+<p>
+Unless one plans to generate a variation of the grammar, or to use a
+grammar directly, one need not understand these classes.
+
+</body>
+</html>
diff --git a/lang/java/avro/src/main/java/org/apache/avro/ipc/package.html b/lang/java/avro/src/main/java/org/apache/avro/ipc/package.html
new file mode 100644
index 0000000..f246ebd
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/ipc/package.html
@@ -0,0 +1,23 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+Support for inter-process calls.
+</body>
+</html>
diff --git a/lang/java/avro/src/main/java/org/apache/avro/package.html b/lang/java/avro/src/main/java/org/apache/avro/package.html
new file mode 100644
index 0000000..9abc78b
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/package.html
@@ -0,0 +1,44 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>Avro kernel classes.
+
+<p>A {@link org.apache.avro.Schema} provides an abstract definition of
+  a data type.
+
+<p>The in-memory representation of data is determined by {@link
+  org.apache.avro.io.DatumReader} and {@link
+  org.apache.avro.io.DatumWriter} implementations.  Generic
+  implementations are provided in the {@link org.apache.avro.generic}
+  package.  A {@link org.apache.avro.compiler.specific.SpecificCompiler
+  compiler} can generate specific java classes and interfaces for
+  schemas and protocols.  Schemas may be automatically generated for
+  existing Java classes by reflection using the {@link
+  org.apache.avro.reflect} package.
+
+<p>Data of a given schema is always serialized identically, regardless
+  of its in-memory representation, by traversing the schema and
+  writing leaf values from the data structure with a {@link
+  org.apache.avro.io.Encoder}.  Deserializing similarly proceeds by
+  traversing the schema, reading leaf values with a {@link
+  org.apache.avro.io.Decoder} and storing them in an in-memory data
+  structure.
+
+</body>
+</html>
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/ArrayAccessor.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/ArrayAccessor.java
new file mode 100644
index 0000000..66f8f09
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/ArrayAccessor.java
@@ -0,0 +1,228 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.ResolvingDecoder;
+
+/**
+ * Helper class to provide native array access whenever possible. It is much
+ * faster than using reflection-based operations on arrays.
+ */
+class ArrayAccessor {
+
+  static void writeArray(boolean[] data, Encoder out) throws IOException {
+    int size = data.length;
+    out.setItemCount(size);
+    for (int i = 0; i < size; i++) {
+      out.startItem();
+      out.writeBoolean(data[i]);
+    }
+  }
+
+  // short and char arrays are upcast to Avro int
+
+  static void writeArray(short[] data, Encoder out) throws IOException {
+    int size = data.length;
+    out.setItemCount(size);
+    for (int i = 0; i < size; i++) {
+      out.startItem();
+      out.writeInt(data[i]);
+    }
+  }
+
+  static void writeArray(char[] data, Encoder out) throws IOException {
+    int size = data.length;
+    out.setItemCount(size);
+    for (int i = 0; i < size; i++) {
+      out.startItem();
+      out.writeInt(data[i]);
+    }
+  }
+
+  static void writeArray(int[] data, Encoder out) throws IOException {
+    int size = data.length;
+    out.setItemCount(size);
+    for (int i = 0; i < size; i++) {
+      out.startItem();
+      out.writeInt(data[i]);
+    }
+  }
+
+  static void writeArray(long[] data, Encoder out) throws IOException {
+    int size = data.length;
+    out.setItemCount(size);
+    for (int i = 0; i < size; i++) {
+      out.startItem();
+      out.writeLong(data[i]);
+    }
+  }
+
+  static void writeArray(float[] data, Encoder out) throws IOException {
+    int size = data.length;
+    out.setItemCount(size);
+    for (int i = 0; i < size; i++) {
+      out.startItem();
+      out.writeFloat(data[i]);
+    }
+  }
+
+  static void writeArray(double[] data, Encoder out) throws IOException {
+    int size = data.length;
+    out.setItemCount(size);
+    for (int i = 0; i < size; i++) {
+      out.startItem();
+      out.writeDouble(data[i]);
+    }
+  }
+
+  static Object readArray(Object array, Class<?> elementType, long l,
+      ResolvingDecoder in) throws IOException {
+    if (elementType == int.class)
+      return readArray((int[]) array, l, in);
+    if (elementType == long.class)
+      return readArray((long[]) array, l, in);
+    if (elementType == float.class)
+      return readArray((float[]) array, l, in);
+    if (elementType == double.class)
+      return readArray((double[]) array, l, in);
+    if (elementType == boolean.class)
+      return readArray((boolean[]) array, l, in);
+    if (elementType == char.class)
+      return readArray((char[]) array, l, in);
+    if (elementType == short.class)
+      return readArray((short[]) array, l, in);
+    return null;
+  }
+
+  static boolean[] readArray(boolean[] array, long l, ResolvingDecoder in)
+      throws IOException {
+    int index = 0;
+    do {
+      int limit = index + (int) l;
+      if (array.length < limit) {
+        array = Arrays.copyOf(array, limit);
+      }
+      while (index < limit) {
+        array[index] = in.readBoolean();
+        index++;
+      }
+    } while ((l = in.arrayNext()) > 0);
+    return array;
+  }
+
+  static int[] readArray(int[] array, long l, ResolvingDecoder in)
+      throws IOException {
+    int index = 0;
+    do {
+      int limit = index + (int) l;
+      if (array.length < limit) {
+        array = Arrays.copyOf(array, limit);
+      }
+      while (index < limit) {
+        array[index] = in.readInt();
+        index++;
+      }
+    } while ((l = in.arrayNext()) > 0);
+    return array;
+  }
+
+  static short[] readArray(short[] array, long l, ResolvingDecoder in)
+      throws IOException {
+    int index = 0;
+    do {
+      int limit = index + (int) l;
+      if (array.length < limit) {
+        array = Arrays.copyOf(array, limit);
+      }
+      while (index < limit) {
+        array[index] = (short) in.readInt();
+        index++;
+      }
+    } while ((l = in.arrayNext()) > 0);
+    return array;
+  }
+
+  static char[] readArray(char[] array, long l, ResolvingDecoder in)
+      throws IOException {
+    int index = 0;
+    do {
+      int limit = index + (int) l;
+      if (array.length < limit) {
+        array = Arrays.copyOf(array, limit);
+      }
+      while (index < limit) {
+        array[index] = (char) in.readInt();
+        index++;
+      }
+    } while ((l = in.arrayNext()) > 0);
+    return array;
+  }
+
+  static long[] readArray(long[] array, long l, ResolvingDecoder in)
+      throws IOException {
+    int index = 0;
+    do {
+      int limit = index + (int) l;
+      if (array.length < limit) {
+        array = Arrays.copyOf(array, limit);
+      }
+      while (index < limit) {
+        array[index] = in.readLong();
+        index++;
+      }
+    } while ((l = in.arrayNext()) > 0);
+    return array;
+  }
+
+  static float[] readArray(float[] array, long l, ResolvingDecoder in)
+      throws IOException {
+    int index = 0;
+    do {
+      int limit = index + (int) l;
+      if (array.length < limit) {
+        array = Arrays.copyOf(array, limit);
+      }
+      while (index < limit) {
+        array[index] = in.readFloat();
+        index++;
+      }
+    } while ((l = in.arrayNext()) > 0);
+    return array;
+  }
+
+  static double[] readArray(double[] array, long l, ResolvingDecoder in)
+      throws IOException {
+    int index = 0;
+    do {
+      int limit = index + (int) l;
+      if (array.length < limit) {
+        array = Arrays.copyOf(array, limit);
+      }
+      while (index < limit) {
+        array[index] = in.readDouble();
+        index++;
+      }
+    } while ((l = in.arrayNext()) > 0);
+    return array;
+  }
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroAlias.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroAlias.java
new file mode 100644
index 0000000..bcd0fd1
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroAlias.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Adds the given name and space as an alias to the schema.
+ * Avro files of this schema can be read into classes
+ * named by the alias. 
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface AvroAlias {
+  String NULL = "NOT A VALID NAMESPACE";
+
+  String alias();
+  String space() default NULL;
+}
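+
+// Usage sketch (hypothetical names): data written as
+// org.example.old.OldRecord can then be read into NewRecord:
+//
+//   @AvroAlias(alias = "OldRecord", space = "org.example.old")
+//   public class NewRecord { ... }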
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroDefault.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroDefault.java
new file mode 100644
index 0000000..fb7607f
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroDefault.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Specifies a default value for a field as a JSON string.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.FIELD})
+public @interface AvroDefault {
+  String value();
+}
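+
+// Usage sketch (hypothetical field): the value is a JSON literal, so a
+// string default needs embedded quotes:
+//
+//   @AvroDefault("\"unknown\"")
+//   String name;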
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroEncode.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroEncode.java
new file mode 100644
index 0000000..d8f056d
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroEncode.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Expert: Fields with this annotation are encoded using the given custom encoder.
+ * This annotation overrides {@link org.apache.avro.reflect.Stringable Stringable} and {@link org.apache.avro.reflect.Nullable Nullable}.
+ * Since no validation is performed, invalid custom encodings may result in an unreadable file.
+ * Use of {@link org.apache.avro.io.ValidatingEncoder} is recommended.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.FIELD)
+public @interface AvroEncode {
+  Class<? extends CustomEncoding<?>> using();
+}
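
A hedged sketch of the annotation above; DateAsLongEncoding is the
CustomEncoding subclass added later in this commit, and the class and
field names are invented:

    public class Event {
      @AvroEncode(using = DateAsLongEncoding.class)
      java.util.Date timestamp;   // stored as an Avro long
    }
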
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroIgnore.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroIgnore.java
new file mode 100644
index 0000000..5245c70
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroIgnore.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Marks a field to be ignored, as though it were transient.
+ * Such a field will not be written to or read from Avro data
+ * when using reflection.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.FIELD})
+public @interface AvroIgnore {
+}
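
A hedged sketch of the annotation above; the names are illustrative:

    public class User {
      String name;               // part of the reflected schema
      @AvroIgnore
      String cachedDisplayName;  // excluded from the reflected schema
    }
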
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroMeta.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroMeta.java
new file mode 100644
index 0000000..9dddbf5
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroMeta.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Adds the given key/value pair as metadata to the schema,
+ * at the corresponding node.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.FIELD})
+public @interface AvroMeta {
+  String key();
+  String value();
+}
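
A hedged sketch of the annotation above, showing both permitted targets;
all keys and values are invented:

    @AvroMeta(key = "owner", value = "data-team")   // metadata on the record
    public class Order {
      @AvroMeta(key = "unit", value = "cents")      // metadata on the field
      long total;
    }
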
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroName.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroName.java
new file mode 100644
index 0000000..5603f26
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroName.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Sets the Avro name for this Java field.
+ * When reading into this class, a ReflectDatumReader
+ * looks for a schema field with the Avro name.
+ */
+@Target(ElementType.FIELD)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface AvroName {
+  String value();
+}
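
A hedged sketch of the annotation above; the names are illustrative:

    public class Person {
      @AvroName("FirstName")   // schema field name differs from the Java name
      String firstName;
    }
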
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroSchema.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroSchema.java
new file mode 100644
index 0000000..0c34546
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/AvroSchema.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Declares that a Java type should have a specified Avro schema, overriding
+ * the normally inferred schema.  May be used for classes, parameters, fields
+ * and method return types.
+ * <p> This is useful for slight alterations to the schema that would be
+ * automatically inferred.  For example, a
+ * {@code List<Integer>} whose elements may be null might use the
+ * annotation
+ * <pre>@AvroSchema("{\"type\":\"array\",\"items\":[\"null\",\"int\"]}")</pre>
+ * since the {@link Nullable} annotation could not be used here.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.PARAMETER,
+         ElementType.METHOD, ElementType.FIELD})
+@Documented
+public @interface AvroSchema {
+  /** The schema to use for this value. */
+  String value();
+}
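
A hedged sketch of the annotation above, reusing the schema string from
its own javadoc; the class and field names are invented:

    public class Sample {
      @AvroSchema("{\"type\":\"array\",\"items\":[\"null\",\"int\"]}")
      java.util.List<Integer> counts;   // elements may be null
    }
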
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/CustomEncoding.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/CustomEncoding.java
new file mode 100644
index 0000000..1c180e5
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/CustomEncoding.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.Encoder;
+
+/**
+ * Expert: a custom encoder and decoder that writes
+ * an object directly to Avro.
+ * No validation is performed to check that the encoding conforms to the schema.
+ * Invalid implementations may result in an unreadable file.
+ * The use of {@link org.apache.avro.io.ValidatingEncoder} is recommended.
+ *
+ * @param <T> The class of objects that can be serialized with this encoder / decoder.
+ */
+public abstract class CustomEncoding<T> {
+
+  protected Schema schema;
+
+  protected abstract void write(Object datum, Encoder out) throws IOException;
+
+  protected abstract T read(Object reuse, Decoder in) throws IOException;
+
+  T read(Decoder in) throws IOException {
+    return this.read(null, in);
+  }
+
+  protected Schema getSchema() {
+    return schema;
+  }
+
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/DateAsLongEncoding.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/DateAsLongEncoding.java
new file mode 100644
index 0000000..6cfcb2e
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/DateAsLongEncoding.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.io.IOException;
+import java.util.Date;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.Encoder;
+
+/**
+ * This encoder/decoder writes a java.util.Date object to Avro as a long
+ * and reads a Date object from a long.
+ * The long stores the number of milliseconds since January 1, 1970, 00:00:00 GMT
+ * represented by the Date object.
+ */
+public class DateAsLongEncoding extends CustomEncoding<Date> {
+  {
+    schema = Schema.create(Schema.Type.LONG);
+    schema.addProp("CustomEncoding", "DateAsLongEncoding");
+  }
+
+  @Override
+  protected final void write(Object datum, Encoder out) throws IOException {
+    out.writeLong(((Date)datum).getTime());
+  }
+
+  @Override
+  protected final Date read(Object reuse, Decoder in) throws IOException {
+    if (reuse instanceof Date) {     // instanceof is false for null
+      ((Date)reuse).setTime(in.readLong());
+      return (Date)reuse;
+    }
+    return new Date(in.readLong());
+  }
+
+}
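
A hedged end-to-end sketch of how the encoding above is typically
exercised; "Event" is the invented class from the AvroEncode example,
and the snippet assumes the standard org.apache.avro.io, reflect, and
java.io imports:

    Event event = new Event();
    event.timestamp = new java.util.Date();
    Schema schema = ReflectData.get().getSchema(Event.class);
    ReflectDatumWriter<Event> writer = new ReflectDatumWriter<Event>(schema);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    writer.write(event, encoder);   // the Date field goes through write() above
    encoder.flush();
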
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccess.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccess.java
new file mode 100644
index 0000000..ba739bc
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccess.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.lang.reflect.Field;
+
+abstract class FieldAccess {
+  
+  protected abstract FieldAccessor getAccessor(Field field);
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessReflect.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessReflect.java
new file mode 100644
index 0000000..680139a
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessReflect.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.Encoder;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+
+class FieldAccessReflect extends FieldAccess {
+
+  @Override
+  protected FieldAccessor getAccessor(Field field) {
+    AvroEncode enc = field.getAnnotation(AvroEncode.class);
+    if (enc != null)
+      try {
+        return new ReflectionBasesAccessorCustomEncoded(field, enc.using().newInstance());
+      } catch (Exception e) {
+        throw new AvroRuntimeException("Could not instantiate custom Encoding");
+      }
+    return new ReflectionBasedAccessor(field);
+  }
+
+  private class ReflectionBasedAccessor extends FieldAccessor {
+    protected final Field field;
+    private boolean isStringable;
+    private boolean isCustomEncoded;
+
+    public ReflectionBasedAccessor(Field field) {
+      this.field = field;
+      this.field.setAccessible(true);
+      isStringable = field.isAnnotationPresent(Stringable.class);
+      isCustomEncoded = field.isAnnotationPresent(AvroEncode.class); 
+    }
+
+    @Override
+    public String toString() {
+      return field.getName();
+    }
+
+    @Override
+    public Object get(Object object) throws IllegalAccessException {
+      return field.get(object);
+    }
+
+    @Override
+    public void set(Object object, Object value) throws IllegalAccessException,
+        IOException {
+      field.set(object, value);
+    }
+    
+    @Override
+    protected Field getField() {
+      return field;
+    }
+    
+    @Override
+    protected boolean isStringable() {
+      return isStringable;
+    }
+    
+    @Override
+    protected boolean isCustomEncoded() {
+      return isCustomEncoded;
+    }
+  }
+
+  private final class ReflectionBasesAccessorCustomEncoded extends ReflectionBasedAccessor {
+
+    private CustomEncoding<?> encoding;
+
+    public ReflectionBasesAccessorCustomEncoded(Field f, CustomEncoding<?> encoding) {
+      super(f);
+      this.encoding = encoding;
+    }
+
+    @Override
+    protected void read(Object object, Decoder in) throws IOException {
+      try {
+        field.set(object, encoding.read(in));
+      } catch (IllegalAccessException e) {
+        throw new AvroRuntimeException(e);
+      }
+    }
+
+    @Override
+    protected void write(Object object, Encoder out) throws IOException {
+      try {
+        encoding.write(field.get(object), out);
+      } catch (IllegalAccessException e) {
+        throw new AvroRuntimeException(e);
+      }
+    }
+
+    @Override
+    protected boolean isCustomEncoded() {
+      return true;
+    }
+
+    @Override
+    protected boolean supportsIO() {
+      return true;
+    }
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessUnsafe.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessUnsafe.java
new file mode 100644
index 0000000..b5ade1e
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessUnsafe.java
@@ -0,0 +1,365 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.Encoder;
+
+import sun.misc.Unsafe;
+
+@SuppressWarnings("restriction")
+class FieldAccessUnsafe extends FieldAccess {
+
+  private static final Unsafe UNSAFE;
+
+  static {
+    try {
+      Field theUnsafe = Unsafe.class.getDeclaredField("theUnsafe");
+      theUnsafe.setAccessible(true);
+      UNSAFE = (Unsafe) theUnsafe.get(null);
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  protected FieldAccessor getAccessor(Field field) {
+    AvroEncode enc = field.getAnnotation(AvroEncode.class);
+    if (enc != null)
+      try {
+        return new UnsafeCustomEncodedField(field, enc.using().newInstance());
+      } catch (Exception e) {
+        throw new AvroRuntimeException("Could not instantiate custom encoding", e);
+      }
+    Class<?> c = field.getType();
+    if (c == int.class)
+      return new UnsafeIntField(field);
+    else if (c == long.class)
+      return new UnsafeLongField(field);
+    else if (c == byte.class)
+      return new UnsafeByteField(field);
+    else if (c == float.class)
+      return new UnsafeFloatField(field);
+    else if (c == double.class)
+      return new UnsafeDoubleField(field);
+    else if (c == char.class)
+      return new UnsafeCharField(field);
+    else if (c == boolean.class)
+      return new UnsafeBooleanField(field);
+    else if (c == short.class)
+      return new UnsafeShortField(field);
+    else
+      return new UnsafeObjectField(field);
+  }
+
+  abstract static class UnsafeCachedField extends FieldAccessor {
+    protected final long offset;
+    protected Field field;
+    protected final boolean isStringable;
+
+    UnsafeCachedField(Field f) {
+      this.offset = UNSAFE.objectFieldOffset(f);
+      this.field = f;
+      this.isStringable = f.isAnnotationPresent(Stringable.class);
+    }
+
+    @Override
+    protected Field getField() {
+      return field;
+    }
+
+    @Override
+    protected boolean supportsIO() {
+      return true;
+    }
+    
+    @Override
+    protected boolean isStringable() {
+      return isStringable;
+    }
+  }
+
+  final static class UnsafeIntField extends UnsafeCachedField {
+    UnsafeIntField(Field f) {
+      super(f);
+    }
+
+    @Override
+    protected void set(Object object, Object value) {
+      UNSAFE.putInt(object, offset, (Integer) value);
+    }
+
+    @Override
+    protected Object get(Object object) {
+      return UNSAFE.getInt(object, offset);
+    }
+
+    @Override
+    protected void read(Object object, Decoder in) throws IOException {
+      UNSAFE.putInt(object, offset, in.readInt());
+    }
+
+    @Override
+    protected void write(Object object, Encoder out) throws IOException {
+      out.writeInt(UNSAFE.getInt(object, offset));
+    }
+  }
+
+  final static class UnsafeFloatField extends UnsafeCachedField {
+    protected UnsafeFloatField(Field f) {
+      super(f);
+    }
+
+    @Override
+    protected void set(Object object, Object value) {
+      UNSAFE.putFloat(object, offset, (Float) value);
+    }
+
+    @Override
+    protected Object get(Object object) {
+      return UNSAFE.getFloat(object, offset);
+    }
+
+    @Override
+    protected void read(Object object, Decoder in) throws IOException {
+      UNSAFE.putFloat(object, offset, in.readFloat());
+    }
+
+    @Override
+    protected void write(Object object, Encoder out) throws IOException {
+      out.writeFloat(UNSAFE.getFloat(object, offset));
+    }
+  }
+
+  final static class UnsafeShortField extends UnsafeCachedField {
+    protected UnsafeShortField(Field f) {
+      super(f);
+    }
+
+    @Override
+    protected void set(Object object, Object value) {
+      UNSAFE.putShort(object, offset, (Short) value);
+    }
+
+    @Override
+    protected Object get(Object object) {
+      return UNSAFE.getShort(object, offset);
+    }
+
+    @Override
+    protected void read(Object object, Decoder in) throws IOException {
+      UNSAFE.putShort(object, offset, (short) in.readInt());
+    }
+
+    @Override
+    protected void write(Object object, Encoder out) throws IOException {
+      out.writeInt(UNSAFE.getShort(object, offset));
+    }
+  }
+
+  final static class UnsafeByteField extends UnsafeCachedField {
+    protected UnsafeByteField(Field f) {
+      super(f);
+    }
+
+    @Override
+    protected void set(Object object, Object value) {
+      UNSAFE.putByte(object, offset, (Byte) value);
+    }
+
+    @Override
+    protected Object get(Object object) {
+      return UNSAFE.getByte(object, offset);
+    }
+
+    @Override
+    protected void read(Object object, Decoder in) throws IOException {
+      UNSAFE.putByte(object, offset, (byte) in.readInt());
+    }
+
+    @Override
+    protected void write(Object object, Encoder out) throws IOException {
+      out.writeInt(UNSAFE.getByte(object, offset));
+    }
+  }
+
+  final static class UnsafeBooleanField extends UnsafeCachedField {
+    protected UnsafeBooleanField(Field f) {
+      super(f);
+    }
+
+    @Override
+    protected void set(Object object, Object value) {
+      UNSAFE.putBoolean(object, offset, (Boolean) value);
+    }
+
+    @Override
+    protected Object get(Object object) {
+      return UNSAFE.getBoolean(object, offset);
+    }
+
+    @Override
+    protected void read(Object object, Decoder in) throws IOException {
+      UNSAFE.putBoolean(object, offset, in.readBoolean());
+    }
+
+    @Override
+    protected void write(Object object, Encoder out) throws IOException {
+      out.writeBoolean(UNSAFE.getBoolean(object, offset));
+    }
+  }
+
+  final static class UnsafeCharField extends UnsafeCachedField {
+    protected UnsafeCharField(Field f) {
+      super(f);
+    }
+
+    @Override
+    protected void set(Object object, Object value) {
+      UNSAFE.putChar(object, offset, (Character) value);
+    }
+
+    @Override
+    protected Object get(Object object) {
+      return UNSAFE.getChar(object, offset);
+    }
+
+    @Override
+    protected void read(Object object, Decoder in) throws IOException {
+      UNSAFE.putChar(object, offset, (char) in.readInt());
+    }
+
+    @Override
+    protected void write(Object object, Encoder out) throws IOException {
+      out.writeInt(UNSAFE.getChar(object, offset));
+    }
+  }
+
+  final static class UnsafeLongField extends UnsafeCachedField {
+    protected UnsafeLongField(Field f) {
+      super(f);
+    }
+
+    @Override
+    protected void set(Object object, Object value) {
+      UNSAFE.putLong(object, offset, (Long) value);
+    }
+
+    @Override
+    protected Object get(Object object) {
+      return UNSAFE.getLong(object, offset);
+    }
+
+    @Override
+    protected void read(Object object, Decoder in) throws IOException {
+      UNSAFE.putLong(object, offset, in.readLong());
+    }
+
+    @Override
+    protected void write(Object object, Encoder out) throws IOException {
+      out.writeLong(UNSAFE.getLong(object, offset));
+    }
+  }
+
+  final static class UnsafeDoubleField extends UnsafeCachedField {
+    protected UnsafeDoubleField(Field f) {
+      super(f);
+    }
+
+    @Override
+    protected void set(Object object, Object value) {
+      UNSAFE.putDouble(object, offset, (Double) value);
+    }
+
+    @Override
+    protected Object get(Object object) {
+      return UNSAFE.getDouble(object, offset);
+    }
+
+    @Override
+    protected void read(Object object, Decoder in) throws IOException {
+      UNSAFE.putDouble(object, offset, in.readDouble());
+    }
+
+    @Override
+    protected void write(Object object, Encoder out) throws IOException {
+      out.writeDouble(UNSAFE.getDouble(object, offset));
+    }
+  }
+
+  final static class UnsafeObjectField extends UnsafeCachedField {
+    protected UnsafeObjectField(Field f) {
+      super(f);
+    }
+
+    @Override
+    protected void set(Object object, Object value) {
+      UNSAFE.putObject(object, offset, value);
+    }
+
+    @Override
+    protected Object get(Object object) {
+      return UNSAFE.getObject(object, offset);
+    }
+    
+    @Override
+    protected boolean supportsIO() {
+      return false;
+    }
+    
+  }
+  
+  final static class UnsafeCustomEncodedField extends UnsafeCachedField {
+
+    private CustomEncoding<?> encoding;
+    
+    UnsafeCustomEncodedField(Field f, CustomEncoding<?> encoding) {
+      super(f);
+      this.encoding = encoding;
+    }
+
+    @Override
+    protected Object get(Object object) throws IllegalAccessException {
+      return UNSAFE.getObject(object, offset);
+    }
+
+    @Override
+    protected void set(Object object, Object value) throws IllegalAccessException, IOException {
+      UNSAFE.putObject(object, offset, value);
+    }
+    
+    @Override
+    protected void read(Object object, Decoder in) throws IOException {
+      UNSAFE.putObject(object, offset, encoding.read(in));
+    }
+
+    @Override
+    protected void write(Object object, Encoder out) throws IOException {
+      encoding.write(UNSAFE.getObject(object, offset), out);
+    }
+    
+    @Override
+    protected boolean isCustomEncoded() {
+      return true;
+    }
+  }
+}
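
The access pattern used throughout the class above, reduced to a hedged
standalone sketch; sun.misc.Unsafe is a JDK-internal API, so this is
illustrative rather than recommended, and "Target", "target", and
"count" are invented names:

    Field f = Target.class.getDeclaredField("count");
    long offset = UNSAFE.objectFieldOffset(f);   // computed once, then cached
    UNSAFE.putInt(target, offset, 42);           // raw write, no access checks
    int v = UNSAFE.getInt(target, offset);       // raw read
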
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessor.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessor.java
new file mode 100644
index 0000000..b76d185
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/FieldAccessor.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.Encoder;
+
+abstract class FieldAccessor {
+  FieldAccessor() {
+  }
+
+  protected abstract Object get(Object object) throws IllegalAccessException;
+
+  protected abstract void set(Object object, Object value)
+      throws IllegalAccessException, IOException;
+
+  protected void read(Object object, Decoder in) throws IOException {
+  }
+
+  protected void write(Object object, Encoder out) throws IOException {
+  }
+
+  protected boolean supportsIO() {
+    return false;
+  }
+  
+  protected abstract Field getField();
+  
+  protected boolean isStringable() {
+    return false;
+  }
+  
+  protected boolean isCustomEncoded() {
+    return false;
+  }
+  
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/Nullable.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/Nullable.java
new file mode 100644
index 0000000..bf62beb
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/Nullable.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/** Declares that null is a valid value for a Java type.  Causes an Avro union
+ * with null to be used.  May be applied to parameters, fields and methods (to
+ * declare the return type).
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.PARAMETER, ElementType.METHOD, ElementType.FIELD})
+@Documented
+public @interface Nullable {}
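
A hedged sketch of the annotation above; the names are illustrative:

    public class Account {
      @Nullable
      String nickname;   // reflected schema becomes a union of null and string
    }
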
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectData.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectData.java
new file mode 100644
index 0000000..827e1fa
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectData.java
@@ -0,0 +1,903 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Field;
+import java.lang.reflect.GenericArrayType;
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.IdentityHashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.avro.AvroRemoteException;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.Conversion;
+import org.apache.avro.LogicalType;
+import org.apache.avro.Protocol;
+import org.apache.avro.Protocol.Message;
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericContainer;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericFixed;
+import org.apache.avro.generic.IndexedRecord;
+import org.apache.avro.io.BinaryData;
+import org.apache.avro.util.ClassUtils;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.specific.FixedSize;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.SchemaNormalization;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.node.NullNode;
+
+import com.thoughtworks.paranamer.CachingParanamer;
+import com.thoughtworks.paranamer.Paranamer;
+
+/** Utilities to use existing Java classes and interfaces via reflection. */
+public class ReflectData extends SpecificData {
+  /** {@link ReflectData} implementation that permits null field values.  The
+   * schema generated for each field is a union of its declared type and
+   * null. */
+  public static class AllowNull extends ReflectData {
+
+    private static final AllowNull INSTANCE = new AllowNull();
+
+    /** Return the singleton instance. */
+    public static AllowNull get() { return INSTANCE; }
+
+    @Override
+    protected Schema createFieldSchema(Field field, Map<String, Schema> names) {
+      Schema schema = super.createFieldSchema(field, names);
+      if (field.getType().isPrimitive()) {
+        // for primitive values, such as int, a null will result in a
+        // NullPointerException at read time
+        return schema;
+      }
+      return makeNullable(schema);
+    }
+  }
+  
+  private static final ReflectData INSTANCE = new ReflectData();
+
+  /** For subclasses.  Applications normally use {@link ReflectData#get()}. */
+  public ReflectData() {}
+  
+  /** Construct with a particular classloader. */
+  public ReflectData(ClassLoader classLoader) {
+    super(classLoader);
+  }
+  
+  /** Return the singleton instance. */
+  public static ReflectData get() { return INSTANCE; }
+
+  /** Cause a class to be treated as though it had a {@link Stringable}
+   * annotation. */
+  public ReflectData addStringable(Class c) {
+    stringableClasses.add(c);
+    return this;
+  }
+
+  @Override
+  public DatumReader createDatumReader(Schema schema) {
+    return new ReflectDatumReader(schema, schema, this);
+  }
+
+  @Override
+  public DatumReader createDatumReader(Schema writer, Schema reader) {
+    return new ReflectDatumReader(writer, reader, this);
+  }
+
+  @Override
+  public DatumWriter createDatumWriter(Schema schema) {
+    return new ReflectDatumWriter(schema, this);
+  }
+
+  @Override
+  public void setField(Object record, String name, int position, Object o) {
+    setField(record, name, position, o, null);
+  }
+
+  @Override
+  protected void setField(Object record, String name, int pos, Object o,
+    Object state) {
+    if (record instanceof IndexedRecord) {
+      super.setField(record, name, pos, o);
+      return;
+    }
+    try {
+      getAccessorForField(record, name, pos, state).set(record, o);
+    } catch (IllegalAccessException e) {
+      throw new AvroRuntimeException(e);
+    } catch (IOException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  @Override
+  public Object getField(Object record, String name, int position) {
+    return getField(record, name, position, null);
+  }
+  
+  @Override
+  protected Object getField(Object record, String name, int pos, Object state) {
+    if (record instanceof IndexedRecord) {
+      return super.getField(record, name, pos);
+    }
+    try {
+      return getAccessorForField(record, name, pos, state).get(record);
+    } catch (IllegalAccessException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+    
+  private FieldAccessor getAccessorForField(Object record, String name,
+      int pos, Object optionalState) {
+    if (optionalState != null) {
+      return ((FieldAccessor[])optionalState)[pos];
+    }
+    return getFieldAccessor(record.getClass(), name);
+  }
+
+  @Override
+  protected boolean isRecord(Object datum) {
+    if (datum == null) return false;
+    if (super.isRecord(datum)) return true;
+    if (datum instanceof Collection) return false;
+    if (datum instanceof Map) return false;
+    if (datum instanceof GenericFixed) return false;
+    return getSchema(datum.getClass()).getType() == Schema.Type.RECORD;
+  }
+
+  /**
+   * Returns true also for non-string-keyed maps, which are written as an array
+   * of key/value pair records.
+   */
+  @Override
+  protected boolean isArray(Object datum) {
+    if (datum == null) return false;
+    return (datum instanceof Collection)
+      || datum.getClass().isArray()
+      || isNonStringMap(datum);
+  }
+
+  @Override
+  protected Collection getArrayAsCollection(Object datum) {
+    return (datum instanceof Map) ? ((Map)datum).entrySet() : (Collection)datum;
+  }
+
+  @Override
+  protected boolean isBytes(Object datum) {
+    if (datum == null) return false;
+    if (super.isBytes(datum)) return true;
+    Class c = datum.getClass();
+    return c.isArray() && c.getComponentType() == Byte.TYPE;
+  }
+
+  @Override
+  protected Schema getRecordSchema(Object record) {
+    if (record instanceof GenericContainer)
+      return super.getRecordSchema(record);
+    return getSchema(record.getClass());
+  }
+
+  @Override
+  public boolean validate(Schema schema, Object datum) {
+    switch (schema.getType()) {
+    case ARRAY:
+      if (!datum.getClass().isArray())
+        return super.validate(schema, datum);
+      int length = java.lang.reflect.Array.getLength(datum);
+      for (int i = 0; i < length; i++)
+        if (!validate(schema.getElementType(),
+                      java.lang.reflect.Array.get(datum, i)))
+          return false;
+      return true;
+    default:
+      return super.validate(schema, datum);
+    }
+  }
+  
+  static final ConcurrentHashMap<Class<?>, ClassAccessorData> 
+    ACCESSOR_CACHE = new ConcurrentHashMap<Class<?>, ClassAccessorData>();
+
+  private static class ClassAccessorData {
+    private final Class<?> clazz;
+    private final Map<String, FieldAccessor> byName =
+        new HashMap<String, FieldAccessor>();
+    private final IdentityHashMap<Schema, FieldAccessor[]> bySchema =
+        new IdentityHashMap<Schema, FieldAccessor[]>();
+        
+    private ClassAccessorData(Class<?> c) {
+      clazz = c;
+      for(Field f : getFields(c, false)) {
+        if (f.isAnnotationPresent(AvroIgnore.class)) {
+          continue;
+        }
+        FieldAccessor accessor = ReflectionUtil.getFieldAccess().getAccessor(f);
+        AvroName avroname = f.getAnnotation(AvroName.class);
+        byName.put(avroname != null ? avroname.value() : f.getName(),
+                   accessor);
+      }
+    }
+    
+    /** 
+     * Return the field accessors as an array, indexed by the field
+     * index of the given schema.
+     */
+    private synchronized FieldAccessor[] getAccessorsFor(Schema schema) {
+      FieldAccessor[] result = bySchema.get(schema);
+      if (result == null) {
+        result = createAccessorsFor(schema);
+        bySchema.put(schema, result);
+      }
+      return result;
+    }
+
+    private FieldAccessor[] createAccessorsFor(Schema schema) {
+      List<Schema.Field> avroFields = schema.getFields();
+      FieldAccessor[] result = new FieldAccessor[avroFields.size()];
+      for(Schema.Field avroField : schema.getFields()) {
+        result[avroField.pos()] = byName.get(avroField.name());
+      }
+      return result;
+    }
+
+    private FieldAccessor getAccessorFor(String fieldName) {
+      FieldAccessor result = byName.get(fieldName);
+      if (result == null) {
+        throw new AvroRuntimeException(
+            "No field named " + fieldName + " in: " + clazz);
+      }
+      return result;
+    }
+  }
+  
+  private ClassAccessorData getClassAccessorData(Class<?> c) {
+    ClassAccessorData data = ACCESSOR_CACHE.get(c);
+    if (data == null && !IndexedRecord.class.isAssignableFrom(c)) {
+      ClassAccessorData newData = new ClassAccessorData(c);
+      data = ACCESSOR_CACHE.putIfAbsent(c, newData);
+      if (null == data) {
+        data = newData;
+      }
+    }
+    return data;
+  }
+  
+  private FieldAccessor[] getFieldAccessors(Class<?> c, Schema s) {
+    ClassAccessorData data = getClassAccessorData(c);
+    if (data != null) {
+      return data.getAccessorsFor(s);
+    }
+    return null;
+  }
+  
+  private FieldAccessor getFieldAccessor(Class<?> c, String fieldName) {
+    ClassAccessorData data = getClassAccessorData(c);
+    if (data != null) {
+      return data.getAccessorFor(fieldName);
+    }
+    return null;
+  }
+
+  /** @deprecated  Replaced by {@link SpecificData#CLASS_PROP} */
+  @Deprecated
+  static final String CLASS_PROP = "java-class";
+  /** @deprecated  Replaced by {@link SpecificData#KEY_CLASS_PROP} */
+  @Deprecated
+  static final String KEY_CLASS_PROP = "java-key-class";
+  /** @deprecated  Replaced by {@link SpecificData#ELEMENT_PROP} */
+  @Deprecated
+  static final String ELEMENT_PROP = "java-element-class";
+
+  private static final Map<String,Class> CLASS_CACHE =
+               new ConcurrentHashMap<String, Class>();
+
+  static Class getClassProp(Schema schema, String prop) {
+    String name = schema.getProp(prop);
+    if (name == null) return null;
+    Class c = CLASS_CACHE.get(name);
+    if (c != null)
+      return c;
+    try {
+      c = ClassUtils.forName(name);
+      CLASS_CACHE.put(name, c);
+    } catch (ClassNotFoundException e) {
+      throw new AvroRuntimeException(e);
+    }
+    return c;
+  }
+
+  private static final Class BYTES_CLASS = new byte[0].getClass();
+  private static final IdentityHashMap<Class, Class> ARRAY_CLASSES;
+  static {
+    ARRAY_CLASSES = new IdentityHashMap<Class, Class>();
+    ARRAY_CLASSES.put(byte.class, byte[].class);
+    ARRAY_CLASSES.put(char.class, char[].class);
+    ARRAY_CLASSES.put(short.class, short[].class);
+    ARRAY_CLASSES.put(int.class, int[].class);
+    ARRAY_CLASSES.put(long.class, long[].class);
+    ARRAY_CLASSES.put(float.class, float[].class);
+    ARRAY_CLASSES.put(double.class, double[].class);
+    ARRAY_CLASSES.put(boolean.class, boolean[].class);
+  }
+
+  /**
+   * Returns false for non-string-keyed maps because Avro writes such maps
+   * as an array of key/value records; even their JSON representation is an array.
+   */
+  protected boolean isMap(Object datum) {
+    return (datum instanceof Map) && !isNonStringMap(datum);
+  }
+
+  /* Without the Field or Schema corresponding to the datum, it is
+   * not possible to accurately determine whether the key is stringable,
+   * so we check the class of the keys instead.
+   * If the map is empty, it doesn't matter whether it is considered
+   * a string-key map or a non-string-key map.
+   */
+  private boolean isNonStringMap(Object datum) {
+    if (datum instanceof Map) {
+      Map m = (Map)datum;
+      if (m.size() > 0) {
+        Class keyClass = m.keySet().iterator().next().getClass();
+        if (isStringable(keyClass) || keyClass == String.class)
+          return false;
+        return true;
+      }
+    }
+    return false;
+  }
+
+  @Override
+  public Class getClass(Schema schema) {
+    // see if the element class will be converted and use that class
+    Conversion<?> conversion = getConversionFor(schema.getLogicalType());
+    if (conversion != null) {
+      return conversion.getConvertedType();
+    }
+
+    switch (schema.getType()) {
+    case ARRAY:
+      Class collectionClass = getClassProp(schema, CLASS_PROP);
+      if (collectionClass != null)
+        return collectionClass;
+      Class elementClass = getClass(schema.getElementType());
+      if(elementClass.isPrimitive()) {
+        // avoid expensive translation to array type when primitive
+        return ARRAY_CLASSES.get(elementClass);
+      } else {
+        return java.lang.reflect.Array.newInstance(elementClass, 0).getClass();
+      }
+    case STRING:
+      Class stringClass = getClassProp(schema, CLASS_PROP);
+      if (stringClass != null)
+        return stringClass;
+      return String.class;
+    case BYTES:   return BYTES_CLASS;
+    case INT:
+      String intClass = schema.getProp(CLASS_PROP);
+      if (Byte.class.getName().equals(intClass))  return Byte.TYPE;
+      if (Short.class.getName().equals(intClass)) return Short.TYPE;
+      if (Character.class.getName().equals(intClass)) return Character.TYPE;
+    default:
+      return super.getClass(schema);
+    }
+  }
+
+  static final String NS_MAP_ARRAY_RECORD =   // record name prefix
+    "org.apache.avro.reflect.Pair";
+  static final String NS_MAP_KEY = "key";     // name of key field
+  static final int NS_MAP_KEY_INDEX = 0;      // index of key field
+  static final String NS_MAP_VALUE = "value"; // name of value field
+  static final int NS_MAP_VALUE_INDEX = 1;    // index of value field
+
+  /*
+   * Maps with non-string keys need special handling, so we convert them
+   * to an array of records as: [{"key":{...}, "value":{...}}]
+   */
+  Schema createNonStringMapSchema(Type keyType, Type valueType,
+                                  Map<String, Schema> names) {
+    Schema keySchema = createSchema(keyType, names);
+    Schema valueSchema = createSchema(valueType, names);
+    Schema.Field keyField = 
+      new Schema.Field(NS_MAP_KEY, keySchema, null, null);
+    Schema.Field valueField = 
+      new Schema.Field(NS_MAP_VALUE, valueSchema, null, null);
+    String name = getNameForNonStringMapRecord(keyType, valueType,
+      keySchema, valueSchema);
+    Schema elementSchema = Schema.createRecord(name, null, null, false);
+    elementSchema.setFields(Arrays.asList(keyField, valueField));
+    Schema arraySchema = Schema.createArray(elementSchema);
+    return arraySchema;
+  }
+
+  /*
+   * Gets a unique and consistent name per key/value type pair, so if the
+   * same key/value types are seen in another map, the same name is
+   * generated again.
+   */
+  private String getNameForNonStringMapRecord(Type keyType, Type valueType,
+                                  Schema keySchema, Schema valueSchema) {
+
+    // Generate a nice name for classes in java* package
+    if (keyType instanceof Class && valueType instanceof Class) {
+
+      Class keyClass = (Class)keyType;
+      Class valueClass = (Class)valueType;
+      Package pkg1 = keyClass.getPackage();
+      Package pkg2 = valueClass.getPackage();
+
+      if (pkg1 != null && pkg1.getName().startsWith("java") &&
+        pkg2 != null && pkg2.getName().startsWith("java")) {
+        return NS_MAP_ARRAY_RECORD +
+          keyClass.getSimpleName() + valueClass.getSimpleName();
+      }
+    }
+
+    String name = keySchema.getFullName() + valueSchema.getFullName();
+    long fingerprint = 0;
+    try {
+      fingerprint = SchemaNormalization.fingerprint64(name.getBytes("UTF-8"));
+    } catch (UnsupportedEncodingException e) {
+      String msg = "Unable to create fingerprint for ("
+                   + keyType + ", "  + valueType + ") pair";
+      throw new AvroRuntimeException(msg, e);
+    }
+    if (fingerprint < 0) fingerprint = -fingerprint;  // ignore sign
+    String fpString = Long.toString(fingerprint, 16); // hex
+    return NS_MAP_ARRAY_RECORD + fpString;
+  }
+
+  static boolean isNonStringMapSchema(Schema s) {
+    if (s != null && s.getType() == Schema.Type.ARRAY) {
+      Class c = getClassProp(s, CLASS_PROP);
+      if (c != null && Map.class.isAssignableFrom (c))
+        return true;
+    }
+    return false;
+  }
+
+  @Override
+  protected Schema createSchema(Type type, Map<String,Schema> names) {
+    if (type instanceof GenericArrayType) {                  // generic array
+      Type component = ((GenericArrayType)type).getGenericComponentType();
+      if (component == Byte.TYPE)                            // byte array
+        return Schema.create(Schema.Type.BYTES);           
+      Schema result = Schema.createArray(createSchema(component, names));
+      setElement(result, component);
+      return result;
+    } else if (type instanceof ParameterizedType) {
+      ParameterizedType ptype = (ParameterizedType)type;
+      Class raw = (Class)ptype.getRawType();
+      Type[] params = ptype.getActualTypeArguments();
+      if (Map.class.isAssignableFrom(raw)) {                 // Map
+        Class key = (Class)params[0];
+        if (isStringable(key)) {                             // Stringable key
+          Schema schema = Schema.createMap(createSchema(params[1], names));
+          schema.addProp(KEY_CLASS_PROP, key.getName());
+          return schema;
+        } else if (key != String.class) {
+          Schema schema = createNonStringMapSchema(params[0], params[1], names);
+          schema.addProp(CLASS_PROP, raw.getName());
+          return schema;
+        }
+      } else if (Collection.class.isAssignableFrom(raw)) {   // Collection
+        if (params.length != 1)
+          throw new AvroTypeException("No array type specified.");
+        Schema schema = Schema.createArray(createSchema(params[0], names));
+        schema.addProp(CLASS_PROP, raw.getName());
+        return schema;
+      }
+    } else if ((type == Byte.class) || (type == Byte.TYPE)) {
+      Schema result = Schema.create(Schema.Type.INT);
+      result.addProp(CLASS_PROP, Byte.class.getName());
+      return result;
+    } else if ((type == Short.class) || (type == Short.TYPE)) {
+      Schema result = Schema.create(Schema.Type.INT);
+      result.addProp(CLASS_PROP, Short.class.getName());
+      return result;
+    } else if ((type == Character.class) || (type == Character.TYPE)) {
+        Schema result = Schema.create(Schema.Type.INT);
+        result.addProp(CLASS_PROP, Character.class.getName());
+        return result;
+    } else if (type instanceof Class) {                      // Class
+      Class<?> c = (Class<?>)type;
+      if (c.isPrimitive() ||                                 // primitives
+          c == Void.class || c == Boolean.class || 
+          c == Integer.class || c == Long.class ||
+          c == Float.class || c == Double.class || 
+          c == Byte.class || c == Short.class || 
+          c == Character.class)
+        return super.createSchema(type, names);
+      if (c.isArray()) {                                     // array
+        Class component = c.getComponentType();
+        if (component == Byte.TYPE) {                        // byte array
+          Schema result = Schema.create(Schema.Type.BYTES);
+          result.addProp(CLASS_PROP, c.getName());
+          return result;
+        }
+        Schema result = Schema.createArray(createSchema(component, names));
+        result.addProp(CLASS_PROP, c.getName());
+        setElement(result, component);
+        return result;
+      }
+      AvroSchema explicit = c.getAnnotation(AvroSchema.class);
+      if (explicit != null)                                  // explicit schema
+        return Schema.parse(explicit.value());
+      if (CharSequence.class.isAssignableFrom(c))            // String
+        return Schema.create(Schema.Type.STRING);
+      if (ByteBuffer.class.isAssignableFrom(c))              // bytes
+        return Schema.create(Schema.Type.BYTES);
+      if (Collection.class.isAssignableFrom(c))              // array
+        throw new AvroRuntimeException("Can't find element type of Collection");
+      Conversion<?> conversion = getConversionByClass(c);
+      if (conversion != null) {
+        return conversion.getRecommendedSchema();
+      }
+      String fullName = c.getName();
+      Schema schema = names.get(fullName);
+      if (schema == null) {
+        String name = c.getSimpleName();
+        String space = c.getPackage() == null ? "" : c.getPackage().getName();
+        if (c.getEnclosingClass() != null)                   // nested class
+          space = c.getEnclosingClass().getName() + "$";
+        Union union = c.getAnnotation(Union.class);
+        if (union != null) {                                 // union annotated
+          return getAnnotatedUnion(union, names);
+        } else if (isStringable(c)) {                        // Stringable
+          Schema result = Schema.create(Schema.Type.STRING);
+          result.addProp(CLASS_PROP, c.getName());
+          return result;
+        } else if (c.isEnum()) {                             // Enum
+          List<String> symbols = new ArrayList<String>();
+          Enum[] constants = (Enum[])c.getEnumConstants();
+          for (int i = 0; i < constants.length; i++)
+            symbols.add(constants[i].name());
+          schema = Schema.createEnum(name, null /* doc */, space, symbols);
+          consumeAvroAliasAnnotation(c, schema);
+        } else if (GenericFixed.class.isAssignableFrom(c)) { // fixed
+          int size = c.getAnnotation(FixedSize.class).value();
+          schema = Schema.createFixed(name, null /* doc */, space, size);
+          consumeAvroAliasAnnotation(c, schema);
+        } else if (IndexedRecord.class.isAssignableFrom(c)) { // specific
+          return super.createSchema(type, names);
+        } else {                                             // record
+          List<Schema.Field> fields = new ArrayList<Schema.Field>();
+          boolean error = Throwable.class.isAssignableFrom(c);
+          schema = Schema.createRecord(name, null /* doc */, space, error);
+          consumeAvroAliasAnnotation(c, schema);
+          names.put(c.getName(), schema);
+          for (Field field : getCachedFields(c))
+            if ((field.getModifiers()&(Modifier.TRANSIENT|Modifier.STATIC))==0 
+                && !field.isAnnotationPresent(AvroIgnore.class)) {
+              Schema fieldSchema = createFieldSchema(field, names);
+              AvroDefault defaultAnnotation
+                = field.getAnnotation(AvroDefault.class);
+              JsonNode defaultValue = (defaultAnnotation == null)
+                ? null
+                : Schema.parseJson(defaultAnnotation.value());
+              
+              if (defaultValue == null
+                  && fieldSchema.getType() == Schema.Type.UNION) {
+                Schema defaultType = fieldSchema.getTypes().get(0);
+                if (defaultType.getType() == Schema.Type.NULL) {
+                  defaultValue = NullNode.getInstance();
+                }
+              }
+              AvroName annotatedName = field.getAnnotation(AvroName.class);       // Rename fields
+              String fieldName = (annotatedName != null)            
+                ? annotatedName.value()
+                : field.getName();
+              Schema.Field recordField 
+                = new Schema.Field(fieldName, fieldSchema, null, defaultValue);
+             
+              AvroMeta meta = field.getAnnotation(AvroMeta.class);              // add metadata
+              if (meta != null) 
+                recordField.addProp(meta.key(), meta.value());  
+              for(Schema.Field f : fields) {                                
+                if (f.name().equals(fieldName)) 
+                  throw new AvroTypeException("double field entry: "+ fieldName);
+              }
+              fields.add(recordField);
+            }
+          if (error)                              // add Throwable message
+            fields.add(new Schema.Field("detailMessage", THROWABLE_MESSAGE,
+                                        null, null));
+          schema.setFields(fields);
+          AvroMeta meta = c.getAnnotation(AvroMeta.class);
+          if (meta != null) 
+              schema.addProp(meta.key(), meta.value());
+        }
+        names.put(fullName, schema);
+      }
+      return schema;
+    }
+    return super.createSchema(type, names);
+  }
+
+  @Override protected boolean isStringable(Class<?> c) {
+    return c.isAnnotationPresent(Stringable.class) || super.isStringable(c);
+  }
+
+  private static final Schema THROWABLE_MESSAGE =
+    makeNullable(Schema.create(Schema.Type.STRING));
+
+  // if array element type is a class with a union annotation, note it
+  // this is required because we cannot set a property on the union itself 
+  private void setElement(Schema schema, Type element) {
+    if (!(element instanceof Class)) return;
+    Class<?> c = (Class<?>)element;
+    Union union = c.getAnnotation(Union.class);
+    if (union != null)                          // element is annotated union
+      schema.addProp(ELEMENT_PROP, c.getName());
+  }
+
+  // construct a schema from a union annotation
+  private Schema getAnnotatedUnion(Union union, Map<String,Schema> names) {
+    List<Schema> branches = new ArrayList<Schema>();
+    for (Class branch : union.value())
+      branches.add(createSchema(branch, names));
+    return Schema.createUnion(branches);
+  }
+
+  /** Create and return a union of the null schema and the provided schema. */
+  public static Schema makeNullable(Schema schema) {
+    if (schema.getType() == Schema.Type.UNION) {
+      // check to see if the union already contains NULL
+      for (Schema subType : schema.getTypes()) {
+        if (subType.getType() == Schema.Type.NULL) {
+          return schema;
+        }
+      }
+      // add null as the first type in a new union
+      List<Schema> withNull = new ArrayList<Schema>();
+      withNull.add(Schema.create(Schema.Type.NULL));
+      withNull.addAll(schema.getTypes());
+      return Schema.createUnion(withNull);
+    } else {
+      // create a union with null
+      return Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL),
+          schema));
+    }
+  }
+
+  private static final Map<Class<?>,Field[]> FIELDS_CACHE =
+    new ConcurrentHashMap<Class<?>,Field[]>();
+  
+  // Return the fields of this class and its superclasses to serialize.
+  private static Field[] getCachedFields(Class<?> recordClass) {
+    Field[] fieldsList = FIELDS_CACHE.get(recordClass);
+    if (fieldsList != null)
+      return fieldsList;
+    fieldsList = getFields(recordClass, true);
+    FIELDS_CACHE.put(recordClass, fieldsList);
+    return fieldsList;
+  }
+
+  private static Field[] getFields(Class<?> recordClass, boolean excludeJava) {
+    Field[] fieldsList;
+    Map<String,Field> fields = new LinkedHashMap<String,Field>();
+    Class<?> c = recordClass;
+    do {
+      if (excludeJava && c.getPackage() != null
+          && c.getPackage().getName().startsWith("java."))
+        break;                                   // skip java built-in classes
+      for (Field field : c.getDeclaredFields())
+        if ((field.getModifiers() & (Modifier.TRANSIENT|Modifier.STATIC)) == 0)
+          if (fields.put(field.getName(), field) != null)
+            throw new AvroTypeException(c+" contains two fields named: "+field);
+      c = c.getSuperclass();
+    } while (c != null);
+    fieldsList = fields.values().toArray(new Field[0]);
+    return fieldsList;
+  }
+  
+  /** Create a schema for a field. */
+  protected Schema createFieldSchema(Field field, Map<String, Schema> names) {
+    AvroEncode enc = field.getAnnotation(AvroEncode.class);
+    if (enc != null)
+      try {
+          return enc.using().newInstance().getSchema();
+      } catch (Exception e) {
+          throw new AvroRuntimeException("Could not create schema from custom serializer for " + field.getName());
+      } 
+
+    AvroSchema explicit = field.getAnnotation(AvroSchema.class);
+    if (explicit != null)                                   // explicit schema
+      return Schema.parse(explicit.value());
+
+    Schema schema = createSchema(field.getGenericType(), names);
+    if (field.isAnnotationPresent(Stringable.class)) {      // Stringable
+      schema = Schema.create(Schema.Type.STRING);
+    }
+    if (field.isAnnotationPresent(Nullable.class))           // nullable
+      schema = makeNullable(schema);
+    return schema;
+  }
+
+  /** Return the protocol for a Java interface.
+   * <p>Note that this requires that <a
+   * href="http://paranamer.codehaus.org/">Paranamer</a> is run over compiled
+   * interface declarations, since Java 6 reflection does not provide access to
+   * method parameter names.  See Avro's build.xml for an example. */
+  @Override
+  public Protocol getProtocol(Class iface) {
+    Protocol protocol =
+      new Protocol(iface.getSimpleName(),
+                   iface.getPackage()==null?"":iface.getPackage().getName());
+    Map<String,Schema> names = new LinkedHashMap<String,Schema>();
+    Map<String,Message> messages = protocol.getMessages();
+    for (Method method : iface.getMethods())
+      if ((method.getModifiers() & Modifier.STATIC) == 0) {
+        String name = method.getName();
+        if (messages.containsKey(name))
+          throw new AvroTypeException("Two methods with same name: "+name);
+        messages.put(name, getMessage(method, protocol, names));
+      }
+
+    // reverse types, since they were defined in reference order
+    List<Schema> types = new ArrayList<Schema>();
+    types.addAll(names.values());
+    Collections.reverse(types);
+    protocol.setTypes(types);
+
+    return protocol;
+  }
+
+  private final Paranamer paranamer = new CachingParanamer();
+
+  private Message getMessage(Method method, Protocol protocol,
+                             Map<String,Schema> names) {
+    List<Schema.Field> fields = new ArrayList<Schema.Field>();
+    String[] paramNames = paranamer.lookupParameterNames(method);
+    Type[] paramTypes = method.getGenericParameterTypes();
+    Annotation[][] annotations = method.getParameterAnnotations();
+    for (int i = 0; i < paramTypes.length; i++) {
+      Schema paramSchema = getSchema(paramTypes[i], names);
+      for (int j = 0; j < annotations[i].length; j++) {
+        Annotation annotation = annotations[i][j];
+        if (annotation instanceof AvroSchema)     // explicit schema
+          paramSchema = Schema.parse(((AvroSchema)annotation).value());
+        else if (annotation instanceof Union)     // union
+          paramSchema = getAnnotatedUnion(((Union)annotation), names);
+        else if (annotation instanceof Nullable)  // nullable
+          paramSchema = makeNullable(paramSchema);
+      }
+      String paramName =  paramNames.length == paramTypes.length
+        ? paramNames[i]
+        : paramSchema.getName()+i;
+      fields.add(new Schema.Field(paramName, paramSchema,
+        null /* doc */, null));
+    }
+    Schema request = Schema.createRecord(fields);
+
+    Union union = method.getAnnotation(Union.class);
+    Schema response = union == null
+      ? getSchema(method.getGenericReturnType(), names)
+      : getAnnotatedUnion(union, names);
+    if (method.isAnnotationPresent(Nullable.class))          // nullable
+      response = makeNullable(response);
+
+    AvroSchema explicit = method.getAnnotation(AvroSchema.class);
+    if (explicit != null)                         // explicit schema
+      response = Schema.parse(explicit.value());
+
+    List<Schema> errs = new ArrayList<Schema>();
+    errs.add(Protocol.SYSTEM_ERROR);              // every method can throw
+    for (Type err : method.getGenericExceptionTypes())
+      if (err != AvroRemoteException.class) 
+        errs.add(getSchema(err, names));
+    Schema errors = Schema.createUnion(errs);
+    return protocol.createMessage(method.getName(), null /* doc */, request, response, errors);
+  }
+
+  private Schema getSchema(Type type, Map<String,Schema> names) {
+    try {
+      return createSchema(type, names);
+    } catch (AvroTypeException e) {               // friendly exception
+      throw new AvroTypeException("Error getting schema for "+type+": "
+                                  +e.getMessage(), e);
+    }
+  }
+
+  @Override
+  protected int compare(Object o1, Object o2, Schema s, boolean equals) {
+    switch (s.getType()) {
+    case ARRAY:
+      if (!o1.getClass().isArray())
+        break;
+      Schema elementType = s.getElementType();
+      int l1 = java.lang.reflect.Array.getLength(o1);
+      int l2 = java.lang.reflect.Array.getLength(o2);
+      int l = Math.min(l1, l2);
+      for (int i = 0; i < l; i++) {
+        int compare = compare(java.lang.reflect.Array.get(o1, i),
+                              java.lang.reflect.Array.get(o2, i),
+                              elementType, equals);
+        if (compare != 0) return compare;
+      }
+      return l1 - l2;
+    case BYTES:
+      if (!o1.getClass().isArray())
+        break;
+      byte[] b1 = (byte[])o1; 
+      byte[] b2 = (byte[])o2; 
+      return BinaryData.compareBytes(b1, 0, b1.length, b2, 0, b2.length);
+    }
+    return super.compare(o1, o2, s, equals);
+  }
+
+  @Override
+  protected Object getRecordState(Object record, Schema schema) {
+    return getFieldAccessors(record.getClass(), schema);
+  }
+  
+  private void consumeAvroAliasAnnotation(Class<?> c, Schema schema) {
+    AvroAlias alias = c.getAnnotation(AvroAlias.class);
+    if (alias != null) {
+      String space = alias.space();
+      if (AvroAlias.NULL.equals(space))
+        space = null;
+      schema.addAlias(alias.alias(), space);
+    }
+  }
+
+  @Override
+  public Object createFixed(Object old, Schema schema) {
+    // SpecificData will try to instantiate the type returned by getClass, but
+    // that is the converted class and can't be constructed.
+    LogicalType logicalType = schema.getLogicalType();
+    if (logicalType != null) {
+      Conversion<?> conversion = getConversionFor(schema.getLogicalType());
+      if (conversion != null) {
+        return new GenericData.Fixed(schema);
+      }
+    }
+    return super.createFixed(old, schema);
+  }
+
+  @Override
+  public Object newRecord(Object old, Schema schema) {
+    // SpecificData will try to instantiate the type returned by getClass, but
+    // that is the converted class and can't be constructed.
+    LogicalType logicalType = schema.getLogicalType();
+    if (logicalType != null) {
+      Conversion<?> conversion = getConversionFor(schema.getLogicalType());
+      if (conversion != null) {
+        return new GenericData.Record(schema);
+      }
+    }
+    return super.newRecord(old, schema);
+  }
+}
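
For context, a minimal sketch of how the ReflectData API above can be used to
induce a schema from a plain Java class. The User class and its fields are
hypothetical and exist only for illustration.

    import org.apache.avro.Schema;
    import org.apache.avro.reflect.AvroIgnore;
    import org.apache.avro.reflect.AvroName;
    import org.apache.avro.reflect.Nullable;
    import org.apache.avro.reflect.ReflectData;

    public class ReflectSchemaExample {
      static class User {                  // hypothetical record class
        String name;                       // -> required string field
        @Nullable Integer age;             // -> union of null and int
        @AvroName("mail") String email;    // -> field renamed to "mail"
        @AvroIgnore String sessionToken;   // -> omitted from the schema
      }

      public static void main(String[] args) {
        Schema schema = ReflectData.get().getSchema(User.class);
        System.out.println(schema.toString(true));  // pretty-printed record schema
      }
    }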
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumReader.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumReader.java
new file mode 100644
index 0000000..ee12180
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumReader.java
@@ -0,0 +1,312 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.io.IOException;
+import java.lang.reflect.Array;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Map;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Conversion;
+import org.apache.avro.LogicalType;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.generic.IndexedRecord;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.ResolvingDecoder;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.specific.SpecificDatumReader;
+
+/**
+ * {@link org.apache.avro.io.DatumReader DatumReader} for existing classes via
+ * Java reflection.
+ */
+public class ReflectDatumReader<T> extends SpecificDatumReader<T> {
+  public ReflectDatumReader() {
+    this(null, null, ReflectData.get());
+  }
+
+  /** Construct for reading instances of a class. */
+  public ReflectDatumReader(Class<T> c) {
+    this(new ReflectData(c.getClassLoader()));
+    setSchema(getSpecificData().getSchema(c));
+  }
+
+  /** Construct where the writer's and reader's schemas are the same. */
+  public ReflectDatumReader(Schema root) {
+    this(root, root, ReflectData.get());
+  }
+
+  /** Construct given writer's and reader's schema. */
+  public ReflectDatumReader(Schema writer, Schema reader) {
+    this(writer, reader, ReflectData.get());
+  }
+
+  /** Construct given writer's and reader's schema and the data model. */
+  public ReflectDatumReader(Schema writer, Schema reader, ReflectData data) {
+    super(writer, reader, data);
+  }
+
+  /** Construct given a {@link ReflectData}. */
+  public ReflectDatumReader(ReflectData data) {
+    super(data);
+  }
+
+  @Override
+  protected Object newArray(Object old, int size, Schema schema) {
+    Class<?> collectionClass =
+      ReflectData.getClassProp(schema, SpecificData.CLASS_PROP);
+    Class<?> elementClass =
+      ReflectData.getClassProp(schema, SpecificData.ELEMENT_PROP);
+
+    if (elementClass == null) {
+      // see if the element class will be converted and use that class
+      // logical types cannot conflict with java-element-class
+      Conversion<?> elementConversion = getData()
+          .getConversionFor(schema.getElementType().getLogicalType());
+      if (elementConversion != null) {
+        elementClass = elementConversion.getConvertedType();
+      }
+    }
+
+    if (collectionClass == null && elementClass == null)
+      return super.newArray(old, size, schema);   // use specific/generic
+
+    if (collectionClass != null && !collectionClass.isArray()) {
+      if (old instanceof Collection) {
+        ((Collection<?>)old).clear();
+        return old;
+      }
+      if (collectionClass.isAssignableFrom(ArrayList.class))
+        return new ArrayList<Object>();
+      return SpecificData.newInstance(collectionClass, schema);
+    }
+
+    if (elementClass == null) {
+      elementClass = collectionClass.getComponentType();
+    }
+    if (elementClass == null) {
+      ReflectData data = (ReflectData)getData();
+      elementClass = data.getClass(schema.getElementType());
+    }
+    return Array.newInstance(elementClass, size);
+  }
+
+  @Override
+  protected Object peekArray(Object array) {
+    return null;
+  }
+
+  @Override
+  protected void addToArray(Object array, long pos, Object e) {
+    throw new AvroRuntimeException("reflectDatumReader does not use addToArray");
+  }
+
+  /** Called to read an array instance.  May be overridden for alternate array
+   * representations.*/
+  @Override
+  protected Object readArray(Object old, Schema expected, ResolvingDecoder in)
+      throws IOException {
+    Schema expectedType = expected.getElementType();
+    long l = in.readArrayStart();
+    if (l <= 0) {
+      return newArray(old, 0, expected);
+    }
+    Object array = newArray(old, (int) l, expected);
+    if (array instanceof Collection) {
+      @SuppressWarnings("unchecked")
+      Collection<Object> c = (Collection<Object>) array;
+      return readCollection(c, expectedType, l, in);
+    } else if (array instanceof Map) {
+      // Only maps with non-string keys use the NS_MAP_* fields,
+      // so we check for that case explicitly here.
+      if (ReflectData.isNonStringMapSchema(expected)) {
+        Collection<Object> c = new ArrayList<Object> ();
+        readCollection(c, expectedType, l, in);
+        Map m = (Map)array;
+        for (Object ele: c) {
+          IndexedRecord rec = ((IndexedRecord)ele);
+          Object key = rec.get(ReflectData.NS_MAP_KEY_INDEX);
+          Object value = rec.get(ReflectData.NS_MAP_VALUE_INDEX);
+          m.put (key, value);
+        }
+        return array;
+      } else {
+        String msg = "Expected a schema of map with non-string keys but got " + expected;
+        throw new AvroRuntimeException(msg);
+      }
+    } else {
+      return readJavaArray(array, expectedType, l, in);
+    }
+  }
+
+  private Object readJavaArray(Object array, Schema expectedType, long l,
+      ResolvingDecoder in) throws IOException {
+    Class<?> elementType = array.getClass().getComponentType();
+    if (elementType.isPrimitive()) {
+      return readPrimitiveArray(array, elementType, l, in);
+    } else {
+      return readObjectArray((Object[]) array, expectedType, l, in);
+    }
+  }
+
+  private Object readPrimitiveArray(Object array, Class<?> c, long l, ResolvingDecoder in)
+      throws IOException {
+    return ArrayAccessor.readArray(array, c, l, in);
+  }
+
+  private Object readObjectArray(Object[] array, Schema expectedType, long l,
+      ResolvingDecoder in) throws IOException {
+    LogicalType logicalType = expectedType.getLogicalType();
+    Conversion<?> conversion = getData().getConversionFor(logicalType);
+    int index = 0;
+    if (logicalType != null && conversion != null) {
+      do {
+        int limit = index + (int) l;
+        while (index < limit) {
+          Object element = readWithConversion(
+              null, expectedType, logicalType, conversion, in);
+          array[index] = element;
+          index++;
+        }
+      } while ((l = in.arrayNext()) > 0);
+    } else {
+      do {
+        int limit = index + (int) l;
+        while (index < limit) {
+          Object element = readWithoutConversion(null, expectedType, in);
+          array[index] = element;
+          index++;
+        }
+      } while ((l = in.arrayNext()) > 0);
+    }
+    return array;
+  }
+
+  private Object readCollection(Collection<Object> c, Schema expectedType,
+      long l, ResolvingDecoder in) throws IOException {
+    LogicalType logicalType = expectedType.getLogicalType();
+    Conversion<?> conversion = getData().getConversionFor(logicalType);
+    if (logicalType != null && conversion != null) {
+      do {
+        for (int i = 0; i < l; i++) {
+          Object element = readWithConversion(
+              null, expectedType, logicalType, conversion, in);
+          c.add(element);
+        }
+      } while ((l = in.arrayNext()) > 0);
+    } else {
+      do {
+        for (int i = 0; i < l; i++) {
+          Object element = readWithoutConversion(null, expectedType, in);
+          c.add(element);
+        }
+      } while ((l = in.arrayNext()) > 0);
+    }
+    return c;
+  }
+
+  @Override
+  protected Object readString(Object old, Decoder in) throws IOException {
+    return super.readString(null, in).toString();
+  }
+
+  @Override
+  protected Object createString(String value) { return value; }
+
+  @Override
+  protected Object readBytes(Object old, Schema s, Decoder in)
+    throws IOException {
+    ByteBuffer bytes = in.readBytes(null);
+    Class<?> c = ReflectData.getClassProp(s, SpecificData.CLASS_PROP);
+    if (c != null && c.isArray()) {
+      byte[] result = new byte[bytes.remaining()];
+      bytes.get(result);
+      return result;
+    } else {
+      return bytes;
+    }
+  }
+
+  @Override
+  protected Object readInt(Object old,
+                           Schema expected, Decoder in) throws IOException {
+    Object value = in.readInt();
+    String intClass = expected.getProp(SpecificData.CLASS_PROP);
+    if (Byte.class.getName().equals(intClass))
+      value = ((Integer)value).byteValue();
+    else if (Short.class.getName().equals(intClass))
+      value = ((Integer)value).shortValue();
+    else if (Character.class.getName().equals(intClass))
+        value = ((Character)(char)(int)(Integer)value);
+    return value;
+  }
+
+  @Override
+  protected void readField(Object record, Field f, Object oldDatum,
+      ResolvingDecoder in, Object state) throws IOException {
+    if (state != null) {
+      FieldAccessor accessor = ((FieldAccessor[]) state)[f.pos()];
+      if (accessor != null) {
+        if (accessor.supportsIO()
+            && (!Schema.Type.UNION.equals(f.schema().getType())
+                || accessor.isCustomEncoded())) {
+          accessor.read(record, in);
+          return;
+        }
+        if (accessor.isStringable()) {
+          try {
+            String asString = (String) read(null, f.schema(), in);
+            accessor.set(record, asString == null 
+              ? null
+              : newInstanceFromString(accessor.getField().getType(), asString));
+            return;
+          } catch (Exception e) {
+            throw new AvroRuntimeException("Failed to read Stringable", e);
+          } 
+        }
+        LogicalType logicalType = f.schema().getLogicalType();
+        if (logicalType != null) {
+          Conversion<?> conversion = getData().getConversionByClass(
+              accessor.getField().getType(), logicalType);
+          if (conversion != null) {
+            try {
+              accessor.set(record, convert(
+                  readWithoutConversion(oldDatum, f.schema(), in),
+                  f.schema(), logicalType, conversion));
+            } catch (IllegalAccessException e) {
+              throw new AvroRuntimeException("Failed to set " + f);
+            }
+            return;
+          }
+        }
+        try {
+          accessor.set(record,
+              readWithoutConversion(oldDatum, f.schema(), in));
+          return;
+        } catch (IllegalAccessException e) {
+          throw new AvroRuntimeException("Failed to set " + f);
+        }
+      }
+    }
+    super.readField(record, f, oldDatum, in, state);
+  }
+}
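
A minimal round-trip sketch of the reader above paired with ReflectDatumWriter
(defined in the next file); the Point class is hypothetical.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import org.apache.avro.Schema;
    import org.apache.avro.io.Decoder;
    import org.apache.avro.io.DecoderFactory;
    import org.apache.avro.io.Encoder;
    import org.apache.avro.io.EncoderFactory;
    import org.apache.avro.reflect.ReflectData;
    import org.apache.avro.reflect.ReflectDatumReader;
    import org.apache.avro.reflect.ReflectDatumWriter;

    public class ReflectRoundTrip {
      static class Point { int x; int y; }  // hypothetical record class

      public static void main(String[] args) throws IOException {
        Schema schema = ReflectData.get().getSchema(Point.class);

        Point p = new Point();
        p.x = 3; p.y = 4;

        // write the object with the reflect writer
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        Encoder encoder = EncoderFactory.get().binaryEncoder(bytes, null);
        new ReflectDatumWriter<Point>(schema).write(p, encoder);
        encoder.flush();

        // read it back with the reflect reader
        Decoder decoder = DecoderFactory.get()
            .binaryDecoder(new ByteArrayInputStream(bytes.toByteArray()), null);
        Point copy = new ReflectDatumReader<Point>(schema).read(null, decoder);
        System.out.println(copy.x + "," + copy.y);  // prints 3,4
      }
    }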
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumWriter.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumWriter.java
new file mode 100644
index 0000000..85e3c55
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectDatumWriter.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Map;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.specific.SpecificDatumWriter;
+
+/**
+ * {@link org.apache.avro.io.DatumWriter DatumWriter} for existing classes
+ * via Java reflection.
+ */
+public class ReflectDatumWriter<T> extends SpecificDatumWriter<T> {
+  public ReflectDatumWriter() {
+    this(ReflectData.get());
+  }
+
+  public ReflectDatumWriter(Class<T> c) {
+    this(c, ReflectData.get());
+  }
+
+  public ReflectDatumWriter(Class<T> c, ReflectData data) {
+    this(data.getSchema(c), data);
+  }
+
+  public ReflectDatumWriter(Schema root) {
+    this(root, ReflectData.get());
+  }
+
+  public ReflectDatumWriter(Schema root, ReflectData reflectData) {
+    super(root, reflectData);
+  }
+  
+  protected ReflectDatumWriter(ReflectData reflectData) {
+    super(reflectData);
+  }
+
+  /** Called to write an array.  May be overridden for alternate array
+   * representations.*/
+  @Override
+  protected void writeArray(Schema schema, Object datum, Encoder out)
+    throws IOException {
+    if (datum instanceof Collection) {
+      super.writeArray(schema, datum, out);
+      return;
+    }
+    Class<?> elementClass = datum.getClass().getComponentType();
+    if (null == elementClass) {
+      // not a Collection or an Array
+      throw new AvroRuntimeException("Array data must be a Collection or Array");
+    } 
+    Schema element = schema.getElementType();
+    if (elementClass.isPrimitive()) {
+      Schema.Type type = element.getType();
+      out.writeArrayStart();
+      switch(type) {
+      case BOOLEAN:
+        ArrayAccessor.writeArray((boolean[]) datum, out);
+        break;
+      case DOUBLE:
+        ArrayAccessor.writeArray((double[]) datum, out);
+        break;
+      case FLOAT:
+        ArrayAccessor.writeArray((float[]) datum, out);
+        break;
+      case INT:
+        if(elementClass.equals(int.class)) {
+          ArrayAccessor.writeArray((int[]) datum, out);
+        } else if(elementClass.equals(char.class)) {
+          ArrayAccessor.writeArray((char[]) datum, out);
+        } else if(elementClass.equals(short.class)) {
+          ArrayAccessor.writeArray((short[]) datum, out);
+        } else {
+          arrayError(elementClass, type);
+        }
+        break;
+      case LONG:
+        ArrayAccessor.writeArray((long[]) datum, out);
+        break;
+      default:
+        arrayError(elementClass, type);
+      }
+      out.writeArrayEnd();
+    } else {
+      out.writeArrayStart();
+      writeObjectArray(element, (Object[]) datum, out);
+      out.writeArrayEnd();
+    }
+  }
+  
+  private void writeObjectArray(Schema element, Object[] data, Encoder out) throws IOException {
+    int size = data.length;
+    out.setItemCount(size);
+    for (int i = 0; i < size; i++) {
+      this.write(element, data[i], out);
+    }
+  }
+    
+  private void arrayError(Class<?> cl, Schema.Type type) {
+    throw new AvroRuntimeException("Error writing array with inner type " +
+      cl + " and avro type: " + type);
+  }
+  
+  @Override
+  protected void writeBytes(Object datum, Encoder out) throws IOException {
+    if (datum instanceof byte[])
+      out.writeBytes((byte[])datum);
+    else
+      super.writeBytes(datum, out);
+  }
+
+  @Override
+  protected void write(Schema schema, Object datum, Encoder out)
+    throws IOException {
+    if (datum instanceof Byte)
+      datum = ((Byte)datum).intValue();
+    else if (datum instanceof Short)
+      datum = ((Short)datum).intValue();
+    else if (datum instanceof Character)
+        datum = (int)(char)(Character)datum;
+    else if (datum instanceof Map && ReflectData.isNonStringMapSchema(schema)) {
+      // Maps with non-string keys are written as arrays.
+      // The schema for such maps has already been changed, so here we
+      // just switch the map to a matching entry-set view.
+      datum = ((Map)datum).entrySet();
+    }
+    try {
+      super.write(schema, datum, out);
+    } catch (NullPointerException e) {            // improve error message
+      NullPointerException result =
+        new NullPointerException("in "+schema.getFullName()+" "+e.getMessage());
+      result.initCause(e.getCause() == null ? e : e.getCause());
+      throw result;
+    }
+  }
+
+  @Override
+  protected void writeField(Object record, Field f, Encoder out, Object state)
+      throws IOException {
+    if (state != null) {
+      FieldAccessor accessor = ((FieldAccessor[]) state)[f.pos()];
+      if (accessor != null) {
+        if (accessor.supportsIO()
+            && (!Schema.Type.UNION.equals(f.schema().getType())
+                || accessor.isCustomEncoded())) {
+          accessor.write(record, out);
+          return;
+        }
+        if (accessor.isStringable()) {
+          try {
+            Object object = accessor.get(record);
+            write(f.schema(), (object == null) ? null : object.toString(), out);
+          } catch (IllegalAccessException e) {
+            throw new AvroRuntimeException("Failed to write Stringable", e);
+          }
+          return;
+        }  
+      }
+    }
+    super.writeField(record, f, out, state);
+  }
+}
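
As the write() override above shows, byte, short and char values are promoted
to ints on the wire, with the original Java class recorded in the schema's
"java-class" property so the reader can narrow them again. A small sketch
(the Sample class is hypothetical):

    import org.apache.avro.Schema;
    import org.apache.avro.reflect.ReflectData;

    public class PromotionExample {
      static class Sample { short s; byte b; }  // hypothetical record class

      public static void main(String[] args) {
        Schema schema = ReflectData.get().getSchema(Sample.class);
        // each prints an int schema carrying a "java-class" property,
        // e.g. {"type": "int", "java-class": "java.lang.Short"}
        System.out.println(schema.getField("s").schema());
        System.out.println(schema.getField("b").schema());
      }
    }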
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectionUtil.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectionUtil.java
new file mode 100644
index 0000000..1457cdb
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/ReflectionUtil.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import org.apache.avro.AvroRuntimeException;
+
+/**
+ * A few utility methods for using {@link sun.misc.Unsafe}, mostly for private
+ * use.
+ * 
+ * Use of Unsafe on Android is forbidden, as Android provides only very
+ * limited functionality for this class compared to the JDK version.
+ * 
+ */
+class ReflectionUtil {
+
+  private ReflectionUtil() {
+  }
+
+  private static FieldAccess fieldAccess;
+  static {
+    resetFieldAccess();
+  }
+  static void resetFieldAccess() {
+    // load only one implementation of FieldAccess
+    // so it is monomorphic and the JIT can inline
+    FieldAccess access = null;
+    try {
+      if (null == System.getProperty("avro.disable.unsafe")) {
+        FieldAccess unsafeAccess = load(
+            "org.apache.avro.reflect.FieldAccessUnsafe", FieldAccess.class);
+        if (validate(unsafeAccess)) {
+          access = unsafeAccess;
+        }
+      }
+    } catch (Throwable ignored) {
+    }
+    if (access == null) {
+      try {
+        FieldAccess reflectAccess = load(
+            "org.apache.avro.reflect.FieldAccessReflect", FieldAccess.class);
+        if (validate(reflectAccess)) {
+          access = reflectAccess;
+        }
+      } catch (Throwable oops) {
+        throw new AvroRuntimeException(
+            "Unable to load a functional FieldAccess class!");
+      }
+    }
+    fieldAccess = access;
+  }
+
+  private static <T> T load(String name, Class<T> type) throws Exception {
+    return ReflectionUtil.class.getClassLoader().loadClass(name)
+        .asSubclass(type).newInstance();
+  }
+
+  public static FieldAccess getFieldAccess() {
+    return fieldAccess;
+  }
+
+  private static boolean validate(FieldAccess access) throws Exception {
+    return new AccessorTestClass().validate(access);
+  }
+
+  private static final class AccessorTestClass {
+    private boolean b = true;
+    protected byte by = 0xf;
+    public char c = 'c';
+    short s = 123;
+    int i = 999;
+    long l = 12345L;
+    float f = 2.2f;
+    double d = 4.4d;
+    Object o = "foo";
+    Integer i2 = 555;
+
+    private boolean validate(FieldAccess access) throws Exception {
+      boolean valid = true;
+      valid &= validField(access, "b", b, false);
+      valid &= validField(access, "by", by, (byte) 0xaf);
+      valid &= validField(access, "c", c, 'C');
+      valid &= validField(access, "s", s, (short) 321);
+      valid &= validField(access, "i", i, 111);
+      valid &= validField(access, "l", l, 54321L);
+      valid &= validField(access, "f", f, 0.2f);
+      valid &= validField(access, "d", d, 0.4d);
+      valid &= validField(access, "o", o, new Object());
+      valid &= validField(access, "i2", i2, -555);
+      return valid;
+    }
+
+    private boolean validField(FieldAccess access, String name,
+        Object original, Object toSet) throws Exception {
+      FieldAccessor a;
+      boolean valid = true;
+      a = accessor(access, name);
+      valid &= original.equals(a.get(this));
+      a.set(this, toSet);
+      valid &= !original.equals(a.get(this));
+      return valid;
+    }
+
+    private FieldAccessor accessor(FieldAccess access, String name)
+        throws Exception {
+      return access.getAccessor(this.getClass().getDeclaredField(name));
+    }
+  }
+
+}
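
A short sketch of opting out of the Unsafe-based accessors: any non-null
value of the avro.disable.unsafe system property, set before the reflect
classes are first loaded, makes ReflectionUtil fall back to
FieldAccessReflect.

    public class DisableUnsafeExample {
      public static void main(String[] args) {
        // must run before any Avro reflect class is initialized;
        // equivalently, pass -Davro.disable.unsafe=true on the command line
        System.setProperty("avro.disable.unsafe", "true");
        // ... use ReflectData / ReflectDatumReader / ReflectDatumWriter as usual
      }
    }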
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/Stringable.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/Stringable.java
new file mode 100644
index 0000000..840cc6a
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/Stringable.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Declares that a class or field should be represented by an Avro string.  Its {@link
+ * Object#toString()} method will be used to convert it to a string, and its
+ * single String parameter constructor will be used to create instances.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.FIELD})
+@Documented
+public @interface Stringable {}
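
A sketch of a @Stringable type honoring the contract above: toString() for
serialization and a single-String constructor for deserialization. The
RationalNumber class is hypothetical.

    import org.apache.avro.reflect.Stringable;

    @Stringable
    public class RationalNumber {
      private final int num, den;

      public RationalNumber(String value) {   // used when deserializing
        String[] parts = value.split("/");
        this.num = Integer.parseInt(parts[0]);
        this.den = Integer.parseInt(parts[1]);
      }

      public RationalNumber(int num, int den) { this.num = num; this.den = den; }

      @Override public String toString() { return num + "/" + den; }  // used when serializing
    }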
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/Union.java b/lang/java/avro/src/main/java/org/apache/avro/reflect/Union.java
new file mode 100644
index 0000000..b22fa66
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/Union.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Declares that a Java type should be represented by an Avro union schema.
+ * May be used for base classes or interfaces whose instantiable subclasses can
+ * be listed in the parameters to the @Union annotation.  If applied to method
+ * parameters this determines the reflected message parameter type.  If applied
+ * to a method, this determines its return type.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE, ElementType.PARAMETER, ElementType.METHOD})
+@Documented
+public @interface Union {
+  /** The instantiable classes that compose this union. */
+  Class[] value();
+}
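
A sketch of the annotation applied to a polymorphic base type; Shape, Circle
and Square are hypothetical classes.

    import org.apache.avro.Schema;
    import org.apache.avro.reflect.ReflectData;
    import org.apache.avro.reflect.Union;

    public class UnionExample {
      @Union({Circle.class, Square.class})     // instantiable implementations
      interface Shape {}
      static class Circle implements Shape { double radius; }
      static class Square implements Shape { double side; }

      public static void main(String[] args) {
        Schema schema = ReflectData.get().getSchema(Shape.class);
        System.out.println(schema);            // a union of the two record schemas
      }
    }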
diff --git a/lang/java/avro/src/main/java/org/apache/avro/reflect/package.html b/lang/java/avro/src/main/java/org/apache/avro/reflect/package.html
new file mode 100644
index 0000000..dc6dbb9
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/reflect/package.html
@@ -0,0 +1,101 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+Use Java reflection to generate schemas and protocols for existing
+classes.
+
+<p>Java types are mapped to Avro schemas as follows:
+
+<ul>
+
+<li><b>Classes</b> are mapped to Avro records. Only concrete classes
+  with a no-argument constructor are supported.  All inherited fields
+  that are not static or transient are used.  Fields are not permitted
+  to be null unless annotated by {@link
+  org.apache.avro.reflect.Nullable Nullable} or a {@link
+  org.apache.avro.reflect.Union Union} containing null.</li>
+
+<li><b>Arrays</b> are mapped to Avro array schemas.  If an array's
+  elements are a union defined by the {@link
+  org.apache.avro.reflect.Union Union} annotation, the "java-element"
+  property is set to the union's class, e.g.:
+  <pre>{"type": "array", "java-element": "org.acme.Foo"}</pre>
+</li>
+
+<li><b>Collection</b> implementations are mapped to Avro array schemas
+  with the "java-class" property set to the collection
+  implementation, e.g.:
+  <pre>{"type": "array", "java-class": "java.util.ArrayList"}</pre>
+</li>
+
+<li><b>{@link java.lang.String}</b> is mapped to an Avro string schema.</li>
+
+<li><b>byte[]</b> is mapped to an Avro bytes schema.</li>
+
+<li><b>short</b> is mapped to an Avro int schema with the "java-class"
+  property set to "java.lang.Short", e.g.:
+  <pre>{"type": "int", "java-class": "java.lang.Short"}</pre>
+
+<li><b>{@link java.math.BigDecimal}, {@link java.math.BigInteger},
+  {@link java.net.URI}, {@link java.net.URL}, {@link java.io.File}</b>
+  are mapped to an Avro string schema as
+  {@link org.apache.avro.reflect.Stringable Stringable} types and
+  serialized via their {@link java.lang.Object#toString() toString}
+  method and deserialized via their {@link java.lang.String} constructor.
+  This is done via the "java-class", "java-key-class" or
+  "java-element-class" property, depending on whether it is a field, a map
+  key, or a list/map element, e.g.:
+  <pre>{"type": "string", "java-class": "java.math.BigDecimal"}</pre></li>
+
+<li>All other types are mapped as in the {@link org.apache.avro.generic
+  generic} API.</li>
+
+</ul>
+
+<p>The {@link org.apache.avro.reflect.Union Union} annotation can be used
+to support reflection of schemas for interfaces, abstract base classes
+and other uses of polymorphism.
+
+<p>The {@link org.apache.avro.reflect.Stringable Stringable} annotation
+will cause a type to be serialized via its {@link java.lang.Object#toString()
+toString} method.
+
+<p>Fields annotated with {@link org.apache.avro.reflect.AvroIgnore AvroIgnore}
+will be neither written nor read.
+
+<p>The {@link org.apache.avro.reflect.AvroName AvroName} annotation renames
+the field in the schema to the given name. The reflect datum reader will look
+for a schema field with the given name when reading into such an annotated
+Java field.
+
+<p>The {@link org.apache.avro.reflect.AvroMeta AvroMeta} annotation adds an
+arbitrary key:value pair to the schema at the node of the Java field.
+
+<p>The {@link org.apache.avro.reflect.AvroSchema AvroSchema} annotation forces
+the use of a custom schema.
+
+<p>The {@link org.apache.avro.reflect.AvroEncode AvroEncode} annotation forces
+the use of a custom encoder. This annotation overrides
+{@link org.apache.avro.reflect.Stringable Stringable} and 
+{@link org.apache.avro.reflect.Nullable Nullable}.
+
+
+</body>
+</html>
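
A sketch combining the field-level annotations described above; the Account
class and its fields are hypothetical.

    import org.apache.avro.reflect.AvroDefault;
    import org.apache.avro.reflect.AvroMeta;
    import org.apache.avro.reflect.AvroSchema;

    public class Account {
      @AvroSchema("[\"null\", \"string\"]")    // explicit schema instead of the induced one
      String nickname;

      @AvroMeta(key = "sensitivity", value = "low")  // extra key:value property on the field
      String owner;

      @AvroDefault("0")                        // default value recorded in the schema
      long balance;
    }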
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/AvroGenerated.java b/lang/java/avro/src/main/java/org/apache/avro/specific/AvroGenerated.java
new file mode 100644
index 0000000..e57d27e
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/AvroGenerated.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Indicates that an annotated class is an Avro generated class. All Avro 
+ * generated classes will be annotated with this annotation. 
+ */
+@Target(ElementType.TYPE)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface AvroGenerated {
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/ExternalizableInput.java b/lang/java/avro/src/main/java/org/apache/avro/specific/ExternalizableInput.java
new file mode 100644
index 0000000..d09c449
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/ExternalizableInput.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.specific;
+
+import java.io.InputStream;
+import java.io.IOException;
+import java.io.ObjectInput;
+
+/** Helper to permit Externalizable implementations that read from an
+ * InputStream. */
+class ExternalizableInput extends InputStream {
+  private final ObjectInput in;
+
+  public ExternalizableInput(ObjectInput in) { this.in = in; }
+
+  @Override public int available() throws IOException { return in.available(); }
+
+  @Override public void close() throws IOException { in.close(); }
+
+  @Override public boolean markSupported() { return false; }
+
+  @Override public int read() throws IOException { return in.read(); }
+
+  @Override public int read(byte[] b) throws IOException { return in.read(b); }
+
+  @Override
+  public int read(byte[] b, int offset, int len) throws IOException {
+    return in.read(b, offset, len);
+  }
+  @Override
+  public long skip(long n) throws IOException {
+    return in.skip(n);
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/ExternalizableOutput.java b/lang/java/avro/src/main/java/org/apache/avro/specific/ExternalizableOutput.java
new file mode 100644
index 0000000..aa90e61
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/ExternalizableOutput.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.specific;
+
+import java.io.OutputStream;
+import java.io.IOException;
+import java.io.ObjectOutput;
+
+/** Helper to permit Externalizable implementations that write to an
+ * OutputStream. */
+class ExternalizableOutput extends OutputStream {
+  private final ObjectOutput out;
+
+  public ExternalizableOutput(ObjectOutput out) { this.out = out; }
+
+  @Override public void flush() throws IOException { out.flush(); }
+
+  @Override public void close() throws IOException { out.close(); }
+
+  @Override public void write(int c) throws IOException { out.write(c); }
+
+  @Override public void write(byte[] b) throws IOException { out.write(b); }
+
+  @Override
+  public void write(byte[] b, int offset, int len) throws IOException {
+    out.write(b, offset, len);
+  }
+}
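
A sketch of how these two package-private adapters can bridge Java
serialization streams to Avro's encoder and decoder factories; since the
helpers are package-private, such code must live in org.apache.avro.specific.

    package org.apache.avro.specific;

    import java.io.ObjectInput;
    import java.io.ObjectOutput;

    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.DecoderFactory;
    import org.apache.avro.io.EncoderFactory;

    class ExternalizableSketch {
      // wrap the ObjectOutput of writeExternal() as a plain OutputStream
      static BinaryEncoder encoderFor(ObjectOutput out) {
        return EncoderFactory.get().directBinaryEncoder(new ExternalizableOutput(out), null);
      }

      // wrap the ObjectInput of readExternal() as a plain InputStream
      static BinaryDecoder decoderFor(ObjectInput in) {
        return DecoderFactory.get().directBinaryDecoder(new ExternalizableInput(in), null);
      }
    }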
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/FixedSize.java b/lang/java/avro/src/main/java/org/apache/avro/specific/FixedSize.java
new file mode 100644
index 0000000..8ccd185
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/FixedSize.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Declares the size of implementations of
+ * {@link org.apache.avro.generic.GenericFixed GenericFixed}.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE})
+@Documented
+public @interface FixedSize {
+  /** The declared size of instances of classes with this annotation. */
+  int value();
+}
+
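
A sketch of a fixed type whose size ReflectData reads from this annotation;
the MD5 class is hypothetical.

    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericFixed;
    import org.apache.avro.reflect.ReflectData;
    import org.apache.avro.specific.FixedSize;

    @FixedSize(16)
    public class MD5 implements GenericFixed {
      private byte[] bytes = new byte[16];
      @Override public byte[] bytes() { return bytes; }
      @Override public Schema getSchema() {
        return ReflectData.get().getSchema(MD5.class);  // a 16-byte fixed schema
      }
    }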
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificData.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificData.java
new file mode 100644
index 0000000..647f823
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificData.java
@@ -0,0 +1,380 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Collection;
+import java.util.List;
+import java.util.Set;
+import java.util.WeakHashMap;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.LinkedHashMap;
+import java.nio.ByteBuffer;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.ParameterizedType;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Protocol;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.util.ClassUtils;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.BinaryDecoder;
+
+/** Utilities for generated Java classes and interfaces. */
+public class SpecificData extends GenericData {
+
+  private static final SpecificData INSTANCE = new SpecificData();
+  
+  private static final Class<?>[] NO_ARG = new Class[]{};
+  private static final Class<?>[] SCHEMA_ARG = new Class[]{Schema.class};
+  private static final Map<Class,Constructor> CTOR_CACHE =
+    new ConcurrentHashMap<Class,Constructor>();
+
+  public static final String CLASS_PROP = "java-class";
+  public static final String KEY_CLASS_PROP = "java-key-class";
+  public static final String ELEMENT_PROP = "java-element-class";
+
+  /** List of Java reserved words from
+   * http://docs.oracle.com/javase/specs/jls/se8/html/jls-3.html#jls-3.9
+   * combined with the boolean and null literals and the classnames used
+   * internally in the generated Avro code.
+   */
+  public static final Set<String> RESERVED_WORDS = new HashSet<String>
+    (Arrays.asList(new String[] {
+        "abstract", "assert", "boolean", "break", "byte", "case", "catch",
+        "char", "class", "const", "continue", "default", "do", "double",
+        "else", "enum", "extends", "false", "final", "finally", "float",
+        "for", "goto", "if", "implements", "import", "instanceof", "int",
+        "interface", "long", "native", "new", "null", "package", "private",
+        "protected", "public", "return", "short", "static", "strictfp",
+        "super", "switch", "synchronized", "this", "throw", "throws",
+        "transient", "true", "try", "void", "volatile", "while",
+        /* classnames used internally by the avro code generator */
+        "Builder"
+      }));
+
+  /** Read/write some common builtin classes as strings.  Representing these as
+   * strings isn't always best, as they aren't always ordered ideally, but at
+   * least they're stored.  Also note that, for compatibility, only classes
+   * that wouldn't be otherwise correctly readable or writable should be added
+   * here, e.g., those without a no-arg constructor or those whose fields are
+   * all transient. */
+  protected Set<Class> stringableClasses = new HashSet<Class>();
+  {
+    stringableClasses.add(java.math.BigDecimal.class);
+    stringableClasses.add(java.math.BigInteger.class);
+    stringableClasses.add(java.net.URI.class);
+    stringableClasses.add(java.net.URL.class);
+    stringableClasses.add(java.io.File.class);
+  }
+
+  /** For subclasses.  Applications normally use {@link SpecificData#get()}. */
+  public SpecificData() {}
+
+  /** Construct with a specific classloader. */
+  public SpecificData(ClassLoader classLoader) {
+    super(classLoader);
+  }
+  
+  @Override
+  public DatumReader createDatumReader(Schema schema) {
+    return new SpecificDatumReader(schema, schema, this);
+  }
+
+  @Override
+  public DatumReader createDatumReader(Schema writer, Schema reader) {
+    return new SpecificDatumReader(writer, reader, this);
+  }
+
+  @Override
+  public DatumWriter createDatumWriter(Schema schema) {
+    return new SpecificDatumWriter(schema, this);
+  }
+
+  /** Return the singleton instance. */
+  public static SpecificData get() { return INSTANCE; }
+
+  @Override
+  protected boolean isEnum(Object datum) {
+    return datum instanceof Enum || super.isEnum(datum);
+  }
+
+  @Override
+  public Object createEnum(String symbol, Schema schema) {
+    Class c = getClass(schema);
+    if (c == null) return super.createEnum(symbol, schema); // punt to generic
+    if (RESERVED_WORDS.contains(symbol))
+      symbol += "$";
+    return Enum.valueOf(c, symbol);
+  }
+
+  @Override
+  protected Schema getEnumSchema(Object datum) {
+    return (datum instanceof Enum)
+      ? getSchema(datum.getClass())
+      : super.getEnumSchema(datum);
+  }
+
+  private Map<String,Class> classCache = new ConcurrentHashMap<String,Class>();
+
+  private static final Class NO_CLASS = new Object(){}.getClass();
+  private static final Schema NULL_SCHEMA = Schema.create(Schema.Type.NULL);
+
+  /** Return the class that implements a schema, or null if none exists. */
+  public Class getClass(Schema schema) {
+    switch (schema.getType()) {
+    case FIXED:
+    case RECORD:
+    case ENUM:
+      String name = schema.getFullName();
+      if (name == null) return null;
+      Class c = classCache.get(name);
+      if (c == null) {
+        try {
+          c = ClassUtils.forName(getClassLoader(), getClassName(schema));
+        } catch (ClassNotFoundException e) {
+          c = NO_CLASS;
+        }
+        classCache.put(name, c);
+      }
+      return c == NO_CLASS ? null : c;
+    case ARRAY:   return List.class;
+    case MAP:     return Map.class;
+    case UNION:
+      List<Schema> types = schema.getTypes();     // elide unions with null
+      if ((types.size() == 2) && types.contains(NULL_SCHEMA))
+        return getWrapper(types.get(types.get(0).equals(NULL_SCHEMA) ? 1 : 0));
+      return Object.class;
+    case STRING:
+      if (STRING_TYPE_STRING.equals(schema.getProp(STRING_PROP)))
+        return String.class;
+      return CharSequence.class;
+    case BYTES:   return ByteBuffer.class;
+    case INT:     return Integer.TYPE;
+    case LONG:    return Long.TYPE;
+    case FLOAT:   return Float.TYPE;
+    case DOUBLE:  return Double.TYPE;
+    case BOOLEAN: return Boolean.TYPE;
+    case NULL:    return Void.TYPE;
+    default: throw new AvroRuntimeException("Unknown type: "+schema);
+    }
+  }
+
+  private Class getWrapper(Schema schema) {
+    switch (schema.getType()) {
+    case INT:     return Integer.class;
+    case LONG:    return Long.class;
+    case FLOAT:   return Float.class;
+    case DOUBLE:  return Double.class;
+    case BOOLEAN: return Boolean.class;
+    }
+    return getClass(schema);
+  }
+
+  /** Returns the Java class name indicated by a schema's name and namespace. */
+  public static String getClassName(Schema schema) {
+    String namespace = schema.getNamespace();
+    String name = schema.getName();
+    if (namespace == null || "".equals(namespace))
+      return name;
+    String dot = namespace.endsWith("$") ? "" : ".";
+    return namespace + dot + name;
+  }
+
+  private final WeakHashMap<java.lang.reflect.Type,Schema> schemaCache =
+    new WeakHashMap<java.lang.reflect.Type,Schema>();
+
+  /** Find the schema for a Java type. */
+  public Schema getSchema(java.lang.reflect.Type type) {
+    Schema schema = schemaCache.get(type);
+    if (schema == null) {
+      schema = createSchema(type, new LinkedHashMap<String,Schema>());
+      schemaCache.put(type, schema);
+    }
+    return schema;
+  }
+
+  /** Create the schema for a Java type. */
+  @SuppressWarnings(value="unchecked")
+  protected Schema createSchema(java.lang.reflect.Type type,
+                                Map<String,Schema> names) {
+    if (type instanceof Class
+        && CharSequence.class.isAssignableFrom((Class)type))
+      return Schema.create(Type.STRING);
+    else if (type == ByteBuffer.class)
+      return Schema.create(Type.BYTES);
+    else if ((type == Integer.class) || (type == Integer.TYPE))
+      return Schema.create(Type.INT);
+    else if ((type == Long.class) || (type == Long.TYPE))
+      return Schema.create(Type.LONG);
+    else if ((type == Float.class) || (type == Float.TYPE))
+      return Schema.create(Type.FLOAT);
+    else if ((type == Double.class) || (type == Double.TYPE))
+      return Schema.create(Type.DOUBLE);
+    else if ((type == Boolean.class) || (type == Boolean.TYPE))
+      return Schema.create(Type.BOOLEAN);
+    else if ((type == Void.class) || (type == Void.TYPE))
+      return Schema.create(Type.NULL);
+    else if (type instanceof ParameterizedType) {
+      ParameterizedType ptype = (ParameterizedType)type;
+      Class raw = (Class)ptype.getRawType();
+      java.lang.reflect.Type[] params = ptype.getActualTypeArguments();
+      if (Collection.class.isAssignableFrom(raw)) { // array
+        if (params.length != 1)
+          throw new AvroTypeException("No array type specified.");
+        return Schema.createArray(createSchema(params[0], names));
+      } else if (Map.class.isAssignableFrom(raw)) {   // map
+        java.lang.reflect.Type key = params[0];
+        java.lang.reflect.Type value = params[1];
+        if (!(key instanceof Class
+              && CharSequence.class.isAssignableFrom((Class)key)))
+          throw new AvroTypeException("Map key class not CharSequence: "+key);
+        return Schema.createMap(createSchema(value, names));
+      } else {
+        return createSchema(raw, names);
+      }
+    } else if (type instanceof Class) {               // class
+      Class c = (Class)type;
+      String fullName = c.getName();
+      Schema schema = names.get(fullName);
+      if (schema == null)
+        try {
+          schema = (Schema)(c.getDeclaredField("SCHEMA$").get(null));
+
+          if (!fullName.equals(getClassName(schema)))
+            // HACK: schema mismatches class. maven shade plugin? try replacing.
+            schema = Schema.parse
+              (schema.toString().replace(schema.getNamespace(),
+                                         c.getPackage().getName()));
+        } catch (NoSuchFieldException e) {
+          throw new AvroRuntimeException("Not a Specific class: "+c);
+        } catch (IllegalAccessException e) {
+          throw new AvroRuntimeException(e);
+        }
+      names.put(fullName, schema);
+      return schema;
+    }
+    throw new AvroTypeException("Unknown type: "+type);
+  }
+
+  @Override
+  protected String getSchemaName(Object datum) {
+    if (datum != null) {
+      Class c = datum.getClass();
+      if (isStringable(c))
+        return Schema.Type.STRING.getName();
+    }
+    return super.getSchemaName(datum);
+  }
+
+  /** True iff a class should be serialized with toString(). */ 
+  protected boolean isStringable(Class<?> c) {
+    return stringableClasses.contains(c);
+  }
+
+  /** Return the protocol for a Java interface. */
+  public Protocol getProtocol(Class iface) {
+    try {
+      Protocol p = (Protocol)(iface.getDeclaredField("PROTOCOL").get(null));
+      if (!p.getNamespace().equals(iface.getPackage().getName()))
+        // HACK: protocol mismatches iface. maven shade plugin? try replacing.
+        p = Protocol.parse(p.toString().replace(p.getNamespace(),
+                                                iface.getPackage().getName()));
+      return p;
+   } catch (NoSuchFieldException e) {
+      throw new AvroRuntimeException("Not a Specific protocol: "+iface);
+    } catch (IllegalAccessException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  @Override
+  protected int compare(Object o1, Object o2, Schema s, boolean eq) {
+    switch (s.getType()) {
+    case ENUM:
+      if (o1 instanceof Enum)
+        return ((Enum)o1).ordinal() - ((Enum)o2).ordinal();
+    default:
+      return super.compare(o1, o2, s, eq);
+    }
+  }
+  
+  /** Create an instance of a class.  If the class implements {@link
+   * SchemaConstructable}, call a constructor with a {@link
+   * org.apache.avro.Schema} parameter, otherwise use a no-arg constructor. */
+  @SuppressWarnings("unchecked")
+  public static Object newInstance(Class c, Schema s) {
+    boolean useSchema = SchemaConstructable.class.isAssignableFrom(c);
+    Object result;
+    try {
+      Constructor meth = (Constructor)CTOR_CACHE.get(c);
+      if (meth == null) {
+        meth = c.getDeclaredConstructor(useSchema ? SCHEMA_ARG : NO_ARG);
+        meth.setAccessible(true);
+        CTOR_CACHE.put(c, meth);
+      }
+      result = meth.newInstance(useSchema ? new Object[]{s} : (Object[])null);
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+    return result;
+  }
+  
+  @Override
+  public Object createFixed(Object old, Schema schema) {
+    Class c = getClass(schema);
+    if (c == null) return super.createFixed(old, schema); // punt to generic
+    return c.isInstance(old) ? old : newInstance(c, schema);
+  }
+  
+  @Override
+  public Object newRecord(Object old, Schema schema) {
+    Class c = getClass(schema);
+    if (c == null) return super.newRecord(old, schema); // punt to generic
+    return (c.isInstance(old) ? old : newInstance(c, schema));
+  }
+
+  /** Tag interface that indicates that a class has a one-argument constructor
+   * that accepts a Schema.
+   * @see #newInstance
+   */
+  public interface SchemaConstructable {}
+
+  /** Runtime utility used by generated classes. */
+  public static BinaryDecoder getDecoder(ObjectInput in) {
+    return DecoderFactory.get()
+      .directBinaryDecoder(new ExternalizableInput(in), null);
+  }
+  /** Runtime utility used by generated classes. */
+  public static BinaryEncoder getEncoder(ObjectOutput out) {
+    return EncoderFactory.get()
+      .directBinaryEncoder(new ExternalizableOutput(out), null);
+  }
+
+}
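
A minimal usage sketch for the SpecificData class above; `example.User` is a
hypothetical generated class (with the usual SCHEMA$ field) used here only for
illustration and is not part of this patch:

    import org.apache.avro.Schema;
    import org.apache.avro.specific.SpecificData;

    // Hypothetical: example.User is a generated SpecificRecord class.
    SpecificData data = SpecificData.get();           // the shared singleton
    Schema schema = data.getSchema(User.class);       // reads User.SCHEMA$, cached
    Class c = data.getClass(schema);                  // resolves back to User.class
    String name = SpecificData.getClassName(schema);  // e.g. "example.User"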
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificDatumReader.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificDatumReader.java
new file mode 100644
index 0000000..ceffdd6
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificDatumReader.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import org.apache.avro.Schema;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.util.ClassUtils;
+
+/** {@link org.apache.avro.io.DatumReader DatumReader} for generated Java classes. */
+public class SpecificDatumReader<T> extends GenericDatumReader<T> {
+  public SpecificDatumReader() {
+    this(null, null, SpecificData.get());
+  }
+
+  /** Construct for reading instances of a class. */
+  public SpecificDatumReader(Class<T> c) {
+    this(new SpecificData(c.getClassLoader()));
+    setSchema(getSpecificData().getSchema(c));
+  }
+
+  /** Construct where the writer's and reader's schemas are the same. */
+  public SpecificDatumReader(Schema schema) {
+    this(schema, schema, SpecificData.get());
+  }
+
+  /** Construct given writer's and reader's schema. */
+  public SpecificDatumReader(Schema writer, Schema reader) {
+    this(writer, reader, SpecificData.get());
+  }
+
+  /** Construct given writer's schema, reader's schema, and a {@link
+   * SpecificData}. */
+  public SpecificDatumReader(Schema writer, Schema reader,
+                             SpecificData data) {
+    super(writer, reader, data);
+  }
+
+  /** Construct given a {@link SpecificData}. */
+  public SpecificDatumReader(SpecificData data) {
+    super(data);
+  }
+
+  /** Return the contained {@link SpecificData}. */
+  public SpecificData getSpecificData() { return (SpecificData)getData(); }
+
+  @Override
+  public void setSchema(Schema actual) {
+    // if expected is unset and actual is a specific record,
+    // then default expected to schema of currently loaded class
+    if (getExpected() == null && actual != null
+        && actual.getType() == Schema.Type.RECORD) {
+      SpecificData data = getSpecificData();
+      Class c = data.getClass(actual);
+      if (c != null && SpecificRecord.class.isAssignableFrom(c))
+        setExpected(data.getSchema(c));
+    }
+    super.setSchema(actual);
+  }
+
+  @Override protected Class findStringClass(Schema schema) {
+    Class stringClass = null;
+    switch (schema.getType()) {
+    case STRING:
+      stringClass = getPropAsClass(schema, SpecificData.CLASS_PROP);
+      break;
+    case MAP: 
+      stringClass = getPropAsClass(schema, SpecificData.KEY_CLASS_PROP);
+      break;
+    }
+    if (stringClass != null)
+      return stringClass;
+    return super.findStringClass(schema);
+  }
+
+  private Class getPropAsClass(Schema schema, String prop) {
+    String name = schema.getProp(prop);
+    if (name == null) return null;
+    try {
+      return ClassUtils.forName(getData().getClassLoader(), name);
+    } catch (ClassNotFoundException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+}
+
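A minimal reading sketch for the class above, again assuming a hypothetical
generated class `example.User` and a `byte[] bytes` previously written with the
same schema:

    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.DecoderFactory;
    import org.apache.avro.specific.SpecificDatumReader;

    // Hypothetical: example.User is a generated SpecificRecord class.
    SpecificDatumReader<User> reader = new SpecificDatumReader<User>(User.class);
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
    User user = reader.read(null, decoder);  // pass an old instance to reuse it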
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificDatumWriter.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificDatumWriter.java
new file mode 100644
index 0000000..49dddbb
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificDatumWriter.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.Encoder;
+
+/** {@link org.apache.avro.io.DatumWriter DatumWriter} for generated Java classes. */
+public class SpecificDatumWriter<T> extends GenericDatumWriter<T> {
+  public SpecificDatumWriter() {
+    super(SpecificData.get());
+  }
+
+  public SpecificDatumWriter(Class<T> c) {
+    super(SpecificData.get().getSchema(c), SpecificData.get());
+  }
+  
+  public SpecificDatumWriter(Schema schema) {
+    super(schema, SpecificData.get());
+  }
+  
+  public SpecificDatumWriter(Schema root, SpecificData specificData) {
+    super(root, specificData);
+  }
+  
+  protected SpecificDatumWriter(SpecificData specificData) {
+    super(specificData);
+  }
+  
+  /** Returns the {@link SpecificData} implementation used by this writer. */
+  public SpecificData getSpecificData() {
+    return (SpecificData) getData();
+  }
+
+  @Override
+  protected void writeEnum(Schema schema, Object datum, Encoder out)
+    throws IOException {
+    if (!(datum instanceof Enum))
+      super.writeEnum(schema, datum, out);        // punt to generic
+    else
+      out.writeEnum(((Enum)datum).ordinal());
+  }
+
+  @Override
+  protected void writeString(Schema schema, Object datum, Encoder out)
+    throws IOException {
+    if (!(datum instanceof CharSequence)
+        && getSpecificData().isStringable(datum.getClass())) {
+      datum = datum.toString();                   // convert to string
+    }
+    writeString(datum, out);
+  }
+
+}
+
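And the matching writing sketch, including the stringable conversion performed
by writeString() above (again assuming a hypothetical generated class
`example.User` and an instance `user`):

    import java.io.ByteArrayOutputStream;
    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.EncoderFactory;
    import org.apache.avro.specific.SpecificDatumWriter;

    // Hypothetical: user is an instance of the generated class example.User.
    SpecificDatumWriter<User> writer = new SpecificDatumWriter<User>(User.class);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    writer.write(user, encoder);   // stringable values (e.g. BigDecimal) are
    encoder.flush();               // converted with toString() before encoding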
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificErrorBuilderBase.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificErrorBuilderBase.java
new file mode 100644
index 0000000..f95f42c
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificErrorBuilderBase.java
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import java.lang.reflect.Constructor;
+
+import org.apache.avro.Schema;
+import org.apache.avro.data.ErrorBuilder;
+import org.apache.avro.data.RecordBuilderBase;
+
+/** 
+ * Abstract base class for specific ErrorBuilder implementations.
+ * Not thread-safe.
+ */
+abstract public class SpecificErrorBuilderBase<T extends SpecificExceptionBase> 
+  extends RecordBuilderBase<T> implements ErrorBuilder<T> {
+  private Constructor<T> errorConstructor;
+  private Object value;
+  private boolean hasValue;
+  private Throwable cause;
+  private boolean hasCause;
+  
+  /**
+   * Creates a SpecificErrorBuilderBase for building errors of the given type.
+   * @param schema the schema associated with the error class.
+   */
+  protected SpecificErrorBuilderBase(Schema schema) {
+    super(schema, SpecificData.get());
+  }
+  
+  /**
+   * SpecificErrorBuilderBase copy constructor.
+   * @param other SpecificErrorBuilderBase instance to copy.
+   */
+  protected SpecificErrorBuilderBase(SpecificErrorBuilderBase<T> other) {
+    super(other, SpecificData.get());
+    this.errorConstructor = other.errorConstructor;
+    this.value = other.value;
+    this.hasValue = other.hasValue;
+    this.cause = other.cause;
+    this.hasCause = other.hasCause;
+  }
+  
+  /**
+   * Creates a SpecificErrorBuilderBase by copying an existing error instance.
+   * @param other the error instance to copy.
+   */
+  protected SpecificErrorBuilderBase(T other) {
+    super(other.getSchema(), SpecificData.get());
+    
+    Object otherValue = other.getValue();
+    if (otherValue != null) {
+      setValue(otherValue);
+    }
+    
+    Throwable otherCause = other.getCause();
+    if (otherCause != null) {
+      setCause(otherCause);
+    }
+  }
+
+  @Override
+  public Object getValue() {
+    return value;
+  }
+
+  @Override
+  public SpecificErrorBuilderBase<T> setValue(Object value) {
+    this.value = value;
+    hasValue = true;
+    return this;
+  }
+  
+  @Override
+  public boolean hasValue() {
+    return hasValue;
+  }
+  
+  @Override
+  public SpecificErrorBuilderBase<T> clearValue() {
+    value = null;
+    hasValue = false;
+    return this;
+  }
+
+  @Override
+  public Throwable getCause() {
+    return cause;
+  }
+
+  @Override
+  public SpecificErrorBuilderBase<T> setCause(Throwable cause) {
+    this.cause = cause;
+    hasCause = true;
+    return this;
+  }
+  
+  @Override
+  public boolean hasCause() {
+    return hasCause;
+  }
+  
+  @Override
+  public SpecificErrorBuilderBase<T> clearCause() {
+    cause = null;
+    hasCause = false;
+    return this;
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificExceptionBase.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificExceptionBase.java
new file mode 100644
index 0000000..bf07e30
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificExceptionBase.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.specific;
+
+import java.io.Externalizable;
+import java.io.ObjectOutput;
+import java.io.ObjectInput;
+import java.io.IOException;
+
+import org.apache.avro.AvroRemoteException;
+import org.apache.avro.Schema;
+
+/** Base class for specific exceptions. */
+public abstract class SpecificExceptionBase extends AvroRemoteException
+  implements SpecificRecord, Externalizable {
+
+  public SpecificExceptionBase() {
+    super();
+  }
+  
+  public SpecificExceptionBase(Throwable value) {
+    super(value);
+  }
+
+  public SpecificExceptionBase(Object value) {
+    super(value);
+  }
+  
+  public SpecificExceptionBase(Object value, Throwable cause) {
+    super(value, cause);
+  }
+
+  public abstract Schema getSchema();
+  public abstract Object get(int field);
+  public abstract void put(int field, Object value);
+
+  public boolean equals(Object that) {
+    if (that == this) return true;                        // identical object
+    if (!(that instanceof SpecificExceptionBase)) return false; // not a record
+    if (this.getClass() != that.getClass()) return false; // not same schema
+    return SpecificData.get().compare(this, that, this.getSchema()) == 0;
+  }
+
+  public int hashCode() {
+    return SpecificData.get().hashCode(this, this.getSchema());
+  }
+
+  @Override public abstract void writeExternal(ObjectOutput out)
+    throws IOException;
+  @Override public abstract void readExternal(ObjectInput in)
+    throws IOException;
+
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificFixed.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificFixed.java
new file mode 100644
index 0000000..9ff9093
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificFixed.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import java.io.Externalizable;
+import java.io.ObjectOutput;
+import java.io.ObjectInput;
+import java.io.IOException;
+import java.util.Arrays;
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericFixed;
+import org.apache.avro.io.BinaryData;
+
+/** Base class for generated fixed-sized data classes. */
+public abstract class SpecificFixed
+  implements GenericFixed, Comparable<SpecificFixed>, Externalizable {
+
+  private byte[] bytes;
+
+  public SpecificFixed() {
+    bytes(new byte[getSchema().getFixedSize()]);
+  }
+  
+  public SpecificFixed(byte[] bytes) {
+    bytes(bytes); 
+  }
+
+  public void bytes(byte[] bytes) { this.bytes = bytes; }
+
+  @Override public byte[] bytes() { return bytes; }
+
+  @Override public abstract Schema getSchema();
+
+  @Override public boolean equals(Object o) {
+    if (o == this) return true;
+    return o instanceof GenericFixed
+      && Arrays.equals(bytes, ((GenericFixed)o).bytes());
+  }
+
+  @Override public int hashCode() { return Arrays.hashCode(bytes); }
+  @Override public String toString() { return Arrays.toString(bytes); }
+
+  @Override public int compareTo(SpecificFixed that) {
+    return BinaryData.compareBytes(this.bytes, 0, this.bytes.length,
+                                   that.bytes, 0, that.bytes.length);
+  }
+
+  @Override public abstract void writeExternal(ObjectOutput out)
+    throws IOException;
+  @Override public abstract void readExternal(ObjectInput in)
+    throws IOException;
+
+}
+
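A sketch of a concrete subclass of SpecificFixed in the style of generated
code; this hand-written MD5 type is illustrative only and is not produced by
this patch:

    import java.io.IOException;
    import java.io.ObjectInput;
    import java.io.ObjectOutput;
    import org.apache.avro.Schema;
    import org.apache.avro.specific.SpecificFixed;

    public class MD5 extends SpecificFixed {            // illustrative fixed(16)
      public static final Schema SCHEMA$ = new Schema.Parser()
          .parse("{\"type\":\"fixed\",\"name\":\"MD5\",\"size\":16}");
      public MD5() { super(); }                  // 16-byte array via getSchema()
      public MD5(byte[] bytes) { super(bytes); }
      @Override public Schema getSchema() { return SCHEMA$; }
      @Override public void writeExternal(ObjectOutput out) throws IOException {
        out.write(bytes());
      }
      @Override public void readExternal(ObjectInput in) throws IOException {
        in.readFully(bytes());
      }
    }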
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecord.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecord.java
new file mode 100644
index 0000000..ad78df2
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecord.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import org.apache.avro.generic.IndexedRecord;
+
+/** Implemented by generated record classes.  Permits efficient access to
+ * fields. */
+public interface SpecificRecord extends IndexedRecord {
+}
+
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java
new file mode 100644
index 0000000..51ee653
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBase.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import java.io.Externalizable;
+import java.io.ObjectOutput;
+import java.io.ObjectInput;
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericRecord;
+
+/** Base class for generated record classes. */
+public abstract class SpecificRecordBase
+  implements SpecificRecord, Comparable<SpecificRecord>, GenericRecord,
+             Externalizable {
+
+  public abstract Schema getSchema();
+  public abstract Object get(int field);
+  public abstract void put(int field, Object value);
+
+  @Override
+  public void put(String fieldName, Object value) {
+    put(getSchema().getField(fieldName).pos(), value);
+  }
+
+  @Override
+  public Object get(String fieldName) {
+    return get(getSchema().getField(fieldName).pos());
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == this) return true;                        // identical object
+    if (!(that instanceof SpecificRecord)) return false;  // not a record
+    if (this.getClass() != that.getClass()) return false; // not same schema
+    return SpecificData.get().compare(this, that, this.getSchema(), true) == 0;
+  }
+    
+  @Override
+  public int hashCode() {
+    return SpecificData.get().hashCode(this, this.getSchema());
+  }
+
+  @Override
+  public int compareTo(SpecificRecord that) {
+    return SpecificData.get().compare(this, that, this.getSchema());
+  }
+
+  @Override
+  public String toString() {
+    return SpecificData.get().toString(this);
+  }
+
+  @Override
+  public void writeExternal(ObjectOutput out)
+    throws IOException {
+    new SpecificDatumWriter(getSchema())
+      .write(this, SpecificData.getEncoder(out));
+  }
+
+  @Override
+  public void readExternal(ObjectInput in)
+    throws IOException {
+    new SpecificDatumReader(getSchema())
+      .read(this, SpecificData.getDecoder(in));
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBuilderBase.java b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBuilderBase.java
new file mode 100644
index 0000000..d508dce
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/SpecificRecordBuilderBase.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import org.apache.avro.Schema;
+import org.apache.avro.data.RecordBuilderBase;
+
+/**
+ * Abstract base class for specific RecordBuilder implementations.
+ * Not thread-safe.
+ */
+abstract public class SpecificRecordBuilderBase<T extends SpecificRecord> 
+  extends RecordBuilderBase<T> {
+  
+  /**
+   * Creates a SpecificRecordBuilderBase for building records of the given type.
+   * @param schema the schema associated with the record class.
+   */
+  protected SpecificRecordBuilderBase(Schema schema) {
+    super(schema, SpecificData.get());
+  }
+  
+  /**
+   * SpecificRecordBuilderBase copy constructor.
+   * @param other SpecificRecordBuilderBase instance to copy.
+   */
+  protected SpecificRecordBuilderBase(SpecificRecordBuilderBase<T> other) {
+    super(other, SpecificData.get());
+  }
+  
+  /**
+   * Creates a SpecificRecordBuilderBase by copying an existing record instance.
+   * @param other the record instance to copy.
+   */
+  protected SpecificRecordBuilderBase(T other) {
+    super(other.getSchema(), SpecificData.get());
+  }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/specific/package.html b/lang/java/avro/src/main/java/org/apache/avro/specific/package.html
new file mode 100644
index 0000000..d9be484
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/specific/package.html
@@ -0,0 +1,45 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+Generate specific Java classes for schemas and protocols.
+
+<p>This API is recommended for most RPC uses and for data applications
+  that always use the same datatypes, i.e., whose schemas are known at
+  compile time.  For data applications that accept dynamic datatypes,
+  the {@link org.apache.avro.generic generic} API is recommended.
+
+<p>Avro types are mapped to Java as follows:
+
+<ul>
+
+<li>Record, enum, and fixed schemas generate Java class definitions.</li>
+
+<li>All other types are mapped as in the {@link org.apache.avro.generic
+  generic} API.</li>
+
+</ul>
+
+<p>Note that when no generated class is found corresponding to a
+  record, enum or fixed schema, a {@link org.apache.avro.generic
+  generic} representation is used instead.  This permits generated
+  classes to be nested within generic data structures.
+
+</body>
+</html>
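
As a concrete illustration of the mapping described in the package
documentation above, a record schema for a hypothetical `example.User` with a
single string field would compile to roughly the following (abridged; real
generated code also includes builders and per-field accessors):

    package example;

    import org.apache.avro.AvroRuntimeException;
    import org.apache.avro.Schema;
    import org.apache.avro.specific.SpecificRecordBase;

    public class User extends SpecificRecordBase {      // illustrative shape only
      public static final Schema SCHEMA$ = new Schema.Parser().parse(
          "{\"type\":\"record\",\"name\":\"User\",\"namespace\":\"example\","
          + "\"fields\":[{\"name\":\"name\",\"type\":\"string\"}]}");
      private CharSequence name;
      @Override public Schema getSchema() { return SCHEMA$; }
      @Override public Object get(int field) {
        if (field == 0) return name;
        throw new AvroRuntimeException("Bad index");
      }
      @Override public void put(int field, Object value) {
        if (field == 0) { name = (CharSequence) value; return; }
        throw new AvroRuntimeException("Bad index");
      }
    }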
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/ByteBufferInputStream.java b/lang/java/avro/src/main/java/org/apache/avro/util/ByteBufferInputStream.java
new file mode 100644
index 0000000..69c739b
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/ByteBufferInputStream.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.util;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+/** Utility to present {@link ByteBuffer} data as an {@link InputStream}. */
+public class ByteBufferInputStream extends InputStream {
+  private List<ByteBuffer> buffers;
+  private int current;
+
+  public ByteBufferInputStream(List<ByteBuffer> buffers) {
+    this.buffers = buffers;
+  }
+
+  /** @see InputStream#read()
+   * @throws EOFException if EOF is reached. */
+  @Override
+  public int read() throws IOException {
+    return getBuffer().get() & 0xff;
+  }
+
+  /** @see InputStream#read(byte[], int, int)
+   * @throws EOFException if EOF is reached before any byte can be read. */
+  @Override
+  public int read(byte[] b, int off, int len) throws IOException {
+    if (len == 0) return 0;
+    ByteBuffer buffer = getBuffer();
+    int remaining = buffer.remaining();
+    if (len > remaining) {
+      buffer.get(b, off, remaining);
+      return remaining;
+    } else {
+      buffer.get(b, off, len);
+      return len;
+    }
+  }
+
+  /** Read a buffer from the input without copying, if possible.
+   * @throws EOFException if EOF is reached before reading all the bytes. */
+  public ByteBuffer readBuffer(int length) throws IOException {
+    if (length == 0) return ByteBuffer.allocate(0);
+    ByteBuffer buffer = getBuffer();
+    if (buffer.remaining() == length) {           // can return current as-is?
+      current++;
+      return buffer;                              // return w/o copying
+    }
+    // punt: allocate a new buffer & copy into it
+    ByteBuffer result = ByteBuffer.allocate(length);
+    int start = 0;
+    while (start < length)
+      start += read(result.array(), start, length-start);
+    return result;
+  }
+
+  /** Returns the next non-empty buffer.
+   * @throws EOFException if no more data remains.
+   */
+  private ByteBuffer getBuffer() throws IOException {
+    while (current < buffers.size()) {
+      ByteBuffer buffer = buffers.get(current);
+      if (buffer.hasRemaining())
+        return buffer;
+      current++;
+    }
+    throw new EOFException();
+  }
+}
+
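A minimal sketch of the zero-copy behavior of readBuffer() above:

    import java.nio.ByteBuffer;
    import java.util.Arrays;
    import org.apache.avro.util.ByteBufferInputStream;

    ByteBufferInputStream in = new ByteBufferInputStream(Arrays.asList(
        ByteBuffer.wrap(new byte[] {1, 2}), ByteBuffer.wrap(new byte[] {3})));
    int first = in.read();               // 1
    ByteBuffer rest = in.readBuffer(1);  // returns the first buffer as-is (no
                                         // copy), since exactly 1 byte remains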
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/ByteBufferOutputStream.java b/lang/java/avro/src/main/java/org/apache/avro/util/ByteBufferOutputStream.java
new file mode 100644
index 0000000..2feb699
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/ByteBufferOutputStream.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.util;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/** Utility to collect data written to an {@link OutputStream} in
+ * {@link ByteBuffer}s. */
+public class ByteBufferOutputStream extends OutputStream {
+  public static final int BUFFER_SIZE = 8192;
+
+  private List<ByteBuffer> buffers;
+
+  public ByteBufferOutputStream() {
+    reset();
+  }
+
+  /** Returns all data written and resets the stream to be empty. */
+  public List<ByteBuffer> getBufferList() {
+    List<ByteBuffer> result = buffers;
+    reset();
+    for (ByteBuffer buffer : result) buffer.flip();
+    return result;
+  }
+
+  /** Prepend a list of ByteBuffers to this stream. */
+  public void prepend(List<ByteBuffer> lists) {
+    for (ByteBuffer buffer: lists) {
+      buffer.position(buffer.limit());
+    }
+    buffers.addAll(0, lists);
+  }
+  
+  /** Append a list of ByteBuffers to this stream. */
+  public void append(List<ByteBuffer> lists) {
+    for (ByteBuffer buffer: lists) {
+      buffer.position(buffer.limit());
+    }
+    buffers.addAll(lists);
+  }
+  
+  public void reset() {
+    buffers = new LinkedList<ByteBuffer>();
+    buffers.add(ByteBuffer.allocate(BUFFER_SIZE));
+  }
+
+  public void write(ByteBuffer buffer) {
+    buffers.add(buffer);
+  }
+
+  @Override
+  public void write(int b) {
+    ByteBuffer buffer = buffers.get(buffers.size()-1);
+    if (buffer.remaining() < 1) {
+      buffer = ByteBuffer.allocate(BUFFER_SIZE);
+      buffers.add(buffer);
+    }
+    buffer.put((byte)b);
+  }
+
+  @Override
+  public void write(byte[] b, int off, int len) {
+    ByteBuffer buffer = buffers.get(buffers.size()-1);
+    int remaining = buffer.remaining();
+    while (len > remaining) {
+      buffer.put(b, off, remaining);
+      len -= remaining;
+      off += remaining;
+      buffer = ByteBuffer.allocate(BUFFER_SIZE);
+      buffers.add(buffer);
+      remaining = buffer.remaining();
+    }
+    buffer.put(b, off, len);
+  }
+
+  /** Add a buffer to the output without copying, if possible. */
+  public void writeBuffer(ByteBuffer buffer) throws IOException {
+    if (buffer.remaining() < BUFFER_SIZE) {
+      write(buffer.array(), buffer.position(), buffer.remaining());
+    } else {                                      // append w/o copying bytes
+      ByteBuffer dup = buffer.duplicate();
+      dup.position(buffer.limit());               // ready for flip
+      buffers.add(dup);
+    }
+  }
+}
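
These two utility streams compose naturally; a minimal round-trip sketch:

    import java.nio.ByteBuffer;
    import java.util.List;
    import org.apache.avro.util.ByteBufferInputStream;
    import org.apache.avro.util.ByteBufferOutputStream;

    ByteBufferOutputStream out = new ByteBufferOutputStream();
    out.write(new byte[] {1, 2, 3});
    List<ByteBuffer> buffers = out.getBufferList();  // flips buffers, resets stream
    ByteBufferInputStream in = new ByteBufferInputStream(buffers);
    int b = in.read();                               // 1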
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/ClassUtils.java b/lang/java/avro/src/main/java/org/apache/avro/util/ClassUtils.java
new file mode 100644
index 0000000..4e3c41d
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/ClassUtils.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.util;
+
+public class ClassUtils {
+
+  private ClassUtils() {
+  }
+
+  /**
+   * Loads a class by name, trying class loaders in the following order:
+   * 1. the class loader of this class (ClassUtils);
+   * 2. the thread context class loader.
+   *
+   * @param className The name of the class to load.
+   * @return The loaded class.
+   * @throws ClassNotFoundException if no class loader could load the class.
+   */
+  public static Class<?> forName(String className)
+    throws ClassNotFoundException {
+    return ClassUtils.forName(ClassUtils.class, className);
+  }
+
+  /**
+   * Loads a class by name, trying class loaders in the following order:
+   * 1. the class loader of the given context class;
+   * 2. the thread context class loader.
+   *
+   * @param contextClass A class whose class loader is tried first.
+   * @param className    The name of the class to load.
+   * @return The loaded class.
+   * @throws ClassNotFoundException if no class loader could load the class.
+   */
+  public static Class<?> forName(Class<?> contextClass, String className)
+    throws ClassNotFoundException {
+    Class<?> c = null;
+    if (contextClass.getClassLoader() != null) {
+      c = forName(className, contextClass.getClassLoader());
+    }
+    if (c == null
+        && Thread.currentThread().getContextClassLoader() != null) {
+      c = forName(className, Thread.currentThread().getContextClassLoader());
+    }
+    if (c == null) {
+      throw new ClassNotFoundException("Failed to load class: " + className);
+    }
+    return c;
+  }
+
+  /**
+   * Loads a class by name, trying class loaders in the following order:
+   * 1. the supplied class loader;
+   * 2. the thread context class loader.
+   *
+   * @param classLoader The class loader to try first.
+   * @param className   The name of the class to load.
+   * @return The loaded class.
+   * @throws ClassNotFoundException if no class loader could load the class.
+   */
+  public static Class<?> forName(ClassLoader classLoader, String className)
+    throws ClassNotFoundException {
+    Class<?> c = null;
+    if (classLoader != null) {
+      c = forName(className, classLoader);
+    }
+    if (c == null && Thread.currentThread().getContextClassLoader() != null) {
+      c = forName(className, Thread.currentThread().getContextClassLoader());
+    }
+    if (c == null) {
+      throw new ClassNotFoundException("Failed to load class: " + className);
+    }
+    return c;
+  }
+
+  /**
+   * Loads a {@link Class} from the specified {@link ClassLoader} without
+   * throwing {@link ClassNotFoundException}.
+   *
+   * @param className   The name of the class to load.
+   * @param classLoader The class loader to load from.
+   * @return The loaded class, or null if it could not be loaded.
+   */
+  private static Class<?> forName(String className, ClassLoader classLoader) {
+    Class<?> c = null;
+    if (classLoader != null && className != null) {
+      try {
+        c = Class.forName(className, true, classLoader);
+      } catch (ClassNotFoundException e) {
+        //Ignore and return null
+      }
+    }
+    return c;
+  }
+}
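A minimal usage sketch; the loader argument may be null, in which case only
the thread context class loader is tried:

    import org.apache.avro.util.ClassUtils;

    // Tries the supplied loader first, then the thread context class loader;
    // throws ClassNotFoundException if neither can load the class.
    Class<?> c = ClassUtils.forName(
        Thread.currentThread().getContextClassLoader(), "java.math.BigDecimal");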
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/Utf8.java b/lang/java/avro/src/main/java/org/apache/avro/util/Utf8.java
new file mode 100644
index 0000000..061d160
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/Utf8.java
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.util;
+
+import java.nio.charset.Charset;
+import java.io.UnsupportedEncodingException;
+
+import org.apache.avro.io.BinaryData;
+
+/** A Utf8 string.  Unlike {@link String}, instances are mutable.  This is more
+ * efficient than {@link String} when reading or writing a sequence of values,
+ * as a single instance may be reused. */
+public class Utf8 implements Comparable<Utf8>, CharSequence {
+  private static final byte[] EMPTY = new byte[0];
+  private static final Charset UTF8 = Charset.forName("UTF-8");
+
+  private byte[] bytes = EMPTY;
+  private int length;
+  private String string;
+
+  public Utf8() {}
+
+  public Utf8(String string) {
+    this.bytes = getBytesFor(string);
+    this.length = bytes.length;
+    this.string = string;
+  }
+  
+  public Utf8(Utf8 other) {
+    this.length = other.length;
+    this.bytes = new byte[other.length];
+    System.arraycopy(other.bytes, 0, this.bytes, 0, this.length);
+    this.string = other.string;
+  }
+
+  public Utf8(byte[] bytes) {
+    this.bytes = bytes;
+    this.length = bytes.length;
+  }
+
+  /** Return UTF-8 encoded bytes.
+   * Only valid through {@link #getByteLength()}. */
+  public byte[] getBytes() { return bytes; }
+
+  /** Return length in bytes.
+   * @deprecated call {@link #getByteLength()} instead. */
+  public int getLength() { return length; }
+
+  /** Return length in bytes. */
+  public int getByteLength() { return length; }
+
+  /** Set length in bytes.  Should be called whenever byte content changes,
+   * even if the length does not change, as this also clears the cached String.
+   * @deprecated call {@link #setByteLength(int)} instead. */
+  public Utf8 setLength(int newLength) {
+    return setByteLength(newLength);
+  }
+
+  /** Set length in bytes.  Should be called whenever byte content changes,
+   * even if the length does not change, as this also clears the cached String. */
+  public Utf8 setByteLength(int newLength) {
+    if (this.bytes.length < newLength) {
+      byte[] newBytes = new byte[newLength];
+      System.arraycopy(bytes, 0, newBytes, 0, this.length);
+      this.bytes = newBytes;
+    }
+    this.length = newLength;
+    this.string = null;
+    return this;
+  }
+
+  /** Set to the contents of a String. */
+  public Utf8 set(String string) {
+    this.bytes = getBytesFor(string);
+    this.length = bytes.length;
+    this.string = string;
+    return this;
+  }
+
+  private abstract static class Utf8Converter {
+    public abstract String fromUtf8(byte[] bytes, int length);
+    public abstract byte[] toUtf8(String str);
+  }
+
+  private static final Utf8Converter UTF8_CONVERTER =
+    System.getProperty("java.version").startsWith("1.6.")
+    ? new Utf8Converter() {                       // optimized for Java 6
+        public String fromUtf8(byte[] bytes, int length) {
+          try {
+            return new String(bytes, 0, length, "UTF-8");
+          } catch (UnsupportedEncodingException e) {
+            throw new RuntimeException(e);
+          }
+        }
+        public byte[] toUtf8(String str) {
+          try {
+            return str.getBytes("UTF-8");
+          } catch (UnsupportedEncodingException e) {
+            throw new RuntimeException(e);
+          }
+        }
+      }
+    : new Utf8Converter() {                       // faster in Java 7 & 8
+        public String fromUtf8(byte[] bytes, int length) {
+          return new String(bytes, 0, length, UTF8);
+        }
+        public byte[] toUtf8(String str) {
+          return str.getBytes(UTF8);
+        }
+      };
+
+  @Override
+  public String toString() {
+    if (this.length == 0) return "";
+    if (this.string == null) {
+      this.string = UTF8_CONVERTER.fromUtf8(bytes, length);
+    }
+    return this.string;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (o == this) return true;
+    if (!(o instanceof Utf8)) return false;
+    Utf8 that = (Utf8)o;
+    if (!(this.length == that.length)) return false;
+    byte[] thatBytes = that.bytes;
+    for (int i = 0; i < this.length; i++)
+      if (bytes[i] != thatBytes[i])
+        return false;
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    int hash = 0;
+    for (int i = 0; i < this.length; i++)
+      hash = hash*31 + bytes[i];
+    return hash;
+  }
+
+  @Override
+  public int compareTo(Utf8 that) {
+    return BinaryData.compareBytes(this.bytes, 0, this.length,
+                                   that.bytes, 0, that.length);
+  }
+
+  // CharSequence implementation
+  @Override public char charAt(int index) { return toString().charAt(index); }
+  @Override public int length() { return toString().length(); }
+  @Override public CharSequence subSequence(int start, int end) {
+    return toString().subSequence(start, end);
+  }
+
+  /** Gets the UTF-8 bytes for a String. */
+  public static final byte[] getBytesFor(String str) {
+    return UTF8_CONVERTER.toUtf8(str);
+  }
+
+}
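A minimal sketch of the instance reuse that motivates Utf8's mutability:

    import org.apache.avro.util.Utf8;

    Utf8 u = new Utf8("hello");
    int n = u.getByteLength();   // 5; getBytes() is valid through this length
    u.set("world");              // reuse the same instance for new content
    String s = u.toString();     // "world", decoded lazily and then cached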
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java b/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java
new file mode 100644
index 0000000..d65227c
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/WeakIdentityHashMap.java
@@ -0,0 +1,169 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.avro.util;
+
+import java.lang.ref.ReferenceQueue;
+import java.lang.ref.WeakReference;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Implements a combination of WeakHashMap and IdentityHashMap.
+ * Useful for caches that need to key off of a == comparison
+ * instead of a .equals.
+ * 
+ * <b>
+ * This class is not a general-purpose Map implementation! While
+ * this class implements the Map interface, it intentionally violates
+ * Map's general contract, which mandates the use of the equals method
+ * when comparing objects. This class is designed for use only in the
+ * rare cases wherein reference-equality semantics are required.
+ * 
+ * Note that this implementation is not synchronized.
+ * </b>
+ */
+public class WeakIdentityHashMap<K, V> implements Map<K, V> {
+  private final ReferenceQueue<K> queue = new ReferenceQueue<K>();
+  private Map<IdentityWeakReference, V> backingStore
+    = new HashMap<IdentityWeakReference, V>();
+
+  public WeakIdentityHashMap() {}
+
+  public void clear() {
+    backingStore.clear();
+    reap();
+  }
+
+  public boolean containsKey(Object key) {
+    reap();
+    return backingStore.containsKey(new IdentityWeakReference(key));
+  }
+
+  public boolean containsValue(Object value)  {
+    reap();
+    return backingStore.containsValue(value);
+  }
+
+  public Set<Map.Entry<K, V>> entrySet() {
+    reap();
+    Set<Map.Entry<K, V>> ret = new HashSet<Map.Entry<K, V>>();
+    for (Map.Entry<IdentityWeakReference, V> ref : backingStore.entrySet()) {
+      final K key = ref.getKey().get();
+      final V value = ref.getValue();
+      Map.Entry<K, V> entry = new Map.Entry<K, V>() {
+        public K getKey() {
+          return key;
+        }
+        public V getValue() {
+          return value;
+        }
+        public V setValue(V value) {
+          throw new UnsupportedOperationException();
+        }
+      };
+      ret.add(entry);
+    }
+    return Collections.unmodifiableSet(ret);
+  }
+
+  public Set<K> keySet() {
+    reap();
+    Set<K> ret = new HashSet<K>();
+    for (IdentityWeakReference ref : backingStore.keySet()) {
+      ret.add(ref.get());
+    }
+    return Collections.unmodifiableSet(ret);
+  }
+
+  public boolean equals(Object o) {
+    if (!(o instanceof WeakIdentityHashMap)) return false;  // avoid ClassCastException
+    return backingStore.equals(((WeakIdentityHashMap)o).backingStore);
+  }
+
+  public V get(Object key) {
+    reap();
+    return backingStore.get(new IdentityWeakReference(key));
+  }
+  public V put(K key, V value) {
+    reap();
+    return backingStore.put(new IdentityWeakReference(key), value);
+  }
+
+  public int hashCode() {
+    reap();
+    return backingStore.hashCode();
+  }
+  public boolean isEmpty() {
+    reap();
+    return backingStore.isEmpty();
+  }
+  public void putAll(Map t) {
+    throw new UnsupportedOperationException();
+  }
+  public V remove(Object key) {
+    reap();
+    return backingStore.remove(new IdentityWeakReference(key));
+  }
+  public int size() {
+    reap();
+    return backingStore.size();
+  }
+  public Collection<V> values() {
+    reap();
+    return backingStore.values();
+  }
+
+  private synchronized void reap() {
+    Object zombie = queue.poll();
+
+    while (zombie != null) {
+      IdentityWeakReference victim = (IdentityWeakReference)zombie;
+      backingStore.remove(victim);
+      zombie = queue.poll();
+    }
+  }
+
+  class IdentityWeakReference extends WeakReference<K> {
+    int hash;
+
+    @SuppressWarnings("unchecked")
+    IdentityWeakReference(Object obj) {
+      super((K)obj, queue);
+      hash = System.identityHashCode(obj);
+    }
+
+    public int hashCode() {
+      return hash;
+    }
+
+    public boolean equals(Object o) {
+      if (this == o) {
+        return true;
+      }
+      IdentityWeakReference ref = (IdentityWeakReference)o;
+      if (this.get() == ref.get()) {
+        return true;
+      }
+      return false;
+    }
+  }
+}
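A minimal sketch of the reference-equality keying described above:

    import org.apache.avro.util.WeakIdentityHashMap;

    WeakIdentityHashMap<String, Integer> map =
        new WeakIdentityHashMap<String, Integer>();
    String a = new String("key");
    String b = new String("key");  // a.equals(b), but a != b
    map.put(a, 1);
    map.get(a);                    // 1
    map.get(b);                    // null: lookup uses identity, not equals()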
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/internal/JacksonUtils.java b/lang/java/avro/src/main/java/org/apache/avro/util/internal/JacksonUtils.java
new file mode 100644
index 0000000..8d5661b
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/internal/JacksonUtils.java
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.util.internal;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.JsonProperties;
+import org.apache.avro.Schema;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.util.TokenBuffer;
+
+public class JacksonUtils {
+  static final String BYTES_CHARSET = "ISO-8859-1";
+
+  private JacksonUtils() {
+  }
+
+  public static JsonNode toJsonNode(Object datum) {
+    if (datum == null) {
+      return null;
+    }
+    try {
+      TokenBuffer generator = new TokenBuffer(new ObjectMapper());
+      toJson(datum, generator);
+      return new ObjectMapper().readTree(generator.asParser());
+    } catch (IOException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  @SuppressWarnings(value="unchecked")
+  static void toJson(Object datum, JsonGenerator generator) throws IOException {
+    if (datum == JsonProperties.NULL_VALUE) { // null
+      generator.writeNull();
+    } else if (datum instanceof Map) { // record, map
+      generator.writeStartObject();
+      for (Map.Entry<Object,Object> entry : ((Map<Object,Object>) datum).entrySet()) {
+        generator.writeFieldName(entry.getKey().toString());
+        toJson(entry.getValue(), generator);
+      }
+      generator.writeEndObject();
+    } else if (datum instanceof Collection) { // array
+      generator.writeStartArray();
+      for (Object element : (Collection<?>) datum) {
+        toJson(element, generator);
+      }
+      generator.writeEndArray();
+    } else if (datum instanceof byte[]) { // bytes, fixed
+      generator.writeString(new String((byte[]) datum, BYTES_CHARSET));
+    } else if (datum instanceof CharSequence || datum instanceof Enum<?>) { // string, enum
+      generator.writeString(datum.toString());
+    } else if (datum instanceof Double) { // double
+      generator.writeNumber((Double) datum);
+    } else if (datum instanceof Float) { // float
+      generator.writeNumber((Float) datum);
+    } else if (datum instanceof Long) { // long
+      generator.writeNumber((Long) datum);
+    } else if (datum instanceof Integer) { // int
+      generator.writeNumber((Integer) datum);
+    } else if (datum instanceof Boolean) { // boolean
+      generator.writeBoolean((Boolean) datum);
+    }
+  }
+
+  public static Object toObject(JsonNode jsonNode) {
+    return toObject(jsonNode, null);
+  }
+
+  public static Object toObject(JsonNode jsonNode, Schema schema) {
+    if (schema != null && schema.getType().equals(Schema.Type.UNION)) {
+      return toObject(jsonNode, schema.getTypes().get(0));
+    }
+    if (jsonNode == null) {
+      return null;
+    } else if (jsonNode.isNull()) {
+      return JsonProperties.NULL_VALUE;
+    } else if (jsonNode.isBoolean()) {
+      return jsonNode.asBoolean();
+    } else if (jsonNode.isInt()) {
+      if (schema == null || schema.getType().equals(Schema.Type.INT)) {
+        return jsonNode.asInt();
+      } else if (schema.getType().equals(Schema.Type.LONG)) {
+        return jsonNode.asLong();
+      }
+    } else if (jsonNode.isLong()) {
+      return jsonNode.asLong();
+    } else if (jsonNode.isDouble()) {
+      if (schema == null || schema.getType().equals(Schema.Type.DOUBLE)) {
+        return jsonNode.asDouble();
+      } else if (schema.getType().equals(Schema.Type.FLOAT)) {
+        return (float) jsonNode.asDouble();
+      }
+    } else if (jsonNode.isTextual()) {
+      if (schema == null || schema.getType().equals(Schema.Type.STRING) ||
+          schema.getType().equals(Schema.Type.ENUM)) {
+        return jsonNode.asText();
+      } else if (schema.getType().equals(Schema.Type.BYTES)) {
+        try {
+          return jsonNode.getTextValue().getBytes(BYTES_CHARSET);
+        } catch (UnsupportedEncodingException e) {
+          throw new AvroRuntimeException(e);
+        }
+      }
+    } else if (jsonNode.isArray()) {
+      List<Object> l = new ArrayList<Object>();
+      for (JsonNode node : jsonNode) {
+        l.add(toObject(node, schema == null ? null : schema.getElementType()));
+      }
+      return l;
+    } else if (jsonNode.isObject()) {
+      Map<Object, Object> m = new LinkedHashMap<Object, Object>();
+      for (Iterator<String> it = jsonNode.getFieldNames(); it.hasNext(); ) {
+        String key = it.next();
+        Schema s = null;
+        if (schema != null && schema.getType().equals(Schema.Type.MAP)) {
+          s = schema.getValueType();
+        } else if (schema != null && schema.getType().equals(Schema.Type.RECORD)) {
+          s = schema.getField(key).schema();
+        }
+        Object value = toObject(jsonNode.get(key), s);
+        m.put(key, value);
+      }
+      return m;
+    }
+    return null;
+  }
+}
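
JacksonUtils converts between Jackson trees and the plain Java values Avro uses for JSON properties and defaults. A hedged round-trip sketch follows; note the class lives in an internal package, so this is illustrative rather than supported API:

    import java.util.LinkedHashMap;
    import java.util.Map;
    import org.apache.avro.util.internal.JacksonUtils;
    import org.codehaus.jackson.JsonNode;

    public class JacksonUtilsDemo {
      public static void main(String[] args) {
        Map<String, Object> props = new LinkedHashMap<String, Object>();
        props.put("doc", "example");
        props.put("version", 2);
        JsonNode node = JacksonUtils.toJsonNode(props); // {"doc":"example","version":2}
        Object back = JacksonUtils.toObject(node);      // a LinkedHashMap again
        System.out.println(node + " -> " + back);
      }
    }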
diff --git a/lang/java/avro/src/main/java/org/apache/avro/util/package.html b/lang/java/avro/src/main/java/org/apache/avro/util/package.html
new file mode 100644
index 0000000..638e187
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/util/package.html
@@ -0,0 +1,23 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+Common utility classes.
+</body>
+</html>
diff --git a/lang/java/avro/src/main/java/overview.html b/lang/java/avro/src/main/java/overview.html
new file mode 100644
index 0000000..d64872e
--- /dev/null
+++ b/lang/java/avro/src/main/java/overview.html
@@ -0,0 +1,88 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<html>
+<head>
+   <title>Avro</title>
+</head>
+<body>Avro is a data serialization system.
+
+  <h2>Overview</h2>
+
+  <p>Avro provides:
+    <ul>
+      <li>Rich data structures.
+      <li>A compact, fast, binary data format.
+      <li>A container file, to store persistent data.
+      <li>Remote procedure call (RPC).
+      <li>Simple integration with dynamic languages.  Code generation
+      is not required to read or write data files nor to use or
+      implement RPC protocols.  Code generation is an optional
+      optimization, only worth implementing for statically typed
+      languages.
+    </ul>  
+
+  <h2>Schemas</h2>
+
+  <p>Avro relies on <i>{@link org.apache.avro.Schema schemas}</i>.
+  When Avro data is read, the schema used when writing it is always
+  present.  This permits each datum to be written with no per-value
+  overheads, making serialization both fast and small.  This also
+  facilitates use with dynamic, scripting languages, since data,
+  together with its schema, is fully self-describing.
+
+  <p>When Avro data is stored in a {@link
+  org.apache.avro.file.DataFileWriter file}, its schema is stored with
+  it, so that files may be processed later by any program.  If the
+  program reading the data expects a different schema this can be
+  easily resolved, since both schemas are present.
+
+  <p>When Avro is used in {@link org.apache.avro.ipc RPC}, the client
+    and server exchange schemas in the connection handshake.  (This
+    can be optimized so that, for most calls, no schemas are actually
+    transmitted.)  Since client and server both have the other's
+    full schema, correspondence between same-named fields, missing
+    fields, extra fields, etc. can all be easily resolved.
+
+  <p>Avro schemas are defined
+  with <a href="http://www.json.org/">JSON</a>.  This facilitates
+  implementation in languages that already have JSON libraries.
+
+  <h2>Comparison with other systems</h2>
+
+  Avro provides functionality similar to systems such
+  as <a href="http://incubator.apache.org/thrift/">Thrift</a>,
+  <a href="http://code.google.com/protobuf/">Protocol Buffers</a>,
+  etc.  Avro differs from these systems in the following fundamental
+  aspects.
+  <ul>
+    <li><i>Dynamic typing</i>: Avro does not require that code be
+    generated.  Data is always accompanied by a schema that permits
+    full processing of that data without code generation, static
+    datatypes, etc.  This facilitates construction of generic
+    data-processing systems and languages.
+    <li><i>Untagged data</i>: Since the schema is present when data is
+    read, considerably less type information need be encoded with
+    data, resulting in smaller serialization size.</li>
+    <li><i>No manually-assigned field IDs</i>: When a schema changes,
+    both the old and new schema are always present when processing
+    data, so differences may be resolved symbolically, using field
+    names.
+  </ul>  
+
+</body>
+</html>
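
To make the overview concrete, here is a minimal sketch of the schema-carrying container file it describes, using the public generic API; the one-field schema is a stand-in:

    import java.io.File;
    import org.apache.avro.Schema;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecord;

    public class OverviewDemo {
      public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"User\",\"fields\":"
            + "[{\"name\":\"name\",\"type\":\"string\"}]}");
        GenericRecord user = new GenericData.Record(schema);
        user.put("name", "alice");
        DataFileWriter<GenericRecord> writer = new DataFileWriter<GenericRecord>(
            new GenericDatumWriter<GenericRecord>(schema));
        writer.create(schema, new File("users.avro")); // the schema travels with the data
        writer.append(user);
        writer.close();
      }
    }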
diff --git a/lang/java/avro/src/test/java/NoPackage.java b/lang/java/avro/src/test/java/NoPackage.java
new file mode 100644
index 0000000..1d89614
--- /dev/null
+++ b/lang/java/avro/src/test/java/NoPackage.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** I am an interface without a package. */
+public interface NoPackage {
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/AvroTestUtil.java b/lang/java/avro/src/test/java/org/apache/avro/AvroTestUtil.java
new file mode 100644
index 0000000..e920e46
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/AvroTestUtil.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.io.File;
+
+/** Utilities for Avro tests. */
+public class AvroTestUtil {
+  static final File TMPDIR = new File(System.getProperty("test.dir", System.getProperty("java.io.tmpdir", "/tmp")), "tmpfiles");
+
+  private AvroTestUtil() {
+  }
+
+  /** 
+   * Create a temporary file in a test-appropriate directory.
+   * 
+   * @param testClass The test case class requesting the file creation
+   * @param name The name of the file to be created 
+   */
+  public static File tempFile(Class<?> testClass, String name) {
+    File testClassDir = new File(TMPDIR, testClass.getName());
+    testClassDir.mkdirs();
+    return new File(testClassDir, name);
+  }
+
+  /** 
+   * Create a temporary directory in a test-appropriate directory.
+   * 
+   * @param testClass The test case class requesting the directory creation
+   * @param name The name of the directory to be created  
+   */
+  public static File tempDirectory(Class<?> testClass, String name) {
+    File tmpFile = tempFile(testClass, name);
+    tmpFile.delete();
+    tmpFile.mkdir();
+    return tmpFile;
+  }
+
+}
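
A short usage sketch; TempFileDemo is a hypothetical test class, and files land under ${test.dir}/tmpfiles/<class name>/:

    import java.io.File;
    import org.apache.avro.AvroTestUtil;

    class TempFileDemo {  // hypothetical caller
      static File output() {
        return AvroTestUtil.tempFile(TempFileDemo.class, "records.avro");
      }
      static File scratch() {
        return AvroTestUtil.tempDirectory(TempFileDemo.class, "scratch");
      }
    }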
diff --git a/lang/java/avro/src/test/java/org/apache/avro/BarRecord.java b/lang/java/avro/src/test/java/org/apache/avro/BarRecord.java
new file mode 100644
index 0000000..70bdd83
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/BarRecord.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+public class BarRecord {
+  private String beerMsg;
+
+  public BarRecord() {
+  }
+
+  public BarRecord(String beerMsg) {
+    this.beerMsg = beerMsg;
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that instanceof BarRecord) {
+      if (this.beerMsg == null) {
+        return ((BarRecord) that).beerMsg == null;
+      } else {
+        return this.beerMsg.equals(((BarRecord) that).beerMsg);
+      }
+    }
+    return false;
+  }
+
+  @Override
+  public int hashCode() {
+    return beerMsg == null ? 0 : beerMsg.hashCode(); // stay consistent with equals(), which accepts null
+  }
+
+  @Override
+  public String toString() {
+    return BarRecord.class.getSimpleName() + "{msg=" + beerMsg + "}";
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/FooBarSpecificRecord.java b/lang/java/avro/src/test/java/org/apache/avro/FooBarSpecificRecord.java
new file mode 100644
index 0000000..81252d4
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/FooBarSpecificRecord.java
@@ -0,0 +1,375 @@
+/**
+ * Autogenerated by Avro
+ * 
+ * DO NOT EDIT DIRECTLY
+ */
+package org.apache.avro;  
+@SuppressWarnings("all")
+@org.apache.avro.specific.AvroGenerated
+public class FooBarSpecificRecord extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
+  private static final long serialVersionUID = 1031933828916876443L;
+  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"FooBarSpecificRecord\",\"namespace\":\"org.apache.avro\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"nicknames\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"relatedids\",\"type\":{\"type\":\"array\",\" [...]
+  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+  @Deprecated public int id;
+  @Deprecated public java.lang.String name;
+  @Deprecated public java.util.List<java.lang.String> nicknames;
+  @Deprecated public java.util.List<java.lang.Integer> relatedids;
+  @Deprecated public org.apache.avro.TypeEnum typeEnum;
+
+  /**
+   * Default constructor.  Note that this does not initialize fields
+   * to their default values from the schema.  If that is desired then
+   * one should use <code>newBuilder()</code>. 
+   */
+  public FooBarSpecificRecord() {}
+
+  /**
+   * All-args constructor.
+   */
+  public FooBarSpecificRecord(java.lang.Integer id, java.lang.String name, java.util.List<java.lang.String> nicknames, java.util.List<java.lang.Integer> relatedids, org.apache.avro.TypeEnum typeEnum) {
+    this.id = id;
+    this.name = name;
+    this.nicknames = nicknames;
+    this.relatedids = relatedids;
+    this.typeEnum = typeEnum;
+  }
+
+  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
+  // Used by DatumWriter.  Applications should not call. 
+  public java.lang.Object get(int field$) {
+    switch (field$) {
+    case 0: return id;
+    case 1: return name;
+    case 2: return nicknames;
+    case 3: return relatedids;
+    case 4: return typeEnum;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+  // Used by DatumReader.  Applications should not call. 
+  @SuppressWarnings(value="unchecked")
+  public void put(int field$, java.lang.Object value$) {
+    switch (field$) {
+    case 0: id = (java.lang.Integer)value$; break;
+    case 1: name = (java.lang.String)value$; break;
+    case 2: nicknames = (java.util.List<java.lang.String>)value$; break;
+    case 3: relatedids = (java.util.List<java.lang.Integer>)value$; break;
+    case 4: typeEnum = (org.apache.avro.TypeEnum)value$; break;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+
+  /**
+   * Gets the value of the 'id' field.
+   */
+  public java.lang.Integer getId() {
+    return id;
+  }
+
+  /**
+   * Sets the value of the 'id' field.
+   * @param value the value to set.
+   */
+  public void setId(java.lang.Integer value) {
+    this.id = value;
+  }
+
+  /**
+   * Gets the value of the 'name' field.
+   */
+  public java.lang.String getName() {
+    return name;
+  }
+
+  /**
+   * Sets the value of the 'name' field.
+   * @param value the value to set.
+   */
+  public void setName(java.lang.String value) {
+    this.name = value;
+  }
+
+  /**
+   * Gets the value of the 'nicknames' field.
+   */
+  public java.util.List<java.lang.String> getNicknames() {
+    return nicknames;
+  }
+
+  /**
+   * Sets the value of the 'nicknames' field.
+   * @param value the value to set.
+   */
+  public void setNicknames(java.util.List<java.lang.String> value) {
+    this.nicknames = value;
+  }
+
+  /**
+   * Gets the value of the 'relatedids' field.
+   */
+  public java.util.List<java.lang.Integer> getRelatedids() {
+    return relatedids;
+  }
+
+  /**
+   * Sets the value of the 'relatedids' field.
+   * @param value the value to set.
+   */
+  public void setRelatedids(java.util.List<java.lang.Integer> value) {
+    this.relatedids = value;
+  }
+
+  /**
+   * Gets the value of the 'typeEnum' field.
+   */
+  public org.apache.avro.TypeEnum getTypeEnum() {
+    return typeEnum;
+  }
+
+  /**
+   * Sets the value of the 'typeEnum' field.
+   * @param value the value to set.
+   */
+  public void setTypeEnum(org.apache.avro.TypeEnum value) {
+    this.typeEnum = value;
+  }
+
+  /** Creates a new FooBarSpecificRecord RecordBuilder */
+  public static org.apache.avro.FooBarSpecificRecord.Builder newBuilder() {
+    return new org.apache.avro.FooBarSpecificRecord.Builder();
+  }
+  
+  /** Creates a new FooBarSpecificRecord RecordBuilder by copying an existing Builder */
+  public static org.apache.avro.FooBarSpecificRecord.Builder newBuilder(org.apache.avro.FooBarSpecificRecord.Builder other) {
+    return new org.apache.avro.FooBarSpecificRecord.Builder(other);
+  }
+  
+  /** Creates a new FooBarSpecificRecord RecordBuilder by copying an existing FooBarSpecificRecord instance */
+  public static org.apache.avro.FooBarSpecificRecord.Builder newBuilder(org.apache.avro.FooBarSpecificRecord other) {
+    return new org.apache.avro.FooBarSpecificRecord.Builder(other);
+  }
+  
+  /**
+   * RecordBuilder for FooBarSpecificRecord instances.
+   */
+  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<FooBarSpecificRecord>
+    implements org.apache.avro.data.RecordBuilder<FooBarSpecificRecord> {
+
+    private int id;
+    private java.lang.String name;
+    private java.util.List<java.lang.String> nicknames;
+    private java.util.List<java.lang.Integer> relatedids;
+    private org.apache.avro.TypeEnum typeEnum;
+
+    /** Creates a new Builder */
+    private Builder() {
+      super(org.apache.avro.FooBarSpecificRecord.SCHEMA$);
+    }
+    
+    /** Creates a Builder by copying an existing Builder */
+    private Builder(org.apache.avro.FooBarSpecificRecord.Builder other) {
+      super(other);
+      if (isValidValue(fields()[0], other.id)) {
+        this.id = data().deepCopy(fields()[0].schema(), other.id);
+        fieldSetFlags()[0] = true;
+      }
+      if (isValidValue(fields()[1], other.name)) {
+        this.name = data().deepCopy(fields()[1].schema(), other.name);
+        fieldSetFlags()[1] = true;
+      }
+      if (isValidValue(fields()[2], other.nicknames)) {
+        this.nicknames = data().deepCopy(fields()[2].schema(), other.nicknames);
+        fieldSetFlags()[2] = true;
+      }
+      if (isValidValue(fields()[3], other.relatedids)) {
+        this.relatedids = data().deepCopy(fields()[3].schema(), other.relatedids);
+        fieldSetFlags()[3] = true;
+      }
+      if (isValidValue(fields()[4], other.typeEnum)) {
+        this.typeEnum = data().deepCopy(fields()[4].schema(), other.typeEnum);
+        fieldSetFlags()[4] = true;
+      }
+    }
+    
+    /** Creates a Builder by copying an existing FooBarSpecificRecord instance */
+    private Builder(org.apache.avro.FooBarSpecificRecord other) {
+      super(org.apache.avro.FooBarSpecificRecord.SCHEMA$);
+      if (isValidValue(fields()[0], other.id)) {
+        this.id = data().deepCopy(fields()[0].schema(), other.id);
+        fieldSetFlags()[0] = true;
+      }
+      if (isValidValue(fields()[1], other.name)) {
+        this.name = data().deepCopy(fields()[1].schema(), other.name);
+        fieldSetFlags()[1] = true;
+      }
+      if (isValidValue(fields()[2], other.nicknames)) {
+        this.nicknames = data().deepCopy(fields()[2].schema(), other.nicknames);
+        fieldSetFlags()[2] = true;
+      }
+      if (isValidValue(fields()[3], other.relatedids)) {
+        this.relatedids = data().deepCopy(fields()[3].schema(), other.relatedids);
+        fieldSetFlags()[3] = true;
+      }
+      if (isValidValue(fields()[4], other.typeEnum)) {
+        this.typeEnum = data().deepCopy(fields()[4].schema(), other.typeEnum);
+        fieldSetFlags()[4] = true;
+      }
+    }
+
+    /** Gets the value of the 'id' field */
+    public java.lang.Integer getId() {
+      return id;
+    }
+    
+    /** Sets the value of the 'id' field */
+    public org.apache.avro.FooBarSpecificRecord.Builder setId(int value) {
+      validate(fields()[0], value);
+      this.id = value;
+      fieldSetFlags()[0] = true;
+      return this; 
+    }
+    
+    /** Checks whether the 'id' field has been set */
+    public boolean hasId() {
+      return fieldSetFlags()[0];
+    }
+    
+    /** Clears the value of the 'id' field */
+    public org.apache.avro.FooBarSpecificRecord.Builder clearId() {
+      fieldSetFlags()[0] = false;
+      return this;
+    }
+
+    /** Gets the value of the 'name' field */
+    public java.lang.String getName() {
+      return name;
+    }
+    
+    /** Sets the value of the 'name' field */
+    public org.apache.avro.FooBarSpecificRecord.Builder setName(java.lang.String value) {
+      validate(fields()[1], value);
+      this.name = value;
+      fieldSetFlags()[1] = true;
+      return this; 
+    }
+    
+    /** Checks whether the 'name' field has been set */
+    public boolean hasName() {
+      return fieldSetFlags()[1];
+    }
+    
+    /** Clears the value of the 'name' field */
+    public org.apache.avro.FooBarSpecificRecord.Builder clearName() {
+      name = null;
+      fieldSetFlags()[1] = false;
+      return this;
+    }
+
+    /** Gets the value of the 'nicknames' field */
+    public java.util.List<java.lang.String> getNicknames() {
+      return nicknames;
+    }
+    
+    /** Sets the value of the 'nicknames' field */
+    public org.apache.avro.FooBarSpecificRecord.Builder setNicknames(java.util.List<java.lang.String> value) {
+      validate(fields()[2], value);
+      this.nicknames = value;
+      fieldSetFlags()[2] = true;
+      return this; 
+    }
+    
+    /** Checks whether the 'nicknames' field has been set */
+    public boolean hasNicknames() {
+      return fieldSetFlags()[2];
+    }
+    
+    /** Clears the value of the 'nicknames' field */
+    public org.apache.avro.FooBarSpecificRecord.Builder clearNicknames() {
+      nicknames = null;
+      fieldSetFlags()[2] = false;
+      return this;
+    }
+
+    /** Gets the value of the 'relatedids' field */
+    public java.util.List<java.lang.Integer> getRelatedids() {
+      return relatedids;
+    }
+    
+    /** Sets the value of the 'relatedids' field */
+    public org.apache.avro.FooBarSpecificRecord.Builder setRelatedids(java.util.List<java.lang.Integer> value) {
+      validate(fields()[3], value);
+      this.relatedids = value;
+      fieldSetFlags()[3] = true;
+      return this; 
+    }
+    
+    /** Checks whether the 'relatedids' field has been set */
+    public boolean hasRelatedids() {
+      return fieldSetFlags()[3];
+    }
+    
+    /** Clears the value of the 'relatedids' field */
+    public org.apache.avro.FooBarSpecificRecord.Builder clearRelatedids() {
+      relatedids = null;
+      fieldSetFlags()[3] = false;
+      return this;
+    }
+
+    /** Gets the value of the 'typeEnum' field */
+    public org.apache.avro.TypeEnum getTypeEnum() {
+      return typeEnum;
+    }
+    
+    /** Sets the value of the 'typeEnum' field */
+    public org.apache.avro.FooBarSpecificRecord.Builder setTypeEnum(org.apache.avro.TypeEnum value) {
+      validate(fields()[4], value);
+      this.typeEnum = value;
+      fieldSetFlags()[4] = true;
+      return this; 
+    }
+    
+    /** Checks whether the 'typeEnum' field has been set */
+    public boolean hasTypeEnum() {
+      return fieldSetFlags()[4];
+    }
+    
+    /** Clears the value of the 'typeEnum' field */
+    public org.apache.avro.FooBarSpecificRecord.Builder clearTypeEnum() {
+      typeEnum = null;
+      fieldSetFlags()[4] = false;
+      return this;
+    }
+
+    @Override
+    public FooBarSpecificRecord build() {
+      try {
+        FooBarSpecificRecord record = new FooBarSpecificRecord();
+        record.id = fieldSetFlags()[0] ? this.id : (java.lang.Integer) defaultValue(fields()[0]);
+        record.name = fieldSetFlags()[1] ? this.name : (java.lang.String) defaultValue(fields()[1]);
+        record.nicknames = fieldSetFlags()[2] ? this.nicknames : (java.util.List<java.lang.String>) defaultValue(fields()[2]);
+        record.relatedids = fieldSetFlags()[3] ? this.relatedids : (java.util.List<java.lang.Integer>) defaultValue(fields()[3]);
+        record.typeEnum = fieldSetFlags()[4] ? this.typeEnum : (org.apache.avro.TypeEnum) defaultValue(fields()[4]);
+        return record;
+      } catch (Exception e) {
+        throw new org.apache.avro.AvroRuntimeException(e);
+      }
+    }
+  }
+
+  private static final org.apache.avro.io.DatumWriter
+    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);  
+
+  @Override public void writeExternal(java.io.ObjectOutput out)
+    throws java.io.IOException {
+    WRITER$.write(this, org.apache.avro.specific.SpecificData.getEncoder(out));
+  }
+
+  private static final org.apache.avro.io.DatumReader
+    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);  
+
+  @Override public void readExternal(java.io.ObjectInput in)
+    throws java.io.IOException {
+    READER$.read(this, org.apache.avro.specific.SpecificData.getDecoder(in));
+  }
+
+}
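
The generated builder validates each field and fills unset ones from schema defaults; since none of the fields above declares a default, the sketch below sets all of them. The TypeEnum symbol is an assumption, as that enum is generated elsewhere in this import:

    import java.util.Arrays;

    public class BuilderDemo {
      public static void main(String[] args) {
        org.apache.avro.FooBarSpecificRecord record =
            org.apache.avro.FooBarSpecificRecord.newBuilder()
                .setId(42)
                .setName("foo")
                .setNicknames(Arrays.asList("f", "oo"))
                .setRelatedids(Arrays.asList(1, 2, 3))
                .setTypeEnum(org.apache.avro.TypeEnum.b) // assumed symbol
                .build();
        System.out.println(record.getName());
      }
    }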
diff --git a/lang/java/avro/src/test/java/org/apache/avro/FooRecord.java b/lang/java/avro/src/test/java/org/apache/avro/FooRecord.java
new file mode 100644
index 0000000..0447f01
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/FooRecord.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+public class FooRecord {
+  private int fooCount;
+
+  public FooRecord() {
+  }
+
+  public FooRecord(int fooCount) {
+    this.fooCount = fooCount;
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that instanceof FooRecord) {
+      return this.fooCount == ((FooRecord) that).fooCount;
+    }
+    return false;
+  }
+
+  @Override
+  public int hashCode() {
+    return fooCount;
+  }
+
+  @Override
+  public String toString() {
+    return FooRecord.class.getSimpleName() + "{count=" + fooCount + "}";
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/GenerateBlockingData.java b/lang/java/avro/src/test/java/org/apache/avro/GenerateBlockingData.java
new file mode 100644
index 0000000..72a632b
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/GenerateBlockingData.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+/**
+ * Generates a file of objects of a specific schema (one without nested
+ * arrays or maps) filled with random data. This is only for testing.
+ * The generated file contains the count of objects of the specified schema,
+ * followed by objects serialized using BlockingBinaryEncoder. No other
+ * metadata is written to the file. See interoptests.py for more details
+ * (interoptests.py reads the file generated here and validates the contents).
+ */
+public class GenerateBlockingData {
+  private static final int SYNC_INTERVAL = 1000;
+  private static ByteArrayOutputStream buffer =
+                      new ByteArrayOutputStream(2*SYNC_INTERVAL);
+  
+  private static EncoderFactory factory = EncoderFactory.get();
+  private static Encoder bufOut = EncoderFactory.get().blockingBinaryEncoder(
+      buffer, null);
+  private static int blockCount;
+
+  private static void writeBlock(Encoder vout, FileOutputStream out)
+               throws IOException{
+    vout.writeLong(blockCount);
+    bufOut.flush();
+    buffer.writeTo(out);
+    buffer.reset();
+    blockCount = 0;
+  }
+  
+  public static void main(String[] args) throws Exception {
+    if (args.length != 3) {
+      System.out.println(
+          "Usage: GenerateBlockingData <schemafile> <outputfile> <count>");
+      System.exit(-1);
+    }
+    
+    Schema sch = Schema.parse(new File(args[0]));
+    File outputFile = new File(args[1]);
+    int numObjects = Integer.parseInt(args[2]);
+    
+    FileOutputStream out = new FileOutputStream(outputFile, false);
+    DatumWriter<Object> dout = new GenericDatumWriter<Object>();
+    dout.setSchema(sch);
+    Encoder vout = factory.directBinaryEncoder(out, null);
+    vout.writeLong(numObjects); // metadata:the count of objects in the file
+    
+    for (Object datum : new RandomData(sch, numObjects)) {
+      dout.write(datum, bufOut);
+      blockCount++;
+      if (buffer.size() >= SYNC_INTERVAL) {
+        writeBlock(vout, out);
+      }
+    }
+    if (blockCount > 0) {
+      writeBlock(vout, out);
+    }
+    out.flush();
+    out.close();
+  }
+}
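
For symmetry, a hedged sketch of the reader side, assuming exactly the layout the writer above produces (a leading object count, then repeated blocks of a count followed by that many serialized objects):

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.InputStream;
    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericDatumReader;
    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.DecoderFactory;

    public class ReadBlockingData {
      public static void main(String[] args) throws Exception {
        Schema sch = new Schema.Parser().parse(new File(args[0]));
        InputStream in = new FileInputStream(args[1]);
        BinaryDecoder din = DecoderFactory.get().binaryDecoder(in, null);
        GenericDatumReader<Object> reader = new GenericDatumReader<Object>(sch);
        long total = din.readLong();      // the leading count written by main() above
        for (long seen = 0; seen < total; ) {
          long inBlock = din.readLong();  // per-block count from writeBlock()
          for (long i = 0; i < inBlock; i++, seen++) {
            System.out.println(reader.read(null, din));
          }
        }
        in.close();
      }
    }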
diff --git a/lang/java/avro/src/test/java/org/apache/avro/RandomData.java b/lang/java/avro/src/test/java/org/apache/avro/RandomData.java
new file mode 100644
index 0000000..c92a980
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/RandomData.java
@@ -0,0 +1,144 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericArray;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.util.Utf8;
+
+/** Generates schema data as Java objects with random values. */
+public class RandomData implements Iterable<Object> {
+  private final Schema root;
+  private final long seed;
+  private final int count;
+
+  public RandomData(Schema schema, int count) {
+    this(schema, count, System.currentTimeMillis());
+  }
+
+  public RandomData(Schema schema, int count, long seed) {
+    this.root = schema;
+    this.seed = seed;
+    this.count = count;
+  }
+  
+  public Iterator<Object> iterator() {
+    return new Iterator<Object>() {
+      private int n;
+      private Random random = new Random(seed);
+      public boolean hasNext() { return n < count; }
+      public Object next() {
+        n++;
+        return generate(root, random, 0);
+      }
+      public void remove() { throw new UnsupportedOperationException(); }
+    };
+  }
+  
+  @SuppressWarnings(value="unchecked")
+  private static Object generate(Schema schema, Random random, int d) {
+    switch (schema.getType()) {
+    case RECORD:
+      GenericRecord record = new GenericData.Record(schema);
+      for (Schema.Field field : schema.getFields())
+        record.put(field.name(), generate(field.schema(), random, d+1));
+      return record;
+    case ENUM:
+      List<String> symbols = schema.getEnumSymbols();
+      return new GenericData.EnumSymbol
+        (schema, symbols.get(random.nextInt(symbols.size())));
+    case ARRAY:
+      int length = (random.nextInt(5)+2)-d;
+      GenericArray<Object> array =
+        new GenericData.Array(length<=0?0:length, schema);
+      for (int i = 0; i < length; i++)
+        array.add(generate(schema.getElementType(), random, d+1));
+      return array;
+    case MAP:
+      length = (random.nextInt(5)+2)-d;
+      Map<Object,Object> map = new HashMap<Object,Object>(length<=0?0:length);
+      for (int i = 0; i < length; i++) {
+        map.put(randomUtf8(random, 40),
+                generate(schema.getValueType(), random, d+1));
+      }
+      return map;
+    case UNION:
+      List<Schema> types = schema.getTypes();
+      return generate(types.get(random.nextInt(types.size())), random, d);
+    case FIXED:
+      byte[] bytes = new byte[schema.getFixedSize()];
+      random.nextBytes(bytes);
+      return new GenericData.Fixed(schema, bytes);
+    case STRING:  return randomUtf8(random, 40);
+    case BYTES:   return randomBytes(random, 40);
+    case INT:     return random.nextInt();
+    case LONG:    return random.nextLong();
+    case FLOAT:   return random.nextFloat();
+    case DOUBLE:  return random.nextDouble();
+    case BOOLEAN: return random.nextBoolean();
+    case NULL:    return null;
+    default: throw new RuntimeException("Unknown type: "+schema);
+    }
+  }
+
+  private static Utf8 randomUtf8(Random rand, int maxLength) {
+    Utf8 utf8 = new Utf8().setLength(rand.nextInt(maxLength));
+    for (int i = 0; i < utf8.getLength(); i++) {
+      utf8.getBytes()[i] = (byte)('a'+rand.nextInt('z'-'a'));
+    }
+    return utf8;
+  }
+
+  private static ByteBuffer randomBytes(Random rand, int maxLength) {
+    ByteBuffer bytes = ByteBuffer.allocate(rand.nextInt(maxLength));
+    bytes.limit(bytes.capacity());
+    rand.nextBytes(bytes.array());
+    return bytes;
+  }
+
+  public static void main(String[] args) throws Exception {
+    if (args.length < 3 || args.length > 4) {
+      System.out.println("Usage: RandomData <schemafile> <outputfile> <count> [codec]");
+      System.exit(-1);
+    }
+    Schema sch = Schema.parse(new File(args[0]));
+    DataFileWriter<Object> writer =
+      new DataFileWriter<Object>(new GenericDatumWriter<Object>());
+    writer.setCodec(CodecFactory.fromString(args.length >= 4 ? args[3] : "null"));
+    writer.create(sch, new File(args[1]));
+    try {
+      for (Object datum : new RandomData(sch, Integer.parseInt(args[2]))) {
+        writer.append(datum);
+      }
+    } finally {
+      writer.close();
+    }
+  }
+}
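
Beyond the command-line entry point, the iterator is handy inside tests that need reproducible inputs. A brief sketch with a stand-in schema, assuming RandomData is on the test classpath:

    import org.apache.avro.RandomData;
    import org.apache.avro.Schema;

    public class RandomDataDemo {
      public static void main(String[] args) {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"T\",\"fields\":"
            + "[{\"name\":\"x\",\"type\":\"long\"}]}");
        // A fixed seed yields the same sequence on every run, which is
        // how a writer and a verifying reader can compare their data.
        for (Object datum : new RandomData(schema, 5, 42L)) {
          System.out.println(datum);
        }
      }
    }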
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestCircularReferences.java b/lang/java/avro/src/test/java/org/apache/avro/TestCircularReferences.java
new file mode 100644
index 0000000..79d8c1e
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestCircularReferences.java
@@ -0,0 +1,405 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericData.Record;
+import org.apache.avro.generic.IndexedRecord;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.util.Utf8;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+public class TestCircularReferences {
+
+  @Rule
+  public TemporaryFolder temp = new TemporaryFolder();
+
+  public static class Reference extends LogicalType {
+    private static final String REFERENCE = "reference";
+    private static final String REF_FIELD_NAME = "ref-field-name";
+
+    private final String refFieldName;
+
+    public Reference(String refFieldName) {
+      super(REFERENCE);
+      this.refFieldName = refFieldName;
+    }
+
+    public Reference(Schema schema) {
+      super(REFERENCE);
+      this.refFieldName = schema.getProp(REF_FIELD_NAME);
+    }
+
+    @Override
+    public Schema addToSchema(Schema schema) {
+      super.addToSchema(schema);
+      schema.addProp(REF_FIELD_NAME, refFieldName);
+      return schema;
+    }
+
+    @Override
+    public String getName() {
+      return REFERENCE;
+    }
+
+    public String getRefFieldName() {
+      return refFieldName;
+    }
+
+    @Override
+    public void validate(Schema schema) {
+      super.validate(schema);
+      if (schema.getField(refFieldName) == null) {
+        throw new IllegalArgumentException("Invalid field name for reference field: " + refFieldName);
+      }
+    }
+  }
+
+  public static class Referenceable extends LogicalType {
+    private static final String REFERENCEABLE = "referenceable";
+    private static final String ID_FIELD_NAME = "id-field-name";
+
+    private final String idFieldName;
+
+    public Referenceable(String idFieldName) {
+      super(REFERENCEABLE);
+      this.idFieldName = idFieldName;
+    }
+
+    public Referenceable(Schema schema) {
+      super(REFERENCEABLE);
+      this.idFieldName = schema.getProp(ID_FIELD_NAME);
+    }
+
+    @Override
+    public Schema addToSchema(Schema schema) {
+      super.addToSchema(schema);
+      schema.addProp(ID_FIELD_NAME, idFieldName);
+      return schema;
+    }
+
+    @Override
+    public String getName() {
+      return REFERENCEABLE;
+    }
+
+    public String getIdFieldName() {
+      return idFieldName;
+    }
+
+    @Override
+    public void validate(Schema schema) {
+      super.validate(schema);
+      Schema.Field idField = schema.getField(idFieldName);
+      if (idField == null || idField.schema().getType() != Schema.Type.LONG) {
+        throw new IllegalArgumentException("Invalid ID field: " + idFieldName + ": " + idField);
+      }
+    }
+  }
+
+  @BeforeClass
+  public static void addReferenceTypes() {
+    LogicalTypes.register(Referenceable.REFERENCEABLE, new LogicalTypes.LogicalTypeFactory() {
+      @Override
+      public LogicalType fromSchema(Schema schema) {
+        return new Referenceable(schema);
+      }
+    });
+    LogicalTypes.register(Reference.REFERENCE, new LogicalTypes.LogicalTypeFactory() {
+      @Override
+      public LogicalType fromSchema(Schema schema) {
+        return new Reference(schema);
+      }
+    });
+  }
+
+  public static class ReferenceManager {
+    private interface Callback {
+      void set(Object referenceable);
+    }
+
+    private final Map<Long, Object> references = new HashMap<Long, Object>();
+    private final Map<Object, Long> ids = new IdentityHashMap<Object, Long>();
+    private final Map<Long, List<Callback>> callbacksById = new HashMap<Long, List<Callback>>();
+    private final ReferenceableTracker tracker = new ReferenceableTracker();
+    private final ReferenceHandler handler = new ReferenceHandler();
+
+    public ReferenceableTracker getTracker() {
+      return tracker;
+    }
+
+    public ReferenceHandler getHandler() {
+      return handler;
+    }
+
+    public class ReferenceableTracker extends Conversion<IndexedRecord> {
+      @Override
+      @SuppressWarnings("unchecked")
+      public Class<IndexedRecord> getConvertedType() {
+        return (Class) Record.class;
+      }
+
+      @Override
+      public String getLogicalTypeName() {
+        return Referenceable.REFERENCEABLE;
+      }
+
+      @Override
+      public IndexedRecord fromRecord(IndexedRecord value, Schema schema, LogicalType type) {
+        // read side
+        long id = getId(value, schema);
+
+        // keep track of this for later references
+        references.put(id, value);
+
+        // call any callbacks waiting to resolve this id
+        List<Callback> callbacks = callbacksById.get(id);
+        if (callbacks != null) { // none may be registered for this id yet
+          for (Callback callback : callbacks) {
+            callback.set(value);
+          }
+        }
+
+        return value;
+      }
+
+      @Override
+      public IndexedRecord toRecord(IndexedRecord value, Schema schema, LogicalType type) {
+        // write side
+        long id = getId(value, schema);
+
+        // keep track of this for later references
+        //references.put(id, value);
+        ids.put(value, id);
+
+        return value;
+      }
+
+      private long getId(IndexedRecord referenceable, Schema schema) {
+        Referenceable info = (Referenceable) schema.getLogicalType();
+        int idField = schema.getField(info.getIdFieldName()).pos();
+        return (Long) referenceable.get(idField);
+      }
+    }
+
+    public class ReferenceHandler extends Conversion<IndexedRecord> {
+      @Override
+      @SuppressWarnings("unchecked")
+      public Class<IndexedRecord> getConvertedType() {
+        return (Class) Record.class;
+      }
+
+      @Override
+      public String getLogicalTypeName() {
+        return Reference.REFERENCE;
+      }
+
+      @Override
+      public IndexedRecord fromRecord(final IndexedRecord record, Schema schema, LogicalType type) {
+        // read side: resolve the record or save a callback
+        final Schema.Field refField = schema.getField(((Reference) type).getRefFieldName());
+
+        Long id = (Long) record.get(refField.pos());
+        if (id != null) {
+          if (references.containsKey(id)) {
+            record.put(refField.pos(), references.get(id));
+
+          } else {
+            List<Callback> callbacks = callbacksById.get(id);
+            if (callbacks == null) {
+              callbacks = new ArrayList<Callback>();
+              callbacksById.put(id, callbacks);
+            }
+            // add a callback to resolve this reference when the id is available
+            callbacks.add(new Callback() {
+              @Override
+              public void set(Object referenceable) {
+                record.put(refField.pos(), referenceable);
+              }
+            });
+          }
+        }
+
+        return record;
+      }
+
+      @Override
+      public IndexedRecord toRecord(IndexedRecord record, Schema schema, LogicalType type) {
+        // write side: replace a referenced field with its id
+        Schema.Field refField = schema.getField(((Reference) type).getRefFieldName());
+        IndexedRecord referenced = (IndexedRecord) record.get(refField.pos());
+        if (referenced == null) {
+          return record;
+        }
+
+        // hijack the field to return the id instead of the ref
+        return new HijackingIndexedRecord(record, refField.pos(), ids.get(referenced));
+      }
+    }
+
+    private static class HijackingIndexedRecord implements IndexedRecord {
+      private final IndexedRecord wrapped;
+      private final int index;
+      private final Object data;
+
+      public HijackingIndexedRecord(IndexedRecord wrapped, int index, Object data) {
+        this.wrapped = wrapped;
+        this.index = index;
+        this.data = data;
+      }
+
+      @Override
+      public void put(int i, Object v) {
+        throw new RuntimeException("[BUG] This is a read-only class.");
+      }
+
+      @Override
+      public Object get(int i) {
+        if (i == index) {
+          return data;
+        }
+        return wrapped.get(i);
+      }
+
+      @Override
+      public Schema getSchema() {
+        return wrapped.getSchema();
+      }
+    }
+  }
+
+  @Test
+  public void test() throws IOException {
+    ReferenceManager manager = new ReferenceManager();
+    GenericData model = new GenericData();
+    model.addLogicalTypeConversion(manager.getTracker());
+    model.addLogicalTypeConversion(manager.getHandler());
+
+    Schema parentSchema = Schema.createRecord("Parent", null, null, false);
+
+    Schema parentRefSchema = Schema.createUnion(
+        Schema.create(Schema.Type.NULL),
+        Schema.create(Schema.Type.LONG),
+        parentSchema);
+    Reference parentRef = new Reference("parent");
+
+    List<Schema.Field> childFields = new ArrayList<Schema.Field>();
+    childFields.add(new Schema.Field("c", Schema.create(Schema.Type.STRING), null, null));
+    childFields.add(new Schema.Field("parent", parentRefSchema, null, null));
+    Schema childSchema = parentRef.addToSchema(
+        Schema.createRecord("Child", null, null, false, childFields));
+
+    List<Schema.Field> parentFields = new ArrayList<Schema.Field>();
+    parentFields.add(new Schema.Field("id", Schema.create(Schema.Type.LONG), null, null));
+    parentFields.add(new Schema.Field("p", Schema.create(Schema.Type.STRING), null, null));
+    parentFields.add(new Schema.Field("child", childSchema, null, null));
+    parentSchema.setFields(parentFields);
+    Referenceable idRef = new Referenceable("id");
+
+    Schema schema = idRef.addToSchema(parentSchema);
+
+    System.out.println("Schema: " + schema.toString(true));
+
+    Record parent = new Record(schema);
+    parent.put("id", 1L);
+    parent.put("p", "parent data!");
+
+    Record child = new Record(childSchema);
+    child.put("c", "child data!");
+    child.put("parent", parent);
+
+    parent.put("child", child);
+
+    // serialization round trip
+    File data = write(model, schema, parent);
+    List<Record> records = read(model, schema, data);
+
+    Record actual = records.get(0);
+
+    // because the record is a recursive structure, equals won't work
+    Assert.assertEquals("Should correctly read back the parent id",
+        1L, actual.get("id"));
+    Assert.assertEquals("Should correctly read back the parent data",
+        new Utf8("parent data!"), actual.get("p"));
+
+    Record actualChild = (Record) actual.get("child");
+    Assert.assertEquals("Should correctly read back the child data",
+        new Utf8("child data!"), actualChild.get("c"));
+    Object childParent = actualChild.get("parent");
+    Assert.assertTrue("Should have a parent Record object",
+        childParent instanceof Record);
+
+    Record childParentRecord = (Record) actualChild.get("parent");
+    Assert.assertEquals("Should have the right parent id",
+        1L, childParentRecord.get("id"));
+    Assert.assertEquals("Should have the right parent data",
+        new Utf8("parent data!"), childParentRecord.get("p"));
+  }
+
+  private <D> List<D> read(GenericData model, Schema schema, File file) throws IOException {
+    DatumReader<D> reader = newReader(model, schema);
+    List<D> data = new ArrayList<D>();
+    FileReader<D> fileReader = null;
+
+    try {
+      fileReader = new DataFileReader<D>(file, reader);
+      for (D datum : fileReader) {
+        data.add(datum);
+      }
+    } finally {
+      if (fileReader != null) {
+        fileReader.close();
+      }
+    }
+
+    return data;
+  }
+
+  @SuppressWarnings("unchecked")
+  private <D> DatumReader<D> newReader(GenericData model, Schema schema) {
+    return model.createDatumReader(schema);
+  }
+
+  @SuppressWarnings("unchecked")
+  private <D> File write(GenericData model, Schema schema, D... data) throws IOException {
+    File file = temp.newFile();
+    DatumWriter<D> writer = model.createDatumWriter(schema);
+    DataFileWriter<D> fileWriter = new DataFileWriter<D>(writer);
+
+    try {
+      fileWriter.create(schema, file);
+      for (D datum : data) {
+        fileWriter.append(datum);
+      }
+    } finally {
+      fileWriter.close();
+    }
+
+    return file;
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java
new file mode 100644
index 0000000..21c99d8
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java
@@ -0,0 +1,391 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+
+import org.junit.Assert;
+
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.file.SeekableFileInput;
+import org.apache.avro.file.Syncable;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DatumReader;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@RunWith(Parameterized.class)
+public class TestDataFile {
+  private static final Logger LOG =
+    LoggerFactory.getLogger(TestDataFile.class);
+
+  CodecFactory codec = null;
+  public TestDataFile(CodecFactory codec) {
+    this.codec = codec;
+    LOG.info("Running with codec: " + codec);
+  }
+
+  @Parameters
+  public static List<Object[]> codecs() {
+    List<Object[]> r = new ArrayList<Object[]>();
+    r.add(new Object[] { null });
+    r.add(new Object[] { CodecFactory.deflateCodec(0) });
+    r.add(new Object[] { CodecFactory.deflateCodec(1) });
+    r.add(new Object[] { CodecFactory.deflateCodec(9) });
+    r.add(new Object[] { CodecFactory.nullCodec() });
+    r.add(new Object[] { CodecFactory.snappyCodec() });
+    r.add(new Object[] { CodecFactory.xzCodec(0) });
+    r.add(new Object[] { CodecFactory.xzCodec(1) });
+    r.add(new Object[] { CodecFactory.xzCodec(6) });
+    return r;
+  }
+
+  private static final int COUNT =
+    Integer.parseInt(System.getProperty("test.count", "200"));
+  private static final boolean VALIDATE =
+    !"false".equals(System.getProperty("test.validate", "true"));
+  private static final File DIR
+    = new File(System.getProperty("test.dir", "/tmp"));
+  private static final long SEED = System.currentTimeMillis();
+  private static final String SCHEMA_JSON =
+    "{\"type\": \"record\", \"name\": \"Test\", \"fields\": ["
+    +"{\"name\":\"stringField\", \"type\":\"string\"},"
+    +"{\"name\":\"longField\", \"type\":\"long\"}]}";
+  private static final Schema SCHEMA = new Schema.Parser().parse(SCHEMA_JSON);
+
+  private File makeFile() {
+    return new File(DIR, "test-" + codec + ".avro");
+  }
+
+  @Test public void runTestsInOrder() throws Exception {
+    testGenericWrite();
+    testGenericRead();
+    testSplits();
+    testSyncDiscovery();
+    testGenericAppend();
+    testReadWithHeader();
+    testFSync(false);
+    testFSync(true);
+  }
+
+  public void testGenericWrite() throws IOException {
+    DataFileWriter<Object> writer =
+      new DataFileWriter<Object>(new GenericDatumWriter<Object>())
+      .setSyncInterval(100);
+    if (codec != null) {
+      writer.setCodec(codec);
+    }
+    writer.create(SCHEMA, makeFile());
+    try {
+      int count = 0;
+      for (Object datum : new RandomData(SCHEMA, COUNT, SEED)) {
+        writer.append(datum);
+        if (++count%(COUNT/3) == 0)
+          writer.sync();                          // force some syncs mid-file
+        if (count == 5) {
+          // force a write of an invalid record
+          boolean threwProperly = false;
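+          // threwProperly is set just before append and cleared only if the
+          // append returns, so it stays true exactly when the expected
+          // exception was thrown.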
+          try {
+            GenericData.Record record = (GenericData.Record) datum;
+            record.put(1, null);
+            threwProperly = true;
+            writer.append(record);
+            threwProperly = false;
+          } catch (DataFileWriter.AppendWriteException e) {
+            System.out.println("Ignoring: "+e);
+          }
+          Assert.assertTrue("failed to throw when expected", threwProperly);
+        }
+      }
+    } finally {
+      writer.close();
+    }
+
+    // Ensure that a second call to close doesn't raise an exception. (AVRO-1249)
+    Exception doubleCloseEx = null;
+
+    try {
+      writer.close();
+    } catch (Exception e) {
+      doubleCloseEx = e;
+    }
+
+    Assert.assertNull("Double close() threw an unexpected exception", doubleCloseEx);
+  }
+
+  public void testGenericRead() throws IOException {
+    DataFileReader<Object> reader =
+      new DataFileReader<Object>(makeFile(), new GenericDatumReader<Object>());
+    try {
+      Object datum = null;
+      if (VALIDATE) {
+        for (Object expected : new RandomData(SCHEMA, COUNT, SEED)) {
+          datum = reader.next(datum);
+          assertEquals(expected, datum);
+        }
+      } else {
+        for (int i = 0; i < COUNT; i++) {
+          datum = reader.next(datum);
+        }
+      }
+    } finally {
+      reader.close();
+    }
+  }
+
+  public void testSplits() throws IOException {
+    File file = makeFile();
+    DataFileReader<Object> reader =
+      new DataFileReader<Object>(file, new GenericDatumReader<Object>());
+    Random rand = new Random(SEED);
+    try {
+      int splits = 10;                            // number of splits
+      int length = (int)file.length();            // length of file
+      int end = length;                           // end of split
+      int remaining = end;                        // bytes remaining
+      int count = 0;                              // count of entries
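+      // Walk backwards through the file in randomly sized splits: sync(start)
+      // advances to the first sync marker after the split start, and records
+      // are read until pastSync(end), so every record is counted exactly once.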
+      while (remaining > 0) {
+        int start = Math.max(0, end - rand.nextInt(2*length/splits));
+        reader.sync(start);                       // count entries in split
+        while (!reader.pastSync(end)) {
+          reader.next();
+          count++;
+        }
+        remaining -= end-start;
+        end = start;
+      }
+      assertEquals(COUNT, count);
+    } finally {
+      reader.close();
+    }
+  }
+
+  public void testSyncDiscovery() throws IOException {
+    File file = makeFile();
+    DataFileReader<Object> reader =
+      new DataFileReader<Object>(file, new GenericDatumReader<Object>());
+    try {
+      // discover the sync points
+      ArrayList<Long> syncs = new ArrayList<Long>();
+      long previousSync = -1;
+      while (reader.hasNext()) {
+        if (reader.previousSync() != previousSync) {
+          previousSync = reader.previousSync();
+          syncs.add(previousSync);
+        }
+        reader.next();
+      }
+      // confirm that the first point is the one reached by sync(0)
+      reader.sync(0);
+      assertEquals((long)reader.previousSync(), (long)syncs.get(0));
+      // and confirm that all points are reachable
+      for (Long sync : syncs) {
+        reader.seek(sync);
+        assertNotNull(reader.next());
+      }
+    } finally {
+      reader.close();
+    }
+  }
+
+  public void testGenericAppend() throws IOException {
+    File file = makeFile();
+    long start = file.length();
+    DataFileWriter<Object> writer =
+      new DataFileWriter<Object>(new GenericDatumWriter<Object>())
+      .appendTo(file);
+    try {
+      for (Object datum : new RandomData(SCHEMA, COUNT, SEED+1)) {
+        writer.append(datum);
+      }
+    } finally {
+      writer.close();
+    }
+    DataFileReader<Object> reader =
+      new DataFileReader<Object>(file, new GenericDatumReader<Object>());
+    try {
+      reader.seek(start);
+      Object datum = null;
+      if (VALIDATE) {
+        for (Object expected : new RandomData(SCHEMA, COUNT, SEED+1)) {
+          datum = reader.next(datum);
+          assertEquals(expected, datum);
+        }
+      } else {
+        for (int i = 0; i < COUNT; i++) {
+          datum = reader.next(datum);
+        }
+      }
+    } finally {
+      reader.close();
+    }
+  }  
+
+  public void testReadWithHeader() throws IOException {
+    File file = makeFile();
+    DataFileReader<Object> reader =
+      new DataFileReader<Object>(file, new GenericDatumReader<Object>());
+    // get a header for this file
+    DataFileStream.Header header = reader.getHeader();
+    // re-open to an arbitrary position near the middle, with sync == true
+    SeekableFileInput sin = new SeekableFileInput(file);
+    sin.seek(sin.length() / 2);
+    reader = DataFileReader.<Object>openReader(sin, new GenericDatumReader<Object>(),
+        header, true);
+    assertNotNull("Should be able to reopen from arbitrary point", reader.next());
+    long validPos = reader.previousSync();
+    // post sync, we know of a valid sync point: re-open with seek (sync == false)
+    sin.seek(validPos);
+    reader = DataFileReader.<Object>openReader(sin, new GenericDatumReader<Object>(),
+        header, false);
+    assertEquals("Should not move from sync point on reopen", validPos, sin.tell());
+    assertNotNull("Should be able to reopen at sync point", reader.next());
+  }
+
+  @Test public void testSyncInHeader() throws IOException {
+    DataFileReader<Object> reader = new DataFileReader<Object>
+      (new File("../../../share/test/data/syncInMeta.avro"),
+       new GenericDatumReader<Object>());
+    reader.sync(0);
+    for (Object datum : reader)
+      assertNotNull(datum);
+  }
+
+  @Test public void test12() throws IOException {
+    readFile(new File("../../../share/test/data/test.avro12"),
+             new GenericDatumReader<Object>());
+  }
+
+  @Test
+  public void testFlushCount() throws IOException {
+    DataFileWriter<Object> writer =
+      new DataFileWriter<Object>(new GenericDatumWriter<Object>());
+    writer.setFlushOnEveryBlock(false);
+    TestingByteArrayOutputStream out = new TestingByteArrayOutputStream();
+    writer.create(SCHEMA, out);
+    int currentCount = 0;
+    int flushCounter = 0;
+    try {
+      for (Object datum : new RandomData(SCHEMA, COUNT, SEED+1)) {
+        currentCount++;
+        writer.append(datum);
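+        // With flushOnEveryBlock disabled, sync() ends the current block but
+        // should not flush the underlying stream.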
+        writer.sync();
+        if (currentCount % 10 == 0) {
+          flushCounter++;
+          writer.flush();
+        }
+      }
+    } finally {
+      writer.close();
+    }
+    System.out.println("Total number of flushes: " + out.flushCount);
+    // Unfortunately, the underlying buffered output stream might flush data
+    // to disk when the buffer becomes full, so the only check we can
+    // accurately do is that each sync did not lead to a flush and that the
+    // file was flushed at least as many times as we called flush. Generally
+    // noticed that out.flushCount is almost always 24 though.
+    Assert.assertTrue(out.flushCount < currentCount &&
+      out.flushCount >= flushCounter);
+  }
+
+  private void testFSync(boolean useFile) throws IOException {
+    DataFileWriter<Object> writer =
+      new DataFileWriter<Object>(new GenericDatumWriter<Object>());
+    writer.setFlushOnEveryBlock(false);
+    TestingByteArrayOutputStream out = new TestingByteArrayOutputStream();
+    if (useFile) {
+      File f = makeFile();
+      SeekableFileInput in = new SeekableFileInput(f);
+      writer.appendTo(in, out);
+    } else {
+      writer.create(SCHEMA, out);
+    }
+    int currentCount = 0;
+    int syncCounter = 0;
+    try {
+      for (Object datum : new RandomData(SCHEMA, COUNT, SEED+1)) {
+        currentCount++;
+        writer.append(datum);
+        if (currentCount % 10 == 0) {
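+          // fSync() flushes the writer and calls sync() on the underlying
+          // Syncable stream, which the test stream counts.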
+          writer.fSync();
+          syncCounter++;
+        }
+      }
+    } finally {
+      writer.close();
+    }
+    System.out.println("Total number of syncs: " + out.syncCount);
+    Assert.assertEquals(syncCounter, out.syncCount);
+  }
+
+  static void readFile(File f, DatumReader<? extends Object> datumReader)
+    throws IOException {
+    FileReader<? extends Object> reader = DataFileReader.openReader(f, datumReader);
+    for (Object datum : reader) {
+      assertNotNull(datum);
+    }
+  }
+
+  public static void main(String[] args) throws Exception {
+    File input = new File(args[0]);
+    Schema projection = null;
+    if (args.length > 1)
+      projection = Schema.parse(new File(args[1]));
+    TestDataFile.readFile(input, new GenericDatumReader<Object>(null, projection));
+    long start = System.currentTimeMillis();
+    for (int i = 0; i < 4; i++)
+      TestDataFile.readFile(input, new GenericDatumReader<Object>(null, projection));
+    System.out.println("Time: "+(System.currentTimeMillis()-start));
+  }
+
+  private class TestingByteArrayOutputStream extends ByteArrayOutputStream
+    implements Syncable {
+    private int flushCount = 0;
+    private int syncCount = 0;
+
+    @Override
+    public void flush() throws IOException {
+      super.flush();
+      flushCount++;
+    }
+
+    @Override
+    public void sync() throws IOException {
+      syncCount++;
+    }
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileConcat.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileConcat.java
new file mode 100644
index 0000000..e806857
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileConcat.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@RunWith(Parameterized.class)
+public class TestDataFileConcat {
+  private static final Logger LOG =
+    LoggerFactory.getLogger(TestDataFileConcat.class);
+
+  CodecFactory codec = null;
+  CodecFactory codec2 = null;
+  boolean recompress;
+  public TestDataFileConcat(CodecFactory codec, CodecFactory codec2, Boolean recompress) {
+    this.codec = codec;
+    this.codec2 = codec2;
+    this.recompress = recompress;
+    LOG.info("Testing concatenating files, " + codec2 + " into " + codec + 
+        " with recompress=" + recompress);
+  }
+
+  @Parameters
+  public static List<Object[]> codecs() {
+    List<Object[]> r = new ArrayList<Object[]>();
+    r.add(new Object[] { null , null, false});
+    r.add(new Object[] { null , null, true});
+    r.add(new Object[]
+        { CodecFactory.deflateCodec(1), CodecFactory.deflateCodec(6), false });
+    r.add(new Object[]
+        { CodecFactory.deflateCodec(1), CodecFactory.deflateCodec(6), true });
+    r.add(new Object[]
+        { CodecFactory.deflateCodec(3), CodecFactory.nullCodec(), false });
+    r.add(new Object[]
+        { CodecFactory.nullCodec(), CodecFactory.deflateCodec(6), false });
+    r.add(new Object[]
+            { CodecFactory.xzCodec(1), CodecFactory.xzCodec(2), false });
+    r.add(new Object[]
+            { CodecFactory.xzCodec(1), CodecFactory.xzCodec(2), true });
+    r.add(new Object[]
+            { CodecFactory.xzCodec(2), CodecFactory.nullCodec(), false });
+    r.add(new Object[]
+            { CodecFactory.nullCodec(), CodecFactory.xzCodec(2), false });
+    return r;
+  }
+
+  private static final int COUNT =
+    Integer.parseInt(System.getProperty("test.count", "200"));
+  private static final boolean VALIDATE =
+    !"false".equals(System.getProperty("test.validate", "true"));
+  private static final File DIR
+    = new File(System.getProperty("test.dir", "/tmp"));
+  private static final long SEED = System.currentTimeMillis();
+
+  private static final String SCHEMA_JSON =
+    "{\"type\": \"record\", \"name\": \"Test\", \"fields\": ["
+    +"{\"name\":\"stringField\", \"type\":\"string\"}" +
+    ","
+    +"{\"name\":\"longField\", \"type\":\"long\"}" +
+    "]}";
+  private static final Schema SCHEMA = new Schema.Parser().parse(SCHEMA_JSON);
+
+  private File makeFile(String name) {
+    return new File(DIR, "test-" + name + ".avro");
+  }
+
+  @Test
+  public void testConcatenateFiles() throws IOException {
+    System.out.println("SEED = "+SEED);
+    System.out.println("COUNT = "+COUNT);
+    for (int k = 0; k < 5; k++) {
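+      // Vary the sync interval slightly on each pass so that block
+      // boundaries fall at different record offsets.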
+      int syncInterval = 460 + k;
+      RandomData data1 = new RandomData(SCHEMA, COUNT, SEED);
+      RandomData data2 = new RandomData(SCHEMA, COUNT, SEED+1);
+      File file1 = makeFile((codec == null ? "null" : codec.toString()) + "-A");
+      File file2 = makeFile((codec2 == null ? "null" : codec2.toString()) + "-B");
+      DataFileWriter<Object> writer =
+        new DataFileWriter<Object>(new GenericDatumWriter<Object>())
+        .setSyncInterval(syncInterval);
+      if (codec != null) {
+        writer.setCodec(codec);
+      }
+      writer.create(SCHEMA, file1);
+      try {
+        for (Object datum : data1) {
+          writer.append(datum);
+        }
+      } finally {
+        writer.close();
+      }
+      DataFileWriter<Object> writer2 =
+        new DataFileWriter<Object>(new GenericDatumWriter<Object>())
+        .setSyncInterval(syncInterval);
+      if (codec2 != null) {
+        writer2.setCodec(codec2);
+      }
+      writer2.create(SCHEMA, file2);
+      try {
+        for (Object datum : data2) {
+          writer2.append(datum);
+        }
+      } finally {
+        writer2.close();
+      }
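+      // appendAllFrom copies raw blocks without decoding datums when
+      // recompress is false and the codecs match; otherwise each block is
+      // decompressed and re-encoded with the destination writer's codec.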
+      DataFileWriter<Object> concatinto = 
+        new DataFileWriter<Object>(new GenericDatumWriter<Object>())
+        .setSyncInterval(syncInterval);
+      concatinto.appendTo(file1);
+      DataFileReader<Object> concatfrom =
+        new DataFileReader<Object>(file2, new GenericDatumReader<Object>());
+      concatinto.appendAllFrom(concatfrom, recompress);
+      concatinto.close();
+      concatfrom.close();
+
+      concatfrom = new DataFileReader<Object>(file2, new GenericDatumReader<Object>());
+
+      DataFileReader<Object> concat =
+        new DataFileReader<Object>(file1, new GenericDatumReader<Object>());
+      int count = 0;
+      try {
+        Object datum = null;
+        if (VALIDATE) {
+          for (Object expected : data1) {
+            datum = concat.next(datum);
+            assertEquals("at "+count++, expected, datum);
+          }
+          for (Object expected : data2) {
+            datum = concatfrom.next(datum);
+            assertEquals("at "+count++, expected, datum);
+          }
+          for (Object expected : data2) {
+            datum = concat.next(datum);
+            assertEquals("at "+count++, expected, datum);
+          }
+        } else {
+          for (int i = 0; i < COUNT*2; i++) {
+            datum = concat.next(datum);
+          }
+        }
+      } finally {
+        if (count != 3 * COUNT) {
+          System.out.println(count + " " + k);
+        }
+        concat.close();
+        concatfrom.close();
+      }
+
+    }
+  }
+  
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCorruption.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCorruption.java
new file mode 100644
index 0000000..e76251c
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCorruption.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.file.DataFileConstants;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.util.Utf8;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.fail;
+
+public class TestDataFileCorruption {
+
+  private static final File DIR
+      = new File(System.getProperty("test.dir", "/tmp"));
+
+  private File makeFile(String name) {
+    return new File(DIR, "test-" + name + ".avro");
+  }
+
+  @Test
+  public void testCorruptedFile() throws IOException {
+    Schema schema = Schema.create(Type.STRING);
+
+    // Write a data file
+    DataFileWriter<Utf8> w = new DataFileWriter<Utf8>(new GenericDatumWriter<Utf8>(schema));
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    w.create(schema, baos);
+    w.append(new Utf8("apple"));
+    w.append(new Utf8("banana"));
+    w.sync();
+    w.append(new Utf8("celery"));
+    w.append(new Utf8("date"));
+    long pos = w.sync();
+    w.append(new Utf8("endive"));
+    w.append(new Utf8("fig"));
+    w.close();
+
+    // Corrupt the input by inserting some zero bytes before the sync marker for the
+    // penultimate block
+    byte[] original = baos.toByteArray();
+    int corruptPosition = (int) pos - DataFileConstants.SYNC_SIZE;
+    int corruptedBytes = 3;
+    byte[] corrupted = new byte[original.length + corruptedBytes];
+    System.arraycopy(original, 0, corrupted, 0, corruptPosition);
+    System.arraycopy(original, corruptPosition,
+        corrupted, corruptPosition + corruptedBytes, original.length - corruptPosition);
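+    // Nothing is copied into the 3-byte gap at corruptPosition; Java arrays
+    // are zero-initialized, so the sync check after the penultimate block fails.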
+
+    File file = makeFile("corrupt");
+    file.deleteOnExit();
+    FileOutputStream out = new FileOutputStream(file);
+    out.write(corrupted);
+    out.close();
+
+    // Read the data file
+    DataFileReader<Utf8> r = new DataFileReader<Utf8>(file,
+        new GenericDatumReader<Utf8>(schema));
+    assertEquals("apple", r.next().toString());
+    assertEquals("banana", r.next().toString());
+    long prevSync = r.previousSync();
+    try {
+      r.next();
+      fail("Corrupt block should throw exception");
+    } catch (AvroRuntimeException e) {
+      assertEquals("Invalid sync!", e.getCause().getMessage());
+    }
+    r.sync(prevSync); // go to sync point after previous successful one
+    assertEquals("endive", r.next().toString());
+    assertEquals("fig", r.next().toString());
+    assertFalse(r.hasNext());
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCustomSync.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCustomSync.java
new file mode 100644
index 0000000..9270e8c
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCustomSync.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.Arrays;
+import java.util.UUID;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.file.DataFileConstants;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.util.Utf8;
+import org.junit.Test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class TestDataFileCustomSync {
+  private byte[] createDataFile(byte[] sync) throws IOException {
+    Schema schema = Schema.create(Type.STRING);
+    DataFileWriter<Utf8> w = new DataFileWriter<Utf8>(new GenericDatumWriter<Utf8>(schema));
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    w.create(schema, baos, sync);
+    w.append(new Utf8("apple"));
+    w.append(new Utf8("banana"));
+    w.sync();
+    w.append(new Utf8("celery"));
+    w.append(new Utf8("date"));
+    w.sync();
+    w.append(new Utf8("endive"));
+    w.append(new Utf8("fig"));
+    w.close();
+    return baos.toByteArray();
+  }
+
+  private static byte[] generateSync() {
+    try {
+      MessageDigest digester = MessageDigest.getInstance("MD5");
+      long time = System.currentTimeMillis();
+      digester.update((UUID.randomUUID()+"@"+time).getBytes());
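+      // An MD5 digest is 16 bytes, exactly the sync marker size Avro requires.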
+      return digester.digest();
+    } catch (NoSuchAlgorithmException e) {
+      throw new RuntimeException(e);
+    }
+  }  
+
+  @Test(expected = IOException.class)
+  public void testInvalidSync() throws IOException {
+    // Invalid size (must be 16):
+    byte[] sync = new byte[8];
+    createDataFile(sync);
+  }
+
+  @Test
+  public void testRandomSync() throws IOException {
+    byte[] sync = generateSync();
+    byte[] randSyncFile = createDataFile(null);
+    byte[] customSyncFile = createDataFile(sync);
+    assertFalse(Arrays.equals(randSyncFile, customSyncFile));
+  }
+
+  @Test
+  public void testCustomSync() throws IOException {
+    byte[] sync = generateSync();
+    byte[] customSyncFile = createDataFile(sync);
+    byte[] sameCustomSyncFile = createDataFile(sync);
+    assertTrue(Arrays.equals(customSyncFile, sameCustomSyncFile));
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileDeflate.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileDeflate.java
new file mode 100644
index 0000000..12efbf3
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileDeflate.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+
+import org.apache.avro.Schema.Type;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.util.Utf8;
+import org.junit.Test;
+
+/** Simple test of DataFileWriter and DataFileStream with deflate codec. */
+public class TestDataFileDeflate {
+  @Test
+  public void testWriteAndRead() throws IOException {
+    Schema schema = Schema.create(Type.STRING);
+
+    // Write it
+    DataFileWriter<Utf8> w = new DataFileWriter<Utf8>(new GenericDatumWriter<Utf8>(schema));
+    w.setCodec(CodecFactory.deflateCodec(6));
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    w.create(schema, baos);
+    w.append(new Utf8("hello world"));
+    w.append(new Utf8("hello moon"));
+    w.sync();
+    w.append(new Utf8("bye bye world"));
+    w.append(new Utf8("bye bye moon"));
+    w.close();
+
+    // Read it
+    DataFileStream<Utf8> r = new DataFileStream<Utf8>(
+        new ByteArrayInputStream(baos.toByteArray()),
+        new GenericDatumReader<Utf8>(schema));
+    assertEquals("hello world", r.next().toString());
+    assertEquals("hello moon", r.next().toString());
+    assertEquals("bye bye world", r.next().toString());
+    assertEquals("bye bye moon", r.next().toString());
+    assertFalse(r.hasNext());
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileMeta.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileMeta.java
new file mode 100644
index 0000000..5d5b2d3
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileMeta.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+
+import org.junit.Assert;
+
+import org.apache.avro.Schema.Type;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.junit.Test;
+
+public class TestDataFileMeta {
+  @Test(expected=AvroRuntimeException.class)
+  public void testUseReservedMeta() {
+    DataFileWriter<?> w = new DataFileWriter<Object>(new GenericDatumWriter<Object>());
+    w.setMeta("avro.foo", "bar");
+  }
+
+  @Test()
+  public void testUseMeta() throws IOException {
+    DataFileWriter<?> w = new DataFileWriter<Object>(new GenericDatumWriter<Object>());
+    File f = AvroTestUtil.tempFile(getClass(), "testDataFileMeta.avro");
+    w.setMeta("hello", "bar");
+    w.create(Schema.create(Type.NULL), f);
+    w.close();
+
+    DataFileStream<Void> r = new DataFileStream<Void>(new FileInputStream(f), new GenericDatumReader<Void>());
+
+    assertTrue(r.getMetaKeys().contains("hello"));
+
+    assertEquals("bar", r.getMetaString("hello"));
+  }
+
+  @Test(expected=AvroRuntimeException.class)
+  public void testUseMetaAfterCreate() throws IOException {
+    DataFileWriter<?> w = new DataFileWriter<Object>(new GenericDatumWriter<Object>());
+    w.create(Schema.create(Type.NULL), new ByteArrayOutputStream());
+    w.setMeta("foo", "bar");
+  }
+
+  @Test
+  public void testBlockSizeSetInvalid() {
+    int exceptions = 0;
+    for (int i = -1; i < 33; i++) {
+      // 33 invalid, one valid
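+      // Only i == 32 is accepted; the minimum sync interval is 32 bytes.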
+      try {
+        new DataFileWriter<Object>(new GenericDatumWriter<Object>()).setSyncInterval(i);
+      } catch (IllegalArgumentException iae) {
+        exceptions++;
+      }
+    }
+    Assert.assertEquals(33, exceptions);
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFileReflect.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileReflect.java
new file mode 100644
index 0000000..78ff014
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFileReflect.java
@@ -0,0 +1,185 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.file.SeekableFileInput;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.reflect.ReflectDatumWriter;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestDataFileReflect {
+
+  private static final File DIR = new File(System.getProperty("test.dir",
+      "/tmp"));
+  private static final File FILE = new File(DIR, "test.avro");
+
+  /*
+   * Test that using multiple schemas in a file works when a union is
+   * created before any records are written.
+   */
+  @Test
+  public void testMultiReflectWithUnionBeforeWriting() throws IOException {
+    FileOutputStream fos = new FileOutputStream(FILE);
+
+    ReflectData reflectData = ReflectData.get();
+    List<Schema> schemas = Arrays.asList(new Schema[] {
+        reflectData.getSchema(FooRecord.class),
+        reflectData.getSchema(BarRecord.class) });
+    Schema union = Schema.createUnion(schemas);
+    DataFileWriter<Object> writer =
+      new DataFileWriter<Object>(new ReflectDatumWriter<Object>(union))
+      .create(union, fos);
+
+    // test writing to a file
+    CheckList<Object> check = new CheckList<Object>();
+    write(writer, new BarRecord("One beer please"), check);
+    write(writer, new FooRecord(10), check);
+    write(writer, new BarRecord("Two beers please"), check);
+    write(writer, new FooRecord(20), check);
+    writer.close();
+
+    ReflectDatumReader<Object> din = new ReflectDatumReader<Object>();
+    SeekableFileInput sin = new SeekableFileInput(FILE);
+    DataFileReader<Object> reader = new DataFileReader<Object>(sin, din);
+    int count = 0;
+    for (Object datum : reader)
+      check.assertEquals(datum, count++);
+    Assert.assertEquals(count, check.size());
+    reader.close();
+  }
+
+  /*
+   * Test writing a record that has a null field.
+   */
+  @Test
+  public void testNull() throws IOException {
+    FileOutputStream fos = new FileOutputStream(FILE);
+
+    ReflectData reflectData = ReflectData.AllowNull.get();
+    Schema schema = reflectData.getSchema(BarRecord.class);
+    DataFileWriter<BarRecord> writer = new DataFileWriter<BarRecord>
+      (new ReflectDatumWriter<BarRecord>(BarRecord.class, reflectData))
+      .create(schema, fos);
+
+    // test writing to a file
+    CheckList<BarRecord> check = new CheckList<BarRecord>();
+    write(writer, new BarRecord("One beer please"), check);
+    // null record here, fails when using the default reflectData instance
+    write(writer, new BarRecord(), check);
+    write(writer, new BarRecord("Two beers please"), check);
+    writer.close();
+
+    ReflectDatumReader<BarRecord> din = new ReflectDatumReader<BarRecord>();
+    SeekableFileInput sin = new SeekableFileInput(FILE);
+    DataFileReader<BarRecord> reader = new DataFileReader<BarRecord>(sin, din);
+    int count = 0;
+    for (BarRecord datum : reader)
+      check.assertEquals(datum, count++);
+    Assert.assertEquals(count, check.size());
+    reader.close();
+  }
+
+  /*
+   * Test that writing out and reading in a nested class works
+   */
+  @Test
+  public void testNestedClass() throws IOException {
+    FileOutputStream fos = new FileOutputStream(FILE);
+
+    Schema schema = ReflectData.get().getSchema(BazRecord.class);
+    DataFileWriter<BazRecord> writer =
+      new DataFileWriter<BazRecord>(new ReflectDatumWriter<BazRecord>(schema))
+      .create(schema, fos);
+
+    // test writing to a file
+    CheckList<BazRecord> check = new CheckList<BazRecord>();
+    write(writer, new BazRecord(10), check);
+    write(writer, new BazRecord(20), check);
+    writer.close();
+
+    ReflectDatumReader<BazRecord> din = new ReflectDatumReader<BazRecord>();
+    SeekableFileInput sin = new SeekableFileInput(FILE);
+    DataFileReader<BazRecord> reader = new DataFileReader<BazRecord>(sin, din);
+    int count = 0;
+    for (BazRecord datum : reader)
+      check.assertEquals(datum, count++);
+    Assert.assertEquals(count, check.size());
+    reader.close();
+  }
+
+  private <T> void write(DataFileWriter<T> writer, T o, CheckList<T> l)
+      throws IOException {
+    writer.append(l.addAndReturn(o));
+  }
+
+  @SuppressWarnings("serial")
+  private static class CheckList<T> extends ArrayList<T> {
+    T addAndReturn(T check) {
+      add(check);
+      return check;
+    }
+
+    void assertEquals(Object toCheck, int i) {
+      Assert.assertNotNull(toCheck);
+      Object o = get(i);
+      Assert.assertNotNull(o);
+      Assert.assertEquals(toCheck, o);
+    }
+  }
+
+  private static class BazRecord {
+    private int nbr;
+
+    @SuppressWarnings("unused")
+    public BazRecord() {
+    }
+
+    public BazRecord(int nbr) {
+      this.nbr = nbr;
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that instanceof BazRecord) {
+        return this.nbr == ((BazRecord) that).nbr;
+      }
+      return false;
+    }
+
+    @Override
+    public int hashCode() {
+      return nbr;
+    }
+
+    @Override
+    public String toString() {
+      return BazRecord.class.getSimpleName() + "{nbr=" + nbr + "}";
+    }
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestLogicalType.java b/lang/java/avro/src/test/java/org/apache/avro/TestLogicalType.java
new file mode 100644
index 0000000..0b2667f
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestLogicalType.java
@@ -0,0 +1,297 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+import java.util.Arrays;
+import java.util.concurrent.Callable;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestLogicalType {
+
+  @Test
+  public void testDecimalFromSchema() {
+    Schema schema = Schema.createFixed("aFixed", null, null, 4);
+    schema.addProp("logicalType", "decimal");
+    schema.addProp("precision", 9);
+    schema.addProp("scale", 2);
+    LogicalType logicalType = LogicalTypes.fromSchemaIgnoreInvalid(schema);
+
+    Assert.assertTrue("Should be a Decimal",
+        logicalType instanceof LogicalTypes.Decimal);
+    LogicalTypes.Decimal decimal = (LogicalTypes.Decimal) logicalType;
+    Assert.assertEquals("Should have correct precision",
+        9, decimal.getPrecision());
+    Assert.assertEquals("Should have correct scale",
+        2, decimal.getScale());
+  }
+
+  @Test
+  public void testInvalidLogicalTypeIgnored() {
+    final Schema schema = Schema.createFixed("aFixed", null, null, 2);
+    schema.addProp("logicalType", "decimal");
+    schema.addProp("precision", 9);
+    schema.addProp("scale", 2);
+
+    Assert.assertNull("Should ignore invalid logical type",
+        LogicalTypes.fromSchemaIgnoreInvalid(schema));
+  }
+
+  @Test
+  public void testDecimalWithNonByteArrayTypes() {
+    final LogicalType decimal = LogicalTypes.decimal(5, 2);
+    // test simple types
+    Schema[] nonBytes = new Schema[] {
+        Schema.createRecord("Record", null, null, false),
+        Schema.createArray(Schema.create(Schema.Type.BYTES)),
+        Schema.createMap(Schema.create(Schema.Type.BYTES)),
+        Schema.createEnum("Enum", null, null, Arrays.asList("a", "b")),
+        Schema.createUnion(Arrays.asList(
+            Schema.create(Schema.Type.BYTES),
+            Schema.createFixed("fixed", null, null, 4))),
+        Schema.create(Schema.Type.BOOLEAN), Schema.create(Schema.Type.INT),
+        Schema.create(Schema.Type.LONG), Schema.create(Schema.Type.FLOAT),
+        Schema.create(Schema.Type.DOUBLE), Schema.create(Schema.Type.NULL),
+        Schema.create(Schema.Type.STRING) };
+    for (final Schema schema : nonBytes) {
+      assertThrows("Should reject type: " + schema.getType(),
+          IllegalArgumentException.class,
+          "Logical type decimal must be backed by fixed or bytes", new Callable() {
+            @Override
+            public Object call() throws Exception {
+              decimal.addToSchema(schema);
+              return null;
+            }
+          });
+    }
+  }
+
+  @Test
+  public void testUnknownFromJsonNode() {
+    Schema schema = Schema.create(Schema.Type.STRING);
+    schema.addProp("logicalType", "unknown");
+    schema.addProp("someProperty", 34);
+    LogicalType logicalType = LogicalTypes.fromSchemaIgnoreInvalid(schema);
+    Assert.assertNull("Should not return a LogicalType instance", logicalType);
+  }
+
+  @Test
+  public void testDecimalBytesHasNoPrecisionLimit() {
+    Schema schema = Schema.create(Schema.Type.BYTES);
+    // precision is not limited for bytes
+    LogicalTypes.decimal(Integer.MAX_VALUE).addToSchema(schema);
+    Assert.assertEquals("Precision should be an Integer.MAX_VALUE",
+        Integer.MAX_VALUE,
+        ((LogicalTypes.Decimal) LogicalTypes.fromSchemaIgnoreInvalid(schema)).getPrecision());
+  }
+
+  @Test
+  public void testDecimalFixedPrecisionLimit() {
+    // 4 bytes can hold up to 9 digits of precision
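+    // (the largest unscaled value in 4 bytes is 2^31 - 1 = 2147483647,
+    // i.e. 9 full digits)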
+    final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
+    assertThrows("Should reject precision", IllegalArgumentException.class,
+        "fixed(4) cannot store 10 digits (max 9)", new Callable() {
+          @Override
+          public Object call() throws Exception {
+            LogicalTypes.decimal(10).addToSchema(schema);
+            return null;
+          }
+        }
+    );
+    Assert.assertNull("Invalid logical type should not be set on schema",
+        LogicalTypes.fromSchemaIgnoreInvalid(schema));
+  }
+
+  @Test
+  public void testDecimalFailsWithZeroPrecision() {
+    final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
+    assertThrows("Should reject precision", IllegalArgumentException.class,
+        "Invalid decimal precision: 0 (must be positive)", new Callable() {
+          @Override
+          public Object call() throws Exception {
+            LogicalTypes.decimal(0).addToSchema(schema);
+            return null;
+          }
+        });
+    Assert.assertNull("Invalid logical type should not be set on schema",
+        LogicalTypes.fromSchemaIgnoreInvalid(schema));
+  }
+
+  @Test
+  public void testDecimalFailsWithNegativePrecision() {
+    final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
+    assertThrows("Should reject precision", IllegalArgumentException.class,
+        "Invalid decimal precision: -9 (must be positive)", new Callable() {
+          @Override
+          public Object call() throws Exception {
+            LogicalTypes.decimal(-9).addToSchema(schema);
+            return null;
+          }
+        });
+    Assert.assertNull("Invalid logical type should not be set on schema",
+        LogicalTypes.fromSchemaIgnoreInvalid(schema));
+  }
+
+  @Test
+  public void testDecimalScaleBoundedByPrecision() {
+    final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
+    assertThrows("Should reject precision", IllegalArgumentException.class,
+        "Invalid decimal scale: 10 (greater than precision: 9)",
+        new Callable() {
+          @Override
+          public Object call() throws Exception {
+            LogicalTypes.decimal(9, 10).addToSchema(schema);
+            return null;
+          }
+        });
+    Assert.assertNull("Invalid logical type should not be set on schema",
+        LogicalTypes.fromSchemaIgnoreInvalid(schema));
+  }
+
+  @Test
+  public void testDecimalFailsWithNegativeScale() {
+    final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
+    assertThrows("Should reject precision", IllegalArgumentException.class,
+        "Invalid decimal scale: -2 (must be positive)", new Callable() {
+          @Override
+          public Object call() throws Exception {
+            LogicalTypes.decimal(9, -2).addToSchema(schema);
+            return null;
+          }
+        });
+    Assert.assertNull("Invalid logical type should not be set on schema",
+        LogicalTypes.fromSchemaIgnoreInvalid(schema));
+  }
+
+  @Test
+  public void testSchemaRejectsSecondLogicalType() {
+    final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
+    LogicalTypes.decimal(9).addToSchema(schema);
+    assertThrows("Should reject second logical type",
+        AvroRuntimeException.class,
+        "Can't overwrite property: scale", new Callable() {
+          @Override
+          public Object call() throws Exception {
+            LogicalTypes.decimal(9, 2).addToSchema(schema);
+            return null;
+          }
+        }
+    );
+    Assert.assertEquals("First logical type should still be set on schema",
+        LogicalTypes.decimal(9), LogicalTypes.fromSchemaIgnoreInvalid(schema));
+  }
+
+  @Test
+  public void testDecimalDefaultScale() {
+    Schema schema = Schema.createFixed("aDecimal", null, null, 4);
+    // 4 bytes can hold up to 9 digits of precision
+    LogicalTypes.decimal(9).addToSchema(schema);
+    Assert.assertEquals("Scale should be a 0",
+        0,
+        ((LogicalTypes.Decimal) LogicalTypes.fromSchemaIgnoreInvalid(schema)).getScale());
+  }
+
+  @Test
+  public void testFixedDecimalToFromJson() {
+    Schema schema = Schema.createFixed("aDecimal", null, null, 4);
+    LogicalTypes.decimal(9, 2).addToSchema(schema);
+    Schema parsed = new Schema.Parser().parse(schema.toString(true));
+    Assert.assertEquals("Constructed and parsed schemas should match",
+        schema, parsed);
+  }
+
+  @Test
+  public void testBytesDecimalToFromJson() {
+    Schema schema = Schema.create(Schema.Type.BYTES);
+    LogicalTypes.decimal(9, 2).addToSchema(schema);
+    Schema parsed = new Schema.Parser().parse(schema.toString(true));
+    Assert.assertEquals("Constructed and parsed schemas should match",
+        schema, parsed);
+  }
+
+  @Test
+  public void testLogicalTypeEquals() {
+    LogicalTypes.Decimal decimal90 = LogicalTypes.decimal(9);
+    LogicalTypes.Decimal decimal80 = LogicalTypes.decimal(8);
+    LogicalTypes.Decimal decimal92 = LogicalTypes.decimal(9, 2);
+
+    assertEqualsTrue("Same decimal", LogicalTypes.decimal(9, 0), decimal90);
+    assertEqualsTrue("Same decimal", LogicalTypes.decimal(8, 0), decimal80);
+    assertEqualsTrue("Same decimal", LogicalTypes.decimal(9, 2), decimal92);
+    assertEqualsFalse("Different logical type", LogicalTypes.uuid(), decimal90);
+    assertEqualsFalse("Different precision", decimal90, decimal80);
+    assertEqualsFalse("Different scale", decimal90, decimal92);
+  }
+
+  @Test
+  public void testLogicalTypeInSchemaEquals() {
+    Schema schema1 = Schema.createFixed("aDecimal", null, null, 4);
+    Schema schema2 = Schema.createFixed("aDecimal", null, null, 4);
+    Schema schema3 = Schema.createFixed("aDecimal", null, null, 4);
+    Assert.assertNotSame(schema1, schema2);
+    Assert.assertNotSame(schema1, schema3);
+    assertEqualsTrue("No logical types", schema1, schema2);
+    assertEqualsTrue("No logical types", schema1, schema3);
+
+    LogicalTypes.decimal(9).addToSchema(schema1);
+    assertEqualsFalse("Two has no logical type", schema1, schema2);
+
+    LogicalTypes.decimal(9).addToSchema(schema2);
+    assertEqualsTrue("Same logical types", schema1, schema2);
+
+    LogicalTypes.decimal(9, 2).addToSchema(schema3);
+    assertEqualsFalse("Different logical type", schema1, schema3);
+  }
+
+  public static void assertEqualsTrue(String message, Object o1, Object o2) {
+    Assert.assertTrue("Should be equal (forward): " + message, o1.equals(o2));
+    Assert.assertTrue("Should be equal (reverse): " + message, o2.equals(o1));
+  }
+
+  public static void assertEqualsFalse(String message, Object o1, Object o2) {
+    Assert.assertFalse("Should be equal (forward): " + message, o1.equals(o2));
+    Assert.assertFalse("Should be equal (reverse): " + message, o2.equals(o1));
+  }
+
+  /**
+   * A convenience method to avoid a large number of @Test(expected=...) tests
+   * @param message A String message to describe this assertion
+   * @param expected An Exception class that the Callable should throw
+   * @param containedInMessage A String that should be contained by the thrown
+   *                           exception's message
+   * @param callable A Callable that is expected to throw the exception
+   */
+  public static void assertThrows(String message,
+                                  Class<? extends Exception> expected,
+                                  String containedInMessage,
+                                  Callable callable) {
+    try {
+      callable.call();
+      Assert.fail("No exception was thrown (" + message + "), expected: " +
+          expected.getName());
+    } catch (Exception actual) {
+      Assert.assertEquals(message, expected, actual.getClass());
+      Assert.assertTrue(
+          "Expected exception message (" + containedInMessage + ") missing: " +
+              actual.getMessage(),
+          actual.getMessage().contains(containedInMessage)
+      );
+    }
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestProtocol.java b/lang/java/avro/src/test/java/org/apache/avro/TestProtocol.java
new file mode 100644
index 0000000..007c273
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestProtocol.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+
+import org.junit.Test;
+
+public class TestProtocol {
+
+  @Test public void testPropEquals() {
+    Protocol p1 = new Protocol("P", null, "foo");
+    p1.addProp("a","1");
+    Protocol p2 = new Protocol("P", null, "foo");
+    p2.addProp("a","2");
+    assertFalse(p1.equals(p2));
+  }
+  
+  @Test
+  public void testSplitProtocolBuild() {
+    Protocol p = new Protocol("P", null, "foo");
+    p.addProp("property", "some value");
+     
+    String protocolString = p.toString();
+    final int mid = protocolString.length() / 2;
+    String[] parts = {
+      protocolString.substring(0, mid),
+      protocolString.substring(mid),
+    }; 
+    
+    Protocol parsedStringProtocol = org.apache.avro.Protocol.parse(protocolString);
+    Protocol parsedArrayOfStringProtocol =
+      org.apache.avro.Protocol.parse(parts[0], parts[1]);
+
+    assertNotNull(parsedStringProtocol);
+    assertNotNull(parsedArrayOfStringProtocol);
+    assertEquals(parsedStringProtocol.toString(), parsedArrayOfStringProtocol.toString());
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchema.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchema.java
new file mode 100644
index 0000000..ba2cab4
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchema.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.avro.Schema.Field;
+import org.apache.avro.Schema.Type;
+import org.junit.Test;
+
+public class TestSchema {  
+  @Test
+  public void testSplitSchemaBuild() {
+    Schema s = SchemaBuilder
+       .record("HandshakeRequest")
+       .namespace("org.apache.avro.ipc").fields()
+         .name("clientProtocol").type().optional().stringType()
+         .name("meta").type().optional().map().values().bytesType()
+         .endRecord();
+    
+    String schemaString = s.toString();
+    final int mid = schemaString.length() / 2;
+    
+    Schema parsedStringSchema = new org.apache.avro.Schema.Parser().parse(schemaString);
+    Schema parsedArrayOfStringSchema =
+      new org.apache.avro.Schema.Parser().parse
+      (schemaString.substring(0, mid), schemaString.substring(mid));
+    assertNotNull(parsedStringSchema);
+    assertNotNull(parsedArrayOfStringSchema);
+    assertEquals(parsedStringSchema.toString(), parsedArrayOfStringSchema.toString());
+  }
+
+  @Test
+  public void testDuplicateRecordFieldName() {
+    final Schema schema = Schema.createRecord("RecordName", null, null, false);
+    final List<Field> fields = new ArrayList<Field>();
+    fields.add(new Field("field_name", Schema.create(Type.NULL), null, null));
+    fields.add(new Field("field_name", Schema.create(Type.INT), null, null));
+    try {
+      schema.setFields(fields);
+      fail("Should not be able to create a record with duplicate field name.");
+    } catch (AvroRuntimeException are) {
+      assertTrue(are.getMessage().contains("Duplicate field field_name in record RecordName"));
+    }
+  }
+
+  @Test
+  public void testCreateUnionVarargs() {
+    List<Schema> types = new ArrayList<Schema>();
+    types.add(Schema.create(Type.NULL));
+    types.add(Schema.create(Type.LONG));
+    Schema expected = Schema.createUnion(types);
+
+    Schema schema = Schema.createUnion(Schema.create(Type.NULL), Schema.create(Type.LONG));
+    assertEquals(expected, schema);
+  }
+
+  @Test
+  public void testEmptyRecordSchema() {
+    Schema schema = Schema.createRecord("foobar", null, null, false);
+    String schemaString = schema.toString();
+    assertNotNull(schemaString);
+  }
+
+  @Test
+  public void testSchemaWithFields() {
+    List<Field> fields = new ArrayList<Field>();
+    fields.add(new Field("field_name1", Schema.create(Type.NULL), null, null));
+    fields.add(new Field("field_name2", Schema.create(Type.INT), null, null));
+    Schema schema = Schema.createRecord("foobar", null, null, false, fields);
+    String schemaString = schema.toString();
+    assertNotNull(schemaString);
+    assertEquals(2, schema.getFields().size());
+  }
+
+  @Test(expected = NullPointerException.class)
+  public void testSchemaWithNullFields() {
+    Schema.createRecord("foobar", null, null, false, null);
+  }
+
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaBuilder.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaBuilder.java
new file mode 100644
index 0000000..58a1a6f
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaBuilder.java
@@ -0,0 +1,728 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.avro.Schema.Field.Order;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecordBuilder;
+import org.codehaus.jackson.node.NullNode;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestSchemaBuilder {
+
+  private static final File DIR = new File(System.getProperty("test.dir", "/tmp"));
+  private static final File FILE = new File(DIR, "test.avro");
+
+  @Test
+  public void testRecord() {
+    Schema schema = SchemaBuilder
+        .record("myrecord").namespace("org.example").aliases("oldrecord").fields()
+        .name("f0").aliases("f0alias").type().stringType().noDefault()
+        .name("f1").doc("This is f1").type().longType().noDefault()
+        .name("f2").type().nullable().booleanType().booleanDefault(true)
+        .endRecord();
+
+    Assert.assertEquals("myrecord", schema.getName());
+    Assert.assertEquals("org.example", schema.getNamespace());
+    Assert.assertEquals("org.example.oldrecord", schema.getAliases().iterator().next());
+    Assert.assertFalse(schema.isError());
+    List<Schema.Field> fields = schema.getFields();
+    Assert.assertEquals(3, fields.size());
+    Assert.assertEquals(
+        new Schema.Field("f0", Schema.create(Schema.Type.STRING), null, null),
+        fields.get(0));
+    Assert.assertTrue(fields.get(0).aliases().contains("f0alias"));
+    Assert.assertEquals(
+        new Schema.Field("f1", Schema.create(Schema.Type.LONG), "This is f1", null),
+        fields.get(1));
+
+    List<Schema> types = new ArrayList<Schema>();
+    types.add(Schema.create(Schema.Type.BOOLEAN));
+    types.add(Schema.create(Schema.Type.NULL));
+    Schema optional = Schema.createUnion(types);
+    Assert.assertEquals(new Schema.Field("f2", optional, null, true),
+        fields.get(2));
+  }
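+
+  // For orientation, the builder chain in testRecord() should produce roughly
+  // this JSON (a sketch; "f2" is the nullable boolean union built above):
+  //
+  //   {"type":"record","name":"myrecord","namespace":"org.example",
+  //    "aliases":["oldrecord"],"fields":[
+  //      {"name":"f0","type":"string","aliases":["f0alias"]},
+  //      {"name":"f1","type":"long","doc":"This is f1"},
+  //      {"name":"f2","type":["boolean","null"],"default":true}]}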
+  
+  @Test
+  public void testDoc() {
+    Schema s = SchemaBuilder.fixed("myfixed").doc("mydoc").size(1);
+    Assert.assertEquals("mydoc", s.getDoc());
+  }
+  
+  @Test
+  public void testProps() {
+    Schema s = SchemaBuilder.builder().intBuilder()
+      .prop("p1", "v1")
+      .prop("p2", "v2")
+      .prop("p2", "v2real") // overwrite
+      .endInt();
+    @SuppressWarnings("deprecation")
+    int size = s.getProps().size();
+    Assert.assertEquals(2, size);
+    Assert.assertEquals("v1", s.getProp("p1"));
+    Assert.assertEquals("v2real", s.getProp("p2"));
+  }
+
+  @Test
+  public void testNamespaces() {
+    Schema s1 = SchemaBuilder.record("myrecord")
+      .namespace("org.example")
+      .fields()
+        .name("myint").type().intType().noDefault()
+        .endRecord();
+    Schema s2 = SchemaBuilder.record("org.example.myrecord")
+      .fields()
+        .name("myint").type().intType().noDefault()
+        .endRecord();
+    Schema s3 = SchemaBuilder.record("org.example.myrecord")
+      .namespace("org.example2")
+      .fields()
+        .name("myint").type().intType().noDefault()
+        .endRecord();
+    Schema s4 = SchemaBuilder.builder("org.example").record("myrecord")
+      .fields()
+        .name("myint").type().intType().noDefault()
+        .endRecord();
+    
+    Assert.assertEquals("myrecord", s1.getName());
+    Assert.assertEquals("myrecord", s2.getName());
+    Assert.assertEquals("myrecord", s3.getName());
+    Assert.assertEquals("myrecord", s4.getName());
+
+    Assert.assertEquals("org.example", s1.getNamespace());
+    Assert.assertEquals("org.example", s2.getNamespace());
+    Assert.assertEquals("org.example", s3.getNamespace()); // namespace call is ignored
+    Assert.assertEquals("org.example", s4.getNamespace());
+
+    Assert.assertEquals("org.example.myrecord", s1.getFullName());
+    Assert.assertEquals("org.example.myrecord", s2.getFullName());
+    Assert.assertEquals("org.example.myrecord", s3.getFullName());
+    Assert.assertEquals("org.example.myrecord", s4.getFullName());
+  }
+
+  @Test(expected = NullPointerException.class)
+  public void testMissingRecordName() {
+    SchemaBuilder
+      .record(null).fields() // null name
+        .name("f0").type().stringType().noDefault()
+        .endRecord();
+  }
+  
+  @Test
+  public void testBoolean() {
+    Schema.Type type = Schema.Type.BOOLEAN;
+    Schema simple = SchemaBuilder.builder().booleanType();
+    Schema expected = primitive(type, simple);
+    Schema built1 = SchemaBuilder.builder()
+        .booleanBuilder().prop("p", "v").endBoolean();
+    Assert.assertEquals(expected, built1);
+  }
+  
+  @Test
+  public void testInt() {
+    Schema.Type type = Schema.Type.INT;
+    Schema simple = SchemaBuilder.builder().intType();
+    Schema expected = primitive(type, simple);
+    Schema built1 = SchemaBuilder.builder()
+        .intBuilder().prop("p", "v").endInt();
+    Assert.assertEquals(expected, built1);
+  }
+  
+  @Test
+  public void testLong() {
+    Schema.Type type = Schema.Type.LONG;
+    Schema simple = SchemaBuilder.builder().longType();
+    Schema expected = primitive(type, simple);
+    Schema built1 = SchemaBuilder.builder()
+        .longBuilder().prop("p", "v").endLong();
+    Assert.assertEquals(expected, built1);
+  }
+  
+  @Test
+  public void testFloat() {
+    Schema.Type type = Schema.Type.FLOAT;
+    Schema simple = SchemaBuilder.builder().floatType();
+    Schema expected = primitive(type, simple);
+    Schema built1 = SchemaBuilder.builder()
+        .floatBuilder().prop("p", "v").endFloat();
+    Assert.assertEquals(expected, built1);
+  }
+  
+  @Test
+  public void testDouble() {
+    Schema.Type type = Schema.Type.DOUBLE;
+    Schema simple = SchemaBuilder.builder().doubleType();
+    Schema expected = primitive(type, simple);
+    Schema built1 = SchemaBuilder.builder()
+        .doubleBuilder().prop("p", "v").endDouble();
+    Assert.assertEquals(expected, built1);
+  }
+  
+  @Test
+  public void testString() {
+    Schema.Type type = Schema.Type.STRING;
+    Schema simple = SchemaBuilder.builder().stringType();
+    Schema expected = primitive(type, simple);
+    Schema built1 = SchemaBuilder.builder()
+        .stringBuilder().prop("p", "v").endString();
+    Assert.assertEquals(expected, built1);
+  }
+  
+  @Test
+  public void testBytes() {
+    Schema.Type type = Schema.Type.BYTES;
+    Schema simple = SchemaBuilder.builder().bytesType();
+    Schema expected = primitive(type, simple);
+    Schema built1 = SchemaBuilder.builder()
+        .bytesBuilder().prop("p", "v").endBytes();
+    Assert.assertEquals(expected, built1);
+  }
+  
+  @Test
+  public void testNull() {
+    Schema.Type type = Schema.Type.NULL;
+    Schema simple = SchemaBuilder.builder().nullType();
+    Schema expected = primitive(type, simple);
+    Schema built1 = SchemaBuilder.builder()
+        .nullBuilder().prop("p", "v").endNull();
+    Assert.assertEquals(expected, built1);
+  }
+
+  
+  private Schema primitive(Schema.Type type, Schema bare) {
+    // test creation of bare schema by name
+    Schema bareByName = SchemaBuilder.builder().type(type.getName());
+    Assert.assertEquals(Schema.create(type), bareByName);
+    Assert.assertEquals(bareByName, bare);
+    // return a schema with custom prop set
+    Schema p = Schema.create(type);
+    p.addProp("p", "v");
+    return p;
+  }
+  
+
+//  @Test
+//  public void testError() {
+//    Schema schema = SchemaBuilder
+//        .errorType("myerror")
+//        .requiredString("message")
+//        .build();
+//
+//    Assert.assertEquals("myerror", schema.getName());
+//    Assert.assertTrue(schema.isError());
+//  }
+
+  @Test
+  public void testRecursiveRecord() {
+    Schema schema = SchemaBuilder.record("LongList").fields()
+      .name("value").type().longType().noDefault()
+      .name("next").type().optional().type("LongList")
+      .endRecord();
+
+    Assert.assertEquals("LongList", schema.getName());
+    List<Schema.Field> fields = schema.getFields();
+    Assert.assertEquals(2, fields.size());
+    Assert.assertEquals(
+        new Schema.Field("value", Schema.create(Schema.Type.LONG), null, null),
+        fields.get(0));
+
+    Assert.assertEquals(
+        Schema.Type.UNION,
+        fields.get(1).schema().getType());
+
+    Assert.assertEquals(
+        Schema.Type.NULL,
+        fields.get(1).schema().getTypes().get(0).getType());
+    Schema recordSchema = fields.get(1).schema().getTypes().get(1);
+    Assert.assertEquals(Schema.Type.RECORD, recordSchema.getType());
+    Assert.assertEquals("LongList", recordSchema.getName());
+    Assert.assertEquals(NullNode.getInstance(), fields.get(1).defaultValue());
+  }
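+
+  // The schema built above is the classic self-referential linked list; as
+  // JSON it should look roughly like:
+  //
+  //   {"type":"record","name":"LongList","fields":[
+  //     {"name":"value","type":"long"},
+  //     {"name":"next","type":["null","LongList"],"default":null}]}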
+
+  @Test
+  public void testEnum() {
+    List<String> symbols = Arrays.asList("a", "b");
+    Schema expected = Schema.createEnum("myenum", null, null, symbols);
+    expected.addProp("p", "v");
+    Schema schema = SchemaBuilder.enumeration("myenum")
+        .prop("p", "v").symbols("a", "b");
+    Assert.assertEquals(expected, schema);
+  }
+
+  @Test
+  public void testFixed() {
+    Schema expected = Schema.createFixed("myfixed", null, null, 16);
+    expected.addAlias("myOldFixed");
+    Schema schema = SchemaBuilder.fixed("myfixed")
+        .aliases("myOldFixed").size(16);
+    Assert.assertEquals(expected, schema);
+  }
+
+  @Test
+  public void testArray() {
+    Schema longSchema = Schema.create(Schema.Type.LONG);
+    Schema expected = Schema.createArray(longSchema);
+
+    Schema schema1 = SchemaBuilder.array().items().longType();
+    Assert.assertEquals(expected, schema1);
+
+    Schema schema2 = SchemaBuilder.array().items(longSchema);
+    Assert.assertEquals(expected, schema2);
+
+    Schema schema3 = SchemaBuilder.array().prop("p", "v")
+        .items().type("long");
+    expected.addProp("p", "v");
+    Assert.assertEquals(expected, schema3);
+  }
+
+  @Test
+  public void testMap() {
+    Schema intSchema = Schema.create(Schema.Type.INT);
+    Schema expected = Schema.createMap(intSchema);
+
+    Schema schema1 = SchemaBuilder.map().values().intType();
+    Assert.assertEquals(expected, schema1);
+
+    Schema schema2 = SchemaBuilder.map().values(intSchema);
+    Assert.assertEquals(expected, schema2);
+
+    Schema schema3 = SchemaBuilder.map().prop("p", "v")
+        .values().type("int");
+    expected.addProp("p", "v");
+    Assert.assertEquals(expected, schema3);
+  }
+
+  @Test
+  public void testUnionAndNullable() {
+    List<Schema> types = new ArrayList<Schema>();
+    types.add(Schema.create(Schema.Type.LONG));
+    types.add(Schema.create(Schema.Type.NULL));
+    Schema expected = Schema.createUnion(types);
+    
+    Schema schema = SchemaBuilder.unionOf()
+        .longType().and()
+        .nullType().endUnion();
+    Assert.assertEquals(expected, schema);
+    
+    schema = SchemaBuilder.nullable().longType();
+    Assert.assertEquals(expected, schema);
+  }
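+
+  // Note the branch ordering the assertions above rely on: nullable() builds
+  // [type, "null"], which is why a non-null default is legal, whereas
+  // optional() (see testRecursiveRecord) builds ["null", type] with a null
+  // default. The two spellings for a long field, as a sketch:
+  //
+  //   .name("n").type().nullable().longType().longDefault(2L)  // ["long","null"]
+  //   .name("o").type().optional().longType()                  // ["null","long"]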
+  
+  @Test
+  public void testFields() {
+    Schema rec = SchemaBuilder.record("Rec").fields()
+      .name("documented").doc("documented").type().nullType().noDefault()
+      .name("ascending").orderAscending().type().booleanType().noDefault()
+      .name("descending").orderDescending().type().floatType().noDefault()
+      .name("ignored").orderIgnore().type().doubleType().noDefault()
+      .name("aliased").aliases("anAlias").type().stringType().noDefault()
+      .endRecord();
+    Assert.assertEquals("documented", rec.getField("documented").doc());
+    Assert.assertEquals(Order.ASCENDING, rec.getField("ascending").order());
+    Assert.assertEquals(Order.DESCENDING, rec.getField("descending").order());
+    Assert.assertEquals(Order.IGNORE, rec.getField("ignored").order());
+    Assert.assertTrue(rec.getField("aliased").aliases().contains("anAlias"));
+  }
+  
+  @Test
+  public void testFieldShortcuts() {
+    Schema full = SchemaBuilder.record("Blah").fields()
+        .name("rbool").type().booleanType().noDefault()
+        .name("obool").type().optional().booleanType()
+        .name("nbool").type().nullable().booleanType().booleanDefault(true)
+        .name("rint").type().intType().noDefault()
+        .name("oint").type().optional().intType()
+        .name("nint").type().nullable().intType().intDefault(1)
+        .name("rlong").type().longType().noDefault()
+        .name("olong").type().optional().longType()
+        .name("nlong").type().nullable().longType().longDefault(2L)
+        .name("rfloat").type().floatType().noDefault()
+        .name("ofloat").type().optional().floatType()
+        .name("nfloat").type().nullable().floatType().floatDefault(-1.1f)
+        .name("rdouble").type().doubleType().noDefault()
+        .name("odouble").type().optional().doubleType()
+        .name("ndouble").type().nullable().doubleType().doubleDefault(99.9d)
+        .name("rstring").type().stringType().noDefault()
+        .name("ostring").type().optional().stringType()
+        .name("nstring").type().nullable().stringType().stringDefault("def")
+        .name("rbytes").type().bytesType().noDefault()
+        .name("obytes").type().optional().bytesType()
+        .name("nbytes").type().nullable().bytesType().bytesDefault(new byte[] {1,2,3})
+        .endRecord();
+    
+    Schema shortcut = SchemaBuilder.record("Blah").fields()
+        .requiredBoolean("rbool")
+        .optionalBoolean("obool")
+        .nullableBoolean("nbool", true)
+        .requiredInt("rint")
+        .optionalInt("oint")
+        .nullableInt("nint", 1)
+        .requiredLong("rlong")
+        .optionalLong("olong")
+        .nullableLong("nlong", 2L)
+        .requiredFloat("rfloat")
+        .optionalFloat("ofloat")
+        .nullableFloat("nfloat", -1.1f)
+        .requiredDouble("rdouble")
+        .optionalDouble("odouble")
+        .nullableDouble("ndouble", 99.9d)
+        .requiredString("rstring")
+        .optionalString("ostring")
+        .nullableString("nstring", "def")
+        .requiredBytes("rbytes")
+        .optionalBytes("obytes")
+        .nullableBytes("nbytes", new byte[] {1,2,3})
+        .endRecord();
+    
+    Assert.assertEquals(full, shortcut);
+  }
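+
+  // Each shortcut appears to be pure sugar for the corresponding long form,
+  // which is exactly what the equality assertion above checks; for instance:
+  //
+  //   .requiredLong("rlong")   ==  .name("rlong").type().longType().noDefault()
+  //   .nullableInt("nint", 1)  ==  .name("nint").type().nullable().intType().intDefault(1)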
+  
+  @Test
+  public void testNames() {
+    // no contextual namespace
+    Schema r = SchemaBuilder.record("Rec").fields()
+      .name("f0").type().fixed("org.foo.MyFixed").size(1).noDefault()
+      .name("f1").type("org.foo.MyFixed").noDefault()
+      .name("f2").type("org.foo.MyFixed", "").noDefault()
+      .name("f3").type("org.foo.MyFixed", null).noDefault()
+      .name("f4").type("org.foo.MyFixed", "ignorethis").noDefault()
+      .name("f5").type("MyFixed", "org.foo").noDefault()
+      .endRecord();
+    Schema expected = Schema.createFixed("org.foo.MyFixed", null, null, 1);
+    checkField(r, expected, "f0");
+    checkField(r, expected, "f1");
+    checkField(r, expected, "f2");
+    checkField(r, expected, "f3");
+    checkField(r, expected, "f4");
+    checkField(r, expected, "f5");
+    
+    // context namespace
+    Schema f = SchemaBuilder.builder("").fixed("Foo").size(1);
+    Assert.assertEquals(Schema.createFixed("Foo", null, null, 1), f);
+   
+    // context namespace from record matches
+    r = SchemaBuilder.record("Rec").namespace("org.foo").fields()
+        .name("f0").type().fixed("MyFixed").size(1).noDefault()
+        .name("f1").type("org.foo.MyFixed").noDefault()
+        .name("f2").type("org.foo.MyFixed", "").noDefault()
+        .name("f3").type("org.foo.MyFixed", null).noDefault()
+        .name("f4").type("org.foo.MyFixed", "ignorethis").noDefault()
+        .name("f5").type("MyFixed", "org.foo").noDefault()
+        .name("f6").type("MyFixed", null).noDefault()
+        .name("f7").type("MyFixed").noDefault()
+        .endRecord();
+    checkField(r, expected, "f0");
+    checkField(r, expected, "f1");
+    checkField(r, expected, "f2");
+    checkField(r, expected, "f3");
+    checkField(r, expected, "f4");
+    checkField(r, expected, "f5");
+    checkField(r, expected, "f6");
+    checkField(r, expected, "f7");
+
+    // context namespace from record does not match
+    r = SchemaBuilder.record("Rec").namespace("org.rec").fields()
+        .name("f0").type().fixed("MyFixed").namespace("org.foo").size(1).noDefault()
+        .name("f1").type("org.foo.MyFixed").noDefault()
+        .name("f2").type("org.foo.MyFixed", "").noDefault()
+        .name("f3").type("org.foo.MyFixed", null).noDefault()
+        .name("f4").type("org.foo.MyFixed", "ignorethis").noDefault()
+        .name("f5").type("MyFixed", "org.foo").noDefault()
+        .endRecord();
+    checkField(r, expected, "f0");
+    checkField(r, expected, "f1");
+    checkField(r, expected, "f2");
+    checkField(r, expected, "f3");
+    checkField(r, expected, "f4");
+    checkField(r, expected, "f5");
+    
+    // context namespace from record, nested has no namespace
+    expected = Schema.createFixed("MyFixed", null, null, 1);
+    r = SchemaBuilder.record("Rec").namespace("org.rec").fields()
+        .name("f0").type().fixed("MyFixed").namespace("").size(1).noDefault()
+        .name("f1").type("MyFixed", "").noDefault()
+        .endRecord();
+    checkField(r, expected, "f0");
+    checkField(r, expected, "f1");
+    
+    // mimic names of primitives, but with a namespace.  This is OK
+    SchemaBuilder.fixed("org.test.long").size(1);
+    SchemaBuilder.fixed("long").namespace("org.test").size(1);
+    SchemaBuilder.builder("org.test").fixed("long").size(1);
+
+  }
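+
+  // Name resolution as exercised above, summarized (a sketch of the rules the
+  // assertions check):
+  //
+  //   type("org.foo.MyFixed", "ignorethis")  // dotted name wins; namespace arg dropped
+  //   type("MyFixed", "org.foo")             // explicit namespace argument used
+  //   type("MyFixed")                        // falls back to the enclosing namespace
+  //   type("MyFixed", "")                    // empty string forces the null namespace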
+  
+  private void checkField(Schema r, Schema expected, String name) {
+    Assert.assertEquals(expected, r.getField(name).schema());
+  }
+  
+  @Test(expected=SchemaParseException.class)
+  public void testNamesFailRedefined() {
+    SchemaBuilder.record("Rec").fields()
+      .name("f0").type().enumeration("MyEnum").symbols("A", "B").enumDefault("A")
+      .name("f1").type().enumeration("MyEnum").symbols("X", "Y").noDefault()
+      .endRecord();
+  }
+
+  @Test(expected=SchemaParseException.class)
+  public void testNamesFailAbsent() {
+    SchemaBuilder.builder().type("notdefined");
+  }
+  
+  @Test(expected=AvroTypeException.class)
+  public void testNameReserved() {
+    SchemaBuilder.fixed("long").namespace("").size(1);
+  }
+  
+  @Test
+  public void testFieldTypesAndDefaultValues() {
+    byte[] bytedef = new byte[]{3};
+    ByteBuffer bufdef = ByteBuffer.wrap(bytedef);
+    String strdef = "\u0003";
+    HashMap<String, String> mapdef = new HashMap<String, String>();
+    mapdef.put("a", "A");
+    ArrayList<String> arrdef = new ArrayList<String>();
+    arrdef.add("arr");
+    
+    Schema rec = SchemaBuilder.record("inner").fields()
+      .name("f").type().intType().noDefault()
+      .endRecord();
+    
+    Schema rec2 = SchemaBuilder.record("inner2").fields()
+      .name("f2").type().intType().noDefault()
+      .endRecord();
+    
+    GenericData.Record recdef = 
+        new GenericRecordBuilder(rec).set("f", 1).build();
+        
+    GenericData.Record recdef2 =
+        new GenericRecordBuilder(rec2).set("f2", 2).build();
+    
+    Schema r = SchemaBuilder.record("r").fields()
+      .name("boolF").type().booleanType().booleanDefault(false)
+      .name("intF").type().intType().intDefault(1)
+      .name("longF").type().longType().longDefault(2L)
+      .name("floatF").type().floatType().floatDefault(3.0f)
+      .name("doubleF").type().doubleType().doubleDefault(4.0d)
+      .name("stringF").type().stringType().stringDefault("def")
+      .name("bytesF1").type().bytesType().bytesDefault(bytedef)
+      .name("bytesF2").type().bytesType().bytesDefault(bufdef)
+      .name("bytesF3").type().bytesType().bytesDefault(strdef)
+      .name("nullF").type().nullType().nullDefault()
+      .name("fixedF1").type().fixed("F1").size(1).fixedDefault(bytedef)
+      .name("fixedF2").type().fixed("F2").size(1).fixedDefault(bufdef)
+      .name("fixedF3").type().fixed("F3").size(1).fixedDefault(strdef)
+      .name("enumF").type().enumeration("E1").symbols("S").enumDefault("S")
+      .name("mapF").type().map().values().stringType()
+        .mapDefault(mapdef)
+      .name("arrayF").type().array().items().stringType()
+        .arrayDefault(arrdef)
+      .name("recordF").type().record("inner").fields()
+        .name("f").type().intType().noDefault()
+        .endRecord().recordDefault(recdef)
+      .name("byName").type("E1").withDefault("S")
+      // union builders, one for each 'first type' in a union:
+      .name("boolU").type().unionOf().booleanType().and()
+        .intType().endUnion().booleanDefault(false)
+      .name("intU").type().unionOf().intType().and()
+        .longType().endUnion().intDefault(1)
+      .name("longU").type().unionOf().longType().and()
+        .intType().endUnion().longDefault(2L)
+      .name("floatU").type().unionOf().floatType().and()
+        .intType().endUnion().floatDefault(3.0f)
+      .name("doubleU").type().unionOf().doubleType().and()
+        .intType().endUnion().doubleDefault(4.0d)
+      .name("stringU").type().unionOf().stringType().and()
+        .intType().endUnion().stringDefault("def")
+      .name("bytesU").type().unionOf().bytesType().and()
+        .intType().endUnion().bytesDefault(bytedef)
+      .name("nullU").type().unionOf().nullType().and()
+        .intType().endUnion().nullDefault()
+      .name("fixedU").type().unionOf().fixed("F4").size(1).and()
+        .intType().endUnion().fixedDefault(bytedef)
+      .name("enumU").type().unionOf().enumeration("E2").symbols("SS").and()
+        .intType().endUnion().enumDefault("SS")
+      .name("mapU").type().unionOf().map().values().stringType().and()
+        .intType().endUnion().mapDefault(mapdef)
+      .name("arrayU").type().unionOf().array().items().stringType().and()
+        .intType().endUnion().arrayDefault(arrdef)
+      .name("recordU").type().unionOf().record("inner2").fields()
+        .name("f2").type().intType().noDefault()
+        .endRecord().and().intType().endUnion().recordDefault(recdef2)
+      .endRecord();
+    
+    GenericData.Record newRec =
+        new GenericRecordBuilder(r).build();
+    
+    Assert.assertEquals(false, newRec.get("boolF"));
+    Assert.assertEquals(false, newRec.get("boolU"));
+    Assert.assertEquals(1, newRec.get("intF"));
+    Assert.assertEquals(1, newRec.get("intU"));
+    Assert.assertEquals(2L, newRec.get("longF"));
+    Assert.assertEquals(2L, newRec.get("longU"));
+    Assert.assertEquals(3f, newRec.get("floatF"));
+    Assert.assertEquals(3f, newRec.get("floatU"));
+    Assert.assertEquals(4d, newRec.get("doubleF"));
+    Assert.assertEquals(4d, newRec.get("doubleU"));
+    Assert.assertEquals("def", newRec.get("stringF").toString());
+    Assert.assertEquals("def", newRec.get("stringU").toString());
+    Assert.assertEquals(bufdef, newRec.get("bytesF1"));
+    Assert.assertEquals(bufdef, newRec.get("bytesF2"));
+    Assert.assertEquals(bufdef, newRec.get("bytesF3"));
+    Assert.assertEquals(bufdef, newRec.get("bytesU"));
+    Assert.assertNull(newRec.get("nullF"));
+    Assert.assertNull(newRec.get("nullU"));
+    Assert.assertArrayEquals(bytedef, 
+        ((GenericData.Fixed)newRec.get("fixedF1")).bytes());
+    Assert.assertArrayEquals(bytedef, 
+        ((GenericData.Fixed)newRec.get("fixedF2")).bytes());
+    Assert.assertArrayEquals(bytedef, 
+        ((GenericData.Fixed)newRec.get("fixedF3")).bytes());
+    Assert.assertArrayEquals(bytedef, 
+        ((GenericData.Fixed)newRec.get("fixedU")).bytes());
+    Assert.assertEquals("S", newRec.get("enumF").toString());
+    Assert.assertEquals("SS", newRec.get("enumU").toString());
+    @SuppressWarnings("unchecked")
+    Map<CharSequence, CharSequence> map = 
+      (Map<CharSequence, CharSequence>) newRec.get("mapF");
+    Assert.assertEquals(mapdef.size(), map.size());
+    for(Map.Entry<CharSequence, CharSequence> e : map.entrySet()) {
+      Assert.assertEquals(
+          mapdef.get(e.getKey().toString()), e.getValue().toString());
+    }
+    Assert.assertEquals(newRec.get("mapF"), newRec.get("mapU"));
+    @SuppressWarnings("unchecked")
+    GenericData.Array<CharSequence> arr = 
+      (GenericData.Array<CharSequence>) newRec.get("arrayF");
+    Assert.assertEquals(arrdef.size(), arr.size());
+    for(CharSequence c : arr) {
+      Assert.assertTrue(arrdef.contains(c.toString()));
+    }
+    Assert.assertEquals(newRec.get("arrF"), newRec.get("arrU"));
+    Assert.assertEquals(recdef, newRec.get("recordF"));
+    Assert.assertEquals(recdef2, newRec.get("recordU"));
+    Assert.assertEquals("S", newRec.get("byName").toString());
+  }
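+
+  // Every union field above leads with the branch being defaulted: per the
+  // Avro spec, a union field's default value is interpreted against the
+  // first branch of the union. For example (from the record built above):
+  //
+  //   .name("intU").type().unionOf().intType().and()
+  //     .longType().endUnion().intDefault(1)   // default 1 matches branch 0, "int"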
+  
+  @Test(expected=SchemaBuilderException.class)
+  public void testBadDefault() {
+    SchemaBuilder.record("r").fields()
+      .name("f").type(Schema.create(Schema.Type.INT)).withDefault(new Object())
+      .endRecord();
+  }
+  
+  @Test 
+  public void testUnionFieldBuild() {
+    SchemaBuilder.record("r").fields()
+      .name("allUnion").type().unionOf()
+        .booleanType().and()
+        .intType().and()
+        .longType().and()
+        .floatType().and()
+        .doubleType().and()
+        .stringType().and()
+        .bytesType().and()
+        .nullType().and()
+        .fixed("Fix").size(1).and()
+        .enumeration("Enu").symbols("Q").and()
+        .array().items().intType().and()
+        .map().values().longType().and()
+        .record("Rec").fields()
+          .name("one").type("Fix").noDefault()
+          .endRecord()
+        .endUnion().booleanDefault(false)
+      .endRecord();
+  }
+
+  @Test
+  public void testDefaults() throws IOException {
+    Schema writeSchema = SchemaBuilder.record("r").fields()
+      .name("requiredInt").type().intType().noDefault()
+      .name("optionalInt").type().optional().intType()
+      .name("nullableIntWithDefault").type().nullable().intType().intDefault(3)
+      .endRecord();
+
+    GenericData.Record rec1 = new GenericRecordBuilder(writeSchema)
+        .set("requiredInt", 1)
+        .build();
+
+    Assert.assertEquals(1, rec1.get("requiredInt"));
+    Assert.assertEquals(null, rec1.get("optionalInt"));
+    Assert.assertEquals(3, rec1.get("nullableIntWithDefault"));
+
+    GenericData.Record rec2 = new GenericRecordBuilder(writeSchema)
+        .set("requiredInt", 1)
+        .set("optionalInt", 2)
+        .set("nullableIntWithDefault", 13)
+        .build();
+
+    Assert.assertEquals(1, rec2.get("requiredInt"));
+    Assert.assertEquals(2, rec2.get("optionalInt"));
+    Assert.assertEquals(13, rec2.get("nullableIntWithDefault"));
+
+    // write to file
+    DataFileWriter<Object> writer =
+        new DataFileWriter<Object>(new GenericDatumWriter<Object>());
+    writer.create(writeSchema, FILE);
+    writer.append(rec1);
+    writer.append(rec2);
+    writer.close();
+
+    Schema readSchema = SchemaBuilder.record("r").fields()
+        .name("requiredInt").type().intType().noDefault()
+        .name("optionalInt").type().optional().intType()
+        .name("nullableIntWithDefault").type().nullable().intType().intDefault(3)
+        .name("newOptionalInt").type().optional().intType()
+        .name("newNullableIntWithDefault").type().nullable().intType().intDefault(5)
+        .endRecord();
+
+    DataFileReader<GenericData.Record> reader =
+        new DataFileReader<GenericData.Record>(FILE,
+            new GenericDatumReader<GenericData.Record>(writeSchema, readSchema));
+
+    GenericData.Record rec1read = reader.iterator().next();
+    Assert.assertEquals(1, rec1read.get("requiredInt"));
+    Assert.assertEquals(null, rec1read.get("optionalInt"));
+    Assert.assertEquals(3, rec1read.get("nullableIntWithDefault"));
+    Assert.assertEquals(null, rec1read.get("newOptionalInt"));
+    Assert.assertEquals(5, rec1read.get("newNullableIntWithDefault"));
+
+    GenericData.Record rec2read = reader.iterator().next();
+    Assert.assertEquals(1, rec2read.get("requiredInt"));
+    Assert.assertEquals(2, rec2read.get("optionalInt"));
+    Assert.assertEquals(13, rec2read.get("nullableIntWithDefault"));
+    Assert.assertEquals(null, rec2read.get("newOptionalInt"));
+    Assert.assertEquals(5, rec2read.get("newNullableIntWithDefault"));
+  }
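+
+  // testDefaults() above is effectively a schema-evolution round trip: records
+  // are written with writeSchema and read back with a readSchema that adds two
+  // fields, which the resolver fills from their defaults (null for the new
+  // optional int, 5 for the new nullable int). The key step is constructing
+  // the reader with both schemas:
+  //
+  //   new GenericDatumReader<GenericData.Record>(writeSchema, readSchema)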
+
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibility.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibility.java
new file mode 100644
index 0000000..9b8cde1
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibility.java
@@ -0,0 +1,608 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import static org.junit.Assert.assertEquals;
+import static org.apache.avro.SchemaCompatibility.checkReaderWriterCompatibility;
+
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.avro.Schema.Field;
+import org.apache.avro.SchemaCompatibility.SchemaCompatibilityType;
+import org.apache.avro.SchemaCompatibility.SchemaPairCompatibility;
+import org.apache.avro.generic.GenericData.EnumSymbol;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.util.Utf8;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Unit-tests for SchemaCompatibility. */
+public class TestSchemaCompatibility {
+  private static final Logger LOG = LoggerFactory.getLogger(TestSchemaCompatibility.class);
+
+  // -----------------------------------------------------------------------------------------------
+
+  private static final Schema NULL_SCHEMA = Schema.create(Schema.Type.NULL);
+  private static final Schema BOOLEAN_SCHEMA = Schema.create(Schema.Type.BOOLEAN);
+  private static final Schema INT_SCHEMA = Schema.create(Schema.Type.INT);
+  private static final Schema LONG_SCHEMA = Schema.create(Schema.Type.LONG);
+  private static final Schema FLOAT_SCHEMA = Schema.create(Schema.Type.FLOAT);
+  private static final Schema DOUBLE_SCHEMA = Schema.create(Schema.Type.DOUBLE);
+  private static final Schema STRING_SCHEMA = Schema.create(Schema.Type.STRING);
+  private static final Schema BYTES_SCHEMA = Schema.create(Schema.Type.BYTES);
+
+  private static final Schema INT_ARRAY_SCHEMA = Schema.createArray(INT_SCHEMA);
+  private static final Schema LONG_ARRAY_SCHEMA = Schema.createArray(LONG_SCHEMA);
+  private static final Schema STRING_ARRAY_SCHEMA = Schema.createArray(STRING_SCHEMA);
+
+  private static final Schema INT_MAP_SCHEMA = Schema.createMap(INT_SCHEMA);
+  private static final Schema LONG_MAP_SCHEMA = Schema.createMap(LONG_SCHEMA);
+  private static final Schema STRING_MAP_SCHEMA = Schema.createMap(STRING_SCHEMA);
+
+  private static final Schema ENUM1_AB_SCHEMA =
+      Schema.createEnum("Enum1", null, null, list("A", "B"));
+  private static final Schema ENUM1_ABC_SCHEMA =
+      Schema.createEnum("Enum1", null, null, list("A", "B", "C"));
+  private static final Schema ENUM1_BC_SCHEMA =
+      Schema.createEnum("Enum1", null, null, list("B", "C"));
+  private static final Schema ENUM2_AB_SCHEMA =
+      Schema.createEnum("Enum2", null, null, list("A", "B"));
+
+  private static final Schema EMPTY_UNION_SCHEMA =
+      Schema.createUnion(new ArrayList<Schema>());
+  private static final Schema NULL_UNION_SCHEMA =
+      Schema.createUnion(list(NULL_SCHEMA));
+  private static final Schema INT_UNION_SCHEMA =
+      Schema.createUnion(list(INT_SCHEMA));
+  private static final Schema LONG_UNION_SCHEMA =
+      Schema.createUnion(list(LONG_SCHEMA));
+  private static final Schema STRING_UNION_SCHEMA =
+      Schema.createUnion(list(STRING_SCHEMA));
+  private static final Schema INT_STRING_UNION_SCHEMA =
+      Schema.createUnion(list(INT_SCHEMA, STRING_SCHEMA));
+  private static final Schema STRING_INT_UNION_SCHEMA =
+      Schema.createUnion(list(STRING_SCHEMA, INT_SCHEMA));
+
+  // Non recursive records:
+  private static final Schema EMPTY_RECORD1 =
+      Schema.createRecord("Record1", null, null, false);
+  private static final Schema EMPTY_RECORD2 =
+      Schema.createRecord("Record2", null, null, false);
+  private static final Schema A_INT_RECORD1 =
+      Schema.createRecord("Record1", null, null, false);
+  private static final Schema A_LONG_RECORD1 =
+      Schema.createRecord("Record1", null, null, false);
+  private static final Schema A_INT_B_INT_RECORD1 =
+      Schema.createRecord("Record1", null, null, false);
+  private static final Schema A_DINT_RECORD1 =  // DTYPE means TYPE with default value
+      Schema.createRecord("Record1", null, null, false);
+  private static final Schema A_INT_B_DINT_RECORD1 =
+      Schema.createRecord("Record1", null, null, false);
+  private static final Schema A_DINT_B_DINT_RECORD1 =
+      Schema.createRecord("Record1", null, null, false);
+  static {
+    EMPTY_RECORD1.setFields(Collections.<Field>emptyList());
+    EMPTY_RECORD2.setFields(Collections.<Field>emptyList());
+    A_INT_RECORD1.setFields(list(
+        new Field("a", INT_SCHEMA, null, null)));
+    A_LONG_RECORD1.setFields(list(
+        new Field("a", LONG_SCHEMA, null, null)));
+    A_INT_B_INT_RECORD1.setFields(list(
+        new Field("a", INT_SCHEMA, null, null),
+        new Field("b", INT_SCHEMA, null, null)));
+    A_DINT_RECORD1.setFields(list(
+        new Field("a", INT_SCHEMA, null, 0)));
+    A_INT_B_DINT_RECORD1.setFields(list(
+        new Field("a", INT_SCHEMA, null, null),
+        new Field("b", INT_SCHEMA, null, 0)));
+    A_DINT_B_DINT_RECORD1.setFields(list(
+        new Field("a", INT_SCHEMA, null, 0),
+        new Field("b", INT_SCHEMA, null, 0)));
+  }
+
+  // Recursive records
+  private static final Schema INT_LIST_RECORD =
+      Schema.createRecord("List", null, null, false);
+  private static final Schema LONG_LIST_RECORD =
+      Schema.createRecord("List", null, null, false);
+  static {
+    INT_LIST_RECORD.setFields(list(
+        new Field("head", INT_SCHEMA, null, null),
+        new Field("tail", INT_LIST_RECORD, null, null)));
+    LONG_LIST_RECORD.setFields(list(
+        new Field("head", LONG_SCHEMA, null, null),
+        new Field("tail", LONG_LIST_RECORD, null, null)));
+  }
+
+  // -----------------------------------------------------------------------------------------------
+
+  /** Reader/writer schema pair. */
+  private static final class ReaderWriter {
+    private final Schema mReader;
+    private final Schema mWriter;
+
+    public ReaderWriter(final Schema reader, final Schema writer) {
+      mReader = reader;
+      mWriter = writer;
+    }
+
+    public Schema getReader() {
+      return mReader;
+    }
+
+    public Schema getWriter() {
+      return mWriter;
+    }
+  }
+
+  // -----------------------------------------------------------------------------------------------
+
+  private static final Schema WRITER_SCHEMA = Schema.createRecord(list(
+      new Schema.Field("oldfield1", INT_SCHEMA, null, null),
+      new Schema.Field("oldfield2", STRING_SCHEMA, null, null)));
+
+  @Test
+  public void testValidateSchemaPairMissingField() throws Exception {
+    final List<Schema.Field> readerFields = list(
+        new Schema.Field("oldfield1", INT_SCHEMA, null, null));
+    final Schema reader = Schema.createRecord(readerFields);
+    final SchemaCompatibility.SchemaPairCompatibility expectedResult =
+        new SchemaCompatibility.SchemaPairCompatibility(
+            SchemaCompatibility.SchemaCompatibilityType.COMPATIBLE,
+            reader,
+            WRITER_SCHEMA,
+            SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);
+
+    // Test omitting a field.
+    assertEquals(expectedResult, checkReaderWriterCompatibility(reader, WRITER_SCHEMA));
+  }
+
+  @Test
+  public void testValidateSchemaPairMissingSecondField() throws Exception {
+    final List<Schema.Field> readerFields = list(
+        new Schema.Field("oldfield2", STRING_SCHEMA, null, null));
+    final Schema reader = Schema.createRecord(readerFields);
+    final SchemaCompatibility.SchemaPairCompatibility expectedResult =
+        new SchemaCompatibility.SchemaPairCompatibility(
+            SchemaCompatibility.SchemaCompatibilityType.COMPATIBLE,
+            reader,
+            WRITER_SCHEMA,
+            SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);
+
+    // Test omitting other field.
+    assertEquals(expectedResult, checkReaderWriterCompatibility(reader, WRITER_SCHEMA));
+  }
+
+  @Test
+  public void testValidateSchemaPairAllFields() throws Exception {
+    final List<Schema.Field> readerFields = list(
+        new Schema.Field("oldfield1", INT_SCHEMA, null, null),
+        new Schema.Field("oldfield2", STRING_SCHEMA, null, null));
+    final Schema reader = Schema.createRecord(readerFields);
+    final SchemaCompatibility.SchemaPairCompatibility expectedResult =
+        new SchemaCompatibility.SchemaPairCompatibility(
+            SchemaCompatibility.SchemaCompatibilityType.COMPATIBLE,
+            reader,
+            WRITER_SCHEMA,
+            SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);
+
+    // Test with all fields.
+    assertEquals(expectedResult, checkReaderWriterCompatibility(reader, WRITER_SCHEMA));
+  }
+
+  @Test
+  public void testValidateSchemaNewFieldWithDefault() throws Exception {
+    final List<Schema.Field> readerFields = list(
+        new Schema.Field("oldfield1", INT_SCHEMA, null, null),
+        new Schema.Field("newfield1", INT_SCHEMA, null, 42));
+    final Schema reader = Schema.createRecord(readerFields);
+    final SchemaCompatibility.SchemaPairCompatibility expectedResult =
+        new SchemaCompatibility.SchemaPairCompatibility(
+            SchemaCompatibility.SchemaCompatibilityType.COMPATIBLE,
+            reader,
+            WRITER_SCHEMA,
+            SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);
+
+    // Test new field with default value.
+    assertEquals(expectedResult, checkReaderWriterCompatibility(reader, WRITER_SCHEMA));
+  }
+
+  @Test
+  public void testValidateSchemaNewField() throws Exception {
+    final List<Schema.Field> readerFields = list(
+        new Schema.Field("oldfield1", INT_SCHEMA, null, null),
+        new Schema.Field("newfield1", INT_SCHEMA, null, null));
+    final Schema reader = Schema.createRecord(readerFields);
+    final SchemaCompatibility.SchemaPairCompatibility expectedResult =
+        new SchemaCompatibility.SchemaPairCompatibility(
+            SchemaCompatibility.SchemaCompatibilityType.INCOMPATIBLE,
+            reader,
+            WRITER_SCHEMA,
+            String.format(
+                "Data encoded using writer schema:%n%s%n"
+                + "will or may fail to decode using reader schema:%n%s%n",
+                WRITER_SCHEMA.toString(true),
+                reader.toString(true)));
+
+    // Test new field without default value.
+    assertEquals(expectedResult, checkReaderWriterCompatibility(reader, WRITER_SCHEMA));
+  }
+
+  @Test
+  public void testValidateArrayWriterSchema() throws Exception {
+    final Schema validReader = Schema.createArray(STRING_SCHEMA);
+    final Schema invalidReader = Schema.createMap(STRING_SCHEMA);
+    final SchemaCompatibility.SchemaPairCompatibility validResult =
+        new SchemaCompatibility.SchemaPairCompatibility(
+            SchemaCompatibility.SchemaCompatibilityType.COMPATIBLE,
+            validReader,
+            STRING_ARRAY_SCHEMA,
+            SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);
+    final SchemaCompatibility.SchemaPairCompatibility invalidResult =
+        new SchemaCompatibility.SchemaPairCompatibility(
+            SchemaCompatibility.SchemaCompatibilityType.INCOMPATIBLE,
+            invalidReader,
+            STRING_ARRAY_SCHEMA,
+            String.format(
+                "Data encoded using writer schema:%n%s%n"
+                + "will or may fail to decode using reader schema:%n%s%n",
+                STRING_ARRAY_SCHEMA.toString(true),
+                invalidReader.toString(true)));
+
+    assertEquals(
+        validResult,
+        checkReaderWriterCompatibility(validReader, STRING_ARRAY_SCHEMA));
+    assertEquals(
+        invalidResult,
+        checkReaderWriterCompatibility(invalidReader, STRING_ARRAY_SCHEMA));
+  }
+
+  @Test
+  public void testValidatePrimitiveWriterSchema() throws Exception {
+    final Schema validReader = Schema.create(Schema.Type.STRING);
+    final SchemaCompatibility.SchemaPairCompatibility validResult =
+        new SchemaCompatibility.SchemaPairCompatibility(
+            SchemaCompatibility.SchemaCompatibilityType.COMPATIBLE,
+            validReader,
+            STRING_SCHEMA,
+            SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);
+    final SchemaCompatibility.SchemaPairCompatibility invalidResult =
+        new SchemaCompatibility.SchemaPairCompatibility(
+            SchemaCompatibility.SchemaCompatibilityType.INCOMPATIBLE,
+            INT_SCHEMA,
+            STRING_SCHEMA,
+            String.format(
+                "Data encoded using writer schema:%n%s%n"
+                + "will or may fail to decode using reader schema:%n%s%n",
+                STRING_SCHEMA.toString(true),
+                INT_SCHEMA.toString(true)));
+
+    assertEquals(
+        validResult,
+        checkReaderWriterCompatibility(validReader, STRING_SCHEMA));
+    assertEquals(
+        invalidResult,
+        checkReaderWriterCompatibility(INT_SCHEMA, STRING_SCHEMA));
+  }
+
+  /** Reader union schema must contain all writer union branches. */
+  @Test
+  public void testUnionReaderWriterSubsetIncompatibility() {
+    final Schema unionWriter = Schema.createUnion(list(INT_SCHEMA, STRING_SCHEMA));
+    final Schema unionReader = Schema.createUnion(list(STRING_SCHEMA));
+    final SchemaPairCompatibility result =
+        checkReaderWriterCompatibility(unionReader, unionWriter);
+    assertEquals(SchemaCompatibilityType.INCOMPATIBLE, result.getType());
+  }
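+
+  // Conversely, a reader union that is a superset of the writer union should
+  // be accepted under the rule above (a sketch; this exact pair is not in the
+  // test lists below):
+  //
+  //   Schema w = Schema.createUnion(list(INT_SCHEMA));
+  //   Schema r = Schema.createUnion(list(INT_SCHEMA, STRING_SCHEMA));
+  //   // checkReaderWriterCompatibility(r, w).getType() -> COMPATIBLE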
+
+  // -----------------------------------------------------------------------------------------------
+
+  /** Collection of reader/writer schema pairs that are compatible. */
+  public static final List<ReaderWriter> COMPATIBLE_READER_WRITER_TEST_CASES = list(
+      new ReaderWriter(BOOLEAN_SCHEMA, BOOLEAN_SCHEMA),
+
+      new ReaderWriter(INT_SCHEMA, INT_SCHEMA),
+
+      new ReaderWriter(LONG_SCHEMA, INT_SCHEMA),
+      new ReaderWriter(LONG_SCHEMA, LONG_SCHEMA),
+
+      // Avro spec says INT/LONG can be promoted to FLOAT/DOUBLE.
+      // This is debatable, since the promotion can lose precision.
+      new ReaderWriter(FLOAT_SCHEMA, INT_SCHEMA),
+      new ReaderWriter(FLOAT_SCHEMA, LONG_SCHEMA),
+      new ReaderWriter(DOUBLE_SCHEMA, LONG_SCHEMA),
+
+      new ReaderWriter(DOUBLE_SCHEMA, INT_SCHEMA),
+      new ReaderWriter(DOUBLE_SCHEMA, FLOAT_SCHEMA),
+
+      new ReaderWriter(STRING_SCHEMA, STRING_SCHEMA),
+
+      new ReaderWriter(BYTES_SCHEMA, BYTES_SCHEMA),
+
+      new ReaderWriter(INT_ARRAY_SCHEMA, INT_ARRAY_SCHEMA),
+      new ReaderWriter(LONG_ARRAY_SCHEMA, INT_ARRAY_SCHEMA),
+      new ReaderWriter(INT_MAP_SCHEMA, INT_MAP_SCHEMA),
+      new ReaderWriter(LONG_MAP_SCHEMA, INT_MAP_SCHEMA),
+
+      new ReaderWriter(ENUM1_AB_SCHEMA, ENUM1_AB_SCHEMA),
+      new ReaderWriter(ENUM1_ABC_SCHEMA, ENUM1_AB_SCHEMA),
+
+      // Tests involving unions:
+      new ReaderWriter(EMPTY_UNION_SCHEMA, EMPTY_UNION_SCHEMA),
+      new ReaderWriter(INT_UNION_SCHEMA, INT_UNION_SCHEMA),
+      new ReaderWriter(INT_STRING_UNION_SCHEMA, STRING_INT_UNION_SCHEMA),
+      new ReaderWriter(INT_UNION_SCHEMA, EMPTY_UNION_SCHEMA),
+      new ReaderWriter(LONG_UNION_SCHEMA, INT_UNION_SCHEMA),
+
+      // Special case of singleton unions:
+      new ReaderWriter(INT_UNION_SCHEMA, INT_SCHEMA),
+      new ReaderWriter(INT_SCHEMA, INT_UNION_SCHEMA),
+
+      // Tests involving records:
+      new ReaderWriter(EMPTY_RECORD1, EMPTY_RECORD1),
+      new ReaderWriter(EMPTY_RECORD1, A_INT_RECORD1),
+
+      new ReaderWriter(A_INT_RECORD1, A_INT_RECORD1),
+      new ReaderWriter(A_DINT_RECORD1, A_INT_RECORD1),
+      new ReaderWriter(A_DINT_RECORD1, A_DINT_RECORD1),
+      new ReaderWriter(A_INT_RECORD1, A_DINT_RECORD1),
+
+      new ReaderWriter(A_LONG_RECORD1, A_INT_RECORD1),
+
+      new ReaderWriter(A_INT_RECORD1, A_INT_B_INT_RECORD1),
+      new ReaderWriter(A_DINT_RECORD1, A_INT_B_INT_RECORD1),
+
+      new ReaderWriter(A_INT_B_DINT_RECORD1, A_INT_RECORD1),
+      new ReaderWriter(A_DINT_B_DINT_RECORD1, EMPTY_RECORD1),
+      new ReaderWriter(A_DINT_B_DINT_RECORD1, A_INT_RECORD1),
+      new ReaderWriter(A_INT_B_INT_RECORD1, A_DINT_B_DINT_RECORD1),
+
+      new ReaderWriter(INT_LIST_RECORD, INT_LIST_RECORD),
+      new ReaderWriter(LONG_LIST_RECORD, LONG_LIST_RECORD),
+      new ReaderWriter(LONG_LIST_RECORD, INT_LIST_RECORD),
+
+      new ReaderWriter(NULL_SCHEMA, NULL_SCHEMA)
+  );
+
+  // -----------------------------------------------------------------------------------------------
+
+  /** Collection of reader/writer schema pairs that are incompatible. */
+  public static final List<ReaderWriter> INCOMPATIBLE_READER_WRITER_TEST_CASES = list(
+      new ReaderWriter(NULL_SCHEMA, INT_SCHEMA),
+      new ReaderWriter(NULL_SCHEMA, LONG_SCHEMA),
+
+      new ReaderWriter(BOOLEAN_SCHEMA, INT_SCHEMA),
+
+      new ReaderWriter(INT_SCHEMA, NULL_SCHEMA),
+      new ReaderWriter(INT_SCHEMA, BOOLEAN_SCHEMA),
+      new ReaderWriter(INT_SCHEMA, LONG_SCHEMA),
+      new ReaderWriter(INT_SCHEMA, FLOAT_SCHEMA),
+      new ReaderWriter(INT_SCHEMA, DOUBLE_SCHEMA),
+
+      new ReaderWriter(LONG_SCHEMA, FLOAT_SCHEMA),
+      new ReaderWriter(LONG_SCHEMA, DOUBLE_SCHEMA),
+
+      new ReaderWriter(FLOAT_SCHEMA, DOUBLE_SCHEMA),
+
+      new ReaderWriter(STRING_SCHEMA, BOOLEAN_SCHEMA),
+      new ReaderWriter(STRING_SCHEMA, INT_SCHEMA),
+      new ReaderWriter(STRING_SCHEMA, BYTES_SCHEMA),
+
+      new ReaderWriter(BYTES_SCHEMA, NULL_SCHEMA),
+      new ReaderWriter(BYTES_SCHEMA, INT_SCHEMA),
+      new ReaderWriter(BYTES_SCHEMA, STRING_SCHEMA),
+
+      new ReaderWriter(INT_ARRAY_SCHEMA, LONG_ARRAY_SCHEMA),
+      new ReaderWriter(INT_MAP_SCHEMA, INT_ARRAY_SCHEMA),
+      new ReaderWriter(INT_ARRAY_SCHEMA, INT_MAP_SCHEMA),
+      new ReaderWriter(INT_MAP_SCHEMA, LONG_MAP_SCHEMA),
+
+      new ReaderWriter(ENUM1_AB_SCHEMA, ENUM1_ABC_SCHEMA),
+      new ReaderWriter(ENUM1_BC_SCHEMA, ENUM1_ABC_SCHEMA),
+
+      new ReaderWriter(ENUM1_AB_SCHEMA, ENUM2_AB_SCHEMA),
+      new ReaderWriter(INT_SCHEMA, ENUM2_AB_SCHEMA),
+      new ReaderWriter(ENUM2_AB_SCHEMA, INT_SCHEMA),
+
+      // Tests involving unions:
+      new ReaderWriter(INT_UNION_SCHEMA, INT_STRING_UNION_SCHEMA),
+      new ReaderWriter(STRING_UNION_SCHEMA, INT_STRING_UNION_SCHEMA),
+
+      new ReaderWriter(EMPTY_RECORD2, EMPTY_RECORD1),
+      new ReaderWriter(A_INT_RECORD1, EMPTY_RECORD1),
+      new ReaderWriter(A_INT_B_DINT_RECORD1, EMPTY_RECORD1),
+
+      new ReaderWriter(INT_LIST_RECORD, LONG_LIST_RECORD),
+
+      // Last check:
+      new ReaderWriter(NULL_SCHEMA, INT_SCHEMA)
+  );
+
+  // -----------------------------------------------------------------------------------------------
+
+  /** Tests reader/writer compatibility validation. */
+  @Test
+  public void testReaderWriterCompatibility() {
+    for (ReaderWriter readerWriter : COMPATIBLE_READER_WRITER_TEST_CASES) {
+      final Schema reader = readerWriter.getReader();
+      final Schema writer = readerWriter.getWriter();
+      LOG.debug("Testing compatibility of reader {} with writer {}.", reader, writer);
+      final SchemaPairCompatibility result =
+          checkReaderWriterCompatibility(reader, writer);
+      assertEquals(String.format(
+          "Expecting reader %s to be compatible with writer %s, but tested incompatible.",
+          reader, writer),
+          SchemaCompatibilityType.COMPATIBLE, result.getType());
+    }
+  }
+
+  /** Tests the reader/writer incompatibility validation. */
+  @Test
+  public void testReaderWriterIncompatibility() {
+    for (ReaderWriter readerWriter : INCOMPATIBLE_READER_WRITER_TEST_CASES) {
+      final Schema reader = readerWriter.getReader();
+      final Schema writer = readerWriter.getWriter();
+      LOG.debug("Testing incompatibility of reader {} with writer {}.", reader, writer);
+      final SchemaPairCompatibility result =
+          checkReaderWriterCompatibility(reader, writer);
+      assertEquals(String.format(
+          "Expecting reader %s to be incompatible with writer %s, but tested compatible.",
+          reader, writer),
+          SchemaCompatibilityType.INCOMPATIBLE, result.getType());
+    }
+  }
+
+  // -----------------------------------------------------------------------------------------------
+
+  /**
+   * Descriptor for a test case that encodes a datum according to a given writer schema,
+   * then decodes it according to reader schema and validates the decoded value.
+   */
+  private static final class DecodingTestCase {
+    /** Writer schema used to encode the datum. */
+    private final Schema mWriterSchema;
+
+    /** Datum to encode according to the specified writer schema. */
+    private final Object mDatum;
+
+    /** Reader schema used to decode the datum encoded using the writer schema. */
+    private final Schema mReaderSchema;
+
+    /** Expected datum value when using the reader schema to decode from the writer schema. */
+    private final Object mDecodedDatum;
+
+    public DecodingTestCase(
+        final Schema writerSchema,
+        final Object datum,
+        final Schema readerSchema,
+        final Object decoded) {
+      mWriterSchema = writerSchema;
+      mDatum = datum;
+      mReaderSchema = readerSchema;
+      mDecodedDatum = decoded;
+    }
+
+    public Schema getReaderSchema() {
+      return mReaderSchema;
+    }
+
+    public Schema getWriterSchema() {
+      return mWriterSchema;
+    }
+
+    public Object getDatum() {
+      return mDatum;
+    }
+
+    public Object getDecodedDatum() {
+      return mDecodedDatum;
+    }
+  }
+
+  // -----------------------------------------------------------------------------------------------
+
+  public static final List<DecodingTestCase> DECODING_COMPATIBILITY_TEST_CASES = list(
+      new DecodingTestCase(INT_SCHEMA, 1, INT_SCHEMA, 1),
+      new DecodingTestCase(INT_SCHEMA, 1, LONG_SCHEMA, 1L),
+      new DecodingTestCase(INT_SCHEMA, 1, FLOAT_SCHEMA, 1.0f),
+      new DecodingTestCase(INT_SCHEMA, 1, DOUBLE_SCHEMA, 1.0d),
+
+      // This is currently accepted but causes a precision loss:
+      // IEEE 754 single-precision floats have a 24-bit significand
+      new DecodingTestCase(INT_SCHEMA, (1 << 24) + 1, FLOAT_SCHEMA, (float) ((1 << 24) + 1)),
+
+      // new DecodingTestCase(LONG_SCHEMA, 1L, INT_SCHEMA, 1),  // should work in best-effort!
+
+      new DecodingTestCase(
+          ENUM1_AB_SCHEMA, new EnumSymbol(ENUM1_AB_SCHEMA, "A"),
+          ENUM1_ABC_SCHEMA, new EnumSymbol(ENUM1_ABC_SCHEMA, "A")),
+
+      new DecodingTestCase(
+          ENUM1_ABC_SCHEMA, new EnumSymbol(ENUM1_ABC_SCHEMA, "A"),
+          ENUM1_AB_SCHEMA, new EnumSymbol(ENUM1_AB_SCHEMA, "A")),
+
+      new DecodingTestCase(
+          ENUM1_ABC_SCHEMA, new EnumSymbol(ENUM1_ABC_SCHEMA, "B"),
+          ENUM1_BC_SCHEMA, new EnumSymbol(ENUM1_BC_SCHEMA, "B")),
+
+      new DecodingTestCase(
+          INT_STRING_UNION_SCHEMA, "the string",
+          STRING_SCHEMA, new Utf8("the string")),
+
+      new DecodingTestCase(
+          INT_STRING_UNION_SCHEMA, "the string",
+          STRING_UNION_SCHEMA, new Utf8("the string"))
+);
+
+  /** Tests the reader/writer compatibility at decoding time. */
+  @Test
+  public void testReaderWriterDecodingCompatibility() throws Exception {
+    for (DecodingTestCase testCase : DECODING_COMPATIBILITY_TEST_CASES) {
+      final Schema readerSchema = testCase.getReaderSchema();
+      final Schema writerSchema = testCase.getWriterSchema();
+      final Object datum = testCase.getDatum();
+      final Object expectedDecodedDatum = testCase.getDecodedDatum();
+
+      LOG.debug(
+          "Testing incompatibility of reader {} with writer {}.",
+          readerSchema, writerSchema);
+
+      LOG.debug("Encode datum {} with writer {}.", datum, writerSchema);
+      final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+      final Encoder encoder = EncoderFactory.get().binaryEncoder(baos, null);
+      final DatumWriter<Object> datumWriter = new GenericDatumWriter<Object>(writerSchema);
+      datumWriter.write(datum, encoder);
+      encoder.flush();
+
+      LOG.debug(
+          "Decode datum {} whose writer is {} with reader {}.",
+          new Object[]{datum, writerSchema, readerSchema});
+      final byte[] bytes = baos.toByteArray();
+      final Decoder decoder = DecoderFactory.get().resolvingDecoder(
+          writerSchema, readerSchema,
+          DecoderFactory.get().binaryDecoder(bytes, null));
+      final DatumReader<Object> datumReader = new GenericDatumReader<Object>(readerSchema);
+      final Object decodedDatum = datumReader.read(null, decoder);
+
+      assertEquals(String.format(
+          "Expecting decoded value %s when decoding value %s whose writer schema is %s "
+          + "using reader schema %s, but value was %s.",
+          expectedDecodedDatum, datum, writerSchema, readerSchema, decodedDatum),
+          expectedDecodedDatum, decodedDatum);
+    }
+  }
+
+  /** Borrowed from the Guava library. */
+  private static <E> ArrayList<E> list(E... elements) {
+    final ArrayList<E> list = new ArrayList<E>();
+    Collections.addAll(list, elements);
+    return list;
+  }
+
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaNormalization.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaNormalization.java
new file mode 100644
index 0000000..405d74d
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaNormalization.java
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Formatter;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+import org.junit.experimental.runners.Enclosed;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import org.apache.avro.util.CaseFinder;
+
+
+@RunWith(Enclosed.class)
+public class TestSchemaNormalization {
+
+  @RunWith(Parameterized.class)
+  public static class TestCanonical {
+    String input, expectedOutput;
+    public TestCanonical(String i, String o) { input=i; expectedOutput=o; }
+
+    @Parameters public static List<Object[]> cases() throws IOException
+    { return CaseFinder.find(data(), "canonical", new ArrayList<Object[]>()); }
+
+    @Test public void testCanonicalization() throws Exception {
+      assertEquals(expectedOutput,
+                   SchemaNormalization.toParsingForm(Schema.parse(input)));
+    }
+  }
+
+  @RunWith(Parameterized.class)
+  public static class TestFingerprint {
+    String input, expectedOutput;
+    public TestFingerprint(String i, String o) { input=i; expectedOutput=o; }
+
+    @Parameters public static List<Object[]> cases() throws IOException
+    { return CaseFinder.find(data(),"fingerprint",new ArrayList<Object[]>()); }
+
+    @Test public void testFingerprint() throws Exception {
+      Schema s = Schema.parse(input);
+      long carefulFP = altFingerprint(SchemaNormalization.toParsingForm(s));
+      assertEquals(Long.parseLong(expectedOutput), carefulFP);
+      assertEqHex(carefulFP, SchemaNormalization.parsingFingerprint64(s));
+    }
+  }
+
+  private static String DATA_FILE =
+    (System.getProperty("share.dir", "../../../share")
+     + "/test/data/schema-tests.txt");
+
+  private static BufferedReader data() throws IOException
+  { return new BufferedReader(new FileReader(DATA_FILE)); }
+
+  /** Compute the fingerprint of the UTF-8 encoding of <i>s</i> using a slow
+      algorithm that's an alternative to the one implemented in {@link
+      SchemaNormalization}.  Algorithm from Broder93 ("Some applications of
+      Rabin's fingerprinting method"). */
+  public static long altFingerprint(String s) {
+    // In our algorithm, we multiply all inputs by x^64 (which is
+    // equivalent to prepending a single "1" bit followed by 64 zero
+    // bits).  This both deals with the fact that CRCs ignore leading
+    // zeros and ensures some degree of randomness for small inputs.
+    try {
+      long tmp = altExtend(SchemaNormalization.EMPTY64, 64, ONE,
+                           s.getBytes("UTF-8"));
+      return altExtend(SchemaNormalization.EMPTY64, 64, tmp, POSTFIX);
+    } catch (java.io.UnsupportedEncodingException e)
+      { throw new RuntimeException(e); } 
+  }
+
+  private static long altExtend(long poly, int degree, long fp, byte[] b) {
+    final long overflowBit = 1L<<(64-degree);
+    for (int i = 0; i < b.length; i++) {
+      for (int j = 1; j < 129; j = j<<1) {
+        boolean overflow = (0 != (fp & overflowBit));
+        fp >>>= 1;
+        if (0 != (j&b[i])) fp |= ONE; // shift in the input bit
+        if (overflow) {
+          fp ^= poly; // hi-order coeff of poly kills overflow bit
+        }
+      }
+    }
+    return fp;
+  }
+
+  private static final long ONE = 0x8000000000000000L;
+  private static final byte[] POSTFIX = { 0, 0, 0, 0, 0, 0, 0, 0 };
+
+  private static void assertEqHex(long expected, long actual) {
+    String m = format("0x%016x != 0x%016x", expected, actual);
+    assertTrue(m, expected == actual);
+  }
+
+  private static String format(String f, Object... args) {
+    return (new Formatter()).format(f, args).toString();
+  }
+}
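
Note: the normalization test exercises two public entry points worth calling
out: SchemaNormalization.toParsingForm (the "parsing canonical form", which
strips attributes such as doc strings that don't affect decoding) and
parsingFingerprint64 (the CRC-64-AVRO Rabin fingerprint of that form). A brief
sketch of direct usage (the record schema is illustrative):

    import org.apache.avro.Schema;
    import org.apache.avro.SchemaNormalization;

    public class FingerprintSketch {
      public static void main(String[] args) {
        Schema s = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"R\",\"fields\":"
            + "[{\"name\":\"f\",\"type\":\"int\"}]}");
        // Logically identical schemas normalize to the same canonical form,
        // so they also share the same 64-bit fingerprint.
        String canonical = SchemaNormalization.toParsingForm(s);
        long fp = SchemaNormalization.parsingFingerprint64(s);
        System.out.println(canonical + " -> 0x" + Long.toHexString(fp));
      }
    }
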
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestSchemaValidation.java b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaValidation.java
new file mode 100644
index 0000000..0c0c166
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestSchemaValidation.java
@@ -0,0 +1,190 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro;
+
+import java.util.ArrayList;
+
+import org.apache.avro.reflect.ReflectData;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestSchemaValidation {
+
+  SchemaValidatorBuilder builder = new SchemaValidatorBuilder();
+
+  Schema rec = SchemaBuilder.record("test.Rec").fields()
+      .name("a").type().intType().intDefault(1)
+      .name("b").type().longType().noDefault()
+      .endRecord();
+
+  Schema rec2 = SchemaBuilder.record("test.Rec").fields()
+      .name("a").type().intType().intDefault(1)
+      .name("b").type().longType().noDefault()
+      .name("c").type().intType().intDefault(0)
+      .endRecord();
+
+  Schema rec3 = SchemaBuilder.record("test.Rec").fields()
+      .name("b").type().longType().noDefault()
+      .name("c").type().intType().intDefault(0)
+      .endRecord();
+
+  Schema rec4 = SchemaBuilder.record("test.Rec").fields()
+      .name("b").type().longType().noDefault()
+      .name("c").type().intType().noDefault()
+      .endRecord();
+
+  Schema rec5 = SchemaBuilder.record("test.Rec").fields()
+      .name("a").type().stringType().stringDefault("") // different type from original
+      .name("b").type().longType().noDefault()
+      .name("c").type().intType().intDefault(0)
+      .endRecord();
+
+  @Test
+  public void testAllTypes() throws SchemaValidationException {
+    Schema s = SchemaBuilder.record("r").fields()
+        .requiredBoolean("boolF")
+        .requiredInt("intF")
+        .requiredLong("longF")
+        .requiredFloat("floatF")
+        .requiredDouble("doubleF")
+        .requiredString("stringF")
+        .requiredBytes("bytesF")
+        .name("fixedF1").type().fixed("F1").size(1).noDefault()
+        .name("enumF").type().enumeration("E1").symbols("S").noDefault()
+        .name("mapF").type().map().values().stringType().noDefault()
+        .name("arrayF").type().array().items().stringType().noDefault()
+        .name("recordF").type().record("inner").fields()
+        .name("f").type().intType().noDefault()
+        .endRecord().noDefault()
+        .optionalBoolean("boolO")
+        .endRecord();
+    testValidatorPasses(builder.mutualReadStrategy().validateLatest(), s, s);
+  }
+
+  @Test
+  public void testReadOnePrior() throws SchemaValidationException {
+    testValidatorPasses(builder.canReadStrategy().validateLatest(), rec3, rec);
+    testValidatorPasses(builder.canReadStrategy().validateLatest(), rec5, rec3);
+    testValidatorFails(builder.canReadStrategy().validateLatest(), rec4, rec);
+  }
+
+  @Test
+  public void testReadAllPrior() throws SchemaValidationException {
+    testValidatorPasses(builder.canReadStrategy().validateAll(), rec3, rec, rec2);
+    testValidatorFails(builder.canReadStrategy().validateAll(), rec4, rec, rec2, rec3);
+    testValidatorFails(builder.canReadStrategy().validateAll(), rec5, rec, rec2, rec3);
+  }
+
+  @Test
+  public void testOnePriorCanRead() throws SchemaValidationException {
+    testValidatorPasses(builder.canBeReadStrategy().validateLatest(), rec, rec3);
+    testValidatorFails(builder.canBeReadStrategy().validateLatest(), rec, rec4);
+  }
+
+  @Test
+  public void testAllPriorCanRead() throws SchemaValidationException {
+    testValidatorPasses(builder.canBeReadStrategy().validateAll(), rec, rec3, rec2);
+    testValidatorFails(builder.canBeReadStrategy().validateAll(), rec, rec4, rec3, rec2);
+  }
+
+  @Test
+  public void testOnePriorCompatible() throws SchemaValidationException {
+    testValidatorPasses(builder.mutualReadStrategy().validateLatest(), rec, rec3);
+    testValidatorFails(builder.mutualReadStrategy().validateLatest(), rec, rec4);
+  }
+
+  @Test
+  public void testAllPriorCompatible() throws SchemaValidationException {
+    testValidatorPasses(builder.mutualReadStrategy().validateAll(), rec, rec3, rec2);
+    testValidatorFails(builder.mutualReadStrategy().validateAll(), rec, rec4, rec3, rec2);
+  }
+
+  @Test(expected=AvroRuntimeException.class)
+  public void testInvalidBuild() {
+    builder.strategy(null).validateAll();
+  }
+
+  public static class Point {
+    double x;
+    double y;
+  }
+
+  public static class Circle {
+    Point center;
+    double radius;
+  }
+
+  public static final Schema circleSchema = SchemaBuilder.record("Circle")
+      .fields()
+      .name("center").type().record("Point")
+          .fields()
+          .requiredDouble("x")
+          .requiredDouble("y")
+          .endRecord().noDefault()
+      .requiredDouble("radius")
+      .endRecord();
+
+  public static final Schema circleSchemaDifferentNames = SchemaBuilder
+      .record("crcl").fields()
+      .name("center").type().record("pt")
+      .fields()
+      .requiredDouble("x")
+      .requiredDouble("y")
+      .endRecord().noDefault()
+      .requiredDouble("radius")
+      .endRecord();
+
+  @Test
+  public void testReflectMatchStructure() throws SchemaValidationException {
+    testValidatorPasses(builder.canBeReadStrategy().validateAll(),
+        circleSchemaDifferentNames, ReflectData.get().getSchema(Circle.class));
+  }
+
+  @Test
+  public void testReflectWithAllowNullMatchStructure() throws SchemaValidationException {
+    testValidatorPasses(builder.canBeReadStrategy().validateAll(),
+        circleSchemaDifferentNames, ReflectData.AllowNull.get().getSchema(Circle.class));
+  }
+
+  private void testValidatorPasses(SchemaValidator validator,
+      Schema schema, Schema... prev) throws SchemaValidationException {
+    ArrayList<Schema> prior = new ArrayList<Schema>();
+    for(int i = prev.length - 1; i >= 0; i--) {
+      prior.add(prev[i]);
+    }
+    validator.validate(schema, prior);
+  }
+
+  private void testValidatorFails(SchemaValidator validator,
+      Schema schemaFails, Schema... prev) throws SchemaValidationException {
+    ArrayList<Schema> prior = new ArrayList<Schema>();
+    for(int i = prev.length - 1; i >= 0; i--) {
+      prior.add(prev[i]);
+    }
+    boolean threw = false;
+    try {
+      // should fail
+      validator.validate(schemaFails, prior);
+    } catch (SchemaValidationException sve) {
+      threw = true;
+    }
+    Assert.assertTrue(threw);
+  }
+
+}
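
Note: the validator API above composes a compatibility direction
(canReadStrategy, canBeReadStrategy, or mutualReadStrategy) with a scope
(validateLatest or validateAll). A small sketch of how an application might
gate schema changes with it (the helper name is illustrative):

    import java.util.Arrays;
    import org.apache.avro.Schema;
    import org.apache.avro.SchemaValidationException;
    import org.apache.avro.SchemaValidator;
    import org.apache.avro.SchemaValidatorBuilder;

    public class ValidatorSketch {
      /** Throws SchemaValidationException unless every existing schema
          can read data written with the proposed schema. */
      public static void check(Schema proposed, Schema... existing)
          throws SchemaValidationException {
        SchemaValidator validator =
            new SchemaValidatorBuilder().canBeReadStrategy().validateAll();
        validator.validate(proposed, Arrays.asList(existing));
      }
    }
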
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TypeEnum.java b/lang/java/avro/src/test/java/org/apache/avro/TypeEnum.java
new file mode 100644
index 0000000..fbf2b62
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/TypeEnum.java
@@ -0,0 +1,13 @@
+/**
+ * Autogenerated by Avro
+ * 
+ * DO NOT EDIT DIRECTLY
+ */
+package org.apache.avro;
+@SuppressWarnings("all")
+@org.apache.avro.specific.AvroGenerated
+public enum TypeEnum {
+  a, b, c;
+  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"enum\",\"name\":\"TypeEnum\",\"namespace\":\"org.apache.avro\",\"symbols\":[\"a\",\"b\",\"c\"]}");
+  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/data/RecordBuilderBaseTest.java b/lang/java/avro/src/test/java/org/apache/avro/data/RecordBuilderBaseTest.java
new file mode 100644
index 0000000..c3f236a
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/data/RecordBuilderBaseTest.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.data;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.Schema.Type;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Unit test for RecordBuilderBase.
+ */
+public class RecordBuilderBaseTest {
+  private static Set<Type> primitives;
+  private static Set<Type> nonNullPrimitives;
+  
+  @BeforeClass()
+  public static void setUpBeforeClass() {
+    primitives = new HashSet<Type>(Arrays.asList(Type.values()));
+    primitives.removeAll(Arrays.asList(new Type[] { 
+        Type.RECORD, Type.ENUM, Type.ARRAY, Type.MAP, Type.UNION, Type.FIXED 
+    }));
+    
+    nonNullPrimitives = new HashSet<Type>(primitives);
+    nonNullPrimitives.remove(Type.NULL);
+  }
+  
+  @Test
+  public void testIsValidValueWithPrimitives() { 
+    // Verify that a non-null value is valid for all primitives:
+    for (Type type : primitives) {
+      Field f = new Field("f", Schema.create(type), null, null);
+      Assert.assertTrue(RecordBuilderBase.isValidValue(f, new Object()));
+    }
+    
+    // Verify that null is not valid for all non-null primitives:
+    for (Type type : nonNullPrimitives) {
+      Field f = new Field("f", Schema.create(type), null, null);
+      Assert.assertFalse(RecordBuilderBase.isValidValue(f, null));
+    }
+  }
+  
+  @Test
+  public void testIsValidValueWithNullField() {
+    // Verify that null is a valid value for null fields:
+    Assert.assertTrue(RecordBuilderBase.isValidValue(
+        new Field("f", Schema.create(Type.NULL), null, null), null));
+  }
+  
+  @Test
+  public void testIsValidValueWithUnion() {
+    // Verify that null values are not valid for a union with no null type:
+    Schema unionWithoutNull = Schema.createUnion(Arrays.asList(new Schema[] { 
+        Schema.create(Type.STRING), Schema.create(Type.BOOLEAN)
+    }));
+    
+    Assert.assertTrue(RecordBuilderBase.isValidValue(
+        new Field("f", unionWithoutNull, null, null), new Object()));
+    Assert.assertFalse(RecordBuilderBase.isValidValue(
+        new Field("f", unionWithoutNull, null, null), null));
+    
+    // Verify that null values are valid for a union with a null type:
+    Schema unionWithNull = Schema.createUnion(Arrays.asList(new Schema[] { 
+        Schema.create(Type.STRING), Schema.create(Type.NULL)
+    }));
+    
+    Assert.assertTrue(RecordBuilderBase.isValidValue(
+        new Field("f", unionWithNull, null, null), new Object()));
+    Assert.assertTrue(RecordBuilderBase.isValidValue(
+        new Field("f", unionWithNull, null, null), null));
+  }
+}
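
Note: RecordBuilderBase.isValidValue is the check behind the concrete builders,
e.g. GenericRecordBuilder: unset fields fall back to schema defaults, and null
is only accepted where the schema permits it. A small sketch under that
assumption:

    import org.apache.avro.Schema;
    import org.apache.avro.SchemaBuilder;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.avro.generic.GenericRecordBuilder;

    public class BuilderSketch {
      public static void main(String[] args) {
        Schema rec = SchemaBuilder.record("test.Rec").fields()
            .name("a").type().intType().intDefault(1)
            .name("b").type().longType().noDefault()
            .endRecord();
        // "a" is left unset and picks up its default of 1; "b" must be set
        // explicitly because it has no default.
        GenericRecord r = new GenericRecordBuilder(rec).set("b", 2L).build();
        System.out.println(r);
      }
    }
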
diff --git a/lang/java/avro/src/test/java/org/apache/avro/data/TestTimeConversions.java b/lang/java/avro/src/test/java/org/apache/avro/data/TestTimeConversions.java
new file mode 100644
index 0000000..5e315cd
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/data/TestTimeConversions.java
@@ -0,0 +1,250 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.data;
+
+import org.apache.avro.LogicalTypes;
+import org.apache.avro.Schema;
+import org.apache.avro.data.TimeConversions.DateConversion;
+import org.apache.avro.data.TimeConversions.LossyTimeMicrosConversion;
+import org.apache.avro.data.TimeConversions.LossyTimestampMicrosConversion;
+import org.apache.avro.data.TimeConversions.TimeMicrosConversion;
+import org.apache.avro.data.TimeConversions.TimestampMicrosConversion;
+import org.apache.avro.data.TimeConversions.TimeConversion;
+import org.apache.avro.data.TimeConversions.TimestampConversion;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+import org.joda.time.LocalDate;
+import org.joda.time.LocalTime;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import java.util.Date;
+
+public class TestTimeConversions {
+
+  public static Schema DATE_SCHEMA;
+  public static Schema TIME_MILLIS_SCHEMA;
+  public static Schema TIME_MICROS_SCHEMA;
+  public static Schema TIMESTAMP_MILLIS_SCHEMA;
+  public static Schema TIMESTAMP_MICROS_SCHEMA;
+
+  @BeforeClass
+  public static void createSchemas() {
+    TestTimeConversions.DATE_SCHEMA = LogicalTypes.date()
+        .addToSchema(Schema.create(Schema.Type.INT));
+    TestTimeConversions.TIME_MILLIS_SCHEMA = LogicalTypes.timeMillis()
+        .addToSchema(Schema.create(Schema.Type.INT));
+    TestTimeConversions.TIME_MICROS_SCHEMA = LogicalTypes.timeMicros()
+        .addToSchema(Schema.create(Schema.Type.LONG));
+    TestTimeConversions.TIMESTAMP_MILLIS_SCHEMA = LogicalTypes.timestampMillis()
+        .addToSchema(Schema.create(Schema.Type.LONG));
+    TestTimeConversions.TIMESTAMP_MICROS_SCHEMA = LogicalTypes.timestampMicros()
+        .addToSchema(Schema.create(Schema.Type.LONG));
+  }
+
+  @Test
+  public void testDateConversion() throws Exception {
+    DateConversion conversion = new DateConversion();
+    LocalDate Jan_6_1970 = new LocalDate(1970, 1, 6);    //  5
+    LocalDate Jan_1_1970 = new LocalDate(1970, 1, 1);    //  0
+    LocalDate Dec_27_1969 = new LocalDate(1969, 12, 27); // -5
+
+    Assert.assertEquals("6 Jan 1970 should be 5", 5,
+        (int) conversion.toInt(Jan_6_1970, DATE_SCHEMA, LogicalTypes.date()));
+    Assert.assertEquals("1 Jan 1970 should be 0", 0,
+        (int) conversion.toInt(Jan_1_1970, DATE_SCHEMA, LogicalTypes.date()));
+    Assert.assertEquals("27 Dec 1969 should be -5", -5,
+        (int) conversion.toInt(Dec_27_1969, DATE_SCHEMA, LogicalTypes.date()));
+
+    Assert.assertEquals("6 Jan 1970 should be 5",
+        conversion.fromInt(5, DATE_SCHEMA, LogicalTypes.date()), Jan_6_1970);
+    Assert.assertEquals("1 Jan 1970 should be 0",
+        conversion.fromInt(0, DATE_SCHEMA, LogicalTypes.date()), Jan_1_1970);
+    Assert.assertEquals("27 Dec 1969 should be -5",
+        conversion.fromInt(-5, DATE_SCHEMA, LogicalTypes.date()), Dec_27_1969);
+  }
+
+  @Test
+  public void testTimeMillisConversion() throws Exception {
+    TimeConversion conversion = new TimeConversion();
+    LocalTime oneAM = new LocalTime(1, 0);
+    LocalTime afternoon = new LocalTime(15, 14, 15, 926);
+    int afternoonMillis = ((15 * 60 + 14) * 60 + 15) * 1000 + 926;
+
+    Assert.assertEquals("Midnight should be 0", 0,
+        (int) conversion.toInt(
+            LocalTime.MIDNIGHT, TIME_MILLIS_SCHEMA, LogicalTypes.timeMillis()));
+    Assert.assertEquals("01:00 should be 3,600,000", 3600000,
+        (int) conversion.toInt(
+            oneAM, TIME_MILLIS_SCHEMA, LogicalTypes.timeMillis()));
+    Assert.assertEquals("15:14:15.926 should be " + afternoonMillis,
+        afternoonMillis,
+        (int) conversion.toInt(
+            afternoon, TIME_MILLIS_SCHEMA, LogicalTypes.timeMillis()));
+
+    Assert.assertEquals("Midnight should be 0",
+        LocalTime.MIDNIGHT,
+        conversion.fromInt(0, TIME_MILLIS_SCHEMA, LogicalTypes.timeMillis()));
+    Assert.assertEquals("01:00 should be 3,600,000",
+        oneAM,
+        conversion.fromInt(
+            3600000, TIME_MILLIS_SCHEMA, LogicalTypes.timeMillis()));
+    Assert.assertEquals("15:14:15.926 should be " + afternoonMillis,
+        afternoon,
+        conversion.fromInt(
+            afternoonMillis, TIME_MILLIS_SCHEMA, LogicalTypes.timeMillis()));
+  }
+
+  @Test
+  public void testTimeMicrosConversion() throws Exception {
+    TimeMicrosConversion conversion = new TimeMicrosConversion();
+    LocalTime oneAM = new LocalTime(1, 0);
+    LocalTime afternoon = new LocalTime(15, 14, 15, 926);
+    long afternoonMicros = ((long) (15 * 60 + 14) * 60 + 15) * 1000000 + 926551;
+
+    Assert.assertEquals("Midnight should be 0",
+        LocalTime.MIDNIGHT,
+        conversion.fromLong(0L, TIME_MICROS_SCHEMA, LogicalTypes.timeMicros()));
+    Assert.assertEquals("01:00 should be 3,600,000,000",
+        oneAM,
+        conversion.fromLong(
+            3600000000L, TIME_MICROS_SCHEMA, LogicalTypes.timeMicros()));
+    Assert.assertEquals("15:14:15.926000 should be " + afternoonMicros,
+        afternoon,
+        conversion.fromLong(
+            afternoonMicros, TIME_MICROS_SCHEMA, LogicalTypes.timeMicros()));
+
+    try {
+      conversion.toLong(afternoon,
+          TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros());
+      Assert.fail("Should not convert LocalTime to long");
+    } catch (UnsupportedOperationException e) {
+      // expected
+    }
+  }
+
+  @Test
+  public void testLossyTimeMicrosConversion() throws Exception {
+    TimeMicrosConversion conversion = new LossyTimeMicrosConversion();
+    LocalTime oneAM = new LocalTime(1, 0);
+    LocalTime afternoon = new LocalTime(15, 14, 15, 926);
+    long afternoonMicros = ((long) (15 * 60 + 14) * 60 + 15) * 1000000 + 926551;
+
+    Assert.assertEquals("Midnight should be 0", 0,
+        (long) conversion.toLong(
+            LocalTime.MIDNIGHT, TIME_MICROS_SCHEMA, LogicalTypes.timeMicros()));
+    Assert.assertEquals("01:00 should be 3,600,000,000", 3600000000L,
+        (long) conversion.toLong(
+            oneAM, TIME_MICROS_SCHEMA, LogicalTypes.timeMicros()));
+    Assert.assertEquals("15:14:15.926551 should be " + afternoonMicros,
+        dropMicros(afternoonMicros), // loses precision!
+        (long) conversion.toLong(
+            afternoon, TIME_MICROS_SCHEMA, LogicalTypes.timeMicros()));
+
+    Assert.assertEquals("Midnight should be 0",
+        LocalTime.MIDNIGHT,
+        conversion.fromLong(0L, TIME_MICROS_SCHEMA, LogicalTypes.timeMicros()));
+    Assert.assertEquals("01:00 should be 3,600,000,000",
+        oneAM,
+        conversion.fromLong(
+            3600000000L, TIME_MICROS_SCHEMA, LogicalTypes.timeMicros()));
+    Assert.assertEquals("15:14:15.926000 should be " + afternoonMicros,
+        afternoon,
+        conversion.fromLong(
+            afternoonMicros, TIME_MICROS_SCHEMA, LogicalTypes.timeMicros()));
+  }
+
+  @Test
+  public void testTimestampMillisConversion() throws Exception {
+    TimestampConversion conversion = new TimestampConversion();
+    long nowInstant = new Date().getTime();
+
+    DateTime now = conversion.fromLong(
+        nowInstant, TIMESTAMP_MILLIS_SCHEMA, LogicalTypes.timestampMillis());
+    long roundTrip = conversion.toLong(
+        now, TIMESTAMP_MILLIS_SCHEMA, LogicalTypes.timestampMillis());
+    Assert.assertEquals("Round-trip conversion should work",
+        nowInstant, roundTrip);
+
+    long May_28_2015_21_46_53_221_instant = 1432849613221L;
+    DateTime May_28_2015_21_46_53_221 =
+        new DateTime(2015, 5, 28, 21, 46, 53, 221, DateTimeZone.UTC);
+
+    Assert.assertEquals("Known date should be correct",
+        May_28_2015_21_46_53_221,
+        conversion.fromLong(May_28_2015_21_46_53_221_instant,
+            TIMESTAMP_MILLIS_SCHEMA, LogicalTypes.timestampMillis()));
+    Assert.assertEquals("Known date should be correct",
+        May_28_2015_21_46_53_221_instant,
+        (long) conversion.toLong(May_28_2015_21_46_53_221,
+            TIMESTAMP_MILLIS_SCHEMA, LogicalTypes.timestampMillis()));
+  }
+
+  @Test
+  public void testTimestampMicrosConversion() throws Exception {
+    TimestampMicrosConversion conversion = new TimestampMicrosConversion();
+
+    long May_28_2015_21_46_53_221_843_instant = 1432849613221L * 1000 + 843;
+    DateTime May_28_2015_21_46_53_221 =
+        new DateTime(2015, 5, 28, 21, 46, 53, 221, DateTimeZone.UTC);
+
+    Assert.assertEquals("Known date should be correct",
+        May_28_2015_21_46_53_221,
+        conversion.fromLong(May_28_2015_21_46_53_221_843_instant,
+            TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros()));
+
+    try {
+      conversion.toLong(May_28_2015_21_46_53_221,
+          TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros());
+      Assert.fail("Should not convert DateTime to long");
+    } catch (UnsupportedOperationException e) {
+      // expected
+    }
+  }
+
+  @Test
+  public void testLossyTimestampMicrosConversion() throws Exception {
+    TimestampMicrosConversion conversion = new LossyTimestampMicrosConversion();
+    long nowInstant = new Date().getTime() * 1000 + 674; // add fake micros
+
+    DateTime now = conversion.fromLong(
+        nowInstant, TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros());
+    long roundTrip = conversion.toLong(
+        now, TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros());
+    Assert.assertEquals("Round-trip conversion should lose microseconds",
+        dropMicros(nowInstant), roundTrip);
+
+    long May_28_2015_21_46_53_221_843_instant = 1432849613221L * 1000 + 843;
+    DateTime May_28_2015_21_46_53_221 =
+        new DateTime(2015, 5, 28, 21, 46, 53, 221, DateTimeZone.UTC);
+
+    Assert.assertEquals("Known date should be correct",
+        May_28_2015_21_46_53_221,
+        conversion.fromLong(May_28_2015_21_46_53_221_843_instant,
+            TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros()));
+    Assert.assertEquals("Known date should be correct",
+        dropMicros(May_28_2015_21_46_53_221_843_instant),
+        (long) conversion.toLong(May_28_2015_21_46_53_221,
+            TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros()));
+  }
+
+  private long dropMicros(long micros) {
+    return micros / 1000 * 1000;
+  }
+}
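
Note: each conversion above maps between a logical-type-annotated primitive and
a Joda-Time value; the "lossy" variants truncate sub-millisecond precision
instead of refusing to convert. A short sketch of standalone use (and, assuming
this build exposes GenericData.addLogicalTypeConversion, of registering a
conversion so generic data surfaces LocalDate instead of raw ints):

    import org.apache.avro.LogicalTypes;
    import org.apache.avro.Schema;
    import org.apache.avro.data.TimeConversions.DateConversion;
    import org.apache.avro.generic.GenericData;
    import org.joda.time.LocalDate;

    public class ConversionSketch {
      public static void main(String[] args) {
        Schema dateSchema =
            LogicalTypes.date().addToSchema(Schema.create(Schema.Type.INT));
        DateConversion conversion = new DateConversion();
        // Dates are encoded as days since the Unix epoch: 1970-01-06 -> 5.
        int days = conversion.toInt(
            new LocalDate(1970, 1, 6), dateSchema, LogicalTypes.date());
        System.out.println(days);

        // Assumed registration hook; lets readers/writers apply the conversion.
        GenericData data = new GenericData();
        data.addLogicalTypeConversion(new DateConversion());
      }
    }
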
diff --git a/lang/java/avro/src/test/java/org/apache/avro/file/TestBZip2Codec.java b/lang/java/avro/src/test/java/org/apache/avro/file/TestBZip2Codec.java
new file mode 100644
index 0000000..febcaef
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/file/TestBZip2Codec.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.junit.Test;
+import static org.junit.Assert.assertTrue;
+
+public class TestBZip2Codec {
+  
+  @Test
+  public void testBZip2CompressionAndDecompression() throws IOException {
+    Codec codec = CodecFactory.fromString("bzip2").createInstance();
+    assertTrue(codec instanceof BZip2Codec);
+    assertTrue(codec.getName().equals("bzip2"));
+    
+    // Use three times the BZip2 codec's default buffer size, plus some extra
+    final int inputByteSize = BZip2Codec.DEFAULT_BUFFER_SIZE * 3 + 42;
+    
+    byte[] inputByteArray = new byte[inputByteSize];
+    
+    //Generate something that will compress well
+    for (int i = 0; i < inputByteSize; i++) {
+      inputByteArray[i] = (byte)(65 + i % 10);
+    }
+    
+    ByteBuffer inputByteBuffer = ByteBuffer.allocate(inputByteSize * 2);
+    inputByteBuffer.put(inputByteArray);
+    
+    ByteBuffer compressedBuffer = codec.compress(inputByteBuffer);
+    
+    // Make sure something was returned
+    assertTrue(compressedBuffer.array().length > 0);
+    // Make sure the compressed output is smaller than the original
+    assertTrue(compressedBuffer.array().length < inputByteArray.length);
+    
+    ByteBuffer decompressedBuffer = codec.decompress(compressedBuffer);
+    
+    //The original array should be the same length as the decompressed array
+    assertTrue(decompressedBuffer.array().length == inputByteArray.length);
+    
+    // Every byte in the output array should equal the corresponding input byte
+    byte[] outputByteArray = decompressedBuffer.array();
+    for (int i = 0; i < inputByteSize; i++) {
+      assertTrue(inputByteArray[i] == outputByteArray[i]);
+    }
+  }
+}
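
Note: in normal use a codec is not driven by hand as in this test; it is
attached to a container-file writer, which then compresses each data block
transparently. A minimal sketch (file name and schema are illustrative):

    import java.io.File;
    import java.io.IOException;
    import org.apache.avro.Schema;
    import org.apache.avro.file.CodecFactory;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecord;

    public class CodecSketch {
      /** Opens a bzip2-compressed Avro container file for writing. */
      public static DataFileWriter<GenericRecord> openWriter(Schema schema, File out)
          throws IOException {
        DataFileWriter<GenericRecord> writer = new DataFileWriter<GenericRecord>(
            new GenericDatumWriter<GenericRecord>(schema));
        writer.setCodec(CodecFactory.fromString("bzip2"));
        return writer.create(schema, out);
      }
    }
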
diff --git a/lang/java/avro/src/test/java/org/apache/avro/file/TestCustomCodec.java b/lang/java/avro/src/test/java/org/apache/avro/file/TestCustomCodec.java
new file mode 100644
index 0000000..35d0f97
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/file/TestCustomCodec.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.file;
+
+
+import org.apache.avro.file.codec.CustomCodec;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class TestCustomCodec {
+
+  @Test
+  public void testCustomCodec() {
+    CustomCodec customCodec = new CustomCodec();
+    Codec snappyCodec = new SnappyCodec.Option().createInstance();
+    assertTrue(customCodec.equals(new CustomCodec()));
+    assertFalse(customCodec.equals(snappyCodec));
+
+    String testString = "Testing 123";
+    ByteBuffer original = ByteBuffer.allocate(testString.getBytes().length);
+    original.put(testString.getBytes());
+    original.rewind();
+    ByteBuffer decompressed = null;
+    try {
+      ByteBuffer compressed = customCodec.compress(original);
+      compressed.rewind();
+      decompressed = customCodec.decompress(compressed);
+    } catch (IOException e) {
+      // Fail the test instead of swallowing the error and NPE-ing below.
+      throw new RuntimeException("Codec round trip failed", e);
+    }
+
+    assertEquals(testString, new String(decompressed.array()));
+
+  }
+
+}
\ No newline at end of file
diff --git a/lang/java/avro/src/test/java/org/apache/avro/file/TestLengthLimitedInputStream.java b/lang/java/avro/src/test/java/org/apache/avro/file/TestLengthLimitedInputStream.java
new file mode 100644
index 0000000..2494428
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/file/TestLengthLimitedInputStream.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestLengthLimitedInputStream {
+  InputStream raw;
+
+  @Before
+  public void setupRawStream() {
+    byte[] buf = new byte[128];
+    for (int i = 0; i < 128; ++i) {
+      buf[i] = (byte)i;
+    }
+    raw = new ByteArrayInputStream(buf);
+  }
+
+  @Test
+  public void testAvailable() throws IOException {
+    InputStream is = new LengthLimitedInputStream(raw, 10);
+    assertEquals(10, is.available());
+    is.skip(100);
+    assertEquals(0, is.available());
+  }
+
+  @Test
+  public void testRead() throws IOException {
+    InputStream is = new LengthLimitedInputStream(raw, 10);
+    byte[] x = new byte[12];
+    assertEquals(0, is.read());
+    assertEquals(9, is.read(x));
+    assertEquals(-1, is.read(x));
+    assertEquals(9, x[8]);
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/file/TestSeekableByteArrayInput.java b/lang/java/avro/src/test/java/org/apache/avro/file/TestSeekableByteArrayInput.java
new file mode 100644
index 0000000..9c5e3ff
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/file/TestSeekableByteArrayInput.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.file;
+
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.generic.GenericData.Record;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.generic.IndexedRecord;
+import org.apache.avro.specific.SpecificDatumWriter;
+import org.apache.avro.util.Utf8;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestSeekableByteArrayInput {
+
+    private byte[] getSerializedMessage(IndexedRecord message, Schema schema) throws Exception {
+        ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
+        SpecificDatumWriter<IndexedRecord> writer = new SpecificDatumWriter<IndexedRecord>();
+        DataFileWriter<IndexedRecord> dfw = null;
+        try {
+            dfw = new DataFileWriter<IndexedRecord>(writer).create(schema, baos);
+            dfw.append(message);
+        } finally {
+            if (dfw != null) {
+                dfw.close();
+            }
+        }
+        return baos.toByteArray();
+    }
+
+    private Schema getTestSchema() throws Exception {
+        Schema schema = Schema.createRecord("TestRecord", "this is a test record", "org.apache.avro.file", false);
+        List<Field> fields = new ArrayList<Field>();
+        fields.add(new Field("name", Schema.create(Type.STRING), "this is a test field", null));
+        schema.setFields(fields);
+        return schema;
+    }
+
+    @Test
+    public void testSerialization() throws Exception {
+        Schema testSchema = getTestSchema();
+        GenericRecord message = new Record(testSchema);
+        message.put("name", "testValue");
+
+        byte[] data = getSerializedMessage(message, testSchema);
+
+        GenericDatumReader<IndexedRecord> reader = new GenericDatumReader<IndexedRecord>(testSchema);
+
+        SeekableInput in = new SeekableByteArrayInput(data);
+        FileReader<IndexedRecord> dfr = null;
+        IndexedRecord result = null;
+        try {
+            dfr = DataFileReader.openReader(in, reader);
+            result = dfr.next();
+        } finally {
+            if (dfr != null) {
+                dfr.close();
+            }
+        }
+        Assert.assertNotNull(result);
+        Assert.assertTrue(result instanceof GenericRecord);
+        Assert.assertEquals(new Utf8("testValue"), ((GenericRecord) result).get("name"));
+    }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/file/codec/CustomCodec.java b/lang/java/avro/src/test/java/org/apache/avro/file/codec/CustomCodec.java
new file mode 100644
index 0000000..7ff4bcf
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/file/codec/CustomCodec.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.file.codec;
+
+import org.apache.avro.file.Codec;
+
+import java.io.IOException;
+import java.nio.*;
+
+/**
+ * Simple custom codec to validate making Codec public.
+ * Compress and decompress operations are just a bitwise NOT of the data.
+ */
+public class CustomCodec extends Codec {
+
+  private static final String CODECNAME = "CUSTOMCODEC";
+
+  @Override
+  public String getName() {
+    return CODECNAME;
+  }
+
+  @Override
+  public ByteBuffer compress(ByteBuffer in) throws IOException {
+    ByteBuffer out = ByteBuffer.allocate(in.remaining());
+    while (in.hasRemaining())
+      out.put((byte) ~in.get());
+    return out;
+  }
+
+  @Override
+  public ByteBuffer decompress(ByteBuffer in) throws IOException {
+    ByteBuffer out = ByteBuffer.allocate(in.remaining());
+    while (in.hasRemaining())
+      out.put((byte) ~in.get());
+    return out;
+  }
+
+  @Override
+  public boolean equals(Object other) {
+    if (this == other)
+      return true;
+    if (other instanceof Codec) {
+      ByteBuffer original = ByteBuffer.allocate(getName().getBytes().length);
+      original.put(getName().getBytes());
+      original.rewind();
+      try {
+        return compareDecompress((Codec) other, original);
+      } catch (IOException e) {
+        return false;
+      }
+    } else
+      return false;
+  }
+
+  /**
+   * Codecs must implement an equals() method.  Two codecs, A and B, are equal
+   * if: the result of A and B decompressing content compressed by A is the same,
+   * AND the result of A and B decompressing content compressed by B is the same.
+   */
+  private boolean compareDecompress(Codec other, ByteBuffer original) throws IOException {
+    ByteBuffer compressedA = this.compress(original);
+    original.rewind();
+    ByteBuffer compressedB = other.compress(original);
+
+    if (this.decompress(compressedA).equals(other.decompress((ByteBuffer) compressedA.rewind())) &&
+      this.decompress(compressedB).equals(other.decompress((ByteBuffer) compressedB.rewind()))
+      ) {
+      return true;
+    }
+    return false;
+  }
+
+  @Override
+  public int hashCode() {
+    return getName().hashCode();
+  }
+}
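
Note: making Codec and CodecFactory public is what allows third-party codecs
like the one above. Assuming CodecFactory.addCodec is available in this
version, a custom codec can be registered under a name so that
CodecFactory.fromString can resolve it; a hedged sketch:

    import org.apache.avro.file.Codec;
    import org.apache.avro.file.CodecFactory;
    import org.apache.avro.file.codec.CustomCodec;

    public class CustomCodecRegistration {
      public static void register() {
        // After this, CodecFactory.fromString("customcodec") yields the factory.
        CodecFactory.addCodec("customcodec", new CodecFactory() {
          @Override
          protected Codec createInstance() {
            return new CustomCodec();
          }
        });
      }
    }
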
diff --git a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java
new file mode 100644
index 0000000..fe59341
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericData.java
@@ -0,0 +1,481 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.generic;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Collection;
+import java.util.ArrayDeque;
+
+import static org.junit.Assert.*;
+
+import java.util.Arrays;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.SchemaBuilder;
+import org.apache.avro.io.BinaryData;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.generic.GenericData.Record;
+import org.apache.avro.util.Utf8;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonParseException;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.map.ObjectMapper;
+
+import org.junit.Test;
+
+public class TestGenericData {
+  
+  @Test(expected=AvroRuntimeException.class)
+  public void testRecordConstructorNullSchema() throws Exception {
+    new GenericData.Record(null);
+  }
+
+  @Test(expected=AvroRuntimeException.class)
+  public void testRecordConstructorWrongSchema() throws Exception {
+    new GenericData.Record(Schema.create(Schema.Type.INT));
+  }
+
+  @Test(expected=AvroRuntimeException.class)
+  public void testArrayConstructorNullSchema() throws Exception {
+    new GenericData.Array<Object>(1, null);
+  }
+
+  @Test(expected=AvroRuntimeException.class)
+  public void testArrayConstructorWrongSchema() throws Exception {
+    new GenericData.Array<Object>(1, Schema.create(Schema.Type.INT));
+  }
+
+  @Test(expected=AvroRuntimeException.class)
+  public void testRecordCreateEmptySchema() throws Exception {
+    Schema s = Schema.createRecord("schemaName", "schemaDoc", "namespace", false);
+    Record r = new GenericData.Record(s);
+  }
+
+  @Test(expected=AvroRuntimeException.class)
+  public void testGetEmptySchemaFields() throws Exception {
+    Schema s = Schema.createRecord("schemaName", "schemaDoc", "namespace", false);
+    s.getFields();
+  }
+
+  @Test(expected=AvroRuntimeException.class)
+  public void testGetEmptySchemaField() throws Exception {
+    Schema s = Schema.createRecord("schemaName", "schemaDoc", "namespace", false);
+    s.getField("foo");
+  }
+
+  @Test(expected=AvroRuntimeException.class)
+  public void testRecordPutInvalidField() throws Exception {
+    Schema s = Schema.createRecord("schemaName", "schemaDoc", "namespace", false);
+    List<Schema.Field> fields = new ArrayList<Schema.Field>();
+    fields.add(new Schema.Field("someFieldName", s, "docs", null));
+    s.setFields(fields);
+    Record r = new GenericData.Record(s);
+    r.put("invalidFieldName", "someValue");
+  }
+  
+  /** Make sure that even with nulls, hashCode() doesn't throw NPE. */
+  @Test
+  public void testHashCode() {
+    GenericData.get().hashCode(null, Schema.create(Type.NULL));
+    GenericData.get().hashCode(null, Schema.createUnion(
+        Arrays.asList(Schema.create(Type.BOOLEAN), Schema.create(Type.STRING))));
+    List<CharSequence> stuff = new ArrayList<CharSequence>();
+    stuff.add("string");
+    Schema schema = recordSchema();
+    GenericRecord r = new GenericData.Record(schema);
+    r.put(0, stuff);
+    GenericData.get().hashCode(r, schema);
+  }
+  
+  @Test
+  public void testEquals() {
+    Schema s = recordSchema();
+    GenericRecord r0 = new GenericData.Record(s);
+    GenericRecord r1 = new GenericData.Record(s);
+    GenericRecord r2 = new GenericData.Record(s);
+    Collection<CharSequence> l0 = new ArrayDeque<CharSequence>();
+    List<CharSequence> l1 = new ArrayList<CharSequence>();
+    GenericArray<CharSequence> l2 = 
+      new GenericData.Array<CharSequence>(1,s.getFields().get(0).schema());
+    String foo = "foo";
+    l0.add(new StringBuffer(foo));
+    l1.add(foo);
+    l2.add(new Utf8(foo));
+    r0.put(0, l0);
+    r1.put(0, l1);
+    r2.put(0, l2);
+    assertEquals(r0, r1);
+    assertEquals(r0, r2);
+    assertEquals(r1, r2);
+  }
+  
+  private Schema recordSchema() {
+    List<Field> fields = new ArrayList<Field>();
+    fields.add(new Field("anArray", Schema.createArray(Schema.create(Type.STRING)), null, null));
+    Schema schema = Schema.createRecord("arrayFoo", "test", "mytest", false);
+    schema.setFields(fields);
+    
+    return schema;
+  }
+
+  @Test public void testEquals2() {
+    Schema schema1 = Schema.createRecord("r", null, "x", false);
+    List<Field> fields1 = new ArrayList<Field>();
+    fields1.add(new Field("a", Schema.create(Schema.Type.STRING), null, null,
+                          Field.Order.IGNORE));
+    schema1.setFields(fields1);
+
+    // only differs in field order
+    Schema schema2 = Schema.createRecord("r", null, "x", false);
+    List<Field> fields2 = new ArrayList<Field>();
+    fields2.add(new Field("a", Schema.create(Schema.Type.STRING), null, null,
+                          Field.Order.ASCENDING));
+    schema2.setFields(fields2);
+
+    GenericRecord record1 = new GenericData.Record(schema1);
+    record1.put("a", "1");
+
+    GenericRecord record2 = new GenericData.Record(schema2);
+    record2.put("a", "2");
+
+    assertFalse(record2.equals(record1));
+    assertFalse(record1.equals(record2));
+  }
+
+  @Test
+  public void testRecordGetFieldDoesntExist() throws Exception {
+    List<Field> fields = new ArrayList<Field>();
+    Schema schema = Schema.createRecord(fields);
+    GenericData.Record record = new GenericData.Record(schema);
+    assertNull(record.get("does not exist"));
+  }
+  
+  @Test
+  public void testArrayReversal() {
+    Schema schema = Schema.createArray(Schema.create(Schema.Type.INT));
+    GenericArray<Integer> forward = new GenericData.Array<Integer>(10, schema);
+    GenericArray<Integer> backward = new GenericData.Array<Integer>(10, schema);
+    for (int i = 0; i <= 9; i++) {
+      forward.add(i);
+    }
+    for (int i = 9; i >= 0; i--) {
+      backward.add(i);
+    }
+    forward.reverse();
+    assertTrue(forward.equals(backward));
+  }
+
+  @Test
+  public void testArrayListInterface() {
+    Schema schema = Schema.createArray(Schema.create(Schema.Type.INT));
+    GenericArray<Integer> array = new GenericData.Array<Integer>(1, schema);
+    array.add(99);
+    assertEquals(new Integer(99), array.get(0));
+    List<Integer> list = new ArrayList<Integer>();
+    list.add(99);
+    assertEquals(array, list);
+    assertEquals(list, array);
+    assertEquals(list.hashCode(), array.hashCode());
+    try {
+      array.get(2);
+      fail("Expected IndexOutOfBoundsException getting index 2");
+    } catch (IndexOutOfBoundsException e) {}
+    array.clear();
+    assertEquals(0, array.size());
+    try {
+      array.get(0);
+      fail("Expected IndexOutOfBoundsException getting index 0 after clear()");
+    } catch (IndexOutOfBoundsException e) {}
+  }
+
+  @Test
+  public void testArrayAddAtLocation() {
+    Schema schema = Schema.createArray(Schema.create(Schema.Type.INT));
+    GenericArray<Integer> array = new GenericData.Array<Integer>(6, schema);
+    array.clear();
+    for(int i=0; i<5; ++i)
+      array.add(i);
+    assertEquals(5, array.size());
+    array.add(0, 6);
+    assertEquals(new Integer(6), array.get(0));
+    assertEquals(6, array.size());
+    assertEquals(new Integer(0), array.get(1));
+    assertEquals(new Integer(4), array.get(5));
+    array.add(6, 7);
+    assertEquals(new Integer(7), array.get(6));
+    assertEquals(7, array.size());
+    assertEquals(new Integer(6), array.get(0));
+    assertEquals(new Integer(4), array.get(5));
+    array.add(1, 8);
+    assertEquals(new Integer(8), array.get(1));
+    assertEquals(new Integer(0), array.get(2));
+    assertEquals(new Integer(6), array.get(0));
+    assertEquals(8, array.size());
+    try {
+      array.get(9);
+      fail("Expected IndexOutOfBoundsException after adding elements");
+    } catch (IndexOutOfBoundsException e) {}
+  }
+
+  @Test
+  public void testArrayRemove() {
+    Schema schema = Schema.createArray(Schema.create(Schema.Type.INT));
+    GenericArray<Integer> array = new GenericData.Array<Integer>(10, schema);
+    array.clear();
+    for(int i=0; i<10; ++i)
+      array.add(i);
+    assertEquals(10, array.size());
+    assertEquals(new Integer(0), array.get(0));
+    assertEquals(new Integer(9), array.get(9));
+
+    array.remove(0);
+    assertEquals(9, array.size());
+    assertEquals(new Integer(1), array.get(0));
+    assertEquals(new Integer(2), array.get(1));
+    assertEquals(new Integer(9), array.get(8));
+
+    // Test boundary errors.
+    try {
+      array.get(9);
+      fail("Expected IndexOutOfBoundsException after removing an element");
+    } catch (IndexOutOfBoundsException e){}
+    try {
+      array.set(9, 99);
+      fail("Expected IndexOutOfBoundsException after removing an element");
+    } catch (IndexOutOfBoundsException e){}
+    try {
+      array.remove(9);
+      fail("Expected IndexOutOfBoundsException after removing an element");
+    } catch (IndexOutOfBoundsException e){}
+
+    // Test that we can still remove from a properly sized array, and check the return value
+    assertEquals(new Integer(9), array.remove(8));
+    assertEquals(8, array.size());
+
+    // Test insertion after remove
+    array.add(88);
+    assertEquals(new Integer(88), array.get(8));
+  }
+
+  @Test
+  public void testArraySet() {
+    Schema schema = Schema.createArray(Schema.create(Schema.Type.INT));
+    GenericArray<Integer> array = new GenericData.Array<Integer>(10, schema);
+    array.clear();
+    for(int i=0; i<10; ++i)
+      array.add(i);
+    assertEquals(10, array.size());
+    assertEquals(new Integer(0), array.get(0));
+    assertEquals(new Integer(5), array.get(5));
+
+    assertEquals(new Integer(5), array.set(5, 55));
+    assertEquals(10, array.size());
+    assertEquals(new Integer(55), array.get(5));
+  }
+  
+  @Test
+  public void testToStringIsJson() throws JsonParseException, IOException {
+    Field stringField = new Field("string", Schema.create(Type.STRING), null, null);
+    Field enumField = new Field("enum", Schema.createEnum("my_enum", "doc", null, Arrays.asList("a", "b", "c")), null, null);
+    Schema schema = Schema.createRecord("my_record", "doc", "mytest", false);
+    schema.setFields(Arrays.asList(stringField, enumField));
+    
+    GenericRecord r = new GenericData.Record(schema);
+    // \u2013 is EN DASH
+    r.put(stringField.name(), "hello\nthere\"\tyou\u2013}");
+    r.put(enumField.name(), new GenericData.EnumSymbol(enumField.schema(),"a"));
+    
+    String json = r.toString();
+    JsonFactory factory = new JsonFactory();
+    JsonParser parser = factory.createJsonParser(json);
+    ObjectMapper mapper = new ObjectMapper();
+    
+    // will throw exception if string is not parsable json
+    mapper.readTree(parser);
+  }
+
+  @Test public void testToStringDoesNotEscapeForwardSlash() throws Exception {
+    GenericData data = GenericData.get();
+    assertEquals("\"/\"", data.toString("/"));
+  }
+
+  @Test public void testToStringNanInfinity() throws Exception {
+    GenericData data = GenericData.get();
+    assertEquals("\"Infinity\"",data.toString(Float.POSITIVE_INFINITY));
+    assertEquals("\"-Infinity\"",data.toString(Float.NEGATIVE_INFINITY));
+    assertEquals("\"NaN\"", data.toString(Float.NaN));
+    assertEquals("\"Infinity\"",data.toString(Double.POSITIVE_INFINITY));
+    assertEquals("\"-Infinity\"",data.toString(Double.NEGATIVE_INFINITY));
+    assertEquals("\"NaN\"", data.toString(Double.NaN));
+  }
+
+  @Test
+  public void testCompare() {
+    // Prepare a schema for testing.
+    Field integerField = new Field("test", Schema.create(Type.INT), null, null);
+    List<Field> fields = new ArrayList<Field>();
+    fields.add(integerField);
+    Schema record = Schema.createRecord("test", null, null, false);
+    record.setFields(fields);
+    
+    ByteArrayOutputStream b1 = new ByteArrayOutputStream(5);
+    ByteArrayOutputStream b2 = new ByteArrayOutputStream(5);
+    BinaryEncoder b1Enc = EncoderFactory.get().binaryEncoder(b1, null);
+    BinaryEncoder b2Enc = EncoderFactory.get().binaryEncoder(b2, null);
+    // Prepare two different datums
+    Record testDatum1 = new Record(record);
+    testDatum1.put(0, 1);
+    Record testDatum2 = new Record(record);
+    testDatum2.put(0, 2);
+    GenericDatumWriter<Record> gWriter = new GenericDatumWriter<Record>(record);
+    Integer start1 = 0, start2 = 0;
+    try {
+      // Write two datums in each stream
+      // and get the offset length after the first write in each.
+      gWriter.write(testDatum1, b1Enc);
+      b1Enc.flush();
+      start1 = b1.size();
+      gWriter.write(testDatum1, b1Enc);
+      b1Enc.flush();
+      b1.close();
+      gWriter.write(testDatum2, b2Enc);
+      b2Enc.flush();
+      start2 = b2.size();
+      gWriter.write(testDatum2, b2Enc);
+      b2Enc.flush();
+      b2.close();
+      // Compare to check if offset-based compare works right.
+      assertEquals(-1, BinaryData.compare(b1.toByteArray(), start1, b2.toByteArray(), start2, record));
+    } catch (IOException e) {
+      fail("IOException while writing records to output stream.");
+    }
+  }
+  
+  @Test
+  public void testEnumCompare() {
+    Schema s = Schema.createEnum("Kind",null,null,Arrays.asList("Z","Y","X"));
+    GenericEnumSymbol z = new GenericData.EnumSymbol(s, "Z");
+    GenericEnumSymbol y = new GenericData.EnumSymbol(s, "Y");
+    assertEquals(0, z.compareTo(z));
+    assertTrue(y.compareTo(z) > 0);
+    assertTrue(z.compareTo(y) < 0);
+  }
+
+  @Test
+  public void testByteBufferDeepCopy() {
+    // Test that a deep copy of a byte buffer respects the byte buffer
+    // limits and capacity.
+    byte[] buffer_value = {0, 1, 2, 3, 0, 0, 0};
+    ByteBuffer buffer = ByteBuffer.wrap(buffer_value, 1, 4);
+    Schema schema = Schema.createRecord("my_record", "doc", "mytest", false);
+    Field byte_field =
+      new Field("bytes", Schema.create(Type.BYTES), null, null);
+    schema.setFields(Arrays.asList(byte_field));
+    
+    GenericRecord record = new GenericData.Record(schema);
+    record.put(byte_field.name(), buffer);
+    
+    GenericRecord copy = GenericData.get().deepCopy(schema, record);
+    ByteBuffer buffer_copy = (ByteBuffer) copy.get(byte_field.name());
+
+    assertEquals(buffer, buffer_copy);
+  }
+
+  @Test
+  public void testValidateNullableEnum() {
+    List<Schema> unionTypes = new ArrayList<Schema>();
+    Schema schema;
+    Schema nullSchema = Schema.create(Type.NULL);
+    Schema enumSchema = Schema.createEnum("AnEnum", null, null, Arrays.asList("X","Y","Z"));
+    GenericEnumSymbol w = new GenericData.EnumSymbol(enumSchema, "W");
+    GenericEnumSymbol x = new GenericData.EnumSymbol(enumSchema, "X");
+    GenericEnumSymbol y = new GenericData.EnumSymbol(enumSchema, "Y");
+    GenericEnumSymbol z = new GenericData.EnumSymbol(enumSchema, "Z");
+
+    // null is first
+    unionTypes.clear();
+    unionTypes.add(nullSchema);
+    unionTypes.add(enumSchema);
+    schema = Schema.createUnion(unionTypes);
+
+    assertTrue(GenericData.get().validate(schema, z));
+    assertTrue(GenericData.get().validate(schema, y));
+    assertTrue(GenericData.get().validate(schema, x));
+    assertFalse(GenericData.get().validate(schema, w));
+    assertTrue(GenericData.get().validate(schema, null));
+
+    // null is last
+    unionTypes.clear();
+    unionTypes.add(enumSchema);
+    unionTypes.add(nullSchema);
+    schema = Schema.createUnion(unionTypes);
+
+    assertTrue(GenericData.get().validate(schema, z));
+    assertTrue(GenericData.get().validate(schema, y));
+    assertTrue(GenericData.get().validate(schema, x));
+    assertFalse(GenericData.get().validate(schema, w));
+    assertTrue(GenericData.get().validate(schema, null));
+  }
+
+  private enum AnEnum { ONE, TWO, THREE }
+  @Test
+  public void validateRequiresGenericSymbolForEnumSchema() {
+    final Schema schema = Schema.createEnum("my_enum", "doc", "namespace", Arrays.asList("ONE","TWO","THREE"));
+    final GenericData gd = GenericData.get();
+    
+    /* positive cases */
+    assertTrue(gd.validate(schema, new GenericData.EnumSymbol(schema, "ONE")));
+    assertTrue(gd.validate(schema, new GenericData.EnumSymbol(schema, AnEnum.ONE)));
+
+    /* negative cases */
+    assertFalse("We don't expect GenericData to allow a String datum for an enum schema", gd.validate(schema, "ONE"));
+    assertFalse("We don't expect GenericData to allow a Java Enum for an enum schema", gd.validate(schema, anEnum.ONE));
+  }
+
+  @Test
+  public void testValidateUnion() {
+      Schema type1Schema = SchemaBuilder.record("Type1")
+          .fields()
+          .requiredString("myString")
+          .requiredInt("myInt")
+          .endRecord();
+
+      Schema type2Schema = SchemaBuilder.record("Type2")
+          .fields()
+          .requiredString("myString")
+          .endRecord();
+
+      Schema unionSchema = SchemaBuilder.unionOf()
+          .type(type1Schema).and().type(type2Schema)
+          .endUnion();
+
+    GenericRecord record = new GenericData.Record(type2Schema);
+    record.put("myString", "myValue");
+    assertTrue(GenericData.get().validate(unionSchema, record));
+  }
+}
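The offset-based comparison exercised by testCompare above also works from
offset 0: BinaryData.compare orders the encoded bytes field-by-field in
schema order without decoding full records. A minimal standalone sketch
(the class and helper names are illustrative, not part of this patch):

    import java.io.ByteArrayOutputStream;
    import org.apache.avro.Schema;
    import org.apache.avro.SchemaBuilder;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.io.BinaryData;
    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.EncoderFactory;

    public class CompareDemo {
      // Encode a single record with the binary encoder and return its bytes.
      static byte[] encode(Schema s, GenericData.Record r) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder enc = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericData.Record>(s).write(r, enc);
        enc.flush();
        return out.toByteArray();
      }

      public static void main(String[] args) throws Exception {
        Schema s = SchemaBuilder.record("test").fields()
            .requiredInt("test").endRecord();
        GenericData.Record one = new GenericData.Record(s);
        one.put("test", 1);
        GenericData.Record two = new GenericData.Record(s);
        two.put("test", 2);
        // Prints a negative number: the first record orders before the second.
        System.out.println(BinaryData.compare(
            encode(s, one), 0, encode(s, two), 0, s));
      }
    }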
diff --git a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericDatumWriter.java b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericDatumWriter.java
new file mode 100644
index 0000000..ed52874
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericDatumWriter.java
@@ -0,0 +1,260 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.generic;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.ConcurrentModificationException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.DirectBinaryEncoder;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.io.JsonDecoder;
+import org.apache.avro.AvroTypeException;
+import org.junit.Test;
+import org.apache.avro.util.Utf8;
+
+public class TestGenericDatumWriter {
+  @Test
+  public void testWrite() throws IOException {
+    String json = "{\"type\": \"record\", \"name\": \"r\", \"fields\": ["
+      + "{ \"name\": \"f1\", \"type\": \"long\" }"
+      + "]}";
+    Schema s = Schema.parse(json);
+    GenericRecord r = new GenericData.Record(s);
+    r.put("f1", 100L);
+    ByteArrayOutputStream bao = new ByteArrayOutputStream();
+    GenericDatumWriter<GenericRecord> w =
+      new GenericDatumWriter<GenericRecord>(s);
+    Encoder e = EncoderFactory.get().jsonEncoder(s, bao);
+    w.write(r, e);
+    e.flush();
+    
+    Object o = new GenericDatumReader<GenericRecord>(s).read(null,
+        DecoderFactory.get().jsonDecoder(s, new ByteArrayInputStream(bao.toByteArray())));
+    assertEquals(r, o);
+  }
+
+  @Test
+  public void testArrayConcurrentModification() throws Exception {
+    String json = "{\"type\": \"array\", \"items\": \"int\" }";
+    Schema s = Schema.parse(json);
+    final GenericArray<Integer> a = new GenericData.Array<Integer>(1, s);
+    ByteArrayOutputStream bao = new ByteArrayOutputStream();
+    final GenericDatumWriter<GenericArray<Integer>> w =
+      new GenericDatumWriter<GenericArray<Integer>>(s);
+
+    CountDownLatch sizeWrittenSignal = new CountDownLatch(1);
+    CountDownLatch eltAddedSignal = new CountDownLatch(1);
+
+    final TestEncoder e = new TestEncoder(EncoderFactory.get()
+        .directBinaryEncoder(bao, null), sizeWrittenSignal, eltAddedSignal);
+    
+    // call write in another thread
+    ExecutorService executor = Executors.newSingleThreadExecutor();
+    Future<Void> result = executor.submit(new Callable<Void>() {
+      @Override
+      public Void call() throws Exception {
+        w.write(a, e);
+        return null;
+      }
+    });
+    sizeWrittenSignal.await();
+    // size has been written so now add an element to the array
+    a.add(7);
+    // and signal for the element to be written
+    eltAddedSignal.countDown();
+    try {
+      result.get();
+      fail("Expected ConcurrentModificationException");
+    } catch (ExecutionException ex) {
+      assertTrue(ex.getCause() instanceof ConcurrentModificationException);
+    }
+  }
+  
+
+  @Test
+  public void testMapConcurrentModification() throws Exception {
+    String json = "{\"type\": \"map\", \"values\": \"int\" }";
+    Schema s = Schema.parse(json);
+    final Map<String, Integer> m = new HashMap<String, Integer>();
+    ByteArrayOutputStream bao = new ByteArrayOutputStream();
+    final GenericDatumWriter<Map<String, Integer>> w =
+      new GenericDatumWriter<Map<String, Integer>>(s);
+
+    CountDownLatch sizeWrittenSignal = new CountDownLatch(1);
+    CountDownLatch eltAddedSignal = new CountDownLatch(1);
+
+    final TestEncoder e = new TestEncoder(EncoderFactory.get()
+        .directBinaryEncoder(bao, null), sizeWrittenSignal, eltAddedSignal);
+    
+    // call write in another thread
+    ExecutorService executor = Executors.newSingleThreadExecutor();
+    Future<Void> result = executor.submit(new Callable<Void>() {
+      @Override
+      public Void call() throws Exception {
+        w.write(m, e);
+        return null;
+      }
+    });
+    sizeWrittenSignal.await();
+    // size has been written so now add an entry to the map
+    m.put("a", 7);
+    // and signal for the entry to be written
+    eltAddedSignal.countDown();
+    try {
+      result.get();
+      fail("Expected ConcurrentModificationException");
+    } catch (ExecutionException ex) {
+      assertTrue(ex.getCause() instanceof ConcurrentModificationException);
+    }
+  }
+  
+  static class TestEncoder extends Encoder {
+    
+    Encoder e;
+    CountDownLatch sizeWrittenSignal;
+    CountDownLatch eltAddedSignal;
+    
+    TestEncoder(Encoder encoder, CountDownLatch sizeWrittenSignal,
+        CountDownLatch eltAddedSignal) {
+      this.e = encoder;
+      this.sizeWrittenSignal = sizeWrittenSignal;
+      this.eltAddedSignal = eltAddedSignal;
+    }
+    
+    @Override
+    public void writeArrayStart() throws IOException {
+      e.writeArrayStart();
+      sizeWrittenSignal.countDown();
+      try {
+        eltAddedSignal.await();
+      } catch (InterruptedException e) {
+        // ignore
+      }
+    }
+
+    @Override
+    public void writeMapStart() throws IOException {
+      e.writeMapStart();
+      sizeWrittenSignal.countDown();
+      try {
+        eltAddedSignal.await();
+      } catch (InterruptedException e) {
+        // ignore
+      }
+    }
+    
+    @Override
+    public void flush() throws IOException { e.flush(); }
+    @Override
+    public void writeNull() throws IOException { e.writeNull(); }
+    @Override
+    public void writeBoolean(boolean b) throws IOException { e.writeBoolean(b); }
+    @Override
+    public void writeInt(int n) throws IOException { e.writeInt(n); }
+    @Override
+    public void writeLong(long n) throws IOException { e.writeLong(n); }
+    @Override
+    public void writeFloat(float f) throws IOException { e.writeFloat(f); }
+    @Override
+    public void writeDouble(double d) throws IOException { e.writeDouble(d); }
+    @Override
+    public void writeString(Utf8 utf8) throws IOException { e.writeString(utf8); }
+    @Override
+    public void writeBytes(ByteBuffer bytes) throws IOException { e.writeBytes(bytes); }
+    @Override
+    public void writeBytes(byte[] bytes, int start, int len) throws IOException { e.writeBytes(bytes, start, len); }
+    @Override
+    public void writeFixed(byte[] bytes, int start, int len) throws IOException { e.writeFixed(bytes, start, len); }
+    @Override
+    public void writeEnum(int en) throws IOException { e.writeEnum(en); }
+    @Override
+    public void setItemCount(long itemCount) throws IOException { e.setItemCount(itemCount); }
+    @Override
+    public void startItem() throws IOException { e.startItem(); }
+    @Override
+    public void writeArrayEnd() throws IOException { e.writeArrayEnd(); }
+    @Override
+    public void writeMapEnd() throws IOException { e.writeMapEnd(); }
+    @Override
+    public void writeIndex(int unionIndex) throws IOException { e.writeIndex(unionIndex); }
+  }
+
+  @Test(expected=AvroTypeException.class)
+  public void writeDoesNotAllowStringForGenericEnum() throws IOException {
+    final String json = "{\"type\": \"record\", \"name\": \"recordWithEnum\"," +
+      "\"fields\": [ " +
+        "{\"name\": \"field\", \"type\": " +
+          "{\"type\": \"enum\", \"name\": \"enum\", \"symbols\": " +
+            "[\"ONE\",\"TWO\",\"THREE\"] " +
+          "}" +
+        "}" +
+      "]}";
+    Schema schema = Schema.parse(json);
+    GenericRecord record = new GenericData.Record(schema);
+    record.put("field", "ONE");
+
+    ByteArrayOutputStream bao = new ByteArrayOutputStream();
+    GenericDatumWriter<GenericRecord> writer =
+      new GenericDatumWriter<GenericRecord>(schema);
+    Encoder encoder = EncoderFactory.get().jsonEncoder(schema, bao);
+
+    writer.write(record, encoder);
+  }
+
+  private enum AnEnum { ONE, TWO, THREE };
+  @Test(expected=AvroTypeException.class)
+  public void writeDoesNotAllowJavaEnumForGenericEnum() throws IOException {
+    final String json = "{\"type\": \"record\", \"name\": \"recordWithEnum\"," +
+      "\"fields\": [ " +
+        "{\"name\": \"field\", \"type\": " +
+          "{\"type\": \"enum\", \"name\": \"enum\", \"symbols\": " +
+            "[\"ONE\",\"TWO\",\"THREE\"] " +
+          "}" +
+        "}" +
+      "]}";
+    Schema schema = Schema.parse(json);
+    GenericRecord record = new GenericData.Record(schema);
+    record.put("field", AnEnum.ONE);
+
+    ByteArrayOutputStream bao = new ByteArrayOutputStream();
+    GenericDatumWriter<GenericRecord> writer =
+      new GenericDatumWriter<GenericRecord>(schema);
+    Encoder encoder = EncoderFactory.get().jsonEncoder(schema, bao);
+
+    writer.write(record, encoder);
+  }
+}
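For contrast with the two negative tests above, the representation the
writer does accept for a generic enum is GenericData.EnumSymbol. A minimal
sketch against the same record-with-enum schema (the class name is
illustrative, not part of this patch):

    import java.io.ByteArrayOutputStream;
    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.avro.io.Encoder;
    import org.apache.avro.io.EncoderFactory;

    public class EnumSymbolDemo {
      public static void main(String[] args) throws Exception {
        String json = "{\"type\": \"record\", \"name\": \"recordWithEnum\","
            + "\"fields\": [{\"name\": \"field\", \"type\":"
            + "{\"type\": \"enum\", \"name\": \"enum\","
            + "\"symbols\": [\"ONE\",\"TWO\",\"THREE\"]}}]}";
        Schema schema = Schema.parse(json);
        Schema enumSchema = schema.getField("field").schema();
        GenericRecord record = new GenericData.Record(schema);
        // An EnumSymbol, unlike a String or a Java enum, writes cleanly.
        record.put("field", new GenericData.EnumSymbol(enumSchema, "ONE"));
        ByteArrayOutputStream bao = new ByteArrayOutputStream();
        Encoder encoder = EncoderFactory.get().jsonEncoder(schema, bao);
        new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
        encoder.flush();
        System.out.println(bao.toString("UTF-8")); // e.g. {"field":"ONE"}
      }
    }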
diff --git a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericLogicalTypes.java b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericLogicalTypes.java
new file mode 100644
index 0000000..3a4b1e1
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericLogicalTypes.java
@@ -0,0 +1,235 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.generic;
+
+import java.io.File;
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.UUID;
+import org.apache.avro.Conversion;
+import org.apache.avro.Conversions;
+import org.apache.avro.LogicalType;
+import org.apache.avro.LogicalTypes;
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+public class TestGenericLogicalTypes {
+
+  @Rule
+  public TemporaryFolder temp = new TemporaryFolder();
+
+  public static final GenericData GENERIC = new GenericData();
+
+  @BeforeClass
+  public static void addDecimalAndUUID() {
+    GENERIC.addLogicalTypeConversion(new Conversions.DecimalConversion());
+    GENERIC.addLogicalTypeConversion(new Conversions.UUIDConversion());
+  }
+
+  @Test
+  public void testReadUUID() throws IOException {
+    Schema uuidSchema = Schema.create(Schema.Type.STRING);
+    LogicalTypes.uuid().addToSchema(uuidSchema);
+
+    UUID u1 = UUID.randomUUID();
+    UUID u2 = UUID.randomUUID();
+    List<UUID> expected = Arrays.asList(u1, u2);
+
+    File test = write(Schema.create(Schema.Type.STRING),
+        u1.toString(), u2.toString());
+    Assert.assertEquals("Should convert Strings to UUIDs",
+        expected, read(GENERIC.createDatumReader(uuidSchema), test));
+  }
+
+  @Test
+  public void testWriteUUID() throws IOException {
+    Schema stringSchema = Schema.create(Schema.Type.STRING);
+    stringSchema.addProp(GenericData.STRING_PROP, "String");
+    Schema uuidSchema = Schema.create(Schema.Type.STRING);
+    LogicalTypes.uuid().addToSchema(uuidSchema);
+
+    UUID u1 = UUID.randomUUID();
+    UUID u2 = UUID.randomUUID();
+    List<String> expected = Arrays.asList(u1.toString(), u2.toString());
+
+    File test = write(GENERIC, uuidSchema, u1, u2);
+    Assert.assertEquals("Should read UUIDs as Strings",
+        expected, read(GenericData.get().createDatumReader(stringSchema), test));
+  }
+
+  @Test
+  public void testWriteNullableUUID() throws IOException {
+    Schema stringSchema = Schema.create(Schema.Type.STRING);
+    stringSchema.addProp(GenericData.STRING_PROP, "String");
+    Schema nullableStringSchema = Schema.createUnion(
+        Schema.create(Schema.Type.NULL), stringSchema);
+
+    Schema uuidSchema = Schema.create(Schema.Type.STRING);
+    LogicalTypes.uuid().addToSchema(uuidSchema);
+    Schema nullableUuidSchema = Schema.createUnion(
+        Schema.create(Schema.Type.NULL), uuidSchema);
+
+    UUID u1 = UUID.randomUUID();
+    UUID u2 = UUID.randomUUID();
+    List<String> expected = Arrays.asList(u1.toString(), u2.toString());
+
+    File test = write(GENERIC, nullableUuidSchema, u1, u2);
+    Assert.assertEquals("Should read UUIDs as Strings",
+        expected,
+        read(GenericData.get().createDatumReader(nullableStringSchema), test));
+  }
+
+  @Test
+  public void testReadDecimalFixed() throws IOException {
+    LogicalType decimal = LogicalTypes.decimal(9, 2);
+    Schema fixedSchema = Schema.createFixed("aFixed", null, null, 4);
+    Schema decimalSchema = decimal.addToSchema(
+        Schema.createFixed("aFixed", null, null, 4));
+
+    BigDecimal d1 = new BigDecimal("-34.34");
+    BigDecimal d2 = new BigDecimal("117230.00");
+    List<BigDecimal> expected = Arrays.asList(d1, d2);
+
+    Conversion<BigDecimal> conversion = new Conversions.DecimalConversion();
+
+    // use the conversion directly instead of relying on the write side
+    GenericFixed d1fixed = conversion.toFixed(d1, fixedSchema, decimal);
+    GenericFixed d2fixed = conversion.toFixed(d2, fixedSchema, decimal);
+
+    File test = write(fixedSchema, d1fixed, d2fixed);
+    Assert.assertEquals("Should convert fixed to BigDecimals",
+        expected, read(GENERIC.createDatumReader(decimalSchema), test));
+  }
+
+  @Test
+  public void testWriteDecimalFixed() throws IOException {
+    LogicalType decimal = LogicalTypes.decimal(9, 2);
+    Schema fixedSchema = Schema.createFixed("aFixed", null, null, 4);
+    Schema decimalSchema = decimal.addToSchema(
+        Schema.createFixed("aFixed", null, null, 4));
+
+    BigDecimal d1 = new BigDecimal("-34.34");
+    BigDecimal d2 = new BigDecimal("117230.00");
+
+    Conversion<BigDecimal> conversion = new Conversions.DecimalConversion();
+
+    GenericFixed d1fixed = conversion.toFixed(d1, fixedSchema, decimal);
+    GenericFixed d2fixed = conversion.toFixed(d2, fixedSchema, decimal);
+    List<GenericFixed> expected = Arrays.asList(d1fixed, d2fixed);
+
+    File test = write(GENERIC, decimalSchema, d1, d2);
+    Assert.assertEquals("Should read BigDecimals as fixed",
+        expected, read(GenericData.get().createDatumReader(fixedSchema), test));
+  }
+
+  @Test
+  public void testReadDecimalBytes() throws IOException {
+    LogicalType decimal = LogicalTypes.decimal(9, 2);
+    Schema bytesSchema = Schema.create(Schema.Type.BYTES);
+    Schema decimalSchema = decimal.addToSchema(Schema.create(Schema.Type.BYTES));
+
+    BigDecimal d1 = new BigDecimal("-34.34");
+    BigDecimal d2 = new BigDecimal("117230.00");
+    List<BigDecimal> expected = Arrays.asList(d1, d2);
+
+    Conversion<BigDecimal> conversion = new Conversions.DecimalConversion();
+
+    // use the conversion directly instead of relying on the write side
+    ByteBuffer d1bytes = conversion.toBytes(d1, bytesSchema, decimal);
+    ByteBuffer d2bytes = conversion.toBytes(d2, bytesSchema, decimal);
+
+    File test = write(bytesSchema, d1bytes, d2bytes);
+    Assert.assertEquals("Should convert bytes to BigDecimals",
+        expected, read(GENERIC.createDatumReader(decimalSchema), test));
+  }
+
+  @Test
+  public void testWriteDecimalBytes() throws IOException {
+    LogicalType decimal = LogicalTypes.decimal(9, 2);
+    Schema bytesSchema = Schema.create(Schema.Type.BYTES);
+    Schema decimalSchema = decimal.addToSchema(Schema.create(Schema.Type.BYTES));
+
+    BigDecimal d1 = new BigDecimal("-34.34");
+    BigDecimal d2 = new BigDecimal("117230.00");
+
+    Conversion<BigDecimal> conversion = new Conversions.DecimalConversion();
+
+    // use the conversion directly instead of relying on the write side
+    ByteBuffer d1bytes = conversion.toBytes(d1, bytesSchema, decimal);
+    ByteBuffer d2bytes = conversion.toBytes(d2, bytesSchema, decimal);
+    List<ByteBuffer> expected = Arrays.asList(d1bytes, d2bytes);
+
+    File test = write(GENERIC, decimalSchema, d1bytes, d2bytes);
+    Assert.assertEquals("Should read BigDecimals as bytes",
+        expected, read(GenericData.get().createDatumReader(bytesSchema), test));
+  }
+
+  private <D> List<D> read(DatumReader<D> reader, File file) throws IOException {
+    List<D> data = new ArrayList<D>();
+    FileReader<D> fileReader = null;
+
+    try {
+      fileReader = new DataFileReader<D>(file, reader);
+      for (D datum : fileReader) {
+        data.add(datum);
+      }
+    } finally {
+      if (fileReader != null) {
+        fileReader.close();
+      }
+    }
+
+    return data;
+  }
+
+  private <D> File write(Schema schema, D... data) throws IOException {
+    return write(GenericData.get(), schema, data);
+  }
+
+  @SuppressWarnings("unchecked")
+  private <D> File write(GenericData model, Schema schema, D... data) throws IOException {
+    File file = temp.newFile();
+    DatumWriter<D> writer = model.createDatumWriter(schema);
+    DataFileWriter<D> fileWriter = new DataFileWriter<D>(writer);
+
+    try {
+      fileWriter.create(schema, file);
+      for (D datum : data) {
+        fileWriter.append(datum);
+      }
+    } finally {
+      fileWriter.close();
+    }
+
+    return file;
+  }
+}
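The decimal tests above use DecimalConversion in one direction at a time;
the inverse methods (fromBytes, fromFixed) are what the read side applies.
A minimal round-trip sketch under the same decimal(9, 2) logical type (the
class name is illustrative, not part of this patch):

    import java.math.BigDecimal;
    import java.nio.ByteBuffer;
    import org.apache.avro.Conversions;
    import org.apache.avro.LogicalType;
    import org.apache.avro.LogicalTypes;
    import org.apache.avro.Schema;

    public class DecimalRoundTrip {
      public static void main(String[] args) {
        LogicalType decimal = LogicalTypes.decimal(9, 2);
        Schema bytesSchema = Schema.create(Schema.Type.BYTES);
        Conversions.DecimalConversion conversion =
            new Conversions.DecimalConversion();
        BigDecimal original = new BigDecimal("-34.34");
        // toBytes writes the unscaled value; fromBytes reapplies the scale.
        ByteBuffer encoded = conversion.toBytes(original, bytesSchema, decimal);
        BigDecimal decoded = conversion.fromBytes(encoded, bytesSchema, decimal);
        System.out.println(original.equals(decoded)); // true
      }
    }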
diff --git a/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericRecordBuilder.java b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericRecordBuilder.java
new file mode 100644
index 0000000..8f7dee5
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/generic/TestGenericRecordBuilder.java
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.generic;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.generic.GenericData.Record;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Unit test for the GenericRecordBuilder class.
+ */
+public class TestGenericRecordBuilder {
+  @Test
+  public void testGenericBuilder() {
+    Schema schema = recordSchema();
+    GenericRecordBuilder builder = new GenericRecordBuilder(schema);
+    
+    // Verify that builder has no fields set after initialization:
+    for (Field field : schema.getFields()) {
+      Assert.assertFalse("RecordBuilder should not have field " + field.name(), 
+          builder.has(field.name()));
+      Assert.assertNull("Field " + field.name() + " should be null",
+          builder.get(field.name()));
+    }
+    
+    // Set field in builder:
+    builder.set("intField", 1);
+    List<String> anArray = Arrays.asList(new String[] { "one", "two", "three" });
+    builder.set("anArray", anArray);
+    Assert.assertTrue("anArray should be set", builder.has("anArray"));
+    Assert.assertEquals(anArray, builder.get("anArray"));
+    Assert.assertFalse("id should not be set", builder.has("id"));
+    Assert.assertNull(builder.get("id"));
+    
+    // Build the record, and verify that fields are set:
+    Record record = builder.build();
+    Assert.assertEquals(new Integer(1), record.get("intField"));
+    Assert.assertEquals(anArray, record.get("anArray"));
+    Assert.assertNotNull(record.get("id"));
+    Assert.assertEquals("0", record.get("id").toString());
+    
+    // Test copy constructors:
+    Assert.assertEquals(builder, new GenericRecordBuilder(builder));
+    Assert.assertEquals(record, new GenericRecordBuilder(record).build());
+    
+    // Test clear:
+    builder.clear("intField");
+    Assert.assertFalse(builder.has("intField"));
+    Assert.assertNull(builder.get("intField"));
+  }
+  
+  @Test(expected=org.apache.avro.AvroRuntimeException.class)
+  public void attemptToSetNonNullableFieldToNull() {
+    new GenericRecordBuilder(recordSchema()).set("intField", null);
+  }
+  
+  @Test(expected=org.apache.avro.AvroRuntimeException.class)
+  public void buildWithoutSettingRequiredFields1() {
+    new GenericRecordBuilder(recordSchema()).build();
+  }
+  
+  @Test()
+  public void buildWithoutSettingRequiredFields2() {
+    try {
+      new GenericRecordBuilder(recordSchema()).
+      set("anArray", Arrays.asList(new String[] { "one" })).
+      build();
+      Assert.fail("Should have thrown " + 
+          AvroRuntimeException.class.getCanonicalName());
+    } catch (AvroRuntimeException e) {
+      Assert.assertTrue(e.getMessage().contains("intField"));
+    }
+  }
+  
+  /** Creates a test record schema */
+  private static Schema recordSchema() {
+    List<Field> fields = new ArrayList<Field>();
+    fields.add(new Field("id", Schema.create(Type.STRING), null, "0"));
+    fields.add(new Field("intField", Schema.create(Type.INT), null, null));
+    fields.add(new Field("anArray", Schema.createArray(Schema.create(Type.STRING)), null, null));
+    fields.add(new Field("optionalInt", Schema.createUnion
+                         (Arrays.asList(Schema.create(Type.NULL),
+                                        Schema.create(Type.INT))),
+                         null, Schema.NULL_VALUE));
+    Schema schema = Schema.createRecord("Foo", "test", "mytest", false);
+    schema.setFields(fields);
+    return schema;
+  }
+}
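The builder semantics tested above (unset fields with defaults are filled
in at build(), required fields without defaults throw) fit in a compact
sketch; here the schema is assembled with SchemaBuilder to a similar shape,
and the class name is illustrative, not part of this patch:

    import java.util.Arrays;
    import org.apache.avro.Schema;
    import org.apache.avro.SchemaBuilder;
    import org.apache.avro.generic.GenericData.Record;
    import org.apache.avro.generic.GenericRecordBuilder;

    public class BuilderDemo {
      public static void main(String[] args) {
        Schema schema = SchemaBuilder.record("Foo").namespace("mytest")
            .fields()
            .name("id").type().stringType().stringDefault("0")
            .requiredInt("intField")
            .name("anArray").type().array().items().stringType().noDefault()
            .endRecord();
        Record record = new GenericRecordBuilder(schema)
            .set("intField", 1)
            .set("anArray", Arrays.asList("one"))
            .build();            // "id" is filled from its default, "0"
        System.out.println(record);
      }
    }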
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/LegacyBinaryEncoder.java b/lang/java/avro/src/test/java/org/apache/avro/io/LegacyBinaryEncoder.java
new file mode 100644
index 0000000..ea8d778
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/LegacyBinaryEncoder.java
@@ -0,0 +1,210 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.util.Utf8;
+
+/**
+ * Low-level support for serializing Avro values.
+ *
+ * This class has two types of methods.  One type of methods support
+ * the writing of leaf values (for example, {@link #writeLong} and
+ * {@link #writeString}).  These methods have analogs in {@link
+ * Decoder}.
+ *
+ * The other type of methods support the writing of maps and arrays.
+ * These methods are {@link #writeArrayStart}, {@link
+ * #startItem}, and {@link #writeArrayEnd} (and similar methods for
+ * maps).  Some implementations of {@link Encoder} handle the
+ * buffering required to break large maps and arrays into blocks,
+ * which is necessary for applications that want to do streaming.
+ * (See {@link #writeArrayStart} for details on these methods.)
+ *
+ *  @see Decoder
+ */
+public class LegacyBinaryEncoder extends Encoder {
+  protected OutputStream out;
+  
+  private interface ByteWriter {
+    void write(ByteBuffer bytes) throws IOException;
+  }
+  
+  private static final class SimpleByteWriter implements ByteWriter {
+    private final OutputStream out;
+
+    public SimpleByteWriter(OutputStream out) {
+      this.out = out;
+    }
+
+    @Override
+    public void write(ByteBuffer bytes) throws IOException {
+      encodeLong(bytes.remaining(), out);
+      out.write(bytes.array(), bytes.position(), bytes.remaining());
+    }
+  }
+  
+  private final ByteWriter byteWriter;
+
+  /** Create a writer that sends its output to the underlying stream
+   *  <code>out</code>. */
+  public LegacyBinaryEncoder(OutputStream out) {
+    this.out = out;
+    this.byteWriter = new SimpleByteWriter(out);
+  }
+
+  @Override
+  public void flush() throws IOException {
+    if (out != null) {
+      out.flush();
+    }
+  }
+
+  @Override
+  public void writeNull() throws IOException { }
+  
+  @Override
+  public void writeBoolean(boolean b) throws IOException {
+    out.write(b ? 1 : 0);
+  }
+
+  @Override
+  public void writeInt(int n) throws IOException {
+    encodeLong(n, out);
+  }
+
+  @Override
+  public void writeLong(long n) throws IOException {
+    encodeLong(n, out);
+  }
+  
+  @Override
+  public void writeFloat(float f) throws IOException {
+    encodeFloat(f, out);
+  }
+
+  @Override
+  public void writeDouble(double d) throws IOException {
+    encodeDouble(d, out);
+  }
+
+  @Override
+  public void writeString(Utf8 utf8) throws IOException {
+    encodeString(utf8.getBytes(), 0, utf8.getByteLength());
+  }
+  
+  @Override
+  public void writeString(String string) throws IOException {
+    byte[] bytes = Utf8.getBytesFor(string);
+    encodeString(bytes, 0, bytes.length);
+  }
+  
+  private void encodeString(byte[] bytes, int offset, int length) throws IOException {
+    encodeLong(length, out);
+    out.write(bytes, offset, length);
+  }
+  
+  @Override
+  public void writeBytes(ByteBuffer bytes) throws IOException {
+    byteWriter.write(bytes);
+  }
+  
+  @Override
+  public void writeBytes(byte[] bytes, int start, int len) throws IOException {
+    encodeLong(len, out);
+    out.write(bytes, start, len);
+  }
+  
+  @Override
+  public void writeFixed(byte[] bytes, int start, int len) throws IOException {
+    out.write(bytes, start, len);
+  }
+
+  @Override
+  public void writeEnum(int e) throws IOException {
+    encodeLong(e, out);
+  }
+
+  @Override
+  public void writeArrayStart() throws IOException {
+  }
+
+  @Override
+  public void setItemCount(long itemCount) throws IOException {
+    if (itemCount > 0) {
+      writeLong(itemCount);
+    }
+  }
+  
+  @Override
+  public void startItem() throws IOException {
+  }
+
+  @Override
+  public void writeArrayEnd() throws IOException {
+    encodeLong(0, out);
+  }
+
+  @Override
+  public void writeMapStart() throws IOException {
+  }
+
+  @Override
+  public void writeMapEnd() throws IOException {
+    encodeLong(0, out);
+  }
+
+  @Override
+  public void writeIndex(int unionIndex) throws IOException {
+    encodeLong(unionIndex, out);
+  }
+  
+  protected static void encodeLong(long n, OutputStream o) throws IOException {
+    n = (n << 1) ^ (n >> 63); // move sign to low-order bit
+    while ((n & ~0x7F) != 0) {
+      o.write((byte)((n & 0x7f) | 0x80));
+      n >>>= 7;
+    }
+    o.write((byte)n);
+  }
+
+  protected static void encodeFloat(float f, OutputStream o) throws IOException {
+    int bits = Float.floatToRawIntBits(f);
+    o.write((bits      ) & 0xFF);
+    o.write((bits >>  8) & 0xFF);
+    o.write((bits >> 16) & 0xFF);
+    o.write((bits >> 24) & 0xFF);
+  }
+
+  protected static void encodeDouble(double d, OutputStream o) throws IOException {
+    long bits = Double.doubleToRawLongBits(d);
+    o.write((int)(bits      ) & 0xFF);
+    o.write((int)(bits >>  8) & 0xFF);
+    o.write((int)(bits >> 16) & 0xFF);
+    o.write((int)(bits >> 24) & 0xFF);
+    o.write((int)(bits >> 32) & 0xFF);
+    o.write((int)(bits >> 40) & 0xFF);
+    o.write((int)(bits >> 48) & 0xFF);
+    o.write((int)(bits >> 56) & 0xFF);
+  }
+
+}
+
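The class comment above defers the array/map protocol to writeArrayStart;
the calling sequence it describes is writeArrayStart, setItemCount, one
startItem per element, then writeArrayEnd (with writeMap* analogues). A
minimal sketch of that sequence against the current Encoder API (the class
name is illustrative, not part of this patch):

    import java.io.ByteArrayOutputStream;
    import org.apache.avro.io.Encoder;
    import org.apache.avro.io.EncoderFactory;

    public class ArrayProtocolDemo {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        Encoder e = EncoderFactory.get().binaryEncoder(out, null);
        e.writeArrayStart();       // begin the array
        e.setItemCount(3);         // announce the items in this block
        for (long v : new long[] { 10, 20, 30 }) {
          e.startItem();           // once per element, before its value
          e.writeLong(v);
        }
        e.writeArrayEnd();         // terminates the array (zero count)
        e.flush();
        System.out.println(out.size() + " bytes");
      }
    }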
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryDecoder.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryDecoder.java
new file mode 100644
index 0000000..aa3e1d7
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryDecoder.java
@@ -0,0 +1,463 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+
+import org.apache.avro.RandomData;
+import org.apache.avro.Schema;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.util.ByteBufferInputStream;
+import org.apache.avro.util.ByteBufferOutputStream;
+import org.apache.avro.util.Utf8;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+@RunWith(Parameterized.class)
+public class TestBinaryDecoder {
+  // prime number buffer size so that looping tests hit the buffer edge
+  // at different points in the loop.
+  DecoderFactory factory = new DecoderFactory().configureDecoderBufferSize(521);
+  private boolean useDirect = false;
+  static EncoderFactory e_factory = EncoderFactory.get();
+  public TestBinaryDecoder(boolean useDirect) {
+    this.useDirect = useDirect;
+  }
+  
+  @Parameters
+  public static Collection<Object[]> data() {
+    return Arrays.asList(new Object[][] {
+        { true },
+        { false },
+    });
+  }
+  
+  private Decoder newDecoderWithNoData() throws IOException {
+    return newDecoder(new byte[0]);
+  }
+
+  private Decoder newDecoder(byte[] bytes, int start, int len)
+    throws IOException {
+    return factory.binaryDecoder(bytes, start, len, null);
+    
+  }
+
+  private Decoder newDecoder(InputStream in) {
+    if (useDirect) {
+      return factory.directBinaryDecoder(in, null);
+    } else {
+      return factory.binaryDecoder(in, null);
+    }
+  }
+
+  private Decoder newDecoder(byte[] bytes) throws IOException {
+    return factory.binaryDecoder(bytes, null);
+  }
+
+  /** Verify that EOFException is thrown at EOF. */
+
+  @Test(expected=EOFException.class)
+  public void testEOFBoolean() throws IOException {
+    newDecoderWithNoData().readBoolean();
+  }
+  
+  @Test(expected=EOFException.class)
+  public void testEOFInt() throws IOException {
+    newDecoderWithNoData().readInt();
+  }
+  
+  @Test(expected=EOFException.class)
+  public void testEOFLong() throws IOException {
+    newDecoderWithNoData().readLong();
+  }
+  
+  @Test(expected=EOFException.class)
+  public void testEOFFloat() throws IOException {
+    newDecoderWithNoData().readFloat();
+  }
+  
+  @Test(expected=EOFException.class)
+  public void testEOFDouble() throws IOException {
+    newDecoderWithNoData().readDouble();
+  }
+  
+  @Test(expected=EOFException.class)
+  public void testEOFBytes() throws IOException {
+    newDecoderWithNoData().readBytes(null);
+  }
+  
+  @Test(expected=EOFException.class)
+  public void testEOFString() throws IOException {
+    newDecoderWithNoData().readString(new Utf8("a"));
+  }
+  
+  @Test(expected=EOFException.class)
+  public void testEOFFixed() throws IOException {
+    newDecoderWithNoData().readFixed(new byte[1]);
+  }
+
+  @Test(expected=EOFException.class)
+  public void testEOFEnum() throws IOException {
+    newDecoderWithNoData().readEnum();
+  }
+  
+  @Test
+  public void testReuse() throws IOException {
+    ByteBufferOutputStream bbo1 = new ByteBufferOutputStream();
+    ByteBufferOutputStream bbo2 = new ByteBufferOutputStream();
+    byte[] b1 = new byte[] { 1, 2 };
+    
+    BinaryEncoder e1 = e_factory.binaryEncoder(bbo1, null);
+    e1.writeBytes(b1);
+    e1.flush();
+    
+    BinaryEncoder e2 = e_factory.binaryEncoder(bbo2, null);
+    e2.writeBytes(b1);
+    e2.flush();
+    
+    DirectBinaryDecoder d = new DirectBinaryDecoder(
+        new ByteBufferInputStream(bbo1.getBufferList()));
+    ByteBuffer bb1 = d.readBytes(null);
+    Assert.assertEquals(b1.length, bb1.limit() - bb1.position());
+    
+    d.configure(new ByteBufferInputStream(bbo2.getBufferList()));
+    ByteBuffer bb2 = d.readBytes(null);
+    Assert.assertEquals(b1.length, bb2.limit() - bb2.position());
+    
+  }
+  
+  private static byte[] data = null;
+  private static int seed = -1;
+  private static Schema schema = null;
+  private static int count = 200;
+  private static ArrayList<Object> records = new ArrayList<Object>(count);
+  @BeforeClass
+  public static void generateData() throws IOException {
+    seed = (int)System.currentTimeMillis();
+    // note: some tests (testSkipping) depend on this schema's exact field order and types
+    String jsonSchema =
+      "{\"type\": \"record\", \"name\": \"Test\", \"fields\": ["
+      +"{\"name\":\"intField\", \"type\":\"int\"},"
+      +"{\"name\":\"bytesField\", \"type\":\"bytes\"},"
+      +"{\"name\":\"booleanField\", \"type\":\"boolean\"},"
+      +"{\"name\":\"stringField\", \"type\":\"string\"},"
+      +"{\"name\":\"floatField\", \"type\":\"float\"},"
+      +"{\"name\":\"doubleField\", \"type\":\"double\"},"
+      +"{\"name\":\"arrayField\", \"type\": " +
+          "{\"type\":\"array\", \"items\":\"boolean\"}},"
+      +"{\"name\":\"longField\", \"type\":\"long\"}]}";
+    schema = Schema.parse(jsonSchema);
+    GenericDatumWriter<Object> writer = new GenericDatumWriter<Object>();
+    writer.setSchema(schema);
+    ByteArrayOutputStream baos = new ByteArrayOutputStream(8192);
+    BinaryEncoder encoder = e_factory.binaryEncoder(baos, null);
+    
+    for (Object datum : new RandomData(schema, count, seed)) {
+      writer.write(datum, encoder);
+      records.add(datum);
+    }
+    encoder.flush();
+    data = baos.toByteArray();
+  }
+
+  @Test
+  public void testDecodeFromSources() throws IOException {
+    GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
+    reader.setSchema(schema);
+    
+    ByteArrayInputStream is = new ByteArrayInputStream(data);
+    ByteArrayInputStream is2 = new ByteArrayInputStream(data);
+    ByteArrayInputStream is3 = new ByteArrayInputStream(data);
+
+    Decoder fromInputStream = newDecoder(is);
+    Decoder fromArray = newDecoder(data);
+    
+    byte[] data2 = new byte[data.length + 30];
+    Arrays.fill(data2, (byte)0xff);
+    System.arraycopy(data, 0, data2, 15, data.length);
+
+    Decoder fromOffsetArray = newDecoder(data2, 15, data.length);
+
+    BinaryDecoder initOnInputStream = factory.binaryDecoder(
+        new byte[50], 0, 30, null);
+    initOnInputStream = factory.binaryDecoder(is2, initOnInputStream);
+    BinaryDecoder initOnArray = factory.binaryDecoder(is3, null);
+    initOnArray = factory.binaryDecoder(
+        data, 0, data.length, initOnArray);
+    
+    for (Object datum : records) {
+      Assert.assertEquals(
+          "InputStream based BinaryDecoder result does not match",
+          datum, reader.read(null, fromInputStream));
+      Assert.assertEquals(
+          "Array based BinaryDecoder result does not match",
+          datum, reader.read(null, fromArray));
+      Assert.assertEquals(
+          "offset Array based BinaryDecoder result does not match",
+          datum, reader.read(null, fromOffsetArray));
+      Assert.assertEquals(
+          "InputStream initialized BinaryDecoder result does not match",
+          datum, reader.read(null, initOnInputStream));
+      Assert.assertEquals(
+          "Array initialized BinaryDecoder result does not match",
+          datum, reader.read(null, initOnArray));
+    }
+  }
+
+  @Test
+  public void testInputStreamProxy() throws IOException {
+    Decoder d = newDecoder(data);
+    if (d instanceof BinaryDecoder) {
+      BinaryDecoder bd = (BinaryDecoder) d;
+      InputStream test = bd.inputStream();
+      InputStream check = new ByteArrayInputStream(data);
+      validateInputStreamReads(test, check);
+      bd = factory.binaryDecoder(data, bd);
+      test = bd.inputStream();
+      check = new ByteArrayInputStream(data);
+      validateInputStreamSkips(test, check);
+      // with input stream sources
+      bd = factory.binaryDecoder(new ByteArrayInputStream(data), bd);
+      test = bd.inputStream();
+      check = new ByteArrayInputStream(data);
+      validateInputStreamReads(test, check);
+      bd = factory.binaryDecoder(new ByteArrayInputStream(data), bd);
+      test = bd.inputStream();
+      check = new ByteArrayInputStream(data);
+      validateInputStreamSkips(test, check);
+    }
+  }
+
+  @Test
+  public void testInputStreamProxyDetached() throws IOException {
+    Decoder d = newDecoder(data);
+    if (d instanceof BinaryDecoder) {
+      BinaryDecoder bd = (BinaryDecoder) d;
+      InputStream test = bd.inputStream();
+      InputStream check = new ByteArrayInputStream(data);
+      // detach input stream and decoder from old source
+      factory.binaryDecoder(new byte[56], null);
+      InputStream bad = bd.inputStream();
+      InputStream check2 = new ByteArrayInputStream(data);
+      validateInputStreamReads(test, check);
+      Assert.assertFalse(bad.read() == check2.read());
+    }
+  }
+  
+  @Test
+  public void testInputStreamPartiallyUsed() throws IOException {
+    BinaryDecoder bd = factory.binaryDecoder(
+        new ByteArrayInputStream(data), null);
+    InputStream test = bd.inputStream();
+    InputStream check = new ByteArrayInputStream(data);
+    // triggers buffer fill if unused and tests isEnd()
+    try {
+      Assert.assertFalse(bd.isEnd()); 
+    } catch (UnsupportedOperationException e) {
+      // this is OK if it's a DirectBinaryDecoder.
+      if (bd.getClass() != DirectBinaryDecoder.class) {
+        throw e;
+      }
+    }
+    bd.readFloat(); // use data, and otherwise trigger buffer fill
+    check.skip(4); // skip the same # of bytes here
+    validateInputStreamReads(test, check);
+  }
+
+  private void validateInputStreamReads(InputStream test, InputStream check)
+      throws IOException {
+    byte[] bt = new byte[7];
+    byte[] bc = new byte[7]; 
+    while (true) {
+      int t = test.read();
+      int c = check.read();
+      Assert.assertEquals(c, t);
+      if (-1 == t) break;
+      t = test.read(bt);
+      c = check.read(bc);
+      Assert.assertEquals(c, t);
+      Assert.assertArrayEquals(bt, bc);
+      if (-1 == t) break;
+      t = test.read(bt, 1, 4);
+      c = check.read(bc, 1, 4);
+      Assert.assertEquals(c, t);
+      Assert.assertArrayEquals(bt, bc);
+      if (-1 == t) break;
+    }
+    Assert.assertEquals(0, test.skip(5));
+    Assert.assertEquals(0, test.available());
+    Assert.assertFalse(test.getClass() != ByteArrayInputStream.class && test.markSupported());
+    test.close();
+  }
+  
+  private void validateInputStreamSkips(InputStream test, InputStream check) throws IOException {
+    while(true) {
+      long t2 = test.skip(19);
+      long c2 = check.skip(19);
+      Assert.assertEquals(c2, t2);
+      if (0 == t2) break;
+    }
+    Assert.assertEquals(-1, test.read());
+  }
+
+  @Test
+  public void testBadIntEncoding() throws IOException {
+    byte[] badint = new byte[5];
+    Arrays.fill(badint, (byte)0xff);
+    Decoder bd = factory.binaryDecoder(badint, null);
+    String message = "";
+    try {
+      bd.readInt();
+    } catch (IOException ioe) {
+      message = ioe.getMessage();
+    }
+    Assert.assertEquals("Invalid int encoding", message);
+  }
+
+  @Test
+  public void testBadLongEncoding() throws IOException {
+    byte[] badint = new byte[10];
+    Arrays.fill(badint, (byte)0xff);
+    Decoder bd = factory.binaryDecoder(badint, null);
+    String message = "";
+    try {
+      bd.readLong();
+    } catch (IOException ioe) {
+      message = ioe.getMessage();
+    }
+    Assert.assertEquals("Invalid long encoding", message);
+  }
+
+  @Test
+  public void testBadLengthEncoding() throws IOException {
+    byte[] bad = new byte[] { (byte)1 };
+    Decoder bd = factory.binaryDecoder(bad, null);
+    String message = "";
+    try {
+      bd.readString();
+    } catch (AvroRuntimeException e) {
+      message = e.getMessage();
+    }
+    Assert.assertEquals("Malformed data. Length is negative: -1", message);
+  }
+
+  @Test(expected=EOFException.class)
+  public void testIntTooShort() throws IOException {
+    byte[] badint = new byte[4];
+    Arrays.fill(badint, (byte)0xff);
+    newDecoder(badint).readInt();
+  }
+
+  @Test(expected=EOFException.class)
+  public void testLongTooShort() throws IOException {
+    byte[] badint = new byte[9];
+    Arrays.fill(badint, (byte)0xff);
+    newDecoder(badint).readLong();
+  }
+  
+  @Test(expected=EOFException.class)
+  public void testFloatTooShort() throws IOException {
+    byte[] bad = new byte[3];
+    Arrays.fill(bad, (byte)0xff);
+    newDecoder(bad).readFloat();
+  }
+
+  @Test(expected=EOFException.class)
+  public void testDoubleTooShort() throws IOException {
+    byte[] bad = new byte[7];
+    Arrays.fill(bad, (byte)0xff);
+    newDecoder(bad).readDouble();
+  }
+
+  @Test
+  public void testSkipping() throws IOException {
+    Decoder d = newDecoder(data);
+    skipGenerated(d);
+    if (d instanceof BinaryDecoder) {
+      BinaryDecoder bd = (BinaryDecoder) d;
+      try {
+        Assert.assertTrue(bd.isEnd());
+      } catch (UnsupportedOperationException e) {
+        // this is OK if it's a DirectBinaryDecoder.
+        if (bd.getClass() != DirectBinaryDecoder.class) {
+          throw e;
+        }
+      }
+      bd = factory.binaryDecoder(new ByteArrayInputStream(data), bd);
+      skipGenerated(bd);
+      try {
+        Assert.assertTrue(bd.isEnd());
+      } catch (UnsupportedOperationException e) {
+        // this is OK if it's a DirectBinaryDecoder.
+        if (bd.getClass() != DirectBinaryDecoder.class) {
+          throw e;
+        }
+      }
+    }
+  }
+
+  private void skipGenerated(Decoder bd) throws IOException {
+    for (int i = 0; i < records.size(); i++) {
+      bd.readInt();
+      bd.skipBytes();
+      bd.skipFixed(1);
+      bd.skipString();
+      bd.skipFixed(4);
+      bd.skipFixed(8);
+      long leftover = bd.skipArray();
+      // booleans are one byte, array trailer is one byte
+      bd.skipFixed((int)leftover + 1); 
+      bd.skipFixed(0);
+      bd.readLong();
+    }
+    EOFException eof = null;
+    try {
+      bd.skipFixed(4);
+    } catch (EOFException e) {
+      eof = e;
+    }
+    Assert.assertTrue(null != eof);
+  }
+  
+  @Test(expected = EOFException.class)
+  public void testEOF() throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    Encoder e = EncoderFactory.get().binaryEncoder(baos, null);
+    e.writeLong(0x10000000000000L);
+    e.flush();
+      
+    Decoder d = newDecoder(new ByteArrayInputStream(baos.toByteArray()));
+    Assert.assertEquals(0x10000000000000L, d.readLong());
+    d.readInt();
+  }
+
+}
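Several tests above pass a previous decoder back into the factory; a
minimal sketch of that reuse pattern, which re-initializes the old decoder
on a new source instead of allocating a fresh buffer (the class name is
illustrative, not part of this patch):

    import java.io.ByteArrayInputStream;
    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.DecoderFactory;

    public class DecoderReuseDemo {
      public static void main(String[] args) throws Exception {
        byte[] payload = { 2, 4 };  // zigzag varints for the ints 1 and 2
        DecoderFactory factory = DecoderFactory.get();
        BinaryDecoder d = factory.binaryDecoder(payload, null); // fresh
        System.out.println(d.readInt());                        // 1
        System.out.println(d.readInt());                        // 2
        // Reuse: the same object, re-pointed at an InputStream source.
        d = factory.binaryDecoder(new ByteArrayInputStream(payload), d);
        System.out.println(d.readInt());                        // 1 again
      }
    }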
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryEncoderFidelity.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryEncoderFidelity.java
new file mode 100644
index 0000000..997ab94
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryEncoderFidelity.java
@@ -0,0 +1,198 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Random;
+
+import org.apache.avro.util.Utf8;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestBinaryEncoderFidelity {
+  
+  static byte[] legacydata;
+  static byte[] complexdata;
+  EncoderFactory factory = EncoderFactory.get();
+  public static void generateData(Encoder e, boolean useReadOnlyByteBuffer) throws IOException {
+    // generate a bunch of data that should test the bounds of a BinaryEncoder
+    Random r = new Random(665321);
+    e.writeNull();
+    e.writeBoolean(true);
+    e.writeBoolean(false);
+    byte[] bytes = new byte[10];
+    ByteBuffer bb;
+    if (useReadOnlyByteBuffer) {
+      bb = ByteBuffer.wrap(bytes, 4, 4).asReadOnlyBuffer();
+    } else {
+      bb = ByteBuffer.wrap(bytes, 4, 4);
+    }
+    r.nextBytes(bytes);
+    e.writeBytes(bytes);
+    e.writeBytes(new byte[0]);
+    e.writeBytes(bytes, 3, 3);
+    e.writeBytes(new byte[0], 0, 0);
+    e.writeBytes(ByteBuffer.wrap(bytes, 2, 2));
+    e.writeBytes(bb);
+    e.writeBytes(bb);
+    e.writeDouble(0.0);
+    e.writeDouble(-0.0);
+    e.writeDouble(Double.NaN);
+    e.writeDouble(r.nextDouble());
+    e.writeDouble(Double.NEGATIVE_INFINITY);
+    e.writeEnum(65);
+    e.writeFixed(bytes);
+    e.writeFixed(bytes, 7, 2);
+    e.writeFloat(1.0f);
+    e.writeFloat(r.nextFloat());
+    e.writeFloat(Float.POSITIVE_INFINITY);
+    e.writeFloat(Float.MIN_NORMAL);
+    e.writeIndex(-2);
+    e.writeInt(0);
+    e.writeInt(-1);
+    e.writeInt(1);
+    e.writeInt(0x40);
+    e.writeInt(-0x41);
+    e.writeInt(0x2000);
+    e.writeInt(-0x2001);
+    e.writeInt(0x80000);
+    e.writeInt(-0x80001);
+    e.writeInt(0x4000000);
+    e.writeInt(-0x4000001);
+    e.writeInt(r.nextInt());
+    e.writeInt(r.nextInt());
+    e.writeInt(Integer.MAX_VALUE);
+    e.writeInt(Integer.MIN_VALUE);
+    e.writeLong(0);
+    e.writeLong(-1);
+    e.writeLong(1);
+    e.writeLong(0x40);
+    e.writeLong(-0x41);
+    e.writeLong(0x2000);
+    e.writeLong(-0x2001);
+    e.writeLong(0x80000);
+    e.writeLong(-0x80001);
+    e.writeLong(0x4000000);
+    e.writeLong(-0x4000001);
+    e.writeLong(0x200000000L);
+    e.writeLong(-0x200000001L);
+    e.writeLong(0x10000000000L);
+    e.writeLong(-0x10000000001L);
+    e.writeLong(0x800000000000L);
+    e.writeLong(-0x800000000001L);
+    e.writeLong(0x40000000000000L);
+    e.writeLong(-0x40000000000001L);
+    e.writeLong(0x2000000000000000L);
+    e.writeLong(-0x2000000000000001L);
+    e.writeLong(r.nextLong());
+    e.writeLong(r.nextLong());
+    e.writeLong(Long.MAX_VALUE);
+    e.writeLong(Long.MIN_VALUE);
+    e.writeString(new StringBuilder("StringBuilder\u00A2"));
+    e.writeString("String\u20AC");
+    e.writeString("");
+    e.writeString(new Utf8("Utf8\uD834\uDD1E"));
+    if (e instanceof BinaryEncoder) {
+      int count = ((BinaryEncoder)e).bytesBuffered();
+      System.out.println(e.getClass().getSimpleName() + " buffered: " + count);
+    }
+    e.flush();
+  }
+  
+  static void generateComplexData(Encoder e) throws IOException {
+    e.writeArrayStart();
+    e.setItemCount(1);
+    e.startItem();
+    e.writeInt(1);
+    e.writeArrayEnd();
+    e.writeMapStart();
+    e.setItemCount(2);
+    e.startItem();
+    e.writeString("foo");
+    e.writeInt(-1);
+    e.writeDouble(33.3);
+    e.startItem();
+    e.writeString("bar");
+    e.writeInt(1);
+    e.writeDouble(-33.3);
+    e.writeMapEnd();
+    e.flush();
+  }
+  
+  @BeforeClass
+  public static void generateLegacyData() throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    Encoder e = new LegacyBinaryEncoder(baos);
+    generateData(e, false);
+    legacydata = baos.toByteArray();
+    baos.reset();
+    generateComplexData(e);
+    complexdata = baos.toByteArray();
+  }
+  
+  @Test
+  public void testBinaryEncoder() throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    BinaryEncoder e = factory.binaryEncoder(baos, null);
+    generateData(e, true);
+    byte[] result = baos.toByteArray();
+    Assert.assertEquals(legacydata.length, result.length);
+    Assert.assertArrayEquals(legacydata, result);
+    baos.reset();
+    generateComplexData(e);
+    byte[] result2 = baos.toByteArray();
+    Assert.assertEquals(complexdata.length, result2.length);
+    Assert.assertArrayEquals(complexdata, result2);
+  }
+  
+  @Test
+  public void testDirectBinaryEncoder() throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    BinaryEncoder e = factory.directBinaryEncoder(baos, null);
+    generateData(e, true);
+    byte[] result = baos.toByteArray();
+    Assert.assertEquals(legacydata.length, result.length);
+    Assert.assertArrayEquals(legacydata, result);
+    baos.reset();
+    generateComplexData(e);
+    byte[] result2 = baos.toByteArray();
+    Assert.assertEquals(complexdata.length, result2.length);
+    Assert.assertArrayEquals(complexdata, result2);
+  }
+
+  
+  @Test
+  public void testBlockingBinaryEncoder() throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    BinaryEncoder e = factory.blockingBinaryEncoder(baos, null);
+    generateData(e, true);
+    byte[] result = baos.toByteArray();
+    Assert.assertEquals(legacydata.length, result.length);
+    Assert.assertArrayEquals(legacydata, result);
+    baos.reset();
+    generateComplexData(e);
+    byte[] result2 = baos.toByteArray();
+    // blocking will cause different length, should be two bytes larger
+    Assert.assertEquals(complexdata.length + 2, result2.length);
+    // the first byte is the array start, with the count of items negative
+    Assert.assertEquals(complexdata[0] >>> 1, result2[0]);
+  }
+}
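The two extra bytes checked above come from the blocking encoder's
size-prefixed blocks; the block threshold itself is configurable on the
factory, as TestBlockingIO below also does. A minimal sketch (the class
name is illustrative, not part of this patch):

    import java.io.ByteArrayOutputStream;
    import org.apache.avro.io.Encoder;
    import org.apache.avro.io.EncoderFactory;

    public class BlockingConfigDemo {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // A small block size forces large arrays to be split into several
        // size-prefixed blocks instead of one.
        Encoder e = new EncoderFactory().configureBlockSize(64)
            .blockingBinaryEncoder(out, null);
        e.writeArrayStart();
        e.setItemCount(2);
        e.startItem();
        e.writeString("foo");
        e.startItem();
        e.writeString("bar");
        e.writeArrayEnd();
        e.flush();
        System.out.println(out.size() + " bytes");
      }
    }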
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java
new file mode 100644
index 0000000..95729fe
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java
@@ -0,0 +1,477 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Stack;
+import java.util.Collection;
+import java.util.Arrays;
+
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonParser;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+
+@RunWith(Parameterized.class)
+public class TestBlockingIO {
+  private static final String UTF_8 = "UTF-8";
+
+  private final int iSize;
+  private final int iDepth;
+  private final String sInput;
+
+  public TestBlockingIO (int sz, int dp, String inp) {
+    this.iSize = sz;
+    this.iDepth = dp;
+    this.sInput = inp;
+  }
+  
+  private static class Tests {
+    private final JsonParser parser;
+    private final Decoder input;
+    private final int depth;
+    public Tests(int bufferSize, int depth, String input)
+      throws IOException {
+  
+      this.depth = depth;
+      byte[] in = input.getBytes(UTF_8);
+      JsonFactory f = new JsonFactory();
+      JsonParser p = f.createJsonParser(new ByteArrayInputStream(in));
+      
+      ByteArrayOutputStream os = new ByteArrayOutputStream();
+      EncoderFactory factory = new EncoderFactory()
+          .configureBlockSize(bufferSize);
+      Encoder cos = factory.blockingBinaryEncoder(os, null);
+      serialize(cos, p, os);
+      cos.flush();
+      
+      byte[] bb = os.toByteArray();
+      // dump(bb);
+      this.input = DecoderFactory.get().binaryDecoder(bb, null);
+      this.parser = f.createJsonParser(new ByteArrayInputStream(in));
+    }
+    
+    public void scan() throws IOException {
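+      // Walk the original JSON with Jackson while decoding the Avro
+      // encoding of the same data in lockstep, verifying that strings
+      // match and container item counts line up.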
+      Stack<S> countStack = new Stack<S>();
+      long count = 0;
+      while (parser.nextToken() != null) {
+        switch (parser.getCurrentToken()) {
+        case END_ARRAY:
+          assertEquals(0, count);
+          assertTrue(countStack.peek().isArray);
+          count = countStack.pop().count;
+          break;
+        case END_OBJECT:
+          assertEquals(0, count);
+          assertFalse(countStack.peek().isArray);
+          count = countStack.pop().count;
+          break;
+        case START_ARRAY:
+          countStack.push(new S(count, true));
+          count = input.readArrayStart();
+          continue;
+        case VALUE_STRING:
+        {
+          String s = parser.getText();
+          int n = s.getBytes(UTF_8).length;
+          checkString(s, input, n);
+          break;
+        }
+        case FIELD_NAME:
+        {
+          String s = parser.getCurrentName();
+          int n = s.getBytes(UTF_8).length;
+          checkString(s, input, n);
+          continue;
+        }
+        case START_OBJECT:
+          countStack.push(new S(count, false));
+          count = input.readMapStart();
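+          // A negative count marks a blocked map: its absolute value is
+          // the item count, and the block's byte count follows.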
+          if (count < 0) {
+            count = -count;
+            input.readLong();  // byte count
+          }
+          continue;
+        default:
+          throw new RuntimeException("Unsupported: " + parser.getCurrentToken());
+        }
+        count--;
+        if (count == 0) {
+          count = countStack.peek().isArray ? input.arrayNext() :
+            input.mapNext();
+        }
+      }
+    }
+
+    public void skip(int skipLevel) throws IOException {
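+      // Same traversal as scan(), except that once nesting reaches
+      // skipLevel the whole subtree is skipped via skipArray/skipMap/
+      // skipBytes instead of being decoded.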
+      Stack<S> countStack = new Stack<S>();
+      long count = 0;
+      while (parser.nextToken() != null) {
+        switch (parser.getCurrentToken()) {
+        case END_ARRAY:
+          // assertEquals(0, count);
+          assertTrue(countStack.peek().isArray);
+          count = countStack.pop().count;
+          break;
+        case END_OBJECT:
+          // assertEquals(0, count);
+          assertFalse(countStack.peek().isArray);
+          count = countStack.pop().count;
+          break;
+        case START_ARRAY:
+          if (countStack.size() == skipLevel) {
+            skipArray(parser, input, depth - skipLevel);
+            break;
+          } else {
+            countStack.push(new S(count, true));
+            count = input.readArrayStart();
+            continue;
+          }
+        case VALUE_STRING:
+        {
+          if (countStack.size() == skipLevel) {
+            input.skipBytes();
+          } else {
+            String s = parser.getText();
+            int n = s.getBytes(UTF_8).length;
+            checkString(s, input, n);
+          }
+          break;
+        }
+        case FIELD_NAME:
+        {
+          String s = parser.getCurrentName();
+          int n = s.getBytes(UTF_8).length;
+          checkString(s, input, n);
+          continue;
+        }
+        case START_OBJECT:
+          if (countStack.size() == skipLevel) {
+            skipMap(parser, input, depth - skipLevel);
+            break;
+          } else {
+            countStack.push(new S(count, false));
+            count = input.readMapStart();
+            if (count < 0) {
+              count = -count;
+              input.readLong();  // byte count
+            }
+            continue;
+          }
+        default:
+          throw new RuntimeException("Unsupported: " + parser.getCurrentToken());
+        }
+        count--;
+        if (count == 0) {
+          count = countStack.peek().isArray ? input.arrayNext() :
+            input.mapNext();
+        }
+      }
+    }
+  }
+
+  protected static void dump(byte[] bb) {
+    int col = 0;
+    for (byte b : bb) {
+      if (col % 16 == 0) {
+        System.out.println();
+      }
+      col++;
+      System.out.print(Integer.toHexString(b & 0xff) + " ");
+    }
+    System.out.println();
+  }
+
+  private static class S {
+    public final long count;
+    public final boolean isArray;
+    
+    public S(long count, boolean isArray) {
+      this.count = count;
+      this.isArray = isArray;
+    }
+  }
+
+  @Test
+  public void testScan() throws IOException {
+    Tests t = new Tests(iSize, iDepth, sInput);
+    t.scan();
+  }
+
+  @Test
+  public void testSkip1() throws IOException {
+    testSkip(iSize, iDepth, sInput, 0);
+  }
+
+  @Test
+  public void testSkip2() throws IOException {
+    testSkip(iSize, iDepth, sInput, 1);
+  }
+
+  @Test
+  public void testSkip3() throws IOException {
+    testSkip(iSize, iDepth, sInput, 2);
+  }
+
+  private void testSkip(int bufferSize, int depth, String input,
+      int skipLevel)
+    throws IOException {
+    Tests t = new Tests(bufferSize, depth, input);
+    t.skip(skipLevel);
+  }
+
+  private static void skipMap(JsonParser parser, Decoder input, int depth)
+    throws IOException {
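+    // skipMap() fast-skips whole blocks where a byte count is available
+    // and returns the item count of any block that must be skipped entry
+    // by entry; 0 means the map has been fully consumed.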
+    for (long l = input.skipMap(); l != 0; l = input.skipMap()) {
+      for (long i = 0; i < l; i++) {
+        if (depth == 0) {
+          input.skipBytes();
+        } else {
+          skipArray(parser, input, depth - 1);
+        }
+      }
+    }
+    parser.skipChildren();
+  }
+
+  private static void skipArray(JsonParser parser, Decoder input, int depth)
+    throws IOException {
+    for (long l = input.skipArray(); l != 0; l = input.skipArray()) {
+      for (long i = 0; i < l; i++) {
+        if (depth == 1) {
+          input.skipBytes();
+        } else {
+          skipArray(parser, input, depth - 1);
+        }
+      }
+    }
+    parser.skipChildren();
+  }
+ 
+  private static void checkString(String s, Decoder input, int n)
+    throws IOException {
+    ByteBuffer buf = input.readBytes(null);
+    assertEquals(n, buf.remaining());
+    String s2 = new String(buf.array(), buf.position(),
+        buf.remaining(), UTF_8);
+    assertEquals(s, s2);
+  }
+  
+  private static void serialize(Encoder cos, JsonParser p,
+      ByteArrayOutputStream os)
+    throws IOException {
+    boolean[] isArray = new boolean[100];
+    int[] counts = new int[100];
+    int stackTop = -1;
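+    // isArray/counts form a manual stack mirroring the JSON nesting;
+    // stackTop == -1 means we are at the top level.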
+    
+    while (p.nextToken() != null) {
+      switch (p.getCurrentToken()) {
+      case END_ARRAY:
+        assertTrue(isArray[stackTop]);
+        cos.writeArrayEnd();
+        stackTop--;
+        break;
+      case END_OBJECT:
+        assertFalse(isArray[stackTop]);
+        cos.writeMapEnd();
+        stackTop--;
+        break;
+      case START_ARRAY:
+        if (stackTop >= 0 && isArray[stackTop]) {
+          cos.setItemCount(1);
+          cos.startItem();
+          counts[stackTop]++;
+        }
+        cos.writeArrayStart();
+        isArray[++stackTop] = true;
+        counts[stackTop] = 0;
+        continue;
+      case VALUE_STRING:
+        if (stackTop >= 0 && isArray[stackTop]) {
+          cos.setItemCount(1);
+          cos.startItem();
+          counts[stackTop]++;
+        }
+        byte[] bb = p.getText().getBytes(UTF_8);
+        cos.writeBytes(bb);
+        break;
+      case START_OBJECT:
+        if (stackTop >= 0 && isArray[stackTop]) {
+          cos.setItemCount(1);
+          cos.startItem();
+          counts[stackTop]++;
+        }
+        cos.writeMapStart();
+        isArray[++stackTop] = false;
+        counts[stackTop] = 0;
+        continue;
+      case FIELD_NAME:
+        cos.setItemCount(1);
+        cos.startItem();
+        counts[stackTop]++;
+        cos.writeBytes(p.getCurrentName().getBytes(UTF_8));
+        break;
+     default:
+       throw new RuntimeException("Unsupported: " + p.getCurrentToken());
+      }
+    }
+  }
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> data() {
+    return Arrays.asList (new Object[][] {
+        { 64, 0, "" },
+        { 64, 0, jss(0, 'a') },
+        { 64, 0, jss(3, 'a') },
+        { 64, 0, jss(64, 'a') },
+        { 64, 0, jss(65, 'a') },
+        { 64, 0, jss(100, 'a') },
+        { 64, 1, "[]" },
+        { 64, 1, "[" + jss(0, 'a') + "]" },
+        { 64, 1, "[" + jss(3, 'a') + "]" },
+        { 64, 1, "[" + jss(61, 'a') + "]" },
+        { 64, 1, "[" + jss(62, 'a') + "]" },
+        { 64, 1, "[" + jss(64, 'a') + "]" },
+        { 64, 1, "[" + jss(65, 'a') + "]" },
+        { 64, 1, "[" + jss(0, 'a') + "," + jss(0, '0') + "]" },
+        { 64, 1, "[" + jss(0, 'a') + "," + jss(10, '0') + "]" },
+        { 64, 1, "[" + jss(0, 'a') + "," + jss(63, '0') + "]" },
+        { 64, 1, "[" + jss(0, 'a') + "," + jss(64, '0') + "]" },
+        { 64, 1, "[" + jss(0, 'a') + "," + jss(65, '0') + "]" },
+        { 64, 1, "[" + jss(10, 'a') + "," + jss(0, '0') + "]" },
+        { 64, 1, "[" + jss(10, 'a') + "," + jss(10, '0') + "]" },
+        { 64, 1, "[" + jss(10, 'a') + "," + jss(51, '0') + "]" },
+        { 64, 1, "[" + jss(10, 'a') + "," + jss(52, '0') + "]" },
+        { 64, 1, "[" + jss(10, 'a') + "," + jss(54, '0') + "]" },
+        { 64, 1, "[" + jss(10, 'a') + "," + jss(55, '0') + "]" },
+
+        { 64, 1, "[" + jss(0, 'a') + "," + jss(0, 'a') + "," + jss(0, '0')
+               + "]" },
+        { 64, 1, "[" + jss(0, 'a') + "," + jss(0, 'a') + "," + jss(63, '0')
+               + "]" },
+        { 64, 1, "[" + jss(0, 'a') + "," + jss(0, 'a') + "," + jss(64, '0')
+               + "]" },
+        { 64, 1, "[" + jss(0, 'a') + "," + jss(0, 'a') + "," + jss(65, '0')
+                 + "]" },
+        { 64, 1, "[" + jss(10, 'a') + "," + jss(20, 'A') + "," + jss(10, '0')
+                 + "]" },
+        { 64, 1, "[" + jss(10, 'a') + "," + jss(20, 'A') + "," + jss(23, '0')
+                 + "]" },
+        { 64, 1, "[" + jss(10, 'a') + "," + jss(20, 'A') + "," + jss(24, '0')
+                 + "]" },
+        { 64, 1, "[" + jss(10, 'a') + "," + jss(20, 'A') + "," + jss(25, '0')
+                 + "]" },
+        { 64, 2, "[[]]"},
+        { 64, 2, "[[" + jss(0, 'a') + "], []]" },
+        { 64, 2, "[[" + jss(10, 'a') + "], []]" },
+        { 64, 2, "[[" + jss(59, 'a') + "], []]" },
+        { 64, 2, "[[" + jss(60, 'a') + "], []]" },
+        { 64, 2, "[[" + jss(100, 'a') + "], []]" },
+        { 64, 2, "[[" + jss(10, '0') + ", " + jss(53, 'a') + "], []]" },
+        { 64, 2, "[[" + jss(10, '0') + ", "  + jss(54, 'a') + "], []]" },
+        { 64, 2, "[[" + jss(10, '0') + ", "  + jss(55, 'a') + "], []]" },
+
+        { 64, 2, "[[], [" + jss(0, 'a') + "]]" },
+        { 64, 2, "[[], [" + jss(10, 'a') + "]]" },
+        { 64, 2, "[[], [" + jss(63, 'a') + "]]" },
+        { 64, 2, "[[], [" + jss(64, 'a') + "]]" },
+        { 64, 2, "[[], [" + jss(65, 'a') + "]]" },
+        { 64, 2, "[[], [" + jss(10, '0') + ", " + jss(53, 'a') + "]]" },
+        { 64, 2, "[[], [" + jss(10, '0') + ", " + jss(54, 'a') + "]]" },
+        { 64, 2, "[[], [" + jss(10, '0') + ", " + jss(55, 'a') + "]]" },
+
+        { 64, 2, "[[" + jss(10, '0') + "]]"},
+        { 64, 2, "[[" + jss(62, '0') + "]]"},
+        { 64, 2, "[[" + jss(63, '0') + "]]"},
+        { 64, 2, "[[" + jss(64, '0') + "]]"},
+        { 64, 2, "[[" + jss(10, 'a') + ", " + jss(10, '0') + "]]"},
+        { 64, 2, "[[" + jss(10, 'a') + ", " + jss(52, '0') + "]]"},
+        { 64, 2, "[[" + jss(10, 'a') + ", " + jss(53, '0') + "]]"},
+        { 64, 2, "[[" + jss(10, 'a') + ", " + jss(54, '0') + "]]"},
+        { 64, 3, "[[[" + jss(10, '0') + "]]]"},
+        { 64, 3, "[[[" + jss(62, '0') + "]]]"},
+        { 64, 3, "[[[" + jss(63, '0') + "]]]"},
+        { 64, 3, "[[[" + jss(64, '0') + "]]]"},
+        { 64, 3, "[[[" + jss(10, 'a') + ", " + jss(10, '0') + "]]]"},
+        { 64, 3, "[[[" + jss(10, 'a') + ", " + jss(52, '0') + "]]]"},
+        { 64, 3, "[[[" + jss(10, 'a') + ", " + jss(53, '0') + "]]]"},
+        { 64, 3, "[[[" + jss(10, 'a') + "], [" + jss(54, '0') + "]]]"},
+        { 64, 3, "[[[" + jss(10, 'a') + "], [" + jss(10, '0') + "]]]"},
+        { 64, 3, "[[[" + jss(10, 'a') + "], [" + jss(52, '0') + "]]]"},
+        { 64, 3, "[[[" + jss(10, 'a') + "], [" + jss(53, '0') + "]]]"},
+        { 64, 3, "[[[" + jss(10, 'a') + "], [" + jss(54, '0') + "]]]"},
+
+        { 64, 2, "[[\"p\"], [\"mn\"]]"},
+        { 64, 2, "[[\"pqr\"], [\"mn\"]]"},
+        { 64, 2, "[[\"pqrstuvwxyz\"], [\"mn\"]]"},
+        { 64, 2, "[[\"abc\", \"pqrstuvwxyz\"], [\"mn\"]]"},
+        { 64, 2, "[[\"mn\"], [\"\"]]"},
+        { 64, 2, "[[\"mn\"], \"abc\"]"},
+        { 64, 2, "[[\"mn\"], \"abcdefghijk\"]"},
+        { 64, 2, "[[\"mn\"], \"pqr\", \"abc\"]"},
+        { 64, 2, "[[\"mn\"]]"},
+        { 64, 2, "[[\"p\"], [\"mnopqrstuvwx\"]]"},
+        { 64, 2, "[[\"pqr\"], [\"mnopqrstuvwx\"]]"},
+        { 64, 2, "[[\"pqrstuvwxyz\"], [\"mnopqrstuvwx\"]]"},
+        { 64, 2, "[[\"abc\"], \"pqrstuvwxyz\", [\"mnopqrstuvwx\"]]"},
+        { 64, 2, "[[\"mnopqrstuvwx\"], [\"\"]]"},
+        { 64, 2, "[[\"mnopqrstuvwx\"], [\"abc\"]]"},
+        { 64, 2, "[[\"mnopqrstuvwx\"], [\"abcdefghijk\"]]"},
+        { 64, 2, "[[\"mnopqrstuvwx\"], [\"pqr\", \"abc\"]]"},
+        { 100, 2, "[[\"pqr\", \"mnopqrstuvwx\"]]"},
+        { 100, 2, "[[\"pqr\", \"ab\", \"mnopqrstuvwx\"]]"},
+        { 64, 2, "[[[\"pqr\"]], [[\"ab\"], [\"mnopqrstuvwx\"]]]"},
+
+        { 64, 1, "{}" },
+        { 64, 1, "{\"n\": \"v\"}" },
+        { 64, 1, "{\"n1\": \"v\", \"n2\": []}" },
+        { 100, 1, "{\"n1\": \"v\", \"n2\": []}" },
+        { 100, 1, "{\"n1\": \"v\", \"n2\": [\"abc\"]}" },
+    });
+  }
+
+  /**
+   * Returns a quoted JSON string literal whose contents are {@code n}
+   * bytes long, filled with consecutive characters starting after {@code c}.
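+   * For example, {@code jss(3, 'a')} returns {@code "bcd"}, five
+   * characters including the surrounding quotes.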
+   */
+  private static String jss(final int n, char c) {
+    char[] cc = new char[n + 2];
+    cc[0] = cc[n + 1] = '"';
+    for (int i = 1; i < n + 1; i++) {
+      if (c == 'Z') {
+        c = 'a';
+      } else if (c == 'z') {
+        c = '0';
+      } else if (c == '9') {
+        c = 'A';
+      } else {
+        c++;
+      }
+      cc[i] = c;
+    }
+    return new String(cc);
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO2.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO2.java
new file mode 100644
index 0000000..6438a60
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO2.java
@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Arrays;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+/**
+ * This class has more exhaustive tests for blocking IO. We keep both
+ * TestBlockingIO and TestBlockingIO2 because with the mnemonics used in
+ * TestBlockingIO2 it is hard to test skip() operations, while with the
+ * test infrastructure of TestBlockingIO it is hard to test enums,
+ * unions, etc.
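+ *
+ * <p>Mnemonic legend (mirrors {@link TestValidatingIO#generate}): '[' and
+ * ']' open and close an array, '{' and '}' a map, 'c' plus a count calls
+ * setItemCount, 's' calls startItem, 'S'/'K' plus a length write a string
+ * or map key of that many bytes, 'b'/'f' plus a length write bytes or
+ * fixed, 'e' plus a value writes an enum, 'U' plus an index writes a
+ * union index, and N/B/I/L/F/D write null, boolean, int, long, float and
+ * double values.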
+ */
+@RunWith(Parameterized.class)
+public class TestBlockingIO2 {
+  private final Decoder decoder;
+  private final String calls;
+  private Object[] values;
+  
+  public TestBlockingIO2 (int bufferSize, int skipLevel, String calls)
+    throws IOException {
+
+    ByteArrayOutputStream os = new ByteArrayOutputStream();
+    EncoderFactory factory = new EncoderFactory()
+        .configureBlockSize(bufferSize);
+    Encoder encoder = factory.blockingBinaryEncoder(os, null);
+    this.values = TestValidatingIO.randomValues(calls);
+
+    TestValidatingIO.generate(encoder, calls, values);
+    encoder.flush();
+    
+    byte[] bb = os.toByteArray();
+    
+    decoder = DecoderFactory.get().binaryDecoder(bb, null);
+    this.calls = calls;
+  }
+    
+  @Test
+  public void testScan() throws IOException {
+    TestValidatingIO.check(decoder, calls, values, -1);
+  }
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> data() {
+    return Arrays.asList (new Object[][] {
+        { 64, 0, "" },
+        { 64, 0, "S0" },
+        { 64, 0, "S3" },
+        { 64, 0, "S64" },
+        { 64, 0, "S65" },
+        { 64, 0, "S100" },
+        { 64, 1, "[]" },
+        { 64, 1, "[c1sS0]" },
+        { 64, 1, "[c1sS3]" },
+        { 64, 1, "[c1sS61]" },
+        { 64, 1, "[c1sS62]" },
+        { 64, 1, "[c1sS64]" },
+        { 64, 1, "[c1sS65]" },
+        { 64, 1, "[c2sS0sS0]" },
+        { 64, 1, "[c2sS0sS10]" },
+        { 64, 1, "[c2sS0sS63]" },
+        { 64, 1, "[c2sS0sS64]" },
+        { 64, 1, "[c2sS0sS65]" },
+        { 64, 1, "[c2sS10sS0]" },
+        { 64, 1, "[c2sS10sS10]" },
+        { 64, 1, "[c2sS10sS51]" },
+        { 64, 1, "[c2sS10sS52]" },
+        { 64, 1, "[c2sS10sS54]" },
+        { 64, 1, "[c2sS10sS55]" },
+        { 64, 1, "[c3sS0sS0sS0]" },
+        { 64, 1, "[c3sS0sS0sS63]" },
+        { 64, 1, "[c3sS0sS0sS64]" },
+        { 64, 1, "[c3sS0sS0sS65]" },
+        { 64, 1, "[c3sS10sS20sS10]" },
+        { 64, 1, "[c3sS10sS20sS23]" },
+        { 64, 1, "[c3sS10sS20sS24]" },
+        { 64, 1, "[c3sS10sS20sS25]" },
+        { 64, 1, "[c1s[]]" },
+        { 64, 1, "[c1s[c1sS0]]" },
+        { 64, 1, "[c1s[c1sS10]]" },
+        { 64, 1, "[c2s[c1sS10]s[]]" },
+        { 64, 1, "[c2s[c1sS59]s[]]" },
+        { 64, 1, "[c2s[c1sS60]s[]]" },
+        { 64, 1, "[c2s[c1sS100]s[]]" },
+        { 64, 1, "[c2s[c2sS10sS53]s[]]" },
+        { 64, 1, "[c2s[c2sS10sS54]s[]]" },
+        { 64, 1, "[c2s[c2sS10sS55]s[]]" },
+
+        { 64, 1, "[c2s[]s[c1sS0]]" },
+        { 64, 1, "[c2s[]s[c1sS10]]" },
+        { 64, 1, "[c2s[]s[c1sS63]]" },
+        { 64, 1, "[c2s[]s[c1sS64]]" },
+        { 64, 1, "[c2s[]s[c1sS65]]" },
+        { 64, 1, "[c2s[]s[c2sS10sS53]]" },
+        { 64, 1, "[c2s[]s[c2sS10sS54]]" },
+        { 64, 1, "[c2s[]s[c2sS10sS55]]" },
+
+        { 64, 1, "[c1s[c1sS10]]" },
+        { 64, 1, "[c1s[c1sS62]]" },
+        { 64, 1, "[c1s[c1sS63]]" },
+        { 64, 1, "[c1s[c1sS64]]" },
+
+        { 64, 1, "[c1s[c2sS10sS10]]" },
+        { 64, 1, "[c1s[c2sS10sS52]]" },
+        { 64, 1, "[c1s[c2sS10sS53]]" },
+        { 64, 1, "[c1s[c2sS10sS54]]" },
+
+        { 64, 1, "[c1s[c1s[c1sS10]]]" },
+        { 64, 1, "[c1s[c1s[c1sS62]]]" },
+        { 64, 1, "[c1s[c1s[c1sS63]]]" },
+        { 64, 1, "[c1s[c1s[c1sS64]]]" },
+
+        { 64, 1, "[c1s[c1s[c2sS10sS10]]]" },
+        { 64, 1, "[c1s[c1s[c2sS10sS52]]]" },
+        { 64, 1, "[c1s[c1s[c2sS10sS53]]]" },
+        { 64, 1, "[c1s[c1s[c2sS10sS54]]]" },
+
+        { 64, 1, "[c1s[c2sS10s[c1sS10]]]" },
+        { 64, 1, "[c1s[c2sS10s[c1sS52]]]" },
+        { 64, 1, "[c1s[c2sS10s[c1sS53]]]" },
+        { 64, 1, "[c1s[c2sS10s[c1sS54]]]" },
+
+        { 64, 1, "{}" },
+        { 64, 1, "{c1sK5S1}" },
+        { 64, 1, "{c1sK5[]}" },
+        { 100, 1, "{c1sK5[]}" },
+        { 100, 1, "{c1sK5[c1sS10]}" },
+
+        { 100, 1, "{c1sK5e10}" },
+        { 100, 1, "{c1sK5U1S10}" },
+        { 100, 1, "{c1sK5f10S10}" },
+        { 100, 1, "{c1sK5NS10}" },
+        { 100, 1, "{c1sK5BS10}" },
+        { 100, 1, "{c1sK5IS10}" },
+        { 100, 1, "{c1sK5LS10}" },
+        { 100, 1, "{c1sK5FS10}" },
+        { 100, 1, "{c1sK5DS10}" },
+    });
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java
new file mode 100644
index 0000000..46a9025
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.codehaus.jackson.JsonEncoding;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonGenerator;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestEncoders {
+  private static EncoderFactory factory = EncoderFactory.get();
+
+  @Test
+  public void testBinaryEncoderInit() throws IOException {
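+    // Passing the previous encoder back to the factory must reuse it
+    // rather than allocate a new one.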
+    OutputStream out = new ByteArrayOutputStream();
+    BinaryEncoder enc = factory.binaryEncoder(out, null);
+    Assert.assertTrue(enc == factory.binaryEncoder(out, enc));
+  }
+  
+  @Test(expected=NullPointerException.class)
+  public void testBadBinaryEncoderInit() {
+    factory.binaryEncoder(null, null);
+  }
+
+  @Test
+  public void testBlockingBinaryEncoderInit() throws IOException {
+    OutputStream out = new ByteArrayOutputStream();
+    BinaryEncoder reuse = factory.blockingBinaryEncoder(out, null);
+    // identity comparison: the factory must hand back the same encoder
+    Assert.assertTrue(reuse == factory.blockingBinaryEncoder(out, reuse));
+  }
+  
+  @Test(expected=NullPointerException.class)
+  public void testBadBlockingBinaryEncoderInit() {
+    factory.blockingBinaryEncoder(null, null);
+  }
+  
+  @Test
+  public void testDirectBinaryEncoderInit() throws IOException {
+    OutputStream out = new ByteArrayOutputStream();
+    BinaryEncoder enc = factory.directBinaryEncoder(out, null);
+    Assert.assertTrue(enc == factory.directBinaryEncoder(out, enc));
+  }
+  
+  @Test(expected=NullPointerException.class)
+  public void testBadDirectBinaryEncoderInit() {
+    factory.directBinaryEncoder(null, null);
+  }
+
+  @Test
+  public void testJsonEncoderInit() throws IOException {
+    Schema s = Schema.parse("\"int\"");
+    OutputStream out = new ByteArrayOutputStream();
+    factory.jsonEncoder(s, out);
+    JsonEncoder enc = factory.jsonEncoder(s,
+        new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8));
+    enc.configure(out);
+  }
+  
+  @Test(expected=NullPointerException.class)
+  public void testBadJsonEncoderInitOS() throws IOException {
+    factory.jsonEncoder(Schema.create(Type.INT), (OutputStream)null);
+  }
+  
+  @Test(expected=NullPointerException.class)
+  public void testBadJsonEncoderInit() throws IOException {
+    factory.jsonEncoder(Schema.create(Type.INT), (JsonGenerator)null);
+  }
+
+  @Test
+  public void testJsonEncoderNewlineDelimited() throws IOException {
+    OutputStream out = new ByteArrayOutputStream();
+    Schema ints = Schema.create(Type.INT);
+    Encoder e = factory.jsonEncoder(ints, out);
+    String separator = System.getProperty("line.separator");
+    GenericDatumWriter<Integer> writer = new GenericDatumWriter<Integer>(ints);
+    writer.write(1, e);
+    writer.write(2, e);
+    e.flush();
+    Assert.assertEquals("1"+separator+"2", out.toString());
+  }
+
+  @Test
+  public void testValidatingEncoderInit() throws IOException {
+    Schema s = Schema.parse("\"int\"");
+    OutputStream out = new ByteArrayOutputStream();
+    Encoder e = factory.directBinaryEncoder(out, null);
+    factory.validatingEncoder(s, e).configure(e);
+  }
+
+  @Test
+  public void testJsonRecordOrdering() throws IOException {
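+    // JSON input may list record fields in any order; the JsonDecoder
+    // reorders them to match the schema.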
+    String value = "{\"b\": 2, \"a\": 1}";
+    Schema schema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [" +
+        "{\"name\": \"a\", \"type\": \"int\"}, {\"name\": \"b\", \"type\": \"int\"}" +
+        "]}");
+    GenericDatumReader<Object> reader = new GenericDatumReader<Object>(schema);
+    Decoder decoder = DecoderFactory.get().jsonDecoder(schema, value);
+    Object o = reader.read(null, decoder);
+    Assert.assertEquals("{\"a\": 1, \"b\": 2}", o.toString());
+  }
+
+  @Test(expected=AvroTypeException.class)
+  public void testJsonExcessFields() throws IOException {
+    String value = "{\"b\": { \"b3\": 1.4, \"b2\": 3.14, \"b1\": \"h\"}, \"a\": {\"a0\": 45, \"a2\":true, \"a1\": null}}";
+    Schema schema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n" +
+        "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n" +
+        "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n" +
+        "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n" +
+        "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":\"float\"}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n" +
+        "]}");
+    GenericDatumReader<Object> reader = new GenericDatumReader<Object>(schema);
+    Decoder decoder = DecoderFactory.get().jsonDecoder(schema, value);
+    reader.read(null, decoder);
+  }
+
+  @Test
+  public void testJsonRecordOrdering2() throws IOException {
+    String value = "{\"b\": { \"b3\": 1.4, \"b2\": 3.14, \"b1\": \"h\"}, \"a\": {\"a2\":true, \"a1\": null}}";
+    Schema schema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n" +
+        "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n" +
+        "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n" +
+        "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n" +
+        "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":\"float\"}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n" +
+        "]}");
+    GenericDatumReader<Object> reader = new GenericDatumReader<Object>(schema);
+    Decoder decoder = DecoderFactory.get().jsonDecoder(schema, value);
+    Object o = reader.read(null, decoder);
+    Assert.assertEquals("{\"a\": {\"a1\": null, \"a2\": true}, \"b\": {\"b1\": \"h\", \"b2\": 3.14, \"b3\": 1.4}}", o.toString());
+  }
+
+  @Test
+  public void testJsonRecordOrderingWithProjection() throws IOException {
+    String value = "{\"b\": { \"b3\": 1.4, \"b2\": 3.14, \"b1\": \"h\"}, \"a\": {\"a2\":true, \"a1\": null}}";
+    Schema writerSchema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n" +
+        "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n" +
+        "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n" +
+        "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n" +
+        "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":\"float\"}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n" +
+        "]}");
+    Schema readerSchema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n" +
+      "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n" +
+      "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}}\n" +
+      "]}");
+    GenericDatumReader<Object> reader = new GenericDatumReader<Object>(writerSchema, readerSchema);
+    Decoder decoder = DecoderFactory.get().jsonDecoder(writerSchema, value);
+    Object o = reader.read(null, decoder);
+    Assert.assertEquals("{\"a\": {\"a1\": null, \"a2\": true}}", o.toString());
+  }
+
+  @Test
+  public void testJsonRecordOrderingWithProjection2() throws IOException {
+    String value = "{\"b\": { \"b1\": \"h\", \"b2\": [3.14, 3.56], \"b3\": 1.4}, \"a\": {\"a2\":true, \"a1\": null}}";
+    Schema writerSchema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n" +
+        "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n" +
+        "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n" +
+        "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n" +
+        "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":{\"type\":\"array\", \"items\":\"float\"}}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n" +
+        "]}");
+    Schema readerSchema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n" +
+      "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n" +
+      "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}}\n" +
+      "]}");
+    GenericDatumReader<Object> reader = new GenericDatumReader<Object>(writerSchema, readerSchema);
+    Decoder decoder = DecoderFactory.get().jsonDecoder(writerSchema, value);
+    Object o = reader.read(null, decoder);
+    Assert.assertEquals("{\"a\": {\"a1\": null, \"a2\": true}}", o.toString());
+  }
+
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestJsonDecoder.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestJsonDecoder.java
new file mode 100644
index 0000000..7946beb
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestJsonDecoder.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Parser;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.generic.GenericDatumReader;
+
+import org.junit.Test;
+import org.junit.Assert;
+
+public class TestJsonDecoder {
+  
+  @Test public void testInt() throws Exception {
+    checkNumeric("int", 1);
+  }
+
+  @Test public void testLong() throws Exception {
+    checkNumeric("long", 1L);
+  }
+
+  @Test public void testFloat() throws Exception {
+    checkNumeric("float", 1.0F);
+  }
+
+  @Test public void testDouble() throws Exception {
+    checkNumeric("double", 1.0);
+  }
+
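+  // Each numeric type must accept both integral ("1") and decimal ("1.0")
+  // JSON literals; the decoder coerces the value to the schema's type.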
+  private void checkNumeric(String type, Object value) throws Exception {
+    String def =
+        "{\"type\":\"record\",\"name\":\"X\",\"fields\":"
+        + "[{\"type\":\"" + type + "\",\"name\":\"n\"}]}";
+    Schema schema = Schema.parse(def);
+    DatumReader<GenericRecord> reader =
+      new GenericDatumReader<GenericRecord>(schema);
+
+    String[] records = {"{\"n\":1}", "{\"n\":1.0}"};
+
+    for (String record : records) {
+      Decoder decoder = DecoderFactory.get().jsonDecoder(schema, record);
+      GenericRecord r = reader.read(null, decoder);
+      Assert.assertEquals(value, r.get("n"));
+    }
+  }
+
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIO.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIO.java
new file mode 100644
index 0000000..d5f1b06
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIO.java
@@ -0,0 +1,216 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.Collection;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.TestValidatingIO.Encoding;
+import org.junit.Test;
+import org.junit.runners.Parameterized;
+import org.junit.runner.RunWith;
+
+@RunWith(Parameterized.class)
+public class TestResolvingIO {
+
+  protected final Encoding eEnc;
+  protected final int iSkipL;
+  protected final String sJsWrtSchm;
+  protected final String sWrtCls;
+  protected final String sJsRdrSchm;
+  protected final String sRdrCls;
+
+  public TestResolvingIO (Encoding encoding,
+      int skipLevel, String jsonWriterSchema,
+      String writerCalls,
+      String jsonReaderSchema, String readerCalls
+  ) {
+    this.eEnc = encoding;
+    this.iSkipL = skipLevel;
+    this.sJsWrtSchm = jsonWriterSchema;
+    this.sWrtCls = writerCalls;
+    this.sJsRdrSchm = jsonReaderSchema;
+    this.sRdrCls = readerCalls;
+  }
+  
+  @Test
+  public void testIdentical() throws IOException {
+    performTest(eEnc, iSkipL, sJsWrtSchm, sWrtCls, sJsWrtSchm, sWrtCls);
+  }
+
+  private static final int COUNT = 10;
+
+  @Test
+  public void testCompatible() throws IOException {
+    performTest(eEnc, iSkipL, sJsWrtSchm, sWrtCls, sJsRdrSchm, sRdrCls);
+  }
+
+  private void performTest(Encoding encoding,
+      int skipLevel, String jsonWriterSchema,
+      String writerCalls, 
+      String jsonReaderSchema, String readerCalls)
+  throws IOException {
+    for (int i = 0; i < COUNT; i++) {
+      testOnce(jsonWriterSchema, writerCalls,
+          jsonReaderSchema, readerCalls, encoding, skipLevel);
+    }
+  }
+  
+  private void testOnce(String jsonWriterSchema,
+      String writerCalls,
+      String jsonReaderSchema,
+      String readerCalls,
+      Encoding encoding,
+      int skipLevel) throws IOException {
+    Object[] values = TestValidatingIO.randomValues(writerCalls);
+    Object[] expected = TestValidatingIO.randomValues(readerCalls);
+    
+    Schema writerSchema = new Schema.Parser().parse(jsonWriterSchema);
+    byte[] bytes = TestValidatingIO.make(writerSchema, writerCalls,
+        values, encoding);
+    Schema readerSchema = new Schema.Parser().parse(jsonReaderSchema);
+    TestValidatingIO.print(encoding, skipLevel, writerSchema, readerSchema, values, expected);
+    check(writerSchema, readerSchema, bytes, readerCalls,
+        expected,
+        encoding, skipLevel);
+  }
+
+  static void check(Schema wsc, Schema rsc, byte[] bytes,
+      String calls, Object[] values, Encoding encoding,
+      int skipLevel)
+      throws IOException {
+    // TestValidatingIO.dump(bytes);
+    // System.out.println(new String(bytes, "UTF-8"));
+    Decoder bvi = null;
+    switch (encoding) {
+    case BINARY:
+    case BLOCKING_BINARY:
+      bvi = DecoderFactory.get().binaryDecoder(bytes, null);
+      break;
+    case JSON:
+      InputStream in = new ByteArrayInputStream(bytes);
+      bvi = new JsonDecoder(wsc, in);
+      break;
+    }
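+    // Layer a ResolvingDecoder on top of the raw decoder so that writer/
+    // reader schema differences (promotions, defaults, reordering) are
+    // resolved on the fly.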
+    Decoder vi = new ResolvingDecoder(wsc, rsc, bvi);
+    TestValidatingIO.check(vi, calls, values, skipLevel);
+  }
+  
+  @Parameterized.Parameters
+  public static Collection<Object[]> data2() {
+    return Arrays.asList(TestValidatingIO.convertTo2dArray(encodings, skipLevels, testSchemas()));
+  }
+
+  static Object[][] encodings = new Object[][] { { Encoding.BINARY },
+          { Encoding.BLOCKING_BINARY }, { Encoding.JSON } };
+  static Object[][] skipLevels =
+    new Object[][] { { -1 }, { 0 }, { 1 }, { 2 }  };
+  private static Object[][] testSchemas() {
+    // The mnemonics are the same as {@link TestValidatingIO#testSchemas}
+    return new Object[][] {
+        { "\"int\"", "I", "\"float\"", "F" },
+        { "\"int\"", "I", "\"double\"", "D" },
+        { "\"int\"", "I", "\"long\"", "L" },
+        { "\"long\"", "L", "\"float\"", "F" },
+        { "\"long\"", "L", "\"double\"", "D" },
+        { "\"float\"", "F", "\"double\"", "D" },
+
+        { "{\"type\":\"array\", \"items\": \"int\"}", "[]",
+          "{\"type\":\"array\", \"items\": \"long\"}", "[]", },
+        { "{\"type\":\"array\", \"items\": \"int\"}", "[]",
+          "{\"type\":\"array\", \"items\": \"double\"}", "[]" },
+        { "{\"type\":\"array\", \"items\": \"long\"}", "[]",
+          "{\"type\":\"array\", \"items\": \"double\"}", "[]" },
+        { "{\"type\":\"array\", \"items\": \"float\"}", "[]",
+          "{\"type\":\"array\", \"items\": \"double\"}", "[]" },
+
+        { "{\"type\":\"array\", \"items\": \"int\"}", "[c1sI]",
+          "{\"type\":\"array\", \"items\": \"long\"}", "[c1sL]" },
+        { "{\"type\":\"array\", \"items\": \"int\"}", "[c1sI]",
+          "{\"type\":\"array\", \"items\": \"double\"}", "[c1sD]" },
+        { "{\"type\":\"array\", \"items\": \"long\"}", "[c1sL]",
+          "{\"type\":\"array\", \"items\": \"double\"}", "[c1sD]" },
+        { "{\"type\":\"array\", \"items\": \"float\"}", "[c1sF]",
+          "{\"type\":\"array\", \"items\": \"double\"}", "[c1sD]" },
+
+        { "{\"type\":\"map\", \"values\": \"int\"}", "{}",
+          "{\"type\":\"map\", \"values\": \"long\"}", "{}" },
+        { "{\"type\":\"map\", \"values\": \"int\"}", "{}",
+          "{\"type\":\"map\", \"values\": \"double\"}", "{}" },
+        { "{\"type\":\"map\", \"values\": \"long\"}", "{}",
+          "{\"type\":\"map\", \"values\": \"double\"}", "{}" },
+        { "{\"type\":\"map\", \"values\": \"float\"}", "{}",
+          "{\"type\":\"map\", \"values\": \"double\"}", "{}" },
+
+        { "{\"type\":\"map\", \"values\": \"int\"}", "{c1sK5I}",
+          "{\"type\":\"map\", \"values\": \"long\"}", "{c1sK5L}" },
+        { "{\"type\":\"map\", \"values\": \"int\"}", "{c1sK5I}",
+          "{\"type\":\"map\", \"values\": \"double\"}", "{c1sK5D}" },
+        { "{\"type\":\"map\", \"values\": \"long\"}", "{c1sK5L}",
+          "{\"type\":\"map\", \"values\": \"double\"}", "{c1sK5D}" },
+        { "{\"type\":\"map\", \"values\": \"float\"}", "{c1sK5F}",
+          "{\"type\":\"map\", \"values\": \"double\"}", "{c1sK5D}" },
+
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f\", \"type\":\"int\"}]}", "I",
+          "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f\", \"type\":\"long\"}]}", "L" },
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f\", \"type\":\"int\"}]}", "I",
+          "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f\", \"type\":\"double\"}]}", "D" },
+
+        // multi-field record with promotions
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f0\", \"type\":\"boolean\"},"
+          + "{\"name\":\"f1\", \"type\":\"int\"},"
+          + "{\"name\":\"f2\", \"type\":\"float\"},"
+          + "{\"name\":\"f3\", \"type\":\"bytes\"},"
+          + "{\"name\":\"f4\", \"type\":\"string\"}]}", "BIFbS",
+          "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f0\", \"type\":\"boolean\"},"
+          + "{\"name\":\"f1\", \"type\":\"long\"},"
+          + "{\"name\":\"f2\", \"type\":\"double\"},"
+          + "{\"name\":\"f3\", \"type\":\"string\"},"
+          + "{\"name\":\"f4\", \"type\":\"bytes\"}]}", "BLDSb" },
+
+        { "[\"int\"]", "U0I",
+              "[\"long\"]", "U0L" },
+        { "[\"int\"]", "U0I",
+              "[\"double\"]", "U0D" },
+        { "[\"long\"]", "U0L",
+              "[\"double\"]", "U0D" },
+        { "[\"float\"]", "U0F",
+              "[\"double\"]", "U0D" },
+
+        { "\"int\"", "I", "[\"int\"]", "U0I" },
+
+        { "[\"int\"]", "U0I", "\"int\"", "I" },
+        { "[\"int\"]", "U0I", "\"long\"", "L" },
+
+        { "[\"boolean\", \"int\"]", "U1I",
+              "[\"boolean\", \"long\"]", "U1L" },
+        { "[\"boolean\", \"int\"]", "U1I",
+              "[\"long\", \"boolean\"]", "U0L" },
+    };
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIOResolving.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIOResolving.java
new file mode 100644
index 0000000..e6377b5
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIOResolving.java
@@ -0,0 +1,206 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import org.junit.Test;
+import org.junit.runners.Parameterized;
+import org.junit.runner.RunWith;
+import org.apache.avro.Schema;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Arrays;
+
+@RunWith(Parameterized.class)
+public class TestResolvingIOResolving {
+  protected TestValidatingIO.Encoding eEnc;
+  protected final int iSkipL;
+  protected final String sJsWrtSchm;
+  protected final String sWrtCls;
+  protected final String sJsRdrSchm;
+  protected final String sRdrCls;
+
+  protected final Object[] oaWrtVals;
+  protected final Object[] oaRdrVals;
+
+  public TestResolvingIOResolving(TestValidatingIO.Encoding encoding,
+      int skipLevel, String jsonWriterSchema,
+      String writerCalls,
+      Object[] writerValues,
+      String jsonReaderSchema, String readerCalls,
+      Object[] readerValues
+  ) {
+    this.eEnc = encoding;
+    this.iSkipL = skipLevel;
+    this.sJsWrtSchm = jsonWriterSchema;
+    this.sWrtCls = writerCalls;
+    this.oaWrtVals = writerValues;
+    this.sJsRdrSchm = jsonReaderSchema;
+    this.sRdrCls = readerCalls;
+    this.oaRdrVals = readerValues;
+  }
+
+  @Test
+  public void testResolving()
+    throws IOException {
+    Schema writerSchema = new Schema.Parser().parse(sJsWrtSchm);
+    byte[] bytes = TestValidatingIO.make(writerSchema, sWrtCls,
+        oaWrtVals, eEnc);
+    Schema readerSchema = new Schema.Parser().parse(sJsRdrSchm);
+    TestValidatingIO.print(eEnc, iSkipL, writerSchema, readerSchema, oaWrtVals, oaRdrVals);
+    TestResolvingIO.check(writerSchema, readerSchema, bytes, sRdrCls,
+        oaRdrVals,
+        eEnc, iSkipL);
+  }
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> data3() {
+    return Arrays.asList(
+        TestValidatingIO.convertTo2dArray(TestResolvingIO.encodings,
+            TestResolvingIO.skipLevels,
+            dataForResolvingTests()));
+  }
+
+  private static Object[][] dataForResolvingTests() {
+    // The mnemonics are the same as {@link TestValidatingIO#testSchemas}
+    return new Object[][] {
+        // Projection
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f1\", \"type\":\"string\"},"
+          + "{\"name\":\"f2\", \"type\":\"string\"},"
+          + "{\"name\":\"f3\", \"type\":\"int\"}]}", "S10S10IS10S10I",
+          new Object[] { "s1", "s2", 100, "t1", "t2", 200 },
+          "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f1\", \"type\":\"string\" },"
+          + "{\"name\":\"f2\", \"type\":\"string\"}]}", "RS10S10RS10S10",
+          new Object[] { "s1", "s2", "t1", "t2" } },
+        // Reordered fields
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f1\", \"type\":\"int\"},"
+          + "{\"name\":\"f2\", \"type\":\"string\"}]}", "IS10",
+          new Object[] { 10, "hello" },
+          "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f2\", \"type\":\"string\" },"
+          + "{\"name\":\"f1\", \"type\":\"long\"}]}", "RLS10",
+          new Object[] { 10L, "hello" } },
+
+        // Default values
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[]}", "",
+          new Object[] { },
+          "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f\", \"type\":\"int\", \"default\": 100}]}", "RI",
+          new Object[] { 100 } },
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+            + "{\"name\":\"f2\", \"type\":\"int\"}]}", "I",
+          new Object[] { 10 },
+          "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f1\", \"type\":\"int\", \"default\": 101},"
+          + "{\"name\":\"f2\", \"type\":\"int\"}]}", "RII",
+          new Object[] { 10, 101 } },
+        { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+            + "{\"name\": \"g1\", " +
+                        "\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
+                + "{\"name\":\"f2\", \"type\":\"int\"}]}}, "
+            + "{\"name\": \"g2\", \"type\": \"long\"}]}", "IL",
+          new Object[] { 10, 11L },
+          "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+            + "{\"name\": \"g1\", " +
+                        "\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
+                + "{\"name\":\"f1\", \"type\":\"int\", \"default\": 101},"
+                + "{\"name\":\"f2\", \"type\":\"int\"}]}}, "
+          + "{\"name\": \"g2\", \"type\": \"long\"}]}}", "RRIIL",
+          new Object[] { 10, 101, 11L } },
+        // Default value for a record.
+        { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+            + "{\"name\": \"g2\", \"type\": \"long\"}]}", "L",
+          new Object[] { 11L },
+          "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+            + "{\"name\": \"g1\", " +
+                "\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
+              + "{\"name\":\"f1\", \"type\":\"int\" },"
+              + "{\"name\":\"f2\", \"type\":\"int\"}] }, "
+              + "\"default\": { \"f1\": 10, \"f2\": 101 } }, "
+            + "{\"name\": \"g2\", \"type\": \"long\"}]}", "RLRII",
+          new Object[] { 11L, 10, 101} },
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[]}", "",
+          new Object[] { },
+          "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f\", \"type\":{ \"type\": \"array\", \"items\": \"int\" }, "
+            + "\"default\": [100]}]}", "[c1sI]",
+          new Object[] { 100 } },
+        { "{ \"type\": \"array\", \"items\": {\"type\":\"record\","
+            + "\"name\":\"r\",\"fields\":[]} }", "[c1s]",
+            new Object[] { },
+          "{ \"type\": \"array\", \"items\": {\"type\":\"record\","
+            + "\"name\":\"r\",\"fields\":["
+            + "{\"name\":\"f\", \"type\":\"int\", \"default\": 100}]} }",
+            "[c1sI]",
+          new Object[] { 100 } },
+        // Enum resolution
+        { "{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[\"x\",\"y\",\"z\"]}",
+            "e2",
+            new Object[] {  },
+            "{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[ \"y\", \"z\" ]}",
+            "e1",
+            new Object[] {  } },
+        { "{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[ \"x\", \"y\" ]}",
+            "e1",
+            new Object[] {  },
+            "{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[ \"y\", \"z\" ]}",
+            "e0",
+            new Object[] {  } },
+
+        // Union
+        { "\"int\"", "I", new Object[] { 100 },
+            "[ \"long\", \"int\"]", "U1I", new Object[] { 100 } },
+        { "[ \"long\", \"int\"]", "U1I", new Object[] { 100 } ,
+            "\"int\"", "I", new Object[] { 100 } },
+        // Union + promotion
+        { "\"int\"", "I", new Object[] { 100 },
+            "[ \"long\", \"string\"]", "U0L", new Object[] { 100L } },
+        { "[ \"int\", \"string\"]", "U0I", new Object[] { 100 },
+            "\"long\"", "L", new Object[] { 100L } },
+        // Record where union field is skipped.
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f0\", \"type\":\"boolean\"},"
+          + "{\"name\":\"f1\", \"type\":\"int\"},"
+          + "{\"name\":\"f2\", \"type\":[\"int\", \"long\"]},"
+          + "{\"name\":\"f3\", \"type\":\"float\"}"
+          + "]}", "BIU0IF",
+          new Object[] { true, 100, 121, 10.75f },
+          "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f0\", \"type\":\"boolean\"},"
+          + "{\"name\":\"f1\", \"type\":\"long\"},"
+          + "{\"name\":\"f3\", \"type\":\"double\"}]}", "BLD",
+          new Object[] { true, 100L, 10.75d } },
+        // Array of record with arrays.
+        { "{ \"type\": \"array\", \"items\":" +
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+            + "{\"name\":\"f0\", \"type\":\"boolean\"},"
+            + "{\"name\":\"f1\", \"type\": {\"type\":\"array\", \"items\": \"boolean\" }}"
+            + "]}}", "[c2sB[c2sBsB]sB[c3sBsBsB]]",
+            new Object[] { true, false, false, false, true, true, true },
+            "{ \"type\": \"array\", \"items\":" +
+            "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+            + "{\"name\":\"f0\", \"type\":\"boolean\"}"
+            + "]}}", "[c2sBsB]",
+            new Object[] { true, false } },
+    };
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestValidatingIO.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestValidatingIO.java
new file mode 100644
index 0000000..bfec06d
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestValidatingIO.java
@@ -0,0 +1,860 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Random;
+
+import org.apache.avro.Schema;
+import org.apache.avro.util.Utf8;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@RunWith(Parameterized.class)
+public class TestValidatingIO {
+  enum Encoding {
+    BINARY,
+    BLOCKING_BINARY,
+    JSON,
+  }
+  
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestValidatingIO.class);
+  
+  private Encoding eEnc;
+  private int iSkipL;
+  private String sJsSch;
+  private String sCl;
+
+  public TestValidatingIO (Encoding enc, int skip, String js, String cls) {
+    this.eEnc = enc;
+    this.iSkipL = skip;
+    this.sJsSch = js;
+    this.sCl = cls;
+  }
+  private static final int COUNT = 1;
+  
+  @Test
+  public void testMain() throws IOException {
+    for (int i = 0; i < COUNT; i++) {
+      testOnce(new Schema.Parser().parse(sJsSch), sCl, iSkipL, eEnc);
+    }
+  }
+
+  private void testOnce(Schema schema, String calls,
+      int skipLevel,
+      Encoding encoding)
+    throws IOException {
+    Object[] values = randomValues(calls);
+    print(eEnc, iSkipL, schema, schema, values, values);
+    byte[] bytes = make(schema, calls, values, encoding);
+    check(schema, bytes, calls, values, skipLevel, encoding);
+  }
+
+  public static byte[] make(Schema sc, String calls,
+      Object[] values, Encoding encoding) throws IOException {
+    EncoderFactory factory = EncoderFactory.get();
+    ByteArrayOutputStream ba = new ByteArrayOutputStream();
+    Encoder bvo = null;
+    switch (encoding) {
+    case BINARY:
+      bvo = factory.binaryEncoder(ba, null);
+      break;
+    case BLOCKING_BINARY:
+      bvo = factory.blockingBinaryEncoder(ba, null);
+      break;
+    case JSON:
+      bvo = factory.jsonEncoder(sc, ba);
+      break;
+    }
+        
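+    // The validating encoder checks every write call against the schema
+    // before delegating to the underlying encoder.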
+    Encoder vo = factory.validatingEncoder(sc, bvo);
+    generate(vo, calls, values);
+    vo.flush();
+    return ba.toByteArray();
+  }
+
+  public static class InputScanner {
+    private final char[] chars;
+    private int cpos = 0;
+    
+    public InputScanner(char[] chars) {
+      this.chars = chars;
+    }
+    
+    public boolean next() {
+      if (cpos < chars.length) {
+        cpos++;
+      }
+      return cpos != chars.length;
+    }
+    
+    public char cur() {
+      return chars[cpos];
+    }
+    
+    public boolean isDone() {
+      return cpos == chars.length;
+    }
+  }
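+
+  // Sketch of the driver protocol used below (full mnemonic table appears in
+  // testSchemas()): each character of "calls" selects one Encoder method,
+  // e.g. 'I' -> writeInt, 'S10' -> writeString of a 10-character string,
+  // '[' and ']' -> writeArrayStart and writeArrayEnd.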
+  public static void generate(Encoder vw, String calls,
+      Object[] values) throws IOException {
+    InputScanner cs = new InputScanner(calls.toCharArray());
+    int p = 0;
+    while (! cs.isDone()) {
+      char c = cs.cur();
+      cs.next();
+      switch (c) {
+      case 'N':
+        vw.writeNull();
+        break;
+      case 'B':
+        boolean b = (Boolean) values[p++];
+        vw.writeBoolean(b);
+        break;
+      case 'I':
+        int ii = (Integer) values[p++];
+        vw.writeInt(ii);
+        break;
+      case 'L':
+        long l = (Long) values[p++];
+        vw.writeLong(l);
+        break;
+      case 'F':
+        float f = (Float) values[p++];
+        vw.writeFloat(f);
+        break;
+      case 'D':
+        double d = (Double) values[p++];
+        vw.writeDouble(d);
+        break;
+      case 'S':
+        {
+          extractInt(cs);
+          String s = (String) values[p++];
+          vw.writeString(new Utf8(s));
+          break;
+        }
+      case 'K':
+      {
+        extractInt(cs);
+        String s = (String) values[p++];
+        vw.writeString(s);
+        break;
+      }
+      case 'b':
+        {
+          extractInt(cs);
+          byte[] bb = (byte[]) values[p++];
+          vw.writeBytes(bb);
+          break;
+        }
+      case 'f':
+        {
+          extractInt(cs);
+          byte[] bb = (byte[]) values[p++];
+          vw.writeFixed(bb);
+          break;
+        }
+      case 'e':
+        {
+          int e = extractInt(cs);
+          vw.writeEnum(e);
+          break;
+        }
+      case '[':
+        vw.writeArrayStart();
+        break;
+      case ']':
+        vw.writeArrayEnd();
+        break;
+      case '{':
+        vw.writeMapStart();
+        break;
+      case '}':
+        vw.writeMapEnd();
+        break;
+      case 'c':
+        vw.setItemCount(extractInt(cs));
+        break;
+      case 's':
+        vw.startItem();
+        break;
+      case 'U':
+        {
+          vw.writeIndex(extractInt(cs));
+          break;
+        }
+      default:
+        fail();
+        break;
+      }
+    }
+  }
+
+  public static Object[] randomValues(String calls) {
+    Random r = new Random(0L);
+    InputScanner cs = new InputScanner(calls.toCharArray());
+    List<Object> result = new ArrayList<Object>();
+    while (! cs.isDone()) {
+      char c = cs.cur();
+      cs.next();
+      switch (c) {
+      case 'N':
+        break;
+      case 'B':
+        result.add(r.nextBoolean());
+        break;
+      case 'I':
+        result.add(r.nextInt());
+        break;
+      case 'L':
+        result.add(new Long(r.nextInt()));
+        break;
+      case 'F':
+        result.add(new Float(r.nextInt()));
+        break;
+      case 'D':
+        result.add(new Double(r.nextInt()));
+        break;
+      case 'S':
+      case 'K':
+        result.add(nextString(r, extractInt(cs)));
+        break;
+      case 'b':
+      case 'f':
+        result.add(nextBytes(r, extractInt(cs)));
+        break;
+      case 'e':
+      case 'c':
+      case 'U':
+        extractInt(cs);
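+        // intentional fall-through: 'e', 'c' and 'U' consume their integer
+        // argument from the mnemonic string but contribute no random value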
+      case '[':
+      case ']':
+      case '{':
+      case '}':
+      case 's':
+        break;
+      default:
+        fail();
+        break;
+      }
+    }
+    return result.toArray();
+  }
+
+  private static int extractInt(InputScanner sc) {
+    int r = 0;
+    while (! sc.isDone()) {
+      if (Character.isDigit(sc.cur())) {
+        r = r * 10 + sc.cur() - '0';
+        sc.next();
+      } else {
+        break;
+      }
+    }
+    return r;
+  }
+
+  private static byte[] nextBytes(Random r, int length) {
+    byte[] bb = new byte[length];
+    r.nextBytes(bb);
+    return bb;
+  }
+
+  private static String nextString(Random r, int length) {
+    char[] cc = new char[length];
+    for (int i = 0; i < length; i++) {
+      cc[i] = (char) ('A' + r.nextInt(26));
+    }
+    return new String(cc);
+  }
+
+  private static void check(Schema sc, byte[] bytes, String calls,
+      Object[] values, final int skipLevel, Encoding encoding)
+    throws IOException {
+    // dump(bytes);
+    // System.out.println(new String(bytes, "UTF-8"));
+    Decoder bvi = null;
+    switch (encoding) {
+    case BINARY:
+    case BLOCKING_BINARY:
+      bvi = DecoderFactory.get().binaryDecoder(bytes, null);
+      break;
+    case JSON:
+      InputStream in = new ByteArrayInputStream(bytes);
+      bvi = new JsonDecoder(sc, in);
+    }
+    Decoder vi = new ValidatingDecoder(sc, bvi);
+    check(vi, calls, values, skipLevel);
+  }
+  
+  public static void check(Decoder vi, String calls,
+      Object[] values, final int skipLevel) throws IOException {
+    InputScanner cs = new InputScanner(calls.toCharArray());
+    int p = 0;
+    int level = 0;
+    long[] counts = new long[100];
+    boolean[] isArray = new boolean[100];
+    boolean[] isEmpty = new boolean[100];
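+    // Bookkeeping for nested containers: level is the current nesting depth,
+    // counts[level] is the number of items left in the current block, and
+    // isEmpty[level] records whether the container started with zero items.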
+    while (! cs.isDone()) {
+      final char c = cs.cur();
+      cs.next();
+      switch (c) {
+      case 'N':
+        vi.readNull();
+        break;
+      case 'B':
+        assertEquals(values[p++], vi.readBoolean());
+        break;
+      case 'I':
+        assertEquals(values[p++], vi.readInt());
+        break;
+      case 'L':
+        assertEquals(values[p++], vi.readLong());
+        break;
+      case 'F':
+        if (!(values[p] instanceof Float)) fail();
+        float f = (Float) values[p++];
+        assertEquals(f, vi.readFloat(), Math.abs(f / 1000));
+        break;
+      case 'D':
+        if (!(values[p] instanceof Double)) fail();
+        double d = (Double) values[p++];
+        assertEquals(d, vi.readDouble(), Math.abs(d / 1000));
+        break;
+      case 'S':
+        extractInt(cs);
+        if (level == skipLevel) {
+          vi.skipString();
+          p++;
+        } else {
+          String s = (String) values[p++];
+          assertEquals(new Utf8(s), vi.readString(null));
+        }
+        break;
+      case 'K':
+        extractInt(cs);
+        if (level == skipLevel) {
+          vi.skipString();
+          p++;
+        } else {
+          String s = (String) values[p++];
+          assertEquals(new Utf8(s), vi.readString(null));
+        }
+        break;
+      case 'b':
+        extractInt(cs);
+        if (level == skipLevel) {
+          vi.skipBytes();
+          p++;
+        } else {
+          byte[] bb = (byte[]) values[p++];
+          ByteBuffer bb2 = vi.readBytes(null);
+          byte[] actBytes = new byte[bb2.remaining()];
+          System.arraycopy(bb2.array(), bb2.position(), actBytes,
+              0, bb2.remaining());
+          assertArrayEquals(bb, actBytes);
+        }
+        break;
+      case 'f':
+        {
+          int len = extractInt(cs);
+          if (level == skipLevel) {
+            vi.skipFixed(len);
+            p++;
+          } else {
+            byte[] bb = (byte[]) values[p++];
+            byte[] actBytes = new byte[len];
+            vi.readFixed(actBytes);
+            assertArrayEquals(bb, actBytes);
+          }
+        }
+        break;
+      case 'e':
+      {
+        int e = extractInt(cs);
+        if (level == skipLevel) {
+          vi.readEnum();
+        } else {
+          assertEquals(e, vi.readEnum());
+        }
+      }
+      break;
+      case '[':
+        if (level == skipLevel) {
+          p += skip(cs, vi, true);
+          break;
+        } else {
+          level++;
+          counts[level] = vi.readArrayStart();
+          isArray[level] = true;
+          isEmpty[level] = counts[level] == 0;
+          continue;
+        }
+      case '{':
+        if (level == skipLevel) {
+          p += skip(cs, vi, false);
+          break;
+        } else {
+          level++;
+          counts[level] = vi.readMapStart();
+          isArray[level] = false;
+          isEmpty[level] = counts[level] == 0;
+          continue;
+        }
+      case ']':
+        assertEquals(0, counts[level]);
+        if (! isEmpty[level]) {
+          assertEquals(0, vi.arrayNext());
+        }
+        level--;
+        break;
+      case '}':
+        assertEquals(0, counts[level]);
+        if (! isEmpty[level]) {
+          assertEquals(0, vi.mapNext());
+        }
+        level--;
+        break;
+      case 's':
+        if (counts[level] == 0) {
+          if (isArray[level]) {
+            counts[level] = vi.arrayNext();
+          } else {
+            counts[level] = vi.mapNext();
+          }
+        }
+        counts[level]--;
+        continue;
+      case 'c':
+        extractInt(cs);
+        continue;
+      case 'U':
+        {
+          int idx = extractInt(cs);
+          assertEquals(idx, vi.readIndex());
+          continue;
+        }
+      case 'R':
+          ((ResolvingDecoder) vi).readFieldOrder();
+          continue;
+      default:
+        fail();
+      }
+    }
+    assertEquals(values.length, p);
+  }
+
+  private static int skip(InputScanner cs, Decoder vi, boolean isArray)
+    throws IOException {
+    final char end = isArray ? ']' : '}';
+    if (isArray) {
+      assertEquals(0, vi.skipArray());
+    } else {
+      assertEquals(0, vi.skipMap());
+    }
+    int level = 0;
+    int p = 0;
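+    // Scan forward to the matching close bracket, counting the leaf values
+    // skipped so the caller can advance its position in the values array.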
+    while (! cs.isDone()) {
+      char c = cs.cur();
+      cs.next();
+      switch (c) {
+      case '[':
+      case '{':
+        ++level;
+        break;
+      case ']':
+      case '}':
+        if (c == end && level == 0) {
+          return p;
+        }
+        level--;
+        break;
+      case 'B':
+      case 'I':
+      case 'L':
+      case 'F':
+      case 'D':
+      case 'S':
+      case 'K':
+      case 'b':
+      case 'f':
+      case 'e':
+        p++;
+        break;
+      }
+    }
+    throw new RuntimeException("Don't know how to skip");
+  }
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> data() {
+    return Arrays.asList(convertTo2dArray(encodings, skipLevels, testSchemas()));
+  }
+  
+  private static Object[][] encodings = new Object[][] {
+      { Encoding.BINARY }, { Encoding.BLOCKING_BINARY },
+      { Encoding.JSON }
+    }; 
+
+  private static Object[][] skipLevels = new Object[][] {
+      { -1 }, { 0 }, { 1 }, { 2 },
+  };
+  
+  public static Object[][] convertTo2dArray(final Object[][]... values) {
+    ArrayList<Object[]> ret = new ArrayList<Object[]>();
+
+    Iterator<Object[]> iter = cartesian(values);
+    while (iter.hasNext()) {
+      Object[] objects = iter.next();
+      ret.add(objects);
+    }
+    Object[][] retArrays = new Object[ret.size()][];
+    for (int i = 0; i < ret.size(); i++) {
+      retArrays[i] = ret.get(i);
+    }
+    return retArrays;
+  }
+  /**
+   * Returns the Cartesian product of input sequences.
+   */
+  public static Iterator<Object[]> cartesian(final Object[][]... values) {
+    return new Iterator<Object[]>() {
+      private int[] pos = new int[values.length];
+      @Override
+      public boolean hasNext() {
+        return pos[0] < values[0].length;
+      }
+
+      @Override
+      public Object[] next() {
+        Object[][] v = new Object[values.length][];
+        for (int i = 0; i < v.length; i++) {
+          v[i] = values[i][pos[i]];
+        }
+        for (int i = v.length - 1; i >= 0; i--) {
+          if (++pos[i] == values[i].length) {
+            if (i != 0) {
+              pos[i] = 0;
+            }
+          } else {
+            break;
+          }
+        }
+        return concat(v);
+      }
+
+      @Override
+      public void remove() {
+        throw new UnsupportedOperationException();
+      }
+    };
+  }
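+
+  // Illustrative behaviour of cartesian(): given {{A},{B}} and {{1},{2}} it
+  // yields the rows {A,1}, {A,2}, {B,1}, {B,2}; the rightmost index advances
+  // fastest, odometer-style, until the leftmost index overflows.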
+  
+  /**
+   * Concatenates the input sequences in order and forms a longer sequence.
+   */
+  public static Object[] concat(Object[]... oo) {
+    int l = 0;
+    for (Object[] o : oo) {
+      l += o.length;
+    }
+    Object[] result = new Object[l];
+    l = 0;
+    for (Object[] o : oo) {
+      System.arraycopy(o, 0, result, l, o.length);
+      l += o.length;
+    }
+    return result;
+  }
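+
+  // e.g. concat(new Object[]{1, 2}, new Object[]{3}) returns {1, 2, 3}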
+
+  /**
+   * Pastes the incoming tables side by side to form a wider table. All
+   * incoming tables must be of the same height.
+   */
+  static Object[][] paste(Object[][]... in) {
+    Object[][] result = new Object[in[0].length][];
+    Object[][] cc = new Object[in.length][];
+    for (int i = 0; i < result.length; i++) {
+      for (int j = 0; j < cc.length; j++) {
+        cc[j] = in[j][i];
+      }
+      result[i] = concat(cc);
+    }
+    return result;
+  }
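+
+  // e.g. paste({{a1},{a2}}, {{b1},{b2}}) returns {{a1,b1},{a2,b2}}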
+
+  public static Object[][] testSchemas() {
+    /**
+     * The first argument is a schema.
+     * The second one is a sequence of (single character) mnemonics:
+     * N  null
+     * B  boolean
+     * I  int
+     * L  long
+     * F  float
+     * D  double
+     * K followed by integer - key-name (and its length) in a map
+     * S followed by integer - string and its length
+     * b followed by integer - bytes and length
+     * f followed by integer - fixed and length
+     * c  Number of items to follow in an array/map.
+     * U followed by integer - Union and its branch
+     * e followed by integer - Enum and its value
+     * [  Start array
+     * ]  End array
+     * {  Start map
+     * }  End map
+     * s  start item
+     */
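+    // Worked example: the schema {"type":"array","items":"int"} paired with
+    // the calls string "[c2sIsI]" drives writeArrayStart(), setItemCount(2),
+    // startItem(), writeInt(..), startItem(), writeInt(..), writeArrayEnd().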
+    return new Object[][] {
+        { "\"null\"", "N" },
+        { "\"boolean\"", "B" },
+        { "\"int\"", "I" },
+        { "\"long\"", "L" },
+        { "\"float\"", "F" },
+        { "\"double\"", "D" },
+        { "\"string\"", "S0" },
+        { "\"string\"", "S10" },
+        { "\"bytes\"", "b0" },
+        { "\"bytes\"", "b10" },
+        { "{\"type\":\"fixed\", \"name\":\"fi\", \"size\": 1}", "f1" },
+        { "{\"type\":\"fixed\", \"name\":\"fi\", \"size\": 10}", "f10" },
+        { "{\"type\":\"enum\", \"name\":\"en\", \"symbols\":[\"v1\", \"v2\"]}",
+            "e1" },
+
+        { "{\"type\":\"array\", \"items\": \"boolean\"}", "[]", },
+        { "{\"type\":\"array\", \"items\": \"int\"}", "[]", },
+        { "{\"type\":\"array\", \"items\": \"long\"}", "[]", },
+        { "{\"type\":\"array\", \"items\": \"float\"}", "[]", },
+        { "{\"type\":\"array\", \"items\": \"double\"}", "[]", },
+        { "{\"type\":\"array\", \"items\": \"string\"}", "[]", },
+        { "{\"type\":\"array\", \"items\": \"bytes\"}", "[]", },
+        { "{\"type\":\"array\", \"items\":{\"type\":\"fixed\", "
+          + "\"name\":\"fi\", \"size\": 10}}", "[]" },
+
+        { "{\"type\":\"array\", \"items\": \"boolean\"}", "[c1sB]" },
+        { "{\"type\":\"array\", \"items\": \"int\"}", "[c1sI]" },
+        { "{\"type\":\"array\", \"items\": \"long\"}", "[c1sL]" },
+        { "{\"type\":\"array\", \"items\": \"float\"}", "[c1sF]" },
+        { "{\"type\":\"array\", \"items\": \"double\"}", "[c1sD]" },
+        { "{\"type\":\"array\", \"items\": \"string\"}", "[c1sS10]" },
+        { "{\"type\":\"array\", \"items\": \"bytes\"}", "[c1sb10]" },
+        { "{\"type\":\"array\", \"items\": \"int\"}", "[c1sIc1sI]" },
+        { "{\"type\":\"array\", \"items\": \"int\"}", "[c2sIsI]" },
+        { "{\"type\":\"array\", \"items\":{\"type\":\"fixed\", "
+          + "\"name\":\"fi\", \"size\": 10}}", "[c2sf10sf10]" },
+
+        { "{\"type\":\"map\", \"values\": \"boolean\"}", "{}" },
+        { "{\"type\":\"map\", \"values\": \"int\"}", "{}" },
+        { "{\"type\":\"map\", \"values\": \"long\"}", "{}" },
+        { "{\"type\":\"map\", \"values\": \"float\"}", "{}" },
+        { "{\"type\":\"map\", \"values\": \"double\"}", "{}" },
+        { "{\"type\":\"map\", \"values\": \"string\"}", "{}" },
+        { "{\"type\":\"map\", \"values\": \"bytes\"}", "{}" },
+        { "{\"type\":\"map\", \"values\": "
+          + "{\"type\":\"array\", \"items\":\"int\"}}", "{}" },
+
+        { "{\"type\":\"map\", \"values\": \"boolean\"}", "{c1sK5B}" },
+        { "{\"type\":\"map\", \"values\": \"int\"}", "{c1sK5I}" },
+        { "{\"type\":\"map\", \"values\": \"long\"}", "{c1sK5L}" },
+        { "{\"type\":\"map\", \"values\": \"float\"}", "{c1sK5F}" },
+        { "{\"type\":\"map\", \"values\": \"double\"}", "{c1sK5D}" },
+        { "{\"type\":\"map\", \"values\": \"string\"}", "{c1sK5S10}" },
+        { "{\"type\":\"map\", \"values\": \"bytes\"}", "{c1sK5b10}" },
+        { "{\"type\":\"map\", \"values\": "
+          + "{\"type\":\"array\", \"items\":\"int\"}}", "{c1sK5[c3sIsIsI]}" },
+
+        { "{\"type\":\"map\", \"values\": \"boolean\"}",
+            "{c1sK5Bc2sK5BsK5B}" },
+
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f\", \"type\":\"boolean\"}]}", "B" },
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f\", \"type\":\"int\"}]}", "I" },
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f\", \"type\":\"long\"}]}", "L" },
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f\", \"type\":\"float\"}]}", "F" },
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f\", \"type\":\"double\"}]}", "D" },
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f\", \"type\":\"string\"}]}", "S10" },
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f\", \"type\":\"bytes\"}]}", "b10" },
+
+        // multi-field records
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f1\", \"type\":\"int\"},"
+          + "{\"name\":\"f2\", \"type\":\"double\"},"
+          + "{\"name\":\"f3\", \"type\":\"string\"}]}", "IDS10" },
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f0\", \"type\":\"null\"},"
+          + "{\"name\":\"f1\", \"type\":\"boolean\"},"
+          + "{\"name\":\"f2\", \"type\":\"int\"},"
+          + "{\"name\":\"f3\", \"type\":\"long\"},"
+          + "{\"name\":\"f4\", \"type\":\"float\"},"
+          + "{\"name\":\"f5\", \"type\":\"double\"},"
+          + "{\"name\":\"f6\", \"type\":\"string\"},"
+          + "{\"name\":\"f7\", \"type\":\"bytes\"}]}",
+            "NBILFDS10b25" },
+        
+        // record of records
+        { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+          + "{\"name\":\"f1\", \"type\":{\"type\":\"record\", "
+          + "\"name\":\"inner\", \"fields\":["
+          + "{\"name\":\"g1\", \"type\":\"int\"}, {\"name\":\"g2\", "
+          + "\"type\":\"double\"}]}},"
+          + "{\"name\":\"f2\", \"type\":\"string\"},"
+          + "{\"name\":\"f3\", \"type\":\"inner\"}]}",
+          "IDS10ID" },
+        // record with array
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f1\", \"type\":\"long\"},"
+          + "{\"name\":\"f2\", "
+          + "\"type\":{\"type\":\"array\", \"items\":\"int\"}}]}",
+          "L[c1sI]" },
+
+        // record with map
+        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f1\", \"type\":\"long\"},"
+          + "{\"name\":\"f2\", "
+          + "\"type\":{\"type\":\"map\", \"values\":\"int\"}}]}",
+          "L{c1sK5I}" },
+
+        // array of records
+        { "{\"type\":\"array\", \"items\":"
+            + "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f1\", \"type\":\"long\"},"
+          + "{\"name\":\"f2\", \"type\":\"null\"}]}}",
+            "[c2sLNsLN]" },
+
+        { "{\"type\":\"array\", \"items\":"
+            + "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f1\", \"type\":\"long\"},"
+          + "{\"name\":\"f2\", "
+          + "\"type\":{\"type\":\"array\", \"items\":\"int\"}}]}}",
+            "[c2sL[c1sI]sL[c2sIsI]]" },
+        { "{\"type\":\"array\", \"items\":"
+            + "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f1\", \"type\":\"long\"},"
+          + "{\"name\":\"f2\", "
+          + "\"type\":{\"type\":\"map\", \"values\":\"int\"}}]}}",
+            "[c2sL{c1sK5I}sL{c2sK5IsK5I}]" },
+        { "{\"type\":\"array\", \"items\":"
+            + "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+          + "{\"name\":\"f1\", \"type\":\"long\"},"
+          + "{\"name\":\"f2\", "
+          + "\"type\":[\"null\", \"int\"]}]}}",
+            "[c2sLU0NsLU1I]" },
+
+        { "[\"boolean\"]", "U0B" },
+        { "[\"int\"]", "U0I" },
+        { "[\"long\"]", "U0L" },
+        { "[\"float\"]", "U0F" },
+        { "[\"double\"]", "U0D" },
+        { "[\"string\"]", "U0S10" },
+        { "[\"bytes\"]", "U0b10" },
+
+        { "[\"null\", \"int\"]", "U0N" },
+        { "[\"boolean\", \"int\"]", "U0B" },
+        { "[\"boolean\", \"int\"]", "U1I" },
+        { "[\"boolean\", {\"type\":\"array\", \"items\":\"int\"} ]",
+          "U0B" },
+
+        { "[\"boolean\", {\"type\":\"array\", \"items\":\"int\"} ]",
+            "U1[c1sI]" },
+          
+        // Recursion
+        { "{\"type\": \"record\", \"name\": \"Node\", \"fields\": ["
+          + "{\"name\":\"label\", \"type\":\"string\"},"
+          + "{\"name\":\"children\", \"type\":"
+          + "{\"type\": \"array\", \"items\": \"Node\" }}]}",
+          "S10[c1sS10[]]" },
+          
+        { "{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
+          + "{\"name\":\"value\", \"type\":[\"null\", \"string\","
+          + "{\"type\": \"record\", \"name\": \"Cons\", \"fields\": ["
+          + "{\"name\":\"car\", \"type\":\"Lisp\"},"
+          + "{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}",
+          "U0N"},
+        { "{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
+          + "{\"name\":\"value\", \"type\":[\"null\", \"string\","
+          + "{\"type\": \"record\", \"name\": \"Cons\", \"fields\": ["
+          + "{\"name\":\"car\", \"type\":\"Lisp\"},"
+          + "{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}",
+          "U1S10"},
+        { "{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
+          + "{\"name\":\"value\", \"type\":[\"null\", \"string\","
+          + "{\"type\": \"record\", \"name\": \"Cons\", \"fields\": ["
+          + "{\"name\":\"car\", \"type\":\"Lisp\"},"
+          + "{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}",
+          "U2U1S10U0N"},
+          
+        // Deep recursion
+        { "{\"type\": \"record\", \"name\": \"Node\", \"fields\": ["
+          + "{\"name\":\"children\", \"type\":"
+          + "{\"type\": \"array\", \"items\": \"Node\" }}]}",
+          "[c1s[c1s[c1s[c1s[c1s[c1s[c1s[c1s[c1s[c1s[c1s[]]]]]]]]]]]]" },
+              
+    };
+  }
+  
+  static void dump(byte[] bb) {
+    int col = 0;
+    for (byte b : bb) {
+      if (col % 16 == 0) {
+        System.out.println();
+      }
+      col++;
+      System.out.print(Integer.toHexString(b & 0xff) + " ");
+    }
+    System.out.println();
+  }
+
+  static void print(Encoding encoding, int skipLevel, Schema writerSchema, 
+      Schema readerSchema, Object[] writtenValues, Object[] expectedValues) {
+    LOG.debug("{} Skip Level {}", encoding, skipLevel); 
+    printSchemaAndValues("Writer", writerSchema, writtenValues);
+    printSchemaAndValues("Reader", readerSchema, expectedValues);
+  }
+
+  private static void printSchemaAndValues(String schemaType, Schema schema, Object[] values) {
+    LOG.debug("{} Schema {}", schemaType, schema); 
+    for (Object value : values) {
+      LOG.debug("{} -> {}", value, value.getClass().getSimpleName());
+    }
+  }  
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator.java b/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator.java
new file mode 100644
index 0000000..142d104
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator.java
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io.parsing;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Arrays;
+import java.util.Collection;
+
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.Schema;
+import org.apache.avro.SchemaBuilder;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecordBuilder;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+@RunWith(Parameterized.class)
+public class TestResolvingGrammarGenerator {
+  private final Schema schema;
+  private final JsonNode data;
+  
+  public TestResolvingGrammarGenerator(String jsonSchema, String jsonData)
+    throws IOException {
+    this.schema = Schema.parse(jsonSchema);
+    JsonFactory factory = new JsonFactory();
+    ObjectMapper mapper = new ObjectMapper(factory);
+
+    this.data = mapper.readTree(new StringReader(jsonData));
+  }
+
+  @Test
+  public void test() throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    EncoderFactory factory = EncoderFactory.get();
+    Encoder e = factory.validatingEncoder(schema, 
+        factory.binaryEncoder(baos, null));
+    
+    ResolvingGrammarGenerator.encode(e, schema, data);
+    e.flush();
+  }
+
+  @Test
+  public void testRecordMissingRequiredFieldError() throws Exception {
+    Schema schemaWithoutField = SchemaBuilder
+        .record("MyRecord").namespace("ns")
+        .fields()
+          .name("field1").type().stringType().noDefault()
+        .endRecord();
+    Schema schemaWithField = SchemaBuilder
+        .record("MyRecord").namespace("ns")
+        .fields()
+          .name("field1").type().stringType().noDefault()
+          .name("field2").type().stringType().noDefault()
+        .endRecord();
+    GenericData.Record record = new GenericRecordBuilder(schemaWithoutField).set("field1", "someValue").build();
+    byte[] data = writeRecord(schemaWithoutField, record);
+    try {
+      readRecord(schemaWithField, data);
+      Assert.fail("Expected exception not thrown");
+    } catch (AvroTypeException typeException) {
+      Assert.assertEquals("Incorrect exception message",
+          "Found ns.MyRecord, expecting ns.MyRecord, missing required field field2", typeException.getMessage());
+    }
+  }
+  
+  @Parameterized.Parameters
+  public static Collection<Object[]> data() {
+    Collection<Object[]> ret = Arrays.asList(
+        new Object[][] {
+            { "{ \"type\": \"record\", \"name\": \"r\", \"fields\": [ "
+                + " { \"name\" : \"f1\", \"type\": \"int\" }, "
+                + " { \"name\" : \"f2\", \"type\": \"float\" } "
+                + "] }",
+              "{ \"f2\": 10.4, \"f1\": 10 } " },
+            { "{ \"type\": \"enum\", \"name\": \"e\", \"symbols\": "
+                + "[ \"s1\", \"s2\"] }", " \"s1\" " },
+            { "{ \"type\": \"enum\", \"name\": \"e\", \"symbols\": "
+                + "[ \"s1\", \"s2\"] }", " \"s2\" " },
+            { "{ \"type\": \"fixed\", \"name\": \"f\", \"size\": 10 }",
+              "\"hello\"" },
+            { "{ \"type\": \"array\", \"items\": \"int\" }",
+              "[ 10, 20, 30 ]" },
+            { "{ \"type\": \"map\", \"values\": \"int\" }",
+              "{ \"k1\": 10, \"k2\": 20, \"k3\": 30 }" },
+            { "[ \"int\", \"long\" ]", "10" },
+            { "\"string\"", "\"hello\"" },
+            { "\"bytes\"", "\"hello\"" },
+            { "\"int\"", "10" },
+            { "\"long\"", "10" },
+            { "\"float\"", "10.0" },
+            { "\"double\"", "10.0" },
+            { "\"boolean\"", "true" },
+            { "\"boolean\"", "false" },
+            { "\"null\"", "null" },
+            }
+        );
+    return ret;
+  }
+
+  private byte[] writeRecord(Schema schema, GenericData.Record record) throws Exception {
+    ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
+    GenericDatumWriter<GenericData.Record> datumWriter = new GenericDatumWriter<GenericData.Record>(schema);
+    DataFileWriter<GenericData.Record> writer = new DataFileWriter<GenericData.Record>(datumWriter);
+    try {
+      writer.create(schema, byteStream);
+      writer.append(record);
+    } finally {
+      writer.close();
+    }
+    return byteStream.toByteArray();
+  }
+
+  private GenericData.Record readRecord(Schema schema, byte[] data) throws Exception {
+    ByteArrayInputStream byteStream = new ByteArrayInputStream(data);
+    GenericDatumReader<GenericData.Record> datumReader = new GenericDatumReader<GenericData.Record>(schema);
+    DataFileStream<GenericData.Record> reader = new DataFileStream<GenericData.Record>(byteStream, datumReader);
+    try {
+      return reader.next();
+    } finally {
+      reader.close();
+    }
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator2.java b/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator2.java
new file mode 100644
index 0000000..1b5ac6f
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/parsing/TestResolvingGrammarGenerator2.java
@@ -0,0 +1,143 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io.parsing;
+
+import java.util.Arrays;
+import org.apache.avro.SchemaBuilder;
+import org.apache.avro.SchemaValidationException;
+import org.apache.avro.SchemaValidatorBuilder;
+import org.apache.avro.Schema;
+import org.junit.Assert;
+import org.junit.Test;
+
+/** ResolvingGrammarGenerator tests that are not Parameterized. */
+public class TestResolvingGrammarGenerator2 {
+  @Test public void testFixed() throws java.io.IOException {
+    new ResolvingGrammarGenerator().generate
+      (Schema.createFixed("MyFixed", null, null, 10),
+       Schema.create(Schema.Type.BYTES));
+    new ResolvingGrammarGenerator().generate
+      (Schema.create(Schema.Type.BYTES),
+       Schema.createFixed("MyFixed", null, null, 10));
+  }
+
+  Schema point2dFullname = SchemaBuilder.record("Point").namespace("written")
+      .fields()
+      .requiredDouble("x")
+      .requiredDouble("y")
+      .endRecord();
+
+  Schema point3dNoDefault = SchemaBuilder.record("Point").fields()
+      .requiredDouble("x")
+      .requiredDouble("y")
+      .requiredDouble("z")
+      .endRecord();
+
+  Schema point2d = SchemaBuilder.record("Point2D")
+      .fields()
+      .requiredDouble("x")
+      .requiredDouble("y")
+      .endRecord();
+
+  Schema point3d = SchemaBuilder.record("Point3D").fields()
+      .requiredDouble("x")
+      .requiredDouble("y")
+      .name("z").type().doubleType().doubleDefault(0.0)
+      .endRecord();
+
+  Schema point3dMatchName = SchemaBuilder.record("Point").fields()
+      .requiredDouble("x")
+      .requiredDouble("y")
+      .name("z").type().doubleType().doubleDefault(0.0)
+      .endRecord();
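+
+  // The tests below probe the precedence rules of union resolution: a
+  // full-name match beats a short-name match, which beats the first
+  // structural match (reflected in the expected rindex values).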
+
+  @Test(expected=SchemaValidationException.class)
+  public void testUnionResolutionNoStructureMatch() throws Exception {
+    // there is a short name match, but the structure does not match
+    Schema read = Schema.createUnion(Arrays.asList(
+        Schema.create(Schema.Type.NULL),
+        point3dNoDefault));
+
+    new SchemaValidatorBuilder().canBeReadStrategy().validateAll()
+        .validate(point2dFullname, Arrays.asList(read));
+  }
+
+  @Test
+  public void testUnionResolutionFirstStructureMatch2d() throws Exception {
+    // multiple structure matches with no short or full name matches
+    Schema read = Schema.createUnion(Arrays.asList(
+        Schema.create(Schema.Type.NULL),
+        point3dNoDefault, point2d, point3d));
+
+    Symbol grammar = new ResolvingGrammarGenerator().generate(
+        point2dFullname, read);
+    Assert.assertTrue(grammar.production[1] instanceof Symbol.UnionAdjustAction);
+
+    Symbol.UnionAdjustAction action = (Symbol.UnionAdjustAction)
+        grammar.production[1];
+    Assert.assertEquals(2, action.rindex);
+  }
+
+  @Test
+  public void testUnionResolutionFirstStructureMatch3d() throws Exception {
+    // multiple structure matches with no short or full name matches
+    Schema read = Schema.createUnion(Arrays.asList(
+        Schema.create(Schema.Type.NULL),
+        point3dNoDefault, point3d, point2d));
+
+    Symbol grammar = new ResolvingGrammarGenerator().generate(
+        point2dFullname, read);
+    Assert.assertTrue(grammar.production[1] instanceof Symbol.UnionAdjustAction);
+
+    Symbol.UnionAdjustAction action = (Symbol.UnionAdjustAction)
+        grammar.production[1];
+    Assert.assertEquals(2, action.rindex);
+  }
+
+  @Test
+  public void testUnionResolutionNamedStructureMatch() throws Exception {
+    // multiple structure matches with a short name match
+    Schema read = Schema.createUnion(Arrays.asList(
+        Schema.create(Schema.Type.NULL),
+        point2d, point3dMatchName, point3d));
+
+    Symbol grammar = new ResolvingGrammarGenerator().generate(
+        point2dFullname, read);
+    Assert.assertTrue(grammar.production[1] instanceof Symbol.UnionAdjustAction);
+
+    Symbol.UnionAdjustAction action = (Symbol.UnionAdjustAction)
+        grammar.production[1];
+    Assert.assertEquals(2, action.rindex);
+  }
+
+  @Test
+  public void testUnionResolutionFullNameMatch() throws Exception {
+    // there is a full name match, so it should be chosen
+    Schema read = Schema.createUnion(Arrays.asList(
+        Schema.create(Schema.Type.NULL),
+        point2d, point3dMatchName, point3d, point2dFullname));
+
+    Symbol grammar = new ResolvingGrammarGenerator().generate(
+        point2dFullname, read);
+    Assert.assertTrue(grammar.production[1] instanceof Symbol.UnionAdjustAction);
+
+    Symbol.UnionAdjustAction action = (Symbol.UnionAdjustAction)
+        grammar.production[1];
+    Assert.assertEquals(4, action.rindex);
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestByteBuffer.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestByteBuffer.java
new file mode 100644
index 0000000..e48fd14
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestByteBuffer.java
@@ -0,0 +1,138 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.reflect;
+
+import static org.junit.Assert.*;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.math.BigInteger;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.Iterator;
+
+import org.apache.avro.AvroTestUtil;
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.file.SeekableByteArrayInput;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.reflect.ReflectDatumWriter;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestByteBuffer {
+  static class X{
+    String name = "";
+    ByteBuffer content;
+  }
+  File content;
+
+  @Before public void before() throws IOException{
+    File tmpdir = AvroTestUtil.tempDirectory(getClass(), "content");
+    content = new File(tmpdir,"test-content");
+    FileOutputStream out = new FileOutputStream(content);
+    for(int i=0;i<100000;i++){
+      out.write("hello world\n".getBytes());
+    }
+    out.close();
+  }
+
+  @Test public void test() throws Exception{
+    Schema schema = ReflectData.get().getSchema(X.class);
+    ByteArrayOutputStream bout = new ByteArrayOutputStream();
+    writeOneXAsAvro(schema, bout);
+    X record = readOneXFromAvro(schema, bout);
+
+    String expected = getmd5(content);
+    String actual = getmd5(record.content);
+    assertEquals("md5 for result differed from input",expected,actual);
+  }
+
+  private X readOneXFromAvro(Schema schema, ByteArrayOutputStream bout)
+    throws IOException {
+    SeekableByteArrayInput input = new SeekableByteArrayInput(bout.toByteArray());
+    ReflectDatumReader<X> datumReader = new ReflectDatumReader<X>(schema);
+    FileReader<X> reader = DataFileReader.openReader(input, datumReader);
+    Iterator<X> it = reader.iterator();
+    assertTrue("missing first record",it.hasNext());
+    X record = it.next();
+    assertFalse("should be no more records - only wrote one out",it.hasNext());
+    return record;
+  }
+
+  private void writeOneXAsAvro(Schema schema, ByteArrayOutputStream bout)
+    throws IOException, FileNotFoundException {
+    DatumWriter<X> datumWriter = new ReflectDatumWriter<X>(schema);
+    DataFileWriter<X> writer = new DataFileWriter<X>(datumWriter);
+    writer.create(schema, bout);
+    X x = new X();
+    x.name = "xxx";
+    FileInputStream fis = new FileInputStream(content);
+    try{
+      FileChannel channel = fis.getChannel();
+      try{
+        long contentLength = content.length();
+        //set the content to be a file channel.
+        ByteBuffer buffer = channel.map(FileChannel.MapMode.READ_ONLY, 0, contentLength);
+        x.content = buffer;
+        writer.append(x);
+      }finally{
+        channel.close();
+      }
+    }finally{
+      fis.close();
+    }
+    writer.flush();
+    writer.close();
+  }
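+
+  // Note: x.content above is a read-only MappedByteBuffer backed by the file,
+  // so the record is serialized without first copying the whole file into a
+  // heap byte array.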
+
+  private String getmd5(File file) throws Exception{
+    FileInputStream fis = new FileInputStream(file);
+    try{
+      FileChannel channel = fis.getChannel();
+      try{
+        long contentLength = file.length();
+        ByteBuffer buffer = channel.map(FileChannel.MapMode.READ_ONLY, 0, contentLength);
+        return getmd5(buffer);
+      }finally{
+        channel.close();
+      }
+    }finally{
+      fis.close();
+    }
+  }
+
+  String getmd5(ByteBuffer buffer) throws NoSuchAlgorithmException{
+    MessageDigest mdEnc = MessageDigest.getInstance("MD5");
+    mdEnc.reset();
+    mdEnc.update(buffer);
+    return new BigInteger(1, mdEnc.digest()).toString(16);
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java
new file mode 100644
index 0000000..41f508c
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java
@@ -0,0 +1,509 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map.Entry;
+
+import static org.junit.Assert.*;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.file.SeekableByteArrayInput;
+import org.apache.avro.generic.GenericArray;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.reflect.ReflectDatumWriter;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.util.Utf8;
+import org.junit.Test;
+
+/**
+ * Test serialization and de-serialization of non-string map-keys
+ */
+public class TestNonStringMapKeys {
+
+  @Test
+  public void testNonStringMapKeys() throws Exception {
+
+    Company entityObj1 = buildCompany();
+    Company entityObj2 = buildCompany();
+
+    String testType = "NonStringKeysTest";
+    Company [] entityObjs = {entityObj1, entityObj2};
+    byte[] bytes = testSerialization(testType, entityObj1, entityObj2);
+    List<GenericRecord> records = 
+      (List<GenericRecord>) testGenericDatumRead(testType, bytes, entityObjs);
+
+    GenericRecord record = records.get(0);
+    Object employees = record.get("employees");
+    assertTrue ("Unable to read 'employees' map", employees instanceof GenericArray);
+    GenericArray arrayEmployees = ((GenericArray)employees);
+    Object employeeRecord = arrayEmployees.get(0);
+    assertTrue (employeeRecord instanceof GenericRecord);
+    Object key = ((GenericRecord)employeeRecord).get(ReflectData.NS_MAP_KEY);
+    Object value = ((GenericRecord)employeeRecord).get(ReflectData.NS_MAP_VALUE);
+    assertTrue (key instanceof GenericRecord);
+    assertTrue (value instanceof GenericRecord);
+    // Map stored: 1:Foo, 2:Bar
+    Object id = ((GenericRecord)key).get("id");
+    Object name = ((GenericRecord)value).get("name").toString();
+    assertTrue (
+      (id.equals(1) && name.equals("Foo")) || 
+      (id.equals(2) && name.equals("Bar"))
+    );
+
+    List<Company> records2 =
+      (List<Company>) testReflectDatumRead(testType, bytes, entityObjs);
+    Company co = records2.get(0);
+    log ("Read: " + co);
+    assertNotNull (co.getEmployees());
+    assertEquals (2, co.getEmployees().size());
+    Iterator<Entry<EmployeeId, EmployeeInfo>> itr = co.getEmployees().entrySet().iterator();
+    while (itr.hasNext()) {
+      Entry<EmployeeId, EmployeeInfo> e = itr.next();
+      id = e.getKey().getId();
+      name = e.getValue().getName();
+      assertTrue (
+        (id.equals(1) && name.equals("Foo")) || 
+        (id.equals(2) && name.equals("Bar"))
+      );
+    }
+
+
+    byte[] jsonBytes = testJsonEncoder (testType, entityObj1);
+    assertNotNull ("Unable to serialize using jsonEncoder", jsonBytes);
+    GenericRecord jsonRecord = testJsonDecoder(testType, jsonBytes, entityObj1);
+    assertEquals ("JSON decoder output not same as Binary Decoder", record, jsonRecord);
+  }
+  
+  @Test
+  public void testNonStringMapKeysInNestedMaps() throws Exception {
+
+    Company2 entityObj1 = buildCompany2();
+
+    String testType = "NestedMapsTest";
+    Company2 [] entityObjs = {entityObj1};
+    byte[] bytes = testSerialization(testType, entityObj1);
+    List<GenericRecord> records =
+      (List<GenericRecord>) testGenericDatumRead(testType, bytes, entityObjs);
+
+    GenericRecord record = records.get(0);
+    Object employees = record.get("employees");
+    assertTrue ("Unable to read 'employees' map", employees instanceof GenericArray);
+    GenericArray employeesMapArray = ((GenericArray)employees);
+    
+    Object employeeMapElement = employeesMapArray.get(0);
+    assertTrue (employeeMapElement instanceof GenericRecord);
+    Object key = ((GenericRecord)employeeMapElement).get(ReflectData.NS_MAP_KEY);
+    Object value = ((GenericRecord)employeeMapElement).get(ReflectData.NS_MAP_VALUE);
+    assertEquals (11, key);
+    assertTrue (value instanceof GenericRecord);
+    GenericRecord employeeInfo = (GenericRecord)value;
+    Object name = employeeInfo.get("name").toString();
+    assertEquals ("Foo", name);
+    
+    Object companyMap = employeeInfo.get("companyMap");
+    assertTrue (companyMap instanceof GenericArray);
+    GenericArray companyMapArray = (GenericArray)companyMap;
+    
+    Object companyMapElement = companyMapArray.get(0);
+    assertTrue (companyMapElement instanceof GenericRecord);
+    key = ((GenericRecord)companyMapElement).get(ReflectData.NS_MAP_KEY);
+    value = ((GenericRecord)companyMapElement).get(ReflectData.NS_MAP_VALUE);
+    assertEquals (14, key);
+    if (value instanceof Utf8)
+      value = ((Utf8)value).toString();
+    assertEquals ("CompanyFoo", value);
+    
+    List<Company2> records2 =
+      (List<Company2>) testReflectDatumRead(testType, bytes, entityObjs);
+    Company2 co = records2.get(0);
+    log ("Read: " + co);
+    assertNotNull (co.getEmployees());
+    assertEquals (1, co.getEmployees().size());
+    Iterator<Entry<Integer, EmployeeInfo2>> itr = co.getEmployees().entrySet().iterator();
+    while (itr.hasNext()) {
+      Entry<Integer, EmployeeInfo2> e = itr.next();
+      Integer id = e.getKey();
+      name = e.getValue().getName();
+      assertTrue (id.equals(11) && name.equals("Foo"));
+      assertEquals ("CompanyFoo", e.getValue().companyMap.values().iterator().next());
+    }
+
+
+    byte[] jsonBytes = testJsonEncoder (testType, entityObj1);
+    assertNotNull ("Unable to serialize using jsonEncoder", jsonBytes);
+    GenericRecord jsonRecord = testJsonDecoder(testType, jsonBytes, entityObj1);
+    assertEquals ("JSON decoder output not same as Binary Decoder", record, jsonRecord);
+  }
+
+  @Test
+  public void testRecordNameInvariance() throws Exception {
+
+    SameMapSignature entityObj1 = buildSameMapSignature();
+
+    String testType = "RecordNameInvariance";
+    SameMapSignature [] entityObjs = {entityObj1};
+    byte[] bytes = testSerialization(testType, entityObj1);
+    List<GenericRecord> records =
+      (List<GenericRecord>) testGenericDatumRead(testType, bytes, entityObjs);
+
+    GenericRecord record = records.get(0);
+    Object map1obj = record.get("map1");
+    assertTrue ("Unable to read map1", map1obj instanceof GenericArray);
+    GenericArray map1array = ((GenericArray)map1obj);
+    
+    Object map1element = map1array.get(0);
+    assertTrue (map1element instanceof GenericRecord);
+    Object key = ((GenericRecord)map1element).get(ReflectData.NS_MAP_KEY);
+    Object value = ((GenericRecord)map1element).get(ReflectData.NS_MAP_VALUE);
+    assertEquals (1, key);
+    assertEquals ("Foo", value.toString());
+
+    Object map2obj = record.get("map2");
+    assertEquals (map1obj, map2obj);
+    
+    List<SameMapSignature> records2 =
+      (List<SameMapSignature>) testReflectDatumRead(testType, bytes, entityObjs);
+    SameMapSignature entity = records2.get(0);
+    log ("Read: " + entity);
+    assertNotNull (entity.getMap1());
+    assertEquals (1, entity.getMap1().size());
+    Iterator<Entry<Integer, String>> itr = entity.getMap1().entrySet().iterator();
+    while (itr.hasNext()) {
+      Entry<Integer, String> e = itr.next();
+      key = e.getKey();
+      value = e.getValue();
+      assertEquals (1, key);
+      assertEquals ("Foo", value.toString());
+    }
+    assertEquals (entity.getMap1(), entity.getMap2());
+
+
+    ReflectData rdata = ReflectData.get();
+    Schema schema = rdata.getSchema(SameMapSignature.class);
+    Schema map1schema = schema.getField("map1").schema().getElementType();
+    Schema map2schema = schema.getField("map2").schema().getElementType();
+    log ("Schema for map1 = " + map1schema);
+    log ("Schema for map2 = " + map2schema);
+    assertEquals (map1schema.getFullName(), "org.apache.avro.reflect.PairIntegerString");
+    assertEquals (map1schema, map2schema);
+
+
+    byte[] jsonBytes = testJsonEncoder (testType, entityObj1);
+    assertNotNull ("Unable to serialize using jsonEncoder", jsonBytes);
+    GenericRecord jsonRecord = testJsonDecoder(testType, jsonBytes, entityObj1);
+    assertEquals ("JSON decoder output not same as Binary Decoder", 
+      record.get("map1"), jsonRecord.get("map1"));
+    assertEquals ("JSON decoder output not same as Binary Decoder", 
+      record.get("map2"), jsonRecord.get("map2"));
+  }
+
+  /**
+   * Test serialization of non-string map-key POJOs
+   */
+  public <T> byte[] testSerialization(String testType, T ... entityObjs) throws Exception {
+
+    log ("---- Beginning " + testType + " ----");
+    T entityObj1 = entityObjs[0];
+    ReflectData rdata = ReflectData.AllowNull.get();
+
+    Schema schema = rdata.getSchema(entityObj1.getClass());
+    assertNotNull("Unable to get schema for " + testType, schema);
+    log (schema.toString(true));
+
+    ReflectDatumWriter<T> datumWriter =
+      new ReflectDatumWriter (entityObj1.getClass(), rdata);
+    DataFileWriter<T> fileWriter = new DataFileWriter<T> (datumWriter);
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    fileWriter.create(schema, baos);
+    for (T entityObj : entityObjs) {
+      fileWriter.append(entityObj);
+    }
+    fileWriter.close();
+
+    byte[] bytes = baos.toByteArray();
+    return bytes;
+  }
+
+  /**
+   * Test that non-string map-keys are readable through GenericDatumReader.
+   * This method should read the data as an array of {key, value} records,
+   * not as a map.
+   */
+  private <T> List<GenericRecord> testGenericDatumRead
+    (String testType, byte[] bytes, T ... entityObjs) throws IOException {
+
+    GenericDatumReader<GenericRecord> datumReader =
+      new GenericDatumReader<GenericRecord> ();
+    SeekableByteArrayInput avroInputStream = new SeekableByteArrayInput(bytes);
+    DataFileReader<GenericRecord> fileReader =
+      new DataFileReader<GenericRecord>(avroInputStream, datumReader);
+
+    Schema schema = fileReader.getSchema();
+    assertNotNull("Unable to get schema for " + testType, schema);
+    GenericRecord record = null;
+    List<GenericRecord> records = new ArrayList<GenericRecord> ();
+    while (fileReader.hasNext()) {
+      records.add (fileReader.next(record));
+    }
+    return records;
+  }
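+
+  // Because Avro map keys must be strings, the reflect writer encodes a
+  // HashMap with non-string keys as an Avro array of {key, value} record
+  // pairs; the generic reader above therefore sees a GenericArray, not a Map.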
+
+  /**
+   * Test that non-string map-keys are readable through ReflectDatumReader.
+   * This method should reconstruct the original map and should not return
+   * an array of {key, value} records as {@link #testGenericDatumRead} does.
+   */
+  private <T> List<T> testReflectDatumRead
+    (String testType, byte[] bytes, T ... entityObjs) throws IOException {
+
+    ReflectDatumReader<T> datumReader = new ReflectDatumReader<T> ();
+    SeekableByteArrayInput avroInputStream = new SeekableByteArrayInput(bytes);
+    DataFileReader<T> fileReader = new DataFileReader<T>(avroInputStream, datumReader);
+
+    Schema schema = fileReader.getSchema();
+    T record = null;
+    List<T> records = new ArrayList<T> ();
+    while (fileReader.hasNext()) {
+      records.add (fileReader.next(record));
+    }
+    return records;
+  }
+
+  private <T> byte[] testJsonEncoder
+    (String testType, T entityObj) throws IOException {
+
+    ReflectData rdata = ReflectData.AllowNull.get();
+
+    Schema schema = rdata.getSchema(entityObj.getClass());
+    ByteArrayOutputStream os = new ByteArrayOutputStream();
+    Encoder encoder = EncoderFactory.get().jsonEncoder(schema, os);
+    ReflectDatumWriter<T> datumWriter = new ReflectDatumWriter<T>(schema, rdata);
+    datumWriter.write(entityObj, encoder);
+    encoder.flush();
+
+    byte[] bytes = os.toByteArray();
+    System.out.println ("JSON encoder output:\n" + new String(bytes));
+    return bytes;
+  }
+
+  private <T> GenericRecord testJsonDecoder
+    (String testType, byte[] bytes, T entityObj) throws IOException {
+
+    ReflectData rdata = ReflectData.AllowNull.get();
+
+    Schema schema = rdata.getSchema(entityObj.getClass());
+    GenericDatumReader<GenericRecord> datumReader =
+      new GenericDatumReader<GenericRecord>(schema);
+
+    Decoder decoder = DecoderFactory.get().jsonDecoder(schema, new String(bytes));
+    GenericRecord r = datumReader.read(null, decoder);
+    return r;
+  }
+
+  /**
+   * Create a POJO having non-string map-keys
+   */
+  private Company buildCompany () {
+    Company co = new Company ();
+    HashMap<EmployeeId, EmployeeInfo> employees = new HashMap<EmployeeId, EmployeeInfo>();
+    co.setEmployees(employees);
+    employees.put(new EmployeeId(1), new EmployeeInfo("Foo"));
+    employees.put(new EmployeeId(2), new EmployeeInfo("Bar"));
+    return co;
+  }
+
+  /**
+   * Create a POJO having non-string map-keys
+   * The objects inside that map should also have non-string map-keys
+   */
+  private Company2 buildCompany2 () {
+    Company2 co = new Company2 ();
+    HashMap<Integer, EmployeeInfo2> employees = new HashMap<Integer, EmployeeInfo2>();
+    co.setEmployees(employees);
+    
+    EmployeeInfo2 empInfo = new EmployeeInfo2("Foo");
+    HashMap<Integer, String> companyMap = new HashMap<Integer, String>();
+    empInfo.setCompanyMap(companyMap);
+    companyMap.put(14, "CompanyFoo");
+    
+    employees.put(11, empInfo);
+    
+    return co;
+  }
+
+  private SameMapSignature buildSameMapSignature () {
+    SameMapSignature obj = new SameMapSignature();
+    obj.setMap1(new HashMap<Integer, String>());
+    obj.getMap1().put(1, "Foo");
+    obj.setMap2(new HashMap<Integer, String>());
+    obj.getMap2().put(1, "Foo");
+    return obj;
+  }
+
+  private void log (String msg) {
+    System.out.println (msg);
+  }
+}
+
+class Company {
+  HashMap <EmployeeId, EmployeeInfo> employees;
+
+  public HashMap<EmployeeId, EmployeeInfo> getEmployees() {
+    return employees;
+  }
+  public void setEmployees(HashMap<EmployeeId, EmployeeInfo> employees) {
+    this.employees = employees;
+  }
+  @Override
+  public String toString() {
+    return "Company [employees=" + employees + "]";
+  }
+}
+
+class EmployeeId {
+  Integer id;
+
+  public EmployeeId() {}
+  public EmployeeId(Integer id) {
+    this.id = id;
+  }
+  public Integer getId() {
+    return id;
+  }
+  public void setId(Integer id) {
+    this.id = id;
+  }
+  @Override
+  public String toString() {
+    return "EmployeeId [id=" + id + "]";
+  }
+}
+
+class EmployeeInfo {
+  String name;
+
+  public EmployeeInfo() {}
+  public EmployeeInfo(String name) {
+    this.name = name;
+  }
+  public String getName() {
+    return name;
+  }
+  public void setName(String name) {
+    this.name = name;
+  }
+  @Override
+  public String toString() {
+    return "EmployeeInfo [name=" + name + "]";
+  }
+}
+
+class Company2 {
+  HashMap<Integer, EmployeeInfo2> employees;
+
+  public Company2() {}
+  public HashMap<Integer, EmployeeInfo2> getEmployees() {
+    return employees;
+  }
+  public void setEmployees(HashMap<Integer, EmployeeInfo2> employees) {
+    this.employees = employees;
+  }
+  @Override
+  public String toString() {
+    return "Company2 [employees=" + employees + "]";
+  }
+}
+
+class EmployeeId2 {
+  Integer id;
+
+  public EmployeeId2() {}
+  public EmployeeId2(Integer id) {
+    this.id = id;
+  }
+  public Integer getId() {
+    return id;
+  }
+  public void setId(Integer id) {
+    this.id = id;
+  }
+  @Override
+  public String toString() {
+    return "EmployeeId2 [id=" + id + "]";
+  }
+}
+
+class EmployeeInfo2 {
+  String name;
+  HashMap<Integer, String> companyMap;
+
+  public EmployeeInfo2() {}
+  public EmployeeInfo2(String name) {
+    this.name = name;
+  }
+  public String getName() {
+    return name;
+  }
+  public void setName(String name) {
+    this.name = name;
+  }
+  public HashMap<Integer, String> getCompanyMap() {
+    return companyMap;
+  }
+  public void setCompanyMap(HashMap<Integer, String> companyMap) {
+    this.companyMap = companyMap;
+  }
+  @Override
+  public String toString() {
+    return "EmployeeInfo2 [name=" + name + "]";
+  }
+}
+
+class SameMapSignature {
+
+  HashMap<Integer, String> map1;
+  HashMap<Integer, String> map2;
+
+  public HashMap<Integer, String> getMap1() {
+    return map1;
+  }
+  public void setMap1(HashMap<Integer, String> map1) {
+    this.map1 = map1;
+  }
+  public HashMap<Integer, String> getMap2() {
+    return map2;
+  }
+  public void setMap2(HashMap<Integer, String> map2) {
+    this.map2 = map2;
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflect.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflect.java
new file mode 100644
index 0000000..6c29ccc
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflect.java
@@ -0,0 +1,1051 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.lang.reflect.Array;
+import java.lang.reflect.Type;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.AvroTypeException;
+import org.apache.avro.Protocol;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.reflect.TestReflect.SampleRecord.AnotherSampleRecord;
+import org.codehaus.jackson.node.NullNode;
+import org.junit.Test;
+
+public class TestReflect {
+  
+  EncoderFactory factory = new EncoderFactory();
+  
+  // test primitive type inference
+  @Test public void testVoid() {
+    check(Void.TYPE, "\"null\"");
+    check(Void.class, "\"null\"");
+  }
+
+  @Test public void testBoolean() {
+    check(Boolean.TYPE, "\"boolean\"");
+    check(Boolean.class, "\"boolean\"");
+  }
+
+  @Test public void testInt() {
+    check(Integer.TYPE, "\"int\"");
+    check(Integer.class, "\"int\"");
+  }
+
+  @Test public void testByte() {
+    check(Byte.TYPE, "{\"type\":\"int\",\"java-class\":\"java.lang.Byte\"}");
+    check(Byte.class, "{\"type\":\"int\",\"java-class\":\"java.lang.Byte\"}");
+  }
+
+  @Test public void testShort() {
+    check(Short.TYPE, "{\"type\":\"int\",\"java-class\":\"java.lang.Short\"}");
+    check(Short.class, "{\"type\":\"int\",\"java-class\":\"java.lang.Short\"}");
+  }
+
+  @Test public void testChar() {
+    check(Character.TYPE, "{\"type\":\"int\",\"java-class\":\"java.lang.Character\"}");
+    check(Character.class, "{\"type\":\"int\",\"java-class\":\"java.lang.Character\"}");
+  }
+
+  @Test public void testLong() {
+    check(Long.TYPE, "\"long\"");
+    check(Long.class, "\"long\"");
+  }
+
+  @Test public void testFloat() {
+    check(Float.TYPE, "\"float\"");
+    check(Float.class, "\"float\"");
+  }
+
+  @Test public void testDouble() {
+    check(Double.TYPE, "\"double\"");
+    check(Double.class, "\"double\"");
+  }
+
+  @Test public void testString() {
+    check("Foo", "\"string\"");
+  }
+
+  @Test public void testBytes() {
+    check(ByteBuffer.allocate(0), "\"bytes\"");
+    check(new byte[0], "{\"type\":\"bytes\",\"java-class\":\"[B\"}");
+  }
+
+  @Test public void testUnionWithCollection() {
+    Schema s = new Schema.Parser().parse
+      ("[\"null\", {\"type\":\"array\",\"items\":\"float\"}]");
+    GenericData data = ReflectData.get();
+    assertEquals(1, data.resolveUnion(s, new ArrayList<Float>()));
+  }
+
+  @Test public void testUnionWithMap() {
+    Schema s = new Schema.Parser().parse
+      ("[\"null\", {\"type\":\"map\",\"values\":\"float\"}]");
+    GenericData data = ReflectData.get();
+    assertEquals(1, data.resolveUnion(s, new HashMap<String,Float>()));
+  }
+
+  @Test public void testUnionWithFixed() {
+    Schema s = new Schema.Parser().parse
+        ("[\"null\", {\"type\":\"fixed\",\"name\":\"f\",\"size\":1}]");
+    Schema f = new Schema.Parser().parse("{\"type\":\"fixed\",\"name\":\"f\",\"size\":1}");
+    GenericData data = ReflectData.get();
+    assertEquals(1, data.resolveUnion(s, new GenericData.Fixed(f)));
+  }
+
+  @Test public void testUnionWithEnum() {
+    Schema s = new Schema.Parser().parse
+        ("[\"null\", {\"type\":\"enum\",\"name\":\"E\",\"namespace\":" +
+            "\"org.apache.avro.reflect.TestReflect$\",\"symbols\":[\"A\",\"B\"]}]");
+    GenericData data = ReflectData.get();
+    assertEquals(1, data.resolveUnion(s, E.A));
+  }
+
+  @Test public void testUnionWithBytes() {
+    Schema s = new Schema.Parser().parse ("[\"null\", \"bytes\"]");
+    GenericData data = ReflectData.get();
+    assertEquals(1, data.resolveUnion(s, ByteBuffer.wrap(new byte[]{1})));
+  }
+
+  // test map, array and list type inference
+  public static class R1 {
+    private Map<String,String> mapField = new HashMap<String,String>();
+    private String[] arrayField = new String[] { "foo" };
+    private List<String> listField = new ArrayList<String>();
+
+    {
+      mapField.put("foo", "bar");
+      listField.add("foo");
+    }
+    
+    @Override
+    public boolean equals(Object o) {
+      if (!(o instanceof R1)) return false;
+      R1 that = (R1)o;
+      return mapField.equals(that.mapField)
+        && Arrays.equals(this.arrayField, that.arrayField) 
+        &&  listField.equals(that.listField);
+    }
+  }
+
+  @Test public void testMap() throws Exception {
+    check(R1.class.getDeclaredField("mapField").getGenericType(),
+          "{\"type\":\"map\",\"values\":\"string\"}");
+  }
+
+  @Test public void testArray() throws Exception {
+    check(R1.class.getDeclaredField("arrayField").getGenericType(),
+          "{\"type\":\"array\",\"items\":\"string\",\"java-class\":\"[Ljava.lang.String;\"}");
+  }
+  @Test public void testList() throws Exception {
+    check(R1.class.getDeclaredField("listField").getGenericType(),
+          "{\"type\":\"array\",\"items\":\"string\""
+          +",\"java-class\":\"java.util.List\"}");
+  }
+  
+  @Test public void testR1() throws Exception {
+    checkReadWrite(new R1());
+  }
+
+  // test record, array and list i/o
+  public static class R2 {
+    private String[] arrayField;
+    private Collection<String> collectionField;
+    
+    @Override
+    public boolean equals(Object o) {
+      if (!(o instanceof R2)) return false;
+      R2 that = (R2)o;
+      return Arrays.equals(this.arrayField, that.arrayField) 
+        &&  collectionField.equals(that.collectionField);
+    }
+  }
+
+  @Test public void testR2() throws Exception {
+    R2 r2 = new R2();
+    r2.arrayField = new String[] {"foo"};
+    r2.collectionField = new ArrayList<String>();
+    r2.collectionField.add("foo");
+    checkReadWrite(r2);
+  }
+
+  // test array i/o of unboxed type
+  public static class R3 {
+    private int[] intArray;
+    
+    @Override
+    public boolean equals(Object o) {
+      if (!(o instanceof R3)) return false;
+      R3 that = (R3)o;
+      return Arrays.equals(this.intArray, that.intArray);
+    }
+  }
+
+  @Test public void testR3() throws Exception {
+    R3 r3 = new R3();
+    r3.intArray = new int[] {1};
+    checkReadWrite(r3);
+  }
+
+  // test inherited fields & short datatype
+  public static class R4 {
+    public short value;
+    public short[] shorts;
+    public byte b;
+    public char c;
+    
+    @Override
+    public boolean equals(Object o) {
+      if (!(o instanceof R4)) return false;
+      R4 that = (R4)o;
+      return this.value == that.value
+        && Arrays.equals(this.shorts, that.shorts)
+        && this.b == that.b
+        && this.c == that.c;
+    }
+  }
+
+  public static class R5 extends R4 {}
+
+  @Test public void testR5() throws Exception {
+    R5 r5 = new R5();
+    r5.value = 1;
+    r5.shorts = new short[] {3,255,256,Short.MAX_VALUE,Short.MIN_VALUE};
+    r5.b = 99;
+    r5.c = 'a';
+    checkReadWrite(r5);
+  }
+
+  // test union annotation on a class
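+  // (the @Union value lists the concrete subclasses whose schemas make up the union)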
+  @Union({R7.class, R8.class})
+  public static class R6 {}
+
+  public static class R7 extends R6 {
+    public int value;
+    @Override
+    public boolean equals(Object o) {
+      if (!(o instanceof R7)) return false;
+      return this.value == ((R7)o).value;
+    }
+  }
+  public static class R8 extends R6 {
+    public float value;
+    @Override
+    public boolean equals(Object o) {
+      if (!(o instanceof R8)) return false;
+      return this.value == ((R8)o).value;
+    }
+  }
+
+  // test arrays of union annotated class
+  public static class R9  {
+    public R6[] r6s;
+    @Override
+    public boolean equals(Object o) {
+      if (!(o instanceof R9)) return false;
+      return Arrays.equals(this.r6s, ((R9)o).r6s);
+    }
+  }
+
+  @Test public void testR6() throws Exception {
+    R7 r7 = new R7();
+    r7.value = 1;
+    checkReadWrite(r7, ReflectData.get().getSchema(R6.class));
+    R8 r8 = new R8();
+    r8.value = 1;
+    checkReadWrite(r8, ReflectData.get().getSchema(R6.class));
+    R9 r9 = new R9();
+    r9.r6s = new R6[] {r7, r8};
+    checkReadWrite(r9, ReflectData.get().getSchema(R9.class));
+  }
+
+  // test union annotation on methods and parameters
+  public static interface P0 {
+    @Union({Void.class, String.class})
+    String foo(@Union({Void.class, String.class}) String s);
+  }
+
+  @Test public void testP0() throws Exception {
+    Protocol p0 = ReflectData.get().getProtocol(P0.class);
+    Protocol.Message message = p0.getMessages().get("foo");
+    // check response schema is union
+    Schema response = message.getResponse();
+    assertEquals(Schema.Type.UNION, response.getType());
+    assertEquals(Schema.Type.NULL, response.getTypes().get(0).getType());
+    assertEquals(Schema.Type.STRING, response.getTypes().get(1).getType());
+    // check request schema is union
+    Schema request = message.getRequest();
+    Field field = request.getField("s");
+    assertNotNull("field 's' should not be null", field);
+    Schema param = field.schema();
+    assertEquals(Schema.Type.UNION, param.getType());
+    assertEquals(Schema.Type.NULL, param.getTypes().get(0).getType());
+    assertEquals(Schema.Type.STRING, param.getTypes().get(1).getType());
+    // check union erasure
+    assertEquals(String.class, ReflectData.get().getClass(response));
+    assertEquals(String.class, ReflectData.get().getClass(param));
+  }
+
+  // test Stringable annotation
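+  // (@Stringable classes map to an Avro string: written via toString(),
+  // read back through a single-String constructor)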
+  @Stringable public static class R10 {
+    private String text;
+    public R10(String text) { this.text = text; }
+    @Override
+    public String toString() { return text; }
+    @Override
+    public boolean equals(Object o) {
+      if (!(o instanceof R10)) return false;
+      return this.text.equals(((R10)o).text);
+    }
+  }
+  
+  @Test public void testR10() throws Exception {
+    Schema r10Schema = ReflectData.get().getSchema(R10.class);
+    assertEquals(Schema.Type.STRING, r10Schema.getType());
+    assertEquals(R10.class.getName(), r10Schema.getProp("java-class"));
+    checkReadWrite(new R10("foo"), r10Schema);
+  }
+
+  // test Nullable annotation on field
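+  // (@Nullable wraps the inferred field schema in a union with "null",
+  // with a null default, as the assertions below verify)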
+  public static class R11 {
+    @Nullable private String text;
+    @Override
+    public boolean equals(Object o) {
+      if (!(o instanceof R11)) return false;
+      R11 that = (R11)o;
+      if (this.text == null) return that.text == null;
+      return this.text.equals(that.text);
+    }
+  }
+  
+  @Test public void testR11() throws Exception {
+    Schema r11Record = ReflectData.get().getSchema(R11.class);
+    assertEquals(Schema.Type.RECORD, r11Record.getType());
+    Field r11Field = r11Record.getField("text");
+    assertEquals(NullNode.getInstance(), r11Field.defaultValue());
+    Schema r11FieldSchema = r11Field.schema();
+    assertEquals(Schema.Type.UNION, r11FieldSchema.getType());
+    assertEquals(Schema.Type.NULL, r11FieldSchema.getTypes().get(0).getType());
+    Schema r11String = r11FieldSchema.getTypes().get(1);
+    assertEquals(Schema.Type.STRING, r11String.getType());
+    R11 r11 = new R11();
+    checkReadWrite(r11, r11Record);
+    r11.text = "foo";
+    checkReadWrite(r11, r11Record);
+  }
+
+  // test nullable annotation on methods and parameters
+  public static interface P1 {
+    @Nullable String foo(@Nullable String s);
+  }
+
+  @Test public void testP1() throws Exception {
+    Protocol p1 = ReflectData.get().getProtocol(P1.class);
+    Protocol.Message message = p1.getMessages().get("foo");
+    // check response schema is union
+    Schema response = message.getResponse();
+    assertEquals(Schema.Type.UNION, response.getType());
+    assertEquals(Schema.Type.NULL, response.getTypes().get(0).getType());
+    assertEquals(Schema.Type.STRING, response.getTypes().get(1).getType());
+    // check request schema is union
+    Schema request = message.getRequest();
+    Field field = request.getField("s");
+    assertNotNull("field 's' should not be null", field);
+    Schema param = field.schema();
+    assertEquals(Schema.Type.UNION, param.getType());
+    assertEquals(Schema.Type.NULL, param.getTypes().get(0).getType());
+    assertEquals(Schema.Type.STRING, param.getTypes().get(1).getType());
+    // check union erasure
+    assertEquals(String.class, ReflectData.get().getClass(response));
+    assertEquals(String.class, ReflectData.get().getClass(param));
+  }
+
+  // test AvroSchema annotation
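+  // (@AvroSchema overrides the reflect-inferred schema with the given JSON)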
+  public static class R12 {                       // fields
+    @AvroSchema("\"int\"")
+    Object x;
+
+    @AvroSchema("{\"type\":\"array\",\"items\":[\"null\",\"string\"]}")
+    List<String> strings;
+  }
+
+
+  @Test public void testR12() throws Exception {
+    Schema s = ReflectData.get().getSchema(R12.class);
+    assertEquals(Schema.Type.INT, s.getField("x").schema().getType());
+    assertEquals(Schema.parse
+                 ("{\"type\":\"array\",\"items\":[\"null\",\"string\"]}"),
+                 s.getField("strings").schema());
+  }
+    
+  @AvroSchema("\"null\"")                          // record
+  public class R13 {}
+
+  @Test public void testR13() throws Exception {
+    Schema s = ReflectData.get().getSchema(R13.class);
+    assertEquals(Schema.Type.NULL, s.getType());
+  }
+    
+  public interface P4 {
+    @AvroSchema("\"int\"")                        // message value
+    Object foo(@AvroSchema("\"int\"") Object x);  // message param
+  }
+
+  @Test public void testP4() throws Exception {
+    Protocol p = ReflectData.get().getProtocol(P4.class);
+    Protocol.Message message = p.getMessages().get("foo");
+    assertEquals(Schema.Type.INT, message.getResponse().getType());
+    Field field = message.getRequest().getField("x");
+    assertEquals(Schema.Type.INT, field.schema().getType());
+  }
+
+  // test error
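+  // (Exception subclasses map to error records; a message's declared errors
+  // form a union whose first branch is the implicit "string")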
+  @SuppressWarnings("serial")
+  public static class E1 extends Exception {}
+  public static interface P2 {
+    void error() throws E1;
+  }
+
+  @Test public void testP2() throws Exception {
+    Schema e1 = ReflectData.get().getSchema(E1.class);
+    assertEquals(Schema.Type.RECORD, e1.getType());
+    assertTrue(e1.isError());
+    Field message = e1.getField("detailMessage");
+    assertNotNull("field 'detailMessage' should not be null", message);
+    Schema messageSchema = message.schema();
+    assertEquals(Schema.Type.UNION, messageSchema.getType());
+    assertEquals(Schema.Type.NULL, messageSchema.getTypes().get(0).getType());
+    assertEquals(Schema.Type.STRING, messageSchema.getTypes().get(1).getType());
+
+    Protocol p2 = ReflectData.get().getProtocol(P2.class);
+    Protocol.Message m = p2.getMessages().get("error");
+    // check error schema is union
+    Schema response = m.getErrors();
+    assertEquals(Schema.Type.UNION, response.getType());
+    assertEquals(Schema.Type.STRING, response.getTypes().get(0).getType());
+    assertEquals(e1, response.getTypes().get(1));
+  }
+
+  @Test public void testNoPackage() throws Exception {
+    Class<?> noPackage = Class.forName("NoPackage");
+    Schema s = ReflectData.get().getSchema(noPackage);
+    assertEquals(noPackage.getName(), ReflectData.getClassName(s));
+  }
+
+  void checkReadWrite(Object object) throws Exception {
+    checkReadWrite(object, ReflectData.get().getSchema(object.getClass()));
+  }
+  void checkReadWrite(Object object, Schema s) throws Exception {
+    ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(s);
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    writer.write(object, factory.directBinaryEncoder(out, null));
+    ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(s);
+    Object after =
+      reader.read(null, DecoderFactory.get().binaryDecoder(
+          out.toByteArray(), null));
+    assertEquals(object, after);
+
+    // check reflective setField works for records
+    if (s.getType().equals(Schema.Type.RECORD)) {
+      Object copy = object.getClass().newInstance();
+      for (Field f : s.getFields()) {
+        Object val = ReflectData.get().getField(object, f.name(), f.pos());
+        ReflectData.get().setField(copy, f.name(), f.pos(), val);
+      }
+      assertEquals("setField", object, copy);
+    }
+  }
+
+  public static enum E { A, B };
+  @Test public void testEnum() throws Exception {
+    check(E.class, "{\"type\":\"enum\",\"name\":\"E\",\"namespace\":"
+          +"\"org.apache.avro.reflect.TestReflect$\",\"symbols\":[\"A\",\"B\"]}");
+  }
+
+  public static class R { int a; long b; }
+  @Test public void testRecord() throws Exception {
+    check(R.class, "{\"type\":\"record\",\"name\":\"R\",\"namespace\":"
+          +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":["
+          +"{\"name\":\"a\",\"type\":\"int\"},"
+          +"{\"name\":\"b\",\"type\":\"long\"}]}");
+  }
+  
+  public static class RAvroIgnore { @AvroIgnore int a; }
+  @Test public void testAnnotationAvroIgnore() throws Exception {
+    check(RAvroIgnore.class, "{\"type\":\"record\",\"name\":\"RAvroIgnore\",\"namespace\":"
+          +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":[]}");
+  }
+  
+  public static class RAvroMeta { @AvroMeta(key="K", value="V") int a; }
+  @Test public void testAnnotationAvroMeta() throws Exception {
+    check(RAvroMeta.class, "{\"type\":\"record\",\"name\":\"RAvroMeta\",\"namespace\":"
+          +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":[" 
+          +"{\"name\":\"a\",\"type\":\"int\",\"K\":\"V\"}]}");
+  }
+  
+  public static class RAvroName { @AvroName("b") int a; }
+  @Test public void testAnnotationAvroName() throws Exception {
+    check(RAvroName.class, "{\"type\":\"record\",\"name\":\"RAvroName\",\"namespace\":"
+          +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":[" 
+          +"{\"name\":\"b\",\"type\":\"int\"}]}");
+  }
+  
+  public static class RAvroNameCollide { @AvroName("b") int a; int b; }
+  @Test(expected=Exception.class)
+  public void testAnnotationAvroNameCollide() throws Exception {
+    check(RAvroNameCollide.class, "{\"type\":\"record\",\"name\":\"RAvroNameCollide\",\"namespace\":"
+          +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":[" 
+          +"{\"name\":\"b\",\"type\":\"int\"}," 
+          +"{\"name\":\"b\",\"type\":\"int\"}]}");
+  }
+  
+  public static class RAvroStringableField { @Stringable int a; }
+  // note: not annotated with @Test, so this check is currently never run
+  public void testAnnotationAvroStringableFields() throws Exception {
+    check(RAvroStringableField.class, "{\"type\":\"record\",\"name\":\"RAvroStringableField\",\"namespace\":"
+          +"\"org.apache.avro.reflect.TestReflect$\",\"fields\":["
+          +"{\"name\":\"a\",\"type\":\"String\"}]}");
+  }
+
+  private void check(Object o, String schemaJson) {
+    check(o.getClass(), schemaJson);
+  }
+
+  private void check(Type type, String schemaJson) {
+    assertEquals(schemaJson, ReflectData.get().getSchema(type).toString());
+  }
+
+  @Test
+  public void testRecordIO() throws IOException {
+    Schema schm = ReflectData.get().getSchema(SampleRecord.class);
+    ReflectDatumWriter<SampleRecord> writer = 
+      new ReflectDatumWriter<SampleRecord>(schm);
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    SampleRecord record = new SampleRecord();
+    record.x = 5;
+    record.y = 10;
+    writer.write(record, factory.directBinaryEncoder(out, null));
+    ReflectDatumReader<SampleRecord> reader = 
+      new ReflectDatumReader<SampleRecord>(schm);
+    SampleRecord decoded =
+      reader.read(null, DecoderFactory.get().binaryDecoder(
+          out.toByteArray(), null));
+    assertEquals(record, decoded);
+  }
+
+  public static class AvroEncRecord {
+    @AvroEncode(using=DateAsLongEncoding.class)
+    java.util.Date date;
+    
+    @Override 
+    public boolean equals(Object o) {
+      if (!(o instanceof AvroEncRecord)) return false;
+      return date.equals(((AvroEncRecord)o).date);
+    }
+  }
+  
+  public static class multipleAnnotationRecord {
+    @AvroIgnore
+    @Stringable
+    Integer i1;
+    
+    @AvroIgnore
+    @Nullable
+    Integer i2;
+
+    @AvroIgnore
+    @AvroName("j")
+    Integer i3;
+    
+    @AvroIgnore
+    @AvroEncode(using=DateAsLongEncoding.class)
+    java.util.Date i4;
+    
+    @Stringable
+    @Nullable
+    Integer i5;
+    
+    @Stringable
+    @AvroName("j6")
+    Integer i6 = 6;    
+    
+    @Stringable
+    @AvroEncode(using=DateAsLongEncoding.class)
+    java.util.Date i7 = new java.util.Date(7L);
+    
+    @Nullable
+    @AvroName("j8")
+    Integer i8;    
+      
+    @Nullable
+    @AvroEncode(using=DateAsLongEncoding.class)
+    java.util.Date i9;
+
+    @AvroName("j10")
+    @AvroEncode(using=DateAsLongEncoding.class)
+    java.util.Date i10 = new java.util.Date(10L);
+
+    @Stringable
+    @Nullable
+    @AvroName("j11")
+    @AvroEncode(using=DateAsLongEncoding.class)
+    java.util.Date i11;
+  }
+  
+  @Test
+  public void testMultipleAnnotations() throws IOException {
+    Schema schm = ReflectData.get().getSchema(multipleAnnotationRecord.class);
+    ReflectDatumWriter<multipleAnnotationRecord> writer = 
+      new ReflectDatumWriter<multipleAnnotationRecord>(schm);
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    multipleAnnotationRecord record = new multipleAnnotationRecord();
+    record.i1 = 1;
+    record.i2 = 2;
+    record.i3 = 3;
+    record.i4 = new java.util.Date(4L);
+    record.i5 = 5;
+    record.i6 = 6;
+    record.i7 = new java.util.Date(7L);
+    record.i8 = 8;
+    record.i9 = new java.util.Date(9L);
+    record.i10 = new java.util.Date(10L);
+    record.i11 = new java.util.Date(11L);
+    
+    writer.write(record, factory.directBinaryEncoder(out, null));
+    ReflectDatumReader<multipleAnnotationRecord> reader = 
+      new ReflectDatumReader<multipleAnnotationRecord>(schm);
+    multipleAnnotationRecord decoded =
+      reader.read(new multipleAnnotationRecord(), DecoderFactory.get().binaryDecoder(
+          out.toByteArray(), null));
+    assertTrue(decoded.i1 == null);
+    assertTrue(decoded.i2 == null);
+    assertTrue(decoded.i3 == null);
+    assertTrue(decoded.i4 == null);
+    assertTrue(decoded.i5 == 5);
+    assertTrue(decoded.i6 == 6);
+    assertTrue(decoded.i7.getTime() == 7);
+    assertTrue(decoded.i8 == 8);
+    assertTrue(decoded.i9.getTime() == 9);
+    assertTrue(decoded.i10.getTime() == 10);
+    assertTrue(decoded.i11.getTime() == 11);
+  }
+  
+  
+  @Test
+  public void testAvroEncodeInducing() throws IOException {
+    Schema schm = ReflectData.get().getSchema(AvroEncRecord.class);
+    assertEquals("{\"type\":\"record\",\"name\":\"AvroEncRecord\",\"namespace" +
+      "\":\"org.apache.avro.reflect.TestReflect$\",\"fields\":[{\"name\":\"date\"," +
+      "\"type\":{\"type\":\"long\",\"CustomEncoding\":\"DateAsLongEncoding\"}}]}",
+      schm.toString());
+  }
+  
+  @Test
+  public void testAvroEncodeIO() throws IOException {
+    Schema schm = ReflectData.get().getSchema(AvroEncRecord.class);
+    ReflectDatumWriter<AvroEncRecord> writer = 
+      new ReflectDatumWriter<AvroEncRecord>(schm);
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    AvroEncRecord record = new AvroEncRecord();
+    record.date = new java.util.Date(948833323L);
+    writer.write(record, factory.directBinaryEncoder(out, null));
+    ReflectDatumReader<AvroEncRecord> reader = 
+      new ReflectDatumReader<AvroEncRecord>(schm);
+    AvroEncRecord decoded =
+      reader.read(new AvroEncRecord(), DecoderFactory.get().binaryDecoder(
+          out.toByteArray(), null));
+    assertEquals(record, decoded);
+  }
+  
+  @Test
+  public void testRecordWithNullIO() throws IOException {
+    ReflectData reflectData = ReflectData.AllowNull.get();
+    Schema schm = reflectData.getSchema(AnotherSampleRecord.class);
+    ReflectDatumWriter<AnotherSampleRecord> writer = 
+      new ReflectDatumWriter<AnotherSampleRecord>(schm);
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    // keep record.a null and see if that works
+    Encoder e = factory.directBinaryEncoder(out, null);
+    AnotherSampleRecord a = new AnotherSampleRecord();
+    writer.write(a, e);
+    AnotherSampleRecord b = new AnotherSampleRecord(10);
+    writer.write(b, e);
+    e.flush();
+    ReflectDatumReader<AnotherSampleRecord> reader = 
+      new ReflectDatumReader<AnotherSampleRecord>(schm);
+    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
+    Decoder d = DecoderFactory.get().binaryDecoder(in, null);
+    AnotherSampleRecord decoded = reader.read(null, d);
+    assertEquals(a, decoded);
+    decoded = reader.read(null, d);
+    assertEquals(b, decoded);
+  }
+
+  @Test public void testDisableUnsafe() throws Exception {
+    String saved = System.getProperty("avro.disable.unsafe");
+    try {
+      System.setProperty("avro.disable.unsafe", "true");
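+      // clear cached accessors and re-evaluate field access so the
+      // property change above takes effect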
+      ReflectData.ACCESSOR_CACHE.clear();
+      ReflectionUtil.resetFieldAccess();
+      testMultipleAnnotations();
+      testRecordWithNullIO();
+    } finally {
+      if (saved == null)
+        System.clearProperty("avro.disable.unsafe");
+      else
+        System.setProperty("avro.disable.unsafe", saved);
+      ReflectData.ACCESSOR_CACHE.clear();
+      ReflectionUtil.resetFieldAccess();
+    }
+  }
+
+  public static class SampleRecord {
+    public int x = 1;
+    private int y = 2;
+
+    @Override
+    public int hashCode() {
+      return x + y;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj)
+        return true;
+      if (obj == null)
+        return false;
+      if (getClass() != obj.getClass())
+        return false;
+      final SampleRecord other = (SampleRecord)obj;
+      if (x != other.x)
+        return false;
+      if (y != other.y)
+        return false;
+      return true;
+    }
+    
+    public static class AnotherSampleRecord {
+      private Integer a = null;
+      private SampleRecord s = null;
+
+      public AnotherSampleRecord() {
+      }
+
+      AnotherSampleRecord(Integer a) {
+        this.a = a;
+        this.s = new SampleRecord();
+      }
+
+      @Override
+      public int hashCode() {
+        int hash = (a != null ? a.hashCode() : 0);
+        hash += (s != null ? s.hashCode() : 0);
+        return hash;
+      }
+
+      @Override
+      public boolean equals(Object other) {
+        if (other instanceof AnotherSampleRecord) {
+          AnotherSampleRecord o = (AnotherSampleRecord) other;
+          if ( (this.a == null && o.a != null) ||
+               (this.a != null && !this.a.equals(o.a)) ||
+               (this.s == null && o.s != null) ||
+               (this.s != null && !this.s.equals(o.s)) ) {
+            return false;
+          }
+          return true;
+        } else {
+          return false;
+        }
+      }
+    }
+  }
+
+  public static class X { int i; }
+  public static class B1 { X x; }
+  public static class B2 { X x; }
+  public static class A { B1 b1; B2 b2; }
+  public static interface C { void foo(A a); }
+
+  @Test
+  public void testForwardReference() {
+    ReflectData data = ReflectData.get();
+    Protocol reflected = data.getProtocol(C.class);
+    Protocol reparsed = Protocol.parse(reflected.toString());
+    assertEquals(reflected, reparsed);
+    assertTrue(reparsed.getTypes().contains(data.getSchema(A.class)));
+    assertTrue(reparsed.getTypes().contains(data.getSchema(B1.class)));
+    assertTrue(reparsed.getTypes().contains(data.getSchema(B2.class)));
+    assertTrue(reparsed.getTypes().contains(data.getSchema(X.class)));
+  }
+
+  public static interface P3 {
+    void m1();
+    void m1(int x);
+  }
+
+  @Test(expected=AvroTypeException.class)
+  public void testOverloadedMethod() { 
+    ReflectData.get().getProtocol(P3.class);
+  }
+
+  @Test
+  public void testNoPackageSchema() throws Exception {
+    ReflectData.get().getSchema(Class.forName("NoPackage"));
+  }
+
+  @Test
+  public void testNoPackageProtocol() throws Exception {
+    ReflectData.get().getProtocol(Class.forName("NoPackage"));
+  }
+
+  public static class Y {
+    int i;
+  }
+
+  /** Test nesting of reflect data within generic. */
+  @Test
+  public void testReflectWithinGeneric() throws Exception {
+    ReflectData data = ReflectData.get();
+    // define a record with a field that's a specific Y
+    Schema schema = Schema.createRecord("Foo", "", "x.y.z", false);
+    List<Schema.Field> fields = new ArrayList<Schema.Field>();
+    fields.add(new Schema.Field("f", data.getSchema(Y.class), "", null));
+    schema.setFields(fields);
+
+    // create a generic instance of this record
+    Y y = new Y();
+    y.i = 1;
+    GenericData.Record record = new GenericData.Record(schema);
+    record.put("f", y);
+
+    // test that this instance can be written & re-read
+    checkBinary(schema, record);
+  }
+  
+  @Test
+  public void testPrimitiveArray() throws Exception {
+    testPrimitiveArrays(false);
+  }
+  
+  @Test
+  public void testPrimitiveArrayBlocking() throws Exception {
+    testPrimitiveArrays(true);
+  }
+  
+  private void testPrimitiveArrays(boolean blocking) throws Exception {
+    testPrimitiveArray(boolean.class, blocking);
+    testPrimitiveArray(byte.class, blocking);
+    testPrimitiveArray(short.class, blocking);
+    testPrimitiveArray(char.class, blocking);
+    testPrimitiveArray(int.class, blocking);
+    testPrimitiveArray(long.class, blocking);
+    testPrimitiveArray(float.class, blocking);
+    testPrimitiveArray(double.class, blocking);
+  }
+
+  private void testPrimitiveArray(Class<?> c, boolean blocking) throws Exception {
+    ReflectData data = new ReflectData();
+    Random r = new Random();
+    int size = 200;
+    Object array = Array.newInstance(c, size);
+    Schema s = data.getSchema(array.getClass());
+    for(int i = 0; i < size; i++) {
+      Array.set(array, i, randomFor(c, r));
+    }
+    checkBinary(data, s, array, false, blocking);
+  }
+
+  private Object randomFor(Class<?> c, Random r) {
+    if (c == boolean.class)
+      return r.nextBoolean();
+    if (c == int.class)
+      return r.nextInt();
+    if (c == long.class)
+      return r.nextLong();
+    if (c == byte.class)
+      return (byte)r.nextInt();
+    if (c == float.class)
+      return r.nextFloat();
+    if (c == double.class)
+      return r.nextDouble();
+    if (c == char.class)
+      return (char)r.nextInt();
+    if (c == short.class)
+      return (short)r.nextInt();
+    return null;
+  }
+
+  /** Test union of null and an array. */
+  @Test
+  public void testNullArray() throws Exception {
+    String json = "[{\"type\":\"array\", \"items\": \"long\"}, \"null\"]";
+    Schema schema = new Schema.Parser().parse(json);
+    checkBinary(schema, null);
+  }
+
+  /** Test stringable classes. */
+  @Test public void testStringables() throws Exception {
+    checkStringable(java.math.BigDecimal.class, "10");
+    checkStringable(java.math.BigInteger.class, "20");
+    checkStringable(java.net.URI.class, "foo://bar:9000/baz");
+    checkStringable(java.net.URL.class, "http://bar:9000/baz");
+    checkStringable(java.io.File.class, "foo.bar");
+  }
+
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  public void checkStringable(Class c, String value) throws Exception {
+    ReflectData data = new ReflectData();
+    Schema schema = data.getSchema(c);
+    assertEquals
+      ("{\"type\":\"string\",\"java-class\":\""+c.getName()+"\"}",
+       schema.toString());
+    checkBinary(schema, c.getConstructor(String.class).newInstance(value));
+  }
+
+  public static class M1 {
+    Map<Integer, String> integerKeyMap;
+    Map<java.math.BigInteger, String> bigIntegerKeyMap;
+    Map<java.math.BigDecimal, String> bigDecimalKeyMap;
+    Map<java.io.File, String> fileKeyMap;
+  }
+
+  /** Test Map with stringable key classes. */
+  @Test public void testStringableMapKeys() throws Exception {
+    M1 record = new M1();
+    record.integerKeyMap = new HashMap<Integer, String>(1);
+    record.integerKeyMap.put(10, "foo");
+
+    record.bigIntegerKeyMap = new HashMap<java.math.BigInteger, String>(1);
+    record.bigIntegerKeyMap.put(java.math.BigInteger.TEN, "bar");
+
+    record.bigDecimalKeyMap = new HashMap<java.math.BigDecimal, String>(1);
+    record.bigDecimalKeyMap.put(java.math.BigDecimal.ONE, "bigDecimal");
+
+    record.fileKeyMap = new HashMap<java.io.File, String>(1);
+    record.fileKeyMap.put(new java.io.File("foo.bar"), "file");
+
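+    // Avro map keys must be strings; registering Integer as stringable lets
+    // integer keys round-trip through toString()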
+    ReflectData data = new ReflectData().addStringable(Integer.class);
+
+    checkBinary(data, data.getSchema(M1.class), record, true);
+  }
+
+  public static class NullableStringable {
+    java.math.BigDecimal number;
+  }
+
+  @Test public void testNullableStringableField() throws Exception {
+    NullableStringable datum = new NullableStringable();
+    datum.number = java.math.BigDecimal.TEN;
+
+    Schema schema = ReflectData.AllowNull.get().getSchema(NullableStringable.class);
+    checkBinary(schema, datum);
+  }
+
+  public static void checkBinary(ReflectData reflectData, Schema schema,
+      Object datum, boolean equals) throws IOException {
+    checkBinary(reflectData, schema, datum, equals, false);
+  }
+  
+  private static void checkBinary(ReflectData reflectData, Schema schema,
+      Object datum, boolean equals, boolean blocking) throws IOException {
+    ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(schema);
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    // write once through the chosen encoder; the blocking encoder buffers
+    // internally, so it must be flushed before the bytes are collected
+    Encoder encoder;
+    if (!blocking) {
+      encoder = EncoderFactory.get().directBinaryEncoder(out, null);
+    } else {
+      encoder = new EncoderFactory().configureBlockSize(64)
+          .blockingBinaryEncoder(out, null);
+    }
+    writer.write(datum, encoder);
+    encoder.flush();
+    byte[] data = out.toByteArray();
+
+    ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(schema);
+    Object decoded = reader.read(null,
+        DecoderFactory.get().binaryDecoder(data, null));
+
+    assertEquals(0, reflectData.compare(datum, decoded, schema, equals));
+  }
+
+  public static void checkBinary(Schema schema, Object datum)
+    throws IOException {
+    checkBinary(ReflectData.get(), schema, datum, false);
+  }
+
+  /** Test that the error message contains the name of the class. */
+  @Test
+  public void testReflectFieldError() throws Exception {
+    Object datum = "";
+    try {
+      ReflectData.get().getField(datum, "notAFieldOfString", 0);
+    } catch (AvroRuntimeException e) {
+      assertTrue(e.getMessage().contains(datum.getClass().getName()));
+    }
+  }
+
+  @AvroAlias(alias="a", space="b")
+  private static class AliasA { }
+  @AvroAlias(alias="a", space="")
+  private static class AliasB { }
+  @AvroAlias(alias="a")
+  private static class AliasC { }  
+  
+  @Test
+  public void testAvroAlias() {
+    check(AliasA.class, "{\"type\":\"record\",\"name\":\"AliasA\",\"namespace\":\"org.apache.avro.reflect.TestReflect$\",\"fields\":[],\"aliases\":[\"b.a\"]}");
+    check(AliasB.class, "{\"type\":\"record\",\"name\":\"AliasB\",\"namespace\":\"org.apache.avro.reflect.TestReflect$\",\"fields\":[],\"aliases\":[\"a\"]}");
+    check(AliasC.class, "{\"type\":\"record\",\"name\":\"AliasC\",\"namespace\":\"org.apache.avro.reflect.TestReflect$\",\"fields\":[],\"aliases\":[\"a\"]}");    
+  }
+
+  private static class DefaultTest {
+    @AvroDefault("1")
+    int foo;
+  }  
+  
+  @Test
+  public void testAvroDefault() {
+    check(DefaultTest.class,
+          "{\"type\":\"record\",\"name\":\"DefaultTest\","
+          +"\"namespace\":\"org.apache.avro.reflect.TestReflect$\",\"fields\":["
+          +"{\"name\":\"foo\",\"type\":\"int\",\"default\":1}]}");
+  }
+
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectAllowNulls.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectAllowNulls.java
new file mode 100644
index 0000000..3a772f0
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectAllowNulls.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.util.Arrays;
+import org.apache.avro.Schema;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestReflectAllowNulls {
+
+  private static class Primitives {
+    boolean aBoolean;
+    byte aByte;
+    short aShort;
+    int anInt;
+    long aLong;
+    float aFloat;
+    double aDouble;
+  }
+
+  private static class Wrappers {
+    Boolean aBoolean;
+    Byte aByte;
+    Short aShort;
+    Integer anInt;
+    Long aLong;
+    Float aFloat;
+    Double aDouble;
+    Primitives anObject;
+  }
+
+  private static class AllowNullWithNullable {
+    @Nullable
+    Double aDouble;
+
+    @AvroSchema("[\"double\", \"long\"]")
+    Object doubleOrLong;
+
+    @Nullable
+    @AvroSchema("[\"double\", \"long\"]")
+    Object doubleOrLongOrNull1;
+
+    @AvroSchema("[\"double\", \"long\", \"null\"]")
+    Object doubleOrLongOrNull2;
+
+    @Nullable
+    @AvroSchema("[\"double\", \"long\", \"null\"]")
+    Object doubleOrLongOrNull3;
+  }
+
+  @Test
+  public void testPrimitives() {
+    // AllowNull only makes fields nullable, so testing must use a base record
+    Schema primitives = ReflectData.AllowNull.get().getSchema(Primitives.class);
+    Assert.assertEquals(requiredSchema(boolean.class),
+        primitives.getField("aBoolean").schema());
+    Assert.assertEquals(requiredSchema(byte.class),
+        primitives.getField("aByte").schema());
+    Assert.assertEquals(requiredSchema(short.class),
+        primitives.getField("aShort").schema());
+    Assert.assertEquals(requiredSchema(int.class),
+        primitives.getField("anInt").schema());
+    Assert.assertEquals(requiredSchema(long.class),
+        primitives.getField("aLong").schema());
+    Assert.assertEquals(requiredSchema(float.class),
+        primitives.getField("aFloat").schema());
+    Assert.assertEquals(requiredSchema(double.class),
+        primitives.getField("aDouble").schema());
+  }
+
+  @Test
+  public void testWrappers() {
+    // AllowNull only makes fields nullable, so testing must use a base record
+    Schema wrappers = ReflectData.AllowNull.get().getSchema(Wrappers.class);
+    Assert.assertEquals(nullableSchema(boolean.class),
+        wrappers.getField("aBoolean").schema());
+    Assert.assertEquals(nullableSchema(byte.class),
+        wrappers.getField("aByte").schema());
+    Assert.assertEquals(nullableSchema(short.class),
+        wrappers.getField("aShort").schema());
+    Assert.assertEquals(nullableSchema(int.class),
+        wrappers.getField("anInt").schema());
+    Assert.assertEquals(nullableSchema(long.class),
+        wrappers.getField("aLong").schema());
+    Assert.assertEquals(nullableSchema(float.class),
+        wrappers.getField("aFloat").schema());
+    Assert.assertEquals(nullableSchema(double.class),
+        wrappers.getField("aDouble").schema());
+    Assert.assertEquals(nullableSchema(Primitives.class),
+        wrappers.getField("anObject").schema());
+  }
+
+  @Test
+  public void testAllowNullWithNullableAnnotation() {
+    Schema withNullable = ReflectData.AllowNull.get()
+        .getSchema(AllowNullWithNullable.class);
+
+    Assert.assertEquals("Should produce a nullable double",
+        nullableSchema(double.class),
+        withNullable.getField("aDouble").schema());
+
+    Schema nullableDoubleOrLong = Schema.createUnion(Arrays.asList(
+            Schema.create(Schema.Type.NULL),
+            Schema.create(Schema.Type.DOUBLE),
+            Schema.create(Schema.Type.LONG)));
+
+    Assert.assertEquals("Should add null to a non-null union",
+        nullableDoubleOrLong,
+        withNullable.getField("doubleOrLong").schema());
+
+    Assert.assertEquals("Should add null to a non-null union",
+        nullableDoubleOrLong,
+        withNullable.getField("doubleOrLongOrNull1").schema());
+
+    Schema doubleOrLongOrNull = Schema.createUnion(Arrays.asList(
+        Schema.create(Schema.Type.DOUBLE),
+        Schema.create(Schema.Type.LONG),
+        Schema.create(Schema.Type.NULL)));
+
+    Assert.assertEquals("Should add null to a non-null union",
+        doubleOrLongOrNull,
+        withNullable.getField("doubleOrLongOrNull2").schema());
+
+    Assert.assertEquals("Should add null to a non-null union",
+        doubleOrLongOrNull,
+        withNullable.getField("doubleOrLongOrNull3").schema());
+  }
+
+  private Schema requiredSchema(Class<?> type) {
+    return ReflectData.get().getSchema(type);
+  }
+
+  private Schema nullableSchema(Class<?> type) {
+    return Schema.createUnion(Arrays.asList(
+        Schema.create(Schema.Type.NULL),
+        ReflectData.get().getSchema(type)));
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectDatumReader.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectDatumReader.java
new file mode 100644
index 0000000..0554aa6
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectDatumReader.java
@@ -0,0 +1,178 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.reflect;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.junit.Test;
+
+public class TestReflectDatumReader {
+
+  private static <T> byte[] serializeWithReflectDatumWriter(T toSerialize, Class<T> toSerializeClass)
+      throws IOException {
+    ReflectDatumWriter<T> datumWriter = new ReflectDatumWriter<T>(toSerializeClass);
+    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+    Encoder encoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
+    datumWriter.write(toSerialize, encoder);
+    encoder.flush();
+    return byteArrayOutputStream.toByteArray();
+  }
+
+  @Test
+  public void testRead_PojoWithList() throws IOException {
+    PojoWithList pojoWithList = new PojoWithList();
+    pojoWithList.setId(42);
+    pojoWithList.setRelatedIds(Arrays.asList(1, 2, 3));
+
+    byte[] serializedBytes = serializeWithReflectDatumWriter(pojoWithList, PojoWithList.class);
+
+    Decoder decoder = DecoderFactory.get().binaryDecoder(serializedBytes, null);
+    ReflectDatumReader<PojoWithList> reflectDatumReader = new ReflectDatumReader<PojoWithList>(
+        PojoWithList.class);
+
+    PojoWithList deserialized = new PojoWithList();
+    reflectDatumReader.read(deserialized, decoder);
+
+    assertEquals(pojoWithList, deserialized);
+
+  }
+
+  @Test
+  public void testRead_PojoWithArray() throws IOException {
+    PojoWithArray pojoWithArray = new PojoWithArray();
+    pojoWithArray.setId(42);
+    pojoWithArray.setRelatedIds(new int[] { 1, 2, 3 });
+
+    byte[] serializedBytes = serializeWithReflectDatumWriter(pojoWithArray, PojoWithArray.class);
+
+    Decoder decoder = DecoderFactory.get().binaryDecoder(serializedBytes, null);
+    ReflectDatumReader<PojoWithArray> reflectDatumReader = new ReflectDatumReader<PojoWithArray>(
+        PojoWithArray.class);
+
+    PojoWithArray deserialized = new PojoWithArray();
+    reflectDatumReader.read(deserialized, decoder);
+
+    assertEquals(pojoWithArray, deserialized);
+  }
+
+  public static class PojoWithList {
+    private int id;
+    private List<Integer> relatedIds;
+
+    public int getId() {
+      return id;
+    }
+
+    public void setId(int id) {
+      this.id = id;
+    }
+
+    public List<Integer> getRelatedIds() {
+      return relatedIds;
+    }
+
+    public void setRelatedIds(List<Integer> relatedIds) {
+      this.relatedIds = relatedIds;
+    }
+
+    @Override
+    public int hashCode() {
+      final int prime = 31;
+      int result = 1;
+      result = prime * result + id;
+      result = prime * result + ((relatedIds == null) ? 0 : relatedIds.hashCode());
+      return result;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj)
+        return true;
+      if (obj == null)
+        return false;
+      if (getClass() != obj.getClass())
+        return false;
+      PojoWithList other = (PojoWithList) obj;
+      if (id != other.id)
+        return false;
+      if (relatedIds == null) {
+        if (other.relatedIds != null)
+          return false;
+      } else if (!relatedIds.equals(other.relatedIds))
+        return false;
+      return true;
+    }
+  }
+
+  public static class PojoWithArray {
+    private int id;
+    private int[] relatedIds;
+
+    public int getId() {
+      return id;
+    }
+
+    public void setId(int id) {
+      this.id = id;
+    }
+
+    public int[] getRelatedIds() {
+      return relatedIds;
+    }
+
+    public void setRelatedIds(int[] relatedIds) {
+      this.relatedIds = relatedIds;
+    }
+
+    @Override
+    public int hashCode() {
+      final int prime = 31;
+      int result = 1;
+      result = prime * result + id;
+      result = prime * result + Arrays.hashCode(relatedIds);
+      return result;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj)
+        return true;
+      if (obj == null)
+        return false;
+      if (getClass() != obj.getClass())
+        return false;
+      PojoWithArray other = (PojoWithArray) obj;
+      if (id != other.id)
+        return false;
+      if (!Arrays.equals(relatedIds, other.relatedIds))
+        return false;
+      return true;
+    }
+
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectLogicalTypes.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectLogicalTypes.java
new file mode 100644
index 0000000..06bbaaf
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectLogicalTypes.java
@@ -0,0 +1,829 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.reflect;
+
+import java.io.File;
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.UUID;
+import org.apache.avro.Conversion;
+import org.apache.avro.Conversions;
+import org.apache.avro.LogicalType;
+import org.apache.avro.LogicalTypes;
+import org.apache.avro.Schema;
+import org.apache.avro.SchemaBuilder;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.generic.IndexedRecord;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.specific.SpecificData;
+import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+/**
+ * Tests various logical types
+ * * string => UUID
+ * * fixed and bytes => Decimal
+ * * record => Pair
+ */
+public class TestReflectLogicalTypes {
+  @Rule
+  public TemporaryFolder temp = new TemporaryFolder();
+
+  public static final ReflectData REFLECT = new ReflectData();
+
+  @BeforeClass
+  public static void addUUID() {
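+    // these conversions let REFLECT translate the uuid and decimal logical
+    // types to java.util.UUID and BigDecimal during (de)serialization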
+    REFLECT.addLogicalTypeConversion(new Conversions.UUIDConversion());
+    REFLECT.addLogicalTypeConversion(new Conversions.DecimalConversion());
+  }
+
+  @Test
+  public void testReflectedSchema() {
+    Schema expected = SchemaBuilder.record(RecordWithUUIDList.class.getName())
+        .fields()
+        .name("uuids").type().array().items().stringType().noDefault()
+        .endRecord();
+    expected.getField("uuids").schema().addProp(
+        SpecificData.CLASS_PROP, List.class.getName());
+    LogicalTypes.uuid().addToSchema(
+        expected.getField("uuids").schema().getElementType());
+
+    Schema actual = REFLECT.getSchema(RecordWithUUIDList.class);
+
+    Assert.assertEquals("Should use the UUID logical type", expected, actual);
+  }
+
+  // this can be static because the schema only comes from reflection
+  public static class DecimalRecordBytes {
+    // scale is required and will not be set by the conversion
+    @AvroSchema("{" +
+        "\"type\": \"bytes\"," +
+        "\"logicalType\": \"decimal\"," +
+        "\"precision\": 9," +
+        "\"scale\": 2" +
+        "}")
+    private BigDecimal decimal;
+
+    @Override
+    public boolean equals(Object other) {
+      if (this == other) {
+        return true;
+      }
+
+      if (other == null || getClass() != other.getClass()) {
+        return false;
+      }
+
+      DecimalRecordBytes that = (DecimalRecordBytes) other;
+      if (decimal == null) {
+        return (that.decimal == null);
+      }
+
+      return decimal.equals(that.decimal);
+    }
+
+    @Override
+    public int hashCode() {
+      return decimal != null ? decimal.hashCode() : 0;
+    }
+  }
+
+  @Test
+  public void testDecimalBytes() throws IOException {
+    Schema schema = REFLECT.getSchema(DecimalRecordBytes.class);
+    Assert.assertEquals("Should have the correct record name",
+        "org.apache.avro.reflect.TestReflectLogicalTypes$",
+        schema.getNamespace());
+    Assert.assertEquals("Should have the correct record name",
+        "DecimalRecordBytes",
+        schema.getName());
+    Assert.assertEquals("Should have the correct logical type",
+        LogicalTypes.decimal(9, 2),
+        LogicalTypes.fromSchema(schema.getField("decimal").schema()));
+
+    DecimalRecordBytes record = new DecimalRecordBytes();
+    record.decimal = new BigDecimal("3.14");
+
+    File test = write(REFLECT, schema, record);
+    Assert.assertEquals("Should match the decimal after round trip",
+        Arrays.asList(record),
+        read(REFLECT.createDatumReader(schema), test));
+  }
+
+  // this can be static because the schema only comes from reflection
+  public static class DecimalRecordFixed {
+    // scale is required and will not be set by the conversion
+    @AvroSchema("{" +
+        "\"name\": \"decimal_9\"," +
+        "\"type\": \"fixed\"," +
+        "\"size\": 4," +
+        "\"logicalType\": \"decimal\"," +
+        "\"precision\": 9," +
+        "\"scale\": 2" +
+        "}")
+    private BigDecimal decimal;
+
+    @Override
+    public boolean equals(Object other) {
+      if (this == other) {
+        return true;
+      }
+
+      if (other == null || getClass() != other.getClass()) {
+        return false;
+      }
+
+      DecimalRecordFixed that = (DecimalRecordFixed) other;
+      if (decimal == null) {
+        return (that.decimal == null);
+      }
+
+      return decimal.equals(that.decimal);
+    }
+
+    @Override
+    public int hashCode() {
+      return decimal != null ? decimal.hashCode() : 0;
+    }
+  }
+
+  @Test
+  public void testDecimalFixed() throws IOException {
+    Schema schema = REFLECT.getSchema(DecimalRecordFixed.class);
+    Assert.assertEquals("Should have the correct record name",
+        "org.apache.avro.reflect.TestReflectLogicalTypes$",
+        schema.getNamespace());
+    Assert.assertEquals("Should have the correct record name",
+        "DecimalRecordFixed",
+        schema.getName());
+    Assert.assertEquals("Should have the correct logical type",
+        LogicalTypes.decimal(9, 2),
+        LogicalTypes.fromSchema(schema.getField("decimal").schema()));
+
+    DecimalRecordFixed record = new DecimalRecordFixed();
+    record.decimal = new BigDecimal("3.14");
+
+    File test = write(REFLECT, schema, record);
+    Assert.assertEquals("Should match the decimal after round trip",
+        Arrays.asList(record),
+        read(REFLECT.createDatumReader(schema), test));
+  }
+
+  public static class Pair<X, Y> {
+    private final X first;
+    private final Y second;
+
+    private Pair(X first, Y second) {
+      this.first = first;
+      this.second = second;
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (this == other) {
+        return true;
+      }
+
+      if (other == null || getClass() != other.getClass()) {
+        return false;
+      }
+
+      Pair<?, ?> that = (Pair<?, ?>) other;
+      if (first == null) {
+        if (that.first != null) {
+          return false;
+        }
+      } else if (!first.equals(that.first)) {
+        return false;
+      }
+
+      if (second == null) {
+        if (that.second != null) {
+          return false;
+        }
+      } else if (!second.equals(that.second)) {
+        return false;
+      }
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      return Arrays.hashCode(new Object[] {first, second});
+    }
+
+    public static <X, Y> Pair<X, Y> of(X first, Y second) {
+      return new Pair<X, Y>(first, second);
+    }
+  }
+
+  public static class PairRecord {
+    @AvroSchema("{" +
+        "\"name\": \"Pair\"," +
+        "\"type\": \"record\"," +
+        "\"fields\": [" +
+        "    {\"name\": \"x\", \"type\": \"long\"}," +
+        "    {\"name\": \"y\", \"type\": \"long\"}" +
+        "  ]," +
+        "\"logicalType\": \"pair\"" +
+        "}")
+    Pair<Long, Long> pair;
+  }
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testPairRecord() throws IOException {
+    ReflectData model = new ReflectData();
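+    // the Conversion supplies the in-memory representation: it is selected
+    // by logical type name and converts between IndexedRecord and Pair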
+    model.addLogicalTypeConversion(new Conversion<Pair>() {
+      @Override
+      public Class<Pair> getConvertedType() {
+        return Pair.class;
+      }
+
+      @Override
+      public String getLogicalTypeName() {
+        return "pair";
+      }
+
+      @Override
+      public Pair fromRecord(IndexedRecord value, Schema schema, LogicalType type) {
+        return Pair.of(value.get(0), value.get(1));
+      }
+
+      @Override
+      public IndexedRecord toRecord(Pair value, Schema schema, LogicalType type) {
+        GenericData.Record record = new GenericData.Record(schema);
+        record.put(0, value.first);
+        record.put(1, value.second);
+        return record;
+      }
+    });
+
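+    // the factory lets LogicalTypes.fromSchema recognize "pair" on schemas;
+    // the registry is static, so the registration is JVM-wide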
+    LogicalTypes.register("pair", new LogicalTypes.LogicalTypeFactory() {
+      private final LogicalType PAIR = new LogicalType("pair");
+      @Override
+      public LogicalType fromSchema(Schema schema) {
+        return PAIR;
+      }
+    });
+
+    Schema schema = model.getSchema(PairRecord.class);
+    Assert.assertEquals("Should have the correct record name",
+        "org.apache.avro.reflect.TestReflectLogicalTypes$",
+        schema.getNamespace());
+    Assert.assertEquals("Should have the correct record name",
+        "PairRecord",
+        schema.getName());
+    Assert.assertEquals("Should have the correct logical type",
+        "pair",
+        LogicalTypes.fromSchema(schema.getField("pair").schema()).getName());
+
+    PairRecord record = new PairRecord();
+    record.pair = Pair.of(34L, 35L);
+    List<PairRecord> expected = new ArrayList<PairRecord>();
+    expected.add(record);
+
+    File test = write(model, schema, record);
+    Pair<Long, Long> actual = ((PairRecord) TestReflectLogicalTypes.
+        <PairRecord>read(model.createDatumReader(schema), test).get(0)).pair;
+    Assert.assertEquals("Data should match after serialization round-trip",
+        34L, (long) actual.first);
+    Assert.assertEquals("Data should match after serialization round-trip",
+        35L, (long) actual.second);
+  }
+
+  @Test
+  public void testReadUUID() throws IOException {
+    Schema uuidSchema = SchemaBuilder.record(RecordWithUUID.class.getName())
+        .fields().requiredString("uuid").endRecord();
+    LogicalTypes.uuid().addToSchema(uuidSchema.getField("uuid").schema());
+
+    UUID u1 = UUID.randomUUID();
+    UUID u2 = UUID.randomUUID();
+
+    RecordWithStringUUID r1 = new RecordWithStringUUID();
+    r1.uuid = u1.toString();
+    RecordWithStringUUID r2 = new RecordWithStringUUID();
+    r2.uuid = u2.toString();
+
+    List<RecordWithUUID> expected = Arrays.asList(
+        new RecordWithUUID(), new RecordWithUUID());
+    expected.get(0).uuid = u1;
+    expected.get(1).uuid = u2;
+
+    File test = write(
+        ReflectData.get().getSchema(RecordWithStringUUID.class), r1, r2);
+
+    Assert.assertEquals("Should convert Strings to UUIDs",
+        expected, read(REFLECT.createDatumReader(uuidSchema), test));
+
+    // verify that the field's type overrides the logical type
+    Schema uuidStringSchema = SchemaBuilder
+        .record(RecordWithStringUUID.class.getName())
+        .fields().requiredString("uuid").endRecord();
+    LogicalTypes.uuid().addToSchema(uuidStringSchema.getField("uuid").schema());
+
+    Assert.assertEquals("Should not convert to UUID if accessor is String",
+        Arrays.asList(r1, r2),
+        read(REFLECT.createDatumReader(uuidStringSchema), test));
+  }
+
+  @Test
+  public void testWriteUUID() throws IOException {
+    Schema uuidSchema = SchemaBuilder.record(RecordWithUUID.class.getName())
+        .fields().requiredString("uuid").endRecord();
+    LogicalTypes.uuid().addToSchema(uuidSchema.getField("uuid").schema());
+
+    UUID u1 = UUID.randomUUID();
+    UUID u2 = UUID.randomUUID();
+
+    RecordWithUUID r1 = new RecordWithUUID();
+    r1.uuid = u1;
+    RecordWithUUID r2 = new RecordWithUUID();
+    r2.uuid = u2;
+
+    List<RecordWithStringUUID> expected = Arrays.asList(
+        new RecordWithStringUUID(), new RecordWithStringUUID());
+    expected.get(0).uuid = u1.toString();
+    expected.get(1).uuid = u2.toString();
+
+    File test = write(REFLECT, uuidSchema, r1, r2);
+
+    // verify that the field's type overrides the logical type
+    Schema uuidStringSchema = SchemaBuilder
+        .record(RecordWithStringUUID.class.getName())
+        .fields().requiredString("uuid").endRecord();
+
+    Assert.assertEquals("Should read uuid as String without UUID conversion",
+        expected,
+        read(REFLECT.createDatumReader(uuidStringSchema), test));
+
+    LogicalTypes.uuid().addToSchema(uuidStringSchema.getField("uuid").schema());
+    Assert.assertEquals("Should read uuid as String without UUID logical type",
+        expected,
+        read(ReflectData.get().createDatumReader(uuidStringSchema), test));
+  }
+
+  @Test
+  public void testWriteNullableUUID() throws IOException {
+    Schema nullableUuidSchema = SchemaBuilder.record(RecordWithUUID.class.getName())
+        .fields().optionalString("uuid").endRecord();
+    LogicalTypes.uuid().addToSchema(
+        nullableUuidSchema.getField("uuid").schema().getTypes().get(1));
+
+    UUID u1 = UUID.randomUUID();
+    UUID u2 = UUID.randomUUID();
+
+    RecordWithUUID r1 = new RecordWithUUID();
+    r1.uuid = u1;
+    RecordWithUUID r2 = new RecordWithUUID();
+    r2.uuid = u2;
+
+    List<RecordWithStringUUID> expected = Arrays.asList(
+        new RecordWithStringUUID(), new RecordWithStringUUID());
+    expected.get(0).uuid = u1.toString();
+    expected.get(1).uuid = u2.toString();
+
+    File test = write(REFLECT, nullableUuidSchema, r1, r2);
+
+    // verify that the field's type overrides the logical type
+    Schema nullableUuidStringSchema = SchemaBuilder
+        .record(RecordWithStringUUID.class.getName())
+        .fields().optionalString("uuid").endRecord();
+
+    Assert.assertEquals("Should read uuid as String without UUID conversion",
+        expected,
+        read(ReflectData.get().createDatumReader(nullableUuidStringSchema), test));
+  }
+
+  @Test
+  public void testWriteNullableUUIDReadRequiredString() throws IOException {
+    Schema nullableUuidSchema = SchemaBuilder.record(RecordWithUUID.class.getName())
+        .fields().optionalString("uuid").endRecord();
+    LogicalTypes.uuid().addToSchema(
+        nullableUuidSchema.getField("uuid").schema().getTypes().get(1));
+
+    UUID u1 = UUID.randomUUID();
+    UUID u2 = UUID.randomUUID();
+
+    RecordWithUUID r1 = new RecordWithUUID();
+    r1.uuid = u1;
+    RecordWithUUID r2 = new RecordWithUUID();
+    r2.uuid = u2;
+
+    List<RecordWithStringUUID> expected = Arrays.asList(
+        new RecordWithStringUUID(), new RecordWithStringUUID());
+    expected.get(0).uuid = u1.toString();
+    expected.get(1).uuid = u2.toString();
+
+    File test = write(REFLECT, nullableUuidSchema, r1, r2);
+
+    // verify that the field's type overrides the logical type
+    Schema uuidStringSchema = SchemaBuilder
+        .record(RecordWithStringUUID.class.getName())
+        .fields().requiredString("uuid").endRecord();
+
+    Assert.assertEquals("Should read uuid as String without UUID conversion",
+        expected,
+        read(REFLECT.createDatumReader(uuidStringSchema), test));
+  }
+
+  @Test
+  public void testReadUUIDMissingLogicalTypeUnsafe() throws IOException {
+    String unsafeValue = System.getProperty("avro.disable.unsafe");
+    try {
+      // only one FieldAccess can be set per JVM
+      System.clearProperty("avro.disable.unsafe");
+      Assume.assumeTrue(
+          ReflectionUtil.getFieldAccess() instanceof FieldAccessUnsafe);
+
+      Schema uuidSchema = SchemaBuilder.record(RecordWithUUID.class.getName())
+          .fields().requiredString("uuid").endRecord();
+      LogicalTypes.uuid().addToSchema(uuidSchema.getField("uuid").schema());
+
+      UUID u1 = UUID.randomUUID();
+
+      RecordWithStringUUID r1 = new RecordWithStringUUID();
+      r1.uuid = u1.toString();
+
+      File test = write(
+          ReflectData.get().getSchema(RecordWithStringUUID.class), r1);
+
+      RecordWithUUID datum = (RecordWithUUID) read(
+          ReflectData.get().createDatumReader(uuidSchema), test).get(0);
+      Object uuid = datum.uuid;
+      Assert.assertTrue("UUID should be a String (unsafe)",
+          uuid instanceof String);
+    } finally {
+      if (unsafeValue != null) {
+        System.setProperty("avro.disable.unsafe", unsafeValue);
+      }
+    }
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testReadUUIDMissingLogicalTypeReflect() throws IOException {
+    String unsafeValue = System.getProperty("avro.disable.unsafe");
+    try {
+      // only one FieldAccess can be set per JVM
+      System.setProperty("avro.disable.unsafe", "true");
+      Assume.assumeTrue(
+          ReflectionUtil.getFieldAccess() instanceof FieldAccessReflect);
+
+      Schema uuidSchema = SchemaBuilder.record(RecordWithUUID.class.getName())
+          .fields().requiredString("uuid").endRecord();
+      LogicalTypes.uuid().addToSchema(uuidSchema.getField("uuid").schema());
+
+      UUID u1 = UUID.randomUUID();
+
+      RecordWithStringUUID r1 = new RecordWithStringUUID();
+      r1.uuid = u1.toString();
+
+      File test = write(
+          ReflectData.get().getSchema(RecordWithStringUUID.class), r1);
+
+      read(ReflectData.get().createDatumReader(uuidSchema), test).get(0);
+    } finally {
+      if (unsafeValue != null) {
+        System.setProperty("avro.disable.unsafe", unsafeValue);
+      }
+    }
+  }
+
+  @Test(expected = DataFileWriter.AppendWriteException.class)
+  public void testWriteUUIDMissingLogicalType() throws IOException {
+    Schema uuidSchema = SchemaBuilder.record(RecordWithUUID.class.getName())
+        .fields().requiredString("uuid").endRecord();
+    LogicalTypes.uuid().addToSchema(uuidSchema.getField("uuid").schema());
+
+    UUID u1 = UUID.randomUUID();
+    UUID u2 = UUID.randomUUID();
+
+    RecordWithUUID r1 = new RecordWithUUID();
+    r1.uuid = u1;
+    RecordWithUUID r2 = new RecordWithUUID();
+    r2.uuid = u2;
+
+    // write without using REFLECT, which has the UUID conversion
+    File test = write(uuidSchema, r1, r2);
+
+    // verify that the field's type overrides the logical type
+    Schema uuidStringSchema = SchemaBuilder
+        .record(RecordWithStringUUID.class.getName())
+        .fields().requiredString("uuid").endRecord();
+
+    // the write above fails with an AppendWriteException wrapping a
+    // ClassCastException because, without the conversion, the UUID is not
+    // the CharSequence the writer expects internally
+    read(ReflectData.get().createDatumReader(uuidStringSchema), test);
+  }
+
+  @Test
+  public void testReadUUIDGenericRecord() throws IOException {
+    Schema uuidSchema = SchemaBuilder.record("RecordWithUUID")
+        .fields().requiredString("uuid").endRecord();
+    LogicalTypes.uuid().addToSchema(uuidSchema.getField("uuid").schema());
+
+    UUID u1 = UUID.randomUUID();
+    UUID u2 = UUID.randomUUID();
+
+    RecordWithStringUUID r1 = new RecordWithStringUUID();
+    r1.uuid = u1.toString();
+    RecordWithStringUUID r2 = new RecordWithStringUUID();
+    r2.uuid = u2.toString();
+
+    List<GenericData.Record> expected = Arrays.asList(
+        new GenericData.Record(uuidSchema), new GenericData.Record(uuidSchema));
+    expected.get(0).put("uuid", u1);
+    expected.get(1).put("uuid", u2);
+
+    File test = write(
+        ReflectData.get().getSchema(RecordWithStringUUID.class), r1, r2);
+
+    Assert.assertEquals("Should convert Strings to UUIDs",
+        expected, read(REFLECT.createDatumReader(uuidSchema), test));
+
+    // verify that the field's type overrides the logical type
+    Schema uuidStringSchema = SchemaBuilder
+        .record(RecordWithStringUUID.class.getName())
+        .fields().requiredString("uuid").endRecord();
+    LogicalTypes.uuid().addToSchema(uuidStringSchema.getField("uuid").schema());
+
+    Assert.assertEquals("Should not convert to UUID if accessor is String",
+        Arrays.asList(r1, r2),
+        read(REFLECT.createDatumReader(uuidStringSchema), test));
+  }
+
+  @Test
+  public void testReadUUIDArray() throws IOException {
+    Schema uuidArraySchema = SchemaBuilder.record(RecordWithUUIDArray.class.getName())
+        .fields()
+        .name("uuids").type().array().items().stringType().noDefault()
+        .endRecord();
+    LogicalTypes.uuid().addToSchema(
+        uuidArraySchema.getField("uuids").schema().getElementType());
+
+    UUID u1 = UUID.randomUUID();
+    UUID u2 = UUID.randomUUID();
+
+    GenericRecord r = new GenericData.Record(uuidArraySchema);
+    r.put("uuids", Arrays.asList(u1.toString(), u2.toString()));
+
+    RecordWithUUIDArray expected = new RecordWithUUIDArray();
+    expected.uuids = new UUID[] {u1, u2};
+
+    File test = write(uuidArraySchema, r);
+
+    Assert.assertEquals("Should convert Strings to UUIDs",
+        expected,
+        read(REFLECT.createDatumReader(uuidArraySchema), test).get(0));
+  }
+
+  @Test
+  public void testWriteUUIDArray() throws IOException {
+    Schema uuidArraySchema = SchemaBuilder.record(RecordWithUUIDArray.class.getName())
+        .fields()
+        .name("uuids").type().array().items().stringType().noDefault()
+        .endRecord();
+    LogicalTypes.uuid().addToSchema(
+        uuidArraySchema.getField("uuids").schema().getElementType());
+
+    Schema stringArraySchema = SchemaBuilder.record("RecordWithUUIDArray")
+        .fields()
+        .name("uuids").type().array().items().stringType().noDefault()
+        .endRecord();
+    stringArraySchema.getField("uuids").schema()
+        .addProp(SpecificData.CLASS_PROP, List.class.getName());
+
+    UUID u1 = UUID.randomUUID();
+    UUID u2 = UUID.randomUUID();
+
+    GenericRecord expected = new GenericData.Record(stringArraySchema);
+    List<String> uuids = new ArrayList<String>();
+    uuids.add(u1.toString());
+    uuids.add(u2.toString());
+    expected.put("uuids", uuids);
+
+    RecordWithUUIDArray r = new RecordWithUUIDArray();
+    r.uuids = new UUID[] {u1, u2};
+
+    File test = write(REFLECT, uuidArraySchema, r);
+
+    Assert.assertEquals("Should read UUIDs as Strings",
+        expected,
+        read(ReflectData.get().createDatumReader(stringArraySchema), test).get(0));
+  }
+
+  @Test
+  public void testReadUUIDList() throws IOException {
+    Schema uuidListSchema = SchemaBuilder.record(RecordWithUUIDList.class.getName())
+        .fields()
+        .name("uuids").type().array().items().stringType().noDefault()
+        .endRecord();
+    uuidListSchema.getField("uuids").schema().addProp(
+        SpecificData.CLASS_PROP, List.class.getName());
+    LogicalTypes.uuid().addToSchema(
+        uuidListSchema.getField("uuids").schema().getElementType());
+
+    UUID u1 = UUID.randomUUID();
+    UUID u2 = UUID.randomUUID();
+
+    GenericRecord r = new GenericData.Record(uuidListSchema);
+    r.put("uuids", Arrays.asList(u1.toString(), u2.toString()));
+
+    RecordWithUUIDList expected = new RecordWithUUIDList();
+    expected.uuids = Arrays.asList(u1, u2);
+
+    File test = write(uuidListSchema, r);
+
+    Assert.assertEquals("Should convert Strings to UUIDs",
+        expected, read(REFLECT.createDatumReader(uuidListSchema), test).get(0));
+  }
+
+  @Test
+  public void testWriteUUIDList() throws IOException {
+    Schema uuidListSchema = SchemaBuilder.record(RecordWithUUIDList.class.getName())
+        .fields()
+        .name("uuids").type().array().items().stringType().noDefault()
+        .endRecord();
+    uuidListSchema.getField("uuids").schema().addProp(
+        SpecificData.CLASS_PROP, List.class.getName());
+    LogicalTypes.uuid().addToSchema(
+        uuidListSchema.getField("uuids").schema().getElementType());
+
+    Schema stringArraySchema = SchemaBuilder.record("RecordWithUUIDArray")
+        .fields()
+        .name("uuids").type().array().items().stringType().noDefault()
+        .endRecord();
+    stringArraySchema.getField("uuids").schema()
+        .addProp(SpecificData.CLASS_PROP, List.class.getName());
+
+    UUID u1 = UUID.randomUUID();
+    UUID u2 = UUID.randomUUID();
+
+    GenericRecord expected = new GenericData.Record(stringArraySchema);
+    expected.put("uuids", Arrays.asList(u1.toString(), u2.toString()));
+
+    RecordWithUUIDList r = new RecordWithUUIDList();
+    r.uuids = Arrays.asList(u1, u2);
+
+    File test = write(REFLECT, uuidListSchema, r);
+
+    Assert.assertEquals("Should read UUIDs as Strings",
+        expected,
+        read(REFLECT.createDatumReader(stringArraySchema), test).get(0));
+  }
+
+  private static <D> List<D> read(DatumReader<D> reader, File file) throws IOException {
+    List<D> data = new ArrayList<D>();
+    FileReader<D> fileReader = null;
+
+    try {
+      fileReader = new DataFileReader<D>(file, reader);
+      for (D datum : fileReader) {
+        data.add(datum);
+      }
+    } finally {
+      if (fileReader != null) {
+        fileReader.close();
+      }
+    }
+
+    return data;
+  }
+
+  private <D> File write(Schema schema, D... data) throws IOException {
+    return write(ReflectData.get(), schema, data);
+  }
+
+  @SuppressWarnings("unchecked")
+  private <D> File write(GenericData model, Schema schema, D... data) throws IOException {
+    File file = temp.newFile();
+    DatumWriter<D> writer = model.createDatumWriter(schema);
+    DataFileWriter<D> fileWriter = new DataFileWriter<D>(writer);
+
+    try {
+      fileWriter.create(schema, file);
+      for (D datum : data) {
+        fileWriter.append(datum);
+      }
+    } finally {
+      fileWriter.close();
+    }
+
+    return file;
+  }
+}
+
+class RecordWithUUID {
+  UUID uuid;
+
+  @Override
+  public int hashCode() {
+    return uuid.hashCode();
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (obj == null) {
+      return false;
+    }
+    if (!(obj instanceof RecordWithUUID)) {
+      return false;
+    }
+    RecordWithUUID that = (RecordWithUUID) obj;
+    return this.uuid.equals(that.uuid);
+  }
+}
+
+class RecordWithStringUUID {
+  String uuid;
+
+  @Override
+  public int hashCode() {
+    return uuid.hashCode();
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (obj == null) {
+      return false;
+    }
+    if (!(obj instanceof RecordWithStringUUID)) {
+      return false;
+    }
+    RecordWithStringUUID that = (RecordWithStringUUID) obj;
+    return this.uuid.equals(that.uuid);
+  }
+}
+
+class RecordWithUUIDArray {
+  UUID[] uuids;
+
+  @Override
+  public int hashCode() {
+    return Arrays.hashCode(uuids);
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (obj == null) {
+      return false;
+    }
+    if (!(obj instanceof RecordWithUUIDArray)) {
+      return false;
+    }
+    RecordWithUUIDArray that = (RecordWithUUIDArray) obj;
+    return Arrays.equals(this.uuids, that.uuids);
+  }
+}
+
+class RecordWithUUIDList {
+  List<UUID> uuids;
+
+  @Override
+  public int hashCode() {
+    return uuids.hashCode();
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (obj == null) {
+      return false;
+    }
+    if (!(obj instanceof RecordWithUUIDList)) {
+      return false;
+    }
+    RecordWithUUIDList that = (RecordWithUUIDList) obj;
+    return this.uuids.equals(that.uuids);
+  }
+}
+
diff --git a/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectionUtil.java b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectionUtil.java
new file mode 100644
index 0000000..4414d20
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectionUtil.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.reflect;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.junit.Test;
+
+public class TestReflectionUtil {
+
+  @Test
+  public void testUnsafeUtil() {
+    new Tester().checkUnsafe();
+  }
+
+  @Test
+  public void testUnsafeWhenNotExists() throws Exception {
+    ClassLoader cl = new NoUnsafe();
+    Class<?> testerClass = cl.loadClass(Tester.class.getName());
+    testerClass.getDeclaredMethod("checkUnsafe").invoke(testerClass.newInstance());
+  }
+  
+  public static final class Tester {
+    public Tester() {}
+    public void checkUnsafe() {
+      ReflectionUtil.getFieldAccess();
+    }
+    
+  }
+
+  private static final class NoUnsafe extends ClassLoader {
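+    // Simulates a JVM without sun.misc.Unsafe: sun.misc.Unsafe is reported
+    // missing, and org.apache.avro classes are redefined by this loader so
+    // ReflectionUtil is re-initialized and must fall back to plain reflection.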
+    private ClassLoader parent = TestReflectionUtil.class.getClassLoader();
+
+    @Override
+    public java.lang.Class<?> loadClass(String name)
+        throws ClassNotFoundException {
+      Class<?> clazz = findLoadedClass(name);
+      if (clazz != null) {
+        return clazz;
+      }
+      if ("sun.misc.Unsafe".equals(name)) {
+        throw new ClassNotFoundException(name);
+      }
+      if (!name.startsWith("org.apache.avro.")) {
+        return parent.loadClass(name);
+      }
+
+      InputStream data = parent.getResourceAsStream(name.replace('.', '/')
+          + ".class");
+      byte[] buf = new byte[10240]; // big enough, too lazy to loop
+      int size;
+      try {
+        size = data.read(buf);
+      } catch (IOException e) {
+        throw new ClassNotFoundException();
+      }
+      clazz = defineClass(name, buf, 0, size);
+      resolveClass(clazz);
+      return clazz;
+    }
+
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/specific/TestSpecificData.java b/lang/java/avro/src/test/java/org/apache/avro/specific/TestSpecificData.java
new file mode 100644
index 0000000..daa5dc6
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/specific/TestSpecificData.java
@@ -0,0 +1,172 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.specific;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.junit.Before;
+import org.junit.Test;
+
+/*
 * Verifies that a ["null", "int"] union schema maps to the wrapper class
 * Integer rather than the primitive int: if integerClass were primitive,
 * looking up a method that takes the wrapper via reflection would throw
 * NoSuchMethodException.
+ */
+public class TestSpecificData {
+
+  private Class<?> intClass;
+  private Class<?> integerClass;
+
+  @Before
+  public void setUp() {
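+    // Type.INT maps to primitive int, but a ["null", "int"] union must map
+    // to the wrapper Integer so that null can be represented.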
+    Schema intSchema = Schema.create(Type.INT);
+    intClass = SpecificData.get().getClass(intSchema);
+    Schema nullSchema = Schema.create(Type.NULL);
+    Schema nullIntUnionSchema = Schema.createUnion(Arrays.asList(nullSchema, intSchema));
+    integerClass = SpecificData.get().getClass(nullIntUnionSchema);
+  }
+
+  @Test
+  public void testClassTypes() {
+    assertTrue(intClass.isPrimitive());
+    assertFalse(integerClass.isPrimitive());
+  }
+
+  @Test
+  public void testPrimitiveParam() throws Exception {
+    assertNotNull(Reflection.class.getMethod("primitive", intClass));
+  }
+
+  @Test(expected = NoSuchMethodException.class)
+  public void testPrimitiveParamError() throws Exception {
+    Reflection.class.getMethod("primitiveWrapper", intClass);
+  }
+
+  @Test
+  public void testPrimitiveWrapperParam() throws Exception {
+    assertNotNull(Reflection.class.getMethod("primitiveWrapper", integerClass));
+  }
+
+  @Test(expected = NoSuchMethodException.class)
+  public void testPrimitiveWrapperParamError() throws Exception {
+    Reflection.class.getMethod("primitive", integerClass);
+  }
+
+  static class Reflection {
+    public void primitive(int i) {}
+    public void primitiveWrapper(Integer i) {}
+  }
+
+  public static class TestRecord extends SpecificRecordBase {
+    private static final Schema SCHEMA = Schema.createRecord("TestRecord", null, null, false);
+    static {
+      List<Field> fields = new ArrayList<Field>();
+      fields.add(new Field("x", Schema.create(Type.INT), null, null));
+      Schema stringSchema = Schema.create(Type.STRING);
+      GenericData.setStringType(stringSchema, GenericData.StringType.String);
+      fields.add(new Field("y", stringSchema, null, null));
+      SCHEMA.setFields(fields);
+    }
+    private int x;
+    private String y;
+
+    @Override
+    public void put(int i, Object v) {
+      switch (i) {
+      case 0: x = (Integer) v; break;
+      case 1: y = (String) v; break;
+      default: throw new RuntimeException();
+      }
+    }
+
+    @Override
+    public Object get(int i) {
+      switch (i) {
+      case 0: return x;
+      case 1: return y;
+      }
+      throw new RuntimeException();
+    }
+
+    @Override
+    public Schema getSchema() {
+      return SCHEMA;
+    }
+
+  }
+
+  @Test
+  public void testSpecificRecordBase() {
+    final TestRecord record = new TestRecord();
+    record.put("x", 1);
+    record.put("y", "str");
+    assertEquals(1, record.get("x"));
+    assertEquals("str", record.get("y"));
+  }
+
+  @Test public void testExternalizeable() throws Exception {
+    final TestRecord before = new TestRecord();
+    before.put("x", 1);
+    before.put("y", "str");
+    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+    ObjectOutputStream out = new ObjectOutputStream(bytes);
+    out.writeObject(before);
+    out.close();
+
+    ObjectInputStream in =
+      new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()));
+    TestRecord after = (TestRecord)in.readObject();
+
+    assertEquals(before, after);
+  }
+
+  /** Tests that a non-Stringable datum is rejected by specific writers. */
+  @Test
+  public void testNonStringable() throws Exception {
+    final Schema string = Schema.create(Type.STRING);
+    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    final Encoder encoder = EncoderFactory.get().directBinaryEncoder(baos, null);
+    final DatumWriter<Object> writer = new SpecificDatumWriter<Object>(string);
+    try {
+      writer.write(new Object(), encoder);
+      fail("Non stringable object should be rejected.");
+    } catch (ClassCastException cce) {
+      // Expected error
+    }
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/util/CaseFinder.java b/lang/java/avro/src/test/java/org/apache/avro/util/CaseFinder.java
new file mode 100644
index 0000000..ddfc2a8
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/util/CaseFinder.java
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.util;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.util.List;
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+/** Parser for files containing test cases consisting of
+ * <code><String,String></code> pairs, where the first string is
+ * the input to the test case, and the second string is the expected
+ * output of the test case.
+ *
+ * <p> A test-case file is a sequence of <a
+ * href="en.wikipedia.org/wiki/Here_document">here documents</a>
+ * ("heredocs"), very similar in syntax to Unix Shell heredocs.
+ * Heredocs labeled "INPUT" indicate the start of a new case, and
+ * these INPUT heredocs the inputs of test cases.  Following an
+ * "INPUT" heredoc can more zero or more "expected-output" heredocs.
+ * Each of these expected-output heredocs defines what we call a
+ * <dfn>subcase</dfn>.  The assumption here is that for each
+ * interesting test input, there are often multiple different tests
+ * one could run, each with different expected outputs.
+ *
+ * <p> Consumers of this class call the {@link #find} method to find
+ * all subcases marked with a given label.  For example, imagine the
+ * following test-case file:
+ * <blockquote> <pre>
+ *    <<INPUT 0
+ *    <<VALUE 0
+ *    <<PPRINT 0
+ *    <<INPUT 1+1
+ *    <<VALUE 2
+ *    <<PPRINT 1 + 1
+ *    <<SEXP (+ 1 1)
+ *    SEXP
+ * </pre> </blockquote>
+ * Calling {@link #find} on the label "VALUE" will return two test
+ * cases, the pair <code><"0","0"></code> and
+ * <code><"1+1","2"></code>.  Calling it on the label "PPRINT"
+ * will return <code><"0","0"></code> and <code><"1+1","1 +
+ * 1"></code>.  Notice that there need not be a subcase for every
+ * INPUT.  In the case of "SEXP", for example, {@link #find} will
+ * return only the single pair <code><"1+1","(+ 1 1)"></code>.
+ *
+ * <p> There are two forms of heredocs, single-line and multi-line.
+ * The examples above (except "SEXP") are single-line heredocs.  The
+ * general syntax for these is:
+ * <blockquote> <pre>
+ * ^<<([a-zA-Z][_a-zA-Z0-9]*) (.*)$
+ * </pre> </blockquote>
+ * The first group in this regex is the label of the heredoc, and the
+ * second group is the text of the heredoc.  A single space separates
+ * the two groups and is not part of the heredoc (subsequent spaces
+ * <em>will</em> be included in the heredoc).  A "line terminator" as
+ * defined by the Java language (i.e., CR, LF, or CR followed by LF)
+ * terminates a single-line heredoc but is not included in the text
+ * of the heredoc.
+ *
+ * <p> As the name implies, multi-line heredocs are spread across
+ * multiple lines, as in this example:
+ * <blockquote> <pre>
+ *    <<INPUT
+ *    1
+ *    +1 +
+ *    1
+ *    INPUT
+ *    <<VALUE 3
+ *    <<PPRINT 1 + 1 + 1
+ * </pre> </blockquote>
+ * In this case, the input to the test case is spread across multiple
+ * lines (the line terminators in these documents are preserved as
+ * part of the document text).  Multi-line heredocs can be used for
+ * both the inputs of test cases and their expected outputs.
+ *
+ * <p> The syntax of multi-line heredocs obeys the following pseudo-regex:
+ * <blockquote> <pre>
+ * ^<<([a-zA-Z][_a-zA-Z0-9]*)$(.*)$^\1$
+ * </pre> </blockquote>
+ * That is, as illustrated by the example, a multi-line heredoc named
+ * "LABEL" consists of the text <code><lt;LABEL</code> on a line by
+ * itself, followed by the text of the heredoc, followed by the text
+ * <code>LABEL</code> on a line by itself (if LABEL starts a line but
+ * is not the <em>only</em> text on that line, then that entire line
+ * is part of the heredoc, and the heredoc is not terminated by that
+ * line).
+ *
+ * <p>In multi-line heredocs, neither the line terminator that
+ * terminates the start of the document, nor the one just before the
+ * label that ends the heredoc, is part of the text of the heredoc.
+ * Thus, for example, the text of the multi-line input from above
+ * would be exactly <code>"1\n+1 +\n1"</code>.  If you want a new
+ * line at the end of a multi-line heredoc, put a blank line before
+ * the label ending the heredoc.
+ *
+ * <p>Also in multi-line heredocs, line-terminators within the heredoc
+ * are normalized to line-feeds ('\n').  Thus, for example, when a
+ * test file written on a Windows machine is parsed on any machine,
+ * the Windows-style line terminators within heredocs will be
+ * translated to Unix-style line terminators, no matter what platform
+ * the tests are run on.
+ *
+ * <p> Note that lines between heredocs are ignored, and can be used
+ * to provide spacing between and/or commentary on the test cases.
+ */
+public class CaseFinder {
+  /** Scan test-case file <code>in</code> looking for test subcases
+    * marked with <code>caseLabel</code>.  Any such cases are appended
+    * (in order) to the "cases" parameter.  If <code>caseLabel</code>
+    * equals the string <code>"INPUT"</code>, then returns the list of
+    * <<i>input</i>, <code>null</code>> pairs for <i>input</i>
+    * equal to all heredoc's named INPUT's found in the input
+    * stream. */
+  public static List<Object[]> find(BufferedReader in, String label,
+                                    List<Object[]> cases)
+    throws IOException
+  {
+    if (! Pattern.matches(LABEL_REGEX, label))
+      throw new IllegalArgumentException("Bad case subcase label: " + label);
+
+    final String subcaseMarker = "<<" + label;
+
+    for (String line = in.readLine();;) {
+      // Find next new case
+      while (line != null && !line.startsWith(NEW_CASE_MARKER))
+        line = in.readLine();
+      if (line == null) break;
+      String input;
+      input = processHereDoc(in, line);
+
+      if (label.equals(NEW_CASE_NAME)) {
+        cases.add(new Object[] { input, null });
+        line = in.readLine();
+        continue;
+      }
+
+      // Check to see if there's a subcase named "label" for that case
+      do {
+        line = in.readLine();
+      } while (line != null && (!line.startsWith(NEW_CASE_MARKER)
+                                && !line.startsWith(subcaseMarker)));
+      if (line == null || line.startsWith(NEW_CASE_MARKER)) continue;
+      String expectedOutput = processHereDoc(in, line);
+
+      cases.add(new Object[] { input, expectedOutput });
+    }
+    in.close();
+    return cases;
+  }
+
+  private static final String NEW_CASE_NAME = "INPUT";
+  private static final String NEW_CASE_MARKER = "<<"+NEW_CASE_NAME;
+  private static final String LABEL_REGEX = "[a-zA-Z][_a-zA-Z0-9]*";
+  private static final Pattern START_LINE_PATTERN
+    = Pattern.compile("^<<("+LABEL_REGEX+")(.*)$");
+
+  /** Reads and returns content of a heredoc.  Assumes we just read a
+    * start-of-heredoc marker line (passed as <code>docStart</code>).
+    * Normalizes arbitrary line terminators to line feeds ('\n'), but
+    * strips the newline from the final line of the heredoc.  Throws
+    * IOException if EOF is reached before the heredoc is terminated. */
+  private static String processHereDoc(BufferedReader in, String docStart)
+    throws IOException
+  {
+    Matcher m = START_LINE_PATTERN.matcher(docStart);
+    if (! m.matches())
+      throw new IllegalArgumentException("Wasn't given the start of a heredoc (\""+docStart+"\")");
+    String docName = m.group(1);
+
+    // Determine if this is a single-line heredoc, and process if it is
+    String singleLineText = m.group(2);
+    if (singleLineText.length() != 0) {
+      if (! singleLineText.startsWith(" ")) 
+        throw new IOException("Single-line heredoc missing initial space (\""+docStart+"\")");
+      return singleLineText.substring(1);
+    }
+
+    // Process multi-line heredocs
+    StringBuilder result = new StringBuilder();
+    String line = in.readLine();
+    String prevLine = "";
+    boolean firstTime = true;
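+    // append each line one read behind: a line is flushed (with '\n') only
+    // once the next read shows the heredoc continues, so the newline before
+    // the terminating label never reaches the result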
+    while (line != null && !line.equals(docName)) {
+      if (! firstTime) result.append(prevLine).append('\n');
+      else firstTime = false;
+      prevLine = line;
+      line = in.readLine();
+    }
+    if (line == null)
+      throw new IOException("Here document (" + docName
+                            + ") terminated by end-of-file.");
+    return result.append(prevLine).toString();
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/util/TestCaseFinder.java b/lang/java/avro/src/test/java/org/apache/avro/util/TestCaseFinder.java
new file mode 100644
index 0000000..73701df
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/util/TestCaseFinder.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.util;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.io.BufferedReader;
+import java.io.StringReader;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+import org.junit.experimental.runners.Enclosed;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+@RunWith(Enclosed.class)
+public class TestCaseFinder {
+
+  @RunWith(Parameterized.class)
+  public static class SimpleCases {
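+    // each case: raw test-file text, the heredoc label to search for, and
+    // the expected <input, expected-output> pairs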
+    String input, label;
+    List<Object[]> expectedOutput;
+
+    public SimpleCases(String input, String label, Object[][] ex) {
+      this.input = input;
+      this.label = label;
+      this.expectedOutput = Arrays.asList(ex);
+    }
+
+    @Parameters
+    public static List<Object[]> cases() {
+      List<Object[]> result = new ArrayList<Object[]>();
+      result.add(new Object[] { "", "foo", new Object[][] { } });
+      result.add(new Object[] { "<<INPUT a\n<<OUTPUT b", "OUTPUT",
+                                new Object[][] { {"a","b"} } });
+      result.add(new Object[] { "<<INPUT a\n<<OUTPUT b\n", "OUTPUT",
+                                new Object[][] { {"a","b"} } });
+      result.add(new Object[] { "<<INPUT a\n<<OUTPUT b\n\n", "OUTPUT",
+                                new Object[][] { {"a","b"} } });
+      result.add(new Object[] { "<<INPUT a\r<<OUTPUT b", "OUTPUT",
+                                new Object[][] { {"a","b"} } });
+      result.add(new Object[] { "// This is a test\n<<INPUT a\n\n\n<<OUTPUT b",
+                                "OUTPUT",
+                                new Object[][] { {"a","b"} } });
+      result.add(new Object[] { "<<INPUT a\n<<OUTPUT\nb\nOUTPUT", "OUTPUT",
+                                new Object[][] { {"a","b"} } });
+      result.add(new Object[] { "<<INPUT a\n<<OUTPUT\nb\nOUTPUT", "OUTPUT",
+                                new Object[][] { {"a","b"} } });
+      result.add(new Object[] { "<<INPUT a\n<<OUTPUT\nb\n\nOUTPUT", "OUTPUT",
+                                new Object[][] { {"a","b\n"} } });
+      result.add(new Object[] { "<<INPUT a\n<<OUTPUT\n\n  b  \n\nOUTPUT",
+                                "OUTPUT",
+                                new Object[][] { {"a","\n  b  \n"} } });
+      result.add(new Object[] { "<<INPUT a\n<<O b\n<<INPUT c\n<<O d", "O",
+                                new Object[][] { {"a","b"}, {"c","d"} } });
+      result.add(new Object[] { "<<INPUT a\n<<O b\n<<F z\n<<INPUT c\n<<O d",
+                                "O",
+                                new Object[][] { {"a","b"}, {"c","d"} } });
+      result.add(new Object[] { "<<INPUT a\n<<O b\n<<F z\n<<INPUT c\n<<O d",
+                                "F",
+                                new Object[][] { {"a","z"} } });
+      result.add(new Object[] { "<<INPUT a\n<<O b\n<<F z\n<<INPUT\nc\nINPUT\n<<O d\n<<INPUT e",
+                                "INPUT",
+                                new Object[][] { {"a",null}, {"c",null}, {"e", null} } });
+      return result;
+    }
+
+    @Test public void testOutput() throws Exception {
+      List<Object[]> result = new ArrayList<Object[]>();
+      CaseFinder.find(mk(input), label, result);
+      assertTrue(pr(result), eq(result, expectedOutput));
+    }
+  }
+
+  public static class NonParameterized {
+    @Test (expected=java.lang.IllegalArgumentException.class)
+    public void testBadDocLabel1() throws Exception {
+      List<Object[]> result = new ArrayList<Object[]>();
+      CaseFinder.find(mk("<<INPUT blah"), "", result);
+    }
+
+    @Test (expected=java.lang.IllegalArgumentException.class)
+    public void testBadDocLabel2() throws Exception {
+      List<Object[]> result = new ArrayList<Object[]>();
+      CaseFinder.find(mk("<<INPUT blah"), "kill-er", result);
+    }
+
+    @Test (expected=java.io.IOException.class)
+    public void testBadSingleLineHeredoc() throws Exception {
+      List<Object[]> result = new ArrayList<Object[]>();
+      CaseFinder.find(mk("<<INPUTblah"), "foo", result);
+    }
+
+    @Test (expected=java.io.IOException.class)
+    public void testUnterminatedHeredoc() throws Exception {
+      List<Object[]> result = new ArrayList<Object[]>();
+      CaseFinder.find(mk("<<INPUT"), "foo", result);
+    }
+  }
+
+  private static BufferedReader mk(String s)
+  { return new BufferedReader(new StringReader(s)); }
+
+  private static String pr(List<Object[]> t) {
+    StringBuilder b = new StringBuilder();
+    b.append("{ ");
+    boolean firstTime = true;
+    for (Object[] p: t) {
+      if (! firstTime) b.append(", "); else firstTime = false;
+      b.append("{ \"").append(p[0]).append("\", \"")
+        .append(p[1]).append("\" }");
+    }
+    b.append("}");
+    return b.toString();
+  }
+
+  private static boolean eq(List<Object[]> l1, List<Object[]> l2) {
+    if (l1 == null || l2 == null) return l1 == l2;
+    if (l1.size() != l2.size()) return false;
+    for (int i = 0; i < l1.size(); i++)
+      if (! Arrays.equals(l1.get(i), l2.get(i))) return false;
+    return true;
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/util/TestUtf8.java b/lang/java/avro/src/test/java/org/apache/avro/util/TestUtf8.java
new file mode 100644
index 0000000..758e3e5
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/util/TestUtf8.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.util;
+
+import java.io.UnsupportedEncodingException;
+
+import org.junit.Test;
+
+import static junit.framework.Assert.assertSame;
+import static org.junit.Assert.assertEquals;
+
+public class TestUtf8 {
+  @Test public void testByteConstructor() throws Exception {
+    byte[] bs = "Foo".getBytes("UTF-8");
+    Utf8 u = new Utf8(bs);
+    assertEquals(bs.length, u.getLength());
+    for (int i=0; i<bs.length; i++) {
+      assertEquals(bs[i], u.getBytes()[i]);
+    }
+  }
+  
+  @Test public void testArrayReusedWhenLargerThanRequestedSize() throws UnsupportedEncodingException {
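+    // setByteLength within the existing capacity must reuse the backing
+    // array rather than reallocate; assertSame checks object identity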
+    byte[] bs = "55555".getBytes("UTF-8");    
+    Utf8 u = new Utf8(bs);
+    assertEquals(5, u.getByteLength());
+    byte[] content = u.getBytes();
+    u.setByteLength(3);
+    assertEquals(3, u.getByteLength());
+    assertSame(content, u.getBytes());
+    u.setByteLength(4);
+    assertEquals(4, u.getByteLength());
+    assertSame(content, u.getBytes());
+  }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/util/internal/TestJacksonUtils.java b/lang/java/avro/src/test/java/org/apache/avro/util/internal/TestJacksonUtils.java
new file mode 100644
index 0000000..79937cc
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/util/internal/TestJacksonUtils.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.util.internal;
+
+import java.util.Collections;
+import org.apache.avro.JsonProperties;
+import org.apache.avro.Schema;
+import org.apache.avro.SchemaBuilder;
+import org.codehaus.jackson.node.ArrayNode;
+import org.codehaus.jackson.node.BooleanNode;
+import org.codehaus.jackson.node.DoubleNode;
+import org.codehaus.jackson.node.IntNode;
+import org.codehaus.jackson.node.JsonNodeFactory;
+import org.codehaus.jackson.node.LongNode;
+import org.codehaus.jackson.node.NullNode;
+import org.codehaus.jackson.node.ObjectNode;
+import org.codehaus.jackson.node.TextNode;
+import org.junit.Test;
+
+import static org.apache.avro.util.internal.JacksonUtils.toJsonNode;
+import static org.apache.avro.util.internal.JacksonUtils.toObject;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+public class TestJacksonUtils {
+
+  enum Direction {
+    UP, DOWN;
+  }
+
+  @Test
+  public void testToJsonNode() {
+    assertEquals(null, toJsonNode(null));
+    assertEquals(NullNode.getInstance(), toJsonNode(JsonProperties.NULL_VALUE));
+    assertEquals(BooleanNode.TRUE, toJsonNode(true));
+    assertEquals(IntNode.valueOf(1), toJsonNode(1));
+    assertEquals(LongNode.valueOf(2), toJsonNode(2L));
+    assertEquals(DoubleNode.valueOf(1.0), toJsonNode(1.0f));
+    assertEquals(DoubleNode.valueOf(2.0), toJsonNode(2.0));
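+    // a byte array is represented as a JSON string with one char per byte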
+    assertEquals(TextNode.valueOf("\u0001\u0002"), toJsonNode(new byte[] { 1, 2 }));
+    assertEquals(TextNode.valueOf("a"), toJsonNode("a"));
+    assertEquals(TextNode.valueOf("UP"), toJsonNode(Direction.UP));
+
+    ArrayNode an = JsonNodeFactory.instance.arrayNode();
+    an.add(1);
+    assertEquals(an, toJsonNode(Collections.singletonList(1)));
+
+    ObjectNode on = JsonNodeFactory.instance.objectNode();
+    on.put("a", 1);
+    assertEquals(on, toJsonNode(Collections.singletonMap("a", 1)));
+  }
+
+  @Test
+  public void testToObject() {
+    assertEquals(null, toObject(null));
+    assertEquals(JsonProperties.NULL_VALUE, toObject(NullNode.getInstance()));
+    assertEquals(true, toObject(BooleanNode.TRUE));
+    assertEquals(1, toObject(IntNode.valueOf(1)));
+    assertEquals(2L, toObject(IntNode.valueOf(2), Schema.create(Schema.Type.LONG)));
+    assertEquals(1.0f, toObject(DoubleNode.valueOf(1.0), Schema.create(Schema.Type.FLOAT)));
+    assertEquals(2.0, toObject(DoubleNode.valueOf(2.0)));
+    assertEquals(TextNode.valueOf("\u0001\u0002"), toJsonNode(new byte[]{1, 2}));
+    assertArrayEquals(new byte[]{1, 2},
+        (byte[]) toObject(TextNode.valueOf("\u0001\u0002"), Schema.create(Schema.Type.BYTES)));
+    assertEquals("a", toObject(TextNode.valueOf("a")));
+    assertEquals("UP", toObject(TextNode.valueOf("UP"),
+        SchemaBuilder.enumeration("Direction").symbols("UP", "DOWN")));
+
+    ArrayNode an = JsonNodeFactory.instance.arrayNode();
+    an.add(1);
+    assertEquals(Collections.singletonList(1), toObject(an));
+
+    ObjectNode on = JsonNodeFactory.instance.objectNode();
+    on.put("a", 1);
+    assertEquals(Collections.singletonMap("a", 1), toObject(on));
+    assertEquals(Collections.singletonMap("a", 1L), toObject(on,
+        SchemaBuilder.record("r").fields().requiredLong("a").endRecord()));
+
+    assertEquals(JsonProperties.NULL_VALUE, toObject(NullNode.getInstance(),
+        SchemaBuilder.unionOf().nullType().and().intType().endUnion()));
+
+    assertEquals("a", toObject(TextNode.valueOf("a"),
+        SchemaBuilder.unionOf().stringType().and().intType().endUnion()));
+  }
+
+}
diff --git a/lang/java/avro/src/test/resources/SchemaBuilder.avsc b/lang/java/avro/src/test/resources/SchemaBuilder.avsc
new file mode 100644
index 0000000..f87f3a9
--- /dev/null
+++ b/lang/java/avro/src/test/resources/SchemaBuilder.avsc
@@ -0,0 +1,284 @@
+{
+  "type" : "record",
+  "name" : "recordAll",
+  "fields" : [ {
+    "name" : "requiredBoolean",
+    "type" : "boolean"
+  }, {
+    "name" : "requiredBooleanWithDefault",
+    "type" : "boolean",
+    "default" : true
+  }, {
+    "name" : "optionalBoolean",
+    "type" : [ "null", "boolean" ],
+    "default" : null
+  }, {
+    "name" : "optionalBooleanWithDefault",
+    "type" : [ "boolean", "null" ],
+    "default" : true
+  }, {
+    "name" : "requiredInt",
+    "type" : "int"
+  }, {
+    "name" : "optionalInt",
+    "type" : [ "null", "int" ],
+    "default" : null
+  }, {
+    "name" : "optionalIntWithDefault",
+    "type" : [ "int", "null" ],
+    "default" : 1
+  }, {
+    "name" : "requiredLong",
+    "type" : "long"
+  }, {
+    "name" : "optionalLong",
+    "type" : [ "null", "long" ],
+    "default" : null
+  }, {
+    "name" : "optionalLongWithDefault",
+    "type" : [ "long", "null" ],
+    "default" : 1
+  }, {
+    "name" : "requiredFloat",
+    "type" : "float"
+  }, {
+    "name" : "optionalFloat",
+    "type" : [ "null", "float" ],
+    "default" : null
+  }, {
+    "name" : "optionalFloatWithDefault",
+    "type" : [ "float", "null" ],
+    "default" : 1.0
+  }, {
+    "name" : "requiredDouble",
+    "type" : "double"
+  }, {
+    "name" : "optionalDouble",
+    "type" : [ "null", "double" ],
+    "default" : null
+  }, {
+    "name" : "optionalDoubleWithDefault",
+    "type" : [ "double", "null" ],
+    "default" : 1.0
+  }, {
+    "name" : "requiredBytes",
+    "type" : "bytes"
+  }, {
+    "name" : "optionalBytes",
+    "type" : [ "null", "bytes" ],
+    "default" : null
+  }, {
+    "name" : "optionalBytesWithDefault",
+    "type" : [ "bytes", "null" ],
+    "default" : "A"
+  }, {
+    "name" : "requiredString",
+    "type" : "string"
+  }, {
+    "name" : "optionalString",
+    "type" : [ "null", "string" ],
+    "default" : null
+  }, {
+    "name" : "optionalStringWithDefault",
+    "type" : [ "string", "null" ],
+    "default" : "a"
+  }, {
+    "name" : "requiredRecord",
+    "type" : {
+      "type" : "record",
+      "name" : "nestedRequiredRecord",
+      "fields" : [ {
+        "name" : "nestedRequiredBoolean",
+        "type" : "boolean"
+      } ]
+    }
+  }, {
+    "name" : "optionalRecord",
+    "type" : [ "null", {
+      "type" : "record",
+      "name" : "nestedOptionalRecord",
+      "fields" : [ {
+        "name" : "nestedRequiredBoolean",
+        "type" : "boolean"
+      } ]
+    } ],
+    "default" : null
+  }, {
+    "name" : "optionalRecordWithDefault",
+    "type" : [ {
+      "type" : "record",
+      "name" : "nestedOptionalRecordWithDefault",
+      "fields" : [ {
+        "name" : "nestedRequiredBoolean",
+        "type" : "boolean"
+      } ]
+    }, "null" ],
+    "default" : {
+      "nestedRequiredBoolean" : true
+    }
+  }, {
+    "name" : "requiredEnum",
+    "type" : {
+      "type" : "enum",
+      "name" : "requiredEnum",
+      "symbols" : [ "a", "b" ]
+    }
+  }, {
+    "name" : "optionalEnum",
+    "type" : [ "null", {
+      "type" : "enum",
+      "name" : "optionalEnum",
+      "symbols" : [ "a", "b" ]
+    } ],
+    "default" : null
+  }, {
+    "name" : "optionalEnumWithDefault",
+    "type" : [ {
+      "type" : "enum",
+      "name" : "optionalEnumWithDefault",
+      "symbols" : [ "a", "b" ]
+    }, "null" ],
+    "default" : "b"
+  }, {
+    "name" : "requiredArray",
+    "type" : {
+      "type" : "array",
+      "items" : "string"
+    }
+  }, {
+    "name" : "optionalArray",
+    "type" : [ "null", {
+      "type" : "array",
+      "items" : "string"
+    } ],
+    "default" : null
+  }, {
+    "name" : "optionalArrayWithDefault",
+    "type" : [ {
+      "type" : "array",
+      "items" : "string"
+    }, "null" ],
+    "default" : [ "a" ]
+  }, {
+    "name" : "requiredMap",
+    "type" : {
+      "type" : "map",
+      "values" : "string"
+    }
+  }, {
+    "name" : "optionalMap",
+    "type" : [ "null", {
+      "type" : "map",
+      "values" : "string"
+    } ],
+    "default" : null
+  }, {
+    "name" : "optionalMapWithDefault",
+    "type" : [ {
+      "type" : "map",
+      "values" : "string"
+    }, "null" ],
+    "default" : {
+      "a" : "b"
+    }
+  }, {
+    "name" : "requiredFixed",
+    "type" : {
+      "type" : "fixed",
+      "name" : "requiredFixed",
+      "size" : 1
+    }
+  }, {
+    "name" : "optionalFixed",
+    "type" : [ "null", {
+      "type" : "fixed",
+      "name" : "optionalFixed",
+      "size" : 1
+    } ],
+    "default" : null
+  }, {
+    "name" : "optionalFixedWithDefault",
+    "type" : [ {
+      "type" : "fixed",
+      "name" : "optionalFixedWithDefault",
+      "size" : 1
+    }, "null" ],
+    "default" : "A"
+  }, {
+    "name" : "unionType",
+    "type" : [ "long", "null" ]
+  }, {
+    "name" : "unionBooleanWithDefault",
+    "type" : [ "boolean", "int" ],
+    "default" : true
+  }, {
+    "name" : "unionIntWithDefault",
+    "type" : [ "int", "null" ],
+    "default" : 1
+  }, {
+    "name" : "unionLongWithDefault",
+    "type" : [ "long", "int" ],
+    "default" : 1
+  }, {
+    "name" : "unionFloatWithDefault",
+    "type" : [ "float", "int" ],
+    "default" : 1.0
+  }, {
+    "name" : "unionDoubleWithDefault",
+    "type" : [ "double", "int" ],
+    "default" : 1.0
+  }, {
+    "name" : "unionBytesWithDefault",
+    "type" : [ "bytes", "int" ],
+    "default" : "A"
+  }, {
+    "name" : "unionStringWithDefault",
+    "type" : [ "string", "int" ],
+    "default" : "a"
+  }, {
+    "name" : "unionRecordWithDefault",
+    "type" : [ {
+      "type" : "record",
+      "name" : "nestedUnionRecordWithDefault",
+      "fields" : [ {
+        "name" : "nestedRequiredBoolean",
+        "type" : "boolean"
+      } ]
+    }, "int" ],
+    "default" : {
+      "nestedRequiredBoolean" : true
+    }
+  }, {
+    "name" : "unionEnumWithDefault",
+    "type" : [ {
+      "type" : "enum",
+      "name" : "nestedUnionEnumWithDefault",
+      "symbols" : [ "a", "b" ]
+    }, "int" ],
+    "default" : "b"
+  }, {
+    "name" : "unionArrayWithDefault",
+    "type" : [ {
+      "type" : "array",
+      "items" : "string"
+    }, "int" ],
+    "default" : [ "a" ]
+  }, {
+    "name" : "unionMapWithDefault",
+    "type" : [ {
+      "type" : "map",
+      "values" : "string"
+    }, "int" ],
+    "default" : {
+      "a" : "b"
+    }
+  }, {
+    "name" : "unionFixedWithDefault",
+    "type" : [ {
+      "type" : "fixed",
+      "name" : "nestedUnionFixedWithDefault",
+      "size" : 1
+    }, "int" ],
+    "default" : "A"
+  } ]
+}
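
Note: this fixture mirrors what the Java SchemaBuilder DSL emits. A minimal sketch of
assembling the first few fields with that API (field names taken from the fixture above;
the surrounding test harness is assumed):

    import org.apache.avro.Schema;
    import org.apache.avro.SchemaBuilder;

    Schema schema = SchemaBuilder.record("recordAll").fields()
        .requiredBoolean("requiredBoolean")
        .name("requiredBooleanWithDefault").type().booleanType().booleanDefault(true)
        .optionalBoolean("optionalBoolean")  // yields ["null","boolean"] with default null
        .requiredInt("requiredInt")
        .endRecord();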
diff --git a/lang/java/checkstyle.xml b/lang/java/checkstyle.xml
new file mode 100644
index 0000000..a51089a
--- /dev/null
+++ b/lang/java/checkstyle.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<!DOCTYPE module PUBLIC
+    "-//Puppy Crawl//DTD Check Configuration 1.2//EN"
+    "http://www.puppycrawl.com/dtds/configuration_1_2.dtd">
+
+<module name="Checker">
+    <module name="FileTabCharacter"/>
+    <module name="NewlineAtEndOfFile">
+        <property name="lineSeparator" value="lf"/>
+    </module>
+
+    <module name="TreeWalker">
+        <module name="ConstantName"/>
+        <module name="LocalFinalVariableName"/>
+        <module name="LocalVariableName"/>
+        <module name="MemberName"/>
+        <module name="MethodName"/>
+        <module name="PackageName"/>
+        <module name="ParameterName"/>
+        <module name="StaticVariableName"/>
+        <module name="TypeName"/>
+
+        <module name="AvoidStarImport"/>
+        <module name="RedundantImport"/>
+        <module name="UnusedImports"/>
+
+        <module name="RedundantModifier"/>
+
+        <module name="EmptyStatement"/>
+        <module name="IllegalInstantiation"/>
+        <module name="RedundantThrows"/>
+        <module name="SimplifyBooleanExpression"/>
+        <module name="SimplifyBooleanReturn"/>
+
+        <module name="InterfaceIsType"/>
+
+        <module name="ArrayTypeStyle"/>
+        <module name="UpperEll"/>
+
+    </module>
+</module>
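
For illustration, a hypothetical snippet of the kind the TreeWalker checks above would flag:

    public class StyleExample {
      static boolean isReady(boolean done) {
        long count = 10l;        // UpperEll: prefer 10L; lower-case 'l' reads like '1'
        if (done == true) {      // SimplifyBooleanExpression: just "if (done)"
          return true;
        } else {
          return false;          // SimplifyBooleanReturn: just "return done;"
        }
      }
    }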
diff --git a/lang/java/compiler/pom.xml b/lang/java/compiler/pom.xml
new file mode 100644
index 0000000..046c486
--- /dev/null
+++ b/lang/java/compiler/pom.xml
@@ -0,0 +1,131 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <artifactId>avro-parent</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.8.0</version>
+    <relativePath>../</relativePath>
+  </parent>
+
+  <artifactId>avro-compiler</artifactId>
+
+  <name>Apache Avro Compiler</name>
+  <packaging>bundle</packaging>
+  <url>http://avro.apache.org</url>
+  <description>Compilers for Avro IDL and Avro Specific Java API</description>
+
+  <properties>
+    <osgi.import>
+      !org.apache.avro.compiler*,
+      org.apache.avro*;version="${project.version}",
+      org.apache.commons.lang*,
+      org.apache.commons.velocity*,
+      org.apache.ant*,
+      *
+    </osgi.import>
+    <osgi.export>org.apache.avro.compiler*;version="${project.version}"</osgi.export>
+  </properties>
+
+  <build>
+    <resources>
+      <resource>
+        <directory>src/main/velocity</directory>
+      </resource>
+    </resources>
+    <testResources>
+      <testResource>
+        <directory>src/main/velocity</directory>
+      </testResource>
+      <testResource>
+        <directory>src/test/idl</directory>
+      </testResource>
+    </testResources>
+    <plugins>
+      <!-- Avro generates a parser class with javacc. By default, this looks in src/main/javacc
+        and outputs to target/generated-sources/javacc. See http://mojo.codehaus.org/javacc-maven-plugin/javacc-mojo.html
+        for more info on using this plugin. -->
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>javacc-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>javacc</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>javacc</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <systemPropertyVariables>
+            <test.idl.dir>${project.basedir}/src/test/idl</test.idl.dir>
+          </systemPropertyVariables>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+      <version>${commons-lang.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.velocity</groupId>
+      <artifactId>velocity</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-core-asl</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ant</groupId>
+      <artifactId>ant</artifactId>
+      <version>${ant.version}</version>
+      <!-- can only be used from within ant -->
+      <scope>provided</scope>
+    </dependency>
+  </dependencies>
+
+</project>
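
The javacc plugin above generates the org.apache.avro.compiler.idl.Idl parser from the
idl.jj grammar further down in this commit. A minimal sketch of driving it, assuming the
grammar's entry production is CompilationUnit() and the .avdl path comes from args[0]:

    import java.io.File;
    import org.apache.avro.Protocol;
    import org.apache.avro.compiler.idl.Idl;

    public class IdlDemo {
      public static void main(String[] args) throws Exception {
        Idl idl = new Idl(new File(args[0]));           // path to an .avdl file
        try {
          Protocol protocol = idl.CompilationUnit();    // parse the IDL into a Protocol
          System.out.println(protocol.toString(true));  // pretty-print as JSON
        } finally {
          idl.close();
        }
      }
    }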
diff --git a/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/ProtocolTask.java b/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/ProtocolTask.java
new file mode 100644
index 0000000..23f8d7e
--- /dev/null
+++ b/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/ProtocolTask.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.compiler.specific;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Protocol;
+import org.apache.avro.generic.GenericData.StringType;
+
+import org.apache.tools.ant.BuildException;
+import org.apache.tools.ant.DirectoryScanner;
+import org.apache.tools.ant.Project;
+import org.apache.tools.ant.Task;
+import org.apache.tools.ant.types.FileSet;
+
+/** Ant task to generate Java interface and classes for a protocol. */
+public class ProtocolTask extends Task {
+  private File src;
+  private File dest = new File(".");
+  private StringType stringType = StringType.CharSequence;
+
+  private final ArrayList<FileSet> filesets = new ArrayList<FileSet>();
+  
+  /** Set the schema file. */
+  public void setFile(File file) { this.src = file; }
+  
+  /** Set the output directory. */
+  public void setDestdir(File dir) { this.dest = dir; }
+  
+  /** Set the string type. */
+  public void setStringType(StringType type) { this.stringType = type; }
+  
+  /** Get the string type. */
+  public StringType getStringType() { return this.stringType; }
+  
+  /** Add a fileset. */
+  public void addFileset(FileSet set) { filesets.add(set); }
+  
+  /** Run the compiler. */
+  @Override
+  public void execute() {
+    if (src == null && filesets.size()==0)
+      throw new BuildException("No file or fileset specified.");
+
+    if (src != null)
+      compile(src);
+
+    Project myProject = getProject();
+    for (int i = 0; i < filesets.size(); i++) {
+      FileSet fs = filesets.get(i);
+      DirectoryScanner ds = fs.getDirectoryScanner(myProject);
+      File dir = fs.getDir(myProject);
+      String[] srcs = ds.getIncludedFiles();
+      for (int j = 0; j < srcs.length; j++) {
+        compile(new File(dir, srcs[j]));
+      }
+    }
+  }
+  
+  protected void doCompile(File src, File dir) throws IOException {
+    Protocol protocol = Protocol.parse(src);
+    SpecificCompiler compiler = new SpecificCompiler(protocol);
+    compiler.setStringType(getStringType());
+    compiler.compileToDestination(src, dir);
+  }
+
+  private void compile(File file) {
+    try {
+      doCompile(file, dest);
+    } catch (AvroRuntimeException e) {
+      throw new BuildException(e);
+    } catch (IOException e) {
+      throw new BuildException(e);
+    }
+  }
+}
+
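
Outside of an Ant build, the task can also be driven programmatically; a minimal
sketch, with hypothetical paths passed in args:

    import java.io.File;
    import org.apache.avro.compiler.specific.ProtocolTask;
    import org.apache.avro.generic.GenericData.StringType;

    public class GenFromProtocol {
      public static void main(String[] args) {
        ProtocolTask task = new ProtocolTask();
        task.setFile(new File(args[0]));        // an .avpr protocol file
        task.setDestdir(new File(args[1]));     // destination for generated sources
        task.setStringType(StringType.String);  // emit java.lang.String for Avro strings
        task.execute();                         // wraps parse/IO errors in BuildException
      }
    }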
diff --git a/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SchemaTask.java b/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SchemaTask.java
new file mode 100644
index 0000000..9c5c12a
--- /dev/null
+++ b/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SchemaTask.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.compiler.specific;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+
+/** Ant task to generate Java classes for a schema. */
+public class SchemaTask extends ProtocolTask {
+  @Override
+  protected void doCompile(File src, File dest) throws IOException {
+    Schema.Parser parser = new Schema.Parser();
+    Schema schema = parser.parse(src);
+    SpecificCompiler compiler = new SpecificCompiler(schema);
+    compiler.setStringType(getStringType());
+    compiler.compileToDestination(src, dest);
+  }
+}
+
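
The same generation is available without Ant via SpecificCompiler's static helpers
(defined in the next file); a minimal sketch with hypothetical paths:

    import java.io.File;
    import org.apache.avro.compiler.specific.SpecificCompiler;

    public class GenFromSchema {
      public static void main(String[] args) throws Exception {
        // args[0]: an .avsc schema file; args[1]: output source directory
        SpecificCompiler.compileSchema(new File(args[0]), new File(args[1]));
      }
    }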
diff --git a/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SpecificCompiler.java b/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SpecificCompiler.java
new file mode 100644
index 0000000..c1fa08a
--- /dev/null
+++ b/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SpecificCompiler.java
@@ -0,0 +1,864 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.compiler.specific;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.LinkedHashMap;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.avro.specific.SpecificData;
+import org.codehaus.jackson.JsonNode;
+
+import org.apache.avro.Protocol;
+import org.apache.avro.Protocol.Message;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.SchemaNormalization;
+import org.apache.avro.JsonProperties;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericData.StringType;
+import org.apache.velocity.Template;
+import org.apache.velocity.VelocityContext;
+import org.apache.velocity.app.VelocityEngine;
+import org.apache.velocity.runtime.RuntimeServices;
+import org.apache.velocity.runtime.log.LogChute;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.avro.specific.SpecificData.RESERVED_WORDS;
+
+/**
+ * Generate specific Java interfaces and classes for protocols and schemas.
+ *
+ * Java reserved keywords are mangled so that generated code still compiles.
+ */
+public class SpecificCompiler {
+  public static enum FieldVisibility {
+    PUBLIC, PUBLIC_DEPRECATED, PRIVATE
+  }
+
+  private final Set<Schema> queue = new HashSet<Schema>();
+  private Protocol protocol;
+  private VelocityEngine velocityEngine;
+  private String templateDir;
+  private FieldVisibility fieldVisibility = FieldVisibility.PUBLIC_DEPRECATED;
+  private boolean createSetters = true;
+  private String outputCharacterEncoding;
+
+  /* Reserved words for accessor/mutator methods */
+  private static final Set<String> ACCESSOR_MUTATOR_RESERVED_WORDS = 
+      new HashSet<String>(Arrays.asList(new String[] {
+            "class", "schema", "classSchema"
+          }));
+  static {
+    // Add reserved words to accessor/mutator reserved words
+    ACCESSOR_MUTATOR_RESERVED_WORDS.addAll(RESERVED_WORDS);
+  }
+  
+  /* Reserved words for error types */
+  private static final Set<String> ERROR_RESERVED_WORDS = new HashSet<String>(
+      Arrays.asList(new String[] { "message", "cause" }));
+  static {
+    // Add accessor/mutator reserved words to error reserved words
+    ERROR_RESERVED_WORDS.addAll(ACCESSOR_MUTATOR_RESERVED_WORDS);
+  }
+  
+  private static final String FILE_HEADER = 
+      "/**\n" +
+      " * Autogenerated by Avro\n" +
+      " * \n" +
+      " * DO NOT EDIT DIRECTLY\n" +
+      " */\n";
+  
+  public SpecificCompiler(Protocol protocol) {
+    this();
+    // enqueue all types
+    for (Schema s : protocol.getTypes()) {
+      enqueue(s);
+    }
+    this.protocol = protocol;
+  }
+
+  public SpecificCompiler(Schema schema) {
+    this();
+    enqueue(schema);
+    this.protocol = null;
+  }
+  
+  SpecificCompiler() {
+    this.templateDir =
+      System.getProperty("org.apache.avro.specific.templates",
+                         "/org/apache/avro/compiler/specific/templates/java/classic/");
+    initializeVelocity();
+  }
+
+  /** Set the resource directory where templates reside. The compiler first
+   * checks the file system for the specified path; if it is not found there,
+   * it is assumed to be present on the classpath. */
+  public void setTemplateDir(String templateDir) {
+    this.templateDir = templateDir;
+  }
+
+  /**
+   * @return true if the record fields should be marked as deprecated
+   */
+  public boolean deprecatedFields() {
+    return (this.fieldVisibility == FieldVisibility.PUBLIC_DEPRECATED);
+  }
+
+  /**
+   * @return true if the record fields should be public
+   */
+  public boolean publicFields() {
+    return (this.fieldVisibility == FieldVisibility.PUBLIC ||
+            this.fieldVisibility == FieldVisibility.PUBLIC_DEPRECATED);
+  }
+
+  /**
+   * @return true if the record fields should be private
+   */
+  public boolean privateFields() {
+    return (this.fieldVisibility == FieldVisibility.PRIVATE);
+  }
+
+  /**
+   * Sets the field visibility option.
+   */
+  public void setFieldVisibility(FieldVisibility fieldVisibility) {
+    this.fieldVisibility = fieldVisibility;
+  }
+
+  public boolean isCreateSetters() {
+    return this.createSetters;
+  }
+
+  /**
+   * Set to false to not create setter methods for the fields of the record.
+   */
+  public void setCreateSetters(boolean createSetters) {
+    this.createSetters = createSetters;
+  }
+
+  private static String logChuteName = null;
+
+  private void initializeVelocity() {
+    this.velocityEngine = new VelocityEngine();
+
+    // These properties tell Velocity to use its own classpath-based
+    // loader, then drop down to check the root and the current folder
+    velocityEngine.addProperty("resource.loader", "class, file");
+    velocityEngine.addProperty("class.resource.loader.class",
+        "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
+    velocityEngine.addProperty("file.resource.loader.class", 
+        "org.apache.velocity.runtime.resource.loader.FileResourceLoader");
+    velocityEngine.addProperty("file.resource.loader.path", "/, .");
+    velocityEngine.setProperty("runtime.references.strict", true);
+
+    // try to use Slf4jLogChute, but if we can't, fall back to the null one.
+    if (null == logChuteName) {
+      // multiple threads can get here concurrently, but that's ok.
+      try {
+        new Slf4jLogChute();
+        logChuteName = Slf4jLogChute.class.getName();
+      } catch (Exception e) {
+        logChuteName = "org.apache.velocity.runtime.log.NullLogChute";
+      }
+    }
+    velocityEngine.setProperty("runtime.log.logsystem.class", logChuteName);
+  }
+
+  /**
+   * Captures output file path and contents.
+   */
+  static class OutputFile {
+    String path;
+    String contents;
+    String outputCharacterEncoding;
+
+    /**
+     * Writes contents to path under destDir unless an existing file there is
+     * newer than src, creating directories as necessary; returns the file.
+     */
+    File writeToDestination(File src, File destDir) throws IOException {
+      File f = new File(destDir, path);
+      if (src != null && f.exists() && f.lastModified() >= src.lastModified())
+        return f;                                 // already up to date: ignore
+      f.getParentFile().mkdirs();
+      Writer fw;
+      if (outputCharacterEncoding != null) {
+        fw = new OutputStreamWriter(new FileOutputStream(f), outputCharacterEncoding);
+      } else {
+        fw = new FileWriter(f);
+      }
+      try {
+        fw.write(FILE_HEADER);
+        fw.write(contents);
+      } finally {
+        fw.close();
+      }
+      return f;
+    }
+  }
+
+  /**
+   * Generates Java interface and classes for a protocol.
+   * @param src the source Avro protocol file
+   * @param dest the directory to place generated files in
+   */
+  public static void compileProtocol(File src, File dest) throws IOException {
+    compileProtocol(new File[] {src}, dest);
+  }
+
+  /**
+   * Generates Java interface and classes for a number of protocol files.
+   * @param srcFiles the source Avro protocol files
+   * @param dest the directory to place generated files in
+   */
+  public static void compileProtocol(File[] srcFiles, File dest) throws IOException {
+    for (File src : srcFiles) {
+      Protocol protocol = Protocol.parse(src);
+      SpecificCompiler compiler = new SpecificCompiler(protocol);
+      compiler.compileToDestination(src, dest);
+    }
+  }
+
+  /** Generates Java classes for a schema. */
+  public static void compileSchema(File src, File dest) throws IOException {
+    compileSchema(new File[] {src}, dest);
+  }
+
+  /** Generates Java classes for a number of schema files. */
+  public static void compileSchema(File[] srcFiles, File dest) throws IOException {
+    Schema.Parser parser = new Schema.Parser();
+
+    for (File src : srcFiles) {
+      Schema schema = parser.parse(src);
+      SpecificCompiler compiler = new SpecificCompiler(schema);
+      compiler.compileToDestination(src, dest);
+    }
+  }
+
+  /** Recursively enqueue schemas that need a class generated. */
+  private void enqueue(Schema schema) {
+    if (queue.contains(schema)) return;
+    switch (schema.getType()) {
+    case RECORD:
+      queue.add(schema);
+      for (Schema.Field field : schema.getFields())
+        enqueue(field.schema());
+      break;
+    case MAP:
+      enqueue(schema.getValueType());
+      break;
+    case ARRAY:
+      enqueue(schema.getElementType());
+      break;
+    case UNION:
+      for (Schema s : schema.getTypes())
+        enqueue(s);
+      break;
+    case ENUM:
+    case FIXED:
+      queue.add(schema);
+      break;
+    case STRING: case BYTES:
+    case INT: case LONG:
+    case FLOAT: case DOUBLE:
+    case BOOLEAN: case NULL:
+      break;
+    default: throw new RuntimeException("Unknown type: "+schema);
+    }
+  }
+
+  /** Generate java classes for enqueued schemas. */
+  Collection<OutputFile> compile() {
+    List<OutputFile> out = new ArrayList<OutputFile>();
+    for (Schema schema : queue) {
+      out.add(compile(schema));
+    }
+    if (protocol != null) {
+      out.add(compileInterface(protocol));
+    }
+    return out;
+  }
+
+  /** Generate output under dst, unless existing file is newer than src. */
+  public void compileToDestination(File src, File dst) throws IOException {
+    for (Schema schema : queue) {
+      OutputFile o = compile(schema);
+      o.writeToDestination(src, dst);
+    }
+    if (protocol != null) {
+      compileInterface(protocol).writeToDestination(src, dst);
+    }
+  }
+
+  private String renderTemplate(String templateName, VelocityContext context) {
+    Template template;
+    try {
+      template = this.velocityEngine.getTemplate(templateName);
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+    StringWriter writer = new StringWriter();
+    template.merge(context, writer);
+    return writer.toString();
+  }
+
+  OutputFile compileInterface(Protocol protocol) {
+    protocol = addStringType(protocol);           // annotate protocol as needed
+    VelocityContext context = new VelocityContext();
+    context.put("protocol", protocol);
+    context.put("this", this);
+    String out = renderTemplate(templateDir+"protocol.vm", context);
+
+    OutputFile outputFile = new OutputFile();
+    String mangledName = mangle(protocol.getName());
+    outputFile.path = makePath(mangledName, protocol.getNamespace());
+    outputFile.contents = out;
+    outputFile.outputCharacterEncoding = outputCharacterEncoding;
+    return outputFile;
+  }
+
+  static String makePath(String name, String space) {
+    if (space == null || space.isEmpty()) {
+      return name + ".java";
+    } else {
+      return space.replace('.', File.separatorChar) + File.separatorChar + name
+          + ".java";
+    }
+  }
+
+  OutputFile compile(Schema schema) {
+    schema = addStringType(schema);               // annotate schema as needed
+    String output = "";
+    VelocityContext context = new VelocityContext();
+    context.put("this", this);
+    context.put("schema", schema);
+
+    switch (schema.getType()) {
+    case RECORD:
+      output = renderTemplate(templateDir+"record.vm", context);
+      break;
+    case ENUM:
+      output = renderTemplate(templateDir+"enum.vm", context);
+      break;
+    case FIXED:
+      output = renderTemplate(templateDir+"fixed.vm", context);
+      break;
+    case BOOLEAN:
+    case NULL:
+      break;
+    default: throw new RuntimeException("Unknown type: "+schema);
+    }
+
+    OutputFile outputFile = new OutputFile();
+    String name = mangle(schema.getName());
+    outputFile.path = makePath(name, schema.getNamespace());
+    outputFile.contents = output;
+    outputFile.outputCharacterEncoding = outputCharacterEncoding;
+    return outputFile;
+  }
+
+  private StringType stringType = StringType.CharSequence;
+
+  /** Set the Java type to be emitted for string schemas. */
+  public void setStringType(StringType t) { this.stringType = t; }
+
+  // annotate map and string schemas with string type
+  private Protocol addStringType(Protocol p) {
+    if (stringType != StringType.String)
+      return p;
+
+    Protocol newP = new Protocol(p.getName(), p.getDoc(), p.getNamespace());
+    Map<Schema,Schema> types = new LinkedHashMap<Schema,Schema>();
+
+    // Copy properties
+    for (Map.Entry<String,JsonNode> prop : p.getJsonProps().entrySet())
+      newP.addProp(prop.getKey(), prop.getValue());   // copy props
+
+    // annotate types
+    Collection<Schema> namedTypes = new LinkedHashSet<Schema>();
+    for (Schema s : p.getTypes())
+      namedTypes.add(addStringType(s, types));
+    newP.setTypes(namedTypes);
+
+    // annotate messages
+    Map<String,Message> newM = newP.getMessages();
+    for (Message m : p.getMessages().values())
+      newM.put(m.getName(), m.isOneWay()
+               ? newP.createMessage(m.getName(), m.getDoc(), m.getJsonProps(),
+                                    addStringType(m.getRequest(), types))
+               : newP.createMessage(m.getName(), m.getDoc(), m.getJsonProps(),
+                                    addStringType(m.getRequest(), types),
+                                    addStringType(m.getResponse(), types),
+                                    addStringType(m.getErrors(), types)));
+    return newP;
+  }
+
+  private Schema addStringType(Schema s) {
+    if (stringType != StringType.String)
+      return s;
+    return addStringType(s, new LinkedHashMap<Schema,Schema>());
+  }
+
+  // annotate map and string schemas with string type
+  private Schema addStringType(Schema s, Map<Schema,Schema> seen) {
+    if (seen.containsKey(s)) return seen.get(s); // break loops
+    Schema result = s;
+    switch (s.getType()) {
+    case STRING:
+      result = Schema.create(Schema.Type.STRING);
+      GenericData.setStringType(result, stringType);
+      break;
+    case RECORD:
+      result =
+        Schema.createRecord(s.getFullName(), s.getDoc(), null, s.isError());
+      for (String alias : s.getAliases())
+        result.addAlias(alias, null);             // copy aliases
+      seen.put(s, result);
+      List<Field> newFields = new ArrayList<Field>();
+      for (Field f : s.getFields()) {
+        Schema fSchema = addStringType(f.schema(), seen);
+        Field newF =
+          new Field(f.name(), fSchema, f.doc(), f.defaultValue(), f.order());
+        for (Map.Entry<String,JsonNode> p : f.getJsonProps().entrySet())
+          newF.addProp(p.getKey(), p.getValue()); // copy props
+        for (String a : f.aliases())
+          newF.addAlias(a);                       // copy aliases
+        newFields.add(newF);
+      }
+      result.setFields(newFields);
+      break;
+    case ARRAY:
+      Schema e = addStringType(s.getElementType(), seen);
+      result = Schema.createArray(e);
+      break;
+    case MAP:
+      Schema v = addStringType(s.getValueType(), seen);
+      result = Schema.createMap(v);
+      GenericData.setStringType(result, stringType);
+      break;
+    case UNION:
+      List<Schema> types = new ArrayList<Schema>();
+      for (Schema branch : s.getTypes())
+        types.add(addStringType(branch, seen));
+      result = Schema.createUnion(types);
+      break;
+    }
+    for (Map.Entry<String,JsonNode> p : s.getJsonProps().entrySet())
+      result.addProp(p.getKey(), p.getValue());   // copy props
+    seen.put(s, result);
+    return result;
+  }
+
+  private String getStringType(JsonNode overrideClassProperty) {
+    if (overrideClassProperty != null)
+      return overrideClassProperty.getTextValue();
+    switch (stringType) {
+    case String:        return "java.lang.String";
+    case Utf8:          return "org.apache.avro.util.Utf8";
+    case CharSequence:  return "java.lang.CharSequence";
+    default: throw new RuntimeException("Unknown string type: "+stringType);
+    }
+  }
+
+  private static final Schema NULL_SCHEMA = Schema.create(Schema.Type.NULL);
+
+  /** Utility for template use.  Returns the java type for a Schema. */
+  public String javaType(Schema schema) {
+    switch (schema.getType()) {
+    case RECORD:
+    case ENUM:
+    case FIXED:
+      return mangle(schema.getFullName());
+    case ARRAY:
+      return "java.util.List<" + javaType(schema.getElementType()) + ">";
+    case MAP:
+      return "java.util.Map<"
+        + getStringType(schema.getJsonProp(SpecificData.KEY_CLASS_PROP))+","
+        + javaType(schema.getValueType()) + ">";
+    case UNION:
+      List<Schema> types = schema.getTypes(); // elide unions with null
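+      // e.g. ["null","string"] is typed as the string class itself,
+      // while any other union falls back to java.lang.Object.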
+      if ((types.size() == 2) && types.contains(NULL_SCHEMA))
+        return javaType(types.get(types.get(0).equals(NULL_SCHEMA) ? 1 : 0));
+      return "java.lang.Object";
+    case STRING:
+      return getStringType(schema.getJsonProp(SpecificData.CLASS_PROP));
+    case BYTES:   return "java.nio.ByteBuffer";
+    case INT:     return "java.lang.Integer";
+    case LONG:    return "java.lang.Long";
+    case FLOAT:   return "java.lang.Float";
+    case DOUBLE:  return "java.lang.Double";
+    case BOOLEAN: return "java.lang.Boolean";
+    case NULL:    return "java.lang.Void";
+    default: throw new RuntimeException("Unknown type: "+schema);
+    }
+  }
+
+  /** Utility for template use.  Returns the unboxed java type for a Schema. */
+  public String javaUnbox(Schema schema) {
+    switch (schema.getType()) {
+    case INT:     return "int";
+    case LONG:    return "long";
+    case FLOAT:   return "float";
+    case DOUBLE:  return "double";
+    case BOOLEAN: return "boolean";
+    default:      return javaType(schema);
+    }
+  }
+
+  /** Utility for template use.  Returns the java annotations for a schema. */
+  public String[] javaAnnotations(JsonProperties props) {
+    JsonNode value = props.getJsonProp("javaAnnotation");
+    if (value == null)
+      return new String[0];
+    if (value.isTextual())
+      return new String[] { value.getTextValue() };
+    if (value.isArray()) {
+      int i = 0;
+      String[] result = new String[value.size()];
+      for (JsonNode v : value)
+        result[i++] = v.getTextValue();
+      return result;
+    }
+    return new String[0];
+  }
+
+  // maximum chunk size for string constants (javac caps one constant at 65535 bytes)
+  int maxStringChars = 8192;
+
+  /** Utility for template use. Takes a (potentially overly long) string and
+   *  splits it into a quoted, comma-separated sequence of escaped strings.
+   *  @param s The string to split
+   *  @return A sequence of quoted, comma-separated, escaped strings
+   */
+  public String javaSplit(String s) throws IOException {
+    StringBuilder b = new StringBuilder("\"");    // initial quote
+    for (int i = 0; i < s.length(); i += maxStringChars) {
+      if (i != 0) b.append("\",\"");              // insert quote-comma-quote
+      String chunk = s.substring(i, Math.min(s.length(), i + maxStringChars));
+      b.append(javaEscape(chunk));                // escape chunks
+    }
+    b.append("\"");                               // final quote
+    return b.toString();
+  }
+  
+  /** Utility for template use.  Escapes quotes and backslashes. */
+  public static String javaEscape(Object o) {
+    return o.toString().replace("\\","\\\\").replace("\"", "\\\"");
+  }
+
+  /** Utility for template use.  Escapes comment end with HTML entities. */
+  public static String escapeForJavadoc(String s) {
+    return s.replace("*/", "*&#47;");
+  }
+  
+  /** Utility for template use.  Returns empty string for null. */
+  public static String nullToEmpty(String x) {
+    return x == null ? "" : x;
+  }
+
+  /** Utility for template use.  Adds a dollar sign to reserved words. */
+  public static String mangle(String word) {
+    return mangle(word, false);
+  }
+  
+  /** Utility for template use.  Adds a dollar sign to reserved words. */
+  public static String mangle(String word, boolean isError) {
+    return mangle(word, isError ? ERROR_RESERVED_WORDS : RESERVED_WORDS);
+  }
+  
+  /** Utility for template use.  Adds a dollar sign to reserved words. */
+  public static String mangle(String word, Set<String> reservedWords) {
+    return mangle(word, reservedWords, false);
+  }
+  
+  /** Utility for template use.  Adds a dollar sign to reserved words. */
+  public static String mangle(String word, Set<String> reservedWords, 
+      boolean isMethod) {
+    if (word.contains(".")) {
+      // If the 'word' is really a full path of a class we must mangle just the classname
+      int lastDot = word.lastIndexOf(".");
+      String packageName = word.substring(0, lastDot + 1);
+      String className = word.substring(lastDot + 1);
+      return packageName + mangle(className, reservedWords, isMethod);
+    }
+    if (reservedWords.contains(word) || 
+        (isMethod && reservedWords.contains(
+            Character.toLowerCase(word.charAt(0)) + 
+            ((word.length() > 1) ? word.substring(1) : "")))) {
+      return word + "$";
+    }
+    return word;
+  }
+  
+  /** Utility for use by templates. Return schema fingerprint as a long. */
+  public static long fingerprint64(Schema schema) {
+    return SchemaNormalization.parsingFingerprint64(schema);
+  }
+
+  /**
+   * Generates the name of a field accessor method.
+   * @param schema the schema in which the field is defined.
+   * @param field the field for which to generate the accessor name.
+   * @return the name of the accessor method for the given field.
+   */
+  public static String generateGetMethod(Schema schema, Field field) {
+    return generateMethodName(schema, field, "get", "");
+  }
+  
+  /**
+   * Generates the name of a field mutator method.
+   * @param schema the schema in which the field is defined.
+   * @param field the field for which to generate the mutator name.
+   * @return the name of the mutator method for the given field.
+   */
+  public static String generateSetMethod(Schema schema, Field field) {
+    return generateMethodName(schema, field, "set", "");
+  }
+  
+  /**
+   * Generates the name of a field "has" method.
+   * @param schema the schema in which the field is defined.
+   * @param field the field for which to generate the "has" method name.
+   * @return the name of the has method for the given field.
+   */
+  public static String generateHasMethod(Schema schema, Field field) {
+    return generateMethodName(schema, field, "has", "");
+  }
+  
+  /**
+   * Generates the name of a field "clear" method.
+   * @param schema the schema in which the field is defined.
+   * @param field the field for which to generate the accessor name.
+   * @return the name of the has method for the given field.
+   */
+  public static String generateClearMethod(Schema schema, Field field) {
+    return generateMethodName(schema, field, "clear", "");
+  }
+  
+  /** Utility for use by templates. Does this schema have a Builder method? */
+  public static boolean hasBuilder(Schema schema) {
+    switch (schema.getType()) {
+      case RECORD:
+        return true;
+
+      case UNION:
+        List<Schema> types = schema.getTypes(); // elide unions with null
+        if ((types.size() == 2) && types.contains(NULL_SCHEMA)) {
+          return hasBuilder(types.get(types.get(0).equals(NULL_SCHEMA) ? 1 : 0));
+        }
+        return false;
+
+      default:
+        return false;
+    }
+  }
+
+  /**
+   * Generates the name of a field Builder accessor method.
+   * @param schema the schema in which the field is defined.
+   * @param field the field for which to generate the Builder accessor name.
+   * @return the name of the Builder accessor method for the given field.
+   */
+  public static String generateGetBuilderMethod(Schema schema, Field field) {
+    return generateMethodName(schema, field, "get", "Builder");
+  }
+
+  /**
+   * Generates the name of a field Builder mutator method.
+   * @param schema the schema in which the field is defined.
+   * @param field the field for which to generate the Builder mutator name.
+   * @return the name of the Builder mutator method for the given field.
+   */
+  public static String generateSetBuilderMethod(Schema schema, Field field) {
+    return generateMethodName(schema, field, "set", "Builder");
+  }
+
+  /**
+   * Generates the name of a field Builder "has" method.
+   * @param schema the schema in which the field is defined.
+   * @param field the field for which to generate the "has" Builder method name.
+   * @return the name of the "has" Builder method for the given field.
+   */
+  public static String generateHasBuilderMethod(Schema schema, Field field) {
+    return generateMethodName(schema, field, "has", "Builder");
+  }
+
+  /**
+   * Generates a method name from a field name.
+   * @param schema the schema in which the field is defined.
+   * @param field the field for which to generate the accessor name.
+   * @param prefix method name prefix, e.g. "get" or "set".
+   * @param postfix method name postfix, e.g. "" or "Builder".
+   * @return the generated method name.
+   */
+  private static String generateMethodName(Schema schema, Field field, 
+      String prefix, String postfix) {
+
+    // Check for the special case in which the schema defines two fields whose 
+    // names are identical except for the case of the first character:
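+    // (e.g. "value" and "Value", which would otherwise both map to getValue())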
+    char firstChar = field.name().charAt(0);
+    String conflictingFieldName = (Character.isLowerCase(firstChar) ?
+        Character.toUpperCase(firstChar) : Character.toLowerCase(firstChar)) +
+        (field.name().length() > 1 ? field.name().substring(1) : "");
+    boolean fieldNameConflict = schema.getField(conflictingFieldName) != null;
+    
+    StringBuilder methodBuilder = new StringBuilder(prefix);
+    String fieldName = mangle(field.name(), 
+        schema.isError() ? ERROR_RESERVED_WORDS : 
+          ACCESSOR_MUTATOR_RESERVED_WORDS, true);
+
+    boolean nextCharToUpper = true;
+    for (int ii = 0; ii < fieldName.length(); ii++) {
+      if (fieldName.charAt(ii) == '_') {
+        nextCharToUpper = true;
+      }
+      else if (nextCharToUpper) {
+        methodBuilder.append(Character.toUpperCase(fieldName.charAt(ii)));
+        nextCharToUpper = false;
+      }
+      else {
+        methodBuilder.append(fieldName.charAt(ii));
+      }
+    }
+    methodBuilder.append(postfix);
+    
+    // If there is a field name conflict append $0 or $1
+    if (fieldNameConflict) {
+      if (methodBuilder.charAt(methodBuilder.length() - 1) != '$') {
+        methodBuilder.append('$');
+      }
+      methodBuilder.append(Character.isLowerCase(firstChar) ? '0' : '1');
+    }
+
+    return methodBuilder.toString();
+  }
+  
+  /** Tests whether an unboxed Java type can be set to null */
+  public static boolean isUnboxedJavaTypeNullable(Schema schema) {
+    switch (schema.getType()) {
+    // Primitives can't be null; assume anything else can
+    case INT:
+    case LONG:
+    case FLOAT:
+    case DOUBLE:
+    case BOOLEAN: return false;
+    default: return true;
+    }
+  }
+
+  public static void main(String[] args) throws Exception {
+    //compileSchema(new File(args[0]), new File(args[1]));
+    compileProtocol(new File(args[0]), new File(args[1]));
+  }
+  
+  public static final class Slf4jLogChute implements LogChute {
+    private Logger logger = LoggerFactory.getLogger("AvroVelocityLogChute");
+    @Override
+    public void init(RuntimeServices rs) throws Exception {
+      // nothing to do
+    }
+
+    @Override
+    public void log(int level, String message) {
+      switch (level) {
+      case LogChute.DEBUG_ID:
+        logger.debug(message);
+        break;
+      case LogChute.TRACE_ID:
+        logger.trace(message);
+        break;
+      case LogChute.WARN_ID:
+        logger.warn(message);
+        break;
+      case LogChute.ERROR_ID:
+        logger.error(message);
+        break;
+      default:
+      case LogChute.INFO_ID:
+        logger.info(message);
+        break;
+      }
+    }
+
+    @Override
+    public void log(int level, String message, Throwable t) {
+      switch (level) {
+      case LogChute.DEBUG_ID:
+        logger.debug(message, t);
+        break;
+      case LogChute.TRACE_ID:
+        logger.trace(message, t);
+        break;
+      case LogChute.WARN_ID:
+        logger.warn(message, t);
+        break;
+      case LogChute.ERROR_ID:
+        logger.error(message, t);
+        break;
+      default:
+      case LogChute.INFO_ID:
+        logger.info(message, t);
+        break;
+      }
+    }
+
+    @Override
+    public boolean isLevelEnabled(int level) {
+      switch (level) {
+      case LogChute.DEBUG_ID:
+        return logger.isDebugEnabled();
+      case LogChute.TRACE_ID:
+        return logger.isTraceEnabled();
+      case LogChute.WARN_ID:
+        return logger.isWarnEnabled();
+      case LogChute.ERROR_ID:
+        return logger.isErrorEnabled();
+      default:
+      case LogChute.INFO_ID:
+        return logger.isInfoEnabled();
+      }
+    }
+  }
+
+  /** Sets the character encoding for generated Java files.
+   * @param outputCharacterEncoding character encoding for output files (defaults to the system encoding)
+   */
+  public void setOutputCharacterEncoding(String outputCharacterEncoding) {
+    this.outputCharacterEncoding = outputCharacterEncoding;
+  }
+}
+
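
Taken together, the methods above also support fully configured, programmatic
generation. A minimal sketch, assuming a schema file path in args[0] and an output
directory in args[1] (both hypothetical):

    import java.io.File;
    import org.apache.avro.Schema;
    import org.apache.avro.compiler.specific.SpecificCompiler;
    import org.apache.avro.generic.GenericData.StringType;

    public class GenConfigured {
      public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse(new File(args[0]));
        SpecificCompiler compiler = new SpecificCompiler(schema);
        compiler.setStringType(StringType.String);      // emit java.lang.String for strings
        compiler.setFieldVisibility(
            SpecificCompiler.FieldVisibility.PRIVATE);  // private fields, accessor-only
        compiler.setCreateSetters(false);               // rely on builders for mutation
        compiler.setOutputCharacterEncoding("UTF-8");   // instead of the platform default
        compiler.compileToDestination(new File(args[0]), new File(args[1]));
      }
    }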
diff --git a/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj b/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj
new file mode 100644
index 0000000..8f60b83
--- /dev/null
+++ b/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj
@@ -0,0 +1,1617 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Some portions of this file were modeled after the example Java 1.5
+ * parser included with JavaCC. The following license applies to those
+ * portions:
+ *
+ * Copyright (c) 2006, Sun Microsystems, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *     * Redistributions of source code must retain the above copyright notice,
+ *       this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above copyright
+ *       notice, this list of conditions and the following disclaimer in the
+ *       documentation and/or other materials provided with the distribution.
+ *     * Neither the name of the Sun Microsystems, Inc. nor the names of its
+ *       contributors may be used to endorse or promote products derived from
+ *       this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+options {
+  JAVA_UNICODE_ESCAPE = true;
+  UNICODE_INPUT = true;
+  ERROR_REPORTING = true;
+  STATIC = false;
+  FORCE_LA_CHECK = true;
+  JDK_VERSION = "1.6";
+}
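+// Note: STATIC=false makes the generated parser instance-based (see the
+// thread-safety note on the Idl class below); UNICODE_INPUT admits the full identifier ranges.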
+
+PARSER_BEGIN(Idl)
+
+package org.apache.avro.compiler.idl;
+
+import java.io.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.net.URL;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.*;
+import org.apache.avro.Protocol;
+import org.apache.avro.Protocol.*;
+
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.node.*;
+
+import org.apache.commons.lang.StringEscapeUtils;
+
+/**
+ * Grammar to parse a higher-level language into an Avro Schema.
+ *
+ * Note: each instance is not thread-safe, but multiple separate
+ * instances are safely independent.
+ */
+public class Idl implements Closeable {
+  static JsonNodeFactory FACTORY = JsonNodeFactory.instance;
+
+  File inputDir = new File(".");
+  ClassLoader resourceLoader = null;
+  String namespace;
+  Map<String,Schema> names = new LinkedHashMap<String,Schema>();
+
+  private static final ThreadLocal<String> DOC = new ThreadLocal<String>();
+  static void setDoc(String doc) { DOC.set(doc.trim()); }
+  static String getDoc() {
+    String doc = DOC.get();
+    DOC.set(null);
+    return doc;
+  }
+
+  public Idl(File inputFile) throws IOException {
+    this(new FileInputStream(inputFile), "UTF-8");
+    this.inputDir = inputFile.getParentFile();
+  }
+
+  public Idl(File inputFile, ClassLoader resourceLoader) throws IOException {
+    this(inputFile);
+    this.resourceLoader = resourceLoader;
+  }
+  
+  private Idl(URL input, Idl parent) throws IOException {
+    this(input.openStream(), "UTF-8");
+    this.inputDir = "file".equals(input.getProtocol())
+      ? new File(input.getPath()).getParentFile()
+      : parent.inputDir;
+    this.resourceLoader = parent.resourceLoader;
+  }
+  
+  public void close() throws IOException {
+    jj_input_stream.inputStream.close();
+  }
+
+  private ParseException error(String message, Token token) {
+    return new ParseException
+      (message+", at line "+token.beginLine+", column "+token.beginColumn);
+  }
+
+  private String getTextProp(String key, Map<String,JsonNode> props,
+                             Token token) throws ParseException {
+    JsonNode value = props.get(key);
+    if (value.isTextual())
+      return value.getTextValue();
+    throw error(key+" property must be textual: "+value, token);
+  }
+
+  private List<String> getTextProps(String key, Map<String,JsonNode> props,
+                             Token token) throws ParseException {
+    JsonNode value = props.get(key);
+    if (!value.isArray())
+      throw error(key+" property must be array: "+value, token);
+    List<String> values = new ArrayList<String>();
+    for (JsonNode n : value)
+      if (n.isTextual())
+        values.add(n.getTextValue());
+      else
+        throw error(key+" values must be textual: "+n, token);
+    return values;
+  }
+  
+  private URL findFile(String importFile) throws IOException {
+    File file = new File(this.inputDir, importFile);
+    URL result = null;
+    if (file.exists())
+      result = file.toURI().toURL();
+    else if (this.resourceLoader != null)
+      result = this.resourceLoader.getResource(importFile);
+    if (result == null)
+      throw new FileNotFoundException(importFile);
+    return result;
+  }
+
+}
+
+PARSER_END(Idl)
+
+/* WHITE SPACE */
+
+SKIP :
+{
+  " "
+| "\t"
+| "\n"
+| "\r"
+| "\f"
+}
+
+/* COMMENTS */
+
+SKIP :
+{
+  <SINGLE_LINE_COMMENT: "//" (~["\n", "\r"])* ("\n" | "\r" | "\r\n")?>
+}
+
+SKIP :
+{
+  <"/**" ~["/"]> { input_stream.backup(1); } : DOC_COMMENT
+|
+  "/*" : MULTI_LINE_COMMENT
+}
+
+<DOC_COMMENT,MULTI_LINE_COMMENT>
+MORE :
+{
+  < ~[] >
+}
+
+<DOC_COMMENT>
+SPECIAL_TOKEN :
+{
+  <"*/" > {Idl.setDoc(image.substring(0, image.length()-2));} : DEFAULT
+}
+
+<MULTI_LINE_COMMENT>
+SKIP :
+{
+  <"*/" > : DEFAULT
+}
+
+/* RESERVED WORDS AND LITERALS */
+
+TOKEN :
+{
+  < ARRAY: "array" >
+| < BOOLEAN: "boolean" >
+| < DOUBLE: "double" >
+| < ENUM: "enum" >
+| < ERROR: "error" >
+| < FALSE: "false" >
+| < FIXED: "fixed" >
+| < FLOAT: "float" >
+| < IDL: "idl" >
+| < IMPORT: "import" >
+| < INT: "int" >
+| < LONG: "long" >
+| < MAP: "map" >
+| < ONEWAY: "oneway" >
+| < BYTES: "bytes" >
+| < SCHEMA: "schema" >
+| < STRING: "string" >
+| < NULL: "null" >
+| < PROTOCOL: "protocol" >
+| < RECORD: "record" >
+| < THROWS: "throws" >
+| < TRUE: "true" >
+| < UNION: "union" >
+| < VOID: "void" >
+}
+
+/* LITERALS */
+
+TOKEN :
+{
+  < INTEGER_LITERAL:
+  ("-")?
+    (   <DECIMAL_LITERAL> (["l","L"])?
+      | <HEX_LITERAL> (["l","L"])?
+      | <OCTAL_LITERAL> (["l","L"])?
+        )  
+  >
+|
+  < #DECIMAL_LITERAL: ["1"-"9"] (["0"-"9"])* >
+|
+  < #HEX_LITERAL: "0" ["x","X"] (["0"-"9","a"-"f","A"-"F"])+ >
+|
+  < #OCTAL_LITERAL: "0" (["0"-"7"])* >
+|
+  < FLOATING_POINT_LITERAL:
+  ("-")?
+    ( "NaN" | "Infinity" |
+      <DECIMAL_FLOATING_POINT_LITERAL> | <HEXADECIMAL_FLOATING_POINT_LITERAL> )
+  >
+|
+  < #DECIMAL_FLOATING_POINT_LITERAL:
+        (["0"-"9"])+ "." (["0"-"9"])* (<DECIMAL_EXPONENT>)? (["f","F","d","D"])?
+      | "." (["0"-"9"])+ (<DECIMAL_EXPONENT>)? (["f","F","d","D"])?
+      | (["0"-"9"])+ <DECIMAL_EXPONENT> (["f","F","d","D"])?
+      | (["0"-"9"])+ (<DECIMAL_EXPONENT>)? ["f","F","d","D"]
+  >
+|
+  < #DECIMAL_EXPONENT: ["e","E"] (["+","-"])? (["0"-"9"])+ >
+|
+  < #HEXADECIMAL_FLOATING_POINT_LITERAL:
+        "0" ["x", "X"] (["0"-"9","a"-"f","A"-"F"])+ (".")? <HEXADECIMAL_EXPONENT> (["f","F","d","D"])?
+      | "0" ["x", "X"] (["0"-"9","a"-"f","A"-"F"])* "." (["0"-"9","a"-"f","A"-"F"])+ <HEXADECIMAL_EXPONENT> (["f","F","d","D"])?
+  >
+|
+  < #HEXADECIMAL_EXPONENT: ["p","P"] (["+","-"])? (["0"-"9"])+ >
+|
+  < CHARACTER_LITERAL:
+      "'"
+      (   (~["'","\\","\n","\r"])
+        | ("\\"
+            ( ["n","t","b","r","f","\\","'","\""]
+            | ["0"-"7"] ( ["0"-"7"] )?
+            | ["0"-"3"] ["0"-"7"] ["0"-"7"]
+            )
+          )
+      )
+      "'"
+  >
+|
+  < STRING_LITERAL:
+      "\""
+      (   (~["\"","\\","\n","\r"])
+        | ("\\"
+            ( ["n","t","b","r","f","\\","'","\""]
+            | ["0"-"7"] ( ["0"-"7"] )?
+            | ["0"-"3"] ["0"-"7"] ["0"-"7"]
+            )
+          )
+      )*
+      "\""
+  >
+}
+
+/* IDENTIFIERS */
+
+TOKEN :
+{
+  < IDENTIFIER: <LETTER> (<PART_LETTER>)* >
+|
+  < #LETTER:
+      [  // all chars for which Character.isIdentifierStart is true
+         "$",
+         "A"-"Z",
+         "_",
+         "a"-"z",
+         "\u00a2"-"\u00a5",
+         "\u00aa",
+         "\u00b5",
+         "\u00ba",
+         "\u00c0"-"\u00d6",
+         "\u00d8"-"\u00f6",
+         "\u00f8"-"\u021f",
+         "\u0222"-"\u0233",
+         "\u0250"-"\u02ad",
+         "\u02b0"-"\u02b8",
+         "\u02bb"-"\u02c1",
+         "\u02d0"-"\u02d1",
+         "\u02e0"-"\u02e4",
+         "\u02ee",
+         "\u037a",
+         "\u0386",
+         "\u0388"-"\u038a",
+         "\u038c",
+         "\u038e"-"\u03a1",
+         "\u03a3"-"\u03ce",
+         "\u03d0"-"\u03d7",
+         "\u03da"-"\u03f3",
+         "\u0400"-"\u0481",
+         "\u048c"-"\u04c4",
+         "\u04c7"-"\u04c8",
+         "\u04cb"-"\u04cc",
+         "\u04d0"-"\u04f5",
+         "\u04f8"-"\u04f9",
+         "\u0531"-"\u0556",
+         "\u0559",
+         "\u0561"-"\u0587",
+         "\u05d0"-"\u05ea",
+         "\u05f0"-"\u05f2",
+         "\u0621"-"\u063a",
+         "\u0640"-"\u064a",
+         "\u0671"-"\u06d3",
+         "\u06d5",
+         "\u06e5"-"\u06e6",
+         "\u06fa"-"\u06fc",
+         "\u0710",
+         "\u0712"-"\u072c",
+         "\u0780"-"\u07a5",
+         "\u0905"-"\u0939",
+         "\u093d",
+         "\u0950",
+         "\u0958"-"\u0961",
+         "\u0985"-"\u098c",
+         "\u098f"-"\u0990",
+         "\u0993"-"\u09a8",
+         "\u09aa"-"\u09b0",
+         "\u09b2",
+         "\u09b6"-"\u09b9",
+         "\u09dc"-"\u09dd",
+         "\u09df"-"\u09e1",
+         "\u09f0"-"\u09f3",
+         "\u0a05"-"\u0a0a",
+         "\u0a0f"-"\u0a10",
+         "\u0a13"-"\u0a28",
+         "\u0a2a"-"\u0a30",
+         "\u0a32"-"\u0a33",
+         "\u0a35"-"\u0a36",
+         "\u0a38"-"\u0a39",
+         "\u0a59"-"\u0a5c",
+         "\u0a5e",
+         "\u0a72"-"\u0a74",
+         "\u0a85"-"\u0a8b",
+         "\u0a8d",
+         "\u0a8f"-"\u0a91",
+         "\u0a93"-"\u0aa8",
+         "\u0aaa"-"\u0ab0",
+         "\u0ab2"-"\u0ab3",
+         "\u0ab5"-"\u0ab9",
+         "\u0abd",
+         "\u0ad0",
+         "\u0ae0",
+         "\u0b05"-"\u0b0c",
+         "\u0b0f"-"\u0b10",
+         "\u0b13"-"\u0b28",
+         "\u0b2a"-"\u0b30",
+         "\u0b32"-"\u0b33",
+         "\u0b36"-"\u0b39",
+         "\u0b3d",
+         "\u0b5c"-"\u0b5d",
+         "\u0b5f"-"\u0b61",
+         "\u0b85"-"\u0b8a",
+         "\u0b8e"-"\u0b90",
+         "\u0b92"-"\u0b95",
+         "\u0b99"-"\u0b9a",
+         "\u0b9c",
+         "\u0b9e"-"\u0b9f",
+         "\u0ba3"-"\u0ba4",
+         "\u0ba8"-"\u0baa",
+         "\u0bae"-"\u0bb5",
+         "\u0bb7"-"\u0bb9",
+         "\u0c05"-"\u0c0c",
+         "\u0c0e"-"\u0c10",
+         "\u0c12"-"\u0c28",
+         "\u0c2a"-"\u0c33",
+         "\u0c35"-"\u0c39",
+         "\u0c60"-"\u0c61",
+         "\u0c85"-"\u0c8c",
+         "\u0c8e"-"\u0c90",
+         "\u0c92"-"\u0ca8",
+         "\u0caa"-"\u0cb3",
+         "\u0cb5"-"\u0cb9",
+         "\u0cde",
+         "\u0ce0"-"\u0ce1",
+         "\u0d05"-"\u0d0c",
+         "\u0d0e"-"\u0d10",
+         "\u0d12"-"\u0d28",
+         "\u0d2a"-"\u0d39",
+         "\u0d60"-"\u0d61",
+         "\u0d85"-"\u0d96",
+         "\u0d9a"-"\u0db1",
+         "\u0db3"-"\u0dbb",
+         "\u0dbd",
+         "\u0dc0"-"\u0dc6",
+         "\u0e01"-"\u0e30",
+         "\u0e32"-"\u0e33",
+         "\u0e3f"-"\u0e46",
+         "\u0e81"-"\u0e82",
+         "\u0e84",
+         "\u0e87"-"\u0e88",
+         "\u0e8a",
+         "\u0e8d",
+         "\u0e94"-"\u0e97",
+         "\u0e99"-"\u0e9f",
+         "\u0ea1"-"\u0ea3",
+         "\u0ea5",
+         "\u0ea7",
+         "\u0eaa"-"\u0eab",
+         "\u0ead"-"\u0eb0",
+         "\u0eb2"-"\u0eb3",
+         "\u0ebd",
+         "\u0ec0"-"\u0ec4",
+         "\u0ec6",
+         "\u0edc"-"\u0edd",
+         "\u0f00",
+         "\u0f40"-"\u0f47",
+         "\u0f49"-"\u0f6a",
+         "\u0f88"-"\u0f8b",
+         "\u1000"-"\u1021",
+         "\u1023"-"\u1027",
+         "\u1029"-"\u102a",
+         "\u1050"-"\u1055",
+         "\u10a0"-"\u10c5",
+         "\u10d0"-"\u10f6",
+         "\u1100"-"\u1159",
+         "\u115f"-"\u11a2",
+         "\u11a8"-"\u11f9",
+         "\u1200"-"\u1206",
+         "\u1208"-"\u1246",
+         "\u1248",
+         "\u124a"-"\u124d",
+         "\u1250"-"\u1256",
+         "\u1258",
+         "\u125a"-"\u125d",
+         "\u1260"-"\u1286",
+         "\u1288",
+         "\u128a"-"\u128d",
+         "\u1290"-"\u12ae",
+         "\u12b0",
+         "\u12b2"-"\u12b5",
+         "\u12b8"-"\u12be",
+         "\u12c0",
+         "\u12c2"-"\u12c5",
+         "\u12c8"-"\u12ce",
+         "\u12d0"-"\u12d6",
+         "\u12d8"-"\u12ee",
+         "\u12f0"-"\u130e",
+         "\u1310",
+         "\u1312"-"\u1315",
+         "\u1318"-"\u131e",
+         "\u1320"-"\u1346",
+         "\u1348"-"\u135a",
+         "\u13a0"-"\u13f4",
+         "\u1401"-"\u166c",
+         "\u166f"-"\u1676",
+         "\u1681"-"\u169a",
+         "\u16a0"-"\u16ea",
+         "\u1780"-"\u17b3",
+         "\u17db",
+         "\u1820"-"\u1877",
+         "\u1880"-"\u18a8",
+         "\u1e00"-"\u1e9b",
+         "\u1ea0"-"\u1ef9",
+         "\u1f00"-"\u1f15",
+         "\u1f18"-"\u1f1d",
+         "\u1f20"-"\u1f45",
+         "\u1f48"-"\u1f4d",
+         "\u1f50"-"\u1f57",
+         "\u1f59",
+         "\u1f5b",
+         "\u1f5d",
+         "\u1f5f"-"\u1f7d",
+         "\u1f80"-"\u1fb4",
+         "\u1fb6"-"\u1fbc",
+         "\u1fbe",
+         "\u1fc2"-"\u1fc4",
+         "\u1fc6"-"\u1fcc",
+         "\u1fd0"-"\u1fd3",
+         "\u1fd6"-"\u1fdb",
+         "\u1fe0"-"\u1fec",
+         "\u1ff2"-"\u1ff4",
+         "\u1ff6"-"\u1ffc",
+         "\u203f"-"\u2040",
+         "\u207f",
+         "\u20a0"-"\u20af",
+         "\u2102",
+         "\u2107",
+         "\u210a"-"\u2113",
+         "\u2115",
+         "\u2119"-"\u211d",
+         "\u2124",
+         "\u2126",
+         "\u2128",
+         "\u212a"-"\u212d",
+         "\u212f"-"\u2131",
+         "\u2133"-"\u2139",
+         "\u2160"-"\u2183",
+         "\u3005"-"\u3007",
+         "\u3021"-"\u3029",
+         "\u3031"-"\u3035",
+         "\u3038"-"\u303a",
+         "\u3041"-"\u3094",
+         "\u309d"-"\u309e",
+         "\u30a1"-"\u30fe",
+         "\u3105"-"\u312c",
+         "\u3131"-"\u318e",
+         "\u31a0"-"\u31b7",
+         "\u3400"-"\u4db5",
+         "\u4e00"-"\u9fa5",
+         "\ua000"-"\ua48c",
+         "\uac00"-"\ud7a3",
+         "\uf900"-"\ufa2d",
+         "\ufb00"-"\ufb06",
+         "\ufb13"-"\ufb17",
+         "\ufb1d",
+         "\ufb1f"-"\ufb28",
+         "\ufb2a"-"\ufb36",
+         "\ufb38"-"\ufb3c",
+         "\ufb3e",
+         "\ufb40"-"\ufb41",
+         "\ufb43"-"\ufb44",
+         "\ufb46"-"\ufbb1",
+         "\ufbd3"-"\ufd3d",
+         "\ufd50"-"\ufd8f",
+         "\ufd92"-"\ufdc7",
+         "\ufdf0"-"\ufdfb",
+         "\ufe33"-"\ufe34",
+         "\ufe4d"-"\ufe4f",
+         "\ufe69",
+         "\ufe70"-"\ufe72",
+         "\ufe74",
+         "\ufe76"-"\ufefc",
+         "\uff04",
+         "\uff21"-"\uff3a",
+         "\uff3f",
+         "\uff41"-"\uff5a",
+         "\uff65"-"\uffbe",
+         "\uffc2"-"\uffc7",
+         "\uffca"-"\uffcf",
+         "\uffd2"-"\uffd7",
+         "\uffda"-"\uffdc",
+         "\uffe0"-"\uffe1",
+         "\uffe5"-"\uffe6"
+      ]
+  >
+|
+  < #PART_LETTER:
+      [  // all chars for which Character.isIdentifierPart is true
+         "\u0000"-"\u0008",
+         "\u000e"-"\u001b",
+         "$",
+         "0"-"9",
+         "A"-"Z",
+         "_",
+         "a"-"z",
+         "\u007f"-"\u009f",
+         "\u00a2"-"\u00a5",
+         "\u00aa",
+         "\u00b5",
+         "\u00ba",
+         "\u00c0"-"\u00d6",
+         "\u00d8"-"\u00f6",
+         "\u00f8"-"\u021f",
+         "\u0222"-"\u0233",
+         "\u0250"-"\u02ad",
+         "\u02b0"-"\u02b8",
+         "\u02bb"-"\u02c1",
+         "\u02d0"-"\u02d1",
+         "\u02e0"-"\u02e4",
+         "\u02ee",
+         "\u0300"-"\u034e",
+         "\u0360"-"\u0362",
+         "\u037a",
+         "\u0386",
+         "\u0388"-"\u038a",
+         "\u038c",
+         "\u038e"-"\u03a1",
+         "\u03a3"-"\u03ce",
+         "\u03d0"-"\u03d7",
+         "\u03da"-"\u03f3",
+         "\u0400"-"\u0481",
+         "\u0483"-"\u0486",
+         "\u048c"-"\u04c4",
+         "\u04c7"-"\u04c8",
+         "\u04cb"-"\u04cc",
+         "\u04d0"-"\u04f5",
+         "\u04f8"-"\u04f9",
+         "\u0531"-"\u0556",
+         "\u0559",
+         "\u0561"-"\u0587",
+         "\u0591"-"\u05a1",
+         "\u05a3"-"\u05b9",
+         "\u05bb"-"\u05bd",
+         "\u05bf",
+         "\u05c1"-"\u05c2",
+         "\u05c4",
+         "\u05d0"-"\u05ea",
+         "\u05f0"-"\u05f2",
+         "\u0621"-"\u063a",
+         "\u0640"-"\u0655",
+         "\u0660"-"\u0669",
+         "\u0670"-"\u06d3",
+         "\u06d5"-"\u06dc",
+         "\u06df"-"\u06e8",
+         "\u06ea"-"\u06ed",
+         "\u06f0"-"\u06fc",
+         "\u070f"-"\u072c",
+         "\u0730"-"\u074a",
+         "\u0780"-"\u07b0",
+         "\u0901"-"\u0903",
+         "\u0905"-"\u0939",
+         "\u093c"-"\u094d",
+         "\u0950"-"\u0954",
+         "\u0958"-"\u0963",
+         "\u0966"-"\u096f",
+         "\u0981"-"\u0983",
+         "\u0985"-"\u098c",
+         "\u098f"-"\u0990",
+         "\u0993"-"\u09a8",
+         "\u09aa"-"\u09b0",
+         "\u09b2",
+         "\u09b6"-"\u09b9",
+         "\u09bc",
+         "\u09be"-"\u09c4",
+         "\u09c7"-"\u09c8",
+         "\u09cb"-"\u09cd",
+         "\u09d7",
+         "\u09dc"-"\u09dd",
+         "\u09df"-"\u09e3",
+         "\u09e6"-"\u09f3",
+         "\u0a02",
+         "\u0a05"-"\u0a0a",
+         "\u0a0f"-"\u0a10",
+         "\u0a13"-"\u0a28",
+         "\u0a2a"-"\u0a30",
+         "\u0a32"-"\u0a33",
+         "\u0a35"-"\u0a36",
+         "\u0a38"-"\u0a39",
+         "\u0a3c",
+         "\u0a3e"-"\u0a42",
+         "\u0a47"-"\u0a48",
+         "\u0a4b"-"\u0a4d",
+         "\u0a59"-"\u0a5c",
+         "\u0a5e",
+         "\u0a66"-"\u0a74",
+         "\u0a81"-"\u0a83",
+         "\u0a85"-"\u0a8b",
+         "\u0a8d",
+         "\u0a8f"-"\u0a91",
+         "\u0a93"-"\u0aa8",
+         "\u0aaa"-"\u0ab0",
+         "\u0ab2"-"\u0ab3",
+         "\u0ab5"-"\u0ab9",
+         "\u0abc"-"\u0ac5",
+         "\u0ac7"-"\u0ac9",
+         "\u0acb"-"\u0acd",
+         "\u0ad0",
+         "\u0ae0",
+         "\u0ae6"-"\u0aef",
+         "\u0b01"-"\u0b03",
+         "\u0b05"-"\u0b0c",
+         "\u0b0f"-"\u0b10",
+         "\u0b13"-"\u0b28",
+         "\u0b2a"-"\u0b30",
+         "\u0b32"-"\u0b33",
+         "\u0b36"-"\u0b39",
+         "\u0b3c"-"\u0b43",
+         "\u0b47"-"\u0b48",
+         "\u0b4b"-"\u0b4d",
+         "\u0b56"-"\u0b57",
+         "\u0b5c"-"\u0b5d",
+         "\u0b5f"-"\u0b61",
+         "\u0b66"-"\u0b6f",
+         "\u0b82"-"\u0b83",
+         "\u0b85"-"\u0b8a",
+         "\u0b8e"-"\u0b90",
+         "\u0b92"-"\u0b95",
+         "\u0b99"-"\u0b9a",
+         "\u0b9c",
+         "\u0b9e"-"\u0b9f",
+         "\u0ba3"-"\u0ba4",
+         "\u0ba8"-"\u0baa",
+         "\u0bae"-"\u0bb5",
+         "\u0bb7"-"\u0bb9",
+         "\u0bbe"-"\u0bc2",
+         "\u0bc6"-"\u0bc8",
+         "\u0bca"-"\u0bcd",
+         "\u0bd7",
+         "\u0be7"-"\u0bef",
+         "\u0c01"-"\u0c03",
+         "\u0c05"-"\u0c0c",
+         "\u0c0e"-"\u0c10",
+         "\u0c12"-"\u0c28",
+         "\u0c2a"-"\u0c33",
+         "\u0c35"-"\u0c39",
+         "\u0c3e"-"\u0c44",
+         "\u0c46"-"\u0c48",
+         "\u0c4a"-"\u0c4d",
+         "\u0c55"-"\u0c56",
+         "\u0c60"-"\u0c61",
+         "\u0c66"-"\u0c6f",
+         "\u0c82"-"\u0c83",
+         "\u0c85"-"\u0c8c",
+         "\u0c8e"-"\u0c90",
+         "\u0c92"-"\u0ca8",
+         "\u0caa"-"\u0cb3",
+         "\u0cb5"-"\u0cb9",
+         "\u0cbe"-"\u0cc4",
+         "\u0cc6"-"\u0cc8",
+         "\u0cca"-"\u0ccd",
+         "\u0cd5"-"\u0cd6",
+         "\u0cde",
+         "\u0ce0"-"\u0ce1",
+         "\u0ce6"-"\u0cef",
+         "\u0d02"-"\u0d03",
+         "\u0d05"-"\u0d0c",
+         "\u0d0e"-"\u0d10",
+         "\u0d12"-"\u0d28",
+         "\u0d2a"-"\u0d39",
+         "\u0d3e"-"\u0d43",
+         "\u0d46"-"\u0d48",
+         "\u0d4a"-"\u0d4d",
+         "\u0d57",
+         "\u0d60"-"\u0d61",
+         "\u0d66"-"\u0d6f",
+         "\u0d82"-"\u0d83",
+         "\u0d85"-"\u0d96",
+         "\u0d9a"-"\u0db1",
+         "\u0db3"-"\u0dbb",
+         "\u0dbd",
+         "\u0dc0"-"\u0dc6",
+         "\u0dca",
+         "\u0dcf"-"\u0dd4",
+         "\u0dd6",
+         "\u0dd8"-"\u0ddf",
+         "\u0df2"-"\u0df3",
+         "\u0e01"-"\u0e3a",
+         "\u0e3f"-"\u0e4e",
+         "\u0e50"-"\u0e59",
+         "\u0e81"-"\u0e82",
+         "\u0e84",
+         "\u0e87"-"\u0e88",
+         "\u0e8a",
+         "\u0e8d",
+         "\u0e94"-"\u0e97",
+         "\u0e99"-"\u0e9f",
+         "\u0ea1"-"\u0ea3",
+         "\u0ea5",
+         "\u0ea7",
+         "\u0eaa"-"\u0eab",
+         "\u0ead"-"\u0eb9",
+         "\u0ebb"-"\u0ebd",
+         "\u0ec0"-"\u0ec4",
+         "\u0ec6",
+         "\u0ec8"-"\u0ecd",
+         "\u0ed0"-"\u0ed9",
+         "\u0edc"-"\u0edd",
+         "\u0f00",
+         "\u0f18"-"\u0f19",
+         "\u0f20"-"\u0f29",
+         "\u0f35",
+         "\u0f37",
+         "\u0f39",
+         "\u0f3e"-"\u0f47",
+         "\u0f49"-"\u0f6a",
+         "\u0f71"-"\u0f84",
+         "\u0f86"-"\u0f8b",
+         "\u0f90"-"\u0f97",
+         "\u0f99"-"\u0fbc",
+         "\u0fc6",
+         "\u1000"-"\u1021",
+         "\u1023"-"\u1027",
+         "\u1029"-"\u102a",
+         "\u102c"-"\u1032",
+         "\u1036"-"\u1039",
+         "\u1040"-"\u1049",
+         "\u1050"-"\u1059",
+         "\u10a0"-"\u10c5",
+         "\u10d0"-"\u10f6",
+         "\u1100"-"\u1159",
+         "\u115f"-"\u11a2",
+         "\u11a8"-"\u11f9",
+         "\u1200"-"\u1206",
+         "\u1208"-"\u1246",
+         "\u1248",
+         "\u124a"-"\u124d",
+         "\u1250"-"\u1256",
+         "\u1258",
+         "\u125a"-"\u125d",
+         "\u1260"-"\u1286",
+         "\u1288",
+         "\u128a"-"\u128d",
+         "\u1290"-"\u12ae",
+         "\u12b0",
+         "\u12b2"-"\u12b5",
+         "\u12b8"-"\u12be",
+         "\u12c0",
+         "\u12c2"-"\u12c5",
+         "\u12c8"-"\u12ce",
+         "\u12d0"-"\u12d6",
+         "\u12d8"-"\u12ee",
+         "\u12f0"-"\u130e",
+         "\u1310",
+         "\u1312"-"\u1315",
+         "\u1318"-"\u131e",
+         "\u1320"-"\u1346",
+         "\u1348"-"\u135a",
+         "\u1369"-"\u1371",
+         "\u13a0"-"\u13f4",
+         "\u1401"-"\u166c",
+         "\u166f"-"\u1676",
+         "\u1681"-"\u169a",
+         "\u16a0"-"\u16ea",
+         "\u1780"-"\u17d3",
+         "\u17db",
+         "\u17e0"-"\u17e9",
+         "\u180b"-"\u180e",
+         "\u1810"-"\u1819",
+         "\u1820"-"\u1877",
+         "\u1880"-"\u18a9",
+         "\u1e00"-"\u1e9b",
+         "\u1ea0"-"\u1ef9",
+         "\u1f00"-"\u1f15",
+         "\u1f18"-"\u1f1d",
+         "\u1f20"-"\u1f45",
+         "\u1f48"-"\u1f4d",
+         "\u1f50"-"\u1f57",
+         "\u1f59",
+         "\u1f5b",
+         "\u1f5d",
+         "\u1f5f"-"\u1f7d",
+         "\u1f80"-"\u1fb4",
+         "\u1fb6"-"\u1fbc",
+         "\u1fbe",
+         "\u1fc2"-"\u1fc4",
+         "\u1fc6"-"\u1fcc",
+         "\u1fd0"-"\u1fd3",
+         "\u1fd6"-"\u1fdb",
+         "\u1fe0"-"\u1fec",
+         "\u1ff2"-"\u1ff4",
+         "\u1ff6"-"\u1ffc",
+         "\u200c"-"\u200f",
+         "\u202a"-"\u202e",
+         "\u203f"-"\u2040",
+         "\u206a"-"\u206f",
+         "\u207f",
+         "\u20a0"-"\u20af",
+         "\u20d0"-"\u20dc",
+         "\u20e1",
+         "\u2102",
+         "\u2107",
+         "\u210a"-"\u2113",
+         "\u2115",
+         "\u2119"-"\u211d",
+         "\u2124",
+         "\u2126",
+         "\u2128",
+         "\u212a"-"\u212d",
+         "\u212f"-"\u2131",
+         "\u2133"-"\u2139",
+         "\u2160"-"\u2183",
+         "\u3005"-"\u3007",
+         "\u3021"-"\u302f",
+         "\u3031"-"\u3035",
+         "\u3038"-"\u303a",
+         "\u3041"-"\u3094",
+         "\u3099"-"\u309a",
+         "\u309d"-"\u309e",
+         "\u30a1"-"\u30fe",
+         "\u3105"-"\u312c",
+         "\u3131"-"\u318e",
+         "\u31a0"-"\u31b7",
+         "\u3400"-"\u4db5",
+         "\u4e00"-"\u9fa5",
+         "\ua000"-"\ua48c",
+         "\uac00"-"\ud7a3",
+         "\uf900"-"\ufa2d",
+         "\ufb00"-"\ufb06",
+         "\ufb13"-"\ufb17",
+         "\ufb1d"-"\ufb28",
+         "\ufb2a"-"\ufb36",
+         "\ufb38"-"\ufb3c",
+         "\ufb3e",
+         "\ufb40"-"\ufb41",
+         "\ufb43"-"\ufb44",
+         "\ufb46"-"\ufbb1",
+         "\ufbd3"-"\ufd3d",
+         "\ufd50"-"\ufd8f",
+         "\ufd92"-"\ufdc7",
+         "\ufdf0"-"\ufdfb",
+         "\ufe20"-"\ufe23",
+         "\ufe33"-"\ufe34",
+         "\ufe4d"-"\ufe4f",
+         "\ufe69",
+         "\ufe70"-"\ufe72",
+         "\ufe74",
+         "\ufe76"-"\ufefc",
+         "\ufeff",
+         "\uff04",
+         "\uff10"-"\uff19",
+         "\uff21"-"\uff3a",
+         "\uff3f",
+         "\uff41"-"\uff5a",
+         "\uff65"-"\uffbe",
+         "\uffc2"-"\uffc7",
+         "\uffca"-"\uffcf",
+         "\uffd2"-"\uffd7",
+         "\uffda"-"\uffdc",
+         "\uffe0"-"\uffe1",
+         "\uffe5"-"\uffe6",
+         "\ufff9"-"\ufffb"
+      ]
+  >
+}
+
+/* SEPARATORS */
+
+TOKEN :
+{
+  < LPAREN: "(" >
+| < RPAREN: ")" >
+| < LBRACE: "{" >
+| < RBRACE: "}" >
+| < LBRACK: "[" >
+| < RBRACK: "]" >
+| < COLON:  ":" >
+| < SEMICOLON: ";" >
+| < COMMA: "," >
+| < AT: "@" >
+| < EQUALS: "=" >
+| < DOT: "." >
+| < DASH: "-" >
+}
+
+TOKEN :
+{
+  < LT: "<" >
+| < GT: ">" >
+| < TICK: "`" >
+}
+
+
+/*********************************************
+ * THE AVRO IDL LANGUAGE GRAMMAR STARTS HERE *
+ ********************************************/
+
+/**
+ * The input to Idl is a CompilationUnit, which is currently
+ * just a single Protocol.
+ */
+Protocol CompilationUnit():
+{
+  Protocol p;
+}
+{
+  p = ProtocolDeclaration()
+  ( < "\u001a" > )?
+  ( <STUFF_TO_IGNORE: ~[]> )?
+  <EOF>
+  { return p; }
+}
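+
+/*
+ * Example (illustrative; names are made up): a minimal input accepted
+ * by CompilationUnit looks like
+ *
+ *   @namespace("org.example")
+ *   protocol Simple {
+ *     record Greeting { string message; }
+ *     Greeting hello(string name);
+ *   }
+ */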
+
+/*
+ * Declaration syntax follows.
+ */
+Schema NamedSchemaDeclaration(Map<String, JsonNode> props):
+{
+  Schema s;
+  String savedSpace = this.namespace;
+}
+{
+  {
+    if (props.containsKey("namespace"))
+      this.namespace = getTextProp("namespace", props, token);
+  }
+ (
+     s = FixedDeclaration()
+   | s = EnumDeclaration()
+   | s = RecordDeclaration()
+ )
+ {
+   this.namespace = savedSpace;
+
+   for (String key : props.keySet())
+     if ("namespace".equals(key)) {               // already handled: ignore
+     } else if ("aliases".equals(key)) {          // aliases
+       for (String alias : getTextProps("aliases", props, token))
+         s.addAlias(alias);
+     } else {                                     // add all other props
+       s.addProp(key, props.get(key));
+     }
+
+   return s;
+ }
+}
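+
+/*
+ * Example (illustrative): schema-level properties are written as
+ * annotations before the declaration; "namespace" and "aliases" are
+ * handled specially above, e.g.
+ *
+ *   @namespace("org.example.v2")
+ *   @aliases(["org.example.OldGreeting"])
+ *   record Greeting { string message; }
+ */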
+
+Schema UnionDefinition():
+{
+  Schema s;
+  List<Schema> schemata = new ArrayList<Schema>();
+}
+{
+ // TODO: should the parser disallow immediately nested unions here? (The Avro spec forbids unions as immediate members of other unions.)
+
+  "union"
+  "{"
+  s = Type()
+  { schemata.add(s); }
+
+  (
+    ","
+    s = Type()
+    { schemata.add(s); }
+  )*
+  "}"
+  {
+    return Schema.createUnion(schemata);
+  }
+}
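+
+/*
+ * Example (illustrative): a union is written inline as a field type, e.g.
+ *
+ *   record Message {
+ *     union { null, string } subject = null;
+ *   }
+ */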
+
+
+Protocol ProtocolDeclaration():
+{
+  String name;
+  Protocol p;
+  Map<String, JsonNode> props = new LinkedHashMap<String, JsonNode>();
+}
+{
+  ( SchemaProperty(props) )*
+  {
+    if (props.containsKey("namespace"))
+      namespace = getTextProp("namespace", props, token);
+  }
+ "protocol"
+   name = Identifier()
+ {
+   p = new Protocol(name, getDoc(), namespace);
+   for (String key : props.keySet())
+     if ("namespace".equals(key)) {               // already handled: ignore
+     } else {                                     // add all other props
+       p.addProp(key, props.get(key));
+     }
+ }
+ ProtocolBody(p)
+ {
+   return p;
+ }
+}
+
+
+Schema EnumDeclaration():
+{
+  String name;
+  List<String> symbols;
+}
+{
+  "enum"
+  name = Identifier()
+  symbols = EnumBody()
+  {
+    Schema s = Schema.createEnum(name, getDoc(), this.namespace, symbols);
+    names.put(s.getFullName(), s);
+    return s;
+  }
+}
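+
+/*
+ * Example (illustrative):
+ *
+ *   enum Suit { SPADES, DIAMONDS, CLUBS, HEARTS }
+ */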
+
+List<String> EnumBody():
+{
+  List<String> symbols = new ArrayList<String>();
+}
+{
+  "{"
+  [ EnumConstant(symbols) ( LOOKAHEAD(2) "," EnumConstant(symbols) )* ]
+  "}"
+  {
+    return symbols;
+  }
+}
+
+void EnumConstant(List<String> symbols):
+{
+  String sym;
+}
+{
+  sym = Identifier() { symbols.add(sym); }
+}
+
+void ProtocolBody(Protocol p):
+{
+  Schema schema;
+  Message message;
+  Protocol importProtocol;
+  Map<String, JsonNode> props = new LinkedHashMap<String, JsonNode>();
+}
+{
+  "{"
+  (
+   <IMPORT>
+   ((( importProtocol = ImportIdl() | importProtocol = ImportProtocol()) {
+       for (Schema s : importProtocol.getTypes())
+         names.put(s.getFullName(), s);
+       p.getMessages().putAll(importProtocol.getMessages());
+     })
+     | schema = ImportSchema()
+     )
+   |
+   ( SchemaProperty(props) )*
+   (
+     schema = NamedSchemaDeclaration(props)
+     |
+     message = MessageDeclaration(p, props) {
+       p.getMessages().put(message.getName(), message);
+     }
+    )  { props.clear(); }
+  ) *
+  "}"
+
+  {
+    p.setTypes(names.values());
+  }
+}
+
+
+Protocol ImportIdl() : {
+  String importFile;
+}
+{
+  <IDL> importFile = JsonString() ";"
+    {
+      try {
+        Idl idl = new Idl(findFile(importFile), this); 
+        try {
+          return idl.CompilationUnit();
+        } finally {
+          idl.close();
+        }
+      } catch (IOException e) {
+        throw error("Error importing "+importFile+": "+e, token);
+      }        
+    }
+}
+
+Protocol ImportProtocol() : {
+  String importFile;
+}
+{
+  <PROTOCOL> importFile = JsonString() ";"
+    {
+
+      try {
+        InputStream stream = findFile(importFile).openStream();
+        try {
+          return Protocol.parse(stream);
+        } finally {
+          stream.close();
+        }
+      } catch (IOException e) {
+        throw error("Error importing "+importFile+": "+e, token);
+      }        
+    }
+}
+
+Schema ImportSchema() : {
+  String importFile;
+}
+{
+  <SCHEMA> importFile = JsonString() ";"
+    {
+      try {
+        Parser parser = new Schema.Parser();
+        parser.addTypes(names);                   // inherit names
+        InputStream stream = findFile(importFile).openStream();
+        try {
+          Schema value = parser.parse(stream);
+          names = parser.getTypes();                // update names
+          return value;
+        } finally {
+          stream.close();
+        }
+      } catch (IOException e) {
+        throw error("Error importing "+importFile+": "+e, token);
+      }        
+    }
+}
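+
+/*
+ * Example (illustrative; file names are made up): the three import
+ * forms accepted above:
+ *
+ *   import idl "common.avdl";
+ *   import protocol "service.avpr";
+ *   import schema "record.avsc";
+ */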
+
+Schema FixedDeclaration():
+{
+  String name;
+  Token sizeTok;
+}
+{
+  "fixed" name = Identifier() "(" sizeTok = <INTEGER_LITERAL> ")"
+  ";"
+  {
+    Schema s = Schema.createFixed(name, getDoc(), this.namespace,
+                                  Integer.parseInt(sizeTok.image));
+    names.put(s.getFullName(), s);
+    return s;
+  }
+}
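+
+/*
+ * Example (illustrative): a fixed type declares a name and a size in bytes:
+ *
+ *   fixed MD5(16);
+ */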
+
+Schema RecordDeclaration():
+{
+  String name;
+  List<Field> fields = new ArrayList<Field>();
+  boolean isError;
+}
+{
+  (
+    "record" { isError = false; }
+    | "error" { isError = true; }
+  )
+  name = Identifier()
+  {
+    Schema result = Schema.createRecord(
+      name, getDoc(), this.namespace, isError);
+    names.put(result.getFullName(), result);
+  }
+  "{"
+    ( FieldDeclaration(fields) )*
+  "}"
+  {
+    result.setFields(fields);
+    return result;
+  }
+}
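+
+/*
+ * Example (illustrative): records and errors share this production;
+ * "error" simply sets the isError flag:
+ *
+ *   error Kaboom { string explanation; }
+ */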
+
+void SchemaProperty(Map<String, JsonNode> properties):
+{
+  String key;
+  JsonNode val;
+}
+{
+  "@" key = PropertyName() "(" val = Json() ")"
+  {
+    if (properties.containsKey(key))
+      throw error("Property '" + key + "' already specified", token);
+    properties.put(key, val);
+  }
+}
+
+
+void FieldDeclaration(List<Field> fields):
+{
+  Schema type;
+  Map<String, JsonNode> props = new LinkedHashMap<String, JsonNode>();
+}
+{
+  // TODO: should properties be specifiable on any Type, or only on field declarations as done here?
+
+  ( SchemaProperty(props) )*
+  type = Type()
+  VariableDeclarator(type, fields) ( "," VariableDeclarator(type, fields) )*
+  ";"
+  {
+    for (String key : props.keySet())
+      type.addProp(key, props.get(key));
+  }
+}
+
+void VariableDeclarator(Schema type, List<Field> fields):
+{
+  String name;
+  JsonNode defaultValue = null;
+  Map<String, JsonNode> props = new LinkedHashMap<String, JsonNode>();
+}
+{
+    ( SchemaProperty(props) )*
+    
+  name = Identifier()
+
+    [ <EQUALS> defaultValue=Json() ]
+    
+  {
+    Field.Order order = Field.Order.ASCENDING;
+    for (String key : props.keySet())
+      if ("order".equals(key))
+        order = Field.Order.valueOf(getTextProp(key,props,token).toUpperCase());
+    Field field = new Field(name, type, getDoc(), defaultValue, order);
+    for (String key : props.keySet())
+      if ("order".equals(key)) {                  // already handled: ignore
+      } else if ("aliases".equals(key)) {         // aliases
+        for (String alias : getTextProps("aliases", props, token))
+          field.addAlias(alias);
+      } else {                                    // add all other props
+        field.addProp(key, props.get(key));
+      }
+    fields.add(field);
+  }
+}
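+
+/*
+ * Example (illustrative): a declarator may carry field properties and a
+ * JSON default value, e.g.
+ *
+ *   record Employee {
+ *     @order("descending") string name = "unknown";
+ *   }
+ */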
+
+
+String MessageDocumentation():
+{}
+{
+   // Don't parse anything, just return the doc string
+   {
+       return getDoc();
+   }
+}
+
+Message MessageDeclaration(Protocol p, Map<String, JsonNode> props):
+{
+  String msgDoc;
+  String name;
+  Schema request;
+  Schema response;
+  boolean oneWay = false;
+  List<Schema> errorSchemata = new ArrayList<Schema>();
+  errorSchemata.add(Protocol.SYSTEM_ERROR);
+}
+{
+  msgDoc = MessageDocumentation()
+  response = ResultType()
+  name = Identifier()
+  request = FormalParameters()
+  [ "oneway" {oneWay = true; } | "throws" ErrorList(errorSchemata) ]
+  ";"
+  {
+    Schema errors = Schema.createUnion(errorSchemata);
+    if (oneWay && response.getType() != Type.NULL)
+      throw error("One-way message'"+name+"' must return void", token);
+    return oneWay
+    ? p.createMessage(name, msgDoc, props, request)
+    : p.createMessage(name, msgDoc, props, request, response, errors);
+    
+  }
+}
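+
+/*
+ * Example (illustrative; names are made up): message declarations
+ * accepted above:
+ *
+ *   void ping() oneway;
+ *   string hello(string greeting) throws Curse;
+ */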
+
+void ErrorList(List<Schema> errors):
+{
+  Schema s;
+}
+{
+  s = ReferenceType() { errors.add(s); }
+  ( "," s = ReferenceType() { errors.add(s); } )*
+}
+
+Schema FormalParameters():
+{
+  List<Field> fields = new ArrayList<Field>();
+}
+{
+  (
+    "(" [ FormalParameter(fields) ( "," FormalParameter(fields) )* ] ")"
+  )
+  {
+    return Schema.createRecord(fields);
+  }
+}
+
+void FormalParameter(List<Field> fields):
+{
+  Schema type;
+}
+{
+  type = Type()
+  VariableDeclarator(type, fields)
+}
+
+Schema Type():
+{
+  Schema s;
+  Map<String, JsonNode> props = new LinkedHashMap<String, JsonNode>();
+}
+{
+
+    ( SchemaProperty(props) )*
+  (
+      LOOKAHEAD(2) s = ReferenceType()
+    | s = PrimitiveType()
+    | s = UnionDefinition()
+    | s = ArrayType()
+    | s = MapType()
+  )
+  {
+    for (String key : props.keySet())
+      s.addProp(key, props.get(key));
+    return s;
+  }
+}
+
+Schema ArrayType():
+{
+  Schema elemSchema;
+}
+{
+  "array" "<" elemSchema = Type() ">"
+  {
+    return Schema.createArray(elemSchema);
+  }
+}
+
+Schema MapType():
+{
+  Schema elemSchema;
+}
+{
+  "map" "<" elemSchema = Type() ">"
+  {
+    return Schema.createMap(elemSchema);
+  }
+}
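+
+/*
+ * Example (illustrative): arrays and maps take a single type parameter;
+ * map keys are always strings in Avro:
+ *
+ *   array<string> names;
+ *   map<int> countsByName;
+ */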
+
+/**
+ * A reference to some other existing type.
+ */
+Schema ReferenceType():
+{
+  String part;
+  Token tok;
+  StringBuilder sb = new StringBuilder();
+}
+{
+  (
+    part = Identifier() { sb.append(part); }
+    ("." tok = AnyIdentifier() { sb.append(".").append(tok.image); })*
+    )
+  {
+    String name = sb.toString();
+    if ((name.indexOf('.') == -1) && namespace != null)
+      name = namespace + "." + name;
+    Schema type = names.get(name);
+    if (type == null)
+      throw error("Undefined name '" + name + "'", token);
+    return type;
+  }
+}
+
+Schema PrimitiveType():
+{}
+{
+  "boolean" { return Schema.create(Type.BOOLEAN); }
+| "bytes" { return Schema.create(Type.BYTES); }
+| "int" { return Schema.create(Type.INT); }
+| "string" { return Schema.create(Type.STRING); }
+| "float" { return Schema.create(Type.FLOAT); }
+| "double" { return Schema.create(Type.DOUBLE); }
+| "long" { return Schema.create(Type.LONG); }
+| "null" { return Schema.create(Type.NULL); }
+}
+
+/**
+ * Result types are like other types, except we provide "void" as
+ * an alias of "null"
+ */
+Schema ResultType():
+{
+  Schema schema;
+}
+{
+  LOOKAHEAD(2)
+    "void"          { return Schema.create(Type.NULL); }
+  | schema = Type() { return schema; }
+}
+
+String PropertyName():
+{
+  Token t;
+  StringBuffer name = new StringBuffer();
+}
+{
+  t = <IDENTIFIER>   { name.append(t.image); }
+  ( t = <DASH>       { name.append(t.image); }
+    t = <IDENTIFIER> { name.append(t.image); }
+  ) *
+  { return name.toString(); }
+}
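+
+/*
+ * Example (illustrative): property names may contain dashes, e.g. the
+ * "java-class" property:
+ *
+ *   @java-class("java.lang.String") string name;
+ */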
+
+String Identifier():
+{
+  Token t;
+}
+{
+  ( t = <IDENTIFIER> { return t.image; } )
+| ( "`" t = AnyIdentifier() "`" {
+    return t.image;
+  })
+}
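+
+/*
+ * Example (illustrative): a reserved word can be used as an identifier
+ * by escaping it with back-ticks:
+ *
+ *   record `error` { string message; }
+ */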
+
+Token AnyIdentifier():
+{
+  Token t;
+}
+{
+  (t = <ARRAY> |
+   t = <BOOLEAN> |
+   t = <DOUBLE> |
+   t = <ENUM> |
+   t = <ERROR> |
+   t = <FALSE> |
+   t = <FIXED> |
+   t = <FLOAT> |
+   t = <INT> |
+   t = <LONG> |
+   t = <MAP> |
+   t = <BYTES> |
+   t = <STRING> |
+   t = <PROTOCOL> |
+   t = <RECORD> |
+   t = <THROWS> |
+   t = <TRUE> |
+   t = <UNION> |
+   t = <VOID> |
+   t = <IDENTIFIER>)
+  {
+    return t;
+  }
+}
+
+JsonNode Json() :
+{ String s; Token t; JsonNode n; }
+{ 
+( s = JsonString() { n = new TextNode(s); }
+| (t=<INTEGER_LITERAL> { n = new LongNode(Long.parseLong(t.image)); })
+| (t=<FLOATING_POINT_LITERAL> {n=new DoubleNode(Double.parseDouble(t.image));})
+| n=JsonObject()
+| n=JsonArray()
+| ( "true" { n = BooleanNode.TRUE; } )
+| ( "false" { n = BooleanNode.FALSE; } )
+| ( "null" { n = NullNode.instance; } )
+ )
+  { return n; }
+}
+
+String JsonString() :
+{ Token t; }
+{
+  t = <STRING_LITERAL>
+  {
+    String betweenQuotes = t.image.substring(1, t.image.length() - 1);
+    return StringEscapeUtils.unescapeJavaScript(betweenQuotes);
+  }
+}
+
+JsonNode JsonObject() :
+{ 
+  ObjectNode o = FACTORY.objectNode();
+}
+{
+  "{" [ JsonFields(o) ] "}"
+  { return o; }
+}
+
+void JsonFields(ObjectNode o) :
+{}
+{
+  JsonPair(o) [ "," JsonFields(o) ]
+}
+
+void JsonPair(ObjectNode o) :
+{
+  String name;
+  JsonNode value;
+}
+{
+  name=JsonString() <COLON> value=Json()
+    { o.put(name, value); } 
+}
+
+JsonNode JsonArray() :
+{ ArrayNode a = FACTORY.arrayNode(); }
+{
+  <LBRACK> [ JsonElements(a) ] <RBRACK>
+    { return a; }
+}
+
+void JsonElements(ArrayNode a) :
+{ JsonNode element; }
+{
+  element=Json() { a.add(element); } [ "," JsonElements(a) ]
+}
diff --git a/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/enum.vm b/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/enum.vm
new file mode 100644
index 0000000..2056b1d
--- /dev/null
+++ b/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/enum.vm
@@ -0,0 +1,34 @@
+##
+## Licensed to the Apache Software Foundation (ASF) under one
+## or more contributor license agreements.  See the NOTICE file
+## distributed with this work for additional information
+## regarding copyright ownership.  The ASF licenses this file
+## to you under the Apache License, Version 2.0 (the
+## "License"); you may not use this file except in compliance
+## with the License.  You may obtain a copy of the License at
+##
+##     http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+##
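+##
+## Illustrative sketch (assumed; schema name made up): for an enum schema
+## named Suit with symbols SPADES and HEARTS, this template emits roughly
+##
+##   public enum Suit {
+##     SPADES, HEARTS ;
+##     public static final org.apache.avro.Schema SCHEMA$ = ...;
+##   }
+##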
+#if ($schema.getNamespace())
+package $schema.getNamespace();  
+#end  
+@SuppressWarnings("all")
+#if ($schema.getDoc())
+/** $schema.getDoc() */
+#end
+#foreach ($annotation in $this.javaAnnotations($schema))
+@$annotation
+#end
+@org.apache.avro.specific.AvroGenerated
+public enum ${this.mangle($schema.getName())} { 
+  #foreach ($symbol in ${schema.getEnumSymbols()})${this.mangle($symbol)}#if ($velocityHasNext), #end#end
+  ;
+  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("${this.javaEscape($schema.toString())}");
+  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+}
diff --git a/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/fixed.vm b/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/fixed.vm
new file mode 100644
index 0000000..aff3597
--- /dev/null
+++ b/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/fixed.vm
@@ -0,0 +1,65 @@
+##
+## Licensed to the Apache Software Foundation (ASF) under one
+## or more contributor license agreements.  See the NOTICE file
+## distributed with this work for additional information
+## regarding copyright ownership.  The ASF licenses this file
+## to you under the Apache License, Version 2.0 (the
+## "License"); you may not use this file except in compliance
+## with the License.  You may obtain a copy of the License at
+##
+##     http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+##
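+##
+## Illustrative sketch (assumed): for a declaration like "fixed MD5(16);"
+## this template emits roughly
+##
+##   @org.apache.avro.specific.FixedSize(16)
+##   public class MD5 extends org.apache.avro.specific.SpecificFixed { ... }
+##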
+#if ($schema.getNamespace())
+package $schema.getNamespace();  
+#end
+@SuppressWarnings("all")
+#if ($schema.getDoc())
+/** $schema.getDoc() */
+#end
+#foreach ($annotation in $this.javaAnnotations($schema))
+@$annotation
+#end
+@org.apache.avro.specific.FixedSize($schema.getFixedSize())
+@org.apache.avro.specific.AvroGenerated
+public class ${this.mangle($schema.getName())} extends org.apache.avro.specific.SpecificFixed {
+  private static final long serialVersionUID = ${this.fingerprint64($schema)}L;
+  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("${this.javaEscape($schema.toString())}");
+  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
+  
+  /** Creates a new ${this.mangle($schema.getName())} */
+  public ${this.mangle($schema.getName())}() {
+    super();
+  }
+  
+  /**
+   * Creates a new ${this.mangle($schema.getName())} with the given bytes.
+   * @param bytes The bytes with which to create the new ${this.mangle($schema.getName())}.
+   */
+  public ${this.mangle($schema.getName())}(byte[] bytes) {
+    super(bytes);
+  }
+
+  private static final org.apache.avro.io.DatumWriter
+    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);  
+
+  @Override public void writeExternal(java.io.ObjectOutput out)
+    throws java.io.IOException {
+    WRITER$.write(this, org.apache.avro.specific.SpecificData.getEncoder(out));
+  }
+
+  private static final org.apache.avro.io.DatumReader
+    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);  
+
+  @Override public void readExternal(java.io.ObjectInput in)
+    throws java.io.IOException {
+    READER$.read(this, org.apache.avro.specific.SpecificData.getDecoder(in));
+  }
+
+}
diff --git a/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/protocol.vm b/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/protocol.vm
new file mode 100644
index 0000000..b330796
--- /dev/null
+++ b/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/protocol.vm
@@ -0,0 +1,96 @@
+##
+## Licensed to the Apache Software Foundation (ASF) under one
+## or more contributor license agreements.  See the NOTICE file
+## distributed with this work for additional information
+## regarding copyright ownership.  The ASF licenses this file
+## to you under the Apache License, Version 2.0 (the
+## "License"); you may not use this file except in compliance
+## with the License.  You may obtain a copy of the License at
+##
+##     http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+##
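+##
+## Illustrative sketch (assumed; names made up): the template emits one
+## method per protocol message, e.g. for a non-one-way "hello" message
+##
+##   java.lang.CharSequence hello(java.lang.CharSequence greeting)
+##     throws org.apache.avro.AvroRemoteException;
+##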
+#if ($protocol.getNamespace())
+package $protocol.getNamespace();
+#end
+
+@SuppressWarnings("all")
+#if ($protocol.getDoc())
+/** $protocol.getDoc() */
+#end
+#foreach ($annotation in $this.javaAnnotations($protocol))
+@$annotation
+#end
+@org.apache.avro.specific.AvroGenerated
+public interface $this.mangle($protocol.getName()) {
+  public static final org.apache.avro.Protocol PROTOCOL = org.apache.avro.Protocol.parse(${this.javaSplit($protocol.toString())});
+#foreach ($e in $protocol.getMessages().entrySet())
+#set ($name = $e.getKey())
+#set ($message = $e.getValue())
+#set ($response = $message.getResponse())
+  /**
+#if ($message.getDoc())
+   * $this.escapeForJavadoc($message.getDoc())
+#end
+#foreach ($p in $message.getRequest().getFields())##
+#if ($p.doc())   * @param ${this.mangle($p.name())} $p.doc()
+#end
+#end
+   */
+#foreach ($annotation in $this.javaAnnotations($message))
+  @$annotation
+#end
+  #if ($message.isOneWay())void#else${this.javaUnbox($response)}#end
+ ${this.mangle($name)}(##
+#foreach ($p in $message.getRequest().getFields())##
+#*      *#${this.javaUnbox($p.schema())} ${this.mangle($p.name())}#if ($velocityHasNext), #end
+#end
+)#if (! $message.isOneWay())
+ throws org.apache.avro.AvroRemoteException##
+## The first error is always "string", so we skip it.
+#foreach ($error in $message.getErrors().getTypes().subList(1, $message.getErrors().getTypes().size()))
+, ${this.mangle($error.getFullName())}##
+#end##    (error list)
+#end##    (one way)
+;
+#end## (requests)
+
+## Generate nested callback API
+  @SuppressWarnings("all")
+#if ($protocol.getDoc())
+  /** $protocol.getDoc() */
+#end
+  public interface Callback extends $this.mangle($protocol.getName()) {
+    public static final org.apache.avro.Protocol PROTOCOL = #if ($protocol.getNamespace())$protocol.getNamespace().#end${this.mangle($protocol.getName())}.PROTOCOL;
+#foreach ($e in $protocol.getMessages().entrySet())
+#set ($name = $e.getKey())
+#set ($message = $e.getValue())
+#set ($response = $message.getResponse())
+## Generate callback method if the message is not one-way:
+#if (! $message.isOneWay())
+    /**
+#if ($message.getDoc())
+     * $this.escapeForJavadoc($message.getDoc())
+#end
+#foreach ($p in $message.getRequest().getFields())##
+#if ($p.doc())     * @param ${this.mangle($p.name())} $p.doc()
+#end
+#end
+     * @throws java.io.IOException The async call could not be completed.
+     */
+    void ${this.mangle($name)}(##
+#foreach ($p in $message.getRequest().getFields())##
+#*      *#${this.javaUnbox($p.schema())} ${this.mangle($p.name())}#if ($velocityHasNext), #end
+#end
+#if ($message.getRequest().getFields().size() > 0), #end
+org.apache.avro.ipc.Callback<${this.javaType($response)}> callback) throws java.io.IOException;
+#end## (generate callback method)
+#end## (requests)
+  }## End of Callback interface
+
+}## End of protocol interface
diff --git a/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/record.vm b/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/record.vm
new file mode 100644
index 0000000..1cd17e6
--- /dev/null
+++ b/lang/java/compiler/src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/record.vm
@@ -0,0 +1,379 @@
+##
+## Licensed to the Apache Software Foundation (ASF) under one
+## or more contributor license agreements.  See the NOTICE file
+## distributed with this work for additional information
+## regarding copyright ownership.  The ASF licenses this file
+## to you under the Apache License, Version 2.0 (the
+## "License"); you may not use this file except in compliance
+## with the License.  You may obtain a copy of the License at
+##
+##     http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+##
+#if ($schema.getNamespace())
+package $schema.getNamespace();  
+#end
+@SuppressWarnings("all")
+#if ($schema.getDoc())
+/** $schema.getDoc() */
+#end
+#foreach ($annotation in $this.javaAnnotations($schema))
+@$annotation
+#end
+@org.apache.avro.specific.AvroGenerated
+public class ${this.mangle($schema.getName())}#if ($schema.isError()) extends org.apache.avro.specific.SpecificExceptionBase#else extends org.apache.avro.specific.SpecificRecordBase#end implements org.apache.avro.specific.SpecificRecord {
+  private static final long serialVersionUID = ${this.fingerprint64($schema)}L;
+  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse(${this.javaSplit($schema.toString())});
+  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+#foreach ($field in $schema.getFields())
+#if ($field.doc())
+  /** $field.doc() */
+#end
+#foreach ($annotation in $this.javaAnnotations($field))
+  @$annotation
+#end
+  #if (${this.deprecatedFields()})@Deprecated#end #if (${this.publicFields()})public#elseif (${this.privateFields()})private#end ${this.javaUnbox($field.schema())} ${this.mangle($field.name(), $schema.isError())};
+#end
+#if ($schema.isError())
+
+  public ${this.mangle($schema.getName())}() {
+    super();
+  }
+  
+  public ${this.mangle($schema.getName())}(Object value) {
+    super(value);
+  }
+
+  public ${this.mangle($schema.getName())}(Throwable cause) {
+    super(cause);
+  }
+
+  public ${this.mangle($schema.getName())}(Object value, Throwable cause) {
+    super(value, cause);
+  }
+  
+#else
+#if ($schema.getFields().size() > 0)  
+
+  /**
+   * Default constructor.  Note that this does not initialize fields
+   * to their default values from the schema.  If that is desired then
+   * one should use <code>newBuilder()</code>. 
+   */
+  public ${this.mangle($schema.getName())}() {}
+
+  /**
+   * All-args constructor.
+#foreach ($field in $schema.getFields())
+#if ($field.doc())   * @param ${this.mangle($field.name())} $field.doc()
+#end
+#end
+   */
+  public ${this.mangle($schema.getName())}(#foreach($field in $schema.getFields())${this.javaType($field.schema())} ${this.mangle($field.name())}#if($velocityCount < $schema.getFields().size()), #end#end) {
+#foreach ($field in $schema.getFields())
+    this.${this.mangle($field.name())} = ${this.mangle($field.name())};
+#end
+  }
+#end
+
+#end
+  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
+  // Used by DatumWriter.  Applications should not call. 
+  public java.lang.Object get(int field$) {
+    switch (field$) {
+#set ($i = 0)
+#foreach ($field in $schema.getFields())
+    case $i: return ${this.mangle($field.name(), $schema.isError())};
+#set ($i = $i + 1)
+#end
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+  // Used by DatumReader.  Applications should not call. 
+  @SuppressWarnings(value="unchecked")
+  public void put(int field$, java.lang.Object value$) {
+    switch (field$) {
+#set ($i = 0)
+#foreach ($field in $schema.getFields())
+    case $i: ${this.mangle($field.name(), $schema.isError())} = (${this.javaType($field.schema())})value$; break;
+#set ($i = $i + 1)
+#end
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+
+#foreach ($field in $schema.getFields())
+  /**
+   * Gets the value of the '${this.mangle($field.name(), $schema.isError())}' field.
+#if ($field.doc())   * @return $field.doc()
+#end
+   */
+  public ${this.javaType($field.schema())} ${this.generateGetMethod($schema, $field)}() {
+    return ${this.mangle($field.name(), $schema.isError())};
+  }
+
+#if ($this.createSetters)
+  /**
+   * Sets the value of the '${this.mangle($field.name(), $schema.isError())}' field.
+#if ($field.doc())   * $field.doc()
+#end
+   * @param value the value to set.
+   */
+  public void ${this.generateSetMethod($schema, $field)}(${this.javaType($field.schema())} value) {
+    this.${this.mangle($field.name(), $schema.isError())} = value;
+  }
+#end
+
+#end
+  /**
+   * Creates a new ${this.mangle($schema.getName())} RecordBuilder.
+   * @return A new ${this.mangle($schema.getName())} RecordBuilder
+   */
+  public static #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder newBuilder() {
+    return new #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder();
+  }
+  
+  /**
+   * Creates a new ${this.mangle($schema.getName())} RecordBuilder by copying an existing Builder.
+   * @param other The existing builder to copy.
+   * @return A new ${this.mangle($schema.getName())} RecordBuilder
+   */
+  public static #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder newBuilder(#if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder other) {
+    return new #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder(other);
+  }
+  
+  /**
+   * Creates a new ${this.mangle($schema.getName())} RecordBuilder by copying an existing $this.mangle($schema.getName()) instance.
+   * @param other The existing instance to copy.
+   * @return A new ${this.mangle($schema.getName())} RecordBuilder
+   */
+  public static #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder newBuilder(#if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())} other) {
+    return new #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder(other);
+  }
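+## Illustrative sketch (assumed; names made up): a generated builder is
+## typically used as
+##
+##   Greeting g = Greeting.newBuilder().setMessage("hi").build();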
+  
+  /**
+   * RecordBuilder for ${this.mangle($schema.getName())} instances.
+   */
+  public static class Builder extends#if ($schema.isError()) org.apache.avro.specific.SpecificErrorBuilderBase<${this.mangle($schema.getName())}>#else org.apache.avro.specific.SpecificRecordBuilderBase<${this.mangle($schema.getName())}>#end
+
+    implements#if ($schema.isError()) org.apache.avro.data.ErrorBuilder<${this.mangle($schema.getName())}>#else org.apache.avro.data.RecordBuilder<${this.mangle($schema.getName())}>#end {
+
+#foreach ($field in $schema.getFields())
+#if ($field.doc())
+    /** $field.doc() */
+#end
+    private ${this.javaUnbox($field.schema())} ${this.mangle($field.name(), $schema.isError())};
+#if (${this.hasBuilder($field.schema())})
+    private ${this.javaUnbox($field.schema())}.Builder ${this.mangle($field.name(), $schema.isError())}Builder;
+#end
+#end
+
+    /** Creates a new Builder */
+    private Builder() {
+      super(#if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.SCHEMA$);
+    }
+    
+    /**
+     * Creates a Builder by copying an existing Builder.
+     * @param other The existing Builder to copy.
+     */
+    private Builder(#if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder other) {
+      super(other);
+#foreach ($field in $schema.getFields())
+      if (isValidValue(fields()[$field.pos()], other.${this.mangle($field.name(), $schema.isError())})) {
+        this.${this.mangle($field.name(), $schema.isError())} = data().deepCopy(fields()[$field.pos()].schema(), other.${this.mangle($field.name(), $schema.isError())});
+        fieldSetFlags()[$field.pos()] = true;
+      }
+#if (${this.hasBuilder($field.schema())})
+      if (other.${this.generateHasBuilderMethod($schema, $field)}()) {
+        this.${this.mangle($field.name(), $schema.isError())}Builder = ${this.javaType($field.schema())}.newBuilder(other.${this.generateGetBuilderMethod($schema, $field)}());
+      }
+#end
+#end
+    }
+    
+    /**
+     * Creates a Builder by copying an existing $this.mangle($schema.getName()) instance.
+     * @param other The existing instance to copy.
+     */
+    private Builder(#if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())} other) {
+      #if ($schema.isError())super(other)#else
+      super(#if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.SCHEMA$)#end;
+#foreach ($field in $schema.getFields())
+      if (isValidValue(fields()[$field.pos()], other.${this.mangle($field.name(), $schema.isError())})) {
+        this.${this.mangle($field.name(), $schema.isError())} = data().deepCopy(fields()[$field.pos()].schema(), other.${this.mangle($field.name(), $schema.isError())});
+        fieldSetFlags()[$field.pos()] = true;
+      }
+#if (${this.hasBuilder($field.schema())})
+      this.${this.mangle($field.name(), $schema.isError())}Builder = null;
+#end
+#end
+    }
+#if ($schema.isError())
+
+    @Override
+    public #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder setValue(Object value) {
+      super.setValue(value);
+      return this;
+    }
+    
+    @Override
+    public #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder clearValue() {
+      super.clearValue();
+      return this;
+    }
+
+    @Override
+    public #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder setCause(Throwable cause) {
+      super.setCause(cause);
+      return this;
+    }
+    
+    @Override
+    public #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder clearCause() {
+      super.clearCause();
+      return this;
+    }
+#end
+
+#foreach ($field in $schema.getFields())
+    /**
+      * Gets the value of the '${this.mangle($field.name(), $schema.isError())}' field.
+#if ($field.doc())      * $field.doc()
+#end
+      * @return The value.
+      */
+    public ${this.javaType($field.schema())} ${this.generateGetMethod($schema, $field)}() {
+      return ${this.mangle($field.name(), $schema.isError())};
+    }
+
+    /**
+      * Sets the value of the '${this.mangle($field.name(), $schema.isError())}' field.
+#if ($field.doc())      * $field.doc()
+#end
+      * @param value The value of '${this.mangle($field.name(), $schema.isError())}'.
+      * @return This builder.
+      */
+    public #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder ${this.generateSetMethod($schema, $field)}(${this.javaUnbox($field.schema())} value) {
+      validate(fields()[$field.pos()], value);
+#if (${this.hasBuilder($field.schema())})
+      this.${this.mangle($field.name(), $schema.isError())}Builder = null;
+#end
+      this.${this.mangle($field.name(), $schema.isError())} = value;
+      fieldSetFlags()[$field.pos()] = true;
+      return this; 
+    }
+
+    /**
+      * Checks whether the '${this.mangle($field.name(), $schema.isError())}' field has been set.
+#if ($field.doc())      * $field.doc()
+#end
+      * @return True if the '${this.mangle($field.name(), $schema.isError())}' field has been set, false otherwise.
+      */
+    public boolean ${this.generateHasMethod($schema, $field)}() {
+      return fieldSetFlags()[$field.pos()];
+    }
+
+#if (${this.hasBuilder($field.schema())})
+    /**
+     * Gets the Builder instance for the '${this.mangle($field.name(), $schema.isError())}' field and creates one if it doesn't exist yet.
+#if ($field.doc())     * $field.doc()
+#end
+     * @return This builder.
+     */
+    public ${this.javaType($field.schema())}.Builder ${this.generateGetBuilderMethod($schema, $field)}() {
+      if (${this.mangle($field.name(), $schema.isError())}Builder == null) {
+        if (${this.generateHasMethod($schema, $field)}()) {
+          ${this.generateSetBuilderMethod($schema, $field)}(${this.javaType($field.schema())}.newBuilder(${this.mangle($field.name(), $schema.isError())}));
+        } else {
+          ${this.generateSetBuilderMethod($schema, $field)}(${this.javaType($field.schema())}.newBuilder());
+        }
+      }
+      return ${this.mangle($field.name(), $schema.isError())}Builder;
+    }
+
+    /**
+     * Sets the Builder instance for the '${this.mangle($field.name(), $schema.isError())}' field
+#if ($field.doc())     * $field.doc()
+#end
+     * @return This builder.
+     */
+    public #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder ${this.generateSetBuilderMethod($schema, $field)}(${this.javaUnbox($field.schema())}.Builder value) {
+      ${this.generateClearMethod($schema, $field)}();
+      ${this.mangle($field.name(), $schema.isError())}Builder = value;
+      return this;
+    }
+
+    /**
+     * Checks whether the '${this.mangle($field.name(), $schema.isError())}' field has an active Builder instance.
+#if ($field.doc())     * $field.doc()
+#end
+     * @return True if the '${this.mangle($field.name(), $schema.isError())}' field has an active Builder instance.
+     */
+    public boolean ${this.generateHasBuilderMethod($schema, $field)}() {
+      return ${this.mangle($field.name(), $schema.isError())}Builder != null;
+    }
+#end
+
+    /**
+      * Clears the value of the '${this.mangle($field.name(), $schema.isError())}' field.
+#if ($field.doc())      * $field.doc()
+#end
+      * @return This builder.
+      */
+    public #if ($schema.getNamespace())$schema.getNamespace().#end${this.mangle($schema.getName())}.Builder ${this.generateClearMethod($schema, $field)}() {
+#if (${this.isUnboxedJavaTypeNullable($field.schema())})
+      ${this.mangle($field.name(), $schema.isError())} = null;
+#end
+#if (${this.hasBuilder($field.schema())})
+      ${this.mangle($field.name(), $schema.isError())}Builder = null;
+#end
+      fieldSetFlags()[$field.pos()] = false;
+      return this;
+    }
+
+#end
+    @Override
+    public ${this.mangle($schema.getName())} build() {
+      try {
+        ${this.mangle($schema.getName())} record = new ${this.mangle($schema.getName())}(#if ($schema.isError())getValue(), getCause()#end);
+#foreach ($field in $schema.getFields())
+#if (${this.hasBuilder($field.schema())})
+        if (${this.mangle($field.name(), $schema.isError())}Builder != null) {
+          record.${this.mangle($field.name(), $schema.isError())} = this.${this.mangle($field.name(), $schema.isError())}Builder.build();
+        } else {
+          record.${this.mangle($field.name(), $schema.isError())} = fieldSetFlags()[$field.pos()] ? this.${this.mangle($field.name(), $schema.isError())} : (${this.javaType($field.schema())}) defaultValue(fields()[$field.pos()]);
+        }
+#else
+        record.${this.mangle($field.name(), $schema.isError())} = fieldSetFlags()[$field.pos()] ? this.${this.mangle($field.name(), $schema.isError())} : (${this.javaType($field.schema())}) defaultValue(fields()[$field.pos()]);
+#end
+#end
+        return record;
+      } catch (Exception e) {
+        throw new org.apache.avro.AvroRuntimeException(e);
+      }
+    }
+  }
+
+  private static final org.apache.avro.io.DatumWriter
+    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);  
+
+  @Override public void writeExternal(java.io.ObjectOutput out)
+    throws java.io.IOException {
+    WRITER$.write(this, org.apache.avro.specific.SpecificData.getEncoder(out));
+  }
+
+  private static final org.apache.avro.io.DatumReader
+    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);  
+
+  @Override public void readExternal(java.io.ObjectInput in)
+    throws java.io.IOException {
+    READER$.read(this, org.apache.avro.specific.SpecificData.getDecoder(in));
+  }
+
+}
diff --git a/lang/java/compiler/src/test/idl/input/bar.avpr b/lang/java/compiler/src/test/idl/input/bar.avpr
new file mode 100644
index 0000000..5e9b194
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/input/bar.avpr
@@ -0,0 +1,2 @@
+{"protocol": "org.foo.Bar",
+ "messages": { "bar": {"request": [], "response": "null"}}}
diff --git a/lang/java/compiler/src/test/idl/input/baseball.avdl b/lang/java/compiler/src/test/idl/input/baseball.avdl
new file mode 100644
index 0000000..e92326e
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/input/baseball.avdl
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ at namespace("avro.examples.baseball")
+protocol Baseball {
+   import schema "position.avsc";
+   import schema "player.avsc";
+}
diff --git a/lang/java/compiler/src/test/idl/input/foo.avsc b/lang/java/compiler/src/test/idl/input/foo.avsc
new file mode 100644
index 0000000..83267ea
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/input/foo.avsc
@@ -0,0 +1,3 @@
+{"type": "record", "name": "org.foo.Foo",
+ "fields": [ {"name": "x", "type": "int"} ]
+}
diff --git a/lang/java/compiler/src/test/idl/input/import.avdl b/lang/java/compiler/src/test/idl/input/import.avdl
new file mode 100644
index 0000000..56b0836
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/input/import.avdl
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ at namespace("org.foo")
+protocol Import {
+  import idl "reservedwords.avdl";
+  import idl "nestedimport.avdl";
+
+  // Note that this import is resolved via the classpath, not a relative path.
+  import idl "OnTheClasspath.avdl";
+  import protocol "OnTheClasspath.avpr";
+  import schema "OnTheClasspath.avsc";
+  
+  import schema "foo.avsc";
+  import protocol "bar.avpr";
+  
+  record Bar {
+    Foo foo;
+  }
+
+  Bar barf(Foo foo);
+}
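
The OnTheClasspath imports above are resolved through the parser's class loader rather than relative to the importing file. A minimal sketch of supplying that class path when parsing this file programmatically, mirroring the TestIdl harness later in this patch (paths are illustrative):

    import java.io.File;
    import java.net.URL;
    import java.net.URLClassLoader;
    import org.apache.avro.Protocol;
    import org.apache.avro.compiler.idl.Idl;

    public class ParseWithClasspath {
      public static void main(String[] args) throws Exception {
        // Expose src/test/idl/putOnClassPath/ to the parser so the
        // "OnTheClasspath.*" imports can be found, then parse import.avdl.
        URL extra = new File("src/test/idl/putOnClassPath/").toURI().toURL();
        ClassLoader loader = new URLClassLoader(new URL[] { extra },
            Thread.currentThread().getContextClassLoader());
        Idl parser = new Idl(new File("src/test/idl/input/import.avdl"), loader);
        Protocol protocol = parser.CompilationUnit(); // imports resolve here
        parser.close();
        System.out.println(protocol.toString(true)); // prints the .avpr JSON
      }
    }
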
diff --git a/lang/java/compiler/src/test/idl/input/interop.avdl b/lang/java/compiler/src/test/idl/input/interop.avdl
new file mode 100644
index 0000000..7e05666
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/input/interop.avdl
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Currently genavro only does Protocols.
+ at namespace("org.apache.avro")
+protocol InteropProtocol {
+  record Foo {
+    string label;
+  }
+
+  enum Kind { A, B, C }
+  fixed MD5(16);
+
+  record Node {
+    string label;
+    array<Node> children = [];
+  }
+
+  record Interop {
+    int intField = 1;
+    long longField = -1;
+    string stringField;
+    boolean boolField = false;
+    float floatField = 0.0;
+    double doubleField = -1.0e12;
+    null nullField;
+    array<double> arrayField = [];
+    map<Foo> mapField;
+    union { boolean, double, array<bytes> } unionField;
+    Kind enumField;
+    MD5 fixedField;
+    Node recordField;
+  }
+
+}
diff --git a/lang/java/compiler/src/test/idl/input/mr_events.avdl b/lang/java/compiler/src/test/idl/input/mr_events.avdl
new file mode 100644
index 0000000..63568fb
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/input/mr_events.avdl
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Genavro format for a particular protocol found in Hadoop MapReduce.
+ * Used as a test case/example to show that we can express real-world stuff more
+ * succinctly.
+ */
+ at namespace("org.apache.hadoop.mapreduce.jobhistory")
+protocol Events {
+  record JhCounter {
+    string name;
+    string displayName;
+    long value;
+  }
+
+  record JhCounterGroup {
+    string name;
+    string displayName;
+    array<JhCounter> counts;
+  }
+
+  record JhCounters {
+    string name;
+    array<JhCounterGroup> groups;
+  }
+
+  record JobFinished {
+    string jobid;
+    long finishTime;
+    int finishedMaps;
+    int finishedReduces;
+    int failedMaps;
+    int failedReduces;
+    JhCounters totalCounters;
+    JhCounters mapCounters;
+    JhCounters reduceCounters;
+  }
+
+  record JobInited {
+    string jobid;
+    long launchTime;
+    int totalMaps;
+    int totalReduces;
+    string jobStatus;
+  }
+
+  record JobSubmitted {
+    string jobid;
+    string jobName;
+    string userName;
+    long submitTime;
+    string jobConfPath;
+  }
+
+  // ... TODO continue
+}
diff --git a/lang/java/compiler/src/test/idl/input/namespaces.avdl b/lang/java/compiler/src/test/idl/input/namespaces.avdl
new file mode 100644
index 0000000..fe371b4
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/input/namespaces.avdl
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ at namespace("avro.test.protocol")
+protocol TestNamespace {
+  @namespace("avro.test.fixed")
+  fixed FixedInOtherNamespace(16);
+
+  fixed FixedInThisNamespace(16);
+
+  @namespace("avro.test.record")
+  record RecordInOtherNamespace {}
+
+  @namespace("avro.test.error")
+  error ErrorInOtherNamespace {}
+
+  @namespace("avro.test.enum")
+  enum EnumInOtherNamespace { FOO }
+
+  record RefersToOthers {
+    avro.test.fixed.FixedInOtherNamespace someFixed;
+    avro.test.record.RecordInOtherNamespace someRecord;
+    avro.test.error.ErrorInOtherNamespace someError;
+    avro.test.enum.EnumInOtherNamespace someEnum;
+    FixedInThisNamespace thisFixed;
+  }
+}
diff --git a/lang/java/compiler/src/test/idl/input/nestedimport.avdl b/lang/java/compiler/src/test/idl/input/nestedimport.avdl
new file mode 100644
index 0000000..1e7eb12
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/input/nestedimport.avdl
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ at version("1.0.5")
+ at namespace("org.apache.avro.ipc.specific")
+protocol nestedimport {
+    import idl "reservedwords.avdl";
+    import protocol "bar.avpr";
+    import schema "position.avsc";
+    import schema "player.avsc";
+
+    record ImportBar {
+        avro.examples.baseball.Player  foo;
+    }
+
+}
\ No newline at end of file
diff --git a/lang/java/compiler/src/test/idl/input/player.avsc b/lang/java/compiler/src/test/idl/input/player.avsc
new file mode 100644
index 0000000..0492850
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/input/player.avsc
@@ -0,0 +1,8 @@
+{"type":"record", "name":"Player", "namespace": "avro.examples.baseball",
+  "fields": [
+   {"name": "number", "type": "int"},
+   {"name": "first_name", "type": "string"},
+   {"name": "last_name", "type": "string"},
+   {"name": "position", "type": {"type": "array", "items": "Position"} }
+  ]
+}
diff --git a/lang/java/compiler/src/test/idl/input/position.avsc b/lang/java/compiler/src/test/idl/input/position.avsc
new file mode 100644
index 0000000..a47065a
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/input/position.avsc
@@ -0,0 +1,3 @@
+{"type":"enum", "name": "Position", "namespace": "avro.examples.baseball",
+    "symbols": ["P", "C", "B1", "B2", "B3", "SS", "LF", "CF", "RF", "DH"]
+}
diff --git a/lang/java/compiler/src/test/idl/input/reservedwords.avdl b/lang/java/compiler/src/test/idl/input/reservedwords.avdl
new file mode 100644
index 0000000..3f07cbe
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/input/reservedwords.avdl
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+protocol Foo {
+  void `error`();
+  void `void`();
+}
diff --git a/lang/java/compiler/src/test/idl/input/simple.avdl b/lang/java/compiler/src/test/idl/input/simple.avdl
new file mode 100644
index 0000000..db22309
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/input/simple.avdl
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * A simple test case.
+ */
+ at version("1.0.5")
+ at namespace("org.apache.avro.test")
+protocol Simple {
+  /** A kind of record. */
+  @aliases(["org.foo.KindOf"])
+  enum Kind {
+    FOO,
+    BAR, // the bar enum value
+    BAZ
+  }
+
+  /** An MD5 hash. */
+  fixed MD5(16);
+
+  /** A TestRecord. */
+  @my-property({"key":3})
+  record TestRecord {
+    string @order("ignore") name = "foo";
+
+    /** The kind of record. */
+    Kind @order("descending") kind;
+
+    @foo("bar") MD5 hash = "0000000000000000";
+
+    union {null, MD5} @aliases(["hash", "hsh"]) nullableHash = null;
+
+    double value = NaN;
+    float average = -Infinity;
+  }
+
+  error TestError {
+    string message;
+  }
+
+  /** method 'hello' takes @parameter 'greeting' */
+  string hello(string greeting);
+  TestRecord echo(TestRecord `record` = {"name":"bar","kind":"BAR"});
+  /** method 'add' takes @parameter 'arg1' @parameter 'arg2' */
+  @specialProp("test")
+  int add(int arg1, int arg2 = 0);
+  bytes echoBytes(bytes data);
+  void `error`() throws TestError;
+  void ping() oneway;
+}
diff --git a/lang/java/compiler/src/test/idl/input/unicode.avdl b/lang/java/compiler/src/test/idl/input/unicode.avdl
new file mode 100644
index 0000000..c7c40f1
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/input/unicode.avdl
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* This is a test that UTF8 functions correctly.
+* このテストでは、UTF - 8で正しく機能している。
+* 这是一个测试,UTF - 8的正常运行。
+*/
+protocol Протоколы {
+  record Структура {
+    string Строковый;
+    string 文字列;
+  }
+}
diff --git a/lang/java/compiler/src/test/idl/output/baseball.avpr b/lang/java/compiler/src/test/idl/output/baseball.avpr
new file mode 100644
index 0000000..2e17aff
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/output/baseball.avpr
@@ -0,0 +1,31 @@
+{
+  "protocol" : "Baseball",
+  "namespace" : "avro.examples.baseball",
+  "doc" : "* Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership.  The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2 [...]
+  "types" : [ {
+    "type" : "enum",
+    "name" : "Position",
+    "symbols" : [ "P", "C", "B1", "B2", "B3", "SS", "LF", "CF", "RF", "DH" ]
+  }, {
+    "type" : "record",
+    "name" : "Player",
+    "fields" : [ {
+      "name" : "number",
+      "type" : "int"
+    }, {
+      "name" : "first_name",
+      "type" : "string"
+    }, {
+      "name" : "last_name",
+      "type" : "string"
+    }, {
+      "name" : "position",
+      "type" : {
+        "type" : "array",
+        "items" : "Position"
+      }
+    } ]
+  } ],
+  "messages" : {
+  }
+}
diff --git a/lang/java/compiler/src/test/idl/output/import.avpr b/lang/java/compiler/src/test/idl/output/import.avpr
new file mode 100644
index 0000000..8fd2978
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/output/import.avpr
@@ -0,0 +1,94 @@
+{
+  "protocol" : "Import",
+  "namespace" : "org.foo",
+  "doc" : "* Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership.  The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2 [...]
+  "types" : [ {
+    "type" : "enum",
+    "name" : "Position",
+    "namespace" : "avro.examples.baseball",
+    "symbols" : [ "P", "C", "B1", "B2", "B3", "SS", "LF", "CF", "RF", "DH" ]
+  }, {
+    "type" : "record",
+    "name" : "Player",
+    "namespace" : "avro.examples.baseball",
+    "fields" : [ {
+      "name" : "number",
+      "type" : "int"
+    }, {
+      "name" : "first_name",
+      "type" : "string"
+    }, {
+      "name" : "last_name",
+      "type" : "string"
+    }, {
+      "name" : "position",
+      "type" : {
+        "type" : "array",
+        "items" : "Position"
+      }
+    } ]
+  }, {
+    "type" : "record",
+    "name" : "ImportBar",
+    "namespace" : "org.apache.avro.ipc.specific",
+    "fields" : [ {
+      "name" : "foo",
+      "type" : "avro.examples.baseball.Player"
+    } ]
+  }, {
+    "type" : "record",
+    "name" : "NestedType",
+    "namespace" : "org.on.the.classpath",
+    "fields" : [ ]
+  }, {
+    "type" : "record",
+    "name" : "FromAfar",
+    "namespace" : "org.on.the.classpath",
+    "fields" : [ ]
+  }, {
+    "type" : "record",
+    "name" : "VeryFar",
+    "namespace" : "org.on.the.classpath",
+    "fields" : [ ]
+  }, {
+    "type" : "record",
+    "name" : "FarAway",
+    "namespace" : "org.on.the.classpath",
+    "fields" : [ ]
+  }, {
+    "type" : "record",
+    "name" : "Foo",
+    "fields" : [ {
+      "name" : "x",
+      "type" : "int"
+    } ]
+  }, {
+    "type" : "record",
+    "name" : "Bar",
+    "fields" : [ {
+      "name" : "foo",
+      "type" : "Foo"
+    } ]
+  } ],
+  "messages" : {
+    "error" : {
+      "request" : [ ],
+      "response" : "null"
+    },
+    "void" : {
+      "request" : [ ],
+      "response" : "null"
+    },
+    "bar" : {
+      "request" : [ ],
+      "response" : "null"
+    },
+    "barf" : {
+      "request" : [ {
+        "name" : "foo",
+        "type" : "Foo"
+      } ],
+      "response" : "Bar"
+    }
+  }
+}
\ No newline at end of file
diff --git a/lang/java/compiler/src/test/idl/output/interop.avpr b/lang/java/compiler/src/test/idl/output/interop.avpr
new file mode 100644
index 0000000..ad6e070
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/output/interop.avpr
@@ -0,0 +1,94 @@
+{
+  "protocol" : "InteropProtocol",
+  "namespace" : "org.apache.avro",
+  "doc" : "* Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership.  The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2 [...]
+  "types" : [ {
+    "type" : "record",
+    "name" : "Foo",
+    "fields" : [ {
+      "name" : "label",
+      "type" : "string"
+    } ]
+  }, {
+    "type" : "enum",
+    "name" : "Kind",
+    "symbols" : [ "A", "B", "C" ]
+  }, {
+    "type" : "fixed",
+    "name" : "MD5",
+    "size" : 16
+  }, {
+    "type" : "record",
+    "name" : "Node",
+    "fields" : [ {
+      "name" : "label",
+      "type" : "string"
+    }, {
+      "name" : "children",
+      "type" : {
+        "type" : "array",
+        "items" : "Node"
+      },
+      "default" : [ ]
+    } ]
+  }, {
+    "type" : "record",
+    "name" : "Interop",
+    "fields" : [ {
+      "name" : "intField",
+      "type" : "int",
+      "default" : 1
+    }, {
+      "name" : "longField",
+      "type" : "long",
+      "default" : -1
+    }, {
+      "name" : "stringField",
+      "type" : "string"
+    }, {
+      "name" : "boolField",
+      "type" : "boolean",
+      "default" : false
+    }, {
+      "name" : "floatField",
+      "type" : "float",
+      "default" : 0.0
+    }, {
+      "name" : "doubleField",
+      "type" : "double",
+      "default" : -1.0E12
+    }, {
+      "name" : "nullField",
+      "type" : "null"
+    }, {
+      "name" : "arrayField",
+      "type" : {
+        "type" : "array",
+        "items" : "double"
+      },
+      "default" : [ ]
+    }, {
+      "name" : "mapField",
+      "type" : {
+        "type" : "map",
+        "values" : "Foo"
+      }
+    }, {
+      "name" : "unionFIeld",
+      "type" : [ "boolean", "double", {
+        "type" : "array",
+        "items" : "bytes"
+      } ]
+    }, {
+      "name" : "enumField",
+      "type" : "Kind"
+    }, {
+      "name" : "fixedField",
+      "type" : "MD5"
+    }, {
+      "name" : "recordField",
+      "type" : "Node"
+    } ]
+  } ],
+  "messages" : { }
+}
\ No newline at end of file
diff --git a/lang/java/compiler/src/test/idl/output/mr_events.avpr b/lang/java/compiler/src/test/idl/output/mr_events.avpr
new file mode 100644
index 0000000..25da3a8
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/output/mr_events.avpr
@@ -0,0 +1,119 @@
+{
+  "protocol" : "Events",
+  "namespace" : "org.apache.hadoop.mapreduce.jobhistory",
+  "doc" : "* Genavro format for a particular protocol found in Hadoop MapReduce.\n * Used as a test case/example to show that we can express real-world stuff more\n * succinctly.",
+  "types" : [ {
+    "type" : "record",
+    "name" : "JhCounter",
+    "fields" : [ {
+      "name" : "name",
+      "type" : "string"
+    }, {
+      "name" : "displayName",
+      "type" : "string"
+    }, {
+      "name" : "value",
+      "type" : "long"
+    } ]
+  }, {
+    "type" : "record",
+    "name" : "JhCounterGroup",
+    "fields" : [ {
+      "name" : "name",
+      "type" : "string"
+    }, {
+      "name" : "displayName",
+      "type" : "string"
+    }, {
+      "name" : "counts",
+      "type" : {
+        "type" : "array",
+        "items" : "JhCounter"
+      }
+    } ]
+  }, {
+    "type" : "record",
+    "name" : "JhCounters",
+    "fields" : [ {
+      "name" : "name",
+      "type" : "string"
+    }, {
+      "name" : "groups",
+      "type" : {
+        "type" : "array",
+        "items" : "JhCounterGroup"
+      }
+    } ]
+  }, {
+    "type" : "record",
+    "name" : "JobFinished",
+    "fields" : [ {
+      "name" : "jobid",
+      "type" : "string"
+    }, {
+      "name" : "finishTime",
+      "type" : "long"
+    }, {
+      "name" : "finishedMaps",
+      "type" : "int"
+    }, {
+      "name" : "finishedReduces",
+      "type" : "int"
+    }, {
+      "name" : "failedMaps",
+      "type" : "int"
+    }, {
+      "name" : "failedReduces",
+      "type" : "int"
+    }, {
+      "name" : "totalCounters",
+      "type" : "JhCounters"
+    }, {
+      "name" : "mapCounters",
+      "type" : "JhCounters"
+    }, {
+      "name" : "reduceCounters",
+      "type" : "JhCounters"
+    } ]
+  }, {
+    "type" : "record",
+    "name" : "JobInited",
+    "fields" : [ {
+      "name" : "jobid",
+      "type" : "string"
+    }, {
+      "name" : "launchTime",
+      "type" : "long"
+    }, {
+      "name" : "totalMaps",
+      "type" : "int"
+    }, {
+      "name" : "totalReduces",
+      "type" : "int"
+    }, {
+      "name" : "jobStatus",
+      "type" : "string"
+    } ]
+  }, {
+    "type" : "record",
+    "name" : "JobSubmitted",
+    "fields" : [ {
+      "name" : "jobid",
+      "type" : "string"
+    }, {
+      "name" : "jobName",
+      "type" : "string"
+    }, {
+      "name" : "userName",
+      "type" : "string"
+    }, {
+      "name" : "submitTime",
+      "type" : "long"
+    }, {
+      "name" : "jobConfPath",
+      "type" : "string"
+    } ]
+  } ],
+  "messages" : {
+  }
+}
\ No newline at end of file
diff --git a/lang/java/compiler/src/test/idl/output/namespaces.avpr b/lang/java/compiler/src/test/idl/output/namespaces.avpr
new file mode 100644
index 0000000..e9d5fa6
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/output/namespaces.avpr
@@ -0,0 +1,51 @@
+{
+  "protocol" : "TestNamespace",
+  "namespace" : "avro.test.protocol",
+  "doc" : "* Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership.  The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2 [...]
+  "types" : [ {
+    "type" : "fixed",
+    "name" : "FixedInOtherNamespace",
+    "namespace" : "avro.test.fixed",
+    "size" : 16
+  }, {
+    "type" : "fixed",
+    "name" : "FixedInThisNamespace",
+    "size" : 16
+  }, {
+    "type" : "record",
+    "name" : "RecordInOtherNamespace",
+    "namespace" : "avro.test.record",
+    "fields" : [ ]
+  }, {
+    "type" : "error",
+    "name" : "ErrorInOtherNamespace",
+    "namespace" : "avro.test.error",
+    "fields" : [ ]
+  }, {
+    "type" : "enum",
+    "name" : "EnumInOtherNamespace",
+    "namespace" : "avro.test.enum",
+    "symbols" : [ "FOO" ]
+  }, {
+    "type" : "record",
+    "name" : "RefersToOthers",
+    "fields" : [ {
+      "name" : "someFixed",
+      "type" : "avro.test.fixed.FixedInOtherNamespace"
+    }, {
+      "name" : "someRecord",
+      "type" : "avro.test.record.RecordInOtherNamespace"
+    }, {
+      "name" : "someError",
+      "type" : "avro.test.error.ErrorInOtherNamespace"
+    }, {
+      "name" : "someEnum",
+      "type" : "avro.test.enum.EnumInOtherNamespace"
+    }, {
+      "name" : "thisFixed",
+      "type" : "FixedInThisNamespace"
+    } ]
+  } ],
+  "messages" : {
+  }
+}
\ No newline at end of file
diff --git a/lang/java/compiler/src/test/idl/output/nestedimport.avpr b/lang/java/compiler/src/test/idl/output/nestedimport.avpr
new file mode 100644
index 0000000..9a5b6b6
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/output/nestedimport.avpr
@@ -0,0 +1,53 @@
+{
+  "protocol" : "nestedimport",
+  "namespace" : "org.apache.avro.ipc.specific",
+  "doc" : "* Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership.  The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2 [...]
+  "version" : "1.0.5",
+  "types" : [ {
+    "type" : "enum",
+    "name" : "Position",
+    "namespace" : "avro.examples.baseball",
+    "symbols" : [ "P", "C", "B1", "B2", "B3", "SS", "LF", "CF", "RF", "DH" ]
+  }, {
+    "type" : "record",
+    "name" : "Player",
+    "namespace" : "avro.examples.baseball",
+    "fields" : [ {
+      "name" : "number",
+      "type" : "int"
+    }, {
+      "name" : "first_name",
+      "type" : "string"
+    }, {
+      "name" : "last_name",
+      "type" : "string"
+    }, {
+      "name" : "position",
+      "type" : {
+        "type" : "array",
+        "items" : "Position"
+      }
+    } ]
+  }, {
+    "type" : "record",
+    "name" : "ImportBar",
+    "fields" : [ {
+      "name" : "foo",
+      "type" : "avro.examples.baseball.Player"
+    } ]
+  } ],
+  "messages" : {
+    "error" : {
+      "request" : [ ],
+      "response" : "null"
+    },
+    "void" : {
+      "request" : [ ],
+      "response" : "null"
+    },
+    "bar" : {
+      "request" : [ ],
+      "response" : "null"
+    }
+  }
+}
\ No newline at end of file
diff --git a/lang/java/compiler/src/test/idl/output/reservedwords.avpr b/lang/java/compiler/src/test/idl/output/reservedwords.avpr
new file mode 100644
index 0000000..507ff68
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/output/reservedwords.avpr
@@ -0,0 +1,16 @@
+{
+  "protocol" : "Foo",
+  "namespace" : null,
+  "doc" : "* Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership.  The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License.  You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2 [...]
+  "types" : [ ],
+  "messages" : {
+    "error" : {
+      "request" : [ ],
+      "response" : "null"
+    },
+    "void" : {
+      "request" : [ ],
+      "response" : "null"
+    }
+  }
+}
\ No newline at end of file
diff --git a/lang/java/compiler/src/test/idl/output/simple.avpr b/lang/java/compiler/src/test/idl/output/simple.avpr
new file mode 100644
index 0000000..40cb58a
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/output/simple.avpr
@@ -0,0 +1,112 @@
+{
+  "protocol" : "Simple",
+  "namespace" : "org.apache.avro.test",
+  "doc" : "* A simple test case.",
+  "version" : "1.0.5",
+  "types" : [ {
+    "type" : "enum",
+    "name" : "Kind",
+    "doc" : "A kind of record.",
+    "symbols" : [ "FOO", "BAR", "BAZ" ],
+    "aliases" : [ "org.foo.KindOf" ]
+  }, {
+    "type" : "fixed",
+    "name" : "MD5",
+    "doc" : "An MD5 hash.",
+    "size" : 16,
+    "foo" : "bar"
+  }, {
+    "type" : "record",
+    "name" : "TestRecord",
+    "doc" : "A TestRecord.",
+    "fields" : [ {
+      "name" : "name",
+      "type" : "string",
+      "default" : "foo",
+      "order" : "ignore"
+    }, {
+      "name" : "kind",
+      "type" : "Kind",
+      "doc" : "The kind of record.",
+      "order" : "descending"
+    }, {
+      "name" : "hash",
+      "type" : "MD5",
+      "default" : "0000000000000000"
+    }, {
+      "name" : "nullableHash",
+      "type" : [ "null", "MD5" ],
+      "default" : null,
+      "aliases" : [ "hash", "hsh" ]
+    }, {
+      "name" : "value",
+      "type" : "double",
+      "default" : "NaN"
+    }, {
+      "name" : "average",
+      "type" : "float",
+      "default" : "-Infinity"
+    } ],
+    "my-property" : {
+      "key" : 3
+    }
+  }, {
+    "type" : "error",
+    "name" : "TestError",
+    "fields" : [ {
+      "name" : "message",
+      "type" : "string"
+    } ]
+  } ],
+  "messages" : {
+    "hello" : {
+      "doc" : "method 'hello' takes @parameter 'greeting'",
+      "request" : [ {
+        "name" : "greeting",
+        "type" : "string"
+      } ],
+      "response" : "string"
+    },
+    "echo" : {
+      "request" : [ {
+        "name" : "record",
+        "type" : "TestRecord",
+        "default" : {
+          "name" : "bar",
+          "kind" : "BAR"
+        }
+      } ],
+      "response" : "TestRecord"
+    },
+    "add" : {
+      "doc" : "method 'add' takes @parameter 'arg1' @parameter 'arg2'",
+      "specialProp" : "test",
+      "request" : [ {
+        "name" : "arg1",
+        "type" : "int"
+      }, {
+        "name" : "arg2",
+        "type" : "int",
+        "default" : 0
+      } ],
+      "response" : "int"
+    },
+    "echoBytes" : {
+      "request" : [ {
+        "name" : "data",
+        "type" : "bytes"
+      } ],
+      "response" : "bytes"
+    },
+    "error" : {
+      "request" : [ ],
+      "response" : "null",
+      "errors" : [ "TestError" ]
+    },
+    "ping" : {
+      "request" : [ ],
+      "response" : "null",
+      "one-way" : true
+    }
+  }
+}
\ No newline at end of file
diff --git a/lang/java/compiler/src/test/idl/output/unicode.avpr b/lang/java/compiler/src/test/idl/output/unicode.avpr
new file mode 100644
index 0000000..4809f6a
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/output/unicode.avpr
@@ -0,0 +1,18 @@
+{
+  "protocol" : "Протоколы",
+  "namespace" : null,
+  "doc" : "* This is a test that UTF8 functions correctly.\n* このテストでは、UTF - 8で正しく機能している。\n* 这是一个测试,UTF - 8的正常运行。",
+  "types" : [ {
+    "type" : "record",
+    "name" : "Структура",
+    "fields" : [ {
+      "name" : "Строковый",
+      "type" : "string"
+    }, {
+      "name" : "文字列",
+      "type" : "string"
+    } ]
+  } ],
+  "messages" : {
+  }
+}
\ No newline at end of file
diff --git a/lang/java/compiler/src/test/idl/putOnClassPath/OnTheClasspath.avdl b/lang/java/compiler/src/test/idl/putOnClassPath/OnTheClasspath.avdl
new file mode 100644
index 0000000..4533c54
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/putOnClassPath/OnTheClasspath.avdl
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ at namespace("org.on.the.classpath")
+protocol OnTheClasspath {
+    import idl "nestedtypes.avdl";
+	record FromAfar {
+	}
+}
diff --git a/lang/java/compiler/src/test/idl/putOnClassPath/OnTheClasspath.avpr b/lang/java/compiler/src/test/idl/putOnClassPath/OnTheClasspath.avpr
new file mode 100644
index 0000000..46951c2
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/putOnClassPath/OnTheClasspath.avpr
@@ -0,0 +1,11 @@
+{
+  "protocol" : "OnTheClasspath",
+  "namespace" : "org.on.the.classpath",
+  "types" : [ {
+    "type" : "record",
+    "name" : "VeryFar",
+    "fields" : [ ]
+  } ],
+  "messages" : {
+  }
+}
\ No newline at end of file
diff --git a/lang/java/compiler/src/test/idl/putOnClassPath/OnTheClasspath.avsc b/lang/java/compiler/src/test/idl/putOnClassPath/OnTheClasspath.avsc
new file mode 100644
index 0000000..40d3595
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/putOnClassPath/OnTheClasspath.avsc
@@ -0,0 +1,6 @@
+{
+  "type" : "record",
+  "name" : "FarAway",
+  "namespace" : "org.on.the.classpath",
+  "fields" : [ ]
+}
\ No newline at end of file
diff --git a/lang/java/compiler/src/test/idl/putOnClassPath/nestedtypes.avdl b/lang/java/compiler/src/test/idl/putOnClassPath/nestedtypes.avdl
new file mode 100644
index 0000000..a8aafe4
--- /dev/null
+++ b/lang/java/compiler/src/test/idl/putOnClassPath/nestedtypes.avdl
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+ at namespace("org.on.the.classpath")
+protocol OnTheClasspathTypes {
+	record NestedType {
+	}
+}
diff --git a/lang/java/compiler/src/test/java/org/apache/avro/compiler/TestSpecificCompiler.java b/lang/java/compiler/src/test/java/org/apache/avro/compiler/TestSpecificCompiler.java
new file mode 100644
index 0000000..7c4fcc2
--- /dev/null
+++ b/lang/java/compiler/src/test/java/org/apache/avro/compiler/TestSpecificCompiler.java
@@ -0,0 +1,202 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.compiler;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertThat;
+import static org.hamcrest.CoreMatchers.not;
+import static org.hamcrest.CoreMatchers.equalTo;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.nio.charset.Charset;
+
+import org.apache.avro.AvroTestUtil;
+import org.apache.avro.Schema;
+import org.apache.avro.compiler.specific.SpecificCompiler;
+import org.apache.avro.generic.GenericData.StringType;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class TestSpecificCompiler {
+  private final String schemaSrcPath = "src/test/resources/simple_record.avsc";
+  private final String velocityTemplateDir =
+      "src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/";
+  private File src;
+  private File outputDir;
+  private File outputFile;
+
+  @Before
+  public void setUp() {
+    this.src = new File(this.schemaSrcPath);
+    this.outputDir = AvroTestUtil.tempDirectory(getClass(), "specific-output");
+    this.outputFile = new File(this.outputDir, "SimpleRecord.java");
+  }
+
+  @After
+  public void tearDown() {
+    if (this.outputFile != null) {
+      this.outputFile.delete();
+    }
+  }
+
+  private SpecificCompiler createCompiler() throws IOException {
+    Schema.Parser parser = new Schema.Parser();
+    Schema schema = parser.parse(this.src);
+    SpecificCompiler compiler = new SpecificCompiler(schema);
+    compiler.setTemplateDir(this.velocityTemplateDir);
+    compiler.setStringType(StringType.CharSequence);
+    return compiler;
+  }
+
+  @Test
+  public void testCanReadTemplateFilesOnTheFilesystem() throws IOException, URISyntaxException {
+    SpecificCompiler compiler = createCompiler();
+    compiler.compileToDestination(this.src, this.outputDir);
+    assertTrue(this.outputFile.exists());
+  }
+
+  @Test
+  public void testPublicFieldVisibility() throws IOException {
+    SpecificCompiler compiler = createCompiler();
+    compiler.setFieldVisibility(SpecificCompiler.FieldVisibility.PUBLIC);
+    assertFalse(compiler.deprecatedFields());
+    assertTrue(compiler.publicFields());
+    assertFalse(compiler.privateFields());
+    compiler.compileToDestination(this.src, this.outputDir);
+    assertTrue(this.outputFile.exists());
+    BufferedReader reader = new BufferedReader(new FileReader(this.outputFile));
+    String line = null;
+    while ((line = reader.readLine()) != null) {
+      // No line, once trimmed, should start with a deprecated field declaration
+      // or a private field declaration.  Since the nested builder uses private
+      // fields, we cannot do the second check.
+      line = line.trim();
+      assertFalse("Line started with a deprecated field declaration: " + line,
+        line.startsWith("@Deprecated public int value"));
+    }
+  }
+
+  @Test
+  public void testPublicDeprecatedFieldVisibility() throws IOException {
+    SpecificCompiler compiler = createCompiler();
+    assertTrue(compiler.deprecatedFields());
+    assertTrue(compiler.publicFields());
+    assertFalse(compiler.privateFields());
+    compiler.compileToDestination(this.src, this.outputDir);
+    assertTrue(this.outputFile.exists());
+    BufferedReader reader = new BufferedReader(new FileReader(this.outputFile));
+    String line = null;
+    while ((line = reader.readLine()) != null) {
+      // No line, once trimmed, should start with a public field declaration
+      line = line.trim();
+      assertFalse("Line started with a public field declaration: " + line,
+        line.startsWith("public int value"));
+    }
+  }
+
+  @Test
+  public void testPrivateFieldVisibility() throws IOException {
+    SpecificCompiler compiler = createCompiler();
+    compiler.setFieldVisibility(SpecificCompiler.FieldVisibility.PRIVATE);
+    assertFalse(compiler.deprecatedFields());
+    assertFalse(compiler.publicFields());
+    assertTrue(compiler.privateFields());
+    compiler.compileToDestination(this.src, this.outputDir);
+    assertTrue(this.outputFile.exists());
+    BufferedReader reader = new BufferedReader(new FileReader(this.outputFile));
+    String line = null;
+    while ((line = reader.readLine()) != null) {
+      // No line, once trimmed, should start with a public field declaration
+      // or with a deprecated public field declaration
+      line = line.trim();
+      assertFalse("Line started with a public field declaration: " + line,
+        line.startsWith("public int value"));
+      assertFalse("Line started with a deprecated field declaration: " + line,
+        line.startsWith("@Deprecated public int value"));
+    }
+  }
+
+  @Test
+  public void testSettersCreatedByDefault() throws IOException {
+    SpecificCompiler compiler = createCompiler();
+    assertTrue(compiler.isCreateSetters());
+    compiler.compileToDestination(this.src, this.outputDir);
+    assertTrue(this.outputFile.exists());
+    BufferedReader reader = new BufferedReader(new FileReader(this.outputFile));
+    int foundSetters = 0;
+    String line = null;
+    while ((line = reader.readLine()) != null) {
+      // We should find the setter in the main class
+      line = line.trim();
+      if (line.startsWith("public void setValue(")) {
+        foundSetters++;
+      }
+    }
+    assertEquals("Found the wrong number of setters", 1, foundSetters);
+  }
+
+  @Test
+  public void testSettersNotCreatedWhenOptionTurnedOff() throws IOException {
+    SpecificCompiler compiler = createCompiler();
+    compiler.setCreateSetters(false);
+    assertFalse(compiler.isCreateSetters());
+    compiler.compileToDestination(this.src, this.outputDir);
+    assertTrue(this.outputFile.exists());
+    BufferedReader reader = new BufferedReader(new FileReader(this.outputFile));
+    String line = null;
+    while ((line = reader.readLine()) != null) {
+      // No setter should be found
+      line = line.trim();
+      assertFalse("No line should include the setter: " + line,
+        line.startsWith("public void setValue("));
+    }
+  }
+
+  @Test
+  public void testSettingOutputCharacterEncoding() throws Exception {
+    SpecificCompiler compiler = createCompiler();
+    // Generate the file in the default encoding
+    compiler.compileToDestination(this.src, this.outputDir);
+    byte[] fileInDefaultEncoding = new byte[(int) this.outputFile.length()];
+    new FileInputStream(this.outputFile).read(fileInDefaultEncoding);
+    this.outputFile.delete();
+    // Generate the file in another encoding (one with a different number of bytes per character)
+    String differentEncoding = Charset.defaultCharset().equals(Charset.forName("UTF-16")) ? "UTF-32" : "UTF-16";
+    compiler.setOutputCharacterEncoding(differentEncoding);
+    compiler.compileToDestination(this.src, this.outputDir);
+    byte[] fileInDifferentEncoding = new byte[(int) this.outputFile.length()];
+    new FileInputStream(this.outputFile).read(fileInDifferentEncoding);
+    // Compare as bytes
+    assertThat("Generated file should contain different bytes after setting non-default encoding",
+      fileInDefaultEncoding, not(equalTo(fileInDifferentEncoding)));
+    // Compare as strings
+    assertThat("Generated files should contain the same characters in the proper encodings",
+      new String(fileInDefaultEncoding), equalTo(new String(fileInDifferentEncoding, differentEncoding)));
+  }
+}
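
The tests above exercise the SpecificCompiler knobs one at a time: field visibility, setter generation, and output character encoding. A condensed sketch that drives the compiler programmatically, using only calls that appear in this test (paths are illustrative):

    import java.io.File;
    import org.apache.avro.Schema;
    import org.apache.avro.compiler.specific.SpecificCompiler;
    import org.apache.avro.generic.GenericData.StringType;

    public class CompileSimpleRecord {
      public static void main(String[] args) throws Exception {
        File src = new File("src/test/resources/simple_record.avsc");
        Schema schema = new Schema.Parser().parse(src);

        SpecificCompiler compiler = new SpecificCompiler(schema);
        compiler.setStringType(StringType.CharSequence);  // string fields as CharSequence
        compiler.setFieldVisibility(SpecificCompiler.FieldVisibility.PRIVATE);
        compiler.setCreateSetters(false);                 // omit setX() methods
        compiler.setOutputCharacterEncoding("UTF-8");     // encoding of generated .java
        compiler.compileToDestination(src, new File("target/generated-sources"));
      }
    }
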
diff --git a/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestIdl.java b/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestIdl.java
new file mode 100644
index 0000000..7e4f686
--- /dev/null
+++ b/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestIdl.java
@@ -0,0 +1,183 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.compiler.idl;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.avro.Protocol;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Simple test harness for Idl.
+ * This relies on an input/ and output/ directory. Inside
+ * the input/ directory are .avdl files. Each file should have
+ * a corresponding .avpr file in output/. When the test is run,
+ * it generates and stringifies each .avdl file and compares
+ * it to the expected output, failing if the two differ.
+ *
+ * To make it simpler to write these tests, you can run
+ *   ant -Dtestcase=TestIdl -Dtest.idl.mode=write
+ * which will *replace* all expected output.
+ */
+public class TestIdl {
+  private static final File TEST_DIR =
+    new File(System.getProperty("test.idl.dir", "src/test/idl"));
+
+  private static final File TEST_INPUT_DIR =
+    new File(TEST_DIR, "input");
+
+  private static final File TEST_OUTPUT_DIR =
+    new File(TEST_DIR, "output");
+
+  private static final String TEST_MODE =
+    System.getProperty("test.idl.mode", "run");
+
+  private List<GenTest> tests;
+
+  @Before
+  public void loadTests() {
+    assertTrue(TEST_DIR.exists());
+    assertTrue(TEST_INPUT_DIR.exists());
+    assertTrue(TEST_OUTPUT_DIR.exists());
+
+    tests = new ArrayList<GenTest>();
+    for (File inF : TEST_INPUT_DIR.listFiles()) {
+      if (!inF.getName().endsWith(".avdl")) continue;
+      if (inF.getName().startsWith(".")) continue;
+
+      File outF = new File(
+        TEST_OUTPUT_DIR,
+        inF.getName().replaceFirst("\\.avdl$", ".avpr"));
+      tests.add(new GenTest(inF, outF));
+    }
+  }
+
+  @Test
+  public void runTests() throws Exception {
+    if (! "run".equals(TEST_MODE)) return;
+
+    int passed = 0, failed = 0;
+    
+    for (GenTest t : tests) {
+      try {
+        t.run();
+        passed++;
+      } catch (Exception e) {
+        failed++;
+        System.err.println("Failed: " + t.testName());
+        e.printStackTrace(System.err);
+      }
+    }
+
+    if (failed > 0) {
+      fail(String.valueOf(failed) + " tests failed");
+    }
+  }
+
+  @Test
+  public void writeTests() throws Exception {
+    if (! "write".equals(TEST_MODE)) return;
+
+    for (GenTest t : tests) {
+      t.write();
+    }
+  }
+
+
+  /**
+   * An individual comparison test.
+   */
+  private static class GenTest {
+    private final File in, expectedOut;
+
+    public GenTest(File in, File expectedOut) {
+      this.in = in;
+      this.expectedOut = expectedOut;
+    }
+
+    private String generate() throws Exception {
+      ClassLoader cl = Thread.currentThread().getContextClassLoader();
+
+      // Calculate the absolute path to src/test/idl/putOnClassPath/
+      File file = new File(".");
+      String currentWorkPath = file.toURI().toURL().toString();
+      String newPath = currentWorkPath + "src" + File.separator + "test"
+        + File.separator + "idl" + File.separator
+        + "putOnClassPath" + File.separator;
+      URL[] newPathURL = new URL[]{new URL(newPath)}; 
+      URLClassLoader ucl = new URLClassLoader(newPathURL, cl);
+
+      Idl parser = new Idl(in, ucl);
+      Protocol p = parser.CompilationUnit();
+      parser.close();
+      return p.toString();
+    }
+
+    public String testName() {
+        return this.in.getName();
+    }
+
+    public void run() throws Exception {
+      String output = generate();
+      String slurped = slurp(expectedOut);
+      assertEquals(slurped.trim(), output.replace("\r", "").trim());
+    }
+
+    public void write() throws Exception {
+      writeFile(expectedOut, generate());
+    }
+
+    private static String slurp(File f) throws IOException {
+      BufferedReader in = new BufferedReader(
+          new InputStreamReader(new FileInputStream(f), "UTF-8"));
+
+      String line = null;
+      StringBuilder builder = new StringBuilder();
+      while ((line = in.readLine()) != null) {
+        builder.append(line);
+      }
+      in.close();
+      ObjectMapper mapper = new ObjectMapper();
+      JsonNode json = mapper.readTree(builder.toString());
+      return mapper.writer().writeValueAsString(json);
+    }
+
+    private static void writeFile(File f, String s) throws IOException {
+      FileWriter w = new FileWriter(f);
+      w.write(s);
+      w.close();
+    }
+  }
+}
diff --git a/lang/java/compiler/src/test/resources/simple_record.avsc b/lang/java/compiler/src/test/resources/simple_record.avsc
new file mode 100644
index 0000000..85781c5
--- /dev/null
+++ b/lang/java/compiler/src/test/resources/simple_record.avsc
@@ -0,0 +1,7 @@
+{
+  "type": "record", 
+  "name": "SimpleRecord",
+  "fields" : [
+    {"name": "value", "type": "int"}
+  ]
+}
\ No newline at end of file
diff --git a/lang/java/ipc/pom.xml b/lang/java/ipc/pom.xml
new file mode 100644
index 0000000..9802559
--- /dev/null
+++ b/lang/java/ipc/pom.xml
@@ -0,0 +1,153 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <artifactId>avro-parent</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.8.0</version>
+    <relativePath>../</relativePath>
+  </parent>
+
+  <artifactId>avro-ipc</artifactId>
+
+  <name>Apache Avro IPC</name>
+  <url>http://avro.apache.org</url>
+  <description>Avro inter-process communication components</description>
+  <packaging>bundle</packaging>
+
+  <properties>
+    <osgi.import>
+      !org.apache.avro.ipc*,
+      org.apache.avro*;version="${project.version}",
+      org.jboss.netty*,
+      javax.servlet*;resolution:=optional,
+      org.mortbay*;resolution:=optional,
+      org.apache.velocity*;resolution:=optional,
+      *
+    </osgi.import>
+    <osgi.export>org.apache.avro.ipc*;version="${project.version}"</osgi.export>
+  </properties>
+
+  <build>
+    <resources>
+      <resource>
+        <directory>src/main/velocity</directory>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>${project.groupId}</groupId>
+        <artifactId>avro-maven-plugin</artifactId>
+        <version>${project.version}</version>
+        <executions>
+          <execution>
+            <id>schemas</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>schema</goal>
+              <goal>protocol</goal>
+              <goal>idl-protocol</goal>
+            </goals>
+            <configuration>
+              <excludes>
+                <exclude>**/mapred/tether/**</exclude>
+                <exclude>org/apache/avro/data/Json.avsc</exclude>
+              </excludes>
+              <stringType>String</stringType>
+              <sourceDirectory>${parent.project.basedir}/../../../../share/schemas/</sourceDirectory>
+              <outputDirectory>${project.build.directory}/generated-sources/java</outputDirectory>
+              <testSourceDirectory>${parent.project.basedir}/../../../../share/test/schemas/</testSourceDirectory>
+              <testOutputDirectory>${project.build.directory}/generated-test-sources/java</testOutputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro-compiler</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-core-asl</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>jetty</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>jetty-util</artifactId>
+      <version>${jetty.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.netty</groupId>
+      <artifactId>netty</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.velocity</groupId>
+      <artifactId>velocity</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.mortbay.jetty</groupId>
+      <artifactId>servlet-api</artifactId>
+      <version>${jetty-servlet-api.version}</version>
+    </dependency>
+
+  </dependencies>
+
+</project>
+
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/CallFuture.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/CallFuture.java
new file mode 100644
index 0000000..77561d3
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/CallFuture.java
@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+/**
+ * A Future implementation for RPCs.
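+ * <p>A minimal usage sketch (the asynchronous client call is an assumption,
+ * not part of this class):
+ * <pre>{@code
+ *   CallFuture<String> future = new CallFuture<String>();
+ *   client.hello("world", future);                  // async call registers the callback
+ *   String reply = future.get(5, TimeUnit.SECONDS); // blocks for a result or error
+ * }</pre>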
+ */
+public class CallFuture<T> implements Future<T>, Callback<T> {
+  private final CountDownLatch latch = new CountDownLatch(1);
+  private final Callback<T> chainedCallback;
+  private T result = null;
+  private Throwable error = null;
+  
+  /**
+   * Creates a CallFuture.
+   */
+  public CallFuture() {
+    this(null);
+  }
+  
+  /**
+   * Creates a CallFuture with a chained Callback which will be invoked
+   * when this CallFuture's Callback methods are invoked.
+   * @param chainedCallback the chained Callback to set.
+   */
+  public CallFuture(Callback<T> chainedCallback) {
+    this.chainedCallback = chainedCallback;
+  }
+  
+  /**
+   * Sets the RPC response, and unblocks all threads waiting on {@link #get()} 
+   * or {@link #get(long, TimeUnit)}.
+   * @param result the RPC result to set.
+   */
+  @Override
+  public void handleResult(T result) {
+    this.result = result;
+    latch.countDown();
+    if (chainedCallback != null) {
+      chainedCallback.handleResult(result);
+    }
+  }
+  
+  /**
+   * Sets an error thrown during RPC execution, and unblocks all threads waiting 
+   * on {@link #get()} or {@link #get(long, TimeUnit)}.
+   * @param error the RPC error to set.
+   */
+  @Override
+  public void handleError(Throwable error) {
+    this.error = error;
+    latch.countDown();
+    if (chainedCallback != null) {
+      chainedCallback.handleError(error);
+    }
+  }
+
+  /**
+   * Gets the value of the RPC result without blocking.
+   * Using {@link #get()} or {@link #get(long, TimeUnit)} is usually 
+   * preferred because these methods block until the result is available or 
+   * an error occurs. 
+   * @return the value of the response, or null if no result was returned or 
+   * the RPC has not yet completed.
+   */
+  public T getResult() {
+    return result;
+  }
+  
+  /**
+   * Gets the error that was thrown during RPC execution.  Does not block.
+   * Either {@link #get()} or {@link #get(long, TimeUnit)} should be called 
+   * first because these methods block until the RPC has completed.
+   * @return the RPC error that was thrown, or null if no error has occurred or 
+   * if the RPC has not yet completed.
+   */
+  public Throwable getError() {
+    return error;
+  }
+
+  @Override
+  public boolean cancel(boolean mayInterruptIfRunning) {
+    return false;
+  }
+
+  @Override
+  public boolean isCancelled() {
+    return false;
+  }
+
+  @Override
+  public T get() throws InterruptedException,
+      ExecutionException {
+    latch.await();
+    if (error != null) {
+      throw new ExecutionException(error);
+    }
+    return result;
+  }
+
+  @Override
+  public T get(long timeout, TimeUnit unit)
+      throws InterruptedException, ExecutionException, TimeoutException {
+    if (latch.await(timeout, unit)) {
+      if (error != null) {
+        throw new ExecutionException(error);
+      }
+      return result;
+    } else {
+      throw new TimeoutException();
+    }
+  }
+  
+  /**
+   * Waits for the CallFuture to complete without returning the result.
+   * @throws InterruptedException if interrupted.
+   */
+  public void await() throws InterruptedException {
+    latch.await();
+  }
+  
+  /**
+   * Waits for the CallFuture to complete without returning the result.
+   * @param timeout the maximum time to wait.
+   * @param unit the time unit of the timeout argument.
+   * @throws InterruptedException if interrupted.
+   * @throws TimeoutException if the wait timed out.
+   */
+  public void await(long timeout, TimeUnit unit) 
+    throws InterruptedException, TimeoutException {
+    if (!latch.await(timeout, unit)) {
+      throw new TimeoutException();
+    }
+  }
+
+  @Override
+  public boolean isDone() {
+    return latch.getCount() <= 0;
+  }
+}
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Callback.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Callback.java
new file mode 100644
index 0000000..fdad4a7
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Callback.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+/**
+ * Interface for receiving asynchronous callbacks.
+ * For each request with an asynchronous callback, 
+ * either {@link #handleResult(Object)} or {@link #handleError(Throwable)} 
+ * will be invoked.
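+ * <p>A minimal inline implementation sketch (the payload type is
+ * illustrative):
+ * <pre>{@code
+ *   Callback<String> callback = new Callback<String>() {
+ *     public void handleResult(String result) { System.out.println(result); }
+ *     public void handleError(Throwable error) { error.printStackTrace(); }
+ *   };
+ * }</pre>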
+ */
+public interface Callback<T> {
+  /**
+   * Receives a callback result.
+   * @param result the result returned in the callback.
+   */
+  void handleResult(T result);
+  
+  /**
+   * Receives an error.
+   * @param error the error returned in the callback.
+   */
+  void handleError(Throwable error);
+}
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramServer.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramServer.java
new file mode 100644
index 0000000..f0a8f1b
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramServer.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.nio.channels.ClosedChannelException;
+import java.nio.channels.DatagramChannel;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** A datagram-based server implementation. This uses a simple, non-standard
+ * wire protocol and is not intended for production services. */
+public class DatagramServer extends Thread implements Server {
+  private static final Logger LOG =
+    LoggerFactory.getLogger(DatagramServer.class);
+
+  private final Responder responder;
+  private final DatagramChannel channel;
+  private final Transceiver transceiver;
+
+  public DatagramServer(Responder responder, SocketAddress addr)
+    throws IOException {
+    String name = "DatagramServer on "+addr;
+
+    this.responder = responder;
+
+    this.channel = DatagramChannel.open();
+    channel.socket().bind(addr);
+
+    this.transceiver = new DatagramTransceiver(channel);
+
+    setName(name);
+    setDaemon(true);
+  }
+
+  public int getPort() { return channel.socket().getLocalPort(); }
+
+  public void run() {
+    while (true) {
+      try {
+        transceiver.writeBuffers(responder.respond(transceiver.readBuffers()));
+      } catch (ClosedChannelException e) {
+        return;
+      } catch (IOException e) {
+        LOG.warn("unexpected error", e);
+        throw new RuntimeException(e);
+      }
+    }
+  }
+  
+  public void close() { this.interrupt(); }
+
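+  /** Ad-hoc smoke test: starts the server thread on an ephemeral port (0)
+   *  with no responder attached, prints "started", and blocks on join(). */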
+  public static void main(String[] arg) throws Exception {
+    DatagramServer server = new DatagramServer(null, new InetSocketAddress(0));
+    server.start();
+    System.out.println("started");
+    server.join();
+  }
+
+}
+
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramTransceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramTransceiver.java
new file mode 100644
index 0000000..db4c1f0
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramTransceiver.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.util.List;
+import java.util.ArrayList;
+import java.io.IOException;
+import java.net.SocketAddress;
+import java.nio.ByteBuffer;
+import java.nio.channels.DatagramChannel;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** A datagram-based {@link Transceiver} implementation. This uses a simple,
+ * non-standard wire protocol and is not intended for production services. */
+public class DatagramTransceiver extends Transceiver {
+  private static final Logger LOG
+    = LoggerFactory.getLogger(DatagramTransceiver.class);
+
+  private static final int MAX_SIZE = 16 * 1024;
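+
+  // Per-datagram wire format, shared by readBuffers and writeBuffers below:
+  // a sequence of frames, each a 4-byte big-endian length followed by that
+  // many payload bytes, terminated by a zero-length frame.  The entire
+  // message must fit within MAX_SIZE bytes.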
+
+  private DatagramChannel channel;
+  private SocketAddress remote;
+  private ByteBuffer buffer = ByteBuffer.allocate(MAX_SIZE);
+
+  public String getRemoteName() { return remote.toString(); }
+
+  public DatagramTransceiver(SocketAddress remote) throws IOException {
+    this(DatagramChannel.open());
+    this.remote = remote;
+  }
+
+  public DatagramTransceiver(DatagramChannel channel) {
+    this.channel = channel;
+  }
+
+  public synchronized List<ByteBuffer> readBuffers() throws IOException {
+    buffer.clear();
+    remote = channel.receive(buffer);
+    LOG.info("received from "+remote);
+    buffer.flip();
+    List<ByteBuffer> buffers = new ArrayList<ByteBuffer>();
+    while (true) {
+      int length = buffer.getInt();
+      if (length == 0) {                          // end of buffers
+        return buffers;
+      }
+      ByteBuffer chunk = buffer.slice();          // use data without copying
+      chunk.limit(length);
+      buffer.position(buffer.position()+length);
+      buffers.add(chunk);
+    }
+  }
+
+  public synchronized void writeBuffers(List<ByteBuffer> buffers)
+    throws IOException {
+    buffer.clear();
+    for (ByteBuffer b : buffers) {
+      buffer.putInt(b.remaining());
+      buffer.put(b);                              // copy data.  sigh.
+    }
+    buffer.putInt(0);
+    buffer.flip();
+    channel.send(buffer, remote);
+    LOG.info("sent to "+remote);
+  }
+
+}
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpServer.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpServer.java
new file mode 100644
index 0000000..be6bbb2
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpServer.java
@@ -0,0 +1,110 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+
+import org.apache.avro.AvroRuntimeException;
+
+import org.mortbay.jetty.Connector;
+import org.mortbay.jetty.nio.SelectChannelConnector;
+import org.mortbay.jetty.servlet.Context;
+import org.mortbay.jetty.servlet.ServletHolder;
+
+/** An HTTP-based RPC {@link Server}. */
+public class HttpServer implements Server {
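+  // Minimal usage sketch (the responder construction is illustrative, not
+  // part of this class):
+  //   HttpServer server = new HttpServer(responder, 8080);
+  //   server.start();   // serves Avro RPC over HTTP POST at "/*"
+  //   server.join();
+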
+  private org.mortbay.jetty.Server server;
+
+  /** Constructs a server to run on the named port. */
+  public HttpServer(Responder responder, int port) throws IOException {
+    this(new ResponderServlet(responder), null, port);
+  }
+
+  /** Constructs a server to run on the named port. */
+  public HttpServer(ResponderServlet servlet, int port) throws IOException {
+    this(servlet, null, port);
+  }
+
+  /** Constructs a server to run on the named port on the specified address. */
+  public HttpServer(Responder responder, String bindAddress, int port) throws IOException {
+    this(new ResponderServlet(responder), bindAddress, port);
+  }
+
+  /** Constructs a server to run on the named port on the specified address. */
+  public HttpServer(ResponderServlet servlet, String bindAddress, int port) throws IOException {
+    this.server = new org.mortbay.jetty.Server();
+    SelectChannelConnector connector = new SelectChannelConnector();
+    connector.setLowResourceMaxIdleTime(10000);
+    connector.setAcceptQueueSize(128);
+    connector.setResolveNames(false);
+    connector.setUseDirectBuffers(false);
+    if (bindAddress != null) {
+      connector.setHost(bindAddress);
+    }
+    connector.setPort(port);
+    server.addConnector(connector);
+    new Context(server, "/").addServlet(new ServletHolder(servlet), "/*");
+  }
+
+  /** Constructs a server to run with the given connector. */
+  public HttpServer(Responder responder, Connector connector) throws IOException {
+    this(new ResponderServlet(responder), connector);
+  }
+
+  /** Constructs a server to run with the given connector. */
+  public HttpServer(ResponderServlet servlet, Connector connector) throws IOException {
+    this.server = new org.mortbay.jetty.Server();
+    server.addConnector(connector);
+    new Context(server, "/").addServlet(new ServletHolder(servlet), "/*");
+  }
+
+  public void addConnector(Connector connector) {
+    server.addConnector(connector);
+  }
+
+  @Override
+  public int getPort() { return server.getConnectors()[0].getLocalPort(); }
+
+  @Override
+  public void close() {
+    try {
+      server.stop();
+    } catch (Exception e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  /** Start the server.
+   * @throws AvroRuntimeException if the underlying Jetty server
+   * throws any exception while starting.
+   */
+  @Override
+  public void start() {
+    try {
+      server.start();
+    } catch (Exception e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  @Override
+  public void join() throws InterruptedException {
+    server.join();
+  }
+}
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpTransceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpTransceiver.java
new file mode 100644
index 0000000..9f6572f
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpTransceiver.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.EOFException;
+import java.net.Proxy;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.net.URL;
+import java.net.HttpURLConnection;
+
+/** An HTTP-based {@link Transceiver} implementation. */
+public class HttpTransceiver extends Transceiver {
+  static final String CONTENT_TYPE = "avro/binary"; 
+
+  private URL url;
+  private Proxy proxy;
+  private HttpURLConnection connection;
+  private int timeout;
+  
+  public HttpTransceiver(URL url) { this.url = url; }
+
+  public HttpTransceiver(URL url, Proxy proxy) {
+    this(url);
+    this.proxy = proxy;
+  }
+
+  /** Set the connect and read timeouts, in milliseconds. */
+  public void setTimeout(int timeout) { this.timeout = timeout; }
+
+  public String getRemoteName() { return this.url.toString(); }
+    
+  public synchronized List<ByteBuffer> readBuffers() throws IOException {
+    InputStream in = connection.getInputStream();
+    try {
+      return readBuffers(in);
+    } finally {
+      in.close();
+    }
+  }
+
+  public synchronized void writeBuffers(List<ByteBuffer> buffers)
+    throws IOException {
+    if (proxy == null)
+      connection = (HttpURLConnection)url.openConnection();
+    else
+      connection = (HttpURLConnection)url.openConnection(proxy);
+
+    connection.setRequestMethod("POST");
+    connection.setRequestProperty("Content-Type", CONTENT_TYPE);
+    connection.setRequestProperty("Content-Length",
+                                  Integer.toString(getLength(buffers)));
+    connection.setDoOutput(true);
+    connection.setReadTimeout(timeout);
+    connection.setConnectTimeout(timeout);
+
+    OutputStream out = connection.getOutputStream();
+    try {
+      writeBuffers(buffers, out);
+    } finally {
+      out.close();
+    }
+  }
+
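+  // HTTP body format, shared by the static readBuffers/writeBuffers helpers
+  // below: each buffer is preceded by its 4-byte big-endian length, and the
+  // sequence is terminated by a zero length (written by writeLength(0, out)).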
+  static int getLength(List<ByteBuffer> buffers) {
+    int length = 0;
+    for (ByteBuffer buffer : buffers) {
+      length += 4;
+      length += buffer.remaining();
+    }
+    length += 4;
+    return length;
+  }
+
+  static List<ByteBuffer> readBuffers(InputStream in)
+    throws IOException {
+    List<ByteBuffer> buffers = new ArrayList<ByteBuffer>();
+    while (true) {
+      int length = (in.read()<<24)+(in.read()<<16)+(in.read()<<8)+in.read();
+      if (length == 0) {                       // end of buffers
+        return buffers;
+      }
+      ByteBuffer buffer = ByteBuffer.allocate(length);
+      while (buffer.hasRemaining()) {
+        int p = buffer.position();
+        int i = in.read(buffer.array(), p, buffer.remaining());
+        if (i < 0)
+          throw new EOFException("Unexpected EOF");
+        buffer.position(p+i);
+      }
+      buffer.flip();
+      buffers.add(buffer);
+    }
+  }
+
+  static void writeBuffers(List<ByteBuffer> buffers, OutputStream out)
+    throws IOException {
+    for (ByteBuffer buffer : buffers) {
+      writeLength(buffer.limit(), out);           // length-prefix
+      out.write(buffer.array(), buffer.position(), buffer.remaining());
+      buffer.position(buffer.limit());
+    }
+    writeLength(0, out);                          // null-terminate
+  }
+
+  private static void writeLength(int length, OutputStream out)
+    throws IOException {
+    out.write(0xff & (length >>> 24));
+    out.write(0xff & (length >>> 16));
+    out.write(0xff & (length >>> 8));
+    out.write(0xff & length);
+  }
+}
+
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Ipc.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Ipc.java
new file mode 100644
index 0000000..435644c
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Ipc.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.URI;
+
+/** IPC utilities, including client and server factories. */
+public class Ipc {
+  private Ipc() {}                                // no public ctor
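+
+  // Usage sketch (the host/port and responder are illustrative):
+  //   Transceiver t = Ipc.createTransceiver(URI.create("http://localhost:8080"));
+  //   Server s = Ipc.createServer(responder, URI.create("avro://0.0.0.0:9090"));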
+
+  /** Create a client {@link Transceiver} connecting to the provided URI. */
+  public static Transceiver createTransceiver(URI uri) throws IOException {
+    if ("http".equals(uri.getScheme()))
+       return new HttpTransceiver(uri.toURL());
+    else if ("avro".equals(uri.getScheme()))
+      return new SaslSocketTransceiver
+        (new InetSocketAddress(uri.getHost(), uri.getPort()));
+    else
+      throw new IOException("unknown uri scheme: "+uri);
+  }
+
+  /** Create a {@link Server} listening at the named URI using the provided
+   * responder. */
+  public static Server createServer(Responder responder,
+                                    URI uri) throws IOException {
+    if ("http".equals(uri.getScheme()))
+      return new HttpServer(responder, uri.getPort());
+    else if ("avro".equals(uri.getScheme()))
+      return new SaslSocketServer
+        (responder, new InetSocketAddress(uri.getHost(), uri.getPort()));
+    else
+      throw new IOException("unknown uri scheme: "+uri);
+  }
+
+}
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/LocalTransceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/LocalTransceiver.java
new file mode 100644
index 0000000..fa93ac8
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/LocalTransceiver.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+/** Implementation of IPC that remains in process. */
+public class LocalTransceiver extends Transceiver {
+  private Responder responder;
+
+  public LocalTransceiver(Responder responder) {
+    this.responder = responder;
+  }
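+
+  // Typical use is in-process testing without sockets (the protocol interface
+  // Proto and its implementation impl are assumptions for illustration):
+  //   Transceiver t = new LocalTransceiver(new SpecificResponder(Proto.class, impl));
+  //   Proto proxy = SpecificRequestor.getClient(Proto.class, t);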
+
+  @Override
+  public String getRemoteName() {
+    return "local";
+  }
+
+  @Override
+  public List<ByteBuffer> transceive(List<ByteBuffer> request)
+      throws IOException {
+    return responder.respond(request);
+  }
+
+  @Override
+  public List<ByteBuffer> readBuffers() throws IOException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public void writeBuffers(List<ByteBuffer> buffers) throws IOException {
+    throw new UnsupportedOperationException();
+  }
+}
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyServer.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyServer.java
new file mode 100644
index 0000000..a86ebbe
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyServer.java
@@ -0,0 +1,217 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Executors;
+
+import org.apache.avro.ipc.NettyTransportCodec.NettyDataPack;
+import org.apache.avro.ipc.NettyTransportCodec.NettyFrameDecoder;
+import org.apache.avro.ipc.NettyTransportCodec.NettyFrameEncoder;
+import org.jboss.netty.bootstrap.ServerBootstrap;
+import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelEvent;
+import org.jboss.netty.channel.ChannelFactory;
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.channel.ChannelPipeline;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.channel.ChannelStateEvent;
+import org.jboss.netty.channel.Channels;
+import org.jboss.netty.channel.ExceptionEvent;
+import org.jboss.netty.channel.MessageEvent;
+import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
+import org.jboss.netty.channel.group.ChannelGroup;
+import org.jboss.netty.channel.group.ChannelGroupFuture;
+import org.jboss.netty.channel.group.DefaultChannelGroup;
+import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
+import org.jboss.netty.handler.execution.ExecutionHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A Netty-based RPC {@link Server} implementation.
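+ * <p>A minimal usage sketch (the responder is an assumption for
+ * illustration):
+ * <pre>{@code
+ *   Server server = new NettyServer(responder, new InetSocketAddress(9090));
+ *   server.start();   // a no-op: the server is already listening once constructed
+ * }</pre>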
+ */
+public class NettyServer implements Server {
+  private static final Logger LOG = LoggerFactory.getLogger(NettyServer.class
+      .getName());
+
+  private final Responder responder;
+
+  private final Channel serverChannel;
+  private final ChannelGroup allChannels = new DefaultChannelGroup(
+      "avro-netty-server");
+  private final ChannelFactory channelFactory;
+  private final CountDownLatch closed = new CountDownLatch(1);
+  private final ExecutionHandler executionHandler;            
+  
+  public NettyServer(Responder responder, InetSocketAddress addr) {
+    this(responder, addr, new NioServerSocketChannelFactory
+         (Executors.newCachedThreadPool(), Executors.newCachedThreadPool()));
+  }
+  
+  public NettyServer(Responder responder, InetSocketAddress addr,
+                     ChannelFactory channelFactory) {
+    this(responder, addr, channelFactory, null);
+  }
+
+  /**
+   * @param executionHandler if not null, will be inserted into the Netty
+   *                         pipeline. Use this when your responder does
+   *                         long, non-CPU-bound processing (see Netty's
+   *                         ExecutionHandler javadoc).
+   * @param pipelineFactory  Avro-related handlers will be added on top of
+   *                         what this factory creates
+   */
+  public NettyServer(Responder responder, InetSocketAddress addr,
+                     ChannelFactory channelFactory,
+                     final ChannelPipelineFactory pipelineFactory,
+                     final ExecutionHandler executionHandler) {
+    this.responder = responder;
+    this.channelFactory = channelFactory;
+    this.executionHandler = executionHandler;
+    ServerBootstrap bootstrap = new ServerBootstrap(channelFactory);
+    bootstrap.setPipelineFactory(new ChannelPipelineFactory() {
+      @Override
+      public ChannelPipeline getPipeline() throws Exception {
+        ChannelPipeline p = pipelineFactory.getPipeline();
+        p.addLast("frameDecoder", new NettyFrameDecoder());
+        p.addLast("frameEncoder", new NettyFrameEncoder());
+        if (executionHandler != null) {
+          p.addLast("executionHandler", executionHandler);
+        }
+        p.addLast("handler", new NettyServerAvroHandler());
+        return p;
+      }
+    });
+    serverChannel = bootstrap.bind(addr);
+    allChannels.add(serverChannel);
+  }
+
+  /**
+   * @param executionHandler if not null, will be inserted into the Netty
+   *                         pipeline. Use this when your responder does
+   *                         long, non-CPU-bound processing (see Netty's
+   *                         ExecutionHandler javadoc).
+   */
+  public NettyServer(Responder responder, InetSocketAddress addr,
+                     ChannelFactory channelFactory,
+                     final ExecutionHandler executionHandler) {
+    this(responder, addr, channelFactory, new ChannelPipelineFactory() {
+      @Override
+      public ChannelPipeline getPipeline() throws Exception {
+        return Channels.pipeline();
+      }
+    }, executionHandler);
+  }
+    
+  @Override
+  public void start() {
+    // No-op.
+  }
+  
+  @Override
+  public void close() {
+    ChannelGroupFuture future = allChannels.close();
+    future.awaitUninterruptibly();
+    channelFactory.releaseExternalResources();
+    closed.countDown();
+  }
+  
+  @Override
+  public int getPort() {
+    return ((InetSocketAddress) serverChannel.getLocalAddress()).getPort();
+  }
+
+  @Override
+  public void join() throws InterruptedException {
+    closed.await();
+  }
+  
+  /**
+   * @return the number of clients currently connected to this server.
+   */
+  public int getNumActiveConnections() {
+    // allChannels also contains the server channel, so exclude that from
+    // the count.
+    return allChannels.size() - 1;
+  }
+
+  /**
+   * Avro server handler for the Netty transport 
+   */
+  class NettyServerAvroHandler extends SimpleChannelUpstreamHandler {
+
+    private NettyTransceiver connectionMetadata = new NettyTransceiver();
+    
+    @Override
+    public void handleUpstream(ChannelHandlerContext ctx, ChannelEvent e)
+        throws Exception {
+      if (e instanceof ChannelStateEvent) {
+        LOG.info(e.toString());
+      }
+      super.handleUpstream(ctx, e);
+    }
+
+    @Override
+    public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent e)
+        throws Exception {
+      allChannels.add(e.getChannel());
+      super.channelOpen(ctx, e);
+    }
+
+    @Override
+    public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) {
+      try {
+        NettyDataPack dataPack = (NettyDataPack) e.getMessage();
+        List<ByteBuffer> req = dataPack.getDatas();
+        List<ByteBuffer> res = responder.respond(req, connectionMetadata);
+        // response will be null for oneway messages.
+        if (res != null) {
+          dataPack.setDatas(res);
+          e.getChannel().write(dataPack);
+        }
+      } catch (IOException ex) {
+        LOG.warn("unexpected error", ex);
+      }
+    }
+
+    @Override
+    public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
+      LOG.warn("Unexpected exception from downstream.", e.getCause());
+      e.getChannel().close();
+      allChannels.remove(e.getChannel());
+    }
+
+    @Override
+    public void channelClosed(
+            ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
+      LOG.info("Connection to {} disconnected.",
+              e.getChannel().getRemoteAddress());
+      super.channelClosed(ctx, e);
+      e.getChannel().close();
+      allChannels.remove(e.getChannel());
+    }
+
+  }
+}
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransceiver.java
new file mode 100644
index 0000000..a8a2e3d
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransceiver.java
@@ -0,0 +1,651 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import org.apache.avro.Protocol;
+import org.apache.avro.ipc.NettyTransportCodec.NettyDataPack;
+import org.apache.avro.ipc.NettyTransportCodec.NettyFrameDecoder;
+import org.apache.avro.ipc.NettyTransportCodec.NettyFrameEncoder;
+import org.jboss.netty.bootstrap.ClientBootstrap;
+import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelEvent;
+import org.jboss.netty.channel.ChannelFactory;
+import org.jboss.netty.channel.ChannelFuture;
+import org.jboss.netty.channel.ChannelFutureListener;
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.channel.ChannelPipeline;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.channel.ChannelState;
+import org.jboss.netty.channel.ChannelStateEvent;
+import org.jboss.netty.channel.ChannelUpstreamHandler;
+import org.jboss.netty.channel.Channels;
+import org.jboss.netty.channel.ExceptionEvent;
+import org.jboss.netty.channel.MessageEvent;
+import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
+import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A Netty-based {@link Transceiver} implementation.
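+ * <p>A usage sketch (the generated protocol interface {@code Proto} is an
+ * assumption):
+ * <pre>{@code
+ *   NettyTransceiver client = new NettyTransceiver(new InetSocketAddress("localhost", 9090));
+ *   Proto proxy = SpecificRequestor.getClient(Proto.class, client);
+ *   // ... make RPCs through proxy ...
+ *   client.close();   // cancels pending RPCs and releases Netty resources
+ * }</pre>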
+ */
+public class NettyTransceiver extends Transceiver {
+  /** If not specified, the default connection timeout will be used (60 sec). */
+  public static final long DEFAULT_CONNECTION_TIMEOUT_MILLIS = 60 * 1000L;
+  public static final String NETTY_CONNECT_TIMEOUT_OPTION = 
+      "connectTimeoutMillis";
+  public static final String NETTY_TCP_NODELAY_OPTION = "tcpNoDelay";
+  public static final String NETTY_KEEPALIVE_OPTION = "keepAlive";
+  public static final boolean DEFAULT_TCP_NODELAY_VALUE = true;
+  
+  private static final Logger LOG = LoggerFactory.getLogger(NettyTransceiver.class
+      .getName());
+
+  private final AtomicInteger serialGenerator = new AtomicInteger(0);
+  private final Map<Integer, Callback<List<ByteBuffer>>> requests = 
+    new ConcurrentHashMap<Integer, Callback<List<ByteBuffer>>>();
+  
+  private final ChannelFactory channelFactory;
+  private final long connectTimeoutMillis;
+  private final ClientBootstrap bootstrap;
+  private final InetSocketAddress remoteAddr;
+  
+  volatile ChannelFuture channelFuture;
+  volatile boolean stopping;
+  private final Object channelFutureLock = new Object();
+
+  /**
+   * Read lock must be acquired whenever using non-final state.
+   * Write lock must be acquired whenever modifying state.
+   */
+  private final ReentrantReadWriteLock stateLock = new ReentrantReadWriteLock();
+  private Channel channel;       // Synchronized on stateLock
+  private Protocol remote;       // Synchronized on stateLock
+
+  NettyTransceiver() {
+    channelFactory = null;
+    connectTimeoutMillis = 0L;
+    bootstrap = null;
+    remoteAddr = null;
+    channelFuture = null;
+  }
+
+  /**
+   * Creates a NettyTransceiver, and attempts to connect to the given address.
+   * {@link #DEFAULT_CONNECTION_TIMEOUT_MILLIS} is used for the connection 
+   * timeout.
+   * @param addr the address to connect to.
+   * @throws IOException if an error occurs connecting to the given address.
+   */
+  public NettyTransceiver(InetSocketAddress addr) throws IOException {
+    this(addr, DEFAULT_CONNECTION_TIMEOUT_MILLIS);
+  }
+  
+  /**
+   * Creates a NettyTransceiver, and attempts to connect to the given address.
+   * @param addr the address to connect to.
+   * @param connectTimeoutMillis maximum amount of time to wait for connection 
+   * establishment in milliseconds, or null to use 
+   * {@link #DEFAULT_CONNECTION_TIMEOUT_MILLIS}.
+   * @throws IOException if an error occurs connecting to the given address.
+   */
+  public NettyTransceiver(InetSocketAddress addr, 
+      Long connectTimeoutMillis) throws IOException {
+    this(addr, new NioClientSocketChannelFactory(
+        Executors.newCachedThreadPool(new NettyTransceiverThreadFactory(
+            "Avro " + NettyTransceiver.class.getSimpleName() + " Boss")), 
+        Executors.newCachedThreadPool(new NettyTransceiverThreadFactory(
+            "Avro " + NettyTransceiver.class.getSimpleName() + " I/O Worker"))), 
+        connectTimeoutMillis);
+  }
+
+  /**
+   * Creates a NettyTransceiver, and attempts to connect to the given address.
+   * {@link #DEFAULT_CONNECTION_TIMEOUT_MILLIS} is used for the connection 
+   * timeout.
+   * @param addr the address to connect to.
+   * @param channelFactory the factory to use to create a new Netty Channel.
+   * @throws IOException if an error occurs connecting to the given address.
+   */
+  public NettyTransceiver(InetSocketAddress addr, ChannelFactory channelFactory) 
+    throws IOException {
+    this(addr, channelFactory, buildDefaultBootstrapOptions(null));
+  }
+  
+  /**
+   * Creates a NettyTransceiver, and attempts to connect to the given address.
+   * @param addr the address to connect to.
+   * @param channelFactory the factory to use to create a new Netty Channel.
+   * @param connectTimeoutMillis maximum amount of time to wait for connection 
+   * establishment in milliseconds, or null to use 
+   * {@link #DEFAULT_CONNECTION_TIMEOUT_MILLIS}.
+   * @throws IOException if an error occurs connecting to the given address.
+   */
+  public NettyTransceiver(InetSocketAddress addr, ChannelFactory channelFactory, 
+      Long connectTimeoutMillis) throws IOException {
+    this(addr, channelFactory, 
+        buildDefaultBootstrapOptions(connectTimeoutMillis));
+  }
+  
+  /**
+   * Creates a NettyTransceiver, and attempts to connect to the given address.
+   * It is strongly recommended that the {@link #NETTY_CONNECT_TIMEOUT_OPTION} 
+   * option be set to a reasonable timeout value (a Long value in milliseconds) 
+   * to prevent connect/disconnect attempts from hanging indefinitely.  It is 
+   * also recommended that the {@link #NETTY_TCP_NODELAY_OPTION} option be set 
+   * to true to minimize RPC latency.
+   * @param addr the address to connect to.
+   * @param channelFactory the factory to use to create a new Netty Channel.
+   * @param nettyClientBootstrapOptions map of Netty ClientBootstrap options 
+   * to use.
+   * @throws IOException if an error occurs connecting to the given address.
+   */
+  public NettyTransceiver(InetSocketAddress addr, ChannelFactory channelFactory, 
+      Map<String, Object> nettyClientBootstrapOptions) throws IOException {
+    if (channelFactory == null) {
+      throw new NullPointerException("channelFactory is null");
+    }
+    
+    // Set up.
+    this.channelFactory = channelFactory;
+    this.connectTimeoutMillis = (Long) 
+        nettyClientBootstrapOptions.get(NETTY_CONNECT_TIMEOUT_OPTION);
+    bootstrap = new ClientBootstrap(channelFactory);
+    remoteAddr = addr;
+
+    // Configure the event pipeline factory.
+    bootstrap.setPipelineFactory(new ChannelPipelineFactory() {
+      @Override
+      public ChannelPipeline getPipeline() throws Exception {
+        ChannelPipeline p = Channels.pipeline();
+        p.addLast("frameDecoder", new NettyFrameDecoder());
+        p.addLast("frameEncoder", new NettyFrameEncoder());
+        p.addLast("handler", createNettyClientAvroHandler());
+        return p;
+      }
+    });
+
+    if (nettyClientBootstrapOptions != null) {
+      LOG.debug("Using Netty bootstrap options: " + 
+          nettyClientBootstrapOptions);
+      bootstrap.setOptions(nettyClientBootstrapOptions);
+    }
+
+    // Make a new connection.
+    stateLock.readLock().lock();
+    try {
+      getChannel();
+    } catch (Throwable e) {
+      // must attempt to clean up any allocated channel future
+      if (channelFuture != null) {
+        channelFuture.getChannel().close();
+      }
+
+      if (e instanceof IOException)
+        throw (IOException)e;
+      if (e instanceof RuntimeException)
+        throw (RuntimeException)e;
+      // all that's left is Error
+      throw (Error)e;
+    } finally {
+      stateLock.readLock().unlock();
+    }
+  }
+  
+  /**
+   * Creates a Netty ChannelUpstreamHandler for handling events on the 
+   * Netty client channel.
+   * @return the ChannelUpstreamHandler to use.
+   */
+  protected ChannelUpstreamHandler createNettyClientAvroHandler() {
+    return new NettyClientAvroHandler();
+  }
+  
+  /**
+   * Creates the default options map for the Netty ClientBootstrap.
+   * @param connectTimeoutMillis connection timeout in milliseconds, or null 
+   * if no timeout is desired.
+   * @return the map of Netty bootstrap options.
+   */
+  protected static Map<String, Object> buildDefaultBootstrapOptions(
+      Long connectTimeoutMillis) {
+    Map<String, Object> options = new HashMap<String, Object>(3);
+    options.put(NETTY_TCP_NODELAY_OPTION, DEFAULT_TCP_NODELAY_VALUE);
+    options.put(NETTY_KEEPALIVE_OPTION, true);
+    options.put(NETTY_CONNECT_TIMEOUT_OPTION, 
+        connectTimeoutMillis == null ? DEFAULT_CONNECTION_TIMEOUT_MILLIS : 
+          connectTimeoutMillis);
+    return options;
+  }
+  
+  /**
+   * Tests whether the given channel is ready for writing.
+   * @return true if the channel is open and ready; false otherwise.
+   */
+  private static boolean isChannelReady(Channel channel) {
+    return (channel != null) && 
+      channel.isOpen() && channel.isBound() && channel.isConnected();
+  }
+  
+  /**
+   * Gets the Netty channel.  If the channel is not connected, first attempts 
+   * to connect.
+   * NOTE: The stateLock read lock *must* be acquired before calling this 
+   * method.
+   * @return the Netty channel
+   * @throws IOException if an error occurs connecting the channel.
+   */
+  private Channel getChannel() throws IOException {
+    if (!isChannelReady(channel)) {
+      // Need to reconnect
+      // Upgrade to write lock
+      stateLock.readLock().unlock();
+      stateLock.writeLock().lock();
+      try {
+        if (!isChannelReady(channel)) {
+          synchronized(channelFutureLock) {
+            if (!stopping) {
+          LOG.debug("Connecting to " + remoteAddr);
+              channelFuture = bootstrap.connect(remoteAddr);
+            }
+          }
+          if (channelFuture != null) {
+            try {
+              channelFuture.await(connectTimeoutMillis);
+            } catch (InterruptedException e) {
+              Thread.currentThread().interrupt(); // Reset interrupt flag
+              throw new IOException("Interrupted while connecting to " + 
+                  remoteAddr);
+            }
+
+            synchronized(channelFutureLock) {
+              if (!channelFuture.isSuccess()) {
+                throw new IOException("Error connecting to " + remoteAddr,
+                    channelFuture.getCause());
+              }
+              channel = channelFuture.getChannel();
+              channelFuture = null;
+            }
+          }
+        }
+      } finally {
+        // Downgrade to read lock:
+        stateLock.readLock().lock();
+        stateLock.writeLock().unlock();
+      }
+    }
+    return channel;
+  }
+  
+  /**
+   * Closes the connection to the remote peer if connected.
+   */
+  private void disconnect() {
+    disconnect(false, false, null);
+  }
+  
+  /**
+   * Closes the connection to the remote peer if connected.
+   * @param awaitCompletion if true, will block until the close has completed.
+   * @param cancelPendingRequests if true, will drain the requests map and 
+   * send an IOException to all Callbacks.
+   * @param cause if non-null and cancelPendingRequests is true, this Throwable 
+   * will be passed to all Callbacks.
+   */
+  private void disconnect(boolean awaitCompletion, boolean cancelPendingRequests,
+      Throwable cause) {
+    Channel channelToClose = null;
+    Map<Integer, Callback<List<ByteBuffer>>> requestsToCancel = null;
+    boolean stateReadLockHeld = stateLock.getReadHoldCount() != 0;
+
+    ChannelFuture channelFutureToCancel = null;
+    synchronized(channelFutureLock) {
+      if (stopping && channelFuture != null) {
+        channelFutureToCancel = channelFuture;
+        channelFuture = null;
+      }
+    }
+    if (channelFutureToCancel != null) {
+      channelFutureToCancel.cancel();
+    }
+    
+    if (stateReadLockHeld) {
+      stateLock.readLock().unlock();
+    }
+    stateLock.writeLock().lock();
+    try {
+      if (channel != null) {
+        if (cause != null) {
+          LOG.debug("Disconnecting from " + remoteAddr, cause);
+        }
+        else {
+          LOG.debug("Disconnecting from " + remoteAddr);
+        }
+        channelToClose = channel;
+        channel = null;
+        remote = null;
+        if (cancelPendingRequests) {
+          // Remove all pending requests (will be canceled after relinquishing 
+          // write lock).
+          requestsToCancel = 
+            new ConcurrentHashMap<Integer, Callback<List<ByteBuffer>>>(requests);
+          requests.clear();
+        }
+      }
+    } finally {
+      if (stateReadLockHeld) {
+        stateLock.readLock().lock();
+      }
+      stateLock.writeLock().unlock();
+    }
+    
+    // Cancel any pending requests by sending errors to the callbacks:
+    if ((requestsToCancel != null) && !requestsToCancel.isEmpty()) {
+      LOG.debug("Removing " + requestsToCancel.size() + " pending request(s).");
+      for (Callback<List<ByteBuffer>> request : requestsToCancel.values()) {
+        request.handleError(
+            cause != null ? cause : 
+              new IOException(getClass().getSimpleName() + " closed"));
+      }
+    }
+    
+    // Close the channel:
+    if (channelToClose != null) {
+      ChannelFuture closeFuture = channelToClose.close();
+      if (awaitCompletion && (closeFuture != null)) {
+        try {
+          closeFuture.await(connectTimeoutMillis);
+        } catch (InterruptedException e) {
+          Thread.currentThread().interrupt();   // Reset interrupt flag
+          LOG.warn("Interrupted while disconnecting", e);
+        }
+      }
+    }
+  }
+  
+  /**
+   * Netty channels are thread-safe, so there is no need to acquire locks.
+   * This method is a no-op.
+   */
+  @Override
+  public void lockChannel() {
+    
+  }
+  
+  /**
+   * Netty channels are thread-safe, so there is no need to acquire locks.
+   * This method is a no-op.
+   */
+  @Override
+  public void unlockChannel() {
+    
+  }
+
+  /**
+   * Closes this transceiver and disconnects from the remote peer.
+   * Cancels all pending RPCs, sends an IOException to all pending callbacks, 
+   * and blocks until the close has completed.
+   */
+  @Override
+  public void close() {
+    close(true);
+  }
+  
+  /**
+   * Closes this transceiver and disconnects from the remote peer.
+   * Cancels all pending RPCs and sends an IOException to all pending callbacks.
+   * @param awaitCompletion if true, will block until the close has completed.
+   */
+  public void close(boolean awaitCompletion) {
+    try {
+      // Close the connection:
+      stopping = true;
+      disconnect(awaitCompletion, true, null);
+    } finally {
+      // Shut down all thread pools to exit.
+      channelFactory.releaseExternalResources();
+    }
+  }
+
+  @Override
+  public String getRemoteName() throws IOException {
+    stateLock.readLock().lock();
+    try {
+      return getChannel().getRemoteAddress().toString();
+    } finally {
+      stateLock.readLock().unlock();
+    }
+  }
+
+  /**
+   * Override as non-synchronized method because the method is thread safe.
+   */
+  @Override
+  public List<ByteBuffer> transceive(List<ByteBuffer> request) 
+    throws IOException {
+    try {
+      CallFuture<List<ByteBuffer>> transceiverFuture = new CallFuture<List<ByteBuffer>>();
+      transceive(request, transceiverFuture);
+      return transceiverFuture.get();
+    } catch (InterruptedException e) {
+      LOG.debug("failed to get the response", e);
+      return null;
+    } catch (ExecutionException e) {
+      LOG.debug("failed to get the response", e);
+      return null;
+    }
+  }
+  
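+  /**
+   * Asynchronous call: the callback (for example a {@link CallFuture}) is
+   * stored under a per-call serial number and completed by
+   * {@link NettyClientAvroHandler} when the response with the matching
+   * serial arrives.
+   */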
+  @Override
+  public void transceive(List<ByteBuffer> request, 
+      Callback<List<ByteBuffer>> callback) throws IOException {
+    stateLock.readLock().lock();
+    try {
+      int serial = serialGenerator.incrementAndGet();
+      NettyDataPack dataPack = new NettyDataPack(serial, request);
+      requests.put(serial, callback);
+      writeDataPack(dataPack);
+    } finally {
+      stateLock.readLock().unlock();
+    }
+  }
+  
+  @Override
+  public void writeBuffers(List<ByteBuffer> buffers) throws IOException {
+    ChannelFuture writeFuture;
+    stateLock.readLock().lock();
+    try {
+      writeFuture = writeDataPack(
+          new NettyDataPack(serialGenerator.incrementAndGet(), buffers));
+    } finally {
+      stateLock.readLock().unlock();
+    }
+    
+    if (!writeFuture.isDone()) {
+      try {
+        writeFuture.await();
+      } catch (InterruptedException e) {
+        Thread.currentThread().interrupt();   // Reset interrupt flag
+        throw new IOException("Interrupted while writing Netty data pack", e);
+      }
+    }
+    if (!writeFuture.isSuccess()) {
+      throw new IOException("Error writing buffers", writeFuture.getCause());
+    }
+  }
+  
+  /**
+   * Writes a NettyDataPack, reconnecting to the remote peer if necessary.
+   * NOTE: The stateLock read lock *must* be acquired before calling this 
+   * method.
+   * @param dataPack the data pack to write.
+   * @return the Netty ChannelFuture for the write operation.
+   * @throws IOException if an error occurs connecting to the remote peer.
+   */
+  private ChannelFuture writeDataPack(NettyDataPack dataPack) throws IOException {
+    return getChannel().write(dataPack);
+  }
+
+  @Override
+  public List<ByteBuffer> readBuffers() throws IOException {
+    throw new UnsupportedOperationException();  
+  }
+  
+  @Override
+  public Protocol getRemote() {
+    stateLock.readLock().lock();
+    try {
+      return remote;
+    } finally {
+      stateLock.readLock().unlock();
+    }
+  }
+
+  @Override
+  public boolean isConnected() {
+    stateLock.readLock().lock();
+    try {
+      return remote != null;
+    } finally {
+      stateLock.readLock().unlock();
+    }
+  }
+
+  @Override
+  public void setRemote(Protocol protocol) {
+    stateLock.writeLock().lock();
+    try {
+      this.remote = protocol;
+    } finally {
+      stateLock.writeLock().unlock();
+    }
+  }
+  
+  /**
+   * A ChannelFutureListener for channel write operations that notifies 
+   * a {@link Callback} if an error occurs while writing to the channel.
+   */
+  protected class WriteFutureListener implements ChannelFutureListener {
+    protected final Callback<List<ByteBuffer>> callback;
+    
+    /**
+     * Creates a WriteFutureListener that notifies the given callback 
+     * if an error occurs writing data to the channel.
+     * @param callback the callback to notify, or null to skip notification.
+     */
+    public WriteFutureListener(Callback<List<ByteBuffer>> callback) {
+      this.callback = callback;
+    }
+    
+    @Override
+    public void operationComplete(ChannelFuture future) throws Exception {
+      if (!future.isSuccess() && (callback != null)) {
+        callback.handleError(
+            new IOException("Error writing buffers", future.getCause()));
+      }
+    }
+  }
+
+  /**
+   * Avro client handler for the Netty transport 
+   */
+  protected class NettyClientAvroHandler extends SimpleChannelUpstreamHandler {
+
+    @Override
+    public void handleUpstream(ChannelHandlerContext ctx, ChannelEvent e)
+        throws Exception {
+      if (e instanceof ChannelStateEvent) {
+        LOG.debug(e.toString());
+        ChannelStateEvent cse = (ChannelStateEvent)e;
+        if ((cse.getState() == ChannelState.OPEN) && (Boolean.FALSE.equals(cse.getValue()))) {
+          // Server closed connection; disconnect client side
+          LOG.debug("Remote peer " + remoteAddr + " closed connection.");
+          disconnect(false, true, null);
+        }
+      }
+      super.handleUpstream(ctx, e);
+    }
+
+    @Override
+    public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent e)
+        throws Exception {
+      // channel = e.getChannel();
+      super.channelOpen(ctx, e);
+    }
+
+    @Override
+    public void messageReceived(ChannelHandlerContext ctx, final MessageEvent e) {
+      NettyDataPack dataPack = (NettyDataPack)e.getMessage();
+      Callback<List<ByteBuffer>> callback = requests.get(dataPack.getSerial());
+      if (callback == null) {
+        throw new RuntimeException("Missing previous call info");
+      }
+      try {
+        callback.handleResult(dataPack.getDatas());
+      } finally {
+        requests.remove(dataPack.getSerial());
+      }
+    }
+
+    @Override
+    public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) {
+      disconnect(false, true, e.getCause());      
+    }
+
+  }
+
+  /**
+   * Creates threads with unique names based on a specified name prefix.
+   */
+  protected static class NettyTransceiverThreadFactory implements ThreadFactory {
+    private final AtomicInteger threadId = new AtomicInteger(0);
+    private final String prefix;
+    
+    /**
+     * Creates a NettyTransceiverThreadFactory that creates threads with the 
+     * specified name prefix.
+     * @param prefix the name prefix to use for all threads created by this 
+     * ThreadFactory.  A unique ID will be appended to this prefix to form the 
+     * final thread name.
+     */
+    public NettyTransceiverThreadFactory(String prefix) {
+      this.prefix = prefix;
+    }
+    
+    @Override
+    public Thread newThread(Runnable r) {
+      Thread thread = new Thread(r);
+      thread.setName(prefix + " " + threadId.incrementAndGet());
+      return thread;
+    }
+  }
+}
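
Note: NettyTransceiverThreadFactory above only customizes thread names. As a
usage sketch (same-package access assumed, since the factory is a protected
inner class; the pool and prefix here are illustrative, not part of the patch):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    ExecutorService pool = Executors.newCachedThreadPool(
        new NettyTransceiver.NettyTransceiverThreadFactory(
            "Avro Netty Transceiver Boss"));
    // Each worker thread is named "Avro Netty Transceiver Boss <N>",
    // where <N> comes from the factory's atomic counter.
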
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransportCodec.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransportCodec.java
new file mode 100644
index 0000000..1668c5e
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/NettyTransportCodec.java
@@ -0,0 +1,202 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.avro.AvroRuntimeException;
+
+import org.jboss.netty.buffer.ChannelBuffer;
+import org.jboss.netty.buffer.ChannelBuffers;
+import org.jboss.netty.channel.Channel;
+import org.jboss.netty.channel.ChannelHandlerContext;
+import org.jboss.netty.handler.codec.frame.FrameDecoder;
+import org.jboss.netty.handler.codec.oneone.OneToOneEncoder;
+
+/**
+ * Data structure, encoder and decoder classes for the Netty transport. 
+ */
+public class NettyTransportCodec {
+  /**
+   * Transport protocol data structure when using Netty. 
+   */
+  public static class NettyDataPack {
+    private int serial; // serial number to track each call on the client side
+    private List<ByteBuffer> datas;
+
+    public NettyDataPack() {}
+    
+    public NettyDataPack(int serial, List<ByteBuffer> datas) {
+      this.serial = serial;
+      this.datas = datas;
+    }
+    
+    public void setSerial(int serial) {
+      this.serial = serial;
+    }
+
+    public int getSerial() {
+      return serial;
+    }
+    
+    public void setDatas(List<ByteBuffer> datas) {
+      this.datas = datas;
+    }
+
+    public List<ByteBuffer> getDatas() {
+      return datas;
+    }
+    
+  }
+  
+  /**
+   * Protocol encoder that converts a NettyDataPack, which contains the
+   * Responder's output List<ByteBuffer>, into the ChannelBuffer needed
+   * by Netty.
+   */
+  public static class NettyFrameEncoder extends OneToOneEncoder {
+
+    /**
+     * Encodes a NettyDataPack into a ChannelBuffer.
+     * @param msg the NettyDataPack from
+     *            NettyServerAvroHandler/NettyClientAvroHandler in the pipeline
+     * @return the encoded ChannelBuffer
+     */
+    @Override
+    protected Object encode(ChannelHandlerContext ctx, Channel channel, Object msg)
+        throws Exception {
+      NettyDataPack dataPack = (NettyDataPack)msg;
+      List<ByteBuffer> origs = dataPack.getDatas();
+      List<ByteBuffer> bbs = new ArrayList<ByteBuffer>(origs.size() * 2 + 1);
+      bbs.add(getPackHeader(dataPack)); // prepend a pack header including serial number and list size
+      for (ByteBuffer b : origs) {
+        bbs.add(getLengthHeader(b)); // for each buffer prepend length field
+        bbs.add(b);
+      }
+
+      return ChannelBuffers
+          .wrappedBuffer(bbs.toArray(new ByteBuffer[bbs.size()]));
+    }
+    
+    private ByteBuffer getPackHeader(NettyDataPack dataPack) {
+      ByteBuffer header = ByteBuffer.allocate(8);
+      header.putInt(dataPack.getSerial());
+      header.putInt(dataPack.getDatas().size());
+      header.flip();
+      return header;
+    }
+
+    private ByteBuffer getLengthHeader(ByteBuffer buf) {
+      ByteBuffer header = ByteBuffer.allocate(4);
+      header.putInt(buf.limit());
+      header.flip();
+      return header;
+    }
+  }
+
+  /**
+   * Protocol decoder that converts Netty's ChannelBuffer into the
+   * NettyDataPack containing the List<ByteBuffer> needed by the
+   * Avro Responder.
+   */
+  public static class NettyFrameDecoder extends FrameDecoder {
+    private boolean packHeaderRead = false;
+    private int listSize;
+    private NettyDataPack dataPack;
+    private final long maxMem;
+    private static final long SIZEOF_REF = 8L; // mem usage of 64-bit pointer
+
+
+    public NettyFrameDecoder() {
+      maxMem = Runtime.getRuntime().maxMemory();
+    }
+    
+    /**
+     * Decodes the buffer into a NettyDataPack.
+     */
+    @Override
+    protected Object decode(ChannelHandlerContext ctx, Channel channel,
+        ChannelBuffer buffer) throws Exception {
+
+      if (!packHeaderRead) {
+        if (decodePackHeader(ctx, channel, buffer)) {
+          packHeaderRead = true;
+        }
+        return null;
+      } else {
+        if (decodePackBody(ctx, channel, buffer)) {
+          packHeaderRead = false; // reset state
+          return dataPack;
+        } else {
+          return null;
+        }
+      }
+      
+    }
+    
+    private boolean decodePackHeader(ChannelHandlerContext ctx, Channel channel,
+        ChannelBuffer buffer) throws Exception {
+      if (buffer.readableBytes() < 8) {
+        return false;
+      }
+
+      int serial = buffer.readInt();
+      int listSize = buffer.readInt();
+
+      // Sanity check to reduce likelihood of invalid requests being honored.
+      // Only allow 10% of available memory to go towards this list (too much!)
+      if (listSize * SIZEOF_REF > 0.1 * maxMem) {
+        channel.close().await();
+        throw new AvroRuntimeException("Excessively large list allocation " +
+            "request detected: " + listSize + " items! Connection closed.");
+      }
+
+      this.listSize = listSize;
+      dataPack = new NettyDataPack(serial, new ArrayList<ByteBuffer>(listSize));
+
+      return true;
+    }
+    
+    private boolean decodePackBody(ChannelHandlerContext ctx, Channel channel,
+        ChannelBuffer buffer) throws Exception {
+      if (buffer.readableBytes() < 4) {
+        return false;
+      }
+
+      buffer.markReaderIndex();
+      
+      int length = buffer.readInt();
+
+      if (buffer.readableBytes() < length) {
+        buffer.resetReaderIndex();
+        return false;
+      }
+
+      ByteBuffer bb = ByteBuffer.allocate(length);
+      buffer.readBytes(bb);
+      bb.flip();
+      dataPack.getDatas().add(bb);
+      
+      return dataPack.getDatas().size() == listSize;
+    }
+
+  }
+  
+}
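
Read together, NettyFrameEncoder and NettyFrameDecoder define a simple
length-prefixed framing: a pack header of two ints (serial, buffer count)
followed by each buffer with a four-byte length prefix. A sketch of
assembling one such frame by hand (serial 42 and the three payload bytes
are illustrative):

    import java.nio.ByteBuffer;
    import java.util.Arrays;
    import java.util.List;

    // [int serial][int count] then ([int length][bytes]) for each buffer
    List<ByteBuffer> datas = Arrays.asList(ByteBuffer.wrap(new byte[]{1, 2, 3}));
    ByteBuffer frame = ByteBuffer.allocate(8 + 4 + 3);
    frame.putInt(42);              // serial number of the call
    frame.putInt(datas.size());    // how many buffers follow
    for (ByteBuffer b : datas) {
      frame.putInt(b.remaining()); // per-buffer length prefix
      frame.put(b.duplicate());    // buffer contents
    }
    frame.flip();                  // frame now matches the encoder's output
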
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCContext.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCContext.java
new file mode 100644
index 0000000..99a88ac
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCContext.java
@@ -0,0 +1,215 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc;
+
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.avro.Protocol.Message;
+
+/**
+ * This class represents the context of an RPC call or RPC handshake.
+ * Designed to provide information to RPC plugin writers,
+ * this class encapsulates information about the RPC exchange,
+ * including handshake and call metadata. Note: this data includes
+ * full copies of the RPC payload, so plugins that store RPCContexts
+ * beyond the life of each call should be conscious of memory use.
+ *
+ */
+public class RPCContext {
+  
+  private HandshakeRequest handshakeRequest;
+  private HandshakeResponse handshakeResponse;
+
+  protected Map<String,ByteBuffer> requestCallMeta, responseCallMeta;
+  
+  protected Object response;
+  protected Exception error;
+  private Message message;
+  List<ByteBuffer> requestPayload;
+  List<ByteBuffer> responsePayload;
+  
+  /** Set the handshake request of this RPC. */
+  public void setHandshakeRequest(HandshakeRequest handshakeRequest) {
+    this.handshakeRequest = handshakeRequest;
+  }
+  
+  /** Get the handshake request of this RPC. */
+  public HandshakeRequest getHandshakeRequest() {
+    return this.handshakeRequest;
+  }
+  
+  /** Set the handshake response of this RPC. */
+  public void setHandshakeResponse(HandshakeResponse handshakeResponse) {
+    this.handshakeResponse = handshakeResponse;
+  }
+  
+  /** Get the handshake response of this RPC. */
+  public HandshakeResponse getHandshakeResponse() {
+    return this.handshakeResponse;
+  }
+
+  /**
+   * This is an access method for the handshake state
+   * provided by the client to the server.
+   * @return a map representing handshake state from
+   * the client to the server
+   */
+  public Map<String,ByteBuffer> requestHandshakeMeta() {
+    if (handshakeRequest.meta == null)
+      handshakeRequest.meta = new HashMap<String,ByteBuffer>();
+    return handshakeRequest.meta;
+  }
+  
+  void setRequestHandshakeMeta(Map<String,ByteBuffer> newmeta) {
+    handshakeRequest.meta = newmeta;
+  }
+  
+  /**
+   * This is an access method for the handshake state
+   * provided by the server back to the client
+   * @return a map representing handshake state from
+   * the server to the client
+   */
+  public Map<String,ByteBuffer> responseHandshakeMeta() {
+    if (handshakeResponse.meta == null)
+      handshakeResponse.meta = new HashMap<String,ByteBuffer>();
+    return handshakeResponse.meta;
+  }
+  
+  void setResponseHandshakeMeta(Map<String,ByteBuffer> newmeta) {
+    handshakeResponse.meta = newmeta;
+  }
+  
+  /**
+   * This is an access method for the per-call state
+   * provided by the client to the server.
+   * @return a map representing per-call state from
+   * the client to the server
+   */
+  public Map<String,ByteBuffer> requestCallMeta() {
+    if (requestCallMeta == null) {
+      requestCallMeta = new HashMap<String,ByteBuffer>();
+    }
+    return requestCallMeta;
+  }
+  
+  void setRequestCallMeta(Map<String,ByteBuffer> newmeta) {
+    requestCallMeta = newmeta;
+  }
+  
+  /**
+   * This is an access method for the per-call state
+   * provided by the server back to the client.
+   * @return a map representing per-call state from
+   * the server to the client
+   */
+  public Map<String,ByteBuffer> responseCallMeta() {
+    if (responseCallMeta == null) {
+      responseCallMeta = new HashMap<String,ByteBuffer>();
+    }
+    return responseCallMeta;
+  }
+  
+  void setResponseCallMeta(Map<String,ByteBuffer> newmeta) {
+    responseCallMeta = newmeta;
+  }
+  
+  void setResponse(Object response) {
+    this.response = response;
+    this.error = null;
+  }
+  
+  /**
+   * The response object generated at the server,
+   * if it exists.  If an exception was generated,
+   * this will be null.
+   * @return the response created by this RPC, or
+   * null if an exception was generated
+   */
+  public Object response() {
+    return response;
+  }
+  
+  void setError(Exception error) {
+    this.response = null;
+    this.error = error;
+  }
+  
+  /**
+   * The exception generated at the server,
+   * or null if no such exception has occurred.
+   * @return the exception generated at the server, or
+   * null if no such exception
+   */
+  public Exception error() {
+    return error;
+  }
+  
+  /**
+   * Indicates whether an exception was generated
+   * at the server.
+   * @return true if an exception was generated at
+   * the server, or false if not
+   */
+  public boolean isError() {
+    return error != null;
+  }
+  
+  /** Sets the {@link Message} corresponding to this RPC */
+  public void setMessage(Message message) {
+    this.message = message;    
+  }
+  
+  /** Returns the {@link Message} corresponding to this RPC
+   * @return this RPC's {@link Message} 
+   */
+  public Message getMessage() { return message; }
+  
+  /** Sets the serialized payload of the request in this RPC. Will
+   * not include handshake or meta-data. */
+  public void setRequestPayload(List<ByteBuffer> payload) {
+    this.requestPayload = payload;
+  }
+ 
+  /** Returns the serialized payload of the request in this RPC. Will only be
+   * generated from a Requestor and will not include handshake or meta-data. 
+   * If the request payload has not been set yet, returns null.
+   * 
+   * @return this RPC's request payload.*/
+  public List<ByteBuffer> getRequestPayload() {
+    return this.requestPayload;
+  }
+  
+  /** Returns the serialized payload of the response in this RPC. Will only be
+   * generated from a Responder and will not include handshake or meta-data. 
+   * If the response payload has not been set yet, returns null.
+   * 
+   * @return this RPC's response payload.*/
+  public List<ByteBuffer> getResponsePayload() {
+    return this.responsePayload;
+  }
+  
+  /** Sets the serialized payload of the response in this RPC. Will
+   * not include handshake or meta-data. */
+  public void setResponsePayload(List<ByteBuffer> payload) {
+    this.responsePayload = payload;
+  }
+}
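
The lazily created metadata maps above are the channel plugins use to
piggyback state on a call. A sketch of tagging a request from plugin code
(the "trace-id" key and the tagRequest helper are illustrative, not part of
this patch):

    import java.nio.ByteBuffer;
    import java.nio.charset.Charset;

    static void tagRequest(RPCContext context) {
      // requestCallMeta() creates the map on first use, so no null check
      context.requestCallMeta().put("trace-id",
          ByteBuffer.wrap("abc123".getBytes(Charset.forName("UTF-8"))));
    }
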
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCPlugin.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCPlugin.java
new file mode 100644
index 0000000..64e1231
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCPlugin.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc;
+
+/**
+ * An instrumentation API for RPC metadata.  Each of these methods
+ * is invoked at key points during the RPC exchange.  Additionally,
+ * path-based <em>metadata</em> is passed along with the RPC call
+ * and can be set or queried by subsequent instrumentation points.
+ */
+public class RPCPlugin {
+  
+  /**
+   * Called on the client before the initial RPC handshake to
+   * set up any handshake metadata for this plugin
+   * @param context the handshake rpc context
+   */
+  public void clientStartConnect(RPCContext context) { }
+  
+  /**
+   * Called on the server during the RPC handshake
+   * @param context the handshake rpc context
+   */
+  public void serverConnecting(RPCContext context) { }
+  
+  /**
+   * Called on the client after the initial RPC handshake
+   * @param context the handshake rpc context
+   */
+  public void clientFinishConnect(RPCContext context) { }
+
+  /**
+   * This method is invoked at the client before it issues the RPC call.
+   * @param context the per-call rpc context (in/out parameter)
+   */
+  public void clientSendRequest(RPCContext context) { }
+ 
+  
+  /**
+   * This method is invoked at the RPC server when the request is received,
+   * but before the call itself is executed
+   * @param context the per-call rpc context (in/out parameter)
+   */
+  public void serverReceiveRequest(RPCContext context) { }
+  
+  /**
+   * This method is invoked at the server after the response is generated,
+   * but before it is sent back to the client
+   * @param context the per-call rpc context (in/out parameter)
+   */
+  public void serverSendResponse(RPCContext context) { }
+  
+  /**
+   * This method is invoked at the client after the call is executed,
+   * and after the client receives the response
+   * @param context the per-call rpc context
+   */
+  public void clientReceiveResponse(RPCContext context) { }
+
+  
+}
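
Because every hook has an empty default body, a plugin overrides only the
points it needs. A sketch of a call-counting plugin (the class name and
counter are illustrative); it would be registered with addRPCPlugin() on a
Requestor or Responder:

    import java.util.concurrent.atomic.AtomicLong;

    public class CallCountPlugin extends RPCPlugin {
      private final AtomicLong calls = new AtomicLong();

      @Override
      public void serverReceiveRequest(RPCContext context) {
        calls.incrementAndGet();  // one increment per request received
      }

      public long count() { return calls.get(); }
    }
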
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Requestor.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Requestor.java
new file mode 100644
index 0000000..5379945
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Requestor.java
@@ -0,0 +1,563 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.avro.AvroRemoteException;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Protocol;
+import org.apache.avro.Schema;
+import org.apache.avro.Protocol.Message;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.BinaryDecoder;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.specific.SpecificDatumWriter;
+import org.apache.avro.util.ByteBufferInputStream;
+import org.apache.avro.util.ByteBufferOutputStream;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Base class for the client side of a protocol interaction. */
+public abstract class Requestor {
+  private static final Logger LOG = LoggerFactory.getLogger(Requestor.class);
+
+  private static final Schema META =
+    Schema.createMap(Schema.create(Schema.Type.BYTES));
+  private static final GenericDatumReader<Map<String,ByteBuffer>>
+    META_READER = new GenericDatumReader<Map<String,ByteBuffer>>(META);
+  private static final GenericDatumWriter<Map<String,ByteBuffer>>
+    META_WRITER = new GenericDatumWriter<Map<String,ByteBuffer>>(META);
+
+  private final Protocol local;
+  private volatile Protocol remote;
+  private volatile boolean sendLocalText;
+  private final Transceiver transceiver;
+  private final ReentrantLock handshakeLock = new ReentrantLock();
+  
+  protected final List<RPCPlugin> rpcMetaPlugins;
+
+  public Protocol getLocal() { return local; }
+  public Transceiver getTransceiver() { return transceiver; }
+
+  protected Requestor(Protocol local, Transceiver transceiver)
+    throws IOException {
+    this.local = local;
+    this.transceiver = transceiver;
+    this.rpcMetaPlugins =
+      new CopyOnWriteArrayList<RPCPlugin>();
+  }
+  
+  /**
+   * Adds a new plugin to manipulate RPC metadata.  Plugins
+   * are executed in the order that they are added.
+   * @param plugin a plugin that will manipulate RPC metadata
+   */
+  public void addRPCPlugin(RPCPlugin plugin) {
+    rpcMetaPlugins.add(plugin);
+  }
+
+  private static final EncoderFactory ENCODER_FACTORY = new EncoderFactory();
+  
+  /** Writes a request message and reads a response or error message. */
+  public Object request(String messageName, Object request)
+    throws Exception {
+    // Initialize request
+    Request rpcRequest = new Request(messageName, request, new RPCContext());
+    CallFuture<Object> future = /* only need a Future for two-way messages */
+      rpcRequest.getMessage().isOneWay() ? null : new CallFuture<Object>();
+    
+    // Send request
+    request(rpcRequest, future);
+    
+    if (future == null)        // the message is one-way, so return immediately
+      return null;
+    try {                      // the message is two-way, wait for the result
+      return future.get();
+    } catch (ExecutionException e) {
+      if (e.getCause() instanceof Exception) {
+        throw (Exception)e.getCause();
+      } else {
+        throw new AvroRemoteException(e.getCause());
+      }
+    }
+  }
+  
+  /**
+   * Writes a request message and returns the result through a Callback.
+   * Clients can also use a Future interface by creating a new CallFuture<T>,
+   * passing it in as the Callback parameter, and then waiting on that Future.
+   * @param <T> the return type of the message.
+   * @param messageName the name of the message to invoke.
+   * @param request the request data to send.
+   * @param callback the callback which will be invoked when the response is returned 
+   * or an error occurs.
+   * @throws Exception if an error occurs sending the message.
+   */
+  public <T> void request(String messageName, Object request, Callback<T> callback) 
+    throws Exception {
+    request(new Request(messageName, request, new RPCContext()), callback);
+  }
+  
+  /** Writes a request message and returns the result through a Callback. */
+  <T> void request(Request request, Callback<T> callback)
+    throws Exception {
+    Transceiver t = getTransceiver();
+    if (!t.isConnected()) {
+      // Acquire handshake lock so that only one thread is performing the
+      // handshake and other threads block until the handshake is completed
+      handshakeLock.lock();
+      try {
+        if (t.isConnected()) {
+          // Another thread already completed the handshake; no need to hold
+          // the write lock
+          handshakeLock.unlock();
+        } else {
+          CallFuture<T> callFuture = new CallFuture<T>(callback);
+          t.transceive(request.getBytes(),
+                       new TransceiverCallback<T>(request, callFuture));
+          // Block until handshake complete
+          callFuture.await();
+          if (request.getMessage().isOneWay()) {
+            Throwable error = callFuture.getError();
+            if (error != null) {
+              if (error instanceof Exception) {
+                throw (Exception) error;
+              } else {
+                throw new AvroRemoteException(error);
+              }
+            }
+          }
+          return;
+        }
+      } finally {
+        if (handshakeLock.isHeldByCurrentThread()) {
+          handshakeLock.unlock();
+        }
+      }
+    }
+    
+    if (request.getMessage().isOneWay()) {
+      t.lockChannel();
+      try {
+        t.writeBuffers(request.getBytes());
+        if (callback != null) {
+          callback.handleResult(null);
+        }
+      } finally {
+        t.unlockChannel();
+      }
+    } else {
+      t.transceive(request.getBytes(),
+                   new TransceiverCallback<T>(request, callback));
+    }
+    
+  }
+
+  private static final ConcurrentMap<String,MD5> REMOTE_HASHES =
+    new ConcurrentHashMap<String,MD5>();
+  private static final ConcurrentMap<MD5,Protocol> REMOTE_PROTOCOLS =
+    new ConcurrentHashMap<MD5,Protocol>();
+
+  private static final SpecificDatumWriter<HandshakeRequest> HANDSHAKE_WRITER =
+    new SpecificDatumWriter<HandshakeRequest>(HandshakeRequest.class);
+
+  private static final SpecificDatumReader<HandshakeResponse> HANDSHAKE_READER =
+    new SpecificDatumReader<HandshakeResponse>(HandshakeResponse.class);
+
+  private void writeHandshake(Encoder out) throws IOException {
+    if (getTransceiver().isConnected()) return;
+    MD5 localHash = new MD5();
+    localHash.bytes(local.getMD5());
+    String remoteName = transceiver.getRemoteName();
+    MD5 remoteHash = REMOTE_HASHES.get(remoteName);
+    if (remoteHash == null) {                     // guess remote is local
+      remoteHash = localHash;
+      remote = local;
+    } else {
+      remote = REMOTE_PROTOCOLS.get(remoteHash);
+    }
+    HandshakeRequest handshake = new HandshakeRequest();
+    handshake.clientHash = localHash;
+    handshake.serverHash = remoteHash;
+    if (sendLocalText)
+      handshake.clientProtocol = local.toString();
+    
+    RPCContext context = new RPCContext();
+    context.setHandshakeRequest(handshake);
+    for (RPCPlugin plugin : rpcMetaPlugins) {
+      plugin.clientStartConnect(context);
+    }
+    handshake.meta = context.requestHandshakeMeta();
+    
+    HANDSHAKE_WRITER.write(handshake, out);
+  }
+
+  private boolean readHandshake(Decoder in) throws IOException {
+    if (getTransceiver().isConnected()) return true;
+    boolean established = false;
+    HandshakeResponse handshake = HANDSHAKE_READER.read(null, in);
+    switch (handshake.match) {
+    case BOTH:
+      established = true;
+      sendLocalText = false;
+      break;
+    case CLIENT:
+      LOG.debug("Handshake match = CLIENT");
+      setRemote(handshake);
+      established = true;
+      sendLocalText = false;
+      break;
+    case NONE:
+      LOG.debug("Handshake match = NONE");
+      setRemote(handshake);
+      sendLocalText = true;
+      break;
+    default:
+      throw new AvroRuntimeException("Unexpected match: "+handshake.match);
+    }
+    
+    RPCContext context = new RPCContext();
+    context.setHandshakeResponse(handshake);
+    for (RPCPlugin plugin : rpcMetaPlugins) {
+      plugin.clientFinishConnect(context);
+    }
+    if (established)
+      getTransceiver().setRemote(remote);
+    return established;
+  }
+
+  private void setRemote(HandshakeResponse handshake) throws IOException {
+    remote = Protocol.parse(handshake.serverProtocol.toString());
+    MD5 remoteHash = (MD5)handshake.serverHash;
+    REMOTE_HASHES.put(transceiver.getRemoteName(), remoteHash);
+    REMOTE_PROTOCOLS.putIfAbsent(remoteHash, remote);
+  }
+
+  /** Return the remote protocol.  Force a handshake if required. */
+  public Protocol getRemote() throws IOException {
+    if (remote != null) return remote;            // already have it
+    MD5 remoteHash = REMOTE_HASHES.get(transceiver.getRemoteName());
+    if (remoteHash != null) {
+      remote = REMOTE_PROTOCOLS.get(remoteHash);
+      if (remote != null) return remote;            // already cached
+    }
+    handshakeLock.lock();
+    try {
+      // force handshake
+      ByteBufferOutputStream bbo = new ByteBufferOutputStream();
+      // direct because the payload is tiny.
+      Encoder out = ENCODER_FACTORY.directBinaryEncoder(bbo, null);
+      writeHandshake(out);
+      out.writeInt(0);                              // empty metadata
+      out.writeString("");                          // bogus message name
+      List<ByteBuffer> response =
+        getTransceiver().transceive(bbo.getBufferList());
+      ByteBufferInputStream bbi = new ByteBufferInputStream(response);
+      BinaryDecoder in =
+        DecoderFactory.get().binaryDecoder(bbi, null);
+      readHandshake(in);
+      return this.remote;
+    } finally {
+      handshakeLock.unlock();
+    }
+  }
+
+
+  /** Writes a request message. */
+  public abstract void writeRequest(Schema schema, Object request,
+                                    Encoder out) throws IOException;
+
+  @Deprecated                                     // for compatibility in 1.5
+  public Object readResponse(Schema schema, Decoder in) throws IOException {
+    return readResponse(schema, schema, in);
+  }
+
+  /** Reads a response message. */
+  public abstract Object readResponse(Schema writer, Schema reader, Decoder in)
+    throws IOException;
+
+  @Deprecated                                     // for compatibility in 1.5
+  public Object readError(Schema schema, Decoder in) throws IOException {
+    return readError(schema, schema, in);
+  }
+
+  /** Reads an error message. */
+  public abstract Exception readError(Schema writer, Schema reader, Decoder in)
+    throws IOException;
+  
+  /**
+   * Handles callbacks from transceiver invocations.
+   */
+  protected class TransceiverCallback<T> implements Callback<List<ByteBuffer>> {
+    private final Request request;
+    private final Callback<T> callback;
+    
+    /**
+     * Creates a TransceiverCallback.
+     * @param request the request to set.
+     * @param callback the callback to set.
+     */
+    public TransceiverCallback(Request request, Callback<T> callback) {
+      this.request = request;
+      this.callback = callback;
+    }
+    
+    @Override
+    @SuppressWarnings("unchecked")
+    public void handleResult(List<ByteBuffer> responseBytes) {
+      ByteBufferInputStream bbi = new ByteBufferInputStream(responseBytes);
+      BinaryDecoder in = DecoderFactory.get().binaryDecoder(bbi, null);
+      try {
+        if (!readHandshake(in)) {
+          // Resend the handshake and return
+          Request handshake = new Request(request);
+          getTransceiver().transceive
+            (handshake.getBytes(),
+             new TransceiverCallback<T>(handshake, callback));
+          return;
+        }
+      } catch (Exception e) {
+        LOG.error("Error handling transceiver callback: " + e, e);
+      }
+      
+      // Read response; invoke callback
+      Response response = new Response(request, in);
+      Object responseObject;
+      try {
+        try {
+          responseObject = response.getResponse();
+        } catch (Exception e) {
+          if (callback != null) {
+            callback.handleError(e);
+          }
+          return;
+        }
+        if (callback != null) {
+          callback.handleResult((T)responseObject);
+        }
+      } catch (Throwable t) {
+        LOG.error("Error in callback handler: " + t, t);
+      }
+    }
+    
+    @Override
+    public void handleError(Throwable error) {
+      callback.handleError(error);
+    }
+  }
+  
+  /**
+   * Encapsulates/generates a request.
+   */
+  class Request {
+    private final String messageName;
+    private final Object request;
+    private final RPCContext context;
+    private final BinaryEncoder encoder;
+    private Message message;
+    private List<ByteBuffer> requestBytes;
+    
+    /**
+     * Creates a Request.
+     * @param messageName the name of the message to invoke.
+     * @param request the request data to send.
+     * @param context the RPC context to use.
+     */
+    public Request(String messageName, Object request, RPCContext context) {
+      this(messageName, request, context, null);
+    }
+    
+    /**
+     * Creates a Request.
+     * @param messageName the name of the message to invoke.
+     * @param request the request data to send.
+     * @param context the RPC context to use.
+     * @param encoder the BinaryEncoder to use to serialize the request.
+     */
+    public Request(String messageName, Object request, RPCContext context,
+                   BinaryEncoder encoder) {
+      this.messageName = messageName;
+      this.request = request;
+      this.context = context;
+      this.encoder =
+        ENCODER_FACTORY.binaryEncoder(new ByteBufferOutputStream(), encoder);
+    }
+    
+    /**
+     * Copy constructor.
+     * @param other Request from which to copy fields.
+     */
+    public Request(Request other) {
+      this.messageName = other.messageName;
+      this.request = other.request;
+      this.context = other.context;
+      this.encoder = other.encoder;
+    }
+    
+    /**
+     * Gets the message name.
+     * @return the message name.
+     */
+    public String getMessageName() {
+      return messageName;
+    }
+    
+    /**
+     * Gets the RPC context.
+     * @return the RPC context.
+     */
+    public RPCContext getContext() {
+      return context;
+    }
+    
+    /**
+     * Gets the Message associated with this request.
+     * @return this request's message.
+     */
+    public Message getMessage() {
+      if (message == null) {
+        message = getLocal().getMessages().get(messageName);
+        if (message == null) {
+          throw new AvroRuntimeException("Not a local message: "+messageName);
+        }
+      }
+      return message;
+    }
+    
+    /**
+     * Gets the request data, generating it first if necessary.
+     * @return the request data.
+     * @throws Exception if an error occurs generating the request data.
+     */
+    public List<ByteBuffer> getBytes() 
+      throws Exception {
+      if (requestBytes == null) {
+        ByteBufferOutputStream bbo = new ByteBufferOutputStream();
+        BinaryEncoder out = ENCODER_FACTORY.binaryEncoder(bbo, encoder);
+
+        // use local protocol to write request
+        Message m = getMessage();
+        context.setMessage(m);
+
+        writeRequest(m.getRequest(), request, out); // write request payload
+
+        out.flush();
+        List<ByteBuffer> payload = bbo.getBufferList();
+
+        writeHandshake(out);                     // prepend handshake if needed
+
+        context.setRequestPayload(payload);
+        for (RPCPlugin plugin : rpcMetaPlugins) {
+          plugin.clientSendRequest(context);      // get meta-data from plugins
+        }
+        META_WRITER.write(context.requestCallMeta(), out);
+
+        out.writeString(m.getName());             // write message name
+
+        out.flush();
+        bbo.append(payload);
+
+        requestBytes = bbo.getBufferList();
+      }
+      return requestBytes;
+    }
+  }
+  
+  /**
+   * Encapsulates/parses a response.
+   */
+  class Response {
+    private final Request request;
+    private final BinaryDecoder in;
+    
+    /**
+     * Creates a Response.
+     * @param request the Request associated with this response.
+     */
+    public Response(Request request) {
+      this(request, null);
+    }
+    
+    /**
+     * Creates a Response.
+     * @param request the Request associated with this response.
+     * @param in the BinaryDecoder to use to deserialize the response.
+     */
+    public Response(Request request, BinaryDecoder in) {
+      this.request = request;
+      this.in = in;
+    }
+    
+    /**
+     * Gets the RPC response, reading/deserializing it first if necessary.
+     * @return the RPC response.
+     * @throws Exception if an error occurs reading/deserializing the response.
+     */
+    public Object getResponse() 
+      throws Exception {
+      Message lm = request.getMessage();
+      Message rm = remote.getMessages().get(request.getMessageName());
+      if (rm == null)
+        throw new AvroRuntimeException
+          ("Not a remote message: "+request.getMessageName());
+
+      Transceiver t = getTransceiver();
+      if ((lm.isOneWay() != rm.isOneWay()) && t.isConnected())
+        throw new AvroRuntimeException
+          ("Not both one-way messages: "+request.getMessageName());
+
+      if (lm.isOneWay() && t.isConnected()) return null; // one-way w/ handshake
+      
+      RPCContext context = request.getContext();
+      context.setResponseCallMeta(META_READER.read(null, in));
+
+      if (!in.readBoolean()) {                      // no error
+        Object response = readResponse(rm.getResponse(), lm.getResponse(), in);
+        context.setResponse(response);
+        for (RPCPlugin plugin : rpcMetaPlugins) {
+          plugin.clientReceiveResponse(context);
+        }
+        return response;
+
+      } else {
+        Exception error = readError(rm.getErrors(), lm.getErrors(), in);
+        context.setError(error);
+        for (RPCPlugin plugin : rpcMetaPlugins) {
+          plugin.clientReceiveResponse(context);
+        }
+        throw error;
+      }
+    }
+  }
+}
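
As the request() javadoc notes, the callback API doubles as a Future API:
pass a CallFuture as the callback and wait on it. A sketch (the message
name, record, and requestor construction are placeholders):

    CallFuture<Object> future = new CallFuture<Object>();
    requestor.request("echo", echoRecord, future);  // returns immediately
    Object reply = future.get();                    // block for the response
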
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Responder.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Responder.java
new file mode 100644
index 0000000..6a1a3ff
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Responder.java
@@ -0,0 +1,260 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.UnresolvedUnionException;
+import org.apache.avro.Protocol;
+import org.apache.avro.Schema;
+import org.apache.avro.Protocol.Message;
+import org.apache.avro.util.ByteBufferInputStream;
+import org.apache.avro.util.ByteBufferOutputStream;
+import org.apache.avro.util.Utf8;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.specific.SpecificDatumWriter;
+
+/** Base class for the server side of a protocol interaction. */
+public abstract class Responder {
+  private static final Logger LOG = LoggerFactory.getLogger(Responder.class);
+
+  private static final Schema META =
+    Schema.createMap(Schema.create(Schema.Type.BYTES));
+  private static final GenericDatumReader<Map<String,ByteBuffer>>
+    META_READER = new GenericDatumReader<Map<String,ByteBuffer>>(META);
+  private static final GenericDatumWriter<Map<String,ByteBuffer>>
+    META_WRITER = new GenericDatumWriter<Map<String,ByteBuffer>>(META);
+
+  private static final ThreadLocal<Protocol> REMOTE =
+    new ThreadLocal<Protocol>();
+
+  private final Map<MD5,Protocol> protocols
+    = new ConcurrentHashMap<MD5,Protocol>();
+
+  private final Protocol local;
+  private final MD5 localHash;
+  protected final List<RPCPlugin> rpcMetaPlugins;
+
+  protected Responder(Protocol local) {
+    this.local = local;
+    this.localHash = new MD5();
+    localHash.bytes(local.getMD5());
+    protocols.put(localHash, local);
+    this.rpcMetaPlugins =
+      new CopyOnWriteArrayList<RPCPlugin>();
+  }
+
+  /** Return the remote protocol.  Accesses a {@link ThreadLocal} that's set
+   * around calls to {@link #respond(Protocol.Message, Object)}. */
+  public static Protocol getRemote() { return REMOTE.get(); }
+  
+  /** Return the local protocol. */
+  public Protocol getLocal() { return local; }
+  
+  /**
+   * Adds a new plugin to manipulate per-call metadata.  Plugins
+   * are executed in the order that they are added.
+   * @param plugin a plugin that will manipulate RPC metadata
+   */
+  public void addRPCPlugin(RPCPlugin plugin) {
+    rpcMetaPlugins.add(plugin);
+  }
+
+  /** Called by a server to deserialize a request, compute and serialize
+   * a response or error. */
+  public List<ByteBuffer> respond(List<ByteBuffer> buffers) throws IOException {
+    return respond(buffers, null);
+  }
+  
+  /** Called by a server to deserialize a request, compute and serialize a
+   * response or error.  The transceiver is used by connection-based servers
+   * to track the handshake status of the connection. */
+  public List<ByteBuffer> respond(List<ByteBuffer> buffers,
+                                  Transceiver connection) throws IOException {
+    Decoder in = DecoderFactory.get().binaryDecoder(
+        new ByteBufferInputStream(buffers), null);
+    ByteBufferOutputStream bbo = new ByteBufferOutputStream();
+    BinaryEncoder out = EncoderFactory.get().binaryEncoder(bbo, null);
+    Exception error = null;
+    RPCContext context = new RPCContext();
+    List<ByteBuffer> payload = null;
+    List<ByteBuffer> handshake = null;
+    boolean wasConnected = connection != null && connection.isConnected();
+    try {
+      Protocol remote = handshake(in, out, connection);
+      out.flush();
+      if (remote == null)                        // handshake failed
+        return bbo.getBufferList();
+      handshake = bbo.getBufferList();
+      
+      // read request using remote protocol specification
+      context.setRequestCallMeta(META_READER.read(null, in));
+      String messageName = in.readString(null).toString();
+      if (messageName.equals(""))                 // a handshake ping
+        return handshake;
+      Message rm = remote.getMessages().get(messageName);
+      if (rm == null)
+        throw new AvroRuntimeException("No such remote message: "+messageName);
+      Message m = getLocal().getMessages().get(messageName);
+      if (m == null)
+        throw new AvroRuntimeException("No message named "+messageName
+                                       +" in "+getLocal());
+
+      Object request = readRequest(rm.getRequest(), m.getRequest(), in);
+      
+      context.setMessage(rm);
+      for (RPCPlugin plugin : rpcMetaPlugins) {
+        plugin.serverReceiveRequest(context);
+      }
+
+      // create response using local protocol specification
+      if ((m.isOneWay() != rm.isOneWay()) && wasConnected)
+        throw new AvroRuntimeException("Not both one-way: "+messageName);
+
+      Object response = null;
+      
+      try {
+        REMOTE.set(remote);
+        response = respond(m, request);
+        context.setResponse(response);
+      } catch (Exception e) {
+        error = e;
+        context.setError(error);
+        LOG.warn("user error", e);
+      } finally {
+        REMOTE.set(null);
+      }
+      
+      if (m.isOneWay() && wasConnected)           // no response data
+        return null;
+
+      out.writeBoolean(error != null);
+      if (error == null)
+        writeResponse(m.getResponse(), response, out);
+      else
+        try {
+          writeError(m.getErrors(), error, out);
+        } catch (UnresolvedUnionException e) {    // unexpected error
+          throw error;
+        }
+    } catch (Exception e) {                       // system error
+      LOG.warn("system error", e);
+      context.setError(e);
+      bbo = new ByteBufferOutputStream();
+      out = EncoderFactory.get().binaryEncoder(bbo, null);
+      out.writeBoolean(true);
+      writeError(Protocol.SYSTEM_ERRORS, new Utf8(e.toString()), out);
+      if (null == handshake) {
+        handshake = new ByteBufferOutputStream().getBufferList();
+      }
+    }
+    out.flush();
+    payload = bbo.getBufferList();
+    
+    // Grab meta-data from plugins
+    context.setResponsePayload(payload);
+    for (RPCPlugin plugin : rpcMetaPlugins) {
+      plugin.serverSendResponse(context);
+    }
+    META_WRITER.write(context.responseCallMeta(), out);
+    out.flush();
+    // Prepend handshake and append payload
+    bbo.prepend(handshake);
+    bbo.append(payload);
+
+    return bbo.getBufferList();
+  }
+
+  private SpecificDatumWriter<HandshakeResponse> handshakeWriter =
+    new SpecificDatumWriter<HandshakeResponse>(HandshakeResponse.class);
+  private SpecificDatumReader<HandshakeRequest> handshakeReader =
+    new SpecificDatumReader<HandshakeRequest>(HandshakeRequest.class);
+
+  private Protocol handshake(Decoder in, Encoder out, Transceiver connection)
+    throws IOException {
+    if (connection != null && connection.isConnected())
+      return connection.getRemote();
+    HandshakeRequest request = (HandshakeRequest)handshakeReader.read(null, in);
+    Protocol remote = protocols.get(request.clientHash);
+    if (remote == null && request.clientProtocol != null) {
+      remote = Protocol.parse(request.clientProtocol.toString());
+      protocols.put(request.clientHash, remote);
+    }
+    HandshakeResponse response = new HandshakeResponse();
+    if (localHash.equals(request.serverHash)) {
+      response.match =
+        remote == null ? HandshakeMatch.NONE : HandshakeMatch.BOTH;
+    } else {
+      response.match =
+        remote == null ? HandshakeMatch.NONE : HandshakeMatch.CLIENT;
+    }
+    if (response.match != HandshakeMatch.BOTH) {
+      response.serverProtocol = local.toString();
+      response.serverHash = localHash;
+    }
+    
+    RPCContext context = new RPCContext();
+    context.setHandshakeRequest(request);
+    context.setHandshakeResponse(response);
+    for (RPCPlugin plugin : rpcMetaPlugins) {
+      plugin.serverConnecting(context);
+    }
+    handshakeWriter.write(response, out);
+
+    if (connection != null && response.match != HandshakeMatch.NONE)
+      connection.setRemote(remote);
+
+    return remote;
+  }
+
+  /** Computes the response for a message. */
+  public abstract Object respond(Message message, Object request)
+    throws Exception;
+
+  /** Reads a request message. */
+  public abstract Object readRequest(Schema actual, Schema expected, Decoder in)
+    throws IOException;
+
+  /** Writes a response message. */
+  public abstract void writeResponse(Schema schema, Object response,
+                                     Encoder out) throws IOException;
+
+  /** Writes an error message. */
+  public abstract void writeError(Schema schema, Object error,
+                                  Encoder out) throws IOException;
+
+}
+
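
From a transport's perspective, the server side reduces to one call: feed
respond() the request frames and write back whatever it returns. A sketch
for a connection-based server (the frame I/O helpers are hypothetical;
connection is the Transceiver for this connection):

    List<ByteBuffer> requestFrames = readFrames(channel);   // hypothetical I/O
    List<ByteBuffer> responseFrames =
        responder.respond(requestFrames, connection);
    if (responseFrames != null) {  // null for one-way messages once connected
      writeFrames(channel, responseFrames);                 // hypothetical I/O
    }
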
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/ResponderServlet.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/ResponderServlet.java
new file mode 100644
index 0000000..53f6094
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/ResponderServlet.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.avro.AvroRuntimeException;
+
+/** An {@link HttpServlet} that responds to Avro RPC requests. */
+public class ResponderServlet extends HttpServlet {
+  private Responder responder;
+
+  public ResponderServlet(Responder responder) throws IOException {
+    this.responder = responder;
+  }
+
+  @Override
+  protected void doPost(HttpServletRequest request,
+                        HttpServletResponse response)
+    throws IOException, ServletException {
+    response.setContentType(HttpTransceiver.CONTENT_TYPE);
+    List<ByteBuffer> requestBufs =
+      HttpTransceiver.readBuffers(request.getInputStream());
+    try {
+      List<ByteBuffer> responseBufs = responder.respond(requestBufs);
+      response.setContentLength(HttpTransceiver.getLength(responseBufs));
+      HttpTransceiver.writeBuffers(responseBufs, response.getOutputStream());
+    } catch (AvroRuntimeException e) {
+      throw new ServletException(e);
+    }
+  }
+}
+
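
Mounting the servlet requires only a Responder; with Avro's Jetty-based
HttpServer wrapper (defined elsewhere in this package) the whole HTTP
server reduces to a couple of lines. A sketch, where Mail and MailImpl
stand in for a generated protocol interface and its implementation:

    Responder responder = new SpecificResponder(Mail.class, new MailImpl());
    new HttpServer(responder, 8080).start();  // serves Avro RPC over POST
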
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketServer.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketServer.java
new file mode 100644
index 0000000..5611ffd
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketServer.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.util.Map;
+import java.net.SocketAddress;
+import java.nio.channels.SocketChannel;
+import javax.security.sasl.Sasl;
+import javax.security.sasl.SaslServer;
+import javax.security.sasl.SaslException;
+import javax.security.auth.callback.CallbackHandler;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** A {@link Server} that uses {@link javax.security.sasl} for authentication
+ * and encryption. */
+public class SaslSocketServer extends SocketServer {
+  private static final Logger LOG = LoggerFactory.getLogger(SaslSocketServer.class);
+
+  private static abstract class SaslServerFactory {
+    protected abstract SaslServer getServer() throws SaslException;
+  }
+
+  private SaslServerFactory factory;
+
+  /** Create using SASL's anonymous (<a
+   * href="http://www.ietf.org/rfc/rfc2245.txt">RFC 2245</a>) mechanism. */
+  public SaslSocketServer(Responder responder, SocketAddress addr)
+    throws IOException {
+    this(responder, addr,
+         new SaslServerFactory() {
+           public SaslServer getServer() { return new AnonymousServer(); }
+         });
+  }
+
+  /** Create using the specified {@link SaslServer} parameters. */
+  public SaslSocketServer(Responder responder, SocketAddress addr,
+                          final String mechanism, final String protocol,
+                          final String serverName, final Map<String,?> props,
+                          final CallbackHandler cbh) throws IOException {
+    this(responder, addr,
+         new SaslServerFactory() {
+           public SaslServer getServer() throws SaslException {
+             return Sasl.createSaslServer(mechanism, protocol, serverName,
+                                          props, cbh);
+           }
+         });
+  }
+
+  private SaslSocketServer(Responder responder, SocketAddress addr,
+                           SaslServerFactory factory) throws IOException {
+    super(responder, addr);
+    this.factory = factory;
+  }
+
+  @Override protected Transceiver getTransceiver(SocketChannel channel)
+    throws IOException {
+    return new SaslSocketTransceiver(channel, factory.getServer());
+  }
+
+  private static class AnonymousServer implements SaslServer {
+    private String user;
+    public String getMechanismName() { return "ANONYMOUS"; }
+    public byte[] evaluateResponse(byte[] response) throws SaslException {
+      try {
+        this.user = new String(response, "UTF-8");
+      } catch (IOException e) {
+        throw new SaslException(e.toString());
+      }
+      return null;
+    }
+    public boolean isComplete() { return user != null; }
+    public String getAuthorizationID() { return user; }
+    public byte[] unwrap(byte[] incoming, int offset, int len) {
+      throw new UnsupportedOperationException();
+    }
+    public byte[] wrap(byte[] outgoing, int offset, int len) {
+      throw new UnsupportedOperationException();
+    }
+    public Object getNegotiatedProperty(String propName) { return null; }
+    public void dispose() {}
+  }
+
+}
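
Pairing this server with the SaslSocketTransceiver below under the
anonymous mechanism needs only socket addresses. A sketch (the responder is
a placeholder; port 0 asks the OS for a free port):

    import java.net.InetSocketAddress;

    SaslSocketServer server =
        new SaslSocketServer(responder, new InetSocketAddress(0));
    server.start();   // SocketServer extends Thread
    Transceiver client =
        new SaslSocketTransceiver(new InetSocketAddress(server.getPort()));
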
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java
new file mode 100644
index 0000000..880c7a5
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java
@@ -0,0 +1,403 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.io.EOFException;
+import java.io.UnsupportedEncodingException;
+import java.net.SocketAddress;
+import java.nio.channels.SocketChannel;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.security.sasl.Sasl;
+import javax.security.sasl.SaslException;
+import javax.security.sasl.SaslClient;
+import javax.security.sasl.SaslServer;
+
+import org.apache.avro.Protocol;
+import org.apache.avro.util.ByteBufferOutputStream;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** A {@link Transceiver} that uses {@link javax.security.sasl} for
+ * authentication and encryption. */
+public class SaslSocketTransceiver extends Transceiver {
+  private static final Logger LOG =
+    LoggerFactory.getLogger(SaslSocketTransceiver.class);
+
+  private static final ByteBuffer EMPTY = ByteBuffer.allocate(0);
+
+  private static enum Status { START, CONTINUE, FAIL, COMPLETE }
+
+  private SaslParticipant sasl;
+  private SocketChannel channel;
+  private boolean dataIsWrapped;
+  private boolean saslResponsePiggybacked;
+
+  private Protocol remote;
+  
+  private ByteBuffer readHeader = ByteBuffer.allocate(4);
+  private ByteBuffer writeHeader = ByteBuffer.allocate(4);
+  private ByteBuffer zeroHeader = ByteBuffer.allocate(4).putInt(0);
+
+  /** Create using SASL's anonymous (<a
+   * href="http://www.ietf.org/rfc/rfc2245.txt">RFC 2245</a>) mechanism. */
+  public SaslSocketTransceiver(SocketAddress address) throws IOException {
+    this(address, new AnonymousClient());
+  }
+
+  /** Create using the specified {@link SaslClient}. */
+  public SaslSocketTransceiver(SocketAddress address, SaslClient saslClient)
+    throws IOException {
+    this.sasl = new SaslParticipant(saslClient);
+    this.channel = SocketChannel.open(address);
+    this.channel.socket().setTcpNoDelay(true);
+    LOG.debug("open to {}", getRemoteName());
+    open(true);
+  }
+
+  /** Create using the specified {@link SaslServer}. */
+  public SaslSocketTransceiver(SocketChannel channel, SaslServer saslServer)
+    throws IOException {
+    this.sasl = new SaslParticipant(saslServer);
+    this.channel = channel;
+    LOG.debug("open from {}", getRemoteName());
+    open(false);
+  }
+
+  @Override public boolean isConnected() { return remote != null; }
+
+  @Override public void setRemote(Protocol remote) {
+    this.remote = remote;
+  }
+
+  @Override public Protocol getRemote() {
+    return remote;
+  }
+  @Override public String getRemoteName() {
+    return channel.socket().getRemoteSocketAddress().toString();
+  }
+
+  @Override
+  public synchronized List<ByteBuffer> transceive(List<ByteBuffer> request)
+    throws IOException {
+    if (saslResponsePiggybacked) {                // still need to read response
+      saslResponsePiggybacked = false;
+      Status status  = readStatus();
+      ByteBuffer frame = readFrame();
+      switch (status) {
+      case COMPLETE:
+        break;
+      case FAIL:
+        throw new SaslException("Fail: "+toString(frame));
+      default:
+        throw new IOException("Unexpected SASL status: "+status);
+      }
+    }
+    return super.transceive(request);
+  }
+
+  private void open(boolean isClient) throws IOException {
+    LOG.debug("beginning SASL negotiation");
+
+    if (isClient) {
+      ByteBuffer response = EMPTY;
+      if (sasl.client.hasInitialResponse())
+        response = ByteBuffer.wrap(sasl.evaluate(response.array()));
+      write(Status.START, sasl.getMechanismName(), response);
+      if (sasl.isComplete())
+        saslResponsePiggybacked = true;
+    }
+    
+    while (!sasl.isComplete()) {
+      Status status  = readStatus();
+      ByteBuffer frame = readFrame();
+      switch (status) {
+      case START:
+        String mechanism = toString(frame);
+        frame = readFrame();
+        if (!mechanism.equalsIgnoreCase(sasl.getMechanismName())) {
+          write(Status.FAIL, "Wrong mechanism: "+mechanism);
+          throw new SaslException("Wrong mechanism: "+mechanism);
+        }
+        // fall through: evaluate the initial-response frame read above
+      case CONTINUE:
+        byte[] response;
+        try {
+          response = sasl.evaluate(frame.array());
+          status = sasl.isComplete() ? Status.COMPLETE : Status.CONTINUE;
+        } catch (SaslException e) {
+          response = e.toString().getBytes("UTF-8");
+          status = Status.FAIL;
+        }
+        write(status, response!=null ? ByteBuffer.wrap(response) : EMPTY);
+        break;
+      case COMPLETE:
+        sasl.evaluate(frame.array());
+        if (!sasl.isComplete())
+          throw new SaslException("Expected completion!");
+        break;
+      case FAIL:
+        throw new SaslException("Fail: "+toString(frame));
+      default:
+        throw new IOException("Unexpected SASL status: "+status);
+      }
+    }
+    LOG.debug("SASL opened");
+
+    String qop = (String) sasl.getNegotiatedProperty(Sasl.QOP);
+    LOG.debug("QOP = {}", qop);
+    dataIsWrapped = (qop != null && !qop.equalsIgnoreCase("auth"));
+  }
+
+  private String toString(ByteBuffer buffer) throws IOException {
+    try {
+      return new String(buffer.array(), "UTF-8");
+    } catch (UnsupportedEncodingException e) {
+      throw new IOException(e.toString(), e);
+    }
+  }
+
+  @Override public synchronized List<ByteBuffer> readBuffers()
+    throws IOException {
+    List<ByteBuffer> buffers = new ArrayList<ByteBuffer>();
+    while (true) {
+      ByteBuffer buffer = readFrameAndUnwrap();
+      if (buffer.remaining() == 0)
+        return buffers;
+      buffers.add(buffer);
+    }
+  }
+
+  private Status readStatus() throws IOException {
+    ByteBuffer buffer = ByteBuffer.allocate(1);
+    read(buffer);
+    int status = buffer.get();
+    if (status >= Status.values().length)         // reject out-of-range bytes
+      throw new IOException("Unexpected SASL status byte: "+status);
+    return Status.values()[status];
+  }
+
+  private ByteBuffer readFrameAndUnwrap() throws IOException {
+    ByteBuffer frame = readFrame();
+    if (!dataIsWrapped)
+      return frame;
+    ByteBuffer unwrapped = ByteBuffer.wrap(sasl.unwrap(frame.array()));
+    LOG.debug("unwrapped data of length: {}", unwrapped.remaining());
+    return unwrapped;
+  }
+
+  private ByteBuffer readFrame() throws IOException {
+    read(readHeader);
+    ByteBuffer buffer = ByteBuffer.allocate(readHeader.getInt());
+    LOG.debug("about to read: {} bytes", buffer.capacity());
+    read(buffer);
+    return buffer;
+  }
+  
+  private void read(ByteBuffer buffer) throws IOException {
+    buffer.clear();
+    while (buffer.hasRemaining())
+      if (channel.read(buffer) == -1)
+        throw new EOFException();
+    buffer.flip();
+  }
+
+  @Override public synchronized void writeBuffers(List<ByteBuffer> buffers)
+    throws IOException {
+    if (buffers == null) return;                  // no data to write
+    List<ByteBuffer> writes = new ArrayList<ByteBuffer>(buffers.size()*2+1);
+    int currentLength = 0;
+    ByteBuffer currentHeader = writeHeader;
+    for (ByteBuffer buffer : buffers) {           // gather writes
+      if (buffer.remaining() == 0) continue;      // ignore empties
+      if (dataIsWrapped) {
+        LOG.debug("wrapping data of length: {}", buffer.remaining());
+        buffer = ByteBuffer.wrap(sasl.wrap(buffer.array(), buffer.position(),
+                                           buffer.remaining()));
+      }
+      int length = buffer.remaining();
+      if (!dataIsWrapped                          // can append buffers on wire
+          && (currentLength + length) <= ByteBufferOutputStream.BUFFER_SIZE) {
+        if (currentLength == 0)
+          writes.add(currentHeader);
+        currentLength += length;
+        currentHeader.clear();
+        currentHeader.putInt(currentLength);
+        LOG.debug("adding {} to write, total now {}", length, currentLength);
+      } else {
+        currentLength = length;
+        currentHeader = ByteBuffer.allocate(4).putInt(length);
+        writes.add(currentHeader);
+        LOG.debug("planning write of {}", length);
+      }
+      currentHeader.flip();
+      writes.add(buffer);
+    }
+    zeroHeader.flip();                            // zero-terminate
+    writes.add(zeroHeader);
+
+    writeFully(writes.toArray(new ByteBuffer[writes.size()]));
+  }
+
+  private void write(Status status, String prefix, ByteBuffer response)
+    throws IOException {
+    LOG.debug("write status: {} {}", status, prefix);
+    write(status, prefix);
+    write(response);
+  }
+
+  private void write(Status status, String response) throws IOException {
+    write(status, ByteBuffer.wrap(response.getBytes("UTF-8")));
+  }
+
+  private void write(Status status, ByteBuffer response) throws IOException {
+    LOG.debug("write status: {}", status);
+    ByteBuffer statusBuffer = ByteBuffer.allocate(1);
+    statusBuffer.clear();
+    statusBuffer.put((byte)(status.ordinal())).flip();
+    writeFully(statusBuffer);
+    write(response);
+  }
+
+  private void write(ByteBuffer response) throws IOException {
+    LOG.debug("writing: {}", response.remaining());
+    writeHeader.clear();
+    writeHeader.putInt(response.remaining()).flip();
+    writeFully(writeHeader, response);
+  }
+
+  private void writeFully(ByteBuffer... buffers) throws IOException {
+    int length = buffers.length;
+    int start = 0;
+    do {
+      channel.write(buffers, start, length-start);
+      while (buffers[start].remaining() == 0) {
+        start++;
+        if (start == length)
+          return;
+      }
+    } while (true);
+  }
+
+  @Override public void close() throws IOException {
+    if (channel.isOpen()) {
+      LOG.info("closing to "+getRemoteName());
+      channel.close();
+    }
+    sasl.dispose();
+  }
+
+  /**
+   * Used to abstract over the <code>SaslServer</code> and
+   * <code>SaslClient</code> classes, which share a lot of their interface, but
+   * unfortunately don't share a common superclass.
+   */
+  private static class SaslParticipant {
+    // One of these will always be null.
+    public SaslServer server;
+    public SaslClient client;
+
+    public SaslParticipant(SaslServer server) {
+      this.server = server;
+    }
+
+    public SaslParticipant(SaslClient client) {
+      this.client = client;
+    }
+
+    public String getMechanismName() {
+      if (client != null)
+        return client.getMechanismName();
+      else
+        return server.getMechanismName();
+    }
+
+    public boolean isComplete() {
+      if (client != null)
+        return client.isComplete();
+      else
+        return server.isComplete();
+    }
+
+    public void dispose() throws SaslException {
+      if (client != null)
+        client.dispose();
+      else
+        server.dispose();
+    }
+
+    public byte[] unwrap(byte[] buf) throws SaslException {
+      if (client != null)
+        return client.unwrap(buf, 0, buf.length);
+      else
+        return server.unwrap(buf, 0, buf.length);
+    }
+
+    public byte[] wrap(byte[] buf) throws SaslException {
+      return wrap(buf, 0, buf.length);
+    }
+
+    public byte[] wrap(byte[] buf, int start, int len) throws SaslException {
+      if (client != null)
+        return client.wrap(buf, start, len);
+      else
+        return server.wrap(buf, start, len);
+    }
+
+    public Object getNegotiatedProperty(String propName) {
+      if (client != null)
+        return client.getNegotiatedProperty(propName);
+      else
+        return server.getNegotiatedProperty(propName);
+    }
+
+    public byte[] evaluate(byte[] buf) throws SaslException {
+      if (client != null)
+        return client.evaluateChallenge(buf);
+      else
+        return server.evaluateResponse(buf);
+    }
+
+  }
+
+  private static class AnonymousClient implements SaslClient {
+    public String getMechanismName() { return "ANONYMOUS"; }
+    public boolean hasInitialResponse() { return true; }
+    public byte[] evaluateChallenge(byte[] challenge) throws SaslException {
+      try {
+        return System.getProperty("user.name").getBytes("UTF-8");
+      } catch (IOException e) {
+        throw new SaslException(e.toString());
+      }
+    }
+    public boolean isComplete() { return true; }
+    public byte[] unwrap(byte[] incoming, int offset, int len) {
+      throw new UnsupportedOperationException();
+    }
+    public byte[] wrap(byte[] outgoing, int offset, int len) {
+      throw new UnsupportedOperationException();
+    }
+    public Object getNegotiatedProperty(String propName) { return null; }
+    public void dispose() {}
+  }
+}
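
A sketch of constructing this transceiver with an explicit SASL mechanism
rather than the anonymous default. The mechanism, protocol/server names,
properties and `callbackHandler` are illustrative and must mirror the
SaslSocketServer configuration on the other end:

    import java.net.InetSocketAddress;
    import java.util.Collections;
    import java.util.Map;
    import javax.security.sasl.Sasl;
    import javax.security.sasl.SaslClient;
    import org.apache.avro.ipc.SaslSocketTransceiver;
    import org.apache.avro.ipc.Transceiver;

    Map<String, String> props =
        Collections.singletonMap(Sasl.QOP, "auth-conf");  // ask for wrapping
    SaslClient client = Sasl.createSaslClient(
        new String[] { "DIGEST-MD5" }, null /* authzid */,
        "avro" /* protocol */, "localhost" /* serverName */,
        props, callbackHandler);
    Transceiver t = new SaslSocketTransceiver(
        new InetSocketAddress("localhost", 9090), client);

If the negotiated QOP is anything other than plain "auth", open() sets
dataIsWrapped and every subsequent frame passes through wrap()/unwrap().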
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Server.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Server.java
new file mode 100644
index 0000000..4ae6053
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Server.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+/** A server listening on a port. */
+public interface Server {
+  /** The port this server runs on. */
+  int getPort();
+
+  /** Start this server. */
+  void start();
+
+  /** Stop this server. */
+  void close();
+  
+  /** Wait for this server to exit. */
+  void join() throws InterruptedException;
+  
+}
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketServer.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketServer.java
new file mode 100644
index 0000000..8db5d66
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketServer.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.io.EOFException;
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.nio.channels.ClosedChannelException;
+import java.nio.channels.ServerSocketChannel;
+import java.nio.channels.SocketChannel;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.avro.Protocol;
+import org.apache.avro.Protocol.Message;
+import org.apache.avro.ipc.generic.GenericResponder;
+
+/** A socket-based server implementation. This uses a simple, non-standard wire
+ * protocol and is not intended for production services.
+ * @deprecated use {@link SaslSocketServer} instead.
+ */
+public class SocketServer extends Thread implements Server {
+  private static final Logger LOG = LoggerFactory.getLogger(SocketServer.class);
+
+  private Responder responder;
+  private ServerSocketChannel channel;
+  private ThreadGroup group;
+
+  public SocketServer(Responder responder, SocketAddress addr)
+    throws IOException {
+    String name = "SocketServer on "+addr;
+
+    this.responder = responder;
+    this.group = new ThreadGroup(name);
+    this.channel = ServerSocketChannel.open();
+
+    channel.socket().bind(addr);
+
+    setName(name);
+    setDaemon(true);
+  }
+
+  public int getPort() { return channel.socket().getLocalPort(); }
+
+  public void run() {
+    LOG.info("starting "+channel.socket().getInetAddress());
+    try {
+      while (true) {
+        try {
+          new Connection(channel.accept());
+        } catch (ClosedChannelException e) {
+          return;
+        } catch (IOException e) {
+          LOG.warn("unexpected error", e);
+          throw new RuntimeException(e);
+        }
+      }
+    } finally {
+      LOG.info("stopping "+channel.socket().getInetAddress());
+      try {
+        channel.close();
+      } catch (IOException e) {
+      }
+    }
+  }
+
+  public void close() {
+    this.interrupt(); 
+    group.interrupt();
+  }
+
+  /** Creates an appropriate {@link Transceiver} for this server.
+   * Returns a {@link SocketTransceiver} by default. */
+  protected Transceiver getTransceiver(SocketChannel channel)
+    throws IOException {
+    return new SocketTransceiver(channel);
+  }
+
+  private class Connection implements Runnable {
+
+    SocketChannel channel;
+    Transceiver xc;
+
+    public Connection(SocketChannel channel) throws IOException {
+      this.channel = channel;
+
+      Thread thread = new Thread(group, this);
+      thread.setName("Connection to "+channel.socket().getRemoteSocketAddress());
+      thread.setDaemon(true);
+      thread.start();
+    }
+
+    public void run() {
+      try {
+        try {
+          this.xc = getTransceiver(channel);
+          while (true) {
+            xc.writeBuffers(responder.respond(xc.readBuffers(), xc));
+          }
+        } catch (EOFException e) {
+          return;
+        } catch (ClosedChannelException e) {
+          return;
+        } finally {
+          xc.close();
+        }
+      } catch (IOException e) {
+        LOG.warn("unexpected error", e);
+      }
+    }
+
+  }
+  
+  public static void main(String[] arg) throws Exception {
+    Responder responder =
+      new GenericResponder(Protocol.parse("{\"protocol\": \"X\"}")) {
+        public Object respond(Message message, Object request)
+          throws Exception {
+          throw new IOException("no messages!");
+        }
+      };
+    SocketServer server = new SocketServer(responder, new InetSocketAddress(0));
+    server.start();
+    System.out.println("server started on port: "+server.getPort());
+    server.join();
+  }
+}
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketTransceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketTransceiver.java
new file mode 100644
index 0000000..e2178c6
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketTransceiver.java
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.net.SocketAddress;
+import java.nio.channels.SocketChannel;
+import java.nio.channels.ClosedChannelException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.avro.Protocol;
+
+/** A socket-based {@link Transceiver} implementation.  This uses a simple,
+ * non-standard wire protocol and is not intended for production services.
+ * @deprecated use {@link SaslSocketTransceiver} instead.
+ */
+public class SocketTransceiver extends Transceiver {
+  private static final Logger LOG
+    = LoggerFactory.getLogger(SocketTransceiver.class);
+
+  private SocketChannel channel;
+  private ByteBuffer header = ByteBuffer.allocate(4);
+
+  private Protocol remote;
+  
+  public SocketTransceiver(SocketAddress address) throws IOException {
+    this(SocketChannel.open(address));
+  }
+
+  public SocketTransceiver(SocketChannel channel) throws IOException {
+    this.channel = channel;
+    this.channel.socket().setTcpNoDelay(true);
+    LOG.info("open to "+getRemoteName());
+  }
+
+  public String getRemoteName() {
+    return channel.socket().getRemoteSocketAddress().toString();
+  }
+
+  public synchronized List<ByteBuffer> readBuffers() throws IOException {
+    List<ByteBuffer> buffers = new ArrayList<ByteBuffer>();
+    while (true) {
+      header.clear();
+      while (header.hasRemaining()) {
+        if (channel.read(header) < 0)
+          throw new ClosedChannelException();
+      }
+      header.flip();
+      int length = header.getInt();
+      if (length == 0) {                       // end of buffers
+        return buffers;
+      }
+      ByteBuffer buffer = ByteBuffer.allocate(length);
+      while (buffer.hasRemaining()) {
+        if (channel.read(buffer) < 0)
+          throw new ClosedChannelException();
+      }
+      buffer.flip();
+      buffers.add(buffer);
+    }
+  }
+
+  public synchronized void writeBuffers(List<ByteBuffer> buffers)
+    throws IOException {
+    if (buffers == null) return;                  // no data to write
+    for (ByteBuffer buffer : buffers) {
+      if (buffer.limit() == 0) continue;
+      writeLength(buffer.limit());                // length-prefix
+      channel.write(buffer);
+    }
+    writeLength(0);                               // null-terminate
+  }
+
+  private void writeLength(int length) throws IOException {
+    header.clear();
+    header.putInt(length);
+    header.flip();
+    channel.write(header);
+  }
+
+  @Override public boolean isConnected() { return remote != null; }
+
+  @Override public void setRemote(Protocol remote) {
+    this.remote = remote;
+  }
+
+  @Override public Protocol getRemote() {
+    return remote;
+  }
+
+  @Override public void close() throws IOException {
+    if (channel.isOpen()) {
+      LOG.info("closing to "+getRemoteName());
+      channel.close();
+    }
+  }
+
+}
+
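
As a concrete illustration of the framing implemented by writeBuffers() and
readBuffers() above: writing the buffers "ab" and "cde" puts the following
bytes on the wire (4-byte big-endian length prefixes, terminated by a zero
length):

    00 00 00 02  61 62         length 2, then "ab"
    00 00 00 03  63 64 65      length 3, then "cde"
    00 00 00 00                zero length ends the buffer list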
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/Transceiver.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Transceiver.java
new file mode 100644
index 0000000..898fd77
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/Transceiver.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.avro.Protocol;
+
+/** Base transport class used by {@link Requestor}. */
+public abstract class Transceiver implements Closeable {
+  private final ReentrantLock channelLock = new ReentrantLock();
+
+  public abstract String getRemoteName() throws IOException;
+  
+  /**
+   * Acquires an exclusive lock on the transceiver's channel.
+   */
+  public void lockChannel() {
+    channelLock.lock();
+  }
+  
+  /**
+   * Releases the lock on the transceiver's channel if held by the calling thread.
+   */
+  public void unlockChannel() {
+    if (channelLock.isHeldByCurrentThread()) {
+      channelLock.unlock();
+    }
+  }
+
+  /** Called by {@link Requestor#request(String,Object)} for two-way messages.
+   * By default calls {@link #writeBuffers(List)} followed by
+   * {@link #readBuffers()}. */
+  public List<ByteBuffer> transceive(List<ByteBuffer> request)
+    throws IOException {
+    lockChannel();
+    try {
+      writeBuffers(request);
+      return readBuffers();
+    } finally {
+      unlockChannel();
+    }
+  }
+  
+  /** 
+   * Called by {@link Requestor#request(String,Object,Callback)} for two-way messages using callbacks.
+   */
+  public void transceive(List<ByteBuffer> request, Callback<List<ByteBuffer>> callback)
+    throws IOException {
+    // The default implementation works synchronously
+    try {
+      List<ByteBuffer> response = transceive(request);
+      callback.handleResult(response);
+    } catch (IOException e) {
+      callback.handleError(e);
+    }
+  }
+
+  /** Called by the default definition of {@link #transceive(List)}.*/
+  public abstract List<ByteBuffer> readBuffers() throws IOException;
+
+  /** Called by {@link Requestor#request(String,Object)} for one-way messages.*/
+  public abstract void writeBuffers(List<ByteBuffer> buffers)
+    throws IOException;
+
+  /** True if a handshake has been completed for this connection.  Used to
+   * determine whether a handshake need be completed prior to a one-way
+   * message.  Requests and responses are always prefixed by handshakes, but
+   * one-way messages are not.  If the first request sent over a connection is
+   * one-way, then a handshake-only response is returned.  Subsequent one-way
+   * messages over the connection will have no response data sent.  Returns
+   * false by default. */
+  public boolean isConnected() { return false; }
+
+  /** Called with the remote protocol when a handshake has been completed.
+   * After this has been called and while a connection is maintained, {@link
+   * #isConnected()} should return true and {@link #getRemote()} should return
+   * this protocol.  Does nothing by default. */
+  public void setRemote(Protocol protocol) {}
+
+  /** Returns the protocol passed to {@link #setRemote(Protocol)}.  Throws
+   * IllegalStateException by default. */
+  public Protocol getRemote() {
+    throw new IllegalStateException("Not connected.");
+  }
+
+  public void close() throws IOException {}
+}
+
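
To illustrate the contract, a minimal in-memory subclass, sketched along the
lines of Avro's own LocalTransceiver and assuming a Responder that is served
in-process:

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.util.List;
    import org.apache.avro.ipc.Responder;
    import org.apache.avro.ipc.Transceiver;

    class LoopbackTransceiver extends Transceiver {
      private final Responder responder;        // answers requests in-process
      private List<ByteBuffer> response;        // reply from the last write

      LoopbackTransceiver(Responder responder) { this.responder = responder; }

      @Override public String getRemoteName() { return "loopback"; }

      @Override public void writeBuffers(List<ByteBuffer> request)
          throws IOException {
        response = responder.respond(request);  // compute reply synchronously
      }

      @Override public List<ByteBuffer> readBuffers() { return response; }
    }

The inherited transceive(List) then works unchanged: it takes the channel
lock, calls writeBuffers(), and returns whatever readBuffers() yields.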
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/generic/GenericRequestor.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/generic/GenericRequestor.java
new file mode 100644
index 0000000..bce7168
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/generic/GenericRequestor.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc.generic;
+
+import java.io.IOException;
+
+import org.apache.avro.AvroRemoteException;
+import org.apache.avro.Protocol;
+import org.apache.avro.Schema;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.ipc.Callback;
+import org.apache.avro.ipc.Requestor;
+import org.apache.avro.ipc.Transceiver;
+
+/** {@link Requestor} implementation for generic Java data. */
+public class GenericRequestor extends Requestor {
+  GenericData data;
+
+  public GenericRequestor(Protocol protocol, Transceiver transceiver)
+    throws IOException {
+    this(protocol, transceiver, GenericData.get());
+  }
+
+  public GenericRequestor(Protocol protocol, Transceiver transceiver,
+                          GenericData data)
+    throws IOException {
+    super(protocol, transceiver);
+    this.data = data;
+  }
+
+  public GenericData getGenericData() { return data; }
+
+  @Override
+  public Object request(String messageName, Object request)
+    throws IOException {
+    try {
+      return super.request(messageName, request);
+    } catch (Exception e) {
+      if (e instanceof RuntimeException)
+        throw (RuntimeException)e;
+      if (e instanceof IOException)
+        throw (IOException)e;
+      throw new AvroRemoteException(e);
+    }
+  }
+
+  @Override
+  public <T> void request(String messageName, Object request, Callback<T> callback)
+    throws IOException {
+    try {
+      super.request(messageName, request, callback);
+    } catch (Exception e) {
+      if (e instanceof RuntimeException)
+        throw (RuntimeException)e;
+      if (e instanceof IOException)
+        throw (IOException)e;
+      throw new AvroRemoteException(e);
+    }
+  }
+
+  @Override
+  public void writeRequest(Schema schema, Object request, Encoder out)
+    throws IOException {
+    new GenericDatumWriter<Object>(schema, data).write(request, out);
+  }
+
+  @Override
+  public Object readResponse(Schema writer, Schema reader, Decoder in)
+    throws IOException {
+    return new GenericDatumReader<Object>(writer, reader, data).read(null, in);
+  }
+
+  @Override
+  public Exception readError(Schema writer, Schema reader, Decoder in)
+    throws IOException {
+    Object error = new GenericDatumReader<Object>(writer, reader, data)
+      .read(null,in);
+    if (error instanceof CharSequence)
+      return new AvroRuntimeException(error.toString()); // system error
+    return new AvroRemoteException(error);
+  }
+
+}
+
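
A client-side sketch using an inline "Echo" protocol (the protocol, host and
port are illustrative; a matching responder sketch follows GenericResponder
below):

    import java.net.InetSocketAddress;
    import org.apache.avro.Protocol;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.avro.ipc.SaslSocketTransceiver;
    import org.apache.avro.ipc.Transceiver;
    import org.apache.avro.ipc.generic.GenericRequestor;

    Protocol echo = Protocol.parse(
        "{\"protocol\": \"Echo\", \"messages\": {\"echo\": {"
        + "\"request\": [{\"name\": \"text\", \"type\": \"string\"}],"
        + "\"response\": \"string\"}}}");

    Transceiver t =
        new SaslSocketTransceiver(new InetSocketAddress("localhost", 9090));
    GenericRequestor requestor = new GenericRequestor(echo, t);

    GenericRecord args =
        new GenericData.Record(echo.getMessages().get("echo").getRequest());
    args.put("text", "hello");
    Object reply = requestor.request("echo", args);  // a CharSequence, "hello"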
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/generic/GenericResponder.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/generic/GenericResponder.java
new file mode 100644
index 0000000..c5beac0
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/generic/GenericResponder.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc.generic;
+
+import java.io.IOException;
+
+import org.apache.avro.AvroRemoteException;
+import org.apache.avro.Protocol;
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.ipc.Responder;
+
+/** {@link Responder} implementation for generic Java data. */
+public abstract class GenericResponder extends Responder {
+  private GenericData data;
+
+  public GenericResponder(Protocol local) {
+    this(local, GenericData.get());
+  }
+
+  public GenericResponder(Protocol local, GenericData data) {
+    super(local);
+    this.data = data;
+  }
+
+  public GenericData getGenericData() { return data; }
+
+  protected DatumWriter<Object> getDatumWriter(Schema schema) {
+    return new GenericDatumWriter<Object>(schema, data);
+  }
+
+  protected DatumReader<Object> getDatumReader(Schema actual, Schema expected) {
+    return new GenericDatumReader<Object>(actual, expected, data);
+  }
+
+  @Override
+  public Object readRequest(Schema actual, Schema expected, Decoder in)
+    throws IOException {
+    return getDatumReader(actual, expected).read(null, in);
+  }
+
+  @Override
+  public void writeResponse(Schema schema, Object response, Encoder out)
+    throws IOException {
+    getDatumWriter(schema).write(response, out);
+  }
+
+  @Override
+  public void writeError(Schema schema, Object error,
+                         Encoder out) throws IOException {
+    if (error instanceof AvroRemoteException)
+      error = ((AvroRemoteException)error).getValue();
+    getDatumWriter(schema).write(error, out);
+  }
+
+}
+
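
The server half of the Echo sketch above, implementing respond() inline
(again illustrative; `echo` is the same Protocol as on the client side):

    import java.net.InetSocketAddress;
    import org.apache.avro.Protocol;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.avro.ipc.SaslSocketServer;
    import org.apache.avro.ipc.Server;
    import org.apache.avro.ipc.generic.GenericResponder;

    GenericResponder responder = new GenericResponder(echo) {
      @Override
      public Object respond(Protocol.Message message, Object request) {
        return ((GenericRecord) request).get("text");  // echo the argument
      }
    };
    Server server =
        new SaslSocketServer(responder, new InetSocketAddress(9090));
    server.start();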
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectRequestor.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectRequestor.java
new file mode 100644
index 0000000..84d798e
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectRequestor.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc.reflect;
+
+import java.io.IOException;
+import java.lang.reflect.Proxy;
+
+import org.apache.avro.Protocol;
+import org.apache.avro.Schema;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.reflect.ReflectDatumWriter;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+
+/** A {@link org.apache.avro.ipc.Requestor} for existing interfaces. */
+public class ReflectRequestor extends SpecificRequestor {
+  
+  public ReflectRequestor(Class<?> iface, Transceiver transceiver)
+    throws IOException {
+    this(iface, transceiver, new ReflectData(iface.getClassLoader()));
+  }
+
+  protected ReflectRequestor(Protocol protocol, Transceiver transceiver)
+    throws IOException {
+    this(protocol, transceiver, ReflectData.get());
+  }
+    
+  public ReflectRequestor(Class<?> iface, Transceiver transceiver,
+                          ReflectData data)
+    throws IOException {
+    this(data.getProtocol(iface), transceiver, data);
+  }
+    
+  public ReflectRequestor(Protocol protocol, Transceiver transceiver,
+                          ReflectData data)
+    throws IOException {
+    super(protocol, transceiver, data);
+  }
+    
+  public ReflectData getReflectData() { return (ReflectData)getSpecificData(); }
+
+  @Override
+  protected DatumWriter<Object> getDatumWriter(Schema schema) {
+    return new ReflectDatumWriter<Object>(schema, getReflectData());
+  }
+
+  @Override
+  protected DatumReader<Object> getDatumReader(Schema writer, Schema reader) {
+    return new ReflectDatumReader<Object>(writer, reader, getReflectData());
+  }
+
+  /** Create a proxy instance whose methods invoke RPCs. */
+  public static <T> T getClient(Class<T> iface, Transceiver transceiver)
+    throws IOException {
+    return getClient(iface, transceiver,
+                     new ReflectData(iface.getClassLoader()));
+  }
+
+  /** Create a proxy instance whose methods invoke RPCs. */
+  @SuppressWarnings("unchecked")
+  public static <T> T getClient(Class<T> iface, Transceiver transceiver,
+                                ReflectData reflectData) throws IOException {
+    Protocol protocol = reflectData.getProtocol(iface);
+    return (T)Proxy.newProxyInstance
+      (reflectData.getClassLoader(),
+       new Class[] { iface },
+       new ReflectRequestor(protocol, transceiver, reflectData));
+  }
+  
+  /** Create a proxy instance whose methods invoke RPCs. */
+  @SuppressWarnings("unchecked")
+  public static <T> T getClient(Class<T> iface, ReflectRequestor rreq) 
+    throws IOException {
+    return (T)Proxy.newProxyInstance(rreq.getReflectData().getClassLoader(), 
+                                  new Class[] { iface }, rreq);
+  }
+}
+
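
An end-to-end sketch with a plain Java interface, no generated code needed
(the interface and AdderImpl are illustrative):

    import java.net.InetSocketAddress;
    import org.apache.avro.ipc.SaslSocketServer;
    import org.apache.avro.ipc.SaslSocketTransceiver;
    import org.apache.avro.ipc.Server;
    import org.apache.avro.ipc.Transceiver;
    import org.apache.avro.ipc.reflect.ReflectRequestor;
    import org.apache.avro.ipc.reflect.ReflectResponder;

    public interface Adder {
      int add(int x, int y);
    }

    // server side
    Server server = new SaslSocketServer(
        new ReflectResponder(Adder.class, new AdderImpl()),
        new InetSocketAddress(0));
    server.start();

    // client side: a dynamic proxy whose method calls become RPCs
    Transceiver t = new SaslSocketTransceiver(
        new InetSocketAddress("localhost", server.getPort()));
    Adder adder = ReflectRequestor.getClient(Adder.class, t);
    int sum = adder.add(2, 3);                    // 5, computed remotely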
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectResponder.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectResponder.java
new file mode 100644
index 0000000..3e66943
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectResponder.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc.reflect;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Protocol;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.ipc.specific.SpecificResponder;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.reflect.ReflectDatumWriter;
+
+/** {@link org.apache.avro.ipc.Responder} for existing interfaces.*/
+public class ReflectResponder extends SpecificResponder {
+  public ReflectResponder(Class iface, Object impl) {
+    this(iface, impl, new ReflectData(impl.getClass().getClassLoader()));
+  }
+  
+  public ReflectResponder(Protocol protocol, Object impl) {
+    this(protocol, impl, new ReflectData(impl.getClass().getClassLoader()));
+  }
+
+  public ReflectResponder(Class iface, Object impl, ReflectData data) {
+    this(data.getProtocol(iface), impl, data);
+  }
+
+  public ReflectResponder(Protocol protocol, Object impl, ReflectData data) {
+    super(protocol, impl, data);
+  }
+
+  public ReflectData getReflectData() { return (ReflectData)getSpecificData(); }
+
+  @Override
+  protected DatumWriter<Object> getDatumWriter(Schema schema) {
+    return new ReflectDatumWriter<Object>(schema, getReflectData());
+  }
+
+  @Override
+  protected DatumReader<Object> getDatumReader(Schema actual, Schema expected) {
+    return new ReflectDatumReader<Object>(actual, expected, getReflectData());
+  }
+
+  @Override
+  public void writeError(Schema schema, Object error,
+                         Encoder out) throws IOException {
+    if (error instanceof CharSequence)
+      error = error.toString();                   // system error: convert
+    super.writeError(schema, error, out);
+  }
+
+
+}
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificRequestor.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificRequestor.java
new file mode 100644
index 0000000..d464737
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificRequestor.java
@@ -0,0 +1,209 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc.specific;
+
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.Type;
+import java.util.Arrays;
+
+import org.apache.avro.AvroRemoteException;
+import org.apache.avro.Protocol;
+import org.apache.avro.Schema;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.Requestor;
+import org.apache.avro.ipc.Callback;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.specific.SpecificDatumWriter;
+
+/** {@link org.apache.avro.ipc.Requestor Requestor} for generated interfaces. */
+public class SpecificRequestor extends Requestor implements InvocationHandler {
+  SpecificData data;
+
+  public SpecificRequestor(Class<?> iface, Transceiver transceiver)
+    throws IOException {
+    this(iface, transceiver, new SpecificData(iface.getClassLoader()));
+  }
+
+  protected SpecificRequestor(Protocol protocol, Transceiver transceiver)
+    throws IOException {
+    this(protocol, transceiver, SpecificData.get());
+  }
+
+  public SpecificRequestor(Class<?> iface, Transceiver transceiver,
+                           SpecificData data)
+    throws IOException {
+    this(data.getProtocol(iface), transceiver, data);
+  }
+
+  public SpecificRequestor(Protocol protocol, Transceiver transceiver,
+                           SpecificData data)
+    throws IOException {
+    super(protocol, transceiver);
+    this.data = data;
+  }
+
+  public SpecificData getSpecificData() { return data; }
+
+  @Override
+  public Object invoke(Object proxy, Method method, Object[] args)
+    throws Throwable {
+    String name = method.getName();
+    if (name.equals("hashCode")) {
+      return hashCode();
+    }
+    else if (name.equals("equals")) {
+      Object obj = args[0];
+      return (proxy == obj) || (obj != null && Proxy.isProxyClass(obj.getClass())
+                                && this.equals(Proxy.getInvocationHandler(obj)));
+    }
+    else if (name.equals("toString")) {
+      String protocol = "unknown";
+      String remote = "unknown";
+      Class<?>[] interfaces = proxy.getClass().getInterfaces();
+      if (interfaces.length > 0) {
+        try {
+          protocol = Class.forName(interfaces[0].getName()).getSimpleName();
+        } catch (ClassNotFoundException e) {
+        }
+
+        InvocationHandler handler = Proxy.getInvocationHandler(proxy);
+        if (handler instanceof Requestor) {
+          try {
+            remote = ((Requestor) handler).getTransceiver().getRemoteName();
+          } catch (IOException e) {
+          }
+        }
+      }
+      return "Proxy[" + protocol + "," + remote + "]";
+    }
+    else {
+      try {
+        // Check if this is a callback-based RPC:
+        Type[] parameterTypes = method.getParameterTypes();
+        if ((parameterTypes.length > 0) &&
+            (parameterTypes[parameterTypes.length - 1] instanceof Class) &&
+            Callback.class.isAssignableFrom(((Class<?>)parameterTypes[parameterTypes.length - 1]))) {
+          // Extract the Callback from the end of the argument list
+          Object[] finalArgs = Arrays.copyOf(args, args.length - 1);
+          Callback<?> callback = (Callback<?>)args[args.length - 1];
+          request(method.getName(), finalArgs, callback);
+          return null;
+        }
+        else {
+          return request(method.getName(), args);
+        }
+      } catch (Exception e) {
+        // Check if this is a declared Exception:
+        for (Class<?> exceptionClass : method.getExceptionTypes()) {
+          if (exceptionClass.isAssignableFrom(e.getClass())) {
+            throw e;
+          }
+        }
+      
+        // Next, check for RuntimeExceptions:
+        if (e instanceof RuntimeException) {
+          throw e;
+        }
+      
+        // Not an expected Exception, so wrap it in AvroRemoteException:
+        throw new AvroRemoteException(e);
+      }
+    }
+  }
+
+  protected DatumWriter<Object> getDatumWriter(Schema schema) {
+    return new SpecificDatumWriter<Object>(schema, data);
+  }
+
+  @Deprecated                                     // for compatibility in 1.5
+  protected DatumReader<Object> getDatumReader(Schema schema) {
+    return getDatumReader(schema, schema);
+  }
+
+  protected DatumReader<Object> getDatumReader(Schema writer, Schema reader) {
+    return new SpecificDatumReader<Object>(writer, reader, data);
+  }
+
+  @Override
+  public void writeRequest(Schema schema, Object request, Encoder out)
+    throws IOException {
+    Object[] args = (Object[])request;
+    int i = 0;
+    for (Schema.Field param : schema.getFields())
+      getDatumWriter(param.schema()).write(args[i++], out);
+  }
+    
+  @Override
+  public Object readResponse(Schema writer, Schema reader, Decoder in)
+    throws IOException {
+    return getDatumReader(writer, reader).read(null, in);
+  }
+
+  @Override
+  public Exception readError(Schema writer, Schema reader, Decoder in)
+    throws IOException {
+    Object value = getDatumReader(writer, reader).read(null, in);
+    if (value instanceof Exception)
+      return (Exception)value;
+    return new AvroRuntimeException(value.toString());
+  }
+
+  /** Create a proxy instance whose methods invoke RPCs. */
+  public static <T> T getClient(Class<T> iface, Transceiver transceiver)
+    throws IOException {
+    return getClient(iface, transceiver,
+                     new SpecificData(iface.getClassLoader()));
+  }
+
+  /** Create a proxy instance whose methods invoke RPCs. */
+  @SuppressWarnings("unchecked")
+  public static <T> T getClient(Class<T> iface, Transceiver transceiver,
+                                SpecificData data)
+    throws IOException {
+    Protocol protocol = data.getProtocol(iface);
+    return (T)Proxy.newProxyInstance
+      (data.getClassLoader(),
+       new Class[] { iface },
+       new SpecificRequestor(protocol, transceiver, data));
+  }
+
+  /** Create a proxy instance whose methods invoke RPCs. */
+  @SuppressWarnings("unchecked")
+  public static <T> T getClient(Class<T> iface, SpecificRequestor requestor)
+    throws IOException {
+    return (T)Proxy.newProxyInstance(requestor.data.getClassLoader(),
+                                  new Class[] { iface }, requestor);
+  }
+
+  /** Return the remote protocol for a proxy. */
+  public static Protocol getRemote(Object proxy) throws IOException {
+    return ((Requestor)Proxy.getInvocationHandler(proxy)).getRemote();
+  }
+
+}
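
A sketch of the callback path that invoke() recognizes above. Specific RPC
relies on compiler-generated classes, so `MailCallback`, `message` and
`transceiver` are stand-ins for a generated callback interface (one whose
methods take a trailing Callback parameter) and existing objects:

    import org.apache.avro.ipc.Callback;
    import org.apache.avro.ipc.specific.SpecificRequestor;

    MailCallback client =
        SpecificRequestor.getClient(MailCallback.class, transceiver);
    client.send(message, new Callback<CharSequence>() {  // asynchronous RPC
      @Override public void handleResult(CharSequence result) {
        System.out.println("response: " + result);
      }
      @Override public void handleError(Throwable error) {
        error.printStackTrace();
      }
    });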
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificResponder.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificResponder.java
new file mode 100644
index 0000000..ae3a30d
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificResponder.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc.specific;
+
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Protocol;
+import org.apache.avro.Protocol.Message;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.specific.SpecificDatumWriter;
+import org.apache.avro.ipc.generic.GenericResponder;
+
+/** {@link org.apache.avro.ipc.Responder Responder} for generated interfaces.*/
+public class SpecificResponder extends GenericResponder {
+  private Object impl;
+
+  public SpecificResponder(Class iface, Object impl) {
+    this(iface, impl, new SpecificData(impl.getClass().getClassLoader()));
+  }
+    
+  public SpecificResponder(Protocol protocol, Object impl) {
+    this(protocol, impl, new SpecificData(impl.getClass().getClassLoader()));
+  }
+
+  public SpecificResponder(Class iface, Object impl, SpecificData data) {
+    this(data.getProtocol(iface), impl, data);
+  }
+
+  public SpecificResponder(Protocol protocol, Object impl, SpecificData data) {
+    super(protocol, data);
+    this.impl = impl;
+  }
+
+  public SpecificData getSpecificData() {return (SpecificData)getGenericData();}
+
+  @Override
+  protected DatumWriter<Object> getDatumWriter(Schema schema) {
+    return new SpecificDatumWriter<Object>(schema, getSpecificData());
+  }
+
+  @Override
+  protected DatumReader<Object> getDatumReader(Schema actual, Schema expected) {
+    return new SpecificDatumReader<Object>(actual, expected, getSpecificData());
+  }
+
+  @Override
+  public void writeError(Schema schema, Object error,
+                         Encoder out) throws IOException {
+    getDatumWriter(schema).write(error, out);
+  }
+
+  @Override
+  public Object respond(Message message, Object request) throws Exception {
+    int numParams = message.getRequest().getFields().size();
+    Object[] params = new Object[numParams];
+    Class[] paramTypes = new Class[numParams];
+    int i = 0;
+    try {
+      for (Schema.Field param: message.getRequest().getFields()) {
+        params[i] = ((GenericRecord)request).get(param.name());
+        paramTypes[i] = getSpecificData().getClass(param.schema());
+        i++;
+      }
+      Method method = impl.getClass().getMethod(message.getName(), paramTypes);
+      method.setAccessible(true);
+      return method.invoke(impl, params);
+    } catch (InvocationTargetException e) {
+      if (e.getTargetException() instanceof Exception) {
+        throw (Exception) e.getTargetException();
+      } else {
+        throw new Exception(e.getTargetException());
+      }
+    } catch (NoSuchMethodException e) {
+      throw new AvroRuntimeException(e);
+    } catch (IllegalAccessException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+}
+
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/FloatHistogram.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/FloatHistogram.java
new file mode 100644
index 0000000..15fd094
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/FloatHistogram.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc.stats;
+
+/**
+ * Specific implementation of histogram for floats,
+ * which also keeps track of basic summary statistics.
+ * @param <B> Bucket type, often String (see {@link Histogram}).
+ */
+class FloatHistogram<B> extends Histogram<B, Float> {
+  private float runningSum;
+  private float runningSumOfSquares;
+
+  public FloatHistogram(Segmenter<B, Float> segmenter) {
+    super(segmenter);
+  }
+
+  @Override
+  public void add(Float value) {
+    super.add(value);
+    runningSum += value;
+    runningSumOfSquares += value*value;
+  }
+
+  public float getMean() {
+    if (totalCount == 0) {
+      return Float.NaN;
+    }
+    return runningSum / totalCount;
+  }
+
+  public float getUnbiasedStdDev() {
+    if (totalCount <= 1) {
+      return Float.NaN;
+    }
+    float mean = getMean();
+    return (float)Math.sqrt((runningSumOfSquares - totalCount*mean*mean)/(totalCount - 1));
+  }
+}
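
A worked example of the statistics above (both classes are package-private,
so this only compiles inside org.apache.avro.ipc.stats; `segmenter` is any
Segmenter over floats):

    FloatHistogram<String> h = new FloatHistogram<String>(segmenter);
    h.add(1f); h.add(2f); h.add(3f);    // sum = 6, sum of squares = 14
    h.getMean();                        // 6 / 3 = 2.0
    h.getUnbiasedStdDev();              // sqrt((14 - 3*2*2) / (3-1)) = 1.0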
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Histogram.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Histogram.java
new file mode 100644
index 0000000..521e1c2
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Histogram.java
@@ -0,0 +1,267 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc.stats;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.SortedSet;
+import java.util.TreeMap;
+
+/**
+ * Represents a histogram of values.  This class uses a {@link Segmenter}
+ * to determine which bucket to place a given value into. Also stores the last
+ * MAX_HISTORY_SIZE entries which have been added to this histogram, in order.
+ *
+ * Note that Histogram, by itself, is not synchronized.
+ * @param <B> Bucket type.  Often String, since buckets are typically
+ * used for their toString() representation.
+ * @param <T> Type of value
+ */
+class Histogram<B, T> {
+  /**
+   * How many recent additions we should track.
+   */
+  public static final int MAX_HISTORY_SIZE = 20; 
+  
+  private Segmenter<B, T> segmenter;
+  private int[] counts;
+  protected int totalCount;
+  private LinkedList<T> recentAdditions;
+
+  /**
+   * Interface to determine which bucket to place a value in.
+   *
+   * Segmenters should be immutable, so many histograms can re-use
+   * the same segmenter.
+   */
+  interface Segmenter<B, T> {
+    /** Number of buckets to use. */
+    int size();
+    /**
+     * Which bucket to place value in.
+     *
+     * @return Index of bucket for the value.  At least 0 and less than size().
+     * @throws SegmenterException if value does not fit in a bucket.
+     */
+    int segment(T value);
+    /**
+     * Returns an iterator of buckets. The order of iteration
+     * is consistent with the segment numbers.
+     */
+    Iterator<B> getBuckets();
+    
+    /**
+     * Returns a List of bucket boundaries. Useful for printing
+     * segmenters.
+     */
+    List<String> getBoundaryLabels();
+    
+    /**
+     * Returns the bucket labels as a list of strings.
+     */
+    List<String> getBucketLabels();
+  }
+
+  public static class SegmenterException extends RuntimeException {
+    public SegmenterException(String s) {
+      super(s);
+    }
+  }
+
+  public static class TreeMapSegmenter<T extends Comparable<T>>
+      implements Segmenter<String, T> {
+    private TreeMap<T, Integer> index = new TreeMap<T, Integer>();
+    public TreeMapSegmenter(SortedSet<T> leftEndpoints) {
+      if (leftEndpoints.isEmpty()) {
+        throw new IllegalArgumentException(
+            "Endpoints must not be empty: " + leftEndpoints);
+      }
+      int i = 0;
+      for (T t : leftEndpoints) {
+        index.put(t, i++);
+      }
+    }
+
+    public int segment(T value) {
+      Map.Entry<T, Integer> e = index.floorEntry(value);
+      if (e == null) {
+        throw new SegmenterException("Could not find bucket for: " + value);
+      }
+      return e.getValue();
+    }
+
+    @Override
+    public int size() {
+      return index.size();
+    }
+
+    private String rangeAsString(T a, T b) {
+      return String.format("[%s,%s)", a, b == null ? "infinity" : b);
+    }
+    
+    @Override
+    public ArrayList<String> getBoundaryLabels() {
+      ArrayList<String> outArray = new ArrayList<String>(index.keySet().size());
+      for (T obj: index.keySet()) {
+        outArray.add(obj.toString());
+      }
+      return outArray;
+    }
+    
+    @Override
+    public ArrayList<String> getBucketLabels() {
+      ArrayList<String> outArray = new ArrayList<String>(index.keySet().size());
+      Iterator<String> bucketsIt = this.getBuckets();
+      while (bucketsIt.hasNext()) {
+        outArray.add(bucketsIt.next());
+      }
+      return outArray;
+    }
+    
+    @Override
+    public Iterator<String> getBuckets() {
+      return new Iterator<String>() {
+        Iterator<T> it = index.keySet().iterator();
+        T cur = it.next(); // there's always at least one element
+        int pos = 0;
+        
+        @Override
+        public boolean hasNext() {
+          return (pos < index.keySet().size());
+        }
+
+        @Override
+        public String next() {
+          pos = pos + 1;
+          T left = cur;
+          cur = it.hasNext() ? it.next() : null;
+          return rangeAsString(left, cur);
+        }
+
+        @Override
+        public void remove() {
+          throw new UnsupportedOperationException();
+        }
+      };
+    }
+  }
+
+  /**
+   * Creates a histogram using the specified segmenter.
+   */
+  public Histogram(Segmenter<B, T> segmenter) {
+    this.segmenter = segmenter;
+    this.counts = new int[segmenter.size()];
+    this.recentAdditions = new LinkedList<T>();
+  }
+
+  /** Tallies a value in the histogram. */
+  public void add(T value) {
+    int i = segmenter.segment(value);
+    counts[i]++;
+    totalCount++;
+    if (this.recentAdditions.size() >= Histogram.MAX_HISTORY_SIZE) {
+      this.recentAdditions.pollLast();
+    }
+    this.recentAdditions.push(value);
+  }
+
+  /**
+   * Returns the underlying bucket values.
+   */
+  public int[] getHistogram() {
+    return counts;
+  }
+  
+  /**
+   * Returns the underlying segmenter used for this histogram.
+   */
+  public Segmenter<B, T> getSegmenter() {
+    return this.segmenter;
+  }
+  
+  /**
+   * Returns values recently added to this histogram. These are in reverse
+   * order (most recent first).
+   */
+  public List<T> getRecentAdditions() {
+    return this.recentAdditions;
+  }
+
+  /** Returns the total count of entries. */
+  public int getCount() {
+    return totalCount;
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    boolean first = true;
+    for (Entry<B> e : entries()) {
+      if (!first) {
+        sb.append(";");
+      } else {
+        first = false;
+      }
+      sb.append(e.bucket).append("=").append(e.count);
+    }
+    return sb.toString();
+  }
+
+  static class Entry<B> {
+    public Entry(B bucket, int count) {
+      this.bucket = bucket;
+      this.count = count;
+    }
+    B bucket;
+    int count;
+  }
+
+  private class EntryIterator implements Iterable<Entry<B>>, Iterator<Entry<B>> {
+    int i = 0;
+    Iterator<B> bucketNameIterator = segmenter.getBuckets();
+
+    @Override
+    public Iterator<Entry<B>> iterator() {
+      return this;
+    }
+
+    @Override
+    public boolean hasNext() {
+      return i < segmenter.size();
+    }
+
+    @Override
+    public Entry<B> next() {
+      return new Entry<B>(bucketNameIterator.next(), counts[i++]);
+    }
+
+    @Override
+    public void remove() {
+      throw new UnsupportedOperationException();
+    }
+
+  }
+
+  public Iterable<Entry<B>> entries() {
+    return new EntryIterator();
+  }
+}
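
To make the bucketing behavior above concrete, a short sketch (same package
assumption as before; the boundaries are illustrative):

    package org.apache.avro.ipc.stats;

    import java.util.Arrays;
    import java.util.TreeSet;

    public class HistogramDemo {
      public static void main(String[] args) {
        Histogram<String, Integer> h = new Histogram<String, Integer>(
            new Histogram.TreeMapSegmenter<Integer>(
                new TreeSet<Integer>(Arrays.asList(0, 10, 100))));
        h.add(3);   // falls in [0,10)
        h.add(42);  // falls in [10,100)
        h.add(42);
        System.out.println(h);  // prints: [0,10)=1;[10,100)=2;[100,infinity)=0
      }
    }
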
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/IntegerHistogram.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/IntegerHistogram.java
new file mode 100644
index 0000000..072bf8b
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/IntegerHistogram.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc.stats;
+
+/**
+ * Specific implementation of histogram for integers,
+ * which also keeps track of basic summary statistics.
+ * @param <B> Bucket type.
+ */
+class IntegerHistogram<B> extends Histogram<B, Integer> {
+  private float runningSum;
+  private float runningSumOfSquares;
+
+  public IntegerHistogram(Segmenter<B, Integer> segmenter) {
+    super(segmenter);
+  }
+
+  @Override
+  public void add(Integer value) {
+    super.add(value);
+    runningSum += value;
+    runningSumOfSquares += value*value;
+  }
+
+  public float getMean() {
+    if (totalCount == 0) {
+      return -1;
+    }
+    return runningSum / (float) totalCount;
+  }
+
+  public float getUnbiasedStdDev() {
+    if (totalCount <= 1) {
+      return -1;
+    }
+    float mean = getMean();
+    return (float)Math.sqrt((runningSumOfSquares - totalCount*mean*mean)/
+        (float)(totalCount - 1));
+  }
+}
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StaticServlet.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StaticServlet.java
new file mode 100644
index 0000000..c079ec5
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StaticServlet.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc.stats;
+
+import java.io.IOException;
+import java.net.URL;
+
+import org.mortbay.jetty.servlet.DefaultServlet;
+import org.mortbay.resource.Resource;
+
+/**
+ * Very simple servlet class capable of serving static files.
+ */
+public class StaticServlet extends DefaultServlet {
+  @Override
+  public Resource getResource(String pathInContext) {
+    // Use only the last segment of the URL as the filename, so the lookup
+    // path stays fixed. This also prevents mischief like '../../foo.css'.
+    String[] parts = pathInContext.split("/");
+    String filename =  parts[parts.length - 1];
+
+    try {
+      URL resource = getClass().getClassLoader().getResource(
+          "org/apache/avro/ipc/stats/static/" + filename);
+      if (resource == null) { return null; }
+      return Resource.newResource(resource);
+    } catch (IOException e) {
+      return null;
+    }
+  }
+} 
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsPlugin.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsPlugin.java
new file mode 100644
index 0000000..565f532
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsPlugin.java
@@ -0,0 +1,226 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc.stats;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeSet;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+import org.apache.avro.Protocol.Message;
+import org.apache.avro.ipc.RPCContext;
+import org.apache.avro.ipc.RPCPlugin;
+import org.apache.avro.ipc.stats.Histogram.Segmenter;
+import org.apache.avro.ipc.stats.Stopwatch.Ticks;
+
+/**
+ * Collects count and latency statistics about RPC calls.  Keeps
+ * data for every method. Can be added to a Requestor (client)
+ * or Responder (server). 
+ *
+ * This uses milliseconds as the standard unit of measure
+ * throughout the class, stored in floats.
+ */
+public class StatsPlugin extends RPCPlugin {
+  /** Static declaration of histogram buckets. */
+  static final Segmenter<String, Float> LATENCY_SEGMENTER =
+    new Histogram.TreeMapSegmenter<Float>(new TreeSet<Float>(Arrays.asList(
+            0f,
+           25f,
+           50f,
+           75f,
+          100f,
+          200f,
+          300f,
+          500f,
+          750f,
+         1000f, // 1 second
+         2000f,
+         5000f,
+        10000f,
+        60000f, // 1 minute
+       600000f)));
+
+  static final Segmenter<String, Integer> PAYLOAD_SEGMENTER =
+    new Histogram.TreeMapSegmenter<Integer>(new TreeSet<Integer>(Arrays.asList(
+            0,
+           25,
+           50,
+           75,
+          100,
+          200,
+          300,
+          500,
+          750,
+         1000, // 1 k
+         2000,
+         5000,
+        10000,
+        50000, 
+       100000)));
+  
+  /** Per-method histograms.
+   * Must be accessed while holding a lock. */
+  Map<Message, FloatHistogram<?>> methodTimings =
+    new HashMap<Message, FloatHistogram<?>>();
+
+  Map<Message, IntegerHistogram<?>> sendPayloads =
+    new HashMap<Message, IntegerHistogram<?>>();
+  
+  Map<Message, IntegerHistogram<?>> receivePayloads =
+    new HashMap<Message, IntegerHistogram<?>>();
+  
+  /** RPCs in flight. */
+  ConcurrentMap<RPCContext, Stopwatch> activeRpcs =
+    new ConcurrentHashMap<RPCContext, Stopwatch>();
+  private Ticks ticks;
+
+  /** Time at which this plugin was created. */
+  public Date startupTime = new Date();
+  
+  private Segmenter<?, Float> floatSegmenter;
+  private Segmenter<?, Integer> integerSegmenter;
+
+  /** Construct a plugin with custom Ticks and Segmenter implementations. */
+  StatsPlugin(Ticks ticks, Segmenter<?, Float> floatSegmenter, 
+      Segmenter<?, Integer> integerSegmenter) {
+    this.floatSegmenter = floatSegmenter;
+    this.integerSegmenter = integerSegmenter;
+    this.ticks = ticks;
+  }
+
+  /** Construct a plugin with default (system) ticks, and default
+   * histogram segmentation. */
+  public StatsPlugin() {
+    this(Stopwatch.SYSTEM_TICKS, LATENCY_SEGMENTER, PAYLOAD_SEGMENTER);
+  }
+  
+  /**
+   * Helper to get the size of an RPC payload.
+   */
+  private int getPayloadSize(List<ByteBuffer> payload) {
+    if (payload == null) {
+      return 0;
+    }
+    
+    int size = 0;
+    for (ByteBuffer bb: payload) {
+      size = size + bb.limit();
+    }
+    
+    return size;
+  }
+
+  @Override
+  public void serverReceiveRequest(RPCContext context) {
+    Stopwatch t = new Stopwatch(ticks);
+    t.start();
+    this.activeRpcs.put(context, t);
+    
+    synchronized(receivePayloads) {
+      IntegerHistogram<?> h = receivePayloads.get(context.getMessage());
+      if (h == null) {
+        h = createNewIntegerHistogram();
+        receivePayloads.put(context.getMessage(), h);
+      }
+      h.add(getPayloadSize(context.getRequestPayload()));
+    }
+  }
+  
+  @Override
+  public void serverSendResponse(RPCContext context) {
+    Stopwatch t = this.activeRpcs.remove(context);
+    t.stop();
+    publish(context, t);
+    
+    synchronized(sendPayloads) {
+      IntegerHistogram<?> h = sendPayloads.get(context.getMessage());
+      if (h == null) {
+        h = createNewIntegerHistogram();
+        sendPayloads.put(context.getMessage(), h);
+      }
+      h.add(getPayloadSize(context.getResponsePayload()));
+    }
+  }
+  
+  @Override
+  public void clientSendRequest(RPCContext context) {
+    Stopwatch t = new Stopwatch(ticks);
+    t.start();
+    this.activeRpcs.put(context, t);
+    
+    synchronized(sendPayloads) {
+      IntegerHistogram<?> h = sendPayloads.get(context.getMessage());
+      if (h == null) {
+        h = createNewIntegerHistogram();
+        sendPayloads.put(context.getMessage(), h);
+      }
+      h.add(getPayloadSize(context.getRequestPayload()));
+    }
+  }
+  
+  @Override
+  public void clientReceiveResponse(RPCContext context) {
+    Stopwatch t = this.activeRpcs.remove(context);
+    t.stop();
+    publish(context, t);
+    
+    synchronized(receivePayloads) {
+      IntegerHistogram<?> h = receivePayloads.get(context.getMessage());
+      if (h == null) {
+        h = createNewIntegerHistogram();
+        receivePayloads.put(context.getMessage(), h);
+      }
+      h.add(getPayloadSize(context.getResponsePayload()));
+    }
+  }
+  
+  /** Adds timing to the histograms. */
+  private void publish(RPCContext context, Stopwatch t) {
+    Message message = context.getMessage();
+    if (message == null) throw new IllegalArgumentException("RPCContext has no message");
+    synchronized(methodTimings) {
+      FloatHistogram<?> h = methodTimings.get(context.getMessage());
+      if (h == null) {
+        h = createNewFloatHistogram();
+        methodTimings.put(context.getMessage(), h);
+      }
+      h.add(nanosToMillis(t.elapsedNanos()));
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  private FloatHistogram<?> createNewFloatHistogram() {
+    return new FloatHistogram(floatSegmenter);
+  }
+
+  @SuppressWarnings("unchecked")
+  private IntegerHistogram<?> createNewIntegerHistogram() {
+    return new IntegerHistogram(integerSegmenter);
+  }
+  
+  /** Converts nanoseconds to milliseconds. */
+  static float nanosToMillis(long elapsedNanos) {
+    return elapsedNanos / 1000000.0f;
+  }
+}
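
A sketch of how the plugin is attached, assuming a generated protocol
interface Mail and an implementation MailImpl (both hypothetical here);
addRPCPlugin is the standard registration hook for RPCPlugin instances:

    import org.apache.avro.ipc.Responder;
    import org.apache.avro.ipc.specific.SpecificResponder;

    StatsPlugin statsPlugin = new StatsPlugin();
    Responder responder = new SpecificResponder(Mail.class, new MailImpl());
    responder.addRPCPlugin(statsPlugin);
    // Every RPC handled by this responder now updates the per-message
    // latency and payload histograms kept by the plugin.
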
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServer.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServer.java
new file mode 100644
index 0000000..3ae8ada
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServer.java
@@ -0,0 +1,54 @@
+package org.apache.avro.ipc.stats;
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.servlet.Context;
+import org.mortbay.jetty.servlet.ServletHolder;
+
+/**
+ * A server that displays live information from a StatsPlugin.
+ *
+ * Typical usage is as follows:
+ *   StatsPlugin plugin = new StatsPlugin();
+ *   requestor.addRPCPlugin(plugin);
+ *   StatsServer server = new StatsServer(plugin, 8080);
+ */
+public class StatsServer {
+  Server httpServer;
+  StatsPlugin plugin;
+  
+  /** Starts a stats server on the given port,
+   * responsible for the given plugin. */
+  public StatsServer(StatsPlugin plugin, int port) throws Exception {
+    this.httpServer = new Server(port);
+    this.plugin = plugin;
+    
+    Context staticContext = new Context(httpServer, "/static");
+    staticContext.addServlet(new ServletHolder(new StaticServlet()), "/");
+    
+    Context context = new Context(httpServer, "/");
+    context.addServlet(new ServletHolder(new StatsServlet(plugin)), "/");
+    
+    httpServer.start();
+  }
+  
+  /** Stops this server. */
+  public void stop() throws Exception {
+    this.httpServer.stop();
+  }
+}
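
End to end, then, a monitored service needs only a few extra lines (port 8080
and the responder variable are placeholders):

    StatsPlugin plugin = new StatsPlugin();
    responder.addRPCPlugin(plugin);   // or requestor.addRPCPlugin(plugin)
    StatsServer stats = new StatsServer(plugin, 8080);
    // ... browse http://localhost:8080/ while RPCs flow ...
    stats.stop();
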
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServlet.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServlet.java
new file mode 100644
index 0000000..3af2ffd
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServlet.java
@@ -0,0 +1,270 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc.stats;
+
+import java.io.IOException;
+import java.io.Writer;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map.Entry;
+
+import javax.servlet.ServletException;
+import javax.servlet.UnavailableException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.velocity.Template;
+import org.apache.velocity.VelocityContext;
+import org.apache.velocity.app.VelocityEngine;
+import org.apache.velocity.exception.ParseErrorException;
+import org.apache.velocity.exception.ResourceNotFoundException;
+
+import org.apache.avro.Protocol.Message;
+import org.apache.avro.ipc.RPCContext;
+
+/**
+ * Exposes information provided by a StatsPlugin as
+ * a web page.
+ *
+ * This class follows the same synchronization conventions
+ * as StatsPlugin, to avoid requiring StatsPlugin to serve
+ * a copy of the data.
+ */ 
+public class StatsServlet extends HttpServlet {
+  private final StatsPlugin statsPlugin;
+  private VelocityEngine velocityEngine;
+  private static final SimpleDateFormat FORMATTER = 
+    new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss");
+
+  public StatsServlet(StatsPlugin statsPlugin) throws UnavailableException {
+    this.statsPlugin = statsPlugin;
+    this.velocityEngine = new VelocityEngine();
+    
+    // These two properties tell Velocity to use its own classpath-based loader
+    velocityEngine.addProperty("resource.loader", "class");
+    velocityEngine.addProperty("class.resource.loader.class",
+        "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
+    
+    velocityEngine.setProperty("runtime.references.strict", true);
+    String logChuteName = "org.apache.velocity.runtime.log.NullLogChute";
+    velocityEngine.setProperty("runtime.log.logsystem.class", logChuteName);
+  }
+  
+  /* Helper class to store per-message data which is passed to templates.
+   * 
+   * The template expects a list of charts, each of which is parameterized by
+   * map key-value string attributes. */
+  public class RenderableMessage { // Velocity breaks if not public
+    public String name;
+    public int numCalls;
+    public ArrayList<HashMap<String, String>> charts;
+    
+    public RenderableMessage(String name) {
+      this.name = name;
+      this.charts = new ArrayList<HashMap<String, String>>();
+    }
+    
+    public ArrayList<HashMap<String, String>> getCharts() {
+      return this.charts;
+    }
+    
+    public String getname() {
+      return this.name;
+    }
+    
+    public int getNumCalls() {
+      return this.numCalls;
+    }
+  }
+
+  /* Surround each string in an array with
+   * quotation marks and escape existing quotes.
+   * 
+   * This is useful when we have an array of strings that we want to turn into
+   * a javascript array declaration. 
+   */
+  protected static List<String> escapeStringArray(List<String> input) {
+    for (int i = 0; i < input.size(); i++) {
+      input.set(i, "\"" + input.get(i).replace("\"", "\\\"") + "\"");
+    }
+    return input;
+  }
+  
+  @Override
+  protected void doGet(HttpServletRequest req, HttpServletResponse resp)
+      throws ServletException, IOException {
+    resp.setContentType("text/html");
+    
+    try {
+      writeStats(resp.getWriter()); 
+    }
+    catch (Exception e) {
+      e.printStackTrace();
+    }
+  }
+
+  void writeStats(Writer w) throws IOException {
+    VelocityContext context = new VelocityContext();
+    context.put("title", "Avro RPC Stats"); 
+    
+    ArrayList<String> rpcs = new ArrayList<String>();  // in flight rpcs
+    
+    ArrayList<RenderableMessage> messages = 
+      new ArrayList<RenderableMessage>();
+    
+    for (Entry<RPCContext, Stopwatch> rpc : 
+         this.statsPlugin.activeRpcs.entrySet()) {
+      rpcs.add(renderActiveRpc(rpc.getKey(), rpc.getValue()));
+    }
+    
+    // Render every message seen so far
+    synchronized(this.statsPlugin.methodTimings) {
+      for (Message m : this.statsPlugin.methodTimings.keySet()) {
+        messages.add(renderMethod(m));
+      }
+    }
+    
+    context.put("inFlightRpcs", rpcs);
+    context.put("messages", messages);
+    
+    context.put("currTime", FORMATTER.format(new Date()));
+    context.put("startupTime", FORMATTER.format(statsPlugin.startupTime));
+    
+    Template t;
+    try {
+      t = velocityEngine.getTemplate(
+          "org/apache/avro/ipc/stats/templates/statsview.vm");
+    } catch (ResourceNotFoundException e) {
+      throw new IOException("Template not found: " + e.getMessage());
+    } catch (ParseErrorException e) {
+      throw new IOException("Template parse error: " + e.getMessage());
+    } catch (Exception e) {
+      throw new IOException("Failed to load template: " + e.getMessage());
+    }
+    t.merge(context, w);
+  }
+
+  private String renderActiveRpc(RPCContext rpc, Stopwatch stopwatch) {
+    return rpc.getMessage().getName() + ": " +
+        formatMillis(StatsPlugin.nanosToMillis(stopwatch.elapsedNanos()));
+  }
+
+  
+  private RenderableMessage renderMethod(Message message) {
+    RenderableMessage out = new RenderableMessage(message.getName());
+    
+    synchronized(this.statsPlugin.methodTimings) {
+      FloatHistogram<?> hist = this.statsPlugin.methodTimings.get(message);
+      out.numCalls = hist.getCount();
+      
+      HashMap<String, String> latencyBar = new HashMap<String, String>();
+      // Fill in chart attributes for velocity
+      latencyBar.put("type", "bar");
+      latencyBar.put("title", "All-Time Latency");
+      latencyBar.put("units", "ms");
+      latencyBar.put("numCalls", Integer.toString(hist.getCount()));
+      latencyBar.put("avg", Float.toString(hist.getMean()));
+      latencyBar.put("stdDev", Float.toString(hist.getUnbiasedStdDev()));
+      latencyBar.put("labelStr", 
+          Arrays.toString(hist.getSegmenter().getBoundaryLabels().toArray()));
+      latencyBar.put("boundaryStr",
+          Arrays.toString(escapeStringArray(hist.getSegmenter().
+              getBucketLabels()).toArray()));
+      latencyBar.put("dataStr", Arrays.toString(hist.getHistogram())); 
+      out.charts.add(latencyBar);
+      
+      HashMap<String, String> latencyDot = new HashMap<String, String>();
+      latencyDot.put("title", "Latency");
+      latencyDot.put("type", "dot");
+      latencyDot.put("dataStr", 
+          Arrays.toString(hist.getRecentAdditions().toArray()));
+      out.charts.add(latencyDot);
+    }
+    
+    synchronized(this.statsPlugin.sendPayloads) {
+      IntegerHistogram<?> hist = this.statsPlugin.sendPayloads.get(message);
+      HashMap<String, String> latencyBar = new HashMap<String, String>();
+      // Fill in chart attributes for velocity
+      latencyBar.put("type", "bar");
+      latencyBar.put("title", "All-Time Send Payload");
+      latencyBar.put("units", "bytes");
+      latencyBar.put("numCalls", Integer.toString(hist.getCount()));
+      latencyBar.put("avg", Float.toString(hist.getMean()));
+      latencyBar.put("stdDev", Float.toString(hist.getUnbiasedStdDev()));
+      latencyBar.put("labelStr", 
+          Arrays.toString(hist.getSegmenter().getBoundaryLabels().toArray()));
+      latencyBar.put("boundaryStr",
+          Arrays.toString(escapeStringArray(hist.getSegmenter().
+              getBucketLabels()).toArray()));
+      latencyBar.put("dataStr", Arrays.toString(hist.getHistogram())); 
+      out.charts.add(latencyBar);
+      
+      HashMap<String, String> latencyDot = new HashMap<String, String>();
+      latencyDot.put("title", "Send Payload");
+      latencyDot.put("type", "dot");
+      latencyDot.put("dataStr", 
+          Arrays.toString(hist.getRecentAdditions().toArray()));
+      out.charts.add(latencyDot);
+    }
+    
+    synchronized(this.statsPlugin.receivePayloads) {
+      IntegerHistogram<?> hist = this.statsPlugin.receivePayloads.get(message);
+      HashMap<String, String> latencyBar = new HashMap<String, String>();
+      // Fill in chart attributes for velocity
+      latencyBar.put("type", "bar");
+      latencyBar.put("title", "All-Time Receive Payload");
+      latencyBar.put("units", "bytes");
+      latencyBar.put("numCalls", Integer.toString(hist.getCount()));
+      latencyBar.put("avg", Float.toString(hist.getMean()));
+      latencyBar.put("stdDev", Float.toString(hist.getUnbiasedStdDev()));
+      latencyBar.put("labelStr", 
+          Arrays.toString(hist.getSegmenter().getBoundaryLabels().toArray()));
+      latencyBar.put("boundaryStr",
+          Arrays.toString(escapeStringArray(hist.getSegmenter().
+              getBucketLabels()).toArray()));
+      latencyBar.put("dataStr", Arrays.toString(hist.getHistogram())); 
+      out.charts.add(latencyBar);
+      
+      HashMap<String, String> latencyDot = new HashMap<String, String>();
+      latencyDot.put("title", "Recv Payload");
+      latencyDot.put("type", "dot");
+      latencyDot.put("dataStr", 
+          Arrays.toString(hist.getRecentAdditions().toArray()));
+      out.charts.add(latencyDot);
+    }
+    
+    return out;
+  }
+  
+  private CharSequence formatMillis(float millis) {
+    return String.format("%.0fms", millis);
+  }
+}
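
One behavioral note on the servlet above: escapeStringArray mutates its
argument in place and returns that same list, for example:

    List<String> labels =
        new ArrayList<String>(Arrays.asList("[0,10)", "[10,infinity)"));
    StatsServlet.escapeStringArray(labels);
    // labels.get(0) is now "\"[0,10)\"" (quotes included), so
    // Arrays.toString(labels.toArray()) can be embedded directly as a
    // JavaScript array literal.
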
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Stopwatch.java b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Stopwatch.java
new file mode 100644
index 0000000..e15cac6
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Stopwatch.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc.stats;
+
+/** Encapsulates the passing of time. */
+class Stopwatch {
+  /** Encapsulates ticking time sources. */
+  interface Ticks {
+    /**
+     * Returns a number of "ticks" in nanoseconds.
+     * This should be monotonically non-decreasing.
+     */
+    long ticks();
+  }
+
+  /** Default System time source. */
+  public final static Ticks SYSTEM_TICKS = new SystemTicks();
+
+  private Ticks ticks;
+  private long start;
+  private long elapsed = -1;
+  private boolean running;
+
+  public Stopwatch(Ticks ticks) {
+    this.ticks = ticks;
+  }
+
+  /** Returns nanoseconds that have elapsed since start(). */
+  public long elapsedNanos() {
+    if (running) {
+      return this.ticks.ticks() - start;
+    } else {
+      if (elapsed == -1) throw new IllegalStateException();
+      return elapsed;
+    }
+  }
+
+  /** Starts the stopwatch. */
+  public void start() {
+    if (running) throw new IllegalStateException();
+    start = ticks.ticks();
+    running = true;
+  }
+
+  /** Stops the stopwatch and calculates the elapsed time. */
+  public void stop() {
+    if (!running) throw new IllegalStateException();
+    elapsed = ticks.ticks() - start;
+    running = false;
+  }
+
+  /** Implementation of Ticks using System.nanoTime(). */
+  private static class SystemTicks implements Ticks {
+    @Override
+    public long ticks() {
+      return System.nanoTime();
+    }
+  }
+}
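
The Ticks indirection above exists so tests can supply deterministic time; a
minimal sketch with a hypothetical FakeTicks (same-package access assumed):

    package org.apache.avro.ipc.stats;

    public class StopwatchDemo {
      /** A controllable time source for tests. */
      static class FakeTicks implements Stopwatch.Ticks {
        private long now = 0;
        @Override public long ticks() { return now; }
        void advance(long nanos) { now += nanos; }
      }

      public static void main(String[] args) {
        FakeTicks ticks = new FakeTicks();
        Stopwatch sw = new Stopwatch(ticks);
        sw.start();
        ticks.advance(1000000L);               // pretend one millisecond passed
        sw.stop();
        System.out.println(sw.elapsedNanos()); // 1000000
      }
    }
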
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/package.html b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/package.html
new file mode 100644
index 0000000..b27b866
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/package.html
@@ -0,0 +1,23 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+Utilities to collect and display IPC statistics.
+</body>
+</html>
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/avro.css b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/avro.css
new file mode 100644
index 0000000..f7199c5
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/avro.css
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+table#charts_table tr {
+  padding-right: 10px;
+  padding-left: 10px;
+}
\ No newline at end of file
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/avro.js b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/avro.js
new file mode 100644
index 0000000..30da1b0
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/avro.js
@@ -0,0 +1,110 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+function makeDotChart(yVals) {
+    var xVals = pv.range(1, yVals.length + 1);
+    var data = [];
+    var dotColors = pv.Colors.category20().range();
+
+    for (var i = 0; i < yVals.length; i += 1) {
+      data[i] = {x: xVals[i], y: yVals[i]};
+    }
+
+	/* Sizing and scales. */
+	var w = 200,
+	    h = 250,
+	    x = pv.Scale.linear(0, Math.max.apply(Math, xVals)).range(0, w),
+	    y = pv.Scale.linear(0, Math.max.apply(Math, yVals)).range(0, h);
+	
+	/* The root panel. */
+	var vis = new pv.Panel()
+	    .width(w)
+	    .height(h)
+	    .bottom(20)
+	    .left(50)
+	    .right(10)
+	    .top(5);
+	
+	/* Y-axis and ticks. */
+	vis.add(pv.Rule)
+	    .data(y.ticks())
+	    .bottom(y)
+	    .strokeStyle(function(d) { return d ? "#eee" : "#000"; })
+	  .anchor("left").add(pv.Label)
+	    .text(y.tickFormat);
+
+	
+	/* The dot plot! */
+	vis.add(pv.Panel)
+	    .data(data)
+	    .add(pv.Dot)
+	    .left(function(d) { return x(d.x); })
+	    .bottom(function(d) { return y(d.y); })
+	    .strokeStyle(function(d) { return dotColors[d.x % 20]; })
+	    .fillStyle(function() { return this.strokeStyle().alpha(1); })
+	    .title(function(d) { return d.y; })
+	    .event("mouseover", pv.Behavior.tipsy({gravity: "n",
+	      fade: false, delayIn: 0}));
+	vis.render();
+}
+
+
+function makeBarChart(labels, boundaries, data) {
+	var w = 200,
+	    h = 250,
+	    x = pv.Scale.ordinal(pv.range(data.length)).splitBanded(0, w, 4/5),
+	    y = pv.Scale.linear(0, Math.max.apply(Math, data)).range(0, h);
+
+	var vis = new pv.Panel()
+	    .width(w)
+	    .height(h)
+	    .bottom(20)
+	    .left(40)
+	    .right(5)
+	    .top(30);
+	
+	var bar = vis.add(pv.Bar)
+	    .data(data)
+	    .left(function() { return x(this.index); })
+	    .width(10)
+	    .bottom(0)
+	    .height(y)
+	    .fillStyle("#1f77b4")
+	    .title(function() { return boundaries[this.index]; })
+	    .event("mouseover", pv.Behavior.tipsy({gravity: "n",
+	      fade: false, delayIn: 0}));
+	
+	bar.anchor("bottom").add(pv.Label)
+	    .textMargin(5)
+	    .textBaseline("top")
+	    .text(function() { return (this.index % 4 == 0) ? labels[this.index] : ""; });
+	
+	vis.add(pv.Rule)
+	    .data(y.ticks())
+	    .bottom(function(d) { return Math.round(y(d)) - .5; })
+	    .strokeStyle(function(d) { return d ? "rgba(255,255,255,.3)" : "#000"; })
+	  .add(pv.Rule)
+	    .left(0)
+	    .width(5)
+	    .strokeStyle("#000")
+	  .anchor("left").add(pv.Label)
+	    .text(function(d) { return d.toFixed(1); });
+	vis.render();
+}
\ No newline at end of file
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/g.bar.js b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/g.bar.js
new file mode 100644
index 0000000..9ff5e5b
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/g.bar.js
@@ -0,0 +1,386 @@
+/*!
+ * g.Raphael 0.4.1 - Charting library, based on Raphaël
+ *
+ * Copyright (c) 2009 Dmitry Baranovskiy (http://g.raphaeljs.com)
+ * Licensed under the MIT (http://www.opensource.org/licenses/mit-license.php) license.
+ */
+Raphael.fn.g.barchart = function (x, y, width, height, values, opts) {
+    opts = opts || {};
+    var type = {round: "round", sharp: "sharp", soft: "soft"}[opts.type] || "square",
+        gutter = parseFloat(opts.gutter || "20%"),
+        chart = this.set(),
+        bars = this.set(),
+        covers = this.set(),
+        covers2 = this.set(),
+        total = Math.max.apply(Math, values),
+        stacktotal = [],
+        paper = this,
+        multi = 0,
+        colors = opts.colors || this.g.colors,
+        len = values.length;
+    if (this.raphael.is(values[0], "array")) {
+        total = [];
+        multi = len;
+        len = 0;
+        for (var i = values.length; i--;) {
+            bars.push(this.set());
+            total.push(Math.max.apply(Math, values[i]));
+            len = Math.max(len, values[i].length);
+        }
+        if (opts.stacked) {
+            for (var i = len; i--;) {
+                var tot = 0;
+                for (var j = values.length; j--;) {
+                    tot +=+ values[j][i] || 0;
+                }
+                stacktotal.push(tot);
+            }
+        }
+        for (var i = values.length; i--;) {
+            if (values[i].length < len) {
+                for (var j = len; j--;) {
+                    values[i].push(0);
+                }
+            }
+        }
+        total = Math.max.apply(Math, opts.stacked ? stacktotal : total);
+    }
+    
+    total = (opts.to) || total;
+    var barwidth = width / (len * (100 + gutter) + gutter) * 100,
+        barhgutter = barwidth * gutter / 100,
+        barvgutter = opts.vgutter == null ? 20 : opts.vgutter,
+        stack = [],
+        X = x + barhgutter,
+        Y = (height - 2 * barvgutter) / total;
+    if (!opts.stretch) {
+        barhgutter = Math.round(barhgutter);
+        barwidth = Math.floor(barwidth);
+    }
+    !opts.stacked && (barwidth /= multi || 1);
+    for (var i = 0; i < len; i++) {
+        stack = [];
+        for (var j = 0; j < (multi || 1); j++) {
+            var h = Math.round((multi ? values[j][i] : values[i]) * Y),
+                top = y + height - barvgutter - h,
+                bar = this.g.finger(Math.round(X + barwidth / 2), top + h, barwidth, h, true, type).attr({stroke: "none", fill: colors[multi ? j : i]});
+            if (multi) {
+                bars[j].push(bar);
+            } else {
+                bars.push(bar);
+            }
+            bar.y = top;
+            bar.x = Math.round(X + barwidth / 2);
+            bar.w = barwidth;
+            bar.h = h;
+            bar.value = multi ? values[j][i] : values[i];
+            if (!opts.stacked) {
+                X += barwidth;
+            } else {
+                stack.push(bar);
+            }
+        }
+        if (opts.stacked) {
+            var cvr;
+            covers2.push(cvr = this.rect(stack[0].x - stack[0].w / 2, y, barwidth, height).attr(this.g.shim));
+            cvr.bars = this.set();
+            var size = 0;
+            for (var s = stack.length; s--;) {
+                stack[s].toFront();
+            }
+            for (var s = 0, ss = stack.length; s < ss; s++) {
+                var bar = stack[s],
+                    cover,
+                    h = (size + bar.value) * Y,
+                    path = this.g.finger(bar.x, y + height - barvgutter - !!size * .5, barwidth, h, true, type, 1);
+                cvr.bars.push(bar);
+                size && bar.attr({path: path});
+                bar.h = h;
+                bar.y = y + height - barvgutter - !!size * .5 - h;
+                covers.push(cover = this.rect(bar.x - bar.w / 2, bar.y, barwidth, bar.value * Y).attr(this.g.shim));
+                cover.bar = bar;
+                cover.value = bar.value;
+                size += bar.value;
+            }
+            X += barwidth;
+        }
+        X += barhgutter;
+    }
+    covers2.toFront();
+    X = x + barhgutter;
+    if (!opts.stacked) {
+        for (var i = 0; i < len; i++) {
+            for (var j = 0; j < (multi || 1); j++) {
+                var cover;
+                covers.push(cover = this.rect(Math.round(X), y + barvgutter, barwidth, height - barvgutter).attr(this.g.shim));
+                cover.bar = multi ? bars[j][i] : bars[i];
+                cover.value = cover.bar.value;
+                X += barwidth;
+            }
+            X += barhgutter;
+        }
+    }
+    chart.label = function (labels, isBottom) {
+        labels = labels || [];
+        this.labels = paper.set();
+        var L, l = -Infinity;
+        if (opts.stacked) {
+            for (var i = 0; i < len; i++) {
+                var tot = 0;
+                for (var j = 0; j < (multi || 1); j++) {
+                    tot += multi ? values[j][i] : values[i];
+                    if (j == multi - 1) {
+                        var label = paper.g.labelise(labels[i], tot, total);
+                        L = paper.g.text(bars[i * (multi || 1) + j].x, y + height - barvgutter / 2, label).insertBefore(covers[i * (multi || 1) + j]);
+                        var bb = L.getBBox();
+                        if (bb.x - 7 < l) {
+                            L.remove();
+                        } else {
+                            this.labels.push(L);
+                            l = bb.x + bb.width;
+                        }
+                    }
+                }
+            }
+        } else {
+            for (var i = 0; i < len; i++) {
+                for (var j = 0; j < (multi || 1); j++) {
+                    var label = paper.g.labelise(multi ? labels[j] && labels[j][i] : labels[i], multi ? values[j][i] : values[i], total);
+                    L = paper.g.text(bars[i * (multi || 1) + j].x, isBottom ? y + height - barvgutter / 2 : bars[i * (multi || 1) + j].y - 10, label).insertBefore(covers[i * (multi || 1) + j]);
+                    var bb = L.getBBox();
+                    if (bb.x - 7 < l) {
+                        L.remove();
+                    } else {
+                        this.labels.push(L);
+                        l = bb.x + bb.width;
+                    }
+                }
+            }
+        }
+        return this;
+    };
+    chart.hover = function (fin, fout) {
+        covers2.hide();
+        covers.show();
+        covers.mouseover(fin).mouseout(fout);
+        return this;
+    };
+    chart.hoverColumn = function (fin, fout) {
+        covers.hide();
+        covers2.show();
+        fout = fout || function () {};
+        covers2.mouseover(fin).mouseout(fout);
+        return this;
+    };
+    chart.click = function (f) {
+        covers2.hide();
+        covers.show();
+        covers.click(f);
+        return this;
+    };
+    chart.each = function (f) {
+        if (!Raphael.is(f, "function")) {
+            return this;
+        }
+        for (var i = covers.length; i--;) {
+            f.call(covers[i]);
+        }
+        return this;
+    };
+    chart.eachColumn = function (f) {
+        if (!Raphael.is(f, "function")) {
+            return this;
+        }
+        for (var i = covers2.length; i--;) {
+            f.call(covers2[i]);
+        }
+        return this;
+    };
+    chart.clickColumn = function (f) {
+        covers.hide();
+        covers2.show();
+        covers2.click(f);
+        return this;
+    };
+    chart.push(bars, covers, covers2);
+    chart.bars = bars;
+    chart.covers = covers;
+    return chart;
+};
+Raphael.fn.g.hbarchart = function (x, y, width, height, values, opts) {
+    opts = opts || {};
+    var type = {round: "round", sharp: "sharp", soft: "soft"}[opts.type] || "square",
+        gutter = parseFloat(opts.gutter || "20%"),
+        chart = this.set(),
+        bars = this.set(),
+        covers = this.set(),
+        covers2 = this.set(),
+        total = Math.max.apply(Math, values),
+        stacktotal = [],
+        paper = this,
+        multi = 0,
+        colors = opts.colors || this.g.colors,
+        len = values.length;
+    if (this.raphael.is(values[0], "array")) {
+        total = [];
+        multi = len;
+        len = 0;
+        for (var i = values.length; i--;) {
+            bars.push(this.set());
+            total.push(Math.max.apply(Math, values[i]));
+            len = Math.max(len, values[i].length);
+        }
+        if (opts.stacked) {
+            for (var i = len; i--;) {
+                var tot = 0;
+                for (var j = values.length; j--;) {
+                    tot +=+ values[j][i] || 0;
+                }
+                stacktotal.push(tot);
+            }
+        }
+        for (var i = values.length; i--;) {
+            if (values[i].length < len) {
+                for (var j = len; j--;) {
+                    values[i].push(0);
+                }
+            }
+        }
+        total = Math.max.apply(Math, opts.stacked ? stacktotal : total);
+    }
+    
+    total = (opts.to) || total;
+    var barheight = Math.floor(height / (len * (100 + gutter) + gutter) * 100),
+        bargutter = Math.floor(barheight * gutter / 100),
+        stack = [],
+        Y = y + bargutter,
+        X = (width - 1) / total;
+    !opts.stacked && (barheight /= multi || 1);
+    for (var i = 0; i < len; i++) {
+        stack = [];
+        for (var j = 0; j < (multi || 1); j++) {
+            var val = multi ? values[j][i] : values[i],
+                bar = this.g.finger(x, Y + barheight / 2, Math.round(val * X), barheight - 1, false, type).attr({stroke: "none", fill: colors[multi ? j : i]});
+            if (multi) {
+                bars[j].push(bar);
+            } else {
+                bars.push(bar);
+            }
+            bar.x = x + Math.round(val * X);
+            bar.y = Y + barheight / 2;
+            bar.w = Math.round(val * X);
+            bar.h = barheight;
+            bar.value = +val;
+            if (!opts.stacked) {
+                Y += barheight;
+            } else {
+                stack.push(bar);
+            }
+        }
+        if (opts.stacked) {
+            var cvr = this.rect(x, stack[0].y - stack[0].h / 2, width, barheight).attr(this.g.shim);
+            covers2.push(cvr);
+            cvr.bars = this.set();
+            var size = 0;
+            for (var s = stack.length; s--;) {
+                stack[s].toFront();
+            }
+            for (var s = 0, ss = stack.length; s < ss; s++) {
+                var bar = stack[s],
+                    cover,
+                    val = Math.round((size + bar.value) * X),
+                    path = this.g.finger(x, bar.y, val, barheight - 1, false, type, 1);
+                cvr.bars.push(bar);
+                size && bar.attr({path: path});
+                bar.w = val;
+                bar.x = x + val;
+                covers.push(cover = this.rect(x + size * X, bar.y - bar.h / 2, bar.value * X, barheight).attr(this.g.shim));
+                cover.bar = bar;
+                size += bar.value;
+            }
+            Y += barheight;
+        }
+        Y += bargutter;
+    }
+    covers2.toFront();
+    Y = y + bargutter;
+    if (!opts.stacked) {
+        for (var i = 0; i < len; i++) {
+            for (var j = 0; j < (multi || 1); j++) {
+                var cover = this.rect(x, Y, width, barheight).attr(this.g.shim);
+                covers.push(cover);
+                cover.bar = multi ? bars[j][i] : bars[i];
+                cover.value = cover.bar.value;
+                Y += barheight;
+            }
+            Y += bargutter;
+        }
+    }
+    chart.label = function (labels, isRight) {
+        labels = labels || [];
+        this.labels = paper.set();
+        for (var i = 0; i < len; i++) {
+            for (var j = 0; j < multi; j++) {
+                var  label = paper.g.labelise(multi ? labels[j] && labels[j][i] : labels[i], multi ? values[j][i] : values[i], total);
+                var X = isRight ? bars[i * (multi || 1) + j].x - barheight / 2 + 3 : x + 5,
+                    A = isRight ? "end" : "start",
+                    L;
+                this.labels.push(L = paper.g.text(X, bars[i * (multi || 1) + j].y, label).attr({"text-anchor": A}).insertBefore(covers[0]));
+                if (L.getBBox().x < x + 5) {
+                    L.attr({x: x + 5, "text-anchor": "start"});
+                } else {
+                    bars[i * (multi || 1) + j].label = L;
+                }
+            }
+        }
+        return this;
+    };
+    chart.hover = function (fin, fout) {
+        covers2.hide();
+        covers.show();
+        fout = fout || function () {};
+        covers.mouseover(fin).mouseout(fout);
+        return this;
+    };
+    chart.hoverColumn = function (fin, fout) {
+        covers.hide();
+        covers2.show();
+        fout = fout || function () {};
+        covers2.mouseover(fin).mouseout(fout);
+        return this;
+    };
+    chart.each = function (f) {
+        if (!Raphael.is(f, "function")) {
+            return this;
+        }
+        for (var i = covers.length; i--;) {
+            f.call(covers[i]);
+        }
+        return this;
+    };
+    chart.eachColumn = function (f) {
+        if (!Raphael.is(f, "function")) {
+            return this;
+        }
+        for (var i = covers2.length; i--;) {
+            f.call(covers2[i]);
+        }
+        return this;
+    };
+    chart.click = function (f) {
+        covers2.hide();
+        covers.show();
+        covers.click(f);
+        return this;
+    };
+    chart.clickColumn = function (f) {
+        covers.hide();
+        covers2.show();
+        covers2.click(f);
+        return this;
+    };
+    chart.push(bars, covers, covers2);
+    chart.bars = bars;
+    chart.covers = covers;
+    return chart;
+};
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/jquery-1.4.2.min.js b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/jquery-1.4.2.min.js
new file mode 100644
index 0000000..7c24308
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/jquery-1.4.2.min.js
@@ -0,0 +1,154 @@
+/*!
+ * jQuery JavaScript Library v1.4.2
+ * http://jquery.com/
+ *
+ * Copyright 2010, John Resig
+ * Dual licensed under the MIT or GPL Version 2 licenses.
+ * http://jquery.org/license
+ *
+ * Includes Sizzle.js
+ * http://sizzlejs.com/
+ * Copyright 2010, The Dojo Foundation
+ * Released under the MIT, BSD, and GPL Licenses.
+ *
+ * Date: Sat Feb 13 22:33:48 2010 -0500
+ */
[minified jQuery 1.4.2 source body omitted: the remaining added lines of this hunk are per-line truncated in the mailing-list archive and cannot be reproduced here]
+c.fx.stop()},stop:function(){clearInterval(W);W=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){c.style(a.elem,"opacity",a.now)},_default:function(a){if(a.elem.style&&a.elem.style[a.prop]!=null)a.elem.style[a.prop]=(a.prop==="width"||a.prop==="height"?Math.max(0,a.now):a.now)+a.unit;else a.elem[a.prop]=a.now}}});if(c.expr&&c.expr.filters)c.expr.filters.animated=function(a){return c.grep(c.timers,function(b){return a===b.elem}).length};c.fn.offset="getBoundingClien [...]
+function(a){var b=this[0];if(a)return this.each(function(e){c.offset.setOffset(this,a,e)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);var d=b.getBoundingClientRect(),f=b.ownerDocument;b=f.body;f=f.documentElement;return{top:d.top+(self.pageYOffset||c.support.boxModel&&f.scrollTop||b.scrollTop)-(f.clientTop||b.clientTop||0),left:d.left+(self.pageXOffset||c.support.boxModel&&f.scrollLeft||b.scrollLeft)-(f.clientLeft||b.clientLeft||0)}}:fun [...]
+this[0];if(a)return this.each(function(r){c.offset.setOffset(this,a,r)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return c.offset.bodyOffset(b);c.offset.initialize();var d=b.offsetParent,f=b,e=b.ownerDocument,j,i=e.documentElement,o=e.body;f=(e=e.defaultView)?e.getComputedStyle(b,null):b.currentStyle;for(var k=b.offsetTop,n=b.offsetLeft;(b=b.parentNode)&&b!==o&&b!==i;){if(c.offset.supportsFixedPosition&&f.position==="fixed")break;j=e?e.getComputedStyle(b,null):b.cu [...]
+k-=b.scrollTop;n-=b.scrollLeft;if(b===d){k+=b.offsetTop;n+=b.offsetLeft;if(c.offset.doesNotAddBorder&&!(c.offset.doesAddBorderForTableAndCells&&/^t(able|d|h)$/i.test(b.nodeName))){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=d;d=b.offsetParent}if(c.offset.subtractsBorderForOverflowNotVisible&&j.overflow!=="visible"){k+=parseFloat(j.borderTopWidth)||0;n+=parseFloat(j.borderLeftWidth)||0}f=j}if(f.position==="relative"||f.position==="static"){k+=o.offsetTop;n+=o. [...]
+f.position==="fixed"){k+=Math.max(i.scrollTop,o.scrollTop);n+=Math.max(i.scrollLeft,o.scrollLeft)}return{top:k,left:n}};c.offset={initialize:function(){var a=s.body,b=s.createElement("div"),d,f,e,j=parseFloat(c.curCSS(a,"marginTop",true))||0;c.extend(b.style,{position:"absolute",top:0,left:0,margin:0,border:0,width:"1px",height:"1px",visibility:"hidden"});b.innerHTML="<div style='position:absolute;top:0;left:0;margin:0;border:5px solid #000;padding:0;width:1px;height:1px;'><div></div></d [...]
+a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j [...]
+c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b= [...]
+d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{ [...]
+f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pag [...]
+"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return" [...]
+e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window);
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/jquery.tipsy.js b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/jquery.tipsy.js
new file mode 100644
index 0000000..f8c0863
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/jquery.tipsy.js
@@ -0,0 +1,125 @@
+/*!
+ * The MIT License
+ * 
+ * Copyright (c) 2008 Jason Frame (jason@onehackoranother.com)
+ * 
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ * 
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ * 
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+(function($) {
+    $.fn.tipsy = function(options) {
+        options = $.extend({}, $.fn.tipsy.defaults, options);
+        
+        return this.each(function() {
+            
+            var opts = $.fn.tipsy.elementOptions(this, options);
+            
+            $(this).hover(function() {
+
+                $.data(this, 'cancel.tipsy', true);
+
+                var tip = $.data(this, 'active.tipsy');
+                if (!tip) {
+                    tip = $('<div class="tipsy"><div class="tipsy-inner"/></div>');
+                    tip.css({position: 'absolute', zIndex: 100000});
+                    $.data(this, 'active.tipsy', tip);
+                }
+
+                if ($(this).attr('title') || typeof($(this).attr('original-title')) != 'string') {
+                    $(this).attr('original-title', $(this).attr('title') || '').removeAttr('title');
+                }
+                var title;
+                if (typeof opts.title == 'string') {
+                    title = $(this).attr(opts.title == 'title' ? 'original-title' : opts.title);
+                } else if (typeof opts.title == 'function') {
+                    title = opts.title.call(this);
+                }
+
+                tip.find('.tipsy-inner')[opts.html ? 'html' : 'text'](title || opts.fallback);
+
+                var pos = $.extend({}, $(this).offset(), {width: this.offsetWidth, height: this.offsetHeight});
+                tip.get(0).className = 'tipsy'; // reset classname in case of dynamic gravity
+                tip.remove().css({top: 0, left: 0, visibility: 'hidden', display: 'block'}).appendTo(document.body);
+                var actualWidth = tip[0].offsetWidth, actualHeight = tip[0].offsetHeight;
+                var gravity = (typeof opts.gravity == 'function') ? opts.gravity.call(this) : opts.gravity;
+
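+                // gravity names the tooltip edge that points at the element:
+                // 'n' places the tip below the element, 's' above it,
+                // 'e' to its left, and 'w' to its right.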
+                switch (gravity.charAt(0)) {
+                    case 'n':
+                        tip.css({top: pos.top + pos.height, left: pos.left + pos.width / 2 - actualWidth / 2}).addClass('tipsy-north');
+                        break;
+                    case 's':
+                        tip.css({top: pos.top - actualHeight, left: pos.left + pos.width / 2 - actualWidth / 2}).addClass('tipsy-south');
+                        break;
+                    case 'e':
+                        tip.css({top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left - actualWidth}).addClass('tipsy-east');
+                        break;
+                    case 'w':
+                        tip.css({top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left + pos.width}).addClass('tipsy-west');
+                        break;
+                }
+
+                if (opts.fade) {
+                    tip.css({opacity: 0, display: 'block', visibility: 'visible'}).animate({opacity: 0.8});
+                } else {
+                    tip.css({visibility: 'visible'});
+                }
+
+            }, function() {
+                $.data(this, 'cancel.tipsy', false);
+                var self = this;
+                setTimeout(function() {
+                    // inside setTimeout, 'this' is not the hovered element;
+                    // test the captured 'self', matching the lookup below
+                    if ($.data(self, 'cancel.tipsy')) return;
+                    var tip = $.data(self, 'active.tipsy');
+                    if (opts.fade) {
+                        tip.stop().fadeOut(function() { $(this).remove(); });
+                    } else {
+                        tip.remove();
+                    }
+                }, 100);
+
+            });
+            
+        });
+        
+    };
+    
+    // Overwrite this method to provide options on a per-element basis.
+    // For example, you could store the gravity in a 'tipsy-gravity' attribute:
+    // return $.extend({}, options, {gravity: $(ele).attr('tipsy-gravity') || 'n' });
+    // (remember - do not modify 'options' in place!)
+    $.fn.tipsy.elementOptions = function(ele, options) {
+        return $.metadata ? $.extend({}, options, $(ele).metadata()) : options;
+    };
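+    
+    // A minimal sketch of such an override, completing the example in the
+    // comment above ('tipsy-gravity' is an illustrative attribute name);
+    // left commented out so the stock behavior is unchanged:
+    //
+    // $.fn.tipsy.elementOptions = function(ele, options) {
+    //     return $.extend({}, options, {gravity: $(ele).attr('tipsy-gravity') || 'n'});
+    // };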
+    
+    $.fn.tipsy.defaults = {
+        fade: false,
+        fallback: '',
+        gravity: 'n',
+        html: false,
+        title: 'title'
+    };
+    
+    $.fn.tipsy.autoNS = function() {
+        return $(this).offset().top > ($(document).scrollTop() + $(window).height() / 2) ? 's' : 'n';
+    };
+    
+    $.fn.tipsy.autoWE = function() {
+        return $(this).offset().left > ($(document).scrollLeft() + $(window).width() / 2) ? 'e' : 'w';
+    };
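+    
+    // Usage sketch (the selector is an assumed example, not part of this file):
+    // 'gravity' also accepts a function, so the auto helpers plug straight in:
+    //
+    // $('a[title]').tipsy({gravity: $.fn.tipsy.autoNS, fade: true});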
+    
+})(jQuery);
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/protovis-r3.2.js b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/protovis-r3.2.js
new file mode 100644
index 0000000..bb282f0
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/protovis-r3.2.js
@@ -0,0 +1,304 @@
+/* Copyright (c) 2010, Stanford Visualization Group
+ * All rights reserved.
+ * 
+ * Redistribution and use in source and binary forms, with or without modification,
+ * are permitted provided that the following conditions are met:
+ * 
+ *   * Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ * 
+ *   * Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ * 
+ *   * Neither the name of Stanford University nor the names of its contributors
+ *     may be used to endorse or promote products derived from this software
+ *     without specific prior written permission.
+ * 
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+var a;if(!Array.prototype.map)Array.prototype.map=function(b,c){for(var d=this.length,f=new Array(d),g=0;g<d;g++)if(g in this)f[g]=b.call(c,this[g],g,this);return f};if(!Array.prototype.filter)Array.prototype.filter=function(b,c){for(var d=this.length,f=[],g=0;g<d;g++)if(g in this){var h=this[g];b.call(c,h,g,this)&&f.push(h)}return f};if(!Array.prototype.forEach)Array.prototype.forEach=function(b,c){for(var d=this.length>>>0,f=0;f<d;f++)f in this&&b.call(c,this[f],f,this)};
+if(!Array.prototype.reduce)Array.prototype.reduce=function(b,c){var d=this.length;if(!d&&arguments.length==1)throw new Error("reduce: empty array, no initial value");var f=0;if(arguments.length<2)for(;;){if(f in this){c=this[f++];break}if(++f>=d)throw new Error("reduce: no values, no initial value");}for(;f<d;f++)if(f in this)c=b(c,this[f],f,this);return c};var pv={};pv.version={major:3,minor:2};pv.identity=function(b){return b};pv.index=function(){return this.index};pv.child=function(){ [...]
+pv.parent=function(){return this.parent.index};pv.extend=function(b){function c(){}c.prototype=b.prototype||b;return new c};
+try{eval("pv.parse = function(x) x;")}catch(e){pv.parse=function(b){for(var c=new RegExp("function\\s*(\\b\\w+)?\\s*\\([^)]*\\)\\s*","mg"),d,f,g=0,h="";d=c.exec(b);){d=d.index+d[0].length;if(b.charAt(d)!="{"){h+=b.substring(g,d)+"{return ";g=d;for(var i=0;i>=0&&d<b.length;d++){var j=b.charAt(d);switch(j){case '"':case "'":for(;++d<b.length&&(f=b.charAt(d))!=j;)f=="\\"&&d++;break;case "[":case "(":i++;break;case "]":case ")":i--;break;case ";":case ",":i==0&&i--;break}}h+=pv.parse(b.subst [...]
+";}";g=d}c.lastIndex=d}h+=b.substring(g);return h}}pv.css=function(b,c){return window.getComputedStyle?window.getComputedStyle(b,null).getPropertyValue(c):b.currentStyle[c]};pv.error=function(b){typeof console=="undefined"?alert(b):console.error(b)};pv.listen=function(b,c,d){d=pv.listener(d);return b.addEventListener?b.addEventListener(c,d,false):b.attachEvent("on"+c,d)};pv.listener=function(b){return b.$listener||(b.$listener=function(c){try{pv.event=c;return b.call(this,c)}finally{dele [...]
+pv.ancestor=function(b,c){for(;c;){if(c==b)return true;c=c.parentNode}return false};pv.id=function(){var b=1;return function(){return b++}}();pv.functor=function(b){return typeof b=="function"?b:function(){return b}};pv.listen(window,"load",function(){for(pv.$={i:0,x:document.getElementsByTagName("script")};pv.$.i<pv.$.x.length;pv.$.i++){pv.$.s=pv.$.x[pv.$.i];if(pv.$.s.type=="text/javascript+protovis")try{window.eval(pv.parse(pv.$.s.text))}catch(b){pv.error(b)}}delete pv.$});pv.Format={};
+pv.Format.re=function(b){return b.replace(/[\\\^\$\*\+\?\[\]\(\)\.\{\}]/g,"\\$&")};pv.Format.pad=function(b,c,d){c=c-String(d).length;return c<1?d:(new Array(c+1)).join(b)+d};
+pv.Format.date=function(b){function c(f){return b.replace(/%[a-zA-Z0-9]/g,function(g){switch(g){case "%a":return["Sun","Mon","Tue","Wed","Thu","Fri","Sat"][f.getDay()];case "%A":return["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"][f.getDay()];case "%h":case "%b":return["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"][f.getMonth()];case "%B":return["January","February","March","April","May","June","July","August","September","October","Nov [...]
+case "%c":return f.toLocaleString();case "%C":return d("0",2,Math.floor(f.getFullYear()/100)%100);case "%d":return d("0",2,f.getDate());case "%x":case "%D":return d("0",2,f.getMonth()+1)+"/"+d("0",2,f.getDate())+"/"+d("0",2,f.getFullYear()%100);case "%e":return d(" ",2,f.getDate());case "%H":return d("0",2,f.getHours());case "%I":return(g=f.getHours()%12)?d("0",2,g):12;case "%m":return d("0",2,f.getMonth()+1);case "%M":return d("0",2,f.getMinutes());case "%n":return"\n";case "%p":return  [...]
+12?"AM":"PM";case "%T":case "%X":case "%r":g=f.getHours()%12;return(g?d("0",2,g):12)+":"+d("0",2,f.getMinutes())+":"+d("0",2,f.getSeconds())+" "+(f.getHours()<12?"AM":"PM");case "%R":return d("0",2,f.getHours())+":"+d("0",2,f.getMinutes());case "%S":return d("0",2,f.getSeconds());case "%Q":return d("0",3,f.getMilliseconds());case "%t":return"\t";case "%u":return(g=f.getDay())?g:1;case "%w":return f.getDay();case "%y":return d("0",2,f.getFullYear()%100);case "%Y":return f.getFullYear();ca [...]
+var d=pv.Format.pad;c.format=c;c.parse=function(f){var g=1970,h=0,i=1,j=0,l=0,k=0,q=[function(){}],o=pv.Format.re(b).replace(/%[a-zA-Z0-9]/g,function(n){switch(n){case "%b":q.push(function(m){h={Jan:0,Feb:1,Mar:2,Apr:3,May:4,Jun:5,Jul:6,Aug:7,Sep:8,Oct:9,Nov:10,Dec:11}[m]});return"([A-Za-z]+)";case "%h":case "%B":q.push(function(m){h={January:0,February:1,March:2,April:3,May:4,June:5,July:6,August:7,September:8,October:9,November:10,December:11}[m]});return"([A-Za-z]+)";case "%e":case "% [...]
+m});return"([0-9]+)";case "%I":case "%H":q.push(function(m){j=m});return"([0-9]+)";case "%m":q.push(function(m){h=m-1});return"([0-9]+)";case "%M":q.push(function(m){l=m});return"([0-9]+)";case "%p":q.push(function(m){if(j==12){if(m=="am")j=0}else if(m=="pm")j=Number(j)+12});return"(am|pm)";case "%S":q.push(function(m){k=m});return"([0-9]+)";case "%y":q.push(function(m){m=Number(m);g=m+(0<=m&&m<69?2E3:m>=69&&m<100?1900:0)});return"([0-9]+)";case "%Y":q.push(function(m){g=m});return"([0-9 [...]
+return"%"}return n});(f=f.match(o))&&f.forEach(function(n,m){q[m](n)});return new Date(g,h,i,j,l,k)};return c};
+pv.Format.time=function(b){function c(f){f=Number(f);switch(b){case "short":if(f>=31536E6)return(f/31536E6).toFixed(1)+" years";else if(f>=6048E5)return(f/6048E5).toFixed(1)+" weeks";else if(f>=864E5)return(f/864E5).toFixed(1)+" days";else if(f>=36E5)return(f/36E5).toFixed(1)+" hours";else if(f>=6E4)return(f/6E4).toFixed(1)+" minutes";return(f/1E3).toFixed(1)+" seconds";case "long":var g=[],h=f%36E5/6E4>>0;g.push(d("0",2,f%6E4/1E3>>0));if(f>=36E5){var i=f%864E5/36E5>>0;g.push(d("0",2,h)) [...]
+2,i));g.push(Math.floor(f/864E5).toFixed())}else g.push(i.toFixed())}else g.push(h.toFixed());return g.reverse().join(":")}}var d=pv.Format.pad;c.format=c;c.parse=function(f){switch(b){case "short":for(var g=/([0-9,.]+)\s*([a-z]+)/g,h,i=0;h=g.exec(f);){var j=parseFloat(h[0].replace(",","")),l=0;switch(h[2].toLowerCase()){case "year":case "years":l=31536E6;break;case "week":case "weeks":l=6048E5;break;case "day":case "days":l=864E5;break;case "hour":case "hours":l=36E5;break;case "minute" [...]
+6E4;break;case "second":case "seconds":l=1E3;break}i+=j*l}return i;case "long":h=f.replace(",","").split(":").reverse();i=0;if(h.length)i+=parseFloat(h[0])*1E3;if(h.length>1)i+=parseFloat(h[1])*6E4;if(h.length>2)i+=parseFloat(h[2])*36E5;if(h.length>3)i+=parseFloat(h[3])*864E5;return i}};return c};
+pv.Format.number=function(){function b(n){if(Infinity>h)n=Math.round(n*i)/i;var m=String(Math.abs(n)).split("."),r=m[0];n=n<0?"-":"";if(r.length>d)r=r.substring(r.length-d);if(k&&r.length<c)r=n+(new Array(c-r.length+1)).join(j)+r;if(r.length>3)r=r.replace(/\B(?=(?:\d{3})+(?!\d))/g,o);if(!k&&r.length<f)r=(new Array(f-r.length+1)).join(j)+n+r;m[0]=r;r=m[1]||"";if(r.length<g)m[1]=r+(new Array(g-r.length+1)).join(l);return m.join(q)}var c=0,d=Infinity,f=0,g=0,h=0,i=1,j="0",l="0",k=true,q="." [...]
+b;b.parse=function(n){var m=pv.Format.re;n=String(n).replace(new RegExp("^("+m(j)+")*"),"").replace(new RegExp("("+m(l)+")*$"),"").split(q);m=n[0].replace(new RegExp(m(o),"g"),"");if(m.length>d)m=m.substring(m.length-d);n=n[1]?Number("0."+n[1]):0;if(Infinity>h)n=Math.round(n*i)/i;return Math.round(m)+n};b.integerDigits=function(n,m){if(arguments.length){c=Number(n);d=arguments.length>1?Number(m):c;f=c+Math.floor(c/3)*o.length;return this}return[c,d]};b.fractionDigits=function(n,m){if(arg [...]
+Number(n);h=arguments.length>1?Number(m):g;i=Math.pow(10,h);return this}return[g,h]};b.integerPad=function(n){if(arguments.length){j=String(n);k=/\d/.test(j);return this}return j};b.fractionPad=function(n){if(arguments.length){l=String(n);return this}return l};b.decimal=function(n){if(arguments.length){q=String(n);return this}return q};b.group=function(n){if(arguments.length){o=n?String(n):"";f=c+Math.floor(c/3)*o.length;return this}return o};return b};
+pv.map=function(b,c){var d={};return c?b.map(function(f,g){d.index=g;return c.call(d,f)}):b.slice()};pv.repeat=function(b,c){if(arguments.length==1)c=2;return pv.blend(pv.range(c).map(function(){return b}))};pv.cross=function(b,c){for(var d=[],f=0,g=b.length,h=c.length;f<g;f++)for(var i=0,j=b[f];i<h;i++)d.push([j,c[i]]);return d};pv.blend=function(b){return Array.prototype.concat.apply([],b)};
+pv.transpose=function(b){var c=b.length,d=pv.max(b,function(i){return i.length});if(d>c){b.length=d;for(var f=c;f<d;f++)b[f]=new Array(c);for(f=0;f<c;f++)for(var g=f+1;g<d;g++){var h=b[f][g];b[f][g]=b[g][f];b[g][f]=h}}else{for(f=0;f<d;f++)b[f].length=c;for(f=0;f<c;f++)for(g=0;g<f;g++){h=b[f][g];b[f][g]=b[g][f];b[g][f]=h}}b.length=d;for(f=0;f<d;f++)b[f].length=c;return b};pv.normalize=function(b,c){b=pv.map(b,c);c=pv.sum(b);for(var d=0;d<b.length;d++)b[d]/=c;return b};
+pv.permute=function(b,c,d){if(!d)d=pv.identity;var f=new Array(c.length),g={};c.forEach(function(h,i){g.index=h;f[i]=d.call(g,b[h])});return f};pv.numerate=function(b,c){if(!c)c=pv.identity;var d={},f={};b.forEach(function(g,h){f.index=h;d[c.call(f,g)]=h});return d};pv.uniq=function(b,c){if(!c)c=pv.identity;var d={},f=[],g={},h;b.forEach(function(i,j){g.index=j;h=c.call(g,i);h in d||(d[h]=f.push(h))});return f};pv.naturalOrder=function(b,c){return b<c?-1:b>c?1:0};
+pv.reverseOrder=function(b,c){return c<b?-1:c>b?1:0};pv.search=function(b,c,d){if(!d)d=pv.identity;for(var f=0,g=b.length-1;f<=g;){var h=f+g>>1,i=d(b[h]);if(i<c)f=h+1;else if(i>c)g=h-1;else return h}return-f-1};pv.search.index=function(b,c,d){b=pv.search(b,c,d);return b<0?-b-1:b};
+pv.range=function(b,c,d){if(arguments.length==1){c=b;b=0}if(d==undefined)d=1;if((c-b)/d==Infinity)throw new Error("range must be finite");var f=[],g=0,h;if(d<0)for(;(h=b+d*g++)>c;)f.push(h);else for(;(h=b+d*g++)<c;)f.push(h);return f};pv.random=function(b,c,d){if(arguments.length==1){c=b;b=0}if(d==undefined)d=1;return d?Math.floor(Math.random()*(c-b)/d)*d+b:Math.random()*(c-b)+b};
+pv.sum=function(b,c){var d={};return b.reduce(c?function(f,g,h){d.index=h;return f+c.call(d,g)}:function(f,g){return f+g},0)};pv.max=function(b,c){if(c==pv.index)return b.length-1;return Math.max.apply(null,c?pv.map(b,c):b)};pv.max.index=function(b,c){if(!b.length)return-1;if(c==pv.index)return b.length-1;if(!c)c=pv.identity;for(var d=0,f=-Infinity,g={},h=0;h<b.length;h++){g.index=h;var i=c.call(g,b[h]);if(i>f){f=i;d=h}}return d};
+pv.min=function(b,c){if(c==pv.index)return 0;return Math.min.apply(null,c?pv.map(b,c):b)};pv.min.index=function(b,c){if(!b.length)return-1;if(c==pv.index)return 0;if(!c)c=pv.identity;for(var d=0,f=Infinity,g={},h=0;h<b.length;h++){g.index=h;var i=c.call(g,b[h]);if(i<f){f=i;d=h}}return d};pv.mean=function(b,c){return pv.sum(b,c)/b.length};
+pv.median=function(b,c){if(c==pv.index)return(b.length-1)/2;b=pv.map(b,c).sort(pv.naturalOrder);if(b.length%2)return b[Math.floor(b.length/2)];c=b.length/2;return(b[c-1]+b[c])/2};pv.variance=function(b,c){if(b.length<1)return NaN;if(b.length==1)return 0;var d=pv.mean(b,c),f=0,g={};if(!c)c=pv.identity;for(var h=0;h<b.length;h++){g.index=h;var i=c.call(g,b[h])-d;f+=i*i}return f};pv.deviation=function(b,c){return Math.sqrt(pv.variance(b,c)/(b.length-1))};pv.log=function(b,c){return Math.log [...]
+pv.logSymmetric=function(b,c){return b==0?0:b<0?-pv.log(-b,c):pv.log(b,c)};pv.logAdjusted=function(b,c){if(!isFinite(b))return b;var d=b<0;if(b<c)b+=(c-b)/c;return d?-pv.log(b,c):pv.log(b,c)};pv.logFloor=function(b,c){return b>0?Math.pow(c,Math.floor(pv.log(b,c))):-Math.pow(c,-Math.floor(-pv.log(-b,c)))};pv.logCeil=function(b,c){return b>0?Math.pow(c,Math.ceil(pv.log(b,c))):-Math.pow(c,-Math.ceil(-pv.log(-b,c)))};
+(function(){var b=Math.PI/180,c=180/Math.PI;pv.radians=function(d){return b*d};pv.degrees=function(d){return c*d}})();pv.keys=function(b){var c=[];for(var d in b)c.push(d);return c};pv.entries=function(b){var c=[];for(var d in b)c.push({key:d,value:b[d]});return c};pv.values=function(b){var c=[];for(var d in b)c.push(b[d]);return c};pv.dict=function(b,c){for(var d={},f={},g=0;g<b.length;g++)if(g in b){var h=b[g];f.index=g;d[h]=c.call(f,h)}return d};pv.dom=function(b){return new pv.Dom(b)};
+pv.Dom=function(b){this.$map=b};pv.Dom.prototype.$leaf=function(b){return typeof b!="object"};pv.Dom.prototype.leaf=function(b){if(arguments.length){this.$leaf=b;return this}return this.$leaf};pv.Dom.prototype.root=function(b){function c(g){var h=new pv.Dom.Node;for(var i in g){var j=g[i];h.appendChild(d(j)?new pv.Dom.Node(j):c(j)).nodeName=i}return h}var d=this.$leaf,f=c(this.$map);f.nodeName=b;return f};pv.Dom.prototype.nodes=function(){return this.root().nodes()};
+pv.Dom.Node=function(b){this.nodeValue=b;this.childNodes=[]};a=pv.Dom.Node.prototype;a.parentNode=null;a.firstChild=null;a.lastChild=null;a.previousSibling=null;a.nextSibling=null;
+a.removeChild=function(b){var c=this.childNodes.indexOf(b);if(c==-1)throw new Error("child not found");this.childNodes.splice(c,1);if(b.previousSibling)b.previousSibling.nextSibling=b.nextSibling;else this.firstChild=b.nextSibling;if(b.nextSibling)b.nextSibling.previousSibling=b.previousSibling;else this.lastChild=b.previousSibling;delete b.nextSibling;delete b.previousSibling;delete b.parentNode;return b};
+a.appendChild=function(b){b.parentNode&&b.parentNode.removeChild(b);b.parentNode=this;if(b.previousSibling=this.lastChild)this.lastChild.nextSibling=b;else this.firstChild=b;this.lastChild=b;this.childNodes.push(b);return b};
+a.insertBefore=function(b,c){if(!c)return this.appendChild(b);var d=this.childNodes.indexOf(c);if(d==-1)throw new Error("child not found");b.parentNode&&b.parentNode.removeChild(b);b.parentNode=this;b.nextSibling=c;if(b.previousSibling=c.previousSibling)c.previousSibling.nextSibling=b;else{if(c==this.lastChild)this.lastChild=b;this.firstChild=b}this.childNodes.splice(d,0,b);return b};
+a.replaceChild=function(b,c){var d=this.childNodes.indexOf(c);if(d==-1)throw new Error("child not found");b.parentNode&&b.parentNode.removeChild(b);b.parentNode=this;b.nextSibling=c.nextSibling;if(b.previousSibling=c.previousSibling)c.previousSibling.nextSibling=b;else this.firstChild=b;if(c.nextSibling)c.nextSibling.previousSibling=b;else this.lastChild=b;this.childNodes[d]=b;return c};a.visitBefore=function(b){function c(d,f){b(d,f);for(d=d.firstChild;d;d=d.nextSibling)c(d,f+1)}c(this,0)};
+a.visitAfter=function(b){function c(d,f){for(var g=d.firstChild;g;g=g.nextSibling)c(g,f+1);b(d,f)}c(this,0)};a.sort=function(b){if(this.firstChild){this.childNodes.sort(b);var c=this.firstChild=this.childNodes[0],d;delete c.previousSibling;for(var f=1;f<this.childNodes.length;f++){c.sort(b);d=this.childNodes[f];d.previousSibling=c;c=c.nextSibling=d}this.lastChild=c;delete c.nextSibling;c.sort(b)}return this};
+a.reverse=function(){var b=[];this.visitAfter(function(c){for(;c.lastChild;)b.push(c.removeChild(c.lastChild));for(var d;d=b.pop();)c.insertBefore(d,c.firstChild)});return this};a.nodes=function(){function b(d){c.push(d);d.childNodes.forEach(b)}var c=[];b(this,c);return c};
+a.toggle=function(b){if(b)return this.toggled?this.visitBefore(function(d){d.toggled&&d.toggle()}):this.visitAfter(function(d){d.toggled||d.toggle()});b=this;if(b.toggled){for(var c;c=b.toggled.pop();)b.appendChild(c);delete b.toggled}else if(b.lastChild)for(b.toggled=[];b.lastChild;)b.toggled.push(b.removeChild(b.lastChild))};pv.nodes=function(b){for(var c=new pv.Dom.Node,d=0;d<b.length;d++)c.appendChild(new pv.Dom.Node(b[d]));return c.nodes()};pv.tree=function(b){return new pv.Tree(b)};
+pv.Tree=function(b){this.array=b};pv.Tree.prototype.keys=function(b){this.k=b;return this};pv.Tree.prototype.value=function(b){this.v=b;return this};pv.Tree.prototype.map=function(){for(var b={},c={},d=0;d<this.array.length;d++){c.index=d;for(var f=this.array[d],g=this.k.call(c,f),h=b,i=0;i<g.length-1;i++)h=h[g[i]]||(h[g[i]]={});h[g[i]]=this.v?this.v.call(c,f):f}return b};pv.nest=function(b){return new pv.Nest(b)};pv.Nest=function(b){this.array=b;this.keys=[]};a=pv.Nest.prototype;
+a.key=function(b){this.keys.push(b);return this};a.sortKeys=function(b){this.keys[this.keys.length-1].order=b||pv.naturalOrder;return this};a.sortValues=function(b){this.order=b||pv.naturalOrder;return this};a.map=function(){for(var b={},c=[],d,f=0;f<this.array.length;f++){var g=this.array[f],h=b;for(d=0;d<this.keys.length-1;d++){var i=this.keys[d](g);h[i]||(h[i]={});h=h[i]}i=this.keys[d](g);if(!h[i]){d=[];c.push(d);h[i]=d}h[i].push(g)}if(this.order)for(d=0;d<c.length;d++)c[d].sort(this. [...]
+a.entries=function(){function b(d){var f=[];for(var g in d){var h=d[g];f.push({key:g,values:h instanceof Array?h:b(h)})}return f}function c(d,f){var g=this.keys[f].order;g&&d.sort(function(i,j){return g(i.key,j.key)});if(++f<this.keys.length)for(var h=0;h<d.length;h++)c.call(this,d[h].values,f);return d}return c.call(this,b(this.map()),0)};a.rollup=function(b){function c(d){for(var f in d){var g=d[f];if(g instanceof Array)d[f]=b(g);else c(g)}return d}return c(this.map())};pv.flatten=func [...]
+pv.Flatten=function(b){this.map=b;this.keys=[]};pv.Flatten.prototype.key=function(b,c){this.keys.push({name:b,value:c});delete this.$leaf;return this};pv.Flatten.prototype.leaf=function(b){this.keys.length=0;this.$leaf=b;return this};
+pv.Flatten.prototype.array=function(){function b(i,j){if(j<f.length-1)for(var l in i){d.push(l);b(i[l],j+1);d.pop()}else c.push(d.concat(i))}var c=[],d=[],f=this.keys,g=this.$leaf;if(g){function h(i,j){if(g(i))c.push({keys:d.slice(),value:i});else for(var l in i){d.push(l);h(i[l],j+1);d.pop()}}h(this.map,0);return c}b(this.map,0);return c.map(function(i){for(var j={},l=0;l<f.length;l++){var k=f[l],q=i[l];j[k.name]=k.value?k.value.call(null,q):q}return j})};
+pv.vector=function(b,c){return new pv.Vector(b,c)};pv.Vector=function(b,c){this.x=b;this.y=c};a=pv.Vector.prototype;a.perp=function(){return new pv.Vector(-this.y,this.x)};a.norm=function(){var b=this.length();return this.times(b?1/b:1)};a.length=function(){return Math.sqrt(this.x*this.x+this.y*this.y)};a.times=function(b){return new pv.Vector(this.x*b,this.y*b)};a.plus=function(b,c){return arguments.length==1?new pv.Vector(this.x+b.x,this.y+b.y):new pv.Vector(this.x+b,this.y+c)};
+a.minus=function(b,c){return arguments.length==1?new pv.Vector(this.x-b.x,this.y-b.y):new pv.Vector(this.x-b,this.y-c)};a.dot=function(b,c){return arguments.length==1?this.x*b.x+this.y*b.y:this.x*b+this.y*c};pv.Transform=function(){};pv.Transform.prototype={k:1,x:0,y:0};pv.Transform.identity=new pv.Transform;pv.Transform.prototype.translate=function(b,c){var d=new pv.Transform;d.k=this.k;d.x=this.k*b+this.x;d.y=this.k*c+this.y;return d};
+pv.Transform.prototype.scale=function(b){var c=new pv.Transform;c.k=this.k*b;c.x=this.x;c.y=this.y;return c};pv.Transform.prototype.invert=function(){var b=new pv.Transform,c=1/this.k;b.k=c;b.x=-this.x*c;b.y=-this.y*c;return b};pv.Transform.prototype.times=function(b){var c=new pv.Transform;c.k=this.k*b.k;c.x=this.k*b.x+this.x;c.y=this.k*b.y+this.y;return c};pv.Scale=function(){};
+pv.Scale.interpolator=function(b,c){if(typeof b=="number")return function(d){return d*(c-b)+b};b=pv.color(b).rgb();c=pv.color(c).rgb();return function(d){var f=b.a*(1-d)+c.a*d;if(f<1.0E-5)f=0;return b.a==0?pv.rgb(c.r,c.g,c.b,f):c.a==0?pv.rgb(b.r,b.g,b.b,f):pv.rgb(Math.round(b.r*(1-d)+c.r*d),Math.round(b.g*(1-d)+c.g*d),Math.round(b.b*(1-d)+c.b*d),f)}};
+pv.Scale.quantitative=function(){function b(o){return new Date(o)}function c(o){var n=pv.search(d,o);if(n<0)n=-n-2;n=Math.max(0,Math.min(h.length-1,n));return h[n]((l(o)-f[n])/(f[n+1]-f[n]))}var d=[0,1],f=[0,1],g=[0,1],h=[pv.identity],i=Number,j=false,l=pv.identity,k=pv.identity,q=String;c.transform=function(o,n){l=function(m){return j?-o(-m):o(m)};k=function(m){return j?-n(-m):n(m)};f=d.map(l);return this};c.domain=function(o,n,m){if(arguments.length){var r;if(o instanceof Array){if(arg [...]
+2)n=pv.identity;if(arguments.length<3)m=n;r=o.length&&n(o[0]);d=o.length?[pv.min(o,n),pv.max(o,m)]:[]}else{r=o;d=Array.prototype.slice.call(arguments).map(Number)}if(d.length){if(d.length==1)d=[d[0],d[0]]}else d=[-Infinity,Infinity];j=(d[0]||d[d.length-1])<0;f=d.map(l);i=r instanceof Date?b:Number;return this}return d.map(i)};c.range=function(){if(arguments.length){g=Array.prototype.slice.call(arguments);if(g.length){if(g.length==1)g=[g[0],g[0]]}else g=[-Infinity,Infinity];h=[];for(var o [...]
+1;o++)h.push(pv.Scale.interpolator(g[o],g[o+1]));return this}return g};c.invert=function(o){var n=pv.search(g,o);if(n<0)n=-n-2;n=Math.max(0,Math.min(h.length-1,n));return i(k(f[n]+(o-g[n])/(g[n+1]-g[n])*(f[n+1]-f[n])))};c.ticks=function(o){var n=d[0],m=d[d.length-1],r=m<n,s=r?m:n;m=r?n:m;var u=m-s;if(!u||!isFinite(u)){if(i==b)q=pv.Format.date("%x");return[i(s)]}if(i==b){function x(w,y){switch(y){case 31536E6:w.setMonth(0);case 2592E6:w.setDate(1);case 6048E5:y==6048E5&&w.setDate(w.getDat [...]
+case 864E5:w.setHours(0);case 36E5:w.setMinutes(0);case 6E4:w.setSeconds(0);case 1E3:w.setMilliseconds(0)}}var t,p,v=1;if(u>=94608E6){n=31536E6;t="%Y";p=function(w){w.setFullYear(w.getFullYear()+v)}}else if(u>=7776E6){n=2592E6;t="%m/%Y";p=function(w){w.setMonth(w.getMonth()+v)}}else if(u>=18144E5){n=6048E5;t="%m/%d";p=function(w){w.setDate(w.getDate()+7*v)}}else if(u>=2592E5){n=864E5;t="%m/%d";p=function(w){w.setDate(w.getDate()+v)}}else if(u>=108E5){n=36E5;t="%I:%M %p";p=function(w){w.s [...]
+v)}}else if(u>=18E4){n=6E4;t="%I:%M %p";p=function(w){w.setMinutes(w.getMinutes()+v)}}else if(u>=3E3){n=1E3;t="%I:%M:%S";p=function(w){w.setSeconds(w.getSeconds()+v)}}else{n=1;t="%S.%Qs";p=function(w){w.setTime(w.getTime()+v)}}q=pv.Format.date(t);s=new Date(s);t=[];x(s,n);u=u/n;if(u>10)switch(n){case 36E5:v=u>20?6:3;s.setHours(Math.floor(s.getHours()/v)*v);break;case 2592E6:v=3;s.setMonth(Math.floor(s.getMonth()/v)*v);break;case 6E4:v=u>30?15:u>15?10:5;s.setMinutes(Math.floor(s.getMinute [...]
+break;case 1E3:v=u>90?15:u>60?10:5;s.setSeconds(Math.floor(s.getSeconds()/v)*v);break;case 1:v=u>1E3?250:u>200?100:u>100?50:u>50?25:5;s.setMilliseconds(Math.floor(s.getMilliseconds()/v)*v);break;default:v=pv.logCeil(u/15,10);if(u/v<2)v/=5;else if(u/v<5)v/=2;s.setFullYear(Math.floor(s.getFullYear()/v)*v);break}for(;;){p(s);if(s>m)break;t.push(new Date(s))}return r?t.reverse():t}arguments.length||(o=10);v=pv.logFloor(u/o,10);n=o/(u/v);if(n<=0.15)v*=10;else if(n<=0.35)v*=5;else if(n<=0.75)v [...]
+v)*v;m=Math.floor(m/v)*v;q=pv.Format.number().fractionDigits(Math.max(0,-Math.floor(pv.log(v,10)+0.01)));m=pv.range(n,m+v,v);return r?m.reverse():m};c.tickFormat=function(o){return q(o)};c.nice=function(){if(d.length!=2)return this;var o=d[0],n=d[d.length-1],m=n<o,r=m?n:o;o=m?o:n;n=o-r;if(!n||!isFinite(n))return this;n=Math.pow(10,Math.round(Math.log(n)/Math.log(10))-1);d=[Math.floor(r/n)*n,Math.ceil(o/n)*n];m&&d.reverse();f=d.map(l);return this};c.by=function(o){function n(){return c(o. [...]
+arguments))}for(var m in c)n[m]=c[m];return n};c.domain.apply(c,arguments);return c};pv.Scale.linear=function(){var b=pv.Scale.quantitative();b.domain.apply(b,arguments);return b};
+pv.Scale.log=function(){var b=pv.Scale.quantitative(1,10),c,d,f=function(h){return Math.log(h)/d},g=function(h){return Math.pow(c,h)};b.ticks=function(){var h=b.domain(),i=h[0]<0,j=Math.floor(i?-f(-h[0]):f(h[0])),l=Math.ceil(i?-f(-h[1]):f(h[1])),k=[];if(i)for(k.push(-g(-j));j++<l;)for(i=c-1;i>0;i--)k.push(-g(-j)*i);else{for(;j<l;j++)for(i=1;i<c;i++)k.push(g(j)*i);k.push(g(j))}for(j=0;k[j]<h[0];j++);for(l=k.length;k[l-1]>h[1];l--);return k.slice(j,l)};b.tickFormat=function(h){return h.toP [...]
+b.nice=function(){var h=b.domain();return b.domain(pv.logFloor(h[0],c),pv.logCeil(h[1],c))};b.base=function(h){if(arguments.length){c=Number(h);d=Math.log(c);b.transform(f,g);return this}return c};b.domain.apply(b,arguments);return b.base(10)};pv.Scale.root=function(){var b=pv.Scale.quantitative();b.power=function(c){if(arguments.length){var d=Number(c),f=1/d;b.transform(function(g){return Math.pow(g,f)},function(g){return Math.pow(g,d)});return this}return d};b.domain.apply(b,arguments) [...]
+pv.Scale.ordinal=function(){function b(g){g in d||(d[g]=c.push(g)-1);return f[d[g]%f.length]}var c=[],d={},f=[];b.domain=function(g,h){if(arguments.length){g=g instanceof Array?arguments.length>1?pv.map(g,h):g:Array.prototype.slice.call(arguments);c=[];for(var i={},j=0;j<g.length;j++){var l=g[j];if(!(l in i)){i[l]=true;c.push(l)}}d=pv.numerate(c);return this}return c};b.range=function(g,h){if(arguments.length){f=g instanceof Array?arguments.length>1?pv.map(g,h):g:Array.prototype.slice.ca [...]
+if(typeof f[0]=="string")f=f.map(pv.color);return this}return f};b.split=function(g,h){var i=(h-g)/this.domain().length;f=pv.range(g+i/2,h,i);return this};b.splitFlush=function(g,h){var i=this.domain().length,j=(h-g)/(i-1);f=i==1?[(g+h)/2]:pv.range(g,h+j/2,j);return this};b.splitBanded=function(g,h,i){if(arguments.length<3)i=1;if(i<0){var j=this.domain().length;j=(h-g- -i*j)/(j+1);f=pv.range(g+j,h,j-i);f.band=-i}else{j=(h-g)/(this.domain().length+(1-i));f=pv.range(g+j*(1-i),h,j);f.band=j [...]
+b.by=function(g){function h(){return b(g.apply(this,arguments))}for(var i in b)h[i]=b[i];return h};b.domain.apply(b,arguments);return b};
+pv.Scale.quantile=function(){function b(i){return h(Math.max(0,Math.min(d,pv.search.index(f,i)-1))/d)}var c=-1,d=-1,f=[],g=[],h=pv.Scale.linear();b.quantiles=function(i){if(arguments.length){c=Number(i);if(c<0){f=[g[0]].concat(g);d=g.length-1}else{f=[];f[0]=g[0];for(var j=1;j<=c;j++)f[j]=g[~~(j*(g.length-1)/c)];d=c-1}return this}return f};b.domain=function(i,j){if(arguments.length){g=i instanceof Array?pv.map(i,j):Array.prototype.slice.call(arguments);g.sort(pv.naturalOrder);b.quantiles( [...]
+b.range=function(){if(arguments.length){h.range.apply(h,arguments);return this}return h.range()};b.by=function(i){function j(){return b(i.apply(this,arguments))}for(var l in b)j[l]=b[l];return j};b.domain.apply(b,arguments);return b};
+pv.histogram=function(b,c){var d=true;return{bins:function(f){var g=pv.map(b,c),h=[];arguments.length||(f=pv.Scale.linear(g).ticks());for(var i=0;i<f.length-1;i++){var j=h[i]=[];j.x=f[i];j.dx=f[i+1]-f[i];j.y=0}for(i=0;i<g.length;i++){j=pv.search.index(f,g[i])-1;j=h[Math.max(0,Math.min(h.length-1,j))];j.y++;j.push(b[i])}if(!d)for(i=0;i<h.length;i++)h[i].y/=g.length;return h},frequency:function(f){if(arguments.length){d=Boolean(f);return this}return d}}};
+pv.color=function(b){if(b.rgb)return b.rgb();var c=/([a-z]+)\((.*)\)/i.exec(b);if(c){var d=c[2].split(","),f=1;switch(c[1]){case "hsla":case "rgba":f=parseFloat(d[3]);if(!f)return pv.Color.transparent;break}switch(c[1]){case "hsla":case "hsl":b=parseFloat(d[0]);var g=parseFloat(d[1])/100;d=parseFloat(d[2])/100;return(new pv.Color.Hsl(b,g,d,f)).rgb();case "rgba":case "rgb":function h(l){var k=parseFloat(l);return l[l.length-1]=="%"?Math.round(k*2.55):k}g=h(d[0]);var i=h(d[1]),j=h(d[2]);re [...]
+i,j,f)}}if(f=pv.Color.names[b])return f;if(b.charAt(0)=="#"){if(b.length==4){g=b.charAt(1);g+=g;i=b.charAt(2);i+=i;j=b.charAt(3);j+=j}else if(b.length==7){g=b.substring(1,3);i=b.substring(3,5);j=b.substring(5,7)}return pv.rgb(parseInt(g,16),parseInt(i,16),parseInt(j,16),1)}return new pv.Color(b,1)};pv.Color=function(b,c){this.color=b;this.opacity=c};pv.Color.prototype.brighter=function(b){return this.rgb().brighter(b)};pv.Color.prototype.darker=function(b){return this.rgb().darker(b)};
+pv.rgb=function(b,c,d,f){return new pv.Color.Rgb(b,c,d,arguments.length==4?f:1)};pv.Color.Rgb=function(b,c,d,f){pv.Color.call(this,f?"rgb("+b+","+c+","+d+")":"none",f);this.r=b;this.g=c;this.b=d;this.a=f};pv.Color.Rgb.prototype=pv.extend(pv.Color);a=pv.Color.Rgb.prototype;a.red=function(b){return pv.rgb(b,this.g,this.b,this.a)};a.green=function(b){return pv.rgb(this.r,b,this.b,this.a)};a.blue=function(b){return pv.rgb(this.r,this.g,b,this.a)};
+a.alpha=function(b){return pv.rgb(this.r,this.g,this.b,b)};a.rgb=function(){return this};a.brighter=function(b){b=Math.pow(0.7,arguments.length?b:1);var c=this.r,d=this.g,f=this.b;if(!c&&!d&&!f)return pv.rgb(30,30,30,this.a);if(c&&c<30)c=30;if(d&&d<30)d=30;if(f&&f<30)f=30;return pv.rgb(Math.min(255,Math.floor(c/b)),Math.min(255,Math.floor(d/b)),Math.min(255,Math.floor(f/b)),this.a)};
+a.darker=function(b){b=Math.pow(0.7,arguments.length?b:1);return pv.rgb(Math.max(0,Math.floor(b*this.r)),Math.max(0,Math.floor(b*this.g)),Math.max(0,Math.floor(b*this.b)),this.a)};pv.hsl=function(b,c,d,f){return new pv.Color.Hsl(b,c,d,arguments.length==4?f:1)};pv.Color.Hsl=function(b,c,d,f){pv.Color.call(this,"hsl("+b+","+c*100+"%,"+d*100+"%)",f);this.h=b;this.s=c;this.l=d;this.a=f};pv.Color.Hsl.prototype=pv.extend(pv.Color);a=pv.Color.Hsl.prototype;
+a.hue=function(b){return pv.hsl(b,this.s,this.l,this.a)};a.saturation=function(b){return pv.hsl(this.h,b,this.l,this.a)};a.lightness=function(b){return pv.hsl(this.h,this.s,b,this.a)};a.alpha=function(b){return pv.hsl(this.h,this.s,this.l,b)};
+a.rgb=function(){function b(j){if(j>360)j-=360;else if(j<0)j+=360;if(j<60)return i+(h-i)*j/60;if(j<180)return h;if(j<240)return i+(h-i)*(240-j)/60;return i}function c(j){return Math.round(b(j)*255)}var d=this.h,f=this.s,g=this.l;d%=360;if(d<0)d+=360;f=Math.max(0,Math.min(f,1));g=Math.max(0,Math.min(g,1));var h=g<=0.5?g*(1+f):g+f-g*f,i=2*g-h;return pv.rgb(c(d+120),c(d),c(d-120),this.a)};
+pv.Color.names={aliceblue:"#f0f8ff",antiquewhite:"#faebd7",aqua:"#00ffff",aquamarine:"#7fffd4",azure:"#f0ffff",beige:"#f5f5dc",bisque:"#ffe4c4",black:"#000000",blanchedalmond:"#ffebcd",blue:"#0000ff",blueviolet:"#8a2be2",brown:"#a52a2a",burlywood:"#deb887",cadetblue:"#5f9ea0",chartreuse:"#7fff00",chocolate:"#d2691e",coral:"#ff7f50",cornflowerblue:"#6495ed",cornsilk:"#fff8dc",crimson:"#dc143c",cyan:"#00ffff",darkblue:"#00008b",darkcyan:"#008b8b",darkgoldenrod:"#b8860b",darkgray:"#a9a9a9", [...]
+darkgrey:"#a9a9a9",darkkhaki:"#bdb76b",darkmagenta:"#8b008b",darkolivegreen:"#556b2f",darkorange:"#ff8c00",darkorchid:"#9932cc",darkred:"#8b0000",darksalmon:"#e9967a",darkseagreen:"#8fbc8f",darkslateblue:"#483d8b",darkslategray:"#2f4f4f",darkslategrey:"#2f4f4f",darkturquoise:"#00ced1",darkviolet:"#9400d3",deeppink:"#ff1493",deepskyblue:"#00bfff",dimgray:"#696969",dimgrey:"#696969",dodgerblue:"#1e90ff",firebrick:"#b22222",floralwhite:"#fffaf0",forestgreen:"#228b22",fuchsia:"#ff00ff",gains [...]
+ghostwhite:"#f8f8ff",gold:"#ffd700",goldenrod:"#daa520",gray:"#808080",green:"#008000",greenyellow:"#adff2f",grey:"#808080",honeydew:"#f0fff0",hotpink:"#ff69b4",indianred:"#cd5c5c",indigo:"#4b0082",ivory:"#fffff0",khaki:"#f0e68c",lavender:"#e6e6fa",lavenderblush:"#fff0f5",lawngreen:"#7cfc00",lemonchiffon:"#fffacd",lightblue:"#add8e6",lightcoral:"#f08080",lightcyan:"#e0ffff",lightgoldenrodyellow:"#fafad2",lightgray:"#d3d3d3",lightgreen:"#90ee90",lightgrey:"#d3d3d3",lightpink:"#ffb6c1",lig [...]
+lightseagreen:"#20b2aa",lightskyblue:"#87cefa",lightslategray:"#778899",lightslategrey:"#778899",lightsteelblue:"#b0c4de",lightyellow:"#ffffe0",lime:"#00ff00",limegreen:"#32cd32",linen:"#faf0e6",magenta:"#ff00ff",maroon:"#800000",mediumaquamarine:"#66cdaa",mediumblue:"#0000cd",mediumorchid:"#ba55d3",mediumpurple:"#9370db",mediumseagreen:"#3cb371",mediumslateblue:"#7b68ee",mediumspringgreen:"#00fa9a",mediumturquoise:"#48d1cc",mediumvioletred:"#c71585",midnightblue:"#191970",mintcream:"#f5 [...]
+moccasin:"#ffe4b5",navajowhite:"#ffdead",navy:"#000080",oldlace:"#fdf5e6",olive:"#808000",olivedrab:"#6b8e23",orange:"#ffa500",orangered:"#ff4500",orchid:"#da70d6",palegoldenrod:"#eee8aa",palegreen:"#98fb98",paleturquoise:"#afeeee",palevioletred:"#db7093",papayawhip:"#ffefd5",peachpuff:"#ffdab9",peru:"#cd853f",pink:"#ffc0cb",plum:"#dda0dd",powderblue:"#b0e0e6",purple:"#800080",red:"#ff0000",rosybrown:"#bc8f8f",royalblue:"#4169e1",saddlebrown:"#8b4513",salmon:"#fa8072",sandybrown:"#f4a460 [...]
+seashell:"#fff5ee",sienna:"#a0522d",silver:"#c0c0c0",skyblue:"#87ceeb",slateblue:"#6a5acd",slategray:"#708090",slategrey:"#708090",snow:"#fffafa",springgreen:"#00ff7f",steelblue:"#4682b4",tan:"#d2b48c",teal:"#008080",thistle:"#d8bfd8",tomato:"#ff6347",turquoise:"#40e0d0",violet:"#ee82ee",wheat:"#f5deb3",white:"#ffffff",whitesmoke:"#f5f5f5",yellow:"#ffff00",yellowgreen:"#9acd32",transparent:pv.Color.transparent=pv.rgb(0,0,0,0)};(function(){var b=pv.Color.names;for(var c in b)b[c]=pv.color [...]
+pv.colors=function(){var b=pv.Scale.ordinal();b.range.apply(b,arguments);return b};pv.Colors={};pv.Colors.category10=function(){var b=pv.colors("#1f77b4","#ff7f0e","#2ca02c","#d62728","#9467bd","#8c564b","#e377c2","#7f7f7f","#bcbd22","#17becf");b.domain.apply(b,arguments);return b};
+pv.Colors.category20=function(){var b=pv.colors("#1f77b4","#aec7e8","#ff7f0e","#ffbb78","#2ca02c","#98df8a","#d62728","#ff9896","#9467bd","#c5b0d5","#8c564b","#c49c94","#e377c2","#f7b6d2","#7f7f7f","#c7c7c7","#bcbd22","#dbdb8d","#17becf","#9edae5");b.domain.apply(b,arguments);return b};
+pv.Colors.category19=function(){var b=pv.colors("#9c9ede","#7375b5","#4a5584","#cedb9c","#b5cf6b","#8ca252","#637939","#e7cb94","#e7ba52","#bd9e39","#8c6d31","#e7969c","#d6616b","#ad494a","#843c39","#de9ed6","#ce6dbd","#a55194","#7b4173");b.domain.apply(b,arguments);return b};pv.ramp=function(){var b=pv.Scale.linear();b.range.apply(b,arguments);return b};
+pv.Scene=pv.SvgScene={svg:"http://www.w3.org/2000/svg",xmlns:"http://www.w3.org/2000/xmlns",xlink:"http://www.w3.org/1999/xlink",xhtml:"http://www.w3.org/1999/xhtml",scale:1,events:["DOMMouseScroll","mousewheel","mousedown","mouseup","mouseover","mouseout","mousemove","click","dblclick"],implicit:{svg:{"shape-rendering":"auto","pointer-events":"painted",x:0,y:0,dy:0,"text-anchor":"start",transform:"translate(0,0)",fill:"none","fill-opacity":1,stroke:"none","stroke-opacity":1,"stroke-widt [...]
+css:{font:"10px sans-serif"}}};pv.SvgScene.updateAll=function(b){if(b.length&&b[0].reverse&&b.type!="line"&&b.type!="area"){for(var c=pv.extend(b),d=0,f=b.length-1;f>=0;d++,f--)c[d]=b[f];b=c}this.removeSiblings(this[b.type](b))};pv.SvgScene.create=function(b){return document.createElementNS(this.svg,b)};
+pv.SvgScene.expect=function(b,c,d,f){if(b){if(b.tagName=="a")b=b.firstChild;if(b.tagName!=c){c=this.create(c);b.parentNode.replaceChild(c,b);b=c}}else b=this.create(c);for(var g in d){c=d[g];if(c==this.implicit.svg[g])c=null;c==null?b.removeAttribute(g):b.setAttribute(g,c)}for(g in f){c=f[g];if(c==this.implicit.css[g])c=null;if(c==null)b.style.removeProperty(g);else b.style[g]=c}return b};
+pv.SvgScene.append=function(b,c,d){b.$scene={scenes:c,index:d};b=this.title(b,c[d]);b.parentNode||c.$g.appendChild(b);return b.nextSibling};pv.SvgScene.title=function(b,c){var d=b.parentNode;if(d&&d.tagName!="a")d=null;if(c.title){if(!d){d=this.create("a");b.parentNode&&b.parentNode.replaceChild(d,b);d.appendChild(b)}d.setAttributeNS(this.xlink,"title",c.title);return d}d&&d.parentNode.replaceChild(b,d);return b};
+pv.SvgScene.dispatch=pv.listener(function(b){var c=b.target.$scene;if(c){var d=b.type;switch(d){case "DOMMouseScroll":d="mousewheel";b.wheel=-480*b.detail;break;case "mousewheel":b.wheel=(window.opera?12:1)*b.wheelDelta;break}pv.Mark.dispatch(d,c.scenes,c.index)&&b.preventDefault()}});pv.SvgScene.removeSiblings=function(b){for(;b;){var c=b.nextSibling;b.parentNode.removeChild(b);b=c}};pv.SvgScene.undefined=function(){};
+pv.SvgScene.pathBasis=function(){function b(f,g,h,i,j){return{x:f[0]*g.left+f[1]*h.left+f[2]*i.left+f[3]*j.left,y:f[0]*g.top+f[1]*h.top+f[2]*i.top+f[3]*j.top}}var c=[[1/6,2/3,1/6,0],[0,2/3,1/3,0],[0,1/3,2/3,0],[0,1/6,2/3,1/6]],d=function(f,g,h,i){var j=b(c[1],f,g,h,i),l=b(c[2],f,g,h,i);f=b(c[3],f,g,h,i);return"C"+j.x+","+j.y+","+l.x+","+l.y+","+f.x+","+f.y};d.segment=function(f,g,h,i){var j=b(c[0],f,g,h,i),l=b(c[1],f,g,h,i),k=b(c[2],f,g,h,i);f=b(c[3],f,g,h,i);return"M"+j.x+","+j.y+"C"+l. [...]
+","+k.x+","+k.y+","+f.x+","+f.y};return d}();pv.SvgScene.curveBasis=function(b){if(b.length<=2)return"";var c="",d=b[0],f=d,g=d,h=b[1];c+=this.pathBasis(d,f,g,h);for(var i=2;i<b.length;i++){d=f;f=g;g=h;h=b[i];c+=this.pathBasis(d,f,g,h)}c+=this.pathBasis(f,g,h,h);c+=this.pathBasis(g,h,h,h);return c};
+pv.SvgScene.curveBasisSegments=function(b){if(b.length<=2)return"";var c=[],d=b[0],f=d,g=d,h=b[1],i=this.pathBasis.segment(d,f,g,h);d=f;f=g;g=h;h=b[2];c.push(i+this.pathBasis(d,f,g,h));for(i=3;i<b.length;i++){d=f;f=g;g=h;h=b[i];c.push(this.pathBasis.segment(d,f,g,h))}c.push(this.pathBasis.segment(f,g,h,h)+this.pathBasis(g,h,h,h));return c};
+pv.SvgScene.curveHermite=function(b,c){if(c.length<1||b.length!=c.length&&b.length!=c.length+2)return"";var d=b.length!=c.length,f="",g=b[0],h=b[1],i=c[0],j=i,l=1;if(d){f+="Q"+(h.left-i.x*2/3)+","+(h.top-i.y*2/3)+","+h.left+","+h.top;g=b[1];l=2}if(c.length>1){j=c[1];h=b[l];l++;f+="C"+(g.left+i.x)+","+(g.top+i.y)+","+(h.left-j.x)+","+(h.top-j.y)+","+h.left+","+h.top;for(g=2;g<c.length;g++,l++){h=b[l];j=c[g];f+="S"+(h.left-j.x)+","+(h.top-j.y)+","+h.left+","+h.top}}if(d){b=b[l];f+="Q"+(h.l [...]
+3)+","+(h.top+j.y*2/3)+","+b.left+","+b.top}return f};
+pv.SvgScene.curveHermiteSegments=function(b,c){if(c.length<1||b.length!=c.length&&b.length!=c.length+2)return[];var d=b.length!=c.length,f=[],g=b[0],h=g,i=c[0],j=i,l=1;if(d){h=b[1];f.push("M"+g.left+","+g.top+"Q"+(h.left-j.x*2/3)+","+(h.top-j.y*2/3)+","+h.left+","+h.top);l=2}for(var k=1;k<c.length;k++,l++){g=h;i=j;h=b[l];j=c[k];f.push("M"+g.left+","+g.top+"C"+(g.left+i.x)+","+(g.top+i.y)+","+(h.left-j.x)+","+(h.top-j.y)+","+h.left+","+h.top)}if(d){b=b[l];f.push("M"+h.left+","+h.top+"Q"+( [...]
+2/3)+","+(h.top+j.y*2/3)+","+b.left+","+b.top)}return f};pv.SvgScene.cardinalTangents=function(b,c){var d=[];c=(1-c)/2;for(var f=b[0],g=b[1],h=b[2],i=3;i<b.length;i++){d.push({x:c*(h.left-f.left),y:c*(h.top-f.top)});f=g;g=h;h=b[i]}d.push({x:c*(h.left-f.left),y:c*(h.top-f.top)});return d};pv.SvgScene.curveCardinal=function(b,c){if(b.length<=2)return"";return this.curveHermite(b,this.cardinalTangents(b,c))};
+pv.SvgScene.curveCardinalSegments=function(b,c){if(b.length<=2)return"";return this.curveHermiteSegments(b,this.cardinalTangents(b,c))};
+pv.SvgScene.monotoneTangents=function(b){var c=[],d=[],f=[],g=[],h=0;for(h=0;h<b.length-1;h++)d[h]=(b[h+1].top-b[h].top)/(b[h+1].left-b[h].left);f[0]=d[0];g[0]=b[1].left-b[0].left;for(h=1;h<b.length-1;h++){f[h]=(d[h-1]+d[h])/2;g[h]=(b[h+1].left-b[h-1].left)/2}f[h]=d[h-1];g[h]=b[h].left-b[h-1].left;for(h=0;h<b.length-1;h++)if(d[h]==0){f[h]=0;f[h+1]=0}for(h=0;h<b.length-1;h++)if(!(Math.abs(f[h])<1.0E-5||Math.abs(f[h+1])<1.0E-5)){var i=f[h]/d[h],j=f[h+1]/d[h],l=i*i+j*j;if(l>9){l=3/Math.sqrt [...]
+l*i*d[h];f[h+1]=l*j*d[h]}}for(h=0;h<b.length;h++){d=1+f[h]*f[h];c.push({x:g[h]/3/d,y:f[h]*g[h]/3/d})}return c};pv.SvgScene.curveMonotone=function(b){if(b.length<=2)return"";return this.curveHermite(b,this.monotoneTangents(b))};pv.SvgScene.curveMonotoneSegments=function(b){if(b.length<=2)return"";return this.curveHermiteSegments(b,this.monotoneTangents(b))};
+pv.SvgScene.area=function(b){function c(o,n){for(var m=[],r=[],s=n;o<=s;o++,n--){var u=b[o],x=b[n];u=u.left+","+u.top;x=x.left+x.width+","+(x.top+x.height);if(o<s){var t=b[o+1],p=b[n-1];switch(g.interpolate){case "step-before":u+="V"+t.top;x+="H"+(p.left+p.width);break;case "step-after":u+="H"+t.left;x+="V"+(p.top+p.height);break}}m.push(u);r.push(x)}return m.concat(r).join("L")}function d(o,n){for(var m=[],r=[],s=n;o<=s;o++,n--){var u=b[n];m.push(b[o]);r.push({left:u.left+u.width,top:u. [...]
+"basis"){o=pv.SvgScene.curveBasis(m);n=pv.SvgScene.curveBasis(r)}else if(g.interpolate=="cardinal"){o=pv.SvgScene.curveCardinal(m,g.tension);n=pv.SvgScene.curveCardinal(r,g.tension)}else{o=pv.SvgScene.curveMonotone(m);n=pv.SvgScene.curveMonotone(r)}return m[0].left+","+m[0].top+o+"L"+r[0].left+","+r[0].top+n}var f=b.$g.firstChild;if(!b.length)return f;var g=b[0];if(g.segmented)return this.areaSegment(b);if(!g.visible)return f;var h=g.fillStyle,i=g.strokeStyle;if(!h.opacity&&!i.opacity)re [...]
+[],l,k=0;k<b.length;k++){l=b[k];if(l.width||l.height){for(var q=k+1;q<b.length;q++){l=b[q];if(!l.width&&!l.height)break}k&&g.interpolate!="step-after"&&k--;q<b.length&&g.interpolate!="step-before"&&q++;j.push((q-k>2&&(g.interpolate=="basis"||g.interpolate=="cardinal"||g.interpolate=="monotone")?d:c)(k,q-1));k=q-1}}if(!j.length)return f;f=this.expect(f,"path",{"shape-rendering":g.antialias?null:"crispEdges","pointer-events":g.events,cursor:g.cursor,d:"M"+j.join("ZM")+"Z",fill:h.color,"fill-opacity":h.opacity||
+null,stroke:i.color,"stroke-opacity":i.opacity||null,"stroke-width":i.opacity?g.lineWidth/this.scale:null});return this.append(f,b,0)};
+pv.SvgScene.areaSegment=function(b){var c=b.$g.firstChild,d=b[0],f,g;if(d.interpolate=="basis"||d.interpolate=="cardinal"||d.interpolate=="monotone"){f=[];g=[];for(var h=0,i=b.length;h<i;h++){var j=b[i-h-1];f.push(b[h]);g.push({left:j.left+j.width,top:j.top+j.height})}if(d.interpolate=="basis"){f=this.curveBasisSegments(f);g=this.curveBasisSegments(g)}else if(d.interpolate=="cardinal"){f=this.curveCardinalSegments(f,d.tension);g=this.curveCardinalSegments(g,d.tension)}else{f=this.curveMonotoneSegments(f);
+g=this.curveMonotoneSegments(g)}}h=0;for(i=b.length-1;h<i;h++){d=b[h];var l=b[h+1];if(d.visible&&l.visible){var k=d.fillStyle,q=d.strokeStyle;if(k.opacity||q.opacity){if(f){j=f[h];l="L"+g[i-h-1].substr(1);j=j+l+"Z"}else{var o=d;j=l;switch(d.interpolate){case "step-before":o=l;break;case "step-after":j=d;break}j="M"+d.left+","+o.top+"L"+l.left+","+j.top+"L"+(l.left+l.width)+","+(j.top+j.height)+"L"+(d.left+d.width)+","+(o.top+o.height)+"Z"}c=this.expect(c,"path",{"shape-rendering":d.antialias?null:
+"crispEdges","pointer-events":d.events,cursor:d.cursor,d:j,fill:k.color,"fill-opacity":k.opacity||null,stroke:q.color,"stroke-opacity":q.opacity||null,"stroke-width":q.opacity?d.lineWidth/this.scale:null});c=this.append(c,b,h)}}}return c};
+pv.SvgScene.bar=function(b){for(var c=b.$g.firstChild,d=0;d<b.length;d++){var f=b[d];if(f.visible){var g=f.fillStyle,h=f.strokeStyle;if(g.opacity||h.opacity){c=this.expect(c,"rect",{"shape-rendering":f.antialias?null:"crispEdges","pointer-events":f.events,cursor:f.cursor,x:f.left,y:f.top,width:Math.max(1.0E-10,f.width),height:Math.max(1.0E-10,f.height),fill:g.color,"fill-opacity":g.opacity||null,stroke:h.color,"stroke-opacity":h.opacity||null,"stroke-width":h.opacity?f.lineWidth/this.scale:null});
+c=this.append(c,b,d)}}}return c};
+pv.SvgScene.dot=function(b){for(var c=b.$g.firstChild,d=0;d<b.length;d++){var f=b[d];if(f.visible){var g=f.fillStyle,h=f.strokeStyle;if(g.opacity||h.opacity){var i=f.radius,j=null;switch(f.shape){case "cross":j="M"+-i+","+-i+"L"+i+","+i+"M"+i+","+-i+"L"+-i+","+i;break;case "triangle":j=i;var l=i*1.1547;j="M0,"+j+"L"+l+","+-j+" "+-l+","+-j+"Z";break;case "diamond":i*=Math.SQRT2;j="M0,"+-i+"L"+i+",0 0,"+i+" "+-i+",0Z";break;case "square":j="M"+-i+","+-i+"L"+i+","+-i+" "+i+","+i+" "+-i+","+i+"Z";break;
+case "tick":j="M0,0L0,"+-f.size;break;case "bar":j="M0,"+f.size/2+"L0,"+-(f.size/2);break}g={"shape-rendering":f.antialias?null:"crispEdges","pointer-events":f.events,cursor:f.cursor,fill:g.color,"fill-opacity":g.opacity||null,stroke:h.color,"stroke-opacity":h.opacity||null,"stroke-width":h.opacity?f.lineWidth/this.scale:null};if(j){g.transform="translate("+f.left+","+f.top+")";if(f.angle)g.transform+=" rotate("+180*f.angle/Math.PI+")";g.d=j;c=this.expect(c,"path",g)}else{g.cx=f.left;g.c [...]
+i;c=this.expect(c,"circle",g)}c=this.append(c,b,d)}}}return c};
+pv.SvgScene.image=function(b){for(var c=b.$g.firstChild,d=0;d<b.length;d++){var f=b[d];if(f.visible){c=this.fill(c,b,d);if(f.image){c=this.expect(c,"foreignObject",{cursor:f.cursor,x:f.left,y:f.top,width:f.width,height:f.height});var g=c.firstChild||c.appendChild(document.createElementNS(this.xhtml,"canvas"));g.$scene={scenes:b,index:d};g.style.width=f.width;g.style.height=f.height;g.width=f.imageWidth;g.height=f.imageHeight;g.getContext("2d").putImageData(f.image,0,0)}else{c=this.expect(c,"image",
+{preserveAspectRatio:"none",cursor:f.cursor,x:f.left,y:f.top,width:f.width,height:f.height});c.setAttributeNS(this.xlink,"href",f.url)}c=this.append(c,b,d);c=this.stroke(c,b,d)}}return c};
+pv.SvgScene.label=function(b){for(var c=b.$g.firstChild,d=0;d<b.length;d++){var f=b[d];if(f.visible){var g=f.textStyle;if(g.opacity&&f.text){var h=0,i=0,j=0,l="start";switch(f.textBaseline){case "middle":j=".35em";break;case "top":j=".71em";i=f.textMargin;break;case "bottom":i="-"+f.textMargin;break}switch(f.textAlign){case "right":l="end";h="-"+f.textMargin;break;case "center":l="middle";break;case "left":h=f.textMargin;break}c=this.expect(c,"text",{"pointer-events":f.events,cursor:f.cursor,x:h,
+y:i,dy:j,transform:"translate("+f.left+","+f.top+")"+(f.textAngle?" rotate("+180*f.textAngle/Math.PI+")":"")+(this.scale!=1?" scale("+1/this.scale+")":""),fill:g.color,"fill-opacity":g.opacity||null,"text-anchor":l},{font:f.font,"text-shadow":f.textShadow,"text-decoration":f.textDecoration});if(c.firstChild)c.firstChild.nodeValue=f.text;else c.appendChild(document.createTextNode(f.text));c=this.append(c,b,d)}}}return c};
+pv.SvgScene.line=function(b){var c=b.$g.firstChild;if(b.length<2)return c;var d=b[0];if(d.segmented)return this.lineSegment(b);if(!d.visible)return c;var f=d.fillStyle,g=d.strokeStyle;if(!f.opacity&&!g.opacity)return c;var h="M"+d.left+","+d.top;if(b.length>2&&(d.interpolate=="basis"||d.interpolate=="cardinal"||d.interpolate=="monotone"))switch(d.interpolate){case "basis":h+=this.curveBasis(b);break;case "cardinal":h+=this.curveCardinal(b,d.tension);break;case "monotone":h+=this.curveMonotone(b);
+break}else for(var i=1;i<b.length;i++)h+=this.pathSegment(b[i-1],b[i]);c=this.expect(c,"path",{"shape-rendering":d.antialias?null:"crispEdges","pointer-events":d.events,cursor:d.cursor,d:h,fill:f.color,"fill-opacity":f.opacity||null,stroke:g.color,"stroke-opacity":g.opacity||null,"stroke-width":g.opacity?d.lineWidth/this.scale:null,"stroke-linejoin":d.lineJoin});return this.append(c,b,0)};
+pv.SvgScene.lineSegment=function(b){var c=b.$g.firstChild,d=b[0],f;switch(d.interpolate){case "basis":f=this.curveBasisSegments(b);break;case "cardinal":f=this.curveCardinalSegments(b,d.tension);break;case "monotone":f=this.curveMonotoneSegments(b);break}d=0;for(var g=b.length-1;d<g;d++){var h=b[d],i=b[d+1];if(h.visible&&i.visible){var j=h.strokeStyle,l=pv.Color.transparent;if(j.opacity){if(h.interpolate=="linear"&&h.lineJoin=="miter"){l=j;j=pv.Color.transparent;i=this.pathJoin(b[d-1],h,i,b[d+2])}else i=
+f?f[d]:"M"+h.left+","+h.top+this.pathSegment(h,i);c=this.expect(c,"path",{"shape-rendering":h.antialias?null:"crispEdges","pointer-events":h.events,cursor:h.cursor,d:i,fill:l.color,"fill-opacity":l.opacity||null,stroke:j.color,"stroke-opacity":j.opacity||null,"stroke-width":j.opacity?h.lineWidth/this.scale:null,"stroke-linejoin":h.lineJoin});c=this.append(c,b,d)}}}return c};
+pv.SvgScene.pathSegment=function(b,c){var d=1;switch(b.interpolate){case "polar-reverse":d=0;case "polar":var f=c.left-b.left,g=c.top-b.top;b=1-b.eccentricity;f=Math.sqrt(f*f+g*g)/(2*b);if(b<=0||b>1)break;return"A"+f+","+f+" 0 0,"+d+" "+c.left+","+c.top;case "step-before":return"V"+c.top+"H"+c.left;case "step-after":return"H"+c.left+"V"+c.top}return"L"+c.left+","+c.top};pv.SvgScene.lineIntersect=function(b,c,d,f){return b.plus(c.times(d.minus(b).dot(f.perp())/c.dot(f.perp())))};
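pathSegment is where the per-segment interpolation modes bottom out in concrete SVG path commands: the step modes emit V/H pairs, and the two polar modes emit an elliptical-arc command whose radius comes from the chord length and the eccentricity property (values at or past the unit range fall through to a straight line). A standalone restatement of the radius rule, under my own names:

    // Arc radius for the "polar" interpolation modes:
    // r = |chord| / (2 * (1 - eccentricity)).
    // eccentricity 0 yields a semicircle; as it approaches 1 the radius
    // diverges, which the renderer treats as a plain line segment.
    function polarArcRadius(x0, y0, x1, y1, eccentricity) {
      var dx = x1 - x0, dy = y1 - y0;
      return Math.sqrt(dx * dx + dy * dy) / (2 * (1 - eccentricity));
    }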
+pv.SvgScene.pathJoin=function(b,c,d,f){var g=pv.vector(c.left,c.top);d=pv.vector(d.left,d.top);var h=d.minus(g),i=h.perp().norm(),j=i.times(c.lineWidth/(2*this.scale));c=g.plus(j);var l=d.plus(j),k=d.minus(j);j=g.minus(j);if(b&&b.visible){b=g.minus(b.left,b.top).perp().norm().plus(i);j=this.lineIntersect(g,b,j,h);c=this.lineIntersect(g,b,c,h)}if(f&&f.visible){f=pv.vector(f.left,f.top).minus(d).perp().norm().plus(i);k=this.lineIntersect(d,f,k,h);l=this.lineIntersect(d,f,l,h)}return"M"+c.x+","+c.y+
+"L"+l.x+","+l.y+" "+k.x+","+k.y+" "+j.x+","+j.y};
+pv.SvgScene.panel=function(b){for(var c=b.$g,d=c&&c.firstChild,f=0;f<b.length;f++){var g=b[f];if(g.visible){if(!b.parent){g.canvas.style.display="inline-block";if(c&&c.parentNode!=g.canvas)d=(c=g.canvas.firstChild)&&c.firstChild;if(!c){c=g.canvas.appendChild(this.create("svg"));c.setAttribute("font-size","10px");c.setAttribute("font-family","sans-serif");c.setAttribute("fill","none");c.setAttribute("stroke","none");c.setAttribute("stroke-width",1.5);for(var h=0;h<this.events.length;h++)c.addEventListener(this.events[h],
+this.dispatch,false);d=c.firstChild}b.$g=c;c.setAttribute("width",g.width+g.left+g.right);c.setAttribute("height",g.height+g.top+g.bottom)}if(g.overflow=="hidden"){h=pv.id().toString(36);var i=this.expect(d,"g",{"clip-path":"url(#"+h+")"});i.parentNode||c.appendChild(i);b.$g=c=i;d=i.firstChild;d=this.expect(d,"clipPath",{id:h});h=d.firstChild||d.appendChild(this.create("rect"));h.setAttribute("x",g.left);h.setAttribute("y",g.top);h.setAttribute("width",g.width);h.setAttribute("height",g.height);d.parentNode||
+c.appendChild(d);d=d.nextSibling}d=this.fill(d,b,f);var j=this.scale,l=g.transform,k=g.left+l.x,q=g.top+l.y;this.scale*=l.k;for(h=0;h<g.children.length;h++){g.children[h].$g=d=this.expect(d,"g",{transform:"translate("+k+","+q+")"+(l.k!=1?" scale("+l.k+")":"")});this.updateAll(g.children[h]);d.parentNode||c.appendChild(d);d=d.nextSibling}this.scale=j;d=this.stroke(d,b,f);if(g.overflow=="hidden"){b.$g=c=i.parentNode;d=i.nextSibling}}}return d};
+pv.SvgScene.fill=function(b,c,d){var f=c[d],g=f.fillStyle;if(g.opacity||f.events=="all"){b=this.expect(b,"rect",{"shape-rendering":f.antialias?null:"crispEdges","pointer-events":f.events,cursor:f.cursor,x:f.left,y:f.top,width:f.width,height:f.height,fill:g.color,"fill-opacity":g.opacity,stroke:null});b=this.append(b,c,d)}return b};
+pv.SvgScene.stroke=function(b,c,d){var f=c[d],g=f.strokeStyle;if(g.opacity||f.events=="all"){b=this.expect(b,"rect",{"shape-rendering":f.antialias?null:"crispEdges","pointer-events":f.events=="all"?"stroke":f.events,cursor:f.cursor,x:f.left,y:f.top,width:Math.max(1.0E-10,f.width),height:Math.max(1.0E-10,f.height),fill:null,stroke:g.color,"stroke-opacity":g.opacity,"stroke-width":f.lineWidth/this.scale});b=this.append(b,c,d)}return b};
+pv.SvgScene.rule=function(b){for(var c=b.$g.firstChild,d=0;d<b.length;d++){var f=b[d];if(f.visible){var g=f.strokeStyle;if(g.opacity){c=this.expect(c,"line",{"shape-rendering":f.antialias?null:"crispEdges","pointer-events":f.events,cursor:f.cursor,x1:f.left,y1:f.top,x2:f.left+f.width,y2:f.top+f.height,stroke:g.color,"stroke-opacity":g.opacity,"stroke-width":f.lineWidth/this.scale});c=this.append(c,b,d)}}}return c};
+pv.SvgScene.wedge=function(b){for(var c=b.$g.firstChild,d=0;d<b.length;d++){var f=b[d];if(f.visible){var g=f.fillStyle,h=f.strokeStyle;if(g.opacity||h.opacity){var i=f.innerRadius,j=f.outerRadius,l=Math.abs(f.angle);if(l>=2*Math.PI)i=i?"M0,"+j+"A"+j+","+j+" 0 1,1 0,"+-j+"A"+j+","+j+" 0 1,1 0,"+j+"M0,"+i+"A"+i+","+i+" 0 1,1 0,"+-i+"A"+i+","+i+" 0 1,1 0,"+i+"Z":"M0,"+j+"A"+j+","+j+" 0 1,1 0,"+-j+"A"+j+","+j+" 0 1,1 0,"+j+"Z";else{var k=Math.min(f.startAngle,f.endAngle),q=Math.max(f.startAngle,f.endAngle),
+o=Math.cos(k),n=Math.cos(q);k=Math.sin(k);q=Math.sin(q);i=i?"M"+j*o+","+j*k+"A"+j+","+j+" 0 "+(l<Math.PI?"0":"1")+",1 "+j*n+","+j*q+"L"+i*n+","+i*q+"A"+i+","+i+" 0 "+(l<Math.PI?"0":"1")+",0 "+i*o+","+i*k+"Z":"M"+j*o+","+j*k+"A"+j+","+j+" 0 "+(l<Math.PI?"0":"1")+",1 "+j*n+","+j*q+"L0,0Z"}c=this.expect(c,"path",{"shape-rendering":f.antialias?null:"crispEdges","pointer-events":f.events,cursor:f.cursor,transform:"translate("+f.left+","+f.top+")",d:i,fill:g.color,"fill-rule":"evenodd","fill-opacity":g.opacity||
+null,stroke:h.color,"stroke-opacity":h.opacity||null,"stroke-width":h.opacity?f.lineWidth/this.scale:null});c=this.append(c,b,d)}}}return c};pv.Mark=function(){this.$properties=[];this.$handlers={}};pv.Mark.prototype.properties={};pv.Mark.cast={};pv.Mark.prototype.property=function(b,c){if(!this.hasOwnProperty("properties"))this.properties=pv.extend(this.properties);this.properties[b]=true;pv.Mark.prototype.propertyMethod(b,false,pv.Mark.cast[b]=c);return this};
+pv.Mark.prototype.propertyMethod=function(b,c,d){d||(d=pv.Mark.cast[b]);this[b]=function(f){if(c&&this.scene){var g=this.scene.defs;if(arguments.length){g[b]={id:f==null?0:pv.id(),value:f!=null&&d?d(f):f};return this}return g[b]?g[b].value:null}if(arguments.length){g=!c<<1|typeof f=="function";this.propertyValue(b,g&1&&d?function(){var h=f.apply(this,arguments);return h!=null?d(h):null}:f!=null&&d?d(f):f).type=g;return this}return this.instance()[b]}};
+pv.Mark.prototype.propertyValue=function(b,c){var d=this.$properties;c={name:b,id:pv.id(),value:c};for(var f=0;f<d.length;f++)if(d[f].name==b){d.splice(f,1);break}d.push(c);return c};pv.Mark.prototype.property("data").property("visible",Boolean).property("left",Number).property("right",Number).property("top",Number).property("bottom",Number).property("cursor",String).property("title",String).property("reverse",Boolean).property("antialias",Boolean).property("events",String);a=pv.Mark.prototype;
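The property plumbing above gives every mark chainable getter/setters: a call with an argument records a {name, id, value} entry in $properties (replacing any earlier entry of the same name, so the last definition wins), a call without arguments reads the built instance, and functions become dynamic properties re-evaluated per datum. A minimal usage sketch, assuming the surrounding protovis file is loaded; the data and sizes are invented:

    var bar = new pv.Bar()
        .data([1, 1.2, 1.7, 1.5])                 // constant property
        .height(function(d) { return d * 80; })   // function: one value per datum
        .width(20);                               // constant property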
+a.childIndex=-1;a.index=-1;a.scale=1;a.defaults=(new pv.Mark).data(function(b){return[b]}).visible(true).antialias(true).events("painted");a.extend=function(b){this.proto=b;return this};a.add=function(b){return this.parent.add(b).extend(this)};a.def=function(b,c){this.propertyMethod(b,true);return this[b](arguments.length>1?c:null)};
+a.anchor=function(b){function c(g){for(var h=d,i=[];!(f=h.scene);){g=g.parent;i.push({index:g.index,childIndex:h.childIndex});h=h.parent}for(;i.length;){g=i.pop();f=f[g.index].children[g.childIndex]}if(d.hasOwnProperty("index")){i=pv.extend(f[d.index]);i.right=i.top=i.left=i.bottom=0;return[i]}return f}var d=this,f;b||(b="center");return(new pv.Anchor(this)).name(b).def("$mark.anchor",function(){f=this.scene.target=c(this)}).data(function(){return f.map(function(g){return g.data})}).visible(function(){return f[this.index].visible}).left(function(){var g=
+f[this.index],h=g.width||0;switch(this.name()){case "bottom":case "top":case "center":return g.left+h/2;case "left":return null}return g.left+h}).top(function(){var g=f[this.index],h=g.height||0;switch(this.name()){case "left":case "right":case "center":return g.top+h/2;case "top":return null}return g.top+h}).right(function(){var g=f[this.index];return this.name()=="left"?g.right+(g.width||0):null}).bottom(function(){var g=f[this.index];return this.name()=="top"?g.bottom+(g.height||0):null}).textAlign(function(){switch(this.name()){case "bottom":case "top":case "center":return"center";
+case "right":return"right"}return"left"}).textBaseline(function(){switch(this.name()){case "right":case "left":case "center":return"middle";case "top":return"top"}return"bottom"})};a.anchorTarget=function(){return this.proto.anchorTarget()};a.margin=function(b){return this.left(b).right(b).top(b).bottom(b)};a.instance=function(b){var c=this.scene||this.parent.instance(-1).children[this.childIndex],d=!arguments.length||this.hasOwnProperty("index")?this.index:b;return c[d<0?c.length-1:d]}; [...]
+a.last=function(){return this.scene[this.scene.length-1]};a.sibling=function(){return this.index==0?null:this.scene[this.index-1]};a.cousin=function(){var b=this.parent;return(b=b&&b.sibling())&&b.children?b.children[this.childIndex][this.index]:null};
+a.render=function(){function b(i,j,l){i.scale=l;if(j<g.length){f.unshift(null);if(i.hasOwnProperty("index"))c(i,j,l);else{for(var k=0,q=i.scene.length;k<q;k++){i.index=k;c(i,j,l)}delete i.index}f.shift()}else{i.build();pv.Scene.scale=l;pv.Scene.updateAll(i.scene)}delete i.scale}function c(i,j,l){var k=i.scene[i.index],q;if(k.visible){var o=g[j],n=i.children[o];for(q=0;q<o;q++)i.children[q].scene=k.children[q];f[0]=k.data;if(n.scene)b(n,j+1,l*k.transform.k);else{n.scene=k.children[o];b(n,j+1,l*k.transform.k);
+delete n.scene}for(q=0;q<o;q++)delete i.children[q].scene}}var d=this.parent,f=pv.Mark.stack;if(d&&!this.root.scene)this.root.render();else{for(var g=[],h=this;h.parent;h=h.parent)g.unshift(h.childIndex);for(this.bind();d&&!d.hasOwnProperty("index");)d=d.parent;this.context(d?d.scene:undefined,d?d.index:-1,function(){b(this.root,0,1)})}};pv.Mark.stack=[];a=pv.Mark.prototype;
+a.bind=function(){function b(j){do for(var l=j.$properties,k=l.length-1;k>=0;k--){var q=l[k];if(!(q.name in c)){c[q.name]=q;switch(q.name){case "data":f=q;break;case "visible":g=q;break;default:d[q.type].push(q);break}}}while(j=j.proto)}var c={},d=[[],[],[],[]],f,g;b(this);b(this.defaults);d[1].reverse();d[3].reverse();var h=this;do for(var i in h.properties)i in c||d[2].push(c[i]={name:i,type:2,value:null});while(h=h.proto);h=d[0].concat(d[1]);for(i=0;i<h.length;i++)this.propertyMethod(h[i].name,
+true);this.binds={properties:c,data:f,defs:h,required:[g],optional:pv.blend(d)}};
+a.build=function(){var b=this.scene,c=pv.Mark.stack;if(!b){b=this.scene=[];b.mark=this;b.type=this.type;b.childIndex=this.childIndex;if(this.parent){b.parent=this.parent.scene;b.parentIndex=this.parent.index}}if(this.binds.defs.length){var d=b.defs;if(!d)b.defs=d={};for(var f=0;f<this.binds.defs.length;f++){var g=this.binds.defs[f],h=d[g.name];if(!h||g.id>h.id)d[g.name]={id:0,value:g.type&1?g.value.apply(this,c):g.value}}}d=this.binds.data;d=d.type&1?d.value.apply(this,c):d.value;c.unshift(null);
+b.length=d.length;for(f=0;f<d.length;f++){pv.Mark.prototype.index=this.index=f;(g=b[f])||(b[f]=g={});g.data=c[0]=d[f];this.buildInstance(g)}pv.Mark.prototype.index=-1;delete this.index;c.shift();return this};a.buildProperties=function(b,c){for(var d=0,f=c.length;d<f;d++){var g=c[d],h=g.value;switch(g.type){case 0:case 1:h=this.scene.defs[g.name].value;break;case 3:h=h.apply(this,pv.Mark.stack);break}b[g.name]=h}};
+a.buildInstance=function(b){this.buildProperties(b,this.binds.required);if(b.visible){this.buildProperties(b,this.binds.optional);this.buildImplied(b)}};
+a.buildImplied=function(b){var c=b.left,d=b.right,f=b.top,g=b.bottom,h=this.properties,i=h.width?b.width:0,j=h.height?b.height:0,l=this.parent?this.parent.width():i+c+d;if(i==null)i=l-(d=d||0)-(c=c||0);else if(d==null)d=l-i-(c=c||0);else if(c==null)c=l-i-(d=d||0);l=this.parent?this.parent.height():j+f+g;if(j==null)j=l-(f=f||0)-(g=g||0);else if(g==null)g=l-j-(f=f||0);else if(f==null)f=l-j-(g=g||0);b.left=c;b.right=d;b.top=f;b.bottom=g;if(h.width)b.width=i;if(h.height)b.height=j;if(h.textStyle&&!b.textStyle)b.textStyle=
+pv.Color.transparent;if(h.fillStyle&&!b.fillStyle)b.fillStyle=pv.Color.transparent;if(h.strokeStyle&&!b.strokeStyle)b.strokeStyle=pv.Color.transparent};
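buildImplied resolves the one degree of freedom per axis: of left/width/right (and likewise top/height/bottom), whichever is unspecified is derived from the parent extent and the other two, then any styles left unset default to pv.Color.transparent. One axis of that rule, extracted as a sketch:

    // Given a parent extent and any two of (start, length, end),
    // derive the third; null means "not specified by the user".
    function solveAxis(parent, start, length, end) {
      if (length == null) length = parent - (end || 0) - (start || 0);
      else if (end == null) end = parent - length - (start || 0);
      else if (start == null) start = parent - length - (end || 0);
      return {start: start, length: length, end: end};
    }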
+a.mouse=function(){var b=pv.event.pageX||0,c=pv.event.pageY||0,d=this.root.canvas();do{b-=d.offsetLeft;c-=d.offsetTop}while(d=d.offsetParent);d=pv.Transform.identity;var f=this.properties.transform?this:this.parent,g=[];do g.push(f);while(f=f.parent);for(;f=g.pop();)d=d.translate(f.left(),f.top()).times(f.transform());d=d.invert();return pv.vector(b*d.k+d.x,c*d.k+d.y)};a.event=function(b,c){this.$handlers[b]=pv.functor(c);return this};
+a.context=function(b,c,d){function f(k,q){pv.Mark.scene=k;h.index=q;if(k){var o=k.mark,n=o,m=[];do{m.push(n);i.push(k[q].data);n.index=q;n.scene=k;q=k.parentIndex;k=k.parent}while(n=n.parent);k=m.length-1;for(q=1;k>0;k--){n=m[k];n.scale=q;q*=n.scene[n.index].transform.k}if(o.children){k=0;for(m=o.children.length;k<m;k++){n=o.children[k];n.scene=o.scene[o.index].children[k];n.scale=q}}}}function g(k){if(k){k=k.mark;var q;if(k.children)for(var o=0,n=k.children.length;o<n;o++){q=k.children[o];delete q.scene;
+delete q.scale}q=k;do{i.pop();if(q.parent){delete q.scene;delete q.scale}delete q.index}while(q=q.parent)}}var h=pv.Mark.prototype,i=pv.Mark.stack,j=pv.Mark.scene,l=h.index;g(j,l);f(b,c);try{d.apply(this,i)}finally{g(b,c);f(j,l)}};pv.Mark.dispatch=function(b,c,d){var f=c.mark,g=c.parent,h=f.$handlers[b];if(!h)return g&&pv.Mark.dispatch(b,g,c.parentIndex);f.context(c,d,function(){(f=h.apply(f,pv.Mark.stack))&&f.render&&f.render()});return true};
+pv.Anchor=function(b){pv.Mark.call(this);this.target=b;this.parent=b.parent};pv.Anchor.prototype=pv.extend(pv.Mark).property("name",String);pv.Anchor.prototype.anchorTarget=function(){return this.target};pv.Area=function(){pv.Mark.call(this)};
+pv.Area.prototype=pv.extend(pv.Mark).property("width",Number).property("height",Number).property("lineWidth",Number).property("strokeStyle",pv.color).property("fillStyle",pv.color).property("segmented",Boolean).property("interpolate",String).property("tension",Number);pv.Area.prototype.type="area";pv.Area.prototype.defaults=(new pv.Area).extend(pv.Mark.prototype.defaults).lineWidth(1.5).fillStyle(pv.Colors.category20().by(pv.parent)).interpolate("linear").tension(0.7);
+pv.Area.prototype.buildImplied=function(b){if(b.height==null)b.height=0;if(b.width==null)b.width=0;pv.Mark.prototype.buildImplied.call(this,b)};pv.Area.fixed={lineWidth:1,lineJoin:1,strokeStyle:1,fillStyle:1,segmented:1,interpolate:1,tension:1};
+pv.Area.prototype.bind=function(){pv.Mark.prototype.bind.call(this);var b=this.binds,c=b.required;b=b.optional;for(var d=0,f=b.length;d<f;d++){var g=b[d];g.fixed=g.name in pv.Area.fixed;if(g.name=="segmented"){c.push(g);b.splice(d,1);d--;f--}}this.binds.$required=c;this.binds.$optional=b};
+pv.Area.prototype.buildInstance=function(b){var c=this.binds;if(this.index){var d=c.fixed;if(!d){d=c.fixed=[];function f(i){return!i.fixed||(d.push(i),false)}c.required=c.required.filter(f);if(!this.scene[0].segmented)c.optional=c.optional.filter(f)}c=0;for(var g=d.length;c<g;c++){var h=d[c].name;b[h]=this.scene[0][h]}}else{c.required=c.$required;c.optional=c.$optional;c.fixed=null}pv.Mark.prototype.buildInstance.call(this,b)};
+pv.Area.prototype.anchor=function(b){var c;return pv.Mark.prototype.anchor.call(this,b).def("$area.anchor",function(){c=this.scene.target}).interpolate(function(){return c[this.index].interpolate}).eccentricity(function(){return c[this.index].eccentricity}).tension(function(){return c[this.index].tension})};pv.Bar=function(){pv.Mark.call(this)};
+pv.Bar.prototype=pv.extend(pv.Mark).property("width",Number).property("height",Number).property("lineWidth",Number).property("strokeStyle",pv.color).property("fillStyle",pv.color);pv.Bar.prototype.type="bar";pv.Bar.prototype.defaults=(new pv.Bar).extend(pv.Mark.prototype.defaults).lineWidth(1.5).fillStyle(pv.Colors.category20().by(pv.parent));pv.Dot=function(){pv.Mark.call(this)};
+pv.Dot.prototype=pv.extend(pv.Mark).property("size",Number).property("radius",Number).property("shape",String).property("angle",Number).property("lineWidth",Number).property("strokeStyle",pv.color).property("fillStyle",pv.color);pv.Dot.prototype.type="dot";pv.Dot.prototype.defaults=(new pv.Dot).extend(pv.Mark.prototype.defaults).size(20).shape("circle").lineWidth(1.5).strokeStyle(pv.Colors.category10().by(pv.parent));
+pv.Dot.prototype.anchor=function(b){var c;return pv.Mark.prototype.anchor.call(this,b).def("$wedge.anchor",function(){c=this.scene.target}).left(function(){var d=c[this.index];switch(this.name()){case "bottom":case "top":case "center":return d.left;case "left":return null}return d.left+d.radius}).right(function(){var d=c[this.index];return this.name()=="left"?d.right+d.radius:null}).top(function(){var d=c[this.index];switch(this.name()){case "left":case "right":case "center":return d.top;case "top":return null}return d.top+
+d.radius}).bottom(function(){var d=c[this.index];return this.name()=="top"?d.bottom+d.radius:null}).textAlign(function(){switch(this.name()){case "left":return"right";case "bottom":case "top":case "center":return"center"}return"left"}).textBaseline(function(){switch(this.name()){case "right":case "left":case "center":return"middle";case "bottom":return"top"}return"bottom"})};
+pv.Dot.prototype.buildImplied=function(b){if(b.radius==null)b.radius=Math.sqrt(b.size);else if(b.size==null)b.size=b.radius*b.radius;pv.Mark.prototype.buildImplied.call(this,b)};pv.Label=function(){pv.Mark.call(this)};
+pv.Label.prototype=pv.extend(pv.Mark).property("text",String).property("font",String).property("textAngle",Number).property("textStyle",pv.color).property("textAlign",String).property("textBaseline",String).property("textMargin",Number).property("textDecoration",String).property("textShadow",String);pv.Label.prototype.type="label";pv.Label.prototype.defaults=(new pv.Label).extend(pv.Mark.prototype.defaults).events("none").text(pv.identity).font("10px sans-serif").textAngle(0).textStyle("black").textAlign("left").textBaseline("bottom").textMargin(3);
+pv.Line=function(){pv.Mark.call(this)};pv.Line.prototype=pv.extend(pv.Mark).property("lineWidth",Number).property("lineJoin",String).property("strokeStyle",pv.color).property("fillStyle",pv.color).property("segmented",Boolean).property("interpolate",String).property("eccentricity",Number).property("tension",Number);a=pv.Line.prototype;a.type="line";a.defaults=(new pv.Line).extend(pv.Mark.prototype.defaults).lineJoin("miter").lineWidth(1.5).strokeStyle(pv.Colors.category10().by(pv.parent)).interpolate("linear").eccentricity(0).tension(0.7);
+a.bind=pv.Area.prototype.bind;a.buildInstance=pv.Area.prototype.buildInstance;a.anchor=function(b){return pv.Area.prototype.anchor.call(this,b).textAlign(function(){switch(this.name()){case "left":return"right";case "bottom":case "top":case "center":return"center";case "right":return"left"}}).textBaseline(function(){switch(this.name()){case "right":case "left":case "center":return"middle";case "top":return"bottom";case "bottom":return"top"}})};pv.Rule=function(){pv.Mark.call(this)};
+pv.Rule.prototype=pv.extend(pv.Mark).property("width",Number).property("height",Number).property("lineWidth",Number).property("strokeStyle",pv.color);pv.Rule.prototype.type="rule";pv.Rule.prototype.defaults=(new pv.Rule).extend(pv.Mark.prototype.defaults).lineWidth(1).strokeStyle("black").antialias(false);pv.Rule.prototype.anchor=pv.Line.prototype.anchor;
+pv.Rule.prototype.buildImplied=function(b){var c=b.left,d=b.right;if(b.width!=null||c==null&&d==null||d!=null&&c!=null)b.height=0;else b.width=0;pv.Mark.prototype.buildImplied.call(this,b)};pv.Panel=function(){pv.Bar.call(this);this.children=[];this.root=this;this.$dom=pv.$&&pv.$.s};pv.Panel.prototype=pv.extend(pv.Bar).property("transform").property("overflow",String).property("canvas",function(b){return typeof b=="string"?document.getElementById(b):b});a=pv.Panel.prototype;a.type="panel";
+a.defaults=(new pv.Panel).extend(pv.Bar.prototype.defaults).fillStyle(null).overflow("visible");a.anchor=function(b){b=pv.Bar.prototype.anchor.call(this,b);b.parent=this;return b};a.add=function(b){b=new b;b.parent=this;b.root=this.root;b.childIndex=this.children.length;this.children.push(b);return b};a.bind=function(){pv.Mark.prototype.bind.call(this);for(var b=0;b<this.children.length;b++)this.children[b].bind()};
+a.buildInstance=function(b){pv.Bar.prototype.buildInstance.call(this,b);if(b.visible){if(!b.children)b.children=[];var c=this.scale*b.transform.k,d,f=this.children.length;pv.Mark.prototype.index=-1;for(var g=0;g<f;g++){d=this.children[g];d.scene=b.children[g];d.scale=c;d.build()}for(g=0;g<f;g++){d=this.children[g];b.children[g]=d.scene;delete d.scene;delete d.scale}b.children.length=f}};
+a.buildImplied=function(b){if(!this.parent){var c=b.canvas;if(c){if(c.$panel!=this)for(c.$panel=this;c.lastChild;)c.removeChild(c.lastChild);var d;if(b.width==null){d=parseFloat(pv.css(c,"width"));b.width=d-b.left-b.right}if(b.height==null){d=parseFloat(pv.css(c,"height"));b.height=d-b.top-b.bottom}}else{d=this.$canvas||(this.$canvas=[]);if(!(c=d[this.index])){c=d[this.index]=document.createElement("span");if(this.$dom)this.$dom.parentNode.insertBefore(c,this.$dom);else{for(d=document.body;d.lastChild&&
+d.lastChild.tagName;)d=d.lastChild;if(d!=document.body)d=d.parentNode;d.appendChild(c)}}}b.canvas=c}if(!b.transform)b.transform=pv.Transform.identity;pv.Mark.prototype.buildImplied.call(this,b)};pv.Image=function(){pv.Bar.call(this)};pv.Image.prototype=pv.extend(pv.Bar).property("url",String).property("imageWidth",Number).property("imageHeight",Number);a=pv.Image.prototype;a.type="image";a.defaults=(new pv.Image).extend(pv.Bar.prototype.defaults).fillStyle(null);
+a.image=function(b){this.$image=function(){var c=b.apply(this,arguments);return c==null?pv.Color.transparent:typeof c=="string"?pv.color(c):c};return this};a.bind=function(){pv.Bar.prototype.bind.call(this);var b=this.binds,c=this;do b.image=c.$image;while(!b.image&&(c=c.proto))};
+a.buildImplied=function(b){pv.Bar.prototype.buildImplied.call(this,b);if(b.visible){if(b.imageWidth==null)b.imageWidth=b.width;if(b.imageHeight==null)b.imageHeight=b.height;if(b.url==null&&this.binds.image){var c=this.$canvas||(this.$canvas=document.createElement("canvas")),d=c.getContext("2d"),f=b.imageWidth,g=b.imageHeight,h=pv.Mark.stack;c.width=f;c.height=g;b=(b.image=d.createImageData(f,g)).data;h.unshift(null,null);for(d=c=0;c<g;c++){h[1]=c;for(var i=0;i<f;i++){h[0]=i;var j=this.binds.image.apply(this,
+h);b[d++]=j.r;b[d++]=j.g;b[d++]=j.b;b[d++]=255*j.a}}h.splice(0,2)}}};pv.Wedge=function(){pv.Mark.call(this)};pv.Wedge.prototype=pv.extend(pv.Mark).property("startAngle",Number).property("endAngle",Number).property("angle",Number).property("innerRadius",Number).property("outerRadius",Number).property("lineWidth",Number).property("strokeStyle",pv.color).property("fillStyle",pv.color);a=pv.Wedge.prototype;a.type="wedge";
+a.defaults=(new pv.Wedge).extend(pv.Mark.prototype.defaults).startAngle(function(){var b=this.sibling();return b?b.endAngle:-Math.PI/2}).innerRadius(0).lineWidth(1.5).strokeStyle(null).fillStyle(pv.Colors.category20().by(pv.index));a.midRadius=function(){return(this.innerRadius()+this.outerRadius())/2};a.midAngle=function(){return(this.startAngle()+this.endAngle())/2};
+a.anchor=function(b){function c(h){return h.innerRadius||h.angle<2*Math.PI}function d(h){return(h.innerRadius+h.outerRadius)/2}function f(h){return(h.startAngle+h.endAngle)/2}var g;return pv.Mark.prototype.anchor.call(this,b).def("$wedge.anchor",function(){g=this.scene.target}).left(function(){var h=g[this.index];if(c(h))switch(this.name()){case "outer":return h.left+h.outerRadius*Math.cos(f(h));case "inner":return h.left+h.innerRadius*Math.cos(f(h));case "start":return h.left+d(h)*Math.cos(h.startAngle);
+case "center":return h.left+d(h)*Math.cos(f(h));case "end":return h.left+d(h)*Math.cos(h.endAngle)}return h.left}).top(function(){var h=g[this.index];if(c(h))switch(this.name()){case "outer":return h.top+h.outerRadius*Math.sin(f(h));case "inner":return h.top+h.innerRadius*Math.sin(f(h));case "start":return h.top+d(h)*Math.sin(h.startAngle);case "center":return h.top+d(h)*Math.sin(f(h));case "end":return h.top+d(h)*Math.sin(h.endAngle)}return h.top}).textAlign(function(){var h=g[this.inde [...]
+"right":"left";case "inner":return pv.Wedge.upright(f(h))?"left":"right"}return"center"}).textBaseline(function(){var h=g[this.index];if(c(h))switch(this.name()){case "start":return pv.Wedge.upright(h.startAngle)?"top":"bottom";case "end":return pv.Wedge.upright(h.endAngle)?"bottom":"top"}return"middle"}).textAngle(function(){var h=g[this.index],i=0;if(c(h))switch(this.name()){case "center":case "inner":case "outer":i=f(h);break;case "start":i=h.startAngle;break;case "end":i=h.endAngle;b [...]
+i:i+Math.PI})};pv.Wedge.upright=function(b){b%=2*Math.PI;b=b<0?2*Math.PI+b:b;return b<Math.PI/2||b>=3*Math.PI/2};pv.Wedge.prototype.buildImplied=function(b){if(b.angle==null)b.angle=b.endAngle-b.startAngle;else if(b.endAngle==null)b.endAngle=b.startAngle+b.angle;pv.Mark.prototype.buildImplied.call(this,b)};pv.simulation=function(b){return new pv.Simulation(b)};pv.Simulation=function(b){for(var c=0;c<b.length;c++)this.particle(b[c])};a=pv.Simulation.prototype;
+a.particle=function(b){b.next=this.particles;if(isNaN(b.px))b.px=b.x;if(isNaN(b.py))b.py=b.y;if(isNaN(b.fx))b.fx=0;if(isNaN(b.fy))b.fy=0;this.particles=b;return this};a.force=function(b){b.next=this.forces;this.forces=b;return this};a.constraint=function(b){b.next=this.constraints;this.constraints=b;return this};
+a.stabilize=function(b){var c;arguments.length||(b=3);for(var d=0;d<b;d++){var f=new pv.Quadtree(this.particles);for(c=this.constraints;c;c=c.next)c.apply(this.particles,f)}for(c=this.particles;c;c=c.next){c.px=c.x;c.py=c.y}return this};
+a.step=function(){var b;for(b=this.particles;b;b=b.next){var c=b.px,d=b.py;b.px=b.x;b.py=b.y;b.x+=b.vx=b.x-c+b.fx;b.y+=b.vy=b.y-d+b.fy}c=new pv.Quadtree(this.particles);for(b=this.constraints;b;b=b.next)b.apply(this.particles,c);for(b=this.particles;b;b=b.next)b.fx=b.fy=0;for(b=this.forces;b;b=b.next)b.apply(this.particles,c)};
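a.step is a position-Verlet integrator: velocity is implicit in the difference between the current and previous positions, forces accumulated during the last tick are added in, constraints are then enforced through a freshly built quadtree, and forces are re-accumulated for the next tick. The update for one particle, deminified:

    // Position-Verlet with unit mass and unit timestep:
    // x' = x + (x - x_prev) + fx, and vx records the step taken.
    function verletStep(p) {
      var px = p.px, py = p.py;
      p.px = p.x; p.py = p.y;          // current position becomes previous
      p.x += p.vx = p.x - px + p.fx;   // advance by implied velocity + force
      p.y += p.vy = p.y - py + p.fy;
    }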
+pv.Quadtree=function(b){function c(k,q,o,n,m,r){if(!(isNaN(q.x)||isNaN(q.y)))if(k.leaf)if(k.p){if(!(Math.abs(k.p.x-q.x)+Math.abs(k.p.y-q.y)<0.01)){var s=k.p;k.p=null;d(k,s,o,n,m,r)}d(k,q,o,n,m,r)}else k.p=q;else d(k,q,o,n,m,r)}function d(k,q,o,n,m,r){var s=(o+m)*0.5,u=(n+r)*0.5,x=q.x>=s,t=q.y>=u;k.leaf=false;switch((t<<1)+x){case 0:k=k.c1||(k.c1=new pv.Quadtree.Node);break;case 1:k=k.c2||(k.c2=new pv.Quadtree.Node);break;case 2:k=k.c3||(k.c3=new pv.Quadtree.Node);break;case 3:k=k.c4||(k.c4=new pv.Quadtree.Node);
+break}if(x)o=s;else m=s;if(t)n=u;else r=u;c(k,q,o,n,m,r)}var f,g=Number.POSITIVE_INFINITY,h=g,i=Number.NEGATIVE_INFINITY,j=i;for(f=b;f;f=f.next){if(f.x<g)g=f.x;if(f.y<h)h=f.y;if(f.x>i)i=f.x;if(f.y>j)j=f.y}f=i-g;var l=j-h;if(f>l)j=h+f;else i=g+l;this.xMin=g;this.yMin=h;this.xMax=i;this.yMax=j;this.root=new pv.Quadtree.Node;for(f=b;f;f=f.next)c(this.root,f,g,h,i,j)};pv.Quadtree.Node=function(){this.leaf=true;this.p=this.c4=this.c3=this.c2=this.c1=null};pv.Force={};
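pv.Quadtree builds by recursive insertion: the bounding square is the expanded bounding box of all particles, a leaf already holding a particle is split when a second one arrives (points closer than 0.01 are stacked on the same leaf to avoid infinite recursion), and the child quadrant is chosen by two comparisons against the cell midpoint. The quadrant selection in the switch above is just:

    // c1..c4 correspond to quadrant index (below << 1) | right,
    // computed against the cell midpoint (sx, sy).
    function quadrant(p, sx, sy) {
      var right = p.x >= sx ? 1 : 0;
      var below = p.y >= sy ? 1 : 0;
      return (below << 1) | right;  // 0:c1 NW, 1:c2 NE, 2:c3 SW, 3:c4 SE
    }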
+pv.Force.charge=function(b){function c(k){function q(m){c(m);k.cn+=m.cn;o+=m.cn*m.cx;n+=m.cn*m.cy}var o=0,n=0;k.cn=0;if(!k.leaf){k.c1&&q(k.c1);k.c2&&q(k.c2);k.c3&&q(k.c3);k.c4&&q(k.c4)}if(k.p){k.cn+=b;o+=b*k.p.x;n+=b*k.p.y}k.cx=o/k.cn;k.cy=n/k.cn}function d(k,q,o,n,m,r){var s=k.cx-q.x,u=k.cy-q.y,x=1/Math.sqrt(s*s+u*u);if(k.leaf&&k.p!=q||(m-o)*x<j){if(!(x<i)){if(x>g)x=g;k=k.cn*x*x*x;s=s*k;u=u*k;q.fx+=s;q.fy+=u}}else if(!k.leaf){var t=(o+m)*0.5,p=(n+r)*0.5;k.c1&&d(k.c1,q,o,n,t,p);k.c2&&d(k.c2,q,t,n,
+m,p);k.c3&&d(k.c3,q,o,p,t,r);k.c4&&d(k.c4,q,t,p,m,r);if(!(x<i)){if(x>g)x=g;if(k.p&&k.p!=q){k=b*x*x*x;s=s*k;u=u*k;q.fx+=s;q.fy+=u}}}}var f=2,g=1/f,h=500,i=1/h,j=0.9,l={};arguments.length||(b=-40);l.constant=function(k){if(arguments.length){b=Number(k);return l}return b};l.domain=function(k,q){if(arguments.length){f=Number(k);g=1/f;h=Number(q);i=1/h;return l}return[f,h]};l.theta=function(k){if(arguments.length){j=Number(k);return l}return j};l.apply=function(k,q){c(q.root);for(k=k;k;k=k.next)d(q.root,
+k,q.xMin,q.yMin,q.xMax,q.yMax)};return l};pv.Force.drag=function(b){var c={};arguments.length||(b=0.1);c.constant=function(d){if(arguments.length){b=d;return c}return b};c.apply=function(d){if(b)for(d=d;d;d=d.next){d.fx-=b*d.vx;d.fy-=b*d.vy}};return c};
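pv.Force.charge is a Barnes-Hut n-body approximation: function c accumulates each quadtree cell's total charge and centre of charge, and function d applies a whole cell as one point charge when it looks small from the particle's viewpoint, recursing otherwise, with distances clamped into the configurable [min, max] domain. The opening criterion, spelled out:

    // Approximate a cell by its centre of charge when
    // cellWidth / distance < theta (0.9 by default); otherwise descend.
    function acceptCell(cellWidth, distance, theta) {
      return cellWidth / distance < theta;
    }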
+pv.Force.spring=function(b){var c=0.1,d=20,f,g,h={};arguments.length||(b=0.1);h.links=function(i){if(arguments.length){f=i;g=i.map(function(j){return 1/Math.sqrt(Math.max(j.sourceNode.linkDegree,j.targetNode.linkDegree))});return h}return f};h.constant=function(i){if(arguments.length){b=Number(i);return h}return b};h.damping=function(i){if(arguments.length){c=Number(i);return h}return c};h.length=function(i){if(arguments.length){d=Number(i);return h}return d};h.apply=function(){for(var i=0;i<f.length;i++){var j=
+f[i].sourceNode,l=f[i].targetNode,k=j.x-l.x,q=j.y-l.y,o=Math.sqrt(k*k+q*q),n=o?1/o:1;n=(b*g[i]*(o-d)+c*g[i]*(k*(j.vx-l.vx)+q*(j.vy-l.vy))*n)*n;k=-n*(o?k:0.01*(0.5-Math.random()));q=-n*(o?q:0.01*(0.5-Math.random()));j.fx+=k;j.fy+=q;l.fx-=k;l.fy-=q}};return h};pv.Constraint={};
+pv.Constraint.collision=function(b){function c(k,q,o,n,m,r){if(!k.leaf){var s=(o+m)*0.5,u=(n+r)*0.5,x=u<j,t=s>g,p=s<i;if(u>h){k.c1&&t&&c(k.c1,q,o,n,s,u);k.c2&&p&&c(k.c2,q,s,n,m,u)}if(x){k.c3&&t&&c(k.c3,q,o,u,s,r);k.c4&&p&&c(k.c4,q,s,u,m,r)}}if(k.p&&k.p!=q){o=q.x-k.p.x;n=q.y-k.p.y;m=Math.sqrt(o*o+n*n);r=f+b(k.p);if(m<r){m=(m-r)/m*0.5;o*=m;n*=m;q.x-=o;q.y-=n;k.p.x+=o;k.p.y+=n}}}var d=1,f,g,h,i,j,l={};arguments.length||(f=10);l.repeat=function(k){if(arguments.length){d=Number(k);return l}return d};l.apply=
+function(k,q){var o,n,m=-Infinity;for(o=k;o;o=o.next){n=b(o);if(n>m)m=n}for(var r=0;r<d;r++)for(o=k;o;o=o.next){n=(f=b(o))+m;g=o.x-n;i=o.x+n;h=o.y-n;j=o.y+n;c(q.root,o,q.xMin,q.yMin,q.xMax,q.yMax)}};return l};pv.Constraint.position=function(b){var c=1,d={};arguments.length||(b=function(f){return f.fix});d.alpha=function(f){if(arguments.length){c=Number(f);return d}return c};d.apply=function(f){for(f=f;f;f=f.next){var g=b(f);if(g){f.x+=(g.x-f.x)*c;f.y+=(g.y-f.y)*c;f.fx=f.fy=f.vx=f.vy=0}}};return d};
+pv.Constraint.bound=function(){var b={},c,d;b.x=function(f,g){if(arguments.length){c={min:Math.min(f,g),max:Math.max(f,g)};return this}return c};b.y=function(f,g){if(arguments.length){d={min:Math.min(f,g),max:Math.max(f,g)};return this}return d};b.apply=function(f){if(c)for(var g=f;g;g=g.next)g.x=g.x<c.min?c.min:g.x>c.max?c.max:g.x;if(d)for(g=f;g;g=g.next)g.y=g.y<d.min?d.min:g.y>d.max?d.max:g.y};return b};pv.Layout=function(){pv.Panel.call(this)};pv.Layout.prototype=pv.extend(pv.Panel);
+pv.Layout.prototype.property=function(b,c){if(!this.hasOwnProperty("properties"))this.properties=pv.extend(this.properties);this.properties[b]=true;this.propertyMethod(b,false,pv.Mark.cast[b]=c);return this};
+pv.Layout.Network=function(){pv.Layout.call(this);var b=this;this.$id=pv.id();(this.node=(new pv.Mark).data(function(){return b.nodes()}).strokeStyle("#1f77b4").fillStyle("#fff").left(function(c){return c.x}).top(function(c){return c.y})).parent=this;this.link=(new pv.Mark).extend(this.node).data(function(c){return[c.sourceNode,c.targetNode]}).fillStyle(null).lineWidth(function(c,d){return d.linkValue*1.5}).strokeStyle("rgba(0,0,0,.2)");this.link.add=function(c){return b.add(pv.Panel).data(function(){return b.links()}).add(c).extend(this)};
+(this.label=(new pv.Mark).extend(this.node).textMargin(7).textBaseline("middle").text(function(c){return c.nodeName||c.nodeValue}).textAngle(function(c){c=c.midAngle;return pv.Wedge.upright(c)?c:c+Math.PI}).textAlign(function(c){return pv.Wedge.upright(c.midAngle)?"left":"right"})).parent=this};
+pv.Layout.Network.prototype=pv.extend(pv.Layout).property("nodes",function(b){return b.map(function(c,d){if(typeof c!="object")c={nodeValue:c};c.index=d;c.linkDegree=0;return c})}).property("links",function(b){return b.map(function(c){if(isNaN(c.linkValue))c.linkValue=isNaN(c.value)?1:c.value;return c})});pv.Layout.Network.prototype.reset=function(){this.$id=pv.id();return this};
+pv.Layout.Network.prototype.buildProperties=function(b,c){if((b.$id||0)<this.$id)pv.Layout.prototype.buildProperties.call(this,b,c)};pv.Layout.Network.prototype.buildImplied=function(b){pv.Layout.prototype.buildImplied.call(this,b);if(b.$id>=this.$id)return true;b.$id=this.$id;b.links.forEach(function(c){var d=c.linkValue;(c.sourceNode||(c.sourceNode=b.nodes[c.source])).linkDegree+=d;(c.targetNode||(c.targetNode=b.nodes[c.target])).linkDegree+=d})};
+pv.Layout.Hierarchy=function(){pv.Layout.Network.call(this);this.link.strokeStyle("#ccc")};pv.Layout.Hierarchy.prototype=pv.extend(pv.Layout.Network);pv.Layout.Hierarchy.prototype.buildImplied=function(b){if(!b.links)b.links=pv.Layout.Hierarchy.links.call(this);pv.Layout.Network.prototype.buildImplied.call(this,b)};pv.Layout.Hierarchy.links=function(){return this.nodes().filter(function(b){return b.parentNode}).map(function(b){return{sourceNode:b,targetNode:b.parentNode,linkValue:1}})};
+pv.Layout.Hierarchy.NodeLink={buildImplied:function(b){function c(m){return m.parentNode?m.depth*(o-q)+q:0}function d(m){return m.parentNode?(m.breadth-0.25)*2*Math.PI:0}function f(m){switch(i){case "left":return m.depth*l;case "right":return l-m.depth*l;case "top":return m.breadth*l;case "bottom":return l-m.breadth*l;case "radial":return l/2+c(m)*Math.cos(m.midAngle)}}function g(m){switch(i){case "left":return m.breadth*k;case "right":return k-m.breadth*k;case "top":return m.depth*k;case "bottom":return k-
+m.depth*k;case "radial":return k/2+c(m)*Math.sin(m.midAngle)}}var h=b.nodes,i=b.orient,j=/^(top|bottom)$/.test(i),l=b.width,k=b.height;if(i=="radial"){var q=b.innerRadius,o=b.outerRadius;if(q==null)q=0;if(o==null)o=Math.min(l,k)/2}for(b=0;b<h.length;b++){var n=h[b];n.midAngle=i=="radial"?d(n):j?Math.PI/2:0;n.x=f(n);n.y=g(n);if(n.firstChild)n.midAngle+=Math.PI}}};
+pv.Layout.Hierarchy.Fill={constructor:function(){this.node.strokeStyle("#fff").fillStyle("#ccc").width(function(b){return b.dx}).height(function(b){return b.dy}).innerRadius(function(b){return b.innerRadius}).outerRadius(function(b){return b.outerRadius}).startAngle(function(b){return b.startAngle}).angle(function(b){return b.angle});this.label.textAlign("center").left(function(b){return b.x+b.dx/2}).top(function(b){return b.y+b.dy/2});delete this.link},buildImplied:function(b){function c(p,v){return(p+
+v)/(1+v)}function d(p){switch(o){case "left":return c(p.minDepth,s)*m;case "right":return(1-c(p.maxDepth,s))*m;case "top":return p.minBreadth*m;case "bottom":return(1-p.maxBreadth)*m;case "radial":return m/2}}function f(p){switch(o){case "left":return p.minBreadth*r;case "right":return(1-p.maxBreadth)*r;case "top":return c(p.minDepth,s)*r;case "bottom":return(1-c(p.maxDepth,s))*r;case "radial":return r/2}}function g(p){switch(o){case "left":case "right":return(p.maxDepth-p.minDepth)/(1+s)*m;case "top":case "bottom":return(p.maxBreadth-
+p.minBreadth)*m;case "radial":return p.parentNode?(p.innerRadius+p.outerRadius)*Math.cos(p.midAngle):0}}function h(p){switch(o){case "left":case "right":return(p.maxBreadth-p.minBreadth)*r;case "top":case "bottom":return(p.maxDepth-p.minDepth)/(1+s)*r;case "radial":return p.parentNode?(p.innerRadius+p.outerRadius)*Math.sin(p.midAngle):0}}function i(p){return Math.max(0,c(p.minDepth,s/2))*(x-u)+u}function j(p){return c(p.maxDepth,s/2)*(x-u)+u}function l(p){return(p.parentNode?p.minBreadth [...]
+2*Math.PI}function k(p){return(p.parentNode?p.maxBreadth-p.minBreadth:1)*2*Math.PI}var q=b.nodes,o=b.orient,n=/^(top|bottom)$/.test(o),m=b.width,r=b.height,s=-q[0].minDepth;if(o=="radial"){var u=b.innerRadius,x=b.outerRadius;if(u==null)u=0;if(u)s*=2;if(x==null)x=Math.min(m,r)/2}for(b=0;b<q.length;b++){var t=q[b];t.x=d(t);t.y=f(t);if(o=="radial"){t.innerRadius=i(t);t.outerRadius=j(t);t.startAngle=l(t);t.angle=k(t);t.midAngle=t.startAngle+t.angle/2}else t.midAngle=n?-Math.PI/2:0;t.dx=g(t);t.dy=h(t)}}};
+pv.Layout.Grid=function(){pv.Layout.call(this);var b=this;(this.cell=(new pv.Mark).data(function(){return b.scene[b.index].$grid}).width(function(){return b.width()/b.cols()}).height(function(){return b.height()/b.rows()}).left(function(){return this.width()*(this.index%b.cols())}).top(function(){return this.height()*Math.floor(this.index/b.cols())})).parent=this};pv.Layout.Grid.prototype=pv.extend(pv.Layout).property("rows").property("cols");pv.Layout.Grid.prototype.defaults=(new pv.Layout.Grid).extend(pv.Layout.prototype.defaults).rows(1).cols(1);
+pv.Layout.Grid.prototype.buildImplied=function(b){pv.Layout.prototype.buildImplied.call(this,b);var c=b.rows,d=b.cols;if(typeof d=="object")c=pv.transpose(d);if(typeof c=="object"){b.$grid=pv.blend(c);b.rows=c.length;b.cols=c[0]?c[0].length:0}else b.$grid=pv.repeat([b.data],c*d)};
+pv.Layout.Stack=function(){function b(i){return function(){return f[i](this.parent.index,this.index)}}pv.Layout.call(this);var c=this,d=function(){return null},f={t:d,l:d,r:d,b:d,w:d,h:d},g,h=c.buildImplied;this.buildImplied=function(i){h.call(this,i);var j=i.layers,l=j.length,k,q=i.orient,o=/^(top|bottom)\b/.test(q),n=this.parent[o?"height":"width"](),m=[],r=[],s=[],u=pv.Mark.stack,x={parent:{parent:this}};u.unshift(null);g=[];for(var t=0;t<l;t++){s[t]=[];r[t]=[];x.parent.index=t;u[0]=j[t];g[t]=
+this.$values.apply(x.parent,u);if(!t)k=g[t].length;u.unshift(null);for(var p=0;p<k;p++){u[0]=g[t][p];x.index=p;t||(m[p]=this.$x.apply(x,u));s[t][p]=this.$y.apply(x,u)}u.shift()}u.shift();switch(i.order){case "inside-out":var v=s.map(function(A){return pv.max.index(A)});x=pv.range(l).sort(function(A,D){return v[A]-v[D]});j=s.map(function(A){return pv.sum(A)});var w=u=0,y=[],z=[];for(t=0;t<l;t++){p=x[t];if(u<w){u+=j[p];y.push(p)}else{w+=j[p];z.push(p)}}j=z.reverse().concat(y);break;case "reverse":j=
+pv.range(l-1,-1,-1);break;default:j=pv.range(l);break}switch(i.offset){case "silohouette":for(p=0;p<k;p++){for(t=x=0;t<l;t++)x+=s[t][p];r[j[0]][p]=(n-x)/2}break;case "wiggle":for(t=x=0;t<l;t++)x+=s[t][0];r[j[0]][0]=x=(n-x)/2;for(p=1;p<k;p++){u=n=0;w=m[p]-m[p-1];for(t=0;t<l;t++)n+=s[t][p];for(t=0;t<l;t++){y=(s[j[t]][p]-s[j[t]][p-1])/(2*w);for(i=0;i<t;i++)y+=(s[j[i]][p]-s[j[i]][p-1])/w;u+=y*s[j[t]][p]}r[j[0]][p]=x-=n?u/n*w:0}break;case "expand":for(p=0;p<k;p++){for(t=i=r[j[0]][p]=0;t<l;t++)i+=s[t][p];
+if(i){i=n/i;for(t=0;t<l;t++)s[t][p]*=i}else{i=n/l;for(t=0;t<l;t++)s[t][p]=i}}break;default:for(p=0;p<k;p++)r[j[0]][p]=0;break}for(p=0;p<k;p++){x=r[j[0]][p];for(t=1;t<l;t++){x+=s[j[t-1]][p];r[j[t]][p]=x}}t=q.indexOf("-");l=o?"h":"w";o=t<0?o?"l":"b":q.charAt(t+1);q=q.charAt(0);for(var C in f)f[C]=d;f[o]=function(A,D){return m[D]};f[q]=function(A,D){return r[A][D]};f[l]=function(A,D){return s[A][D]}};this.layer=(new pv.Mark).data(function(){return g[this.parent.index]}).top(b("t")).left(b(" [...]
+this.layer.add=function(i){return c.add(pv.Panel).data(function(){return c.layers()}).add(i).extend(this)}};pv.Layout.Stack.prototype=pv.extend(pv.Layout).property("orient",String).property("offset",String).property("order",String).property("layers");a=pv.Layout.Stack.prototype;a.defaults=(new pv.Layout.Stack).extend(pv.Layout.prototype.defaults).orient("bottom-left").offset("zero").layers([[]]);a.$x=pv.Layout.Stack.prototype.$y=function(){return 0};a.x=function(b){this.$x=pv.functor(b);return this};
+a.y=function(b){this.$y=pv.functor(b);return this};a.$values=pv.identity;a.values=function(b){this.$values=pv.functor(b);return this};
+pv.Layout.Treemap=function(){pv.Layout.Hierarchy.call(this);this.node.strokeStyle("#fff").fillStyle("rgba(31, 119, 180, .25)").width(function(b){return b.dx}).height(function(b){return b.dy});this.label.visible(function(b){return!b.firstChild}).left(function(b){return b.x+b.dx/2}).top(function(b){return b.y+b.dy/2}).textAlign("center").textAngle(function(b){return b.dx>b.dy?0:-Math.PI/2});(this.leaf=(new pv.Mark).extend(this.node).fillStyle(null).strokeStyle(null).visible(function(b){return!b.firstChild})).parent=
+this;delete this.link};pv.Layout.Treemap.prototype=pv.extend(pv.Layout.Hierarchy).property("round",Boolean).property("paddingLeft",Number).property("paddingRight",Number).property("paddingTop",Number).property("paddingBottom",Number).property("mode",String).property("order",String);a=pv.Layout.Treemap.prototype;a.defaults=(new pv.Layout.Treemap).extend(pv.Layout.Hierarchy.prototype.defaults).mode("squarify").order("ascending");a.padding=function(b){return this.paddingLeft(b).paddingRight(b).paddingTop(b).paddingBottom(b)};
+a.$size=function(b){return Number(b.nodeValue)};a.size=function(b){this.$size=pv.functor(b);return this};
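The buildImplied that follows implements squarified treemaps: children (with sizes pre-scaled so that total size equals the rectangle's area) are greedily appended to the current row while doing so does not worsen the row's worst aspect ratio, and the row is flushed otherwise. The worst-ratio test, function d in the minified body below, deminifies to:

    // Worst aspect ratio of a row of cell areas laid along a side of the
    // given length: max(side^2 * maxArea / sum^2, sum^2 / (side^2 * minArea)).
    function worstRatio(areas, side) {
      var min = Infinity, max = -Infinity, sum = 0;
      for (var i = 0; i < areas.length; i++) {
        var a = areas[i];
        if (a < min) min = a;
        if (a > max) max = a;
        sum += a;
      }
      return Math.max(side * side * max / (sum * sum),
                      sum * sum / (side * side * min));
    }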
+a.buildImplied=function(b){function c(r,s,u,x,t,p,v){for(var w=0,y=0;w<r.length;w++){var z=r[w];if(u){z.x=x+y;z.y=t;y+=z.dx=n(p*z.size/s);z.dy=v}else{z.x=x;z.y=t+y;z.dx=p;y+=z.dy=n(v*z.size/s)}}if(z)if(u)z.dx+=p-y;else z.dy+=v-y}function d(r,s){for(var u=-Infinity,x=Infinity,t=0,p=0;p<r.length;p++){var v=r[p].size;if(v<x)x=v;if(v>u)u=v;t+=v}t*=t;s*=s;return Math.max(s*u/t,t/(s*x))}function f(r,s){function u(A){var D=p==y,G=pv.sum(A,o),E=y?n(G/y):0;c(A,G,D,x,t,D?p:E,D?E:v);if(D){t+=E;v-=E}else{x+=
+E;p-=E}y=Math.min(p,v);return D}var x=r.x+j,t=r.y+k,p=r.dx-j-l,v=r.dy-k-q;if(m!="squarify")c(r.childNodes,r.size,m=="slice"?true:m=="dice"?false:s&1,x,t,p,v);else{var w=[];s=Infinity;var y=Math.min(p,v),z=p*v/r.size;if(!(r.size<=0)){r.visitBefore(function(A){A.size*=z});for(r=r.childNodes.slice();r.length;){var C=r[r.length-1];if(C.size){w.push(C);z=d(w,y);if(z<=s){r.pop();s=z}else{w.pop();u(w);w.length=0;s=Infinity}}else r.pop()}if(u(w))for(s=0;s<w.length;s++)w[s].dy+=v;else for(s=0;s<w.length;s++)w[s].dx+=
+p}}}if(!pv.Layout.Hierarchy.prototype.buildImplied.call(this,b)){var g=this,h=b.nodes[0],i=pv.Mark.stack,j=b.paddingLeft,l=b.paddingRight,k=b.paddingTop,q=b.paddingBottom,o=function(r){return r.size},n=b.round?Math.round:Number,m=b.mode;i.unshift(null);h.visitAfter(function(r,s){r.depth=s;r.x=r.y=r.dx=r.dy=0;r.size=r.firstChild?pv.sum(r.childNodes,function(u){return u.size}):g.$size.apply(g,(i[0]=r,i))});i.shift();switch(b.order){case "ascending":h.sort(function(r,s){return r.size-s.size});break;
+case "descending":h.sort(function(r,s){return s.size-r.size});break;case "reverse":h.reverse();break}h.x=0;h.y=0;h.dx=b.width;h.dy=b.height;h.visitBefore(f)}};pv.Layout.Tree=function(){pv.Layout.Hierarchy.call(this)};pv.Layout.Tree.prototype=pv.extend(pv.Layout.Hierarchy).property("group",Number).property("breadth",Number).property("depth",Number).property("orient",String);pv.Layout.Tree.prototype.defaults=(new pv.Layout.Tree).extend(pv.Layout.Hierarchy.prototype.defaults).group(1).bread [...]
+pv.Layout.Tree.prototype.buildImplied=function(b){function c(p){var v,w,y;if(p.firstChild){v=p.firstChild;w=p.lastChild;for(var z=y=v;z;z=z.nextSibling){c(z);y=f(z,y)}j(p);w=0.5*(v.prelim+w.prelim);if(v=p.previousSibling){p.prelim=v.prelim+k(p.depth,true);p.mod=p.prelim-w}else p.prelim=w}else if(v=p.previousSibling)p.prelim=v.prelim+k(p.depth,true)}function d(p,v,w){p.breadth=p.prelim+v;v+=p.mod;for(p=p.firstChild;p;p=p.nextSibling)d(p,v,w)}function f(p,v){var w=p.previousSibling;if(w){var y=p,z=
+p,C=w;w=p.parentNode.firstChild;var A=y.mod,D=z.mod,G=C.mod,E=w.mod;C=h(C);for(y=g(y);C&&y;){C=C;y=y;w=g(w);z=h(z);z.ancestor=p;var B=C.prelim+G-(y.prelim+A)+k(C.depth,false);if(B>0){i(l(C,p,v),p,B);A+=B;D+=B}G+=C.mod;A+=y.mod;E+=w.mod;D+=z.mod;C=h(C);y=g(y)}if(C&&!h(z)){z.thread=C;z.mod+=G-D}if(y&&!g(w)){w.thread=y;w.mod+=A-E;v=p}}return v}function g(p){return p.firstChild||p.thread}function h(p){return p.lastChild||p.thread}function i(p,v,w){var y=v.number-p.number;v.change-=w/y;v.shift+=w;p.change+=
+w/y;v.prelim+=w;v.mod+=w}function j(p){var v=0,w=0;for(p=p.lastChild;p;p=p.previousSibling){p.prelim+=v;p.mod+=v;w+=p.change;v+=p.shift+w}}function l(p,v,w){return p.ancestor.parentNode==v.parentNode?p.ancestor:w}function k(p,v){return(v?1:u+1)/(m=="radial"?p:1)}function q(p){return m=="radial"?p.breadth/r:0}function o(p){switch(m){case "left":return p.depth;case "right":return x-p.depth;case "top":case "bottom":return p.breadth+x/2;case "radial":return x/2+p.depth*Math.cos(q(p))}}function n(p){switch(m){case "left":case "right":return p.breadth+
+t/2;case "top":return p.depth;case "bottom":return t-p.depth;case "radial":return t/2+p.depth*Math.sin(q(p))}}if(!pv.Layout.Hierarchy.prototype.buildImplied.call(this,b)){var m=b.orient,r=b.depth,s=b.breadth,u=b.group,x=b.width,t=b.height;b=b.nodes[0];b.visitAfter(function(p,v){p.ancestor=p;p.prelim=0;p.mod=0;p.change=0;p.shift=0;p.number=p.previousSibling?p.previousSibling.number+1:0;p.depth=v});c(b);d(b,-b.prelim,0);b.visitAfter(function(p){p.breadth*=s;p.depth*=r;p.midAngle=q(p);p.x=o [...]
+if(p.firstChild)p.midAngle+=Math.PI;delete p.breadth;delete p.depth;delete p.ancestor;delete p.prelim;delete p.mod;delete p.change;delete p.shift;delete p.number;delete p.thread})}};pv.Layout.Indent=function(){pv.Layout.Hierarchy.call(this);this.link.interpolate("step-after")};pv.Layout.Indent.prototype=pv.extend(pv.Layout.Hierarchy).property("depth",Number).property("breadth",Number);pv.Layout.Indent.prototype.defaults=(new pv.Layout.Indent).extend(pv.Layout.Hierarchy.prototype.defaults).depth(15).breadth(15);
+pv.Layout.Indent.prototype.buildImplied=function(b){function c(i,j,l){i.x=g+l++*f;i.y=h+j++*d;i.midAngle=0;for(i=i.firstChild;i;i=i.nextSibling)j=c(i,j,l);return j}if(!pv.Layout.Hierarchy.prototype.buildImplied.call(this,b)){var d=b.breadth,f=b.depth,g=0,h=0;c(b.nodes[0],1,1)}};pv.Layout.Pack=function(){pv.Layout.Hierarchy.call(this);this.node.radius(function(b){return b.radius}).strokeStyle("rgb(31, 119, 180)").fillStyle("rgba(31, 119, 180, .25)");this.label.textAlign("center");delete this.link};
+pv.Layout.Pack.prototype=pv.extend(pv.Layout.Hierarchy).property("spacing",Number).property("order",String);pv.Layout.Pack.prototype.defaults=(new pv.Layout.Pack).extend(pv.Layout.Hierarchy.prototype.defaults).spacing(1).order("ascending");pv.Layout.Pack.prototype.$radius=function(){return 1};pv.Layout.Pack.prototype.size=function(b){this.$radius=typeof b=="function"?function(){return Math.sqrt(b.apply(this,arguments))}:(b=Math.sqrt(b),function(){return b});return this};
+pv.Layout.Pack.prototype.buildImplied=function(b){function c(o){var n=pv.Mark.stack;n.unshift(null);for(var m=0,r=o.length;m<r;m++){var s=o[m];if(!s.firstChild)s.radius=i.$radius.apply(i,(n[0]=s,n))}n.shift()}function d(o){var n=[];for(o=o.firstChild;o;o=o.nextSibling){if(o.firstChild)o.radius=d(o);o.n=o.p=o;n.push(o)}switch(b.order){case "ascending":n.sort(function(m,r){return m.radius-r.radius});break;case "descending":n.sort(function(m,r){return r.radius-m.radius});break;case "reverse":n.reverse();
+break}return f(n)}function f(o){function n(B){u=Math.min(B.x-B.radius,u);x=Math.max(B.x+B.radius,x);t=Math.min(B.y-B.radius,t);p=Math.max(B.y+B.radius,p)}function m(B,F){var H=B.n;B.n=F;F.p=B;F.n=H;H.p=F}function r(B,F){B.n=F;F.p=B}function s(B,F){var H=F.x-B.x,I=F.y-B.y;B=B.radius+F.radius;return B*B-H*H-I*I>0.0010}var u=Infinity,x=-Infinity,t=Infinity,p=-Infinity,v,w,y,z,C;v=o[0];v.x=-v.radius;v.y=0;n(v);if(o.length>1){w=o[1];w.x=w.radius;w.y=0;n(w);if(o.length>2){y=o[2];g(v,w,y);n(y);m(v,y);v.p=
+y;m(y,w);w=v.n;for(var A=3;A<o.length;A++){g(v,w,y=o[A]);var D=0,G=1,E=1;for(z=w.n;z!=w;z=z.n,G++)if(s(z,y)){D=1;break}if(D==1)for(C=v.p;C!=z.p;C=C.p,E++)if(s(C,y)){if(E<G){D=-1;z=C}break}if(D==0){m(v,y);w=y;n(y)}else if(D>0){r(v,z);w=z;A--}else if(D<0){r(z,w);v=z;A--}}}}v=(u+x)/2;w=(t+p)/2;for(A=y=0;A<o.length;A++){z=o[A];z.x-=v;z.y-=w;y=Math.max(y,z.radius+Math.sqrt(z.x*z.x+z.y*z.y))}return y+b.spacing}function g(o,n,m){var r=n.radius+m.radius,s=o.radius+m.radius,u=n.x-o.x;n=n.y-o.y;var x=Math.sqrt(u*
+u+n*n),t=(s*s+x*x-r*r)/(2*s*x);r=Math.acos(t);t=t*s;s=Math.sin(r)*s;u/=x;n/=x;m.x=o.x+t*u+s*n;m.y=o.y+t*n-s*u}function h(o,n,m,r){for(var s=o.firstChild;s;s=s.nextSibling){s.x+=o.x;s.y+=o.y;h(s,n,m,r)}o.x=n+r*o.x;o.y=m+r*o.y;o.radius*=r}if(!pv.Layout.Hierarchy.prototype.buildImplied.call(this,b)){var i=this,j=b.nodes,l=j[0];c(j);l.x=0;l.y=0;l.radius=d(l);j=this.width();var k=this.height(),q=1/Math.max(2*l.radius/j,2*l.radius/k);h(l,j/2,k/2,q)}};
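Pack layout places circles with a front-chain algorithm: the first two circles sit side by side, every later circle is placed tangent to two circles of the chain by function g above, intersections with other chain members cause a splice and retry, and the finished cloud is recentred and scaled into the panel. The tangency placement is a law-of-cosines construction; a deminified sketch with my own names:

    // Place circle c tangent to a and b: |a-c| and |b-c| are fixed by the
    // radii, |a-b| is known, so the angle at a follows from the law of
    // cosines and c is offset from a along and across the a->b direction.
    function placeTangent(a, b, c) {
      var db = b.radius + c.radius,        // required distance b-c
          da = a.radius + c.radius,        // required distance a-c
          dx = b.x - a.x, dy = b.y - a.y,
          dab = Math.sqrt(dx * dx + dy * dy),
          cos = (da * da + dab * dab - db * db) / (2 * da * dab),
          sin = Math.sin(Math.acos(cos)),
          ux = dx / dab, uy = dy / dab;    // unit vector from a to b
      c.x = a.x + da * cos * ux + da * sin * uy;
      c.y = a.y + da * cos * uy - da * sin * ux;
    }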
+pv.Layout.Force=function(){pv.Layout.Network.call(this);this.link.lineWidth(function(b,c){return Math.sqrt(c.linkValue)*1.5});this.label.textAlign("center")};
+pv.Layout.Force.prototype=pv.extend(pv.Layout.Network).property("bound",Boolean).property("iterations",Number).property("dragConstant",Number).property("chargeConstant",Number).property("chargeMinDistance",Number).property("chargeMaxDistance",Number).property("chargeTheta",Number).property("springConstant",Number).property("springDamping",Number).property("springLength",Number);pv.Layout.Force.prototype.defaults=(new pv.Layout.Force).extend(pv.Layout.Network.prototype.defaults).dragConstant(0.1).chargeConstant(-40).chargeMinDistance(2).chargeMaxDistance(500).chargeTheta(0.9).springConstant(0.1).springDamping(0.3).springLength(20);
+pv.Layout.Force.prototype.buildImplied=function(b){function c(q){return q.fix?1:q.vx*q.vx+q.vy*q.vy}if(pv.Layout.Network.prototype.buildImplied.call(this,b)){if(b=b.$force){b.next=this.binds.$force;this.binds.$force=b}}else{for(var d=this,f=b.nodes,g=b.links,h=b.iterations,i=b.width,j=b.height,l=0,k;l<f.length;l++){k=f[l];if(isNaN(k.x))k.x=i/2+40*Math.random()-20;if(isNaN(k.y))k.y=j/2+40*Math.random()-20}k=pv.simulation(f);k.force(pv.Force.drag(b.dragConstant));k.force(pv.Force.charge(b.chargeConstant).domain(b.chargeMinDistance,
+b.chargeMaxDistance).theta(b.chargeTheta));k.force(pv.Force.spring(b.springConstant).damping(b.springDamping).length(b.springLength).links(g));k.constraint(pv.Constraint.position());b.bound&&k.constraint(pv.Constraint.bound().x(6,i-6).y(6,j-6));if(h==null){k.step();k.step();b.$force=this.binds.$force={next:this.binds.$force,nodes:f,min:1.0E-4*(g.length+1),sim:k};if(!this.$timer)this.$timer=setInterval(function(){for(var q=false,o=d.binds.$force;o;o=o.next)if(pv.max(o.nodes,c)>o.min){o.sim.step();
+q=true}q&&d.render()},42)}else for(l=0;l<h;l++)k.step()}};pv.Layout.Cluster=function(){pv.Layout.Hierarchy.call(this);var b,c=this.buildImplied;this.buildImplied=function(d){c.call(this,d);b=/^(top|bottom)$/.test(d.orient)?"step-before":/^(left|right)$/.test(d.orient)?"step-after":"linear"};this.link.interpolate(function(){return b})};
+pv.Layout.Cluster.prototype=pv.extend(pv.Layout.Hierarchy).property("group",Number).property("orient",String).property("innerRadius",Number).property("outerRadius",Number);pv.Layout.Cluster.prototype.defaults=(new pv.Layout.Cluster).extend(pv.Layout.Hierarchy.prototype.defaults).group(0).orient("top");
+pv.Layout.Cluster.prototype.buildImplied=function(b){if(!pv.Layout.Hierarchy.prototype.buildImplied.call(this,b)){var c=b.nodes[0],d=b.group,f,g,h=0,i=0.5-d/2,j=undefined;c.visitAfter(function(l){if(l.firstChild)l.depth=1+pv.max(l.childNodes,function(k){return k.depth});else{if(d&&j!=l.parentNode){j=l.parentNode;h+=d}h++;l.depth=0}});f=1/h;g=1/c.depth;j=undefined;c.visitAfter(function(l){if(l.firstChild)l.breadth=pv.mean(l.childNodes,function(k){return k.breadth});else{if(d&&j!=l.parentNode){j=l.parentNode;
+i+=d}l.breadth=f*i++}l.depth=1-l.depth*g});c.visitAfter(function(l){l.minBreadth=l.firstChild?l.firstChild.minBreadth:l.breadth-f/2;l.maxBreadth=l.firstChild?l.lastChild.maxBreadth:l.breadth+f/2});c.visitBefore(function(l){l.minDepth=l.parentNode?l.parentNode.maxDepth:0;l.maxDepth=l.parentNode?l.depth+c.depth:l.minDepth+2*c.depth});c.minDepth=-g;pv.Layout.Hierarchy.NodeLink.buildImplied.call(this,b)}};pv.Layout.Cluster.Fill=function(){pv.Layout.Cluster.call(this);pv.Layout.Hierarchy.Fill [...]
+pv.Layout.Cluster.Fill.prototype=pv.extend(pv.Layout.Cluster);pv.Layout.Cluster.Fill.prototype.buildImplied=function(b){pv.Layout.Cluster.prototype.buildImplied.call(this,b)||pv.Layout.Hierarchy.Fill.buildImplied.call(this,b)};pv.Layout.Partition=function(){pv.Layout.Hierarchy.call(this)};pv.Layout.Partition.prototype=pv.extend(pv.Layout.Hierarchy).property("order",String).property("orient",String).property("innerRadius",Number).property("outerRadius",Number);
+pv.Layout.Partition.prototype.defaults=(new pv.Layout.Partition).extend(pv.Layout.Hierarchy.prototype.defaults).orient("top");pv.Layout.Partition.prototype.$size=function(){return 1};pv.Layout.Partition.prototype.size=function(b){this.$size=b;return this};
+pv.Layout.Partition.prototype.buildImplied=function(b){if(!pv.Layout.Hierarchy.prototype.buildImplied.call(this,b)){var c=this,d=b.nodes[0],f=pv.Mark.stack,g=0;f.unshift(null);d.visitAfter(function(i,j){if(j>g)g=j;i.size=i.firstChild?pv.sum(i.childNodes,function(l){return l.size}):c.$size.apply(c,(f[0]=i,f))});f.shift();switch(b.order){case "ascending":d.sort(function(i,j){return i.size-j.size});break;case "descending":d.sort(function(i,j){return j.size-i.size});break}var h=1/g;d.minBrea [...]
+0.5;d.maxBreadth=1;d.visitBefore(function(i){for(var j=i.minBreadth,l=i.maxBreadth-j,k=i.firstChild;k;k=k.nextSibling){k.minBreadth=j;k.maxBreadth=j+=k.size/i.size*l;k.breadth=(j+k.minBreadth)/2}});d.visitAfter(function(i,j){i.minDepth=(j-1)*h;i.maxDepth=i.depth=j*h});pv.Layout.Hierarchy.NodeLink.buildImplied.call(this,b)}};pv.Layout.Partition.Fill=function(){pv.Layout.Partition.call(this);pv.Layout.Hierarchy.Fill.constructor.call(this)};pv.Layout.Partition.Fill.prototype=pv.extend(pv.La [...]
+pv.Layout.Partition.Fill.prototype.buildImplied=function(b){pv.Layout.Partition.prototype.buildImplied.call(this,b)||pv.Layout.Hierarchy.Fill.buildImplied.call(this,b)};pv.Layout.Arc=function(){pv.Layout.Network.call(this);var b,c,d,f=this.buildImplied;this.buildImplied=function(g){f.call(this,g);c=g.directed;b=g.orient=="radial"?"linear":"polar";d=g.orient=="right"||g.orient=="top"};this.link.data(function(g){var h=g.sourceNode;g=g.targetNode;return d!=(c||h.breadth<g.breadth)?[h,g]:[g, [...]
+pv.Layout.Arc.prototype=pv.extend(pv.Layout.Network).property("orient",String).property("directed",Boolean);pv.Layout.Arc.prototype.defaults=(new pv.Layout.Arc).extend(pv.Layout.Network.prototype.defaults).orient("bottom");pv.Layout.Arc.prototype.sort=function(b){this.$sort=b;return this};
+pv.Layout.Arc.prototype.buildImplied=function(b){function c(m){switch(h){case "top":return-Math.PI/2;case "bottom":return Math.PI/2;case "left":return Math.PI;case "right":return 0;case "radial":return(m-0.25)*2*Math.PI}}function d(m){switch(h){case "top":case "bottom":return m*l;case "left":return 0;case "right":return l;case "radial":return l/2+q*Math.cos(c(m))}}function f(m){switch(h){case "top":return 0;case "bottom":return k;case "left":case "right":return m*k;case "radial":return k [...]
+if(!pv.Layout.Network.prototype.buildImplied.call(this,b)){var g=b.nodes,h=b.orient,i=this.$sort,j=pv.range(g.length),l=b.width,k=b.height,q=Math.min(l,k)/2;i&&j.sort(function(m,r){return i(g[m],g[r])});for(b=0;b<g.length;b++){var o=g[j[b]],n=o.breadth=(b+0.5)/g.length;o.x=d(n);o.y=f(n);o.midAngle=c(n)}}};
+pv.Layout.Horizon=function(){pv.Layout.call(this);var b=this,c,d,f,g,h,i,j=this.buildImplied;this.buildImplied=function(l){j.call(this,l);c=l.bands;d=l.mode;f=Math.round((d=="color"?0.5:1)*l.height);g=l.backgroundStyle;h=pv.ramp(g,l.negativeStyle).domain(0,c);i=pv.ramp(g,l.positiveStyle).domain(0,c)};c=(new pv.Panel).data(function(){return pv.range(c*2)}).overflow("hidden").height(function(){return f}).top(function(l){return d=="color"?(l&1)*f:0}).fillStyle(function(l){return l?null:g}); [...]
+(new pv.Mark).top(function(l,k){return d=="mirror"&&k&1?(k+1>>1)*f:null}).bottom(function(l,k){return d=="mirror"?k&1?null:(k+1>>1)*-f:(k&1||-1)*(k+1>>1)*f}).fillStyle(function(l,k){return(k&1?h:i)((k>>1)+1)});this.band.add=function(l){return b.add(pv.Panel).extend(c).add(l).extend(this)}};pv.Layout.Horizon.prototype=pv.extend(pv.Layout).property("bands",Number).property("mode",String).property("backgroundStyle",pv.color).property("positiveStyle",pv.color).property("negativeStyle",pv.color);
+pv.Layout.Horizon.prototype.defaults=(new pv.Layout.Horizon).extend(pv.Layout.prototype.defaults).bands(2).mode("offset").backgroundStyle("white").positiveStyle("#1f77b4").negativeStyle("#d62728");
+pv.Layout.Rollup=function(){pv.Layout.Network.call(this);var b=this,c,d,f=b.buildImplied;this.buildImplied=function(g){f.call(this,g);c=g.$rollup.nodes;d=g.$rollup.links};this.node.data(function(){return c}).size(function(g){return g.nodes.length*20});this.link.interpolate("polar").eccentricity(0.8);this.link.add=function(g){return b.add(pv.Panel).data(function(){return d}).add(g).extend(this)}};pv.Layout.Rollup.prototype=pv.extend(pv.Layout.Network).property("directed",Boolean);
+pv.Layout.Rollup.prototype.x=function(b){this.$x=pv.functor(b);return this};pv.Layout.Rollup.prototype.y=function(b){this.$y=pv.functor(b);return this};
+pv.Layout.Rollup.prototype.buildImplied=function(b){function c(r){return i[r]+","+j[r]}if(!pv.Layout.Network.prototype.buildImplied.call(this,b)){var d=b.nodes,f=b.links,g=b.directed,h=d.length,i=[],j=[],l=0,k={},q={},o=pv.Mark.stack,n={parent:this};o.unshift(null);for(var m=0;m<h;m++){n.index=m;o[0]=d[m];i[m]=this.$x.apply(n,o);j[m]=this.$y.apply(n,o)}o.shift();for(m=0;m<d.length;m++){h=c(m);o=k[h];if(!o){o=k[h]=pv.extend(d[m]);o.index=l++;o.x=i[m];o.y=j[m];o.nodes=[]}o.nodes.push(d[m]) [...]
+f.length;m++){l=f[m].targetNode;d=k[c(f[m].sourceNode.index)];l=k[c(l.index)];h=!g&&d.index>l.index?l.index+","+d.index:d.index+","+l.index;(o=q[h])||(o=q[h]={sourceNode:d,targetNode:l,linkValue:0,links:[]});o.links.push(f[m]);o.linkValue+=f[m].linkValue}b.$rollup={nodes:pv.values(k),links:pv.values(q)}}};
+pv.Layout.Matrix=function(){pv.Layout.Network.call(this);var b,c,d,f,g,h=this.buildImplied;this.buildImplied=function(i){h.call(this,i);b=i.nodes.length;c=i.width/b;d=i.height/b;f=i.$matrix.labels;g=i.$matrix.pairs};this.link.data(function(){return g}).left(function(){return c*(this.index%b)}).top(function(){return d*Math.floor(this.index/b)}).width(function(){return c}).height(function(){return d}).lineWidth(1.5).strokeStyle("#fff").fillStyle(function(i){return i.linkValue?"#555":"#eee" [...]
+this;delete this.link.add;this.label.data(function(){return f}).left(function(){return this.index&1?c*((this.index>>1)+0.5):null}).top(function(){return this.index&1?null:d*((this.index>>1)+0.5)}).textMargin(4).textAlign(function(){return this.index&1?"left":"right"}).textAngle(function(){return this.index&1?-Math.PI/2:0});delete this.node};pv.Layout.Matrix.prototype=pv.extend(pv.Layout.Network).property("directed",Boolean);pv.Layout.Matrix.prototype.sort=function(b){this.$sort=b;return this};
+pv.Layout.Matrix.prototype.buildImplied=function(b){if(!pv.Layout.Network.prototype.buildImplied.call(this,b)){var c=b.nodes,d=b.links,f=this.$sort,g=c.length,h=pv.range(g),i=[],j=[],l={};b.$matrix={labels:i,pairs:j};f&&h.sort(function(m,r){return f(c[m],c[r])});for(var k=0;k<g;k++)for(var q=0;q<g;q++){var o=h[k],n=h[q];j.push(l[o+"."+n]={row:k,col:q,sourceNode:c[o],targetNode:c[n],linkValue:0})}for(k=0;k<g;k++){o=h[k];i.push(c[o],c[o])}for(k=0;k<d.length;k++){i=d[k];g=i.sourceNode.index [...]
+i=i.linkValue;l[g+"."+h].linkValue+=i;b.directed||(l[h+"."+g].linkValue+=i)}}};
+pv.Layout.Bullet=function(){pv.Layout.call(this);var b=this,c=b.buildImplied,d=b.x=pv.Scale.linear(),f,g,h,i,j;this.buildImplied=function(l){c.call(this,j=l);f=l.orient;g=/^left|right$/.test(f);h=pv.ramp("#bbb","#eee").domain(0,Math.max(1,j.ranges.length-1));i=pv.ramp("steelblue","lightsteelblue").domain(0,Math.max(1,j.measures.length-1))};(this.range=new pv.Mark).data(function(){return j.ranges}).reverse(true).left(function(){return f=="left"?0:null}).top(function(){return f=="top"?0:nu [...]
+"right"?0:null}).bottom(function(){return f=="bottom"?0:null}).width(function(l){return g?d(l):null}).height(function(l){return g?null:d(l)}).fillStyle(function(){return h(this.index)}).antialias(false).parent=b;(this.measure=new pv.Mark).extend(this.range).data(function(){return j.measures}).left(function(){return f=="left"?0:g?null:this.parent.width()/3.25}).top(function(){return f=="top"?0:g?this.parent.height()/3.25:null}).right(function(){return f=="right"?0:g?null:this.parent.width [...]
+"bottom"?0:g?this.parent.height()/3.25:null}).fillStyle(function(){return i(this.index)}).parent=b;(this.marker=new pv.Mark).data(function(){return j.markers}).left(function(l){return f=="left"?d(l):g?null:this.parent.width()/2}).top(function(l){return f=="top"?d(l):g?this.parent.height()/2:null}).right(function(l){return f=="right"?d(l):null}).bottom(function(l){return f=="bottom"?d(l):null}).strokeStyle("black").shape("bar").angle(function(){return g?0:Math.PI/2}).parent=b;(this.tick=n [...]
+"left"?d(l):null}).top(function(l){return f=="top"?d(l):null}).right(function(l){return f=="right"?d(l):g?null:-6}).bottom(function(l){return f=="bottom"?d(l):g?-8:null}).height(function(){return g?6:null}).width(function(){return g?null:6}).parent=b};pv.Layout.Bullet.prototype=pv.extend(pv.Layout).property("orient",String).property("ranges").property("markers").property("measures").property("maximum",Number);pv.Layout.Bullet.prototype.defaults=(new pv.Layout.Bullet).extend(pv.Layout.pro [...]
+pv.Layout.Bullet.prototype.buildImplied=function(b){pv.Layout.prototype.buildImplied.call(this,b);var c=this.parent[/^left|right$/.test(b.orient)?"width":"height"]();b.maximum=b.maximum||pv.max([].concat(b.ranges,b.markers,b.measures));this.x.domain(0,b.maximum).range(0,c)};pv.Behavior={};
+pv.Behavior.drag=function(){function b(l){g=this.index;f=this.scene;var k=this.mouse();i=((h=l).fix=pv.vector(l.x,l.y)).minus(k);j={x:this.parent.width()-(l.dx||0),y:this.parent.height()-(l.dy||0)};f.mark.context(f,g,function(){this.render()});pv.Mark.dispatch("dragstart",f,g)}function c(){if(f){f.mark.context(f,g,function(){var l=this.mouse();h.x=h.fix.x=Math.max(0,Math.min(i.x+l.x,j.x));h.y=h.fix.y=Math.max(0,Math.min(i.y+l.y,j.y));this.render()});pv.Mark.dispatch("drag",f,g)}}function [...]
+null;f.mark.context(f,g,function(){this.render()});pv.Mark.dispatch("dragend",f,g);f=null}}var f,g,h,i,j;pv.listen(window,"mousemove",c);pv.listen(window,"mouseup",d);return b};
+pv.Behavior.point=function(b){function c(k,q){k=k[q];q={cost:Infinity};for(var o=0,n=k.visible&&k.children.length;o<n;o++){var m=k.children[o],r=m.mark,s;if(r.type=="panel"){r.scene=m;for(var u=0,x=m.length;u<x;u++){r.index=u;s=c(m,u);if(s.cost<q.cost)q=s}delete r.scene;delete r.index}else if(r.$handlers.point){r=r.mouse();u=0;for(x=m.length;u<x;u++){var t=m[u];s=r.x-t.left-(t.width||0)/2;t=r.y-t.top-(t.height||0)/2;var p=i*s*s+j*t*t;if(p<q.cost){q.distance=s*s+t*t;q.cost=p;q.scene=m;q.i [...]
+function d(){var k=c(this.scene,this.index);if(k.cost==Infinity||k.distance>l)k=null;if(g){if(k&&g.scene==k.scene&&g.index==k.index)return;pv.Mark.dispatch("unpoint",g.scene,g.index)}if(g=k){pv.Mark.dispatch("point",k.scene,k.index);pv.listen(this.root.canvas(),"mouseout",f)}}function f(k){if(g&&!pv.ancestor(this,k.relatedTarget)){pv.Mark.dispatch("unpoint",g.scene,g.index);g=null}}var g,h=null,i=1,j=1,l=arguments.length?b*b:900;d.collapse=function(k){if(arguments.length){h=String(k);swi [...]
+1;j=0;break;case "x":i=0;j=1;break;default:j=i=1;break}return d}return h};return d};
+pv.Behavior.select=function(){function b(j){g=this.index;f=this.scene;i=this.mouse();h=j;h.x=i.x;h.y=i.y;h.dx=h.dy=0;pv.Mark.dispatch("selectstart",f,g)}function c(){if(f){f.mark.context(f,g,function(){var j=this.mouse();h.x=Math.max(0,Math.min(i.x,j.x));h.y=Math.max(0,Math.min(i.y,j.y));h.dx=Math.min(this.width(),Math.max(j.x,i.x))-h.x;h.dy=Math.min(this.height(),Math.max(j.y,i.y))-h.y;this.render()});pv.Mark.dispatch("select",f,g)}}function d(){if(f){pv.Mark.dispatch("selectend",f,g);f [...]
+g,h,i;pv.listen(window,"mousemove",c);pv.listen(window,"mouseup",d);return b};
+pv.Behavior.resize=function(b){function c(l){h=this.index;g=this.scene;j=this.mouse();i=l;switch(b){case "left":j.x=i.x+i.dx;break;case "right":j.x=i.x;break;case "top":j.y=i.y+i.dy;break;case "bottom":j.y=i.y;break}pv.Mark.dispatch("resizestart",g,h)}function d(){if(g){g.mark.context(g,h,function(){var l=this.mouse();i.x=Math.max(0,Math.min(j.x,l.x));i.y=Math.max(0,Math.min(j.y,l.y));i.dx=Math.min(this.parent.width(),Math.max(l.x,j.x))-i.x;i.dy=Math.min(this.parent.height(),Math.max(l.y [...]
+this.render()});pv.Mark.dispatch("resize",g,h)}}function f(){if(g){pv.Mark.dispatch("resizeend",g,h);g=null}}var g,h,i,j;pv.listen(window,"mousemove",d);pv.listen(window,"mouseup",f);return c};
+pv.Behavior.pan=function(){function b(){g=this.index;f=this.scene;i=pv.vector(pv.event.pageX,pv.event.pageY);h=this.transform();j=1/(h.k*this.scale);if(l)l={x:(1-h.k)*this.width(),y:(1-h.k)*this.height()}}function c(){if(f){f.mark.context(f,g,function(){var k=h.translate((pv.event.pageX-i.x)*j,(pv.event.pageY-i.y)*j);if(l){k.x=Math.max(l.x,Math.min(0,k.x));k.y=Math.max(l.y,Math.min(0,k.y))}this.transform(k).render()});pv.Mark.dispatch("pan",f,g)}}function d(){f=null}var f,g,h,i,j,l;b.bou [...]
+Boolean(k);return this}return Boolean(l)};pv.listen(window,"mousemove",c);pv.listen(window,"mouseup",d);return b};
+pv.Behavior.zoom=function(b){function c(){var f=this.mouse(),g=pv.event.wheel*b;f=this.transform().translate(f.x,f.y).scale(g<0?1E3/(1E3-g):(1E3+g)/1E3).translate(-f.x,-f.y);if(d){f.k=Math.max(1,f.k);f.x=Math.max((1-f.k)*this.width(),Math.min(0,f.x));f.y=Math.max((1-f.k)*this.height(),Math.min(0,f.y))}this.transform(f).render();pv.Mark.dispatch("zoom",this.scene,this.index)}var d;arguments.length||(b=1/48);c.bound=function(f){if(arguments.length){d=Boolean(f);return this}return Boolean(d [...]
+pv.Geo=function(){};
+pv.Geo.projections={mercator:{project:function(b){return{x:b.lng/180,y:b.lat>85?1:b.lat<-85?-1:Math.log(Math.tan(Math.PI/4+pv.radians(b.lat)/2))/Math.PI}},invert:function(b){return{lng:b.x*180,lat:pv.degrees(2*Math.atan(Math.exp(b.y*Math.PI))-Math.PI/2)}}},"gall-peters":{project:function(b){return{x:b.lng/180,y:Math.sin(pv.radians(b.lat))}},invert:function(b){return{lng:b.x*180,lat:pv.degrees(Math.asin(b.y))}}},sinusoidal:{project:function(b){return{x:pv.radians(b.lng)*Math.cos(pv.radian [...]
+y:b.lat/90}},invert:function(b){return{lng:pv.degrees(b.x*Math.PI/Math.cos(b.y*Math.PI/2)),lat:b.y*90}}},aitoff:{project:function(b){var c=pv.radians(b.lng);b=pv.radians(b.lat);var d=Math.acos(Math.cos(b)*Math.cos(c/2));return{x:2*(d?Math.cos(b)*Math.sin(c/2)*d/Math.sin(d):0)/Math.PI,y:2*(d?Math.sin(b)*d/Math.sin(d):0)/Math.PI}},invert:function(b){var c=b.y*Math.PI/2;return{lng:pv.degrees(b.x*Math.PI/2/Math.cos(c)),lat:pv.degrees(c)}}},hammer:{project:function(b){var c=pv.radians(b.lng); [...]
+var d=Math.sqrt(1+Math.cos(b)*Math.cos(c/2));return{x:2*Math.SQRT2*Math.cos(b)*Math.sin(c/2)/d/3,y:Math.SQRT2*Math.sin(b)/d/1.5}},invert:function(b){var c=b.x*3;b=b.y*1.5;var d=Math.sqrt(1-c*c/16-b*b/4);return{lng:pv.degrees(2*Math.atan2(d*c,2*(2*d*d-1))),lat:pv.degrees(Math.asin(d*b))}}},identity:{project:function(b){return{x:b.lng/180,y:b.lat/90}},invert:function(b){return{lng:b.x*180,lat:b.y*90}}}};
+pv.Geo.scale=function(b){function c(m){if(!o||m.lng!=o.lng||m.lat!=o.lat){o=m;m=d(m);n={x:l(m.x),y:k(m.y)}}return n}function d(m){return j.project({lng:m.lng-q.lng,lat:m.lat})}function f(m){m=j.invert(m);m.lng+=q.lng;return m}var g={x:0,y:0},h={x:1,y:1},i=[],j=pv.Geo.projections.identity,l=pv.Scale.linear(-1,1).range(0,1),k=pv.Scale.linear(-1,1).range(1,0),q={lng:0,lat:0},o,n;c.x=function(m){return c(m).x};c.y=function(m){return c(m).y};c.ticks={lng:function(m){var r;if(i.length>1){var s [...]
+if(m==undefined)m=10;r=s.domain(i,function(u){return u.lat}).ticks(m);m=s.domain(i,function(u){return u.lng}).ticks(m)}else{r=pv.range(-80,81,10);m=pv.range(-180,181,10)}return m.map(function(u){return r.map(function(x){return{lat:x,lng:u}})})},lat:function(m){return pv.transpose(c.ticks.lng(m))}};c.invert=function(m){return f({x:l.invert(m.x),y:k.invert(m.y)})};c.domain=function(m,r){if(arguments.length){i=m instanceof Array?arguments.length>1?pv.map(m,r):m:Array.prototype.slice.call(ar [...]
+if(i.length>1){var s=i.map(function(x){return x.lng}),u=i.map(function(x){return x.lat});q={lng:(pv.max(s)+pv.min(s))/2,lat:(pv.max(u)+pv.min(u))/2};s=i.map(d);l.domain(s,function(x){return x.x});k.domain(s,function(x){return x.y})}else{q={lng:0,lat:0};l.domain(-1,1);k.domain(-1,1)}o=null;return this}return i};c.range=function(m,r){if(arguments.length){if(typeof m=="object"){g={x:Number(m.x),y:Number(m.y)};h={x:Number(r.x),y:Number(r.y)}}else{g={x:0,y:0};h={x:Number(m),y:Number(r)}}l.ran [...]
+k.range(h.y,g.y);o=null;return this}return[g,h]};c.projection=function(m){if(arguments.length){j=typeof m=="string"?pv.Geo.projections[m]||pv.Geo.projections.identity:m;return this.domain(i)}return m};c.by=function(m){function r(){return c(m.apply(this,arguments))}for(var s in c)r[s]=c[s];return r};arguments.length&&c.projection(b);return c};
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/tipsy.css b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/tipsy.css
new file mode 100644
index 0000000..ebdaa0b
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/tipsy.css
@@ -0,0 +1,30 @@
+/*!
+ * The MIT License
+ * 
+ * Copyright (c) 2008 Jason Frame (jason@onehackoranother.com)
+ * 
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ * 
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ * 
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+.tipsy { padding: 5px; font-size: 10px; opacity: 0.8; filter: alpha(opacity=80); background-repeat: no-repeat;  background-image: url(../images/tipsy.gif); }
+  .tipsy-inner { padding: 5px 8px 4px 8px; background-color: black; color: white; max-width: 200px; text-align: center; }
+  .tipsy-inner { -moz-border-radius:3px; -webkit-border-radius:3px; }
+  .tipsy-north { background-position: top center; }
+  .tipsy-south { background-position: bottom center; }
+  .tipsy-east { background-position: right center; }
+  .tipsy-west { background-position: left center; }
diff --git a/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/tipsy.js b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/tipsy.js
new file mode 100644
index 0000000..2f8b85a
--- /dev/null
+++ b/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/static/tipsy.js
@@ -0,0 +1,92 @@
+/*!
+ * The MIT License
+ * 
+ * Copyright (c) 2008 Jason Frame (jason@onehackoranother.com)
+ * 
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ * 
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ * 
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+pv.Behavior.tipsy = function(opts) {
+  var tip;
+
+  /**
+   * @private When the mouse leaves the root panel, hide the tooltip. This is
+   * necessary for dimensionless marks (e.g., lines) for which the mouse may
+   * never actually be over the tooltip span.
+   */
+  function trigger() {
+    $(tip).tipsy("hide");
+  }
+
+  /**
+   * @private When the mouse leaves the tooltip, remove the tooltip span. This
+   * handler is declared outside of the returned behavior function so that
+   * duplicate registrations are ignored.
+   */
+  function cleanup() {
+    if (tip) {
+      tip.parentNode.removeChild(tip);
+      tip = null;
+    }
+  }
+
+  return function(d) {
+      /* Compute the transform to offset the tooltip position. */
+      var t = pv.Transform.identity, p = this.parent;
+      do {
+        t = t.translate(p.left(), p.top()).times(p.transform());
+      } while (p = p.parent);
+
+      /* Create and cache the tooltip span to be used by tipsy. */
+      if (!tip) {
+        var c = this.root.canvas();
+        c.style.position = "relative";
+        $(c).mouseleave(trigger);
+
+        tip = c.appendChild(document.createElement("div"));
+        tip.style.position = "absolute";
+        $(tip).tipsy(opts);
+      }
+
+      /* Propagate the tooltip text. */
+      tip.title = this.title() || this.text();
+
+      /*
+       * Compute bounding box. TODO support area, lines, wedges, stroke. Also
+       * note that CSS positioning does not support subpixels, and the current
+       * rounding implementation can be off by one pixel.
+       */
+      if (this.properties.width) {
+        tip.style.width = Math.ceil(this.width() * t.k) + 1 + "px";
+        tip.style.height = Math.ceil(this.height() * t.k) + 1 + "px";
+      } else if (this.properties.radius) {
+        var r = this.radius();
+        t.x -= r;
+        t.y -= r;
+        tip.style.height = tip.style.width = Math.ceil(2 * r * t.k) + "px";
+      }
+      tip.style.left = Math.floor(this.left() * t.k + t.x) + "px";
+      tip.style.top = Math.floor(this.top() * t.k + t.y) + "px";
+
+      /*
+       * Cleanup the tooltip span on mouseout. Immediately trigger the tooltip;
+       * this is necessary for dimensionless marks.
+       */
+      $(tip).mouseleave(cleanup).tipsy("show");
+    };
+};
diff --git a/lang/java/ipc/src/main/velocity/org/apache/avro/ipc/stats/templates/statsview.vm b/lang/java/ipc/src/main/velocity/org/apache/avro/ipc/stats/templates/statsview.vm
new file mode 100644
index 0000000..045b421
--- /dev/null
+++ b/lang/java/ipc/src/main/velocity/org/apache/avro/ipc/stats/templates/statsview.vm
@@ -0,0 +1,98 @@
+##
+## Licensed to the Apache Software Foundation (ASF) under one
+## or more contributor license agreements.  See the NOTICE file
+## distributed with this work for additional information
+## regarding copyright ownership.  The ASF licenses this file
+## to you under the Apache License, Version 2.0 (the
+## "License"); you may not use this file except in compliance
+## with the License.  You may obtain a copy of the License at
+##
+##     http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+
+#macro( makeChart $attr_map )
+#if ($attr_map.type == "bar") 
+  <script>
+    makeBarChart($attr_map.labelStr, $attr_map.boundaryStr, $attr_map.dataStr);
+  </script>
+#end
+
+#if ($attr_map.type == "dot") 
+  <script>
+    makeDotChart($attr_map.dataStr);
+  </script>
+#end
+#end
+
+#macro( makeChartHeader $attr_map )
+#if ($attr_map.type == "bar") 
+  <p>
+    $attr_map.title <br>
+    Average: $attr_map.avg$attr_map.units<br>
+    Stdev: $attr_map.stdDev$attr_map.units
+  </p>
+#end
+
+#if ($attr_map.type == "dot") 
+  <p>
+    $attr_map.title <br>
+    Most recent calls
+  </p>
+#end
+#end
+
+<html>
+<head>
+<title>$title</title>
+<script type="text/javascript" src="static/protovis-r3.2.js"></script>
+<script type="text/javascript" src="static/tipsy.js"></script>
+<script src="static/jquery-1.4.2.min.js" type="text/javascript"></script>
+<script src="static/jquery.tipsy.js" type="text/javascript"></script>
+<script src="static/tipsy.js" type="text/javascript"></script>
+<link href="static/tipsy.css" type="text/css" rel="stylesheet"/>
+<script src="static/avro.js" type="text/javascript"></script>
+<link href="static/avro.css" type="text/css" rel="stylesheet"/>
+
+<script>
+
+</script>
+</head>
+<body>
+  <h1>$title</h1>
+  <p>
+    Running since: $startupTime<br>
+    Currently: $currTime
+  </p>
+  
+  #if ($inFlightRpcs.size() != 0) 
+  <h4>Active RPCs</h4>
+    <ol>
+    #foreach( $rpc in $inFlightRpcs ) 
+      <li>$rpc</li>
+    #end
+    </ol>
+  #end
+    #foreach ($message in $messages)
+    #set( $width = $message.charts.size() * 300 ) 
+    <table id="charts_table" width=$width>
+      <h3>$message.name ($message.numCalls calls)</h3>
+      <tr valign="top">
+      #foreach ($chart in $message.charts)
+        <td>#makeChartHeader($chart) </td>
+      #end
+      </tr>
+      <tr>
+      #foreach ($chart in $message.charts)
+        <td>#makeChart($chart) </td>
+      #end
+      </tr>
+    </table>
+    #end
+
+</body>
+</html>
\ No newline at end of file
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/DataFileInteropTest.java b/lang/java/ipc/src/test/java/org/apache/avro/DataFileInteropTest.java
new file mode 100644
index 0000000..dd64bf5
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/DataFileInteropTest.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class DataFileInteropTest {
+
+  private static final File DATAFILE_DIR = 
+    new File(System.getProperty("test.dir", "/tmp"));
+
+  @BeforeClass
+  public static void printDir() {
+    System.out.println("Reading data files from directory: "
+        + DATAFILE_DIR.getAbsolutePath());
+  }
+
+  @Test
+  public void testGeneratedGeneric() throws IOException {
+    System.out.println("Reading with generic:");
+    DatumReaderProvider<Object> provider = new DatumReaderProvider<Object>() {
+      @Override public DatumReader<Object> get() { 
+        return new GenericDatumReader<Object>(); 
+        }
+      };
+    readFiles(provider);
+  }
+
+  @Test
+  public void testGeneratedSpecific() throws IOException {
+    System.out.println("Reading with specific:");
+    DatumReaderProvider<Interop> provider = new DatumReaderProvider<Interop>() {
+      @Override public DatumReader<Interop> get() { 
+        return new SpecificDatumReader<Interop>(); 
+        }
+      };
+    readFiles(provider);
+  }
+
+  // Can't use the same Interop.java for reflect as for specific.
+  // That used to be because one used Utf8 and the other String, but
+  // we use CharSequence now.
+  // The current incompatibility is that one uses byte[] and the other
+  // ByteBuffer for the "bytes" type.
+
+  // We could fix this by defining a reflect-specific version of Interop.java,
+  // but we'd need to put it on a different classpath than the specific one.
+  // Changing Specific to generate more flexible code would help too -- it
+  // could convert ByteBuffer to byte[] or vice versa.
+  // Additionally, some complication arises because of IndexedRecord's
+  // simplicity.
+
+//   @Test
+//   public void testGeneratedReflect() throws IOException {
+//     DatumReaderProvider<Interop> provider = new DatumReaderProvider<Interop>() {
+//       @Override public DatumReader<Interop> get() { 
+//         return new ReflectDatumReader<Interop>(Interop.class); 
+//         }
+//       };
+//     readFiles(provider);
+//   }
+
+  private <T extends Object> void readFiles(DatumReaderProvider<T> provider)
+      throws IOException {
+    for (File f : DATAFILE_DIR.listFiles()) {
+      System.out.println("Reading: " + f.getName());
+      FileReader<? extends Object> reader = DataFileReader.openReader(f,
+          provider.get());
+      try {
+        for (Object datum : reader) {
+          Assert.assertNotNull(datum);
+        }
+      } finally {
+        reader.close();  // FileReader is Closeable; release the file handle
+      }
+    }
+  }
+  
+  interface DatumReaderProvider<T extends Object> {
+    public DatumReader<T> get();
+  }
+
+}
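
The DatumReaderProvider indirection above exists so the same read loop can be driven by a generic, specific, or (eventually) reflect reader. Stripped of the test harness, that read path is just the container-file API; a minimal sketch, assuming a hypothetical input file interop.avro (the file and class names here are illustrative, not part of the test):

    import java.io.File;
    import java.io.IOException;

    import org.apache.avro.file.DataFileReader;
    import org.apache.avro.file.FileReader;
    import org.apache.avro.generic.GenericDatumReader;

    public class ReadAnyAvroFile {
      public static void main(String[] args) throws IOException {
        // Hypothetical input; any Avro container file works, since the
        // generic reader needs no generated classes.
        File f = new File("interop.avro");
        FileReader<Object> reader =
            DataFileReader.openReader(f, new GenericDatumReader<Object>());
        try {
          for (Object datum : reader) {
            System.out.println(datum);  // each datum as a generic record
          }
        } finally {
          reader.close();
        }
      }
    }
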
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java b/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java
new file mode 100644
index 0000000..96e2e62
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java
@@ -0,0 +1,186 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.nio.ByteBuffer;
+import java.util.Map;
+
+import junit.framework.Assert;
+
+import org.apache.avro.ipc.RPCContext;
+import org.apache.avro.ipc.RPCPlugin;
+
+/**
+ * An implementation of an RPC metadata plugin API designed for unit testing.
+ * This plugin tests handshake and call state by passing a string as metadata,
+ * slowly building it up at each instrumentation point, testing it as it goes.
+ * Once the call or handshake completes, the fully constructed string is
+ * verified. The plugin also checks that RPC context data is appropriately
+ * filled in along the way by the Requestor and Responder classes.
+ */
+public final class RPCMetaTestPlugin extends RPCPlugin {
+  
+  protected final String key;
+  
+  public RPCMetaTestPlugin(String keyname) {
+    key = keyname;
+  }
+  
+  @Override
+  public void clientStartConnect(RPCContext context) {
+    ByteBuffer buf = ByteBuffer.wrap("ap".getBytes());
+    context.requestHandshakeMeta().put(key, buf);
+  }
+  
+  @Override
+  public void serverConnecting(RPCContext context) {
+    
+    Assert.assertNotNull(context.requestHandshakeMeta());
+    Assert.assertNotNull(context.responseHandshakeMeta());
+    Assert.assertNull(context.getRequestPayload());
+    Assert.assertNull(context.getResponsePayload());
+    
+    if (!context.requestHandshakeMeta().containsKey(key)) return;
+    
+    ByteBuffer buf = context.requestHandshakeMeta().get(key);
+    Assert.assertNotNull(buf);
+    Assert.assertNotNull(buf.array());
+    
+    String partialstr = new String(buf.array());
+    Assert.assertNotNull(partialstr);
+    Assert.assertEquals("partial string mismatch", "ap", partialstr);
+    
+    buf = ByteBuffer.wrap((partialstr + "ac").getBytes());
+    Assert.assertTrue(buf.remaining() > 0);
+    context.responseHandshakeMeta().put(key, buf);
+  }
+  
+  @Override
+  public void clientFinishConnect(RPCContext context) {
+    Map<String,ByteBuffer> handshakeMeta = context.responseHandshakeMeta();
+    
+    Assert.assertNull(context.getRequestPayload());
+    Assert.assertNull(context.getResponsePayload());
+    Assert.assertNotNull(handshakeMeta);
+    
+    if (!handshakeMeta.containsKey(key)) return;
+    
+    ByteBuffer buf = handshakeMeta.get(key);
+    Assert.assertNotNull(buf);
+    Assert.assertNotNull(buf.array());
+    
+    String partialstr = new String(buf.array());
+    Assert.assertNotNull(partialstr);
+    Assert.assertEquals("partial string mismatch", "apac", partialstr);
+    
+    buf = ByteBuffer.wrap((partialstr + "he").getBytes());
+    Assert.assertTrue(buf.remaining() > 0);
+    handshakeMeta.put(key, buf);
+    
+    checkRPCMetaMap(handshakeMeta);
+  }
+  
+  @Override
+  public void clientSendRequest(RPCContext context) { 
+    ByteBuffer buf = ByteBuffer.wrap("ap".getBytes());
+    context.requestCallMeta().put(key, buf);
+    Assert.assertNotNull(context.getMessage());
+    Assert.assertNotNull(context.getRequestPayload());
+    Assert.assertNull(context.getResponsePayload());
+  }
+  
+  @Override
+  public void serverReceiveRequest(RPCContext context) {
+    Map<String,ByteBuffer> meta = context.requestCallMeta();
+    
+    Assert.assertNotNull(meta);    
+    Assert.assertNotNull(context.getMessage());
+    Assert.assertNull(context.getResponsePayload());
+    
+    if (!meta.containsKey(key)) return;
+    
+    ByteBuffer buf = meta.get(key);
+    Assert.assertNotNull(buf);
+    Assert.assertNotNull(buf.array());
+    
+    String partialstr = new String(buf.array());
+    Assert.assertNotNull(partialstr);
+    Assert.assertEquals("partial string mismatch", "ap", partialstr);
+    
+    buf = ByteBuffer.wrap((partialstr + "a").getBytes());
+    Assert.assertTrue(buf.remaining() > 0);
+    meta.put(key, buf);
+  }
+  
+  @Override
+  public void serverSendResponse(RPCContext context) {
+    Assert.assertNotNull(context.requestCallMeta());
+    Assert.assertNotNull(context.responseCallMeta());
+
+    Assert.assertNotNull(context.getResponsePayload());
+    
+    if (!context.requestCallMeta().containsKey(key)) return;
+    
+    ByteBuffer buf = context.requestCallMeta().get(key);
+    Assert.assertNotNull(buf);
+    Assert.assertNotNull(buf.array());
+    
+    String partialstr = new String(buf.array());
+    Assert.assertNotNull(partialstr);
+    Assert.assertEquals("partial string mismatch", "apa", partialstr);
+    
+    buf = ByteBuffer.wrap((partialstr + "c").getBytes());
+    Assert.assertTrue(buf.remaining() > 0);
+    context.responseCallMeta().put(key, buf);
+  }
+  
+  @Override
+  public void clientReceiveResponse(RPCContext context) {
+    Assert.assertNotNull(context.responseCallMeta());
+    Assert.assertNotNull(context.getRequestPayload());
+    
+    if (!context.responseCallMeta().containsKey(key)) return;
+    
+    ByteBuffer buf = context.responseCallMeta().get(key);
+    Assert.assertNotNull(buf);
+    Assert.assertNotNull(buf.array());
+    
+    String partialstr = new String(buf.array());
+    Assert.assertNotNull(partialstr);
+    Assert.assertEquals("partial string mismatch", "apac", partialstr);
+    
+    buf = ByteBuffer.wrap((partialstr + "he").getBytes());
+    Assert.assertTrue(buf.remaining() > 0);
+    context.responseCallMeta().put(key, buf);
+    
+    checkRPCMetaMap(context.responseCallMeta());
+  }
+  
+  protected void checkRPCMetaMap(Map<String,ByteBuffer> rpcMeta) {
+    Assert.assertNotNull(rpcMeta);
+    Assert.assertTrue("key not present in map", rpcMeta.containsKey(key));
+    
+    ByteBuffer keybuf = rpcMeta.get(key);
+    Assert.assertNotNull(keybuf);
+    Assert.assertTrue("key BB had nothing remaining", keybuf.remaining() > 0);
+    
+    String str = new String(keybuf.array());
+    Assert.assertEquals("apache", str);
+  }
+  
+}
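
A plugin such as this only observes traffic once it is registered on an endpoint. A server-side wiring sketch, reusing the generated BulkData protocol from this test suite; the inline implementation and the key name are illustrative:

    import java.nio.ByteBuffer;

    import org.apache.avro.RPCMetaTestPlugin;
    import org.apache.avro.ipc.HttpServer;
    import org.apache.avro.ipc.Server;
    import org.apache.avro.ipc.specific.SpecificResponder;
    import org.apache.avro.test.BulkData;

    public class PluginWiring {
      public static void main(String[] args) throws Exception {
        SpecificResponder responder = new SpecificResponder(BulkData.class,
            new BulkData() {  // trivial illustrative implementation
              public ByteBuffer read() { return ByteBuffer.allocate(0); }
              public Void write(ByteBuffer data) { return null; }
            });
        // Every handshake and call on this endpoint now passes through the
        // plugin's instrumentation points.
        responder.addRPCPlugin(new RPCMetaTestPlugin("extra-meta"));
        Server server = new HttpServer(responder, 0);  // 0 picks a free port
        server.start();
        System.out.println("listening on port " + server.getPort());
      }
    }
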
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/RandomData.java b/lang/java/ipc/src/test/java/org/apache/avro/RandomData.java
new file mode 100644
index 0000000..49f8857
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/RandomData.java
@@ -0,0 +1,142 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericArray;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.util.Utf8;
+
+/** Generates schema data as Java objects with random values. */
+public class RandomData implements Iterable<Object> {
+  private final Schema root;
+  private final long seed;
+  private final int count;
+
+  public RandomData(Schema schema, int count) {
+    this(schema, count, System.currentTimeMillis());
+  }
+
+  public RandomData(Schema schema, int count, long seed) {
+    this.root = schema;
+    this.seed = seed;
+    this.count = count;
+  }
+  
+  public Iterator<Object> iterator() {
+    return new Iterator<Object>() {
+      private int n;
+      private Random random = new Random(seed);
+      public boolean hasNext() { return n < count; }
+      public Object next() {
+        n++;
+        return generate(root, random, 0);
+      }
+      public void remove() { throw new UnsupportedOperationException(); }
+    };
+  }
+  
+  @SuppressWarnings(value="unchecked")
+  private static Object generate(Schema schema, Random random, int d) {
+    switch (schema.getType()) {
+    case RECORD:
+      GenericRecord record = new GenericData.Record(schema);
+      for (Schema.Field field : schema.getFields())
+        record.put(field.name(), generate(field.schema(), random, d+1));
+      return record;
+    case ENUM:
+      List<String> symbols = schema.getEnumSymbols();
+      return new GenericData.EnumSymbol
+        (schema, symbols.get(random.nextInt(symbols.size())));
+    case ARRAY:
+      int length = (random.nextInt(5)+2)-d;
+      GenericArray<Object> array =
+        new GenericData.Array(length<=0?0:length, schema);
+      for (int i = 0; i < length; i++)
+        array.add(generate(schema.getElementType(), random, d+1));
+      return array;
+    case MAP:
+      length = (random.nextInt(5)+2)-d;
+      Map<Object,Object> map = new HashMap<Object,Object>(length<=0?0:length);
+      for (int i = 0; i < length; i++) {
+        map.put(randomUtf8(random, 40),
+                generate(schema.getValueType(), random, d+1));
+      }
+      return map;
+    case UNION:
+      List<Schema> types = schema.getTypes();
+      return generate(types.get(random.nextInt(types.size())), random, d);
+    case FIXED:
+      byte[] bytes = new byte[schema.getFixedSize()];
+      random.nextBytes(bytes);
+      return new GenericData.Fixed(schema, bytes);
+    case STRING:  return randomUtf8(random, 40);
+    case BYTES:   return randomBytes(random, 40);
+    case INT:     return random.nextInt();
+    case LONG:    return random.nextLong();
+    case FLOAT:   return random.nextFloat();
+    case DOUBLE:  return random.nextDouble();
+    case BOOLEAN: return random.nextBoolean();
+    case NULL:    return null;
+    default: throw new RuntimeException("Unknown type: "+schema);
+    }
+  }
+
+  private static Utf8 randomUtf8(Random rand, int maxLength) {
+    Utf8 utf8 = new Utf8().setLength(rand.nextInt(maxLength));
+    for (int i = 0; i < utf8.getLength(); i++) {
+      utf8.getBytes()[i] = (byte)('a'+rand.nextInt('z'-'a'));
+    }
+    return utf8;
+  }
+
+  private static ByteBuffer randomBytes(Random rand, int maxLength) {
+    ByteBuffer bytes = ByteBuffer.allocate(rand.nextInt(maxLength));
+    bytes.limit(bytes.capacity());
+    rand.nextBytes(bytes.array());
+    return bytes;
+  }
+
+  public static void main(String[] args) throws Exception {
+    if(args.length != 3) {
+      System.out.println("Usage: RandomData <schemafile> <outputfile> <count>");
+      System.exit(-1);
+    }
+    Schema sch = Schema.parse(new File(args[0]));
+    DataFileWriter<Object> writer =
+      new DataFileWriter<Object>(new GenericDatumWriter<Object>())
+      .create(sch, new File(args[1]));
+    try {
+      for (Object datum : new RandomData(sch, Integer.parseInt(args[2]))) {
+        writer.append(datum);
+      }
+    } finally {
+      writer.close();
+    }
+  }
+}
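
Besides the command-line entry point above, the iterator can be used directly to build reproducible fixtures, assuming this class is on the classpath. A small sketch, with an illustrative one-field record schema and seed:

    import org.apache.avro.RandomData;
    import org.apache.avro.Schema;

    public class RandomDataDemo {
      public static void main(String[] args) {
        Schema s = Schema.parse(
            "{\"type\":\"record\",\"name\":\"P\","
            + "\"fields\":[{\"name\":\"x\",\"type\":\"int\"}]}");
        // A fixed seed makes the three generated records reproducible.
        for (Object datum : new RandomData(s, 3, 42L)) {
          System.out.println(datum);
        }
      }
    }
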
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/SimpleException.java b/lang/java/ipc/src/test/java/org/apache/avro/SimpleException.java
new file mode 100644
index 0000000..fd1b8c3
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/SimpleException.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+/** This should be a static nested class in TestProtocolReflect, but that
+ * breaks CheckStyle (http://jira.codehaus.org/browse/MPCHECKSTYLE-20). */
+public class SimpleException extends Exception {
+  SimpleException() {}
+  SimpleException(String message) { super(message) ; }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestAnnotation.java b/lang/java/ipc/src/test/java/org/apache/avro/TestAnnotation.java
new file mode 100644
index 0000000..fdfaa78
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestAnnotation.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Target;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD})
+@Retention(RetentionPolicy.RUNTIME)
+public @interface TestAnnotation {
+}
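
Because the retention policy is RUNTIME, the annotation survives into the class file and is queryable via reflection; a brief sketch with illustrative names:

    import org.apache.avro.TestAnnotation;

    @TestAnnotation
    public class Annotated {
      @TestAnnotation public int field;

      @TestAnnotation
      public void method() {}

      public static void main(String[] args) {
        // Prints "true": RUNTIME retention makes the annotation visible
        // to reflection at runtime.
        System.out.println(
            Annotated.class.isAnnotationPresent(TestAnnotation.class));
      }
    }
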
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestBulkData.java b/lang/java/ipc/src/test/java/org/apache/avro/TestBulkData.java
new file mode 100644
index 0000000..0eb1a5b
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestBulkData.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import org.apache.avro.ipc.HttpServer;
+import org.apache.avro.ipc.HttpTransceiver;
+import org.apache.avro.ipc.Server;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.ipc.specific.SpecificResponder;
+
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.net.URL;
+import java.nio.ByteBuffer;
+import java.util.Random;
+
+import org.apache.avro.test.BulkData;
+
+public class TestBulkData {
+  private static final long COUNT =
+    Integer.parseInt(System.getProperty("test.count", "10"));
+  private static final int SIZE =
+    Integer.parseInt(System.getProperty("test.size", "65536"));
+
+  private static final ByteBuffer DATA = ByteBuffer.allocate(SIZE);
+  static {
+    Random rand = new Random();
+    DATA.limit(DATA.capacity());
+    DATA.position(0);
+    rand.nextBytes(DATA.array());
+  }
+
+  public static class BulkDataImpl implements BulkData {
+    public ByteBuffer read() { return DATA.duplicate(); }
+    public Void write(ByteBuffer data) {
+      Assert.assertEquals(SIZE, data.remaining());
+      return null;
+    }
+  }
+
+  private static Server server;
+  private static Transceiver client;
+  private static BulkData proxy;
+
+  @Before
+  public void startServer() throws Exception {
+    if (server != null) return;
+    server =
+      new HttpServer(new SpecificResponder(BulkData.class, new BulkDataImpl()),
+                     0);
+    server.start();
+    client =
+      new HttpTransceiver(new URL("http://127.0.0.1:"+server.getPort()+"/"));
+    proxy = SpecificRequestor.getClient(BulkData.class, client);
+  }
+
+  @Test
+  public void testRead() throws IOException {
+    for (int i = 0; i < COUNT; i++)
+      Assert.assertEquals(SIZE, proxy.read().remaining());
+  }
+
+  @Test
+  public void testWrite() throws IOException {
+    for (int i = 0; i < COUNT; i++)
+      proxy.write(DATA.duplicate());
+  }
+
+  @AfterClass
+  public static void stopServer() throws Exception {
+    server.close();
+  }
+
+  public static void main(String[] args) throws Exception {
+    TestBulkData test = new TestBulkData();
+    test.startServer();
+    System.out.println("READ");
+    long start = System.currentTimeMillis();
+    test.testRead();
+    printStats(start);
+    System.out.println("WRITE");
+    start = System.currentTimeMillis();
+    test.testWrite();
+    printStats(start);
+    test.stopServer();
+  }
+
+  private static void printStats(long start) {
+    double seconds = (System.currentTimeMillis()-start)/1000.0;
+    System.out.println("seconds = "+(int)seconds);
+    System.out.println("requests/second = "+(int)(COUNT/seconds));
+    double megabytes = (COUNT*SIZE)/(1024*1024.0);
+    System.out.println("MB = "+(int)megabytes);
+    System.out.println("MB/second = "+ (int)(megabytes/seconds));
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestCompare.java b/lang/java/ipc/src/test/java/org/apache/avro/TestCompare.java
new file mode 100644
index 0000000..8f02022
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestCompare.java
@@ -0,0 +1,264 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.generic.GenericArray;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.specific.SpecificDatumWriter;
+import org.apache.avro.io.BinaryData;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.util.Utf8;
+
+import org.apache.avro.test.TestRecord;
+import org.apache.avro.test.Kind;
+import org.apache.avro.test.MD5;
+
+public class TestCompare {
+
+  @Test
+  public void testNull() throws Exception {
+    Schema schema = Schema.parse("\"null\"");
+    byte[] b = render(null, schema, new GenericDatumWriter<Object>());
+    assertEquals(0, BinaryData.compare(b, 0, b, 0, schema));
+  }
+
+  @Test
+  public void testBoolean() throws Exception {
+    check("\"boolean\"", Boolean.FALSE, Boolean.TRUE);
+  }
+
+  @Test
+  public void testString() throws Exception {
+    check("\"string\"", new Utf8(""), new Utf8("a"));
+    check("\"string\"", new Utf8("a"), new Utf8("b"));
+    check("\"string\"", new Utf8("a"), new Utf8("ab"));
+    check("\"string\"", new Utf8("ab"), new Utf8("b"));
+  }
+
+  @Test
+  public void testBytes() throws Exception {
+    check("\"bytes\"",
+          ByteBuffer.wrap(new byte[]{}),
+          ByteBuffer.wrap(new byte[]{1}));
+    check("\"bytes\"",
+          ByteBuffer.wrap(new byte[]{1}),
+          ByteBuffer.wrap(new byte[]{2}));
+    check("\"bytes\"",
+          ByteBuffer.wrap(new byte[]{1,2}),
+          ByteBuffer.wrap(new byte[]{2}));
+  }
+
+  @Test
+  public void testInt() throws Exception {
+    check("\"int\"", new Integer(-1), new Integer(0));
+    check("\"int\"", new Integer(0), new Integer(1));
+  }
+
+  @Test
+  public void testLong() throws Exception {
+    check("\"long\"", new Long(11), new Long(12));
+    check("\"long\"", new Long(-1), new Long(1));
+  }
+
+  @Test
+  public void testFloat() throws Exception {
+    check("\"float\"", new Float(1.1), new Float(1.2));
+    check("\"float\"", new Float(-1.1), new Float(1.0));
+  }
+
+  @Test
+  public void testDouble() throws Exception {
+    check("\"double\"", new Double(1.2), new Double(1.3));
+    check("\"double\"", new Double(-1.2), new Double(1.3));
+  }
+
+  @Test
+  public void testArray() throws Exception {
+    String json = "{\"type\":\"array\", \"items\": \"long\"}";
+    Schema schema = Schema.parse(json);
+    GenericArray<Long> a1 = new GenericData.Array<Long>(1, schema);
+    a1.add(1L);
+    GenericArray<Long> a2 = new GenericData.Array<Long>(1, schema);
+    a2.add(1L);
+    a2.add(0L);
+    check(json, a1, a2);
+  }
+
+  @Test
+  public void testRecord() throws Exception {
+    String fields = " \"fields\":["
+      +"{\"name\":\"f\",\"type\":\"int\",\"order\":\"ignore\"},"
+      +"{\"name\":\"g\",\"type\":\"int\",\"order\":\"descending\"},"
+      +"{\"name\":\"h\",\"type\":\"int\"}]}";
+    String recordJson = "{\"type\":\"record\", \"name\":\"Test\","+fields;
+    Schema schema = Schema.parse(recordJson);
+    GenericData.Record r1 = new GenericData.Record(schema);
+    r1.put("f", 1);
+    r1.put("g", 13);
+    r1.put("h", 41);
+    GenericData.Record r2 = new GenericData.Record(schema);
+    r2.put("f", 0);
+    r2.put("g", 12);
+    r2.put("h", 41);
+    check(recordJson, r1, r2);
+    r2.put("f", 0);
+    r2.put("g", 13);
+    r2.put("h", 42);
+    check(recordJson, r1, r2);
+
+    String record2Json = "{\"type\":\"record\", \"name\":\"Test2\","+fields;
+    Schema schema2 = Schema.parse(record2Json);
+    GenericData.Record r3= new GenericData.Record(schema2);
+    r3.put("f", 1);
+    r3.put("g", 13);
+    r3.put("h", 41);
+    assert(!r1.equals(r3));                       // same fields, diff name
+  }
+
+  @Test
+  public void testEnum() throws Exception {
+    String json =
+      "{\"type\":\"enum\", \"name\":\"Test\",\"symbols\": [\"A\", \"B\"]}";
+    Schema schema = Schema.parse(json);
+    check(json,
+          new GenericData.EnumSymbol(schema, "A"),
+          new GenericData.EnumSymbol(schema, "B"));
+  }
+
+  @Test
+  public void testFixed() throws Exception {
+    String json = "{\"type\": \"fixed\", \"name\":\"Test\", \"size\": 1}";
+    Schema schema = Schema.parse(json);
+    check(json,
+          new GenericData.Fixed(schema, new byte[]{(byte)'a'}),
+          new GenericData.Fixed(schema, new byte[]{(byte)'b'}));
+  }
+
+  @Test
+  public void testUnion() throws Exception {
+    check("[\"string\", \"long\"]", new Utf8("a"), new Utf8("b"), false);
+    check("[\"string\", \"long\"]", new Long(1), new Long(2), false);
+    check("[\"string\", \"long\"]", new Utf8("a"), new Long(1), false);
+  }
+
+  @Test
+  public void testSpecificRecord() throws Exception {
+    TestRecord s1 = new TestRecord();
+    TestRecord s2 = new TestRecord();
+    s1.setName("foo");
+    s1.setKind(Kind.BAZ);
+    s1.setHash(new MD5(new byte[] {0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5}));
+    s2.setName("bar");
+    s2.setKind(Kind.BAR);
+    s2.setHash(new MD5(new byte[] {0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,6}));
+    Schema schema = SpecificData.get().getSchema(TestRecord.class);
+
+    check(schema, s1, s2, true, new SpecificDatumWriter<TestRecord>(schema),
+          SpecificData.get());
+    s2.setKind(Kind.BAZ);
+    check(schema, s1, s2, true, new SpecificDatumWriter<TestRecord>(schema),
+          SpecificData.get());
+  }  
+
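+  /** Asserts that o1 sorts strictly before o2: checks binary comparison,
+   * datum comparison, equals, and hash-code consistency for both values. */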
+  private static <T> void check(String schemaJson, T o1, T o2)
+    throws Exception {
+    check(schemaJson, o1, o2, true);
+  }
+
+  private static <T> void check(String schemaJson, T o1, T o2,
+                            boolean comparable)
+    throws Exception {
+    check(Schema.parse(schemaJson), o1, o2, comparable,
+          new GenericDatumWriter<T>(), GenericData.get());
+  }
+
+  private static <T> void check(Schema schema, T o1, T o2,
+                            boolean comparable,
+                            DatumWriter<T> writer,
+                            GenericData comparator)
+    throws Exception {
+
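+    // encode both datums, then verify the byte-level ordering is o1 < o2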
+    byte[] b1 = render(o1, schema, writer);
+    byte[] b2 = render(o2, schema, writer);
+    assertEquals(-1, BinaryData.compare(b1, 0, b2, 0, schema));
+    assertEquals(1, BinaryData.compare(b2, 0, b1, 0, schema));
+    assertEquals(0, BinaryData.compare(b1, 0, b1, 0, schema));
+    assertEquals(0, BinaryData.compare(b2, 0, b2, 0, schema));
+
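+    // in-memory comparison must agree with the binary ordering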
+    assertEquals(-1, compare(o1, o2, schema, comparable, comparator));
+    assertEquals(1, compare(o2, o1, schema, comparable, comparator));
+    assertEquals(0, compare(o1, o1, schema, comparable, comparator));
+    assertEquals(0, compare(o2, o2, schema, comparable, comparator));
+
+    assert(o1.equals(o1));
+    assert(o2.equals(o2));
+    assert(!o1.equals(o2));
+    assert(!o2.equals(o1));
+    assert(!o1.equals(new Object()));
+    assert(!o2.equals(new Object()));
+    assert(!o1.equals(null));
+    assert(!o2.equals(null));
+
+    assert(o1.hashCode() != o2.hashCode());
+
+    // check BinaryData.hashCode against Object.hashCode
+    if (schema.getType() != Schema.Type.ENUM) {
+      assertEquals(o1.hashCode(),
+                   BinaryData.hashCode(b1, 0, b1.length, schema));
+      assertEquals(o2.hashCode(),
+                   BinaryData.hashCode(b2, 0, b2.length, schema));
+    }
+
+    // check BinaryData.hashCode against GenericData.hashCode
+    assertEquals(comparator.hashCode(o1, schema),
+                 BinaryData.hashCode(b1, 0, b1.length, schema));
+    assertEquals(comparator.hashCode(o2, schema),
+                 BinaryData.hashCode(b2, 0, b2.length, schema));
+
+  }
+
+  @SuppressWarnings(value="unchecked")
+  private static int compare(Object o1, Object o2, Schema schema,
+                             boolean comparable, GenericData comparator) {
+    return comparable
+      ? ((Comparable<Object>)o1).compareTo(o2)
+      : comparator.compare(o1, o2, schema);
+  }
+
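+  /** Encodes the datum to Avro binary with the given schema and writer. */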
+  private static <T> byte[] render(T datum, Schema schema,
+                               DatumWriter<T> writer)
+    throws IOException {
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    writer.setSchema(schema);
+    Encoder enc = new EncoderFactory().directBinaryEncoder(out, null);
+    writer.write(datum, enc);
+    enc.flush();
+    return out.toByteArray();
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestDataFileSpecific.java b/lang/java/ipc/src/test/java/org/apache/avro/TestDataFileSpecific.java
new file mode 100644
index 0000000..9d85a20
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestDataFileSpecific.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericData.Record;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.specific.SpecificDatumReader;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import org.apache.avro.Foo;
+
+public class TestDataFileSpecific {
+
+  private static final File DIR =
+    new File(System.getProperty("test.dir","/tmp"));
+  private static final File FILE = new File(DIR, "specific.avro");
+
+  /* Test that, when the SpecificDatumReader<T>() default constructor is used
+   * to read a file written with a different schema, both the reader's and
+   * the writer's schemas are found. */
+  @Test
+  public void testSpecificDatumReaderDefaultCtor() throws IOException {
+    // like the specific Foo, but with another field
+    Schema s1 = Schema.parse("{\"type\":\"record\",\"name\":\"Foo\","
+                             +"\"namespace\":\"org.apache.avro\",\"fields\":["
+                             +"{\"name\":\"label\",\"type\":\"string\"},"
+                             +"{\"name\":\"id\",\"type\":\"int\"}]}");
+
+    // write a file using generic objects
+    DataFileWriter<Record> writer
+      = new DataFileWriter<Record>(new GenericDatumWriter<Record>(s1))
+      .create(s1, FILE);
+    for (int i = 0; i < 10; i++) {
+      Record r = new Record(s1);
+      r.put("label", ""+i);
+      r.put("id", i);
+      writer.append(r);
+    }
+    writer.close();
+
+    // read with 'new SpecificDatumReader<T>()' to force the reader's schema
+    // to be inferred from the runtime class
+    DataFileReader<Foo> reader =
+      new DataFileReader<Foo>(FILE, new SpecificDatumReader<Foo>());
+    int i = 0;
+    for (Foo f : reader)
+      Assert.assertEquals(""+(i++), f.getLabel().toString());
+    Assert.assertEquals(10, i);
+    reader.close();
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestNamespaceReflect.java b/lang/java/ipc/src/test/java/org/apache/avro/TestNamespaceReflect.java
new file mode 100644
index 0000000..a2ad03a
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestNamespaceReflect.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import org.apache.avro.ipc.SocketServer;
+import org.apache.avro.ipc.SocketTransceiver;
+import org.apache.avro.ipc.reflect.ReflectRequestor;
+import org.apache.avro.ipc.reflect.ReflectResponder;
+import org.apache.avro.test.namespace.TestNamespace;
+import org.junit.Before;
+
+import java.net.InetSocketAddress;
+
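+/** Runs the namespace tests using reflect-based requestor and responder. */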
+public class TestNamespaceReflect extends TestNamespaceSpecific {
+
+  @Before @Override
+  public void testStartServer() throws Exception {
+    if (server != null) return;
+    server = new SocketServer(new ReflectResponder(TestNamespace.class, new TestImpl()),
+                              new InetSocketAddress(0));
+    server.start();
+    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
+    proxy = ReflectRequestor.getClient(TestNamespace.class, client);
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestNamespaceSpecific.java b/lang/java/ipc/src/test/java/org/apache/avro/TestNamespaceSpecific.java
new file mode 100644
index 0000000..d4c9bf1
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestNamespaceSpecific.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import org.apache.avro.ipc.SocketServer;
+import org.apache.avro.ipc.SocketTransceiver;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.ipc.specific.SpecificResponder;
+import org.apache.avro.test.namespace.TestNamespace;
+import org.apache.avro.test.util.MD5;
+import org.apache.avro.test.errors.TestError;
+import org.apache.avro.test.namespace.TestRecord;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+public class TestNamespaceSpecific {
+
+  public static class TestImpl implements TestNamespace {
+    public TestRecord echo(TestRecord record) { return record; }
+    public Void error() throws AvroRemoteException {
+      throw TestError.newBuilder().setMessage$("an error").build();
+    }
+  }
+
+  protected static SocketServer server;
+  protected static Transceiver client;
+  protected static TestNamespace proxy;
+
+  @Before
+  public void testStartServer() throws Exception {
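+    // start a single shared server and client for the whole test class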
+    if (server != null) return;
+    server = new SocketServer(new SpecificResponder(TestNamespace.class, new TestImpl()),
+                              new InetSocketAddress(0));
+    server.start();
+    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
+    proxy = SpecificRequestor.getClient(TestNamespace.class, client);
+  }
+
+  @Test
+  public void testEcho() throws IOException {
+    TestRecord record = new TestRecord();
+    record.setHash(new MD5(new byte[]{0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5}));
+    TestRecord echoed = proxy.echo(record);
+    assertEquals(record, echoed);
+    assertEquals(record.hashCode(), echoed.hashCode());
+  }
+
+  @Test
+  public void testError() throws IOException {
+    TestError error = null;
+    try {
+      proxy.error();
+    } catch (TestError e) {
+      error = e;
+    }
+    assertNotNull(error);
+    assertEquals("an error", error.getMessage$().toString());
+  }
+
+  @AfterClass
+  public static void testStopServer() throws IOException {
+    client.close();
+    server.close();
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolDatagram.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolDatagram.java
new file mode 100644
index 0000000..1f16acc
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolDatagram.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.net.InetSocketAddress;
+import java.util.Random;
+
+import org.apache.avro.ipc.DatagramServer;
+import org.apache.avro.ipc.DatagramTransceiver;
+import org.apache.avro.ipc.Responder;
+import org.apache.avro.ipc.Server;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.specific.SpecificResponder;
+import org.apache.avro.test.Simple;
+
+public class TestProtocolDatagram extends TestProtocolSpecific {
+  @Override
+  public Server createServer(Responder testResponder) throws Exception {
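+    // bind a fresh SpecificResponder to a random high port (10000-19999)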
+    return new DatagramServer(new SpecificResponder(Simple.class, new TestImpl()),
+        new InetSocketAddress("localhost",
+            new Random().nextInt(10000)+10000));
+  }
+  
+  @Override
+  public Transceiver createTransceiver() throws Exception {
+    return new DatagramTransceiver(new InetSocketAddress("localhost", server.getPort()));
+  }
+
+  @Override
+  protected int getExpectedHandshakeCount() {
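+    // no handshakes are expected over the datagram transport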
+    return 0;
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGeneric.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGeneric.java
new file mode 100644
index 0000000..1309ea1
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGeneric.java
@@ -0,0 +1,263 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import org.apache.avro.Protocol.Message;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.ipc.SocketServer;
+import org.apache.avro.ipc.SocketTransceiver;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.generic.GenericRequestor;
+import org.apache.avro.ipc.generic.GenericResponder;
+import org.apache.avro.util.Utf8;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+
+public class TestProtocolGeneric {
+  private static final Logger LOG
+    = LoggerFactory.getLogger(TestProtocolGeneric.class);
+
+  protected static final File FILE = new File("../../../share/test/schemas/simple.avpr");
+  protected static final Protocol PROTOCOL;
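+  // parse the shared test protocol once; fail class loading if it is missing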
+  static {
+    try {
+      PROTOCOL = Protocol.parse(FILE);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
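+  // when set, the "error" message throws an undeclared RuntimeException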
+  private static boolean throwUndeclaredError;
+
+  protected static class TestResponder extends GenericResponder {
+    public TestResponder() { super(PROTOCOL); }
+    public Object respond(Message message, Object request)
+      throws AvroRemoteException {
+      GenericRecord params = (GenericRecord)request;
+
+      if ("hello".equals(message.getName())) {
+        LOG.info("hello: "+params.get("greeting"));
+        return new Utf8("goodbye");
+      }
+
+      if ("echo".equals(message.getName())) {
+        Object record = params.get("record");
+        LOG.info("echo: "+record);
+        return record;
+      }
+
+      if ("echoBytes".equals(message.getName())) {
+        Object data = params.get("data");
+        LOG.info("echoBytes: "+data);
+        return data;
+      }
+
+      if ("error".equals(message.getName())) {
+        if (throwUndeclaredError) throw new RuntimeException("foo");
+        GenericRecord error =
+          new GenericData.Record(PROTOCOL.getType("TestError"));
+        error.put("message", new Utf8("an error"));
+        throw new AvroRemoteException(error);
+      }
+      
+      throw new AvroRuntimeException("unexpected message: "+message.getName());
+    }
+
+  }
+
+  protected static SocketServer server;
+  protected static Transceiver client;
+  protected static GenericRequestor requestor;
+
+  @Before
+  public void testStartServer() throws Exception {
+    if (server != null) return;
+    server = new SocketServer(new TestResponder(), new InetSocketAddress(0));
+    server.start();
+    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
+    requestor = new GenericRequestor(PROTOCOL, client);
+  }
+
+  @Test
+  public void testHello() throws IOException {
+    GenericRecord params = 
+      new GenericData.Record(PROTOCOL.getMessages().get("hello").getRequest());
+    params.put("greeting", new Utf8("bob"));
+    Utf8 response = (Utf8)requestor.request("hello", params);
+    assertEquals(new Utf8("goodbye"), response);
+  }
+
+  @Test
+  public void testEcho() throws IOException {
+    GenericRecord record =
+      new GenericData.Record(PROTOCOL.getType("TestRecord"));
+    record.put("name", new Utf8("foo"));
+    record.put("kind", new GenericData.EnumSymbol
+               (PROTOCOL.getType("Kind"), "BAR"));
+    record.put("hash", new GenericData.Fixed
+               (PROTOCOL.getType("MD5"),
+                new byte[]{0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5}));
+    GenericRecord params =
+      new GenericData.Record(PROTOCOL.getMessages().get("echo").getRequest());
+    params.put("record", record);
+    Object echoed = requestor.request("echo", params);
+    assertEquals(record, echoed);
+  }
+
+  @Test
+  public void testEchoBytes() throws IOException {
+    Random random = new Random();
+    int length = random.nextInt(1024*16);
+    GenericRecord params =
+      new GenericData.Record(PROTOCOL.getMessages().get("echoBytes").getRequest());
+    ByteBuffer data = ByteBuffer.allocate(length);
+    random.nextBytes(data.array());
+    data.flip();
+    params.put("data", data);
+    Object echoed = requestor.request("echoBytes", params);
+    assertEquals(data, echoed);
+  }
+
+  @Test
+  public void testError() throws IOException {
+    GenericRecord params =
+      new GenericData.Record(PROTOCOL.getMessages().get("error").getRequest());
+    AvroRemoteException error = null;
+    try {
+      requestor.request("error", params);
+    } catch (AvroRemoteException e) {
+      error = e;
+    }
+    assertNotNull(error);
+    assertEquals("an error", ((GenericRecord)error.getValue()).get("message").toString());
+  }
+
+  @Test
+  public void testUndeclaredError() throws IOException {
+    throwUndeclaredError = true;
+    RuntimeException error = null;
+    GenericRecord params =
+      new GenericData.Record(PROTOCOL.getMessages().get("error").getRequest());
+    try {
+      requestor.request("error", params);
+    } catch (RuntimeException e) {
+      error = e;
+    } finally {
+      throwUndeclaredError = false;
+    }
+    assertNotNull(error);
+    assertTrue(error.toString().contains("foo"));
+  }
+
+  /** Construct and use a different protocol whose "hello" method has an extra
+      argument to check that schema is sent to parse request. */
+  @Test
+  public void testHandshake() throws IOException {
+    Protocol protocol = new Protocol("Simple", "org.apache.avro.test");
+    List<Field> fields = new ArrayList<Field>();
+    fields.add(new Schema.Field("extra", Schema.create(Schema.Type.BOOLEAN),
+                   null, null));
+    fields.add(new Schema.Field("greeting", Schema.create(Schema.Type.STRING),
+                   null, null));
+    Protocol.Message message =
+      protocol.createMessage("hello",
+                             null /* doc */,
+                             Schema.createRecord(fields),
+                             Schema.create(Schema.Type.STRING),
+                             Schema.createUnion(new ArrayList<Schema>()));
+    protocol.getMessages().put("hello", message);
+    Transceiver t
+      = new SocketTransceiver(new InetSocketAddress(server.getPort()));
+    try {
+      GenericRequestor r = new GenericRequestor(protocol, t);
+      GenericRecord params = new GenericData.Record(message.getRequest());
+      params.put("extra", Boolean.TRUE);
+      params.put("greeting", new Utf8("bob"));
+      Utf8 response = (Utf8)r.request("hello", params);
+      assertEquals(new Utf8("goodbye"), response);
+    } finally {
+      t.close();
+    }
+  }
+
+  /** Construct and use a different protocol whose "echo" response has an extra
+      field to check that correct schema is used to parse response. */
+  @Test
+  public void testResponseChange() throws IOException {
+
+    List<Field> fields = new ArrayList<Field>();
+    for (Field f : PROTOCOL.getType("TestRecord").getFields())
+      fields.add(new Field(f.name(), f.schema(), null, null));
+    fields.add(new Field("extra", Schema.create(Schema.Type.BOOLEAN),
+                         null, true));
+    Schema record =
+      Schema.createRecord("TestRecord", null, "org.apache.avro.test", false);
+    record.setFields(fields);
+
+    Protocol protocol = new Protocol("Simple", "org.apache.avro.test");
+    List<Field> params = new ArrayList<Field>();
+    params.add(new Field("record", record, null, null));
+
+    Protocol.Message message =
+      protocol.createMessage("echo", null, Schema.createRecord(params),
+                             record,
+                             Schema.createUnion(new ArrayList<Schema>()));
+    protocol.getMessages().put("echo", message);
+    Transceiver t
+      = new SocketTransceiver(new InetSocketAddress(server.getPort()));
+    try {
+      GenericRequestor r = new GenericRequestor(protocol, t);
+      GenericRecord args = new GenericData.Record(message.getRequest());
+      GenericRecord rec = new GenericData.Record(record);
+      rec.put("name", new Utf8("foo"));
+      rec.put("kind", new GenericData.EnumSymbol
+              (PROTOCOL.getType("Kind"), "BAR"));
+      rec.put("hash", new GenericData.Fixed
+              (PROTOCOL.getType("MD5"),
+               new byte[]{0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5}));
+      rec.put("extra", Boolean.TRUE);
+      args.put("record", rec);
+      GenericRecord response = (GenericRecord)r.request("echo", args);
+      assertEquals(rec, response);
+    } finally {
+      t.close();
+    }
+  }
+
+  @AfterClass
+  public static void testStopServer() throws IOException {
+    client.close();
+    server.close();
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGenericMeta.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGenericMeta.java
new file mode 100644
index 0000000..5bc46f6
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGenericMeta.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.net.InetSocketAddress;
+
+import org.apache.avro.ipc.Responder;
+import org.apache.avro.ipc.SocketServer;
+import org.apache.avro.ipc.SocketTransceiver;
+import org.apache.avro.ipc.generic.GenericRequestor;
+import org.junit.Before;
+
+public class TestProtocolGenericMeta extends TestProtocolGeneric {
+  
+  @Before @Override
+  public void testStartServer() throws Exception {
+    if (server != null) return;
+    Responder responder = new TestResponder();
+    responder.addRPCPlugin(new RPCMetaTestPlugin("key1"));
+    responder.addRPCPlugin(new RPCMetaTestPlugin("key2"));
+    server = new SocketServer(responder, new InetSocketAddress(0));
+    server.start();
+    
+    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
+    requestor = new GenericRequestor(PROTOCOL, client);
+    requestor.addRPCPlugin(new RPCMetaTestPlugin("key1"));
+    requestor.addRPCPlugin(new RPCMetaTestPlugin("key2"));
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolHttp.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolHttp.java
new file mode 100644
index 0000000..dc460c5
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolHttp.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.ipc.Server;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.Responder;
+import org.apache.avro.ipc.HttpServer;
+import org.apache.avro.ipc.HttpTransceiver;
+import org.apache.avro.ipc.generic.GenericRequestor;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.test.Simple;
+
+import org.junit.Test;
+
+import java.net.URL;
+import java.net.ServerSocket;
+import java.net.SocketTimeoutException;
+import java.util.ArrayList;
+
+public class TestProtocolHttp extends TestProtocolSpecific {
+
+  @Override
+  public Server createServer(Responder testResponder) throws Exception {
+    return new HttpServer(testResponder, 0);
+  }
+  
+  @Override
+  public Transceiver createTransceiver() throws Exception {
+    return new HttpTransceiver(new URL("http://127.0.0.1:"+server.getPort()+"/"));
+  }
+ 
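+  // HTTP is stateless, so handshakes repeat rather than occur a fixed number of times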
+  @Override
+  protected int getExpectedHandshakeCount() {
+    return REPEATING;
+  }
+
+  @Test(expected=SocketTimeoutException.class)
+  public void testTimeout() throws Throwable {
+    ServerSocket s = new ServerSocket(0);
+    HttpTransceiver client =
+      new HttpTransceiver(new URL("http://127.0.0.1:"+s.getLocalPort()+"/"));
+    client.setTimeout(100);
+    Simple proxy = SpecificRequestor.getClient(Simple.class, client);
+    try {
+      proxy.hello("foo");
+    } catch (AvroRemoteException e) {
+      throw e.getCause();
+    } finally {
+      s.close();
+    }
+  }
+
+  /** Test that Responder ignores one-way with stateless transport. */
+  @Test public void testStatelessOneway() throws Exception {
+    // a version of the Simple protocol that doesn't declare "ack" one-way
+    Protocol protocol = new Protocol("Simple", "org.apache.avro.test");
+    Protocol.Message message =
+      protocol.createMessage("ack", null,
+                             Schema.createRecord(new ArrayList<Field>()),
+                             Schema.create(Schema.Type.NULL),
+                             Schema.createUnion(new ArrayList<Schema>()));
+    protocol.getMessages().put("ack", message);
+
+    // call a server over a stateless protocol that has a one-way "ack"
+    GenericRequestor requestor =
+      new GenericRequestor(protocol, createTransceiver());
+    requestor.request("ack", new GenericData.Record(message.getRequest()));
+
+    // make the request again, to better test handshakes w/ differing protocols
+    requestor.request("ack", new GenericData.Record(message.getRequest()));
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolHttps.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolHttps.java
new file mode 100644
index 0000000..1c87955
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolHttps.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.ipc.Server;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.Responder;
+import org.apache.avro.ipc.HttpServer;
+import org.apache.avro.ipc.HttpTransceiver;
+import org.apache.avro.ipc.generic.GenericRequestor;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.test.Simple;
+
+import org.junit.Test;
+
+import org.mortbay.jetty.security.SslSocketConnector;
+
+import java.net.URL;
+import java.net.ServerSocket;
+import java.net.SocketTimeoutException;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.util.ArrayList;
+
+public class TestProtocolHttps extends TestProtocolSpecific {
+
+  @Override
+  public Server createServer(Responder testResponder) throws Exception {
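+    // configure JVM-wide key and trust stores, shared by server and client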
+    System.setProperty("javax.net.ssl.keyStore", "src/test/keystore");
+    System.setProperty("javax.net.ssl.keyStorePassword", "avrotest");
+    System.setProperty("javax.net.ssl.password", "avrotest");
+    System.setProperty("javax.net.ssl.trustStore", "src/test/truststore");
+    System.setProperty("javax.net.ssl.trustStorePassword", "avrotest");
+    SslSocketConnector connector = new SslSocketConnector();
+    connector.setPort(18443);
+    connector.setKeystore(System.getProperty("javax.net.ssl.keyStore"));
+    connector.setPassword(System.getProperty("javax.net.ssl.password"));
+    connector.setKeyPassword(System.getProperty("javax.net.ssl.keyStorePassword"));
+    connector.setHost("localhost");
+    connector.setNeedClientAuth(false);
+    return new HttpServer(testResponder, connector);
+  }
+  
+  @Override
+  public Transceiver createTransceiver() throws Exception {
+    return new HttpTransceiver(new URL("https://localhost:"+server.getPort()+"/"));
+  }
+ 
+  @Override
+  protected int getExpectedHandshakeCount() {
+    return REPEATING;
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolNetty.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolNetty.java
new file mode 100644
index 0000000..ed73e78
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolNetty.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro;
+
+import java.net.InetSocketAddress;
+
+import org.apache.avro.ipc.NettyServer;
+import org.apache.avro.ipc.NettyTransceiver;
+import org.apache.avro.ipc.Responder;
+import org.apache.avro.ipc.Server;
+import org.apache.avro.ipc.Transceiver;
+
+/**
+ * Protocol test with Netty server and transceiver
+ */
+public class TestProtocolNetty extends TestProtocolSpecific {
+  @Override
+  public Server createServer(Responder testResponder) throws Exception {
+    return new NettyServer(testResponder, new InetSocketAddress(0));
+  }
+  
+  @Override
+  public Transceiver createTransceiver() throws Exception {
+    return new NettyTransceiver(new InetSocketAddress(server.getPort()), 2000L);
+  }
+  
+  @Override
+  protected int getExpectedHandshakeCount() {
+    return REPEATING;
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolParsing.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolParsing.java
new file mode 100644
index 0000000..ba07225
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolParsing.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.junit.Test;
+
+import org.apache.avro.Protocol.Message;
+
+public class TestProtocolParsing {
+  public static Protocol getSimpleProtocol() throws IOException {
+    File file = new File("../../../share/test/schemas/simple.avpr");
+    Protocol protocol = Protocol.parse(file);
+    return protocol;
+  }
+  
+  @Test
+  public void testParsing() throws IOException {
+    Protocol protocol = getSimpleProtocol();
+    
+    assertEquals("Protocol used for testing.", protocol.getDoc());
+    assertEquals(6, protocol.getMessages().size());
+    assertEquals("Pretend you're in a cave!", protocol.getMessages().get("echo").getDoc());
+  }
+  
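+  /** Wraps a single message declaration in a minimal protocol and parses it. */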
+  private static Message parseMessage(String message) throws Exception {
+    return Protocol.parse("{\"protocol\": \"org.foo.Bar\","
+                          +"\"types\": [],"
+                          +"\"messages\": {"
+                          + message
+                          + "}}").getMessages().values().iterator().next();
+  }
+
+  @Test public void oneWay() throws Exception {
+    Message m;
+    // permit one-way messages w/ null response
+    m = parseMessage("\"ack\": {"
+                     +"\"request\": [],"
+                     +"\"response\": \"null\","
+                     +"\"one-way\": true}");
+    assertTrue(m.isOneWay());
+    // permit one-way messages w/o response
+    m = parseMessage("\"ack\": {"
+                     +"\"request\": [],"
+                     +"\"one-way\": true}");
+    assertTrue(m.isOneWay());
+  }
+
+  @Test(expected=SchemaParseException.class)
+  public void oneWayResponse() throws Exception {
+    // prohibit one-way messages with a non-null response type
+    parseMessage("\"ack\": {"
+                 +"\"request\": [\"string\"],"
+                 +"\"response\": \"string\","
+                 +"\"one-way\": true}");
+  }
+
+  @Test(expected=SchemaParseException.class)
+  public void oneWayError() throws Exception {
+    // prohibit one-way messages with errors
+    parseMessage("\"ack\": {"
+                 +"\"request\": [\"string\"],"
+                 +"\"errors\": [],"
+                 +"\"one-way\": true}");
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflect.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflect.java
new file mode 100644
index 0000000..7567c6e
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflect.java
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.ipc.Server;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.SocketServer;
+import org.apache.avro.ipc.SocketTransceiver;
+import org.apache.avro.ipc.reflect.ReflectRequestor;
+import org.apache.avro.ipc.reflect.ReflectResponder;
+
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.net.InetSocketAddress;
+import java.util.Random;
+import java.io.IOException;
+
+public class TestProtocolReflect {
+
+  public static class TestRecord {
+    private String name;
+    public int hashCode() { return this.name.hashCode(); }
+    public boolean equals(Object that) {
+      return this.name.equals(((TestRecord)that).name);
+    }
+  }
+
+  public interface Simple {
+    String hello(String greeting);
+    TestRecord echo(TestRecord record);
+    int add(int arg1, int arg2);
+    byte[] echoBytes(byte[] data);
+    void error() throws SimpleException;
+  }
+  
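+  // when set, error() throws an undeclared RuntimeException instead of SimpleException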
+  private static boolean throwUndeclaredError;
+
+  public static class TestImpl implements Simple {
+    public String hello(String greeting) { return "goodbye"; }
+    public int add(int arg1, int arg2) { return arg1 + arg2; }
+    public TestRecord echo(TestRecord record) { return record; }
+    public byte[] echoBytes(byte[] data) { return data; }
+    public void error() throws SimpleException {
+      if (throwUndeclaredError) throw new RuntimeException("foo");
+      throw new SimpleException("foo");
+    }
+  }
+
+  protected static Server server;
+  protected static Transceiver client;
+  protected static Simple proxy;
+
+  @Before
+  public void testStartServer() throws Exception {
+    if (server != null) return;
+    server = new SocketServer(new ReflectResponder(Simple.class, new TestImpl()),
+                              new InetSocketAddress(0));
+    server.start();
+    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
+    proxy = ReflectRequestor.getClient(Simple.class, client);
+  }
+
+  @Test public void testClassLoader() throws Exception {
+    ClassLoader loader = new ClassLoader() {};
+
+    ReflectResponder responder
+      = new ReflectResponder(Simple.class, new TestImpl(),
+                             new ReflectData(loader));
+    assertEquals(responder.getReflectData().getClassLoader(), loader);
+
+    ReflectRequestor requestor
+      = new ReflectRequestor(Simple.class, client, new ReflectData(loader));
+    assertEquals(requestor.getReflectData().getClassLoader(), loader);
+  }
+
+  @Test
+  public void testHello() throws IOException {
+    String response = proxy.hello("bob");
+    assertEquals("goodbye", response);
+  }
+
+  @Test
+  public void testEcho() throws IOException {
+    TestRecord record = new TestRecord();
+    record.name = "foo";
+    TestRecord echoed = proxy.echo(record);
+    assertEquals(record, echoed);
+  }
+
+  @Test
+  public void testAdd() throws IOException {
+    int result = proxy.add(1, 2);
+    assertEquals(3, result);
+  }
+
+  @Test
+  public void testEchoBytes() throws IOException {
+    Random random = new Random();
+    int length = random.nextInt(1024*16);
+    byte[] data = new byte[length];
+    random.nextBytes(data);
+    byte[] echoed = proxy.echoBytes(data);
+    assertArrayEquals(data, echoed);
+  }
+
+  @Test
+  public void testError() throws IOException {
+    SimpleException error = null;
+    try {
+      proxy.error();
+    } catch (SimpleException e) {
+      error = e;
+    }
+    assertNotNull(error);
+    assertEquals("foo", error.getMessage());
+  }
+
+  @Test
+  public void testUndeclaredError() throws Exception {
+    throwUndeclaredError = true;
+    RuntimeException error = null;
+    try {
+      proxy.error();
+    } catch (RuntimeException e) {
+      error = e;
+    } finally {
+      throwUndeclaredError = false;
+    }
+    assertNotNull(error);
+    assertTrue(error.toString().contains("foo"));
+  }
+
+  @AfterClass
+  public static void testStopServer() throws IOException {
+    client.close();
+    server.close();
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflectMeta.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflectMeta.java
new file mode 100644
index 0000000..c6394cd
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflectMeta.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import org.apache.avro.ipc.SocketServer;
+import org.apache.avro.ipc.SocketTransceiver;
+import org.apache.avro.ipc.reflect.ReflectRequestor;
+import org.apache.avro.ipc.reflect.ReflectResponder;
+import org.apache.avro.test.Simple;
+import org.junit.Before;
+
+import java.net.InetSocketAddress;
+
+public class TestProtocolReflectMeta extends TestProtocolReflect {
+
+  @Before @Override
+  public void testStartServer() throws Exception {
+    if (server != null) return;
+    ReflectResponder rresp = new ReflectResponder(Simple.class, new TestImpl());
+    rresp.addRPCPlugin(new RPCMetaTestPlugin("key1"));
+    rresp.addRPCPlugin(new RPCMetaTestPlugin("key2"));
+    server = new SocketServer(rresp, new InetSocketAddress(0));
+    server.start();
+    
+    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
+    ReflectRequestor requestor = new ReflectRequestor(Simple.class, client);
+    requestor.addRPCPlugin(new RPCMetaTestPlugin("key1"));
+    requestor.addRPCPlugin(new RPCMetaTestPlugin("key2"));
+    proxy = ReflectRequestor.getClient(Simple.class, requestor);
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java
new file mode 100644
index 0000000..f660f28
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java
@@ -0,0 +1,351 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.ipc.HttpTransceiver;
+import org.apache.avro.ipc.RPCContext;
+import org.apache.avro.ipc.RPCPlugin;
+import org.apache.avro.ipc.Requestor;
+import org.apache.avro.ipc.Responder;
+import org.apache.avro.ipc.Server;
+import org.apache.avro.ipc.SocketServer;
+import org.apache.avro.ipc.SocketTransceiver;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.ipc.specific.SpecificResponder;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.ipc.generic.GenericRequestor;
+import org.apache.avro.test.Simple;
+import org.apache.avro.test.Kind;
+import org.apache.avro.test.MD5;
+import org.apache.avro.test.TestError;
+import org.apache.avro.test.TestRecord;
+import org.junit.After;
+import org.junit.Test;
+import org.junit.Before;
+import org.junit.AfterClass;
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.LineNumberReader;
+import java.net.InetSocketAddress;
+import java.net.URL;
+import java.nio.ByteBuffer;
+import java.util.Random;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.HashSet;
+
+
+public class TestProtocolSpecific {
+
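+  /** Expected-handshake value meaning the count is unbounded and not asserted. */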
+  protected static final int REPEATING = -1;
+  protected static final File SERVER_PORTS_DIR
+    = new File(System.getProperty("test.dir", "/tmp")+"/server-ports/");
+
+  public static int ackCount;
+
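+  // when set, error() throws an undeclared RuntimeException instead of TestError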
+  private static boolean throwUndeclaredError;
+
+  public static class TestImpl implements Simple {
+    public String hello(String greeting) { return "goodbye"; }
+    public int add(int arg1, int arg2) { return arg1 + arg2; }
+    public TestRecord echo(TestRecord record) { return record; }
+    public ByteBuffer echoBytes(ByteBuffer data) { return data; }
+    public Void error() throws AvroRemoteException {
+      if (throwUndeclaredError) throw new RuntimeException("foo");
+      throw TestError.newBuilder().setMessage$("an error").build();
+    }
+    public void ack() { ackCount++; }
+  }
+
+  protected static Server server;
+  protected static Transceiver client;
+  protected static Simple proxy;
+
+  protected static SpecificResponder responder;
+
+  protected static HandshakeMonitor monitor;
+
+  @Before
+  public void testStartServer() throws Exception {
+    if (server != null) return;
+    responder = new SpecificResponder(Simple.class, new TestImpl());
+    server = createServer(responder);
+    server.start();
+    
+    client = createTransceiver();
+    SpecificRequestor req = new SpecificRequestor(Simple.class, client);
+    addRpcPlugins(req);
+    proxy = SpecificRequestor.getClient(Simple.class, req);
+    
+    monitor = new HandshakeMonitor();
+    responder.addRPCPlugin(monitor);
+  }
+  
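+  /** Hook for subclasses to attach RPC plugins to the requestor. */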
+  public void addRpcPlugins(Requestor requestor) {}
+  
+  public Server createServer(Responder testResponder) throws Exception {
+    return server = new SocketServer(testResponder,
+                                     new InetSocketAddress(0));
+  }
+  
+  public Transceiver createTransceiver() throws Exception {
+    return new SocketTransceiver(new InetSocketAddress(server.getPort()));
+  }
+
+  @Test public void testClassLoader() throws Exception {
+    ClassLoader loader = new ClassLoader() {};
+
+    SpecificResponder responder
+      = new SpecificResponder(Simple.class, new TestImpl(),
+                              new SpecificData(loader));
+    assertEquals(responder.getSpecificData().getClassLoader(), loader);
+
+    SpecificRequestor requestor
+      = new SpecificRequestor(Simple.class, client, new SpecificData(loader));
+    assertEquals(requestor.getSpecificData().getClassLoader(), loader);
+  }
+
+  @Test public void testGetRemote() throws IOException {
+    assertEquals(Simple.PROTOCOL, SpecificRequestor.getRemote(proxy));
+  }
+
+  @Test
+  public void testHello() throws IOException {
+    String response = proxy.hello("bob");
+    assertEquals("goodbye", response);
+  }
+
+  @Test
+  public void testHashCode() throws IOException {
+    TestError error = new TestError();
+    error.hashCode();
+  }
+
+  @Test
+  public void testEcho() throws IOException {
+    TestRecord record = new TestRecord();
+    record.setName("foo");
+    record.setKind(Kind.BAR);
+    record.setHash(new MD5(new byte[]{0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5}));
+    TestRecord echoed = proxy.echo(record);
+    assertEquals(record, echoed);
+    assertEquals(record.hashCode(), echoed.hashCode());
+  }
+
+  @Test
+  public void testAdd() throws IOException {
+    int result = proxy.add(1, 2);
+    assertEquals(3, result);
+  }
+
+  @Test
+  public void testEchoBytes() throws IOException {
+    Random random = new Random();
+    int length = random.nextInt(1024*16);
+    ByteBuffer data = ByteBuffer.allocate(length);
+    random.nextBytes(data.array());
+    data.flip();
+    ByteBuffer echoed = proxy.echoBytes(data);
+    assertEquals(data, echoed);
+  }
+
+  @Test
+  public void testEmptyEchoBytes() throws IOException {
+    ByteBuffer data = ByteBuffer.allocate(0);
+    ByteBuffer echoed = proxy.echoBytes(data);
+    data.flip();
+    assertEquals(data, echoed);
+  }
+
+  @Test
+  public void testError() throws IOException {
+    TestError error = null;
+    try {
+      proxy.error();
+    } catch (TestError e) {
+      error = e;
+    }
+    assertNotNull(error);
+    assertEquals("an error", error.getMessage$().toString());
+  }
+
+  @Test
+  public void testUndeclaredError() throws Exception {
+    throwUndeclaredError = true;
+    RuntimeException error = null;
+    try {
+      proxy.error();
+    } catch (RuntimeException e) {
+      error = e;
+    } finally {
+      throwUndeclaredError = false;
+    }
+    assertNotNull(error);
+    assertTrue(error.toString().contains("foo"));
+  }
+
+
+  @Test
+  public void testOneWay() throws IOException {
+    ackCount = 0;
+    proxy.ack();
+    proxy.hello("foo");                           // intermix normal req
+    proxy.ack();
+    try { Thread.sleep(100); } catch (InterruptedException e) {}
+    assertEquals(2, ackCount);
+  }
+  
+  @Test
+  public void testRepeatedAccess() throws Exception {
+    for (int x = 0; x < 1000; x++) {
+      proxy.hello("hi!");
+    }
+  }
+  
+  @Test(expected = Exception.class)
+  public void testConnectionRefusedOneWay() throws IOException {
+    Transceiver client = new HttpTransceiver(new URL("http://localhost:4444"));
+    SpecificRequestor req = new SpecificRequestor(Simple.class, client);
+    addRpcPlugins(req);
+    Simple proxy = SpecificRequestor.getClient(Simple.class, req);
+    proxy.ack();
+  }
+
+  /** Construct and use a protocol whose "hello" method has an extra
+      argument to check that schema is sent to parse request. */
+  @Test
+  public void testParamVariation() throws Exception {
+    Protocol protocol = new Protocol("Simple", "org.apache.avro.test");
+    List<Schema.Field> fields = new ArrayList<Schema.Field>();
+    fields.add(new Schema.Field("extra", Schema.create(Schema.Type.BOOLEAN),
+                   null, null));
+    fields.add(new Schema.Field("greeting", Schema.create(Schema.Type.STRING),
+                   null, null));
+    Protocol.Message message =
+      protocol.createMessage("hello",
+                             null /* doc */,
+                             Schema.createRecord(fields),
+                             Schema.create(Schema.Type.STRING),
+                             Schema.createUnion(new ArrayList<Schema>()));
+    protocol.getMessages().put("hello", message);
+    Transceiver t = createTransceiver();
+    try {
+      GenericRequestor r = new GenericRequestor(protocol, t);
+      addRpcPlugins(r);
+      GenericRecord params = new GenericData.Record(message.getRequest());
+      params.put("extra", Boolean.TRUE);
+      params.put("greeting", "bob");
+      String response = r.request("hello", params).toString();
+      assertEquals("goodbye", response);
+    } finally {
+      t.close();
+    }
+  }
+
+  @AfterClass
+  public static void testHandshakeCount() throws IOException {
+    monitor.assertHandshake();
+  }
+
+  @AfterClass
+  public static void testStopServer() throws IOException {
+    client.close();
+    server.close();
+    server = null;
+  }
+  
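+  /** RPC plugin that counts handshakes and checks that each client protocol is sent only once. */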
+  public class HandshakeMonitor extends RPCPlugin {
+    
+    private int handshakes;
+    private HashSet<String> seenProtocols = new HashSet<String>();
+    
+    @Override
+    public void serverConnecting(RPCContext context) {
+      handshakes++;
+      int expected = getExpectedHandshakeCount();
+      if (expected > 0 && handshakes > expected) {
+        throw new IllegalStateException("Expected number of protocol negotiation handshakes exceeded: expected "+expected+", got "+handshakes);
+      }
+
+      // check that a given client protocol is only sent once
+      String clientProtocol =
+        context.getHandshakeRequest().clientProtocol;
+      if (clientProtocol != null) {
+        assertFalse(seenProtocols.contains(clientProtocol));
+        seenProtocols.add(clientProtocol);
+      }
+    }
+    
+    public void assertHandshake() {
+      int expected = getExpectedHandshakeCount();
+      if (expected != REPEATING) {
+        assertEquals("Unexpected number of handshakes.", expected, handshakes);
+      }
+    }
+  }
+  
+  protected int getExpectedHandshakeCount() {
+    return 3;
+  }
+
+  public static class InteropTest {
+
+    @Test
+    public void testClient() throws Exception {
+      for (File f : SERVER_PORTS_DIR.listFiles()) {
+        LineNumberReader reader = new LineNumberReader(new FileReader(f));
+        int port = Integer.parseInt(reader.readLine());
+        System.out.println("Validating java client to "+
+            f.getName()+" - " + port);
+        Transceiver client = new SocketTransceiver(
+            new InetSocketAddress("localhost", port));
+        proxy = (Simple)SpecificRequestor.getClient(Simple.class, client);
+        TestProtocolSpecific proto = new TestProtocolSpecific();
+        proto.testHello();
+        proto.testEcho();
+        proto.testEchoBytes();
+        proto.testError();
+        System.out.println("Done! Validation java client to "+
+            f.getName()+" - " + port);
+      }
+    }
+
+    /**
+     * Starts the RPC server and writes its port to a file under
+     * SERVER_PORTS_DIR so interop clients can find it.
+     */
+    public static void main(String[] args) throws Exception {
+      SocketServer server = new SocketServer(
+          new SpecificResponder(Simple.class, new TestImpl()),
+          new InetSocketAddress(0));
+      server.start();
+      File portFile = new File(SERVER_PORTS_DIR, "java-port");
+      FileWriter w = new FileWriter(portFile);
+      w.write(Integer.toString(server.getPort()));
+      w.close();
+    }
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecificMeta.java b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecificMeta.java
new file mode 100644
index 0000000..a2dd575
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecificMeta.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import java.net.InetSocketAddress;
+
+import org.apache.avro.ipc.Requestor;
+import org.apache.avro.ipc.Responder;
+import org.apache.avro.ipc.Server;
+import org.apache.avro.ipc.SocketServer;
+import org.apache.avro.ipc.SocketTransceiver;
+import org.apache.avro.ipc.Transceiver;
+
+
+public class TestProtocolSpecificMeta extends TestProtocolSpecific {
+  
+  @Override
+  public Server createServer(Responder testResponder) throws Exception {
+    testResponder.addRPCPlugin(new RPCMetaTestPlugin("key1"));
+    testResponder.addRPCPlugin(new RPCMetaTestPlugin("key2"));
+    return new SocketServer(testResponder, new InetSocketAddress(0));
+  }
+  
+  @Override
+  public Transceiver createTransceiver() throws Exception {
+    return new SocketTransceiver(new InetSocketAddress(server.getPort()));
+  }
+  
+  public void addRpcPlugins(Requestor req){
+    req.addRPCPlugin(new RPCMetaTestPlugin("key1"));
+    req.addRPCPlugin(new RPCMetaTestPlugin("key2"));
+  }
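+
+  // Editor's sketch (hypothetical, not part of upstream): the rough shape of a
+  // metadata plugin like RPCMetaTestPlugin -- stamp a marker into the per-call
+  // request metadata on the client side. Assumes RPCContext's
+  // Map<String, ByteBuffer> call-metadata API.
+  static class ExampleMetaPlugin extends org.apache.avro.ipc.RPCPlugin {
+    private final String key;
+    ExampleMetaPlugin(String key) { this.key = key; }
+    @Override
+    public void clientSendRequest(org.apache.avro.ipc.RPCContext context) {
+      context.requestCallMeta().put(key, java.nio.ByteBuffer.wrap(new byte[] { 1 }));
+    }
+  }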
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/TestSchema.java b/lang/java/ipc/src/test/java/org/apache/avro/TestSchema.java
new file mode 100644
index 0000000..b19b138
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/TestSchema.java
@@ -0,0 +1,1024 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.avro.Schema.Field;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.compiler.specific.TestSpecificCompiler;
+import org.apache.avro.data.Json;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.util.Utf8;
+import org.codehaus.jackson.JsonNode;
+import org.junit.Test;
+
+public class TestSchema {
+
+  public static final String LISP_SCHEMA = "{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
+            +"{\"name\":\"value\", \"type\":[\"null\", \"string\","
+            +"{\"type\": \"record\", \"name\": \"Cons\", \"fields\": ["
+            +"{\"name\":\"car\", \"type\":\"Lisp\"},"
+            +"{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}";
+
+  public static final String BASIC_ENUM_SCHEMA = "{\"type\":\"enum\", \"name\":\"Test\","
+            +"\"symbols\": [\"A\", \"B\"]}";
+
+  public static final String SCHEMA_WITH_DOC_TAGS = "{\n"
+      + "  \"type\": \"record\",\n"
+      + "  \"name\": \"outer_record\",\n"
+      + "  \"doc\": \"This is not a world record.\",\n"
+      + "  \"fields\": [\n"
+      + "    { \"type\": { \"type\": \"fixed\", \"doc\": \"Very Inner Fixed\", "
+      + "                  \"name\": \"very_inner_fixed\", \"size\": 1 },\n"
+      + "      \"doc\": \"Inner Fixed\", \"name\": \"inner_fixed\" },\n"
+      + "    { \"type\": \"string\",\n"
+      + "      \"name\": \"inner_string\",\n"
+      + "      \"doc\": \"Inner String\" },\n"
+      + "    { \"type\": { \"type\": \"enum\", \"doc\": \"Very Inner Enum\", \n"
+      + "                  \"name\": \"very_inner_enum\", \n"
+      + "                  \"symbols\": [ \"A\", \"B\", \"C\" ] },\n"
+      + "      \"doc\": \"Inner Enum\", \"name\": \"inner_enum\" },\n"
+      + "    { \"type\": [\"string\", \"int\"], \"doc\": \"Inner Union\", \n"
+      + "      \"name\": \"inner_union\" }\n" + "  ]\n" + "}\n";
+
+  private static final int COUNT =
+    Integer.parseInt(System.getProperty("test.count", "30"));
+
+  @Test
+  public void testNull() throws Exception {
+    assertEquals(Schema.create(Type.NULL), Schema.parse("\"null\""));
+    assertEquals(Schema.create(Type.NULL), Schema.parse("{\"type\":\"null\"}"));
+    check("\"null\"", "null", null);
+  }
+
+  @Test
+  public void testBoolean() throws Exception {
+    assertEquals(Schema.create(Type.BOOLEAN), Schema.parse("\"boolean\""));
+    assertEquals(Schema.create(Type.BOOLEAN),
+                 Schema.parse("{\"type\":\"boolean\"}"));
+    check("\"boolean\"", "true", Boolean.TRUE);
+  }
+
+  @Test
+  public void testString() throws Exception {
+    assertEquals(Schema.create(Type.STRING), Schema.parse("\"string\""));
+    assertEquals(Schema.create(Type.STRING),
+                 Schema.parse("{\"type\":\"string\"}"));
+    check("\"string\"", "\"foo\"", new Utf8("foo"));
+  }
+
+  @Test
+  public void testBytes() throws Exception {
+    assertEquals(Schema.create(Type.BYTES), Schema.parse("\"bytes\""));
+    assertEquals(Schema.create(Type.BYTES),
+                 Schema.parse("{\"type\":\"bytes\"}"));
+    check("\"bytes\"", "\"\\u0000ABC\\u00FF\"",
+          ByteBuffer.wrap(new byte[]{0,65,66,67,-1}));
+  }
+
+  @Test
+  public void testInt() throws Exception {
+    assertEquals(Schema.create(Type.INT), Schema.parse("\"int\""));
+    assertEquals(Schema.create(Type.INT), Schema.parse("{\"type\":\"int\"}"));
+    check("\"int\"", "9", new Integer(9));
+  }
+
+  @Test
+  public void testLong() throws Exception {
+    assertEquals(Schema.create(Type.LONG), Schema.parse("\"long\""));
+    assertEquals(Schema.create(Type.LONG), Schema.parse("{\"type\":\"long\"}"));
+    check("\"long\"", "11", new Long(11));
+  }
+
+  @Test
+  public void testFloat() throws Exception {
+    assertEquals(Schema.create(Type.FLOAT), Schema.parse("\"float\""));
+    assertEquals(Schema.create(Type.FLOAT),
+                 Schema.parse("{\"type\":\"float\"}"));
+    check("\"float\"", "1.1", new Float(1.1));
+    checkDefault("\"float\"", "\"NaN\"", Float.NaN);
+    checkDefault("\"float\"", "\"Infinity\"", Float.POSITIVE_INFINITY);
+    checkDefault("\"float\"", "\"-Infinity\"", Float.NEGATIVE_INFINITY);
+  }
+
+  @Test
+  public void testDouble() throws Exception {
+    assertEquals(Schema.create(Type.DOUBLE), Schema.parse("\"double\""));
+    assertEquals(Schema.create(Type.DOUBLE),
+                 Schema.parse("{\"type\":\"double\"}"));
+    check("\"double\"", "1.2", new Double(1.2));
+    checkDefault("\"double\"", "\"NaN\"", Double.NaN);
+    checkDefault("\"double\"", "\"Infinity\"", Double.POSITIVE_INFINITY);
+    checkDefault("\"double\"", "\"-Infinity\"", Double.NEGATIVE_INFINITY);
+  }
+
+  @Test
+  public void testArray() throws Exception {
+    String json = "{\"type\":\"array\", \"items\": \"long\"}";
+    Schema schema = Schema.parse(json);
+    Collection<Long> array = new GenericData.Array<Long>(1, schema);
+    array.add(1L);
+    check(json, "[1]", array);
+    array = new ArrayList<Long>(1);
+    array.add(1L);
+    check(json, "[1]", array);
+    checkParseError("{\"type\":\"array\"}");      // items required
+  }
+
+  @Test
+  public void testMap() throws Exception {
+    HashMap<Utf8,Long> map = new HashMap<Utf8,Long>();
+    map.put(new Utf8("a"), 1L);
+    check("{\"type\":\"map\", \"values\":\"long\"}", "{\"a\":1}", map);
+    checkParseError("{\"type\":\"map\"}");        // values required
+  }
+  
+  @Test
+  public void testUnionMap() throws Exception {
+    String unionMapSchema = "{\"name\":\"foo\", \"type\":\"record\"," +
+    		" \"fields\":[ {\"name\":\"mymap\", \"type\":" +
+    		"   [{\"type\":\"map\", \"values\":" +
+    		"      [\"int\",\"long\",\"float\",\"string\"]}," +
+    		"    \"null\"]" +
+    		"   }]" +
+    		" }";
+    check(unionMapSchema, true);
+  }
+
+  @Test
+  public void testRecord() throws Exception {
+    String recordJson = "{\"type\":\"record\", \"name\":\"Test\", \"fields\":"
+      +"[{\"name\":\"f\", \"type\":\"long\", \"foo\":\"bar\"}]}";
+    Schema schema = Schema.parse(recordJson);
+
+    GenericData.Record record = new GenericData.Record(schema);
+    record.put("f", 11L);
+    check(recordJson, "{\"f\":11}", record, false);
+
+    // test field props
+    assertEquals("bar", schema.getField("f").getProp("foo"));
+    assertEquals("bar", Schema.parse(schema.toString())
+                 .getField("f").getProp("foo"));
+    schema.getField("f").addProp("baz", "boo");
+    assertEquals("boo", schema.getField("f").getProp("baz"));
+
+    checkParseError("{\"type\":\"record\"}");
+    checkParseError("{\"type\":\"record\",\"name\":\"X\"}");
+    checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":\"Y\"}");
+    checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":"
+                    +"[{\"name\":\"f\"}]}");       // no type
+    checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":"
+                    +"[{\"type\":\"long\"}]}");    // no name
+    // check invalid record names
+    checkParseError("{\"type\":\"record\",\"name\":\"1X\",\"fields\":[]}");
+    checkParseError("{\"type\":\"record\",\"name\":\"X$\",\"fields\":[]}");
+    // check invalid field names
+    checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":["
+                    +"{\"name\":\"1f\",\"type\":\"int\"}]}");
+    checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":["
+                    +"{\"name\":\"f$\",\"type\":\"int\"}]}");
+    checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":["
+                    +"{\"name\":\"f.g\",\"type\":\"int\"}]}");
+  }
+
+  @Test public void testInvalidNameTolerance() {
+    Schema.parse("{\"type\":\"record\",\"name\":\"1X\",\"fields\":[]}", false);
+    Schema.parse("{\"type\":\"record\",\"name\":\"X-\",\"fields\":[]}", false);
+    Schema.parse("{\"type\":\"record\",\"name\":\"X$\",\"fields\":[]}", false);
+  }
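+
+  // The boolean argument to Schema.parse above disables name validation; the
+  // strict parser rejects these same malformed names (see the invalid-name
+  // checkParseError calls in testRecord).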
+
+  @Test
+  public void testMapInRecord() throws Exception {
+    String json = "{\"type\":\"record\", \"name\":\"Test\", \"fields\":"
+      +"[{\"name\":\"f\", \"type\": {\"type\":\"map\", \"values\":\"long\"}}]}";
+    Schema schema = Schema.parse(json);
+
+    HashMap<Utf8,Long> map = new HashMap<Utf8,Long>();
+    map.put(new Utf8("a"), 1L);
+    GenericData.Record record = new GenericData.Record(schema);
+    record.put("f", map);
+    check(json, "{\"f\":{\"a\":1}}", record, false);
+  }
+
+
+  @Test
+  public void testEnum() throws Exception {
+    check(BASIC_ENUM_SCHEMA, "\"B\"",
+          new GenericData.EnumSymbol(Schema.parse(BASIC_ENUM_SCHEMA), "B"),
+          false);
+    checkParseError("{\"type\":\"enum\"}");        // symbols required
+    checkParseError("{\"type\":\"enum\",\"symbols\": [\"X\"]}"); // name reqd
+    // check no duplicate symbols
+    checkParseError("{\"type\":\"enum\",\"name\":\"X\",\"symbols\":[\"X\",\"X\"]}");
+    // check no invalid symbols
+    checkParseError("{\"type\":\"enum\",\"name\":\"X\",\"symbols\":[\"1X\"]}");
+    checkParseError("{\"type\":\"enum\",\"name\":\"X\",\"symbols\":[\"X$\"]}");
+    checkParseError("{\"type\":\"enum\",\"name\":\"X\",\"symbols\":[\"X.Y\"]}");
+  }
+
+  @Test
+  public void testFixed() throws Exception {
+    String json = "{\"type\": \"fixed\", \"name\":\"Test\", \"size\": 1}";
+    Schema schema = Schema.parse(json);
+    check(json, "\"a\"",
+          new GenericData.Fixed(schema, new byte[]{(byte)'a'}), false);
+    checkParseError("{\"type\":\"fixed\"}");        // size required
+  }
+
+  @Test
+  public void testRecursive() throws Exception {
+    check("{\"type\": \"record\", \"name\": \"Node\", \"fields\": ["
+          +"{\"name\":\"label\", \"type\":\"string\"},"
+          +"{\"name\":\"children\", \"type\":"
+          +"{\"type\": \"array\", \"items\": \"Node\" }}]}",
+          false);
+  }
+
+  @Test
+  public void testRecursiveEquals() throws Exception {
+    String jsonSchema = "{\"type\":\"record\", \"name\":\"List\", \"fields\": ["
+      +"{\"name\":\"next\", \"type\":\"List\"}]}";
+    Schema s1 = Schema.parse(jsonSchema);
+    Schema s2 = Schema.parse(jsonSchema);
+    assertEquals(s1, s2);
+    s1.hashCode();                                // test no stackoverflow
+  }
+
+  /** Test that equals() and hashCode() don't require exponential time on
+   *  certain pathological schemas. */
+  @Test
+  public void testSchemaExplosion() throws Exception {
+    for (int i = 1; i < 15; i++) {                // 15 is big enough to trigger
+      // create a list of records, each with a single field whose type is a
+      // union of all of the records.
+      List<Schema> recs = new ArrayList<Schema>();
+      for (int j = 0; j < i; j++)
+        recs.add(Schema.createRecord(""+(char)('A'+j), null, null, false));
+      for (Schema s : recs) {
+        Schema union = Schema.createUnion(recs);
+        Field f = new Field("x", union, null, null);
+        List<Field> fields = new ArrayList<Field>();
+        fields.add(f);
+        s.setFields(fields);
+      }
+      // check that equals and hashcode are correct and complete in a
+      // reasonable amount of time
+      for (Schema s1 : recs) {
+        Schema s2 = Schema.parse(s1.toString());
+        assertEquals(s1.hashCode(), s2.hashCode()); 
+        assertEquals(s1, s2);
+      }
+    }                 
+  }
+
+  @Test
+  public void testLisp() throws Exception {
+    check(LISP_SCHEMA, false);
+  }
+
+  @Test
+  public void testUnion() throws Exception {
+    check("[\"string\", \"long\"]", false);
+    checkDefault("[\"double\", \"long\"]", "1.1", new Double(1.1));
+
+    // test that erroneous default values cause errors
+    for (String type : new String[]
+          {"int", "long", "float", "double", "string", "bytes", "boolean"}) {
+      checkValidateDefaults("[\""+type+"\", \"null\"]", "null"); // schema parse time
+      boolean error = false;
+      try {
+        checkDefault("[\""+type+"\", \"null\"]", "null", 0); // read time
+      } catch (AvroTypeException e) {
+        error = true;
+      }
+      assertTrue(error);
+      checkValidateDefaults("[\"null\", \""+type+"\"]", "0");  // schema parse time
+      error = false;
+      try {
+        checkDefault("[\"null\", \""+type+"\"]", "0", null); // read time
+      } catch (AvroTypeException e) {
+        error = true;
+      }
+      assertTrue(error);
+    }
+
+    // check union JSON encoding: non-null branches are wrapped in a
+    // single-entry object keyed by the branch's type name
+    String record = "{\"type\":\"record\",\"name\":\"Foo\",\"fields\":[]}";
+    String fixed = "{\"type\":\"fixed\",\"name\":\"Bar\",\"size\": 1}";
+    String enu = "{\"type\":\"enum\",\"name\":\"Baz\",\"symbols\": [\"X\"]}";
+    Schema union = Schema.parse("[\"null\",\"string\","
+                                +record+","+ enu+","+fixed+"]");
+    checkJson(union, null, "null");
+    checkJson(union, new Utf8("foo"), "{\"string\":\"foo\"}");
+    checkJson(union,
+              new GenericData.Record(Schema.parse(record)),
+              "{\"Foo\":{}}");
+    checkJson(union,
+              new GenericData.Fixed(Schema.parse(fixed), new byte[]{(byte)'a'}),
+              "{\"Bar\":\"a\"}");
+    checkJson(union,
+              new GenericData.EnumSymbol(Schema.parse(enu), "X"),
+              "{\"Baz\":\"X\"}");
+  }
+
+  @Test
+  public void testComplexUnions() throws Exception {
+    // one of each unnamed type and two of named types
+    String partial = "[\"int\", \"long\", \"float\", \"double\", \"boolean\", \"bytes\"," +
+    " \"string\", {\"type\":\"array\", \"items\": \"long\"}," +
+    " {\"type\":\"map\", \"values\":\"long\"}";
+    String namedTypes = ", {\"type\":\"record\",\"name\":\"Foo\",\"fields\":[]}," +
+    " {\"type\":\"fixed\",\"name\":\"Bar\",\"size\": 1}," +
+    " {\"type\":\"enum\",\"name\":\"Baz\",\"symbols\": [\"X\"]}";
+    
+    String namedTypes2 = ", {\"type\":\"record\",\"name\":\"Foo2\",\"fields\":[]}," +
+    " {\"type\":\"fixed\",\"name\":\"Bar2\",\"size\": 1}," +
+    " {\"type\":\"enum\",\"name\":\"Baz2\",\"symbols\": [\"X\"]}";
+    
+    check(partial + namedTypes + "]", false);
+    check(partial + namedTypes + namedTypes2 + "]", false); 
+    checkParseError(partial + namedTypes + namedTypes + "]");
+    
+    // fail with two branches of the same unnamed type
+    checkUnionError(new Schema[] {Schema.create(Type.INT), Schema.create(Type.INT)});
+    checkUnionError(new Schema[] {Schema.create(Type.LONG), Schema.create(Type.LONG)});
+    checkUnionError(new Schema[] {Schema.create(Type.FLOAT), Schema.create(Type.FLOAT)});
+    checkUnionError(new Schema[] {Schema.create(Type.DOUBLE), Schema.create(Type.DOUBLE)});
+    checkUnionError(new Schema[] {Schema.create(Type.BOOLEAN), Schema.create(Type.BOOLEAN)});
+    checkUnionError(new Schema[] {Schema.create(Type.BYTES), Schema.create(Type.BYTES)});
+    checkUnionError(new Schema[] {Schema.create(Type.STRING), Schema.create(Type.STRING)});
+    checkUnionError(new Schema[] {Schema.createArray(Schema.create(Type.INT)), 
+        Schema.createArray(Schema.create(Type.INT))});
+    checkUnionError(new Schema[] {Schema.createMap(Schema.create(Type.INT)), 
+        Schema.createMap(Schema.create(Type.INT))});
+    
+    List<String> symbols = new ArrayList<String>();
+    symbols.add("NOTHING");
+    
+    // succeed with two branches of the same named type, if different names
+    Schema u;
+    u = buildUnion(new Schema[] {
+        Schema.parse("{\"type\":\"record\",\"name\":\"x.A\",\"fields\":[]}"),
+        Schema.parse("{\"type\":\"record\",\"name\":\"y.A\",\"fields\":[]}")});
+    check(u.toString(), false);
+
+    u = buildUnion(new Schema[] {
+        Schema.parse
+        ("{\"type\":\"enum\",\"name\":\"x.A\",\"symbols\":[\"X\"]}"),
+        Schema.parse
+        ("{\"type\":\"enum\",\"name\":\"y.A\",\"symbols\":[\"Y\"]}")});
+    check(u.toString(), false);
+    
+    u = buildUnion(new Schema[] {
+        Schema.parse("{\"type\":\"fixed\",\"name\":\"x.A\",\"size\":4}"),
+        Schema.parse("{\"type\":\"fixed\",\"name\":\"y.A\",\"size\":8}")});
+    check(u.toString(), false);
+    
+    // fail with two branches of the same named type, but same names
+    checkUnionError(new Schema[] {Schema.createRecord("Foo", null, "org.test", false),
+        Schema.createRecord("Foo", null, "org.test", false)});
+    checkUnionError(new Schema[] {Schema.createEnum("Bar", null, "org.test", symbols),
+        Schema.createEnum("Bar", null, "org.test", symbols)});
+    checkUnionError(new Schema[] {Schema.createFixed("Baz", null, "org.test", 2),
+        Schema.createFixed("Baz", null, "org.test", 1)});
+    
+    Schema union = buildUnion(new Schema[] {Schema.create(Type.INT)});
+    // fail if creating a union of a union
+    checkUnionError(new Schema[] {union});
+  }
+  
+  @Test
+  public void testComplexProp() throws Exception {
+    String json = "{\"type\":\"null\", \"foo\": [0]}";
+    Schema s = Schema.parse(json);
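+    // getProp() only exposes string-valued props, so the JSON-array prop
+    // "foo" reads back as null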
+    assertEquals(null, s.getProp("foo"));
+  }
+  
+  @Test public void testPropOrdering() throws Exception {
+    String json = "{\"type\":\"int\",\"z\":\"c\",\"yy\":\"b\",\"x\":\"a\"}";
+    Schema s = Schema.parse(json);
+    assertEquals(json, s.toString());
+  }
+
+  @Test
+  public void testParseInputStream() throws IOException {
+    Schema s = Schema.parse(
+        new ByteArrayInputStream("\"boolean\"".getBytes("UTF-8")));
+    assertEquals(Schema.parse("\"boolean\""), s);
+  }
+
+  @Test
+  public void testNamespaceScope() throws Exception {
+    String z = "{\"type\":\"record\",\"name\":\"Z\",\"fields\":[]}";
+    String y = "{\"type\":\"record\",\"name\":\"q.Y\",\"fields\":["
+      +"{\"name\":\"f\",\"type\":"+z+"}]}";
+    String x = "{\"type\":\"record\",\"name\":\"p.X\",\"fields\":["
+      +"{\"name\":\"f\",\"type\":"+y+"},"
+      +"{\"name\":\"g\",\"type\":"+z+"}"
+      +"]}";
+    Schema xs = Schema.parse(x);
+    Schema ys = xs.getField("f").schema();
+    assertEquals("p.Z", xs.getField("g").schema().getFullName());
+    assertEquals("q.Z", ys.getField("f").schema().getFullName());
+  }
+
+  @Test
+  public void testNamespaceNesting() throws Exception {
+    String y = "{\"type\":\"record\",\"name\":\"y.Y\",\"fields\":["
+      +"{\"name\":\"f\",\"type\":\"x.X\"}]}";
+    String x = "{\"type\":\"record\",\"name\":\"x.X\",\"fields\":["
+      +"{\"name\":\"f\",\"type\":"+y+"}"
+      +"]}";
+    Schema xs = Schema.parse(x);
+    assertEquals(xs, Schema.parse(xs.toString()));
+  }
+
+  @Test
+  public void testNestedNullNamespace() throws Exception {
+    Schema inner =
+      Schema.parse("{\"type\":\"record\",\"name\":\"Inner\",\"fields\":[]}");
+    Schema outer = Schema.createRecord("Outer", null, "space", false);
+    outer.setFields(Arrays.asList(new Field("f", inner, null, null)));
+    assertEquals(outer, Schema.parse(outer.toString()));
+  }
+
+  @Test
+  public void testNestedNullNamespaceReferencing() {
+    Schema inner =
+        Schema.parse("{\"type\":\"record\",\"name\":\"Inner\",\"fields\":[]}");
+    Schema outer = Schema.createRecord("Outer", null, "space", false);
+    outer.setFields(Arrays.asList(new Field("f1", inner, null, null),
+                                  new Field("f2", inner, null, null)));
+    assertEquals(outer, Schema.parse(outer.toString()));
+  }
+
+  @Test
+  public void testNestedNullNamespaceReferencingWithUnion() {
+    Schema inner =
+        Schema.parse("{\"type\":\"record\",\"name\":\"Inner\",\"fields\":[]}");
+    Schema innerUnion = Schema.createUnion(Arrays.asList(inner, Schema.create(Type.NULL)));
+    Schema outer = Schema.createRecord("Outer", null, "space", false);
+    outer.setFields(Arrays.asList(new Field("f1", innerUnion, null, null),
+                                  new Field("f2", innerUnion, null, null)));
+    assertEquals(outer, Schema.parse(outer.toString()));
+  }
+
+  @Test
+  public void testNestedNonNullNamespace1() throws Exception {
+    Schema inner1 = Schema.createEnum("InnerEnum", null, "space", Arrays.asList("x"));
+    Schema inner2 = Schema.parse("{\"type\":\"record\",\"namespace\":\"space\",\"name\":"
+      +"\"InnerRecord\",\"fields\":[]}");
+    Schema nullOuter = Schema.createRecord("Outer", null, null, false);
+    nullOuter.setFields(Arrays.asList(new Field("f1", inner1, null, null),
+                                      new Field("f2", inner2, null, null)));
+    assertEquals(nullOuter, Schema.parse(nullOuter.toString()));
+  }
+
+  @Test
+  public void testNestedNonNullNamespace2() throws Exception {
+    Schema inner1 = Schema.createFixed("InnerFixed", null, "space", 1);
+    Schema inner2 = Schema.parse("{\"type\":\"record\",\"namespace\":\"space\",\"name\":"
+      +"\"InnerRecord\",\"fields\":[]}");
+    Schema nullOuter = Schema.createRecord("Outer", null, null, false);
+    nullOuter.setFields(Arrays.asList(new Field("f1", inner1, null, null),
+                                      new Field("f2", inner2, null, null)));
+    assertEquals(nullOuter, Schema.parse(nullOuter.toString()));
+  }
+
+  @Test
+  public void testNullNamespaceAlias() throws Exception {
+    Schema s =
+      Schema.parse("{\"type\":\"record\",\"name\":\"Z\",\"fields\":[]}");
+    Schema t =
+      Schema.parse("{\"type\":\"record\",\"name\":\"x.Y\",\"aliases\":[\".Z\"],"
+                   +"\"fields\":[]}");
+    Schema u = Schema.applyAliases(s, t);
+    assertEquals("x.Y", u.getFullName());
+  }
+
+  @Test
+  public void testNullPointer() throws Exception {
+    String recordJson = "{\"type\":\"record\", \"name\":\"Test\", \"fields\":"
+      +"[{\"name\":\"x\", \"type\":\"string\"}]}";
+    Schema schema = Schema.parse(recordJson);
+    GenericData.Record record = new GenericData.Record(schema);
+    try {
+      checkBinary(schema, record,
+                  new GenericDatumWriter<Object>(),
+                  new GenericDatumReader<Object>());
+      fail("Should have thrown a NullPointerException for the unset field.");
+    } catch (NullPointerException e) {
+      assertEquals("null of string in field x of Test", e.getMessage());
+    }
+  }
+
+  private static void checkParseError(String json) {
+    try {
+      Schema.parse(json);
+    } catch (SchemaParseException e) {
+      return;
+    }
+    fail("Should not have parsed: "+json);
+  }
+
+  private static void checkUnionError(Schema[] branches) {
+    List<Schema> branchList = Arrays.asList(branches);
+    try {
+      Schema.createUnion(branchList);
+      fail("Union should not have constructed from: " + branchList);
+    } catch (AvroRuntimeException are) {
+      return;
+    }
+  }
+
+  private static Schema buildUnion(Schema[] branches) {
+    List<Schema> branchList = Arrays.asList(branches);
+    return Schema.createUnion(branchList);
+  }
+
+  /**
+   * Makes sure that "doc" tags are transcribed in the schemas.
+   * Note that there are docs both for fields and for the records
+   * themselves.
+   */
+  @Test
+  public void testDocs() {
+    Schema schema = Schema.parse(SCHEMA_WITH_DOC_TAGS);
+    assertEquals("This is not a world record.", schema.getDoc());
+    assertEquals("Inner Fixed", schema.getField("inner_fixed").doc());
+    assertEquals("Very Inner Fixed", schema.getField("inner_fixed").schema().getDoc());
+    assertEquals("Inner String", schema.getField("inner_string").doc());
+    assertEquals("Inner Enum", schema.getField("inner_enum").doc());
+    assertEquals("Very Inner Enum", schema.getField("inner_enum").schema().getDoc());
+    assertEquals("Inner Union", schema.getField("inner_union").doc());
+  }
+
+  @Test
+  public void testFieldDocs() {
+    String schemaStr = "{\"name\": \"Rec\",\"type\": \"record\",\"fields\" : ["+
+      "{\"name\": \"f\", \"type\": \"int\", \"doc\": \"test\"}]}";
+
+    // check field doc is parsed correctly
+    Schema schema = Schema.parse(schemaStr);
+    assertEquals("test", schema.getField("f").doc());
+    
+    // check print/read cycle preserves field doc
+    schema = Schema.parse(schema.toString());
+    assertEquals("test", schema.getField("f").doc());
+  }
+
+  @Test
+  public void testAliases() throws Exception {
+    String t1 = "{\"type\":\"record\",\"name\":\"a.b\",\"fields\":["
+      +"{\"name\":\"f\",\"type\":\"long\"},"
+      +"{\"name\":\"h\",\"type\":\"int\"}]}";
+    String t2 = "{\"type\":\"record\",\"name\":\"x.y\",\"aliases\":[\"a.b\"],"
+      +"\"fields\":[{\"name\":\"g\",\"type\":\"long\",\"aliases\":[\"f\"]},"
+      +"{\"name\":\"h\",\"type\":\"int\"}]}";
+    Schema s1 = Schema.parse(t1);
+    Schema s2 = Schema.parse(t2);
+
+    assertEquals(s1.getAliases(), Collections.emptySet());
+    assertEquals(s1.getField("f").aliases(), Collections.emptySet());
+    assertEquals(s2.getAliases(), Collections.singleton("a.b"));
+    assertEquals(s2.getField("g").aliases(), Collections.singleton("f"));
+
+    Schema s3 = Schema.applyAliases(s1,s2);
+    assertFalse(s2 == s3);
+    assertEquals(s2, s3);
+
+    t1 = "{\"type\":\"enum\",\"name\":\"a.b\","
+      +"\"symbols\":[\"x\"]}";
+    t2 = "{\"type\":\"enum\",\"name\":\"a.c\",\"aliases\":[\"b\"],"
+      +"\"symbols\":[\"x\"]}";
+    s1 = Schema.parse(t1);
+    s2 = Schema.parse(t2);
+    s3 = Schema.applyAliases(s1,s2);
+    assertFalse(s2 == s3);
+    assertEquals(s2, s3);
+
+    t1 = "{\"type\":\"fixed\",\"name\":\"a\","
+      +"\"size\": 5}";
+    t2 = "{\"type\":\"fixed\",\"name\":\"b\",\"aliases\":[\"a\"],"
+      +"\"size\": 5}";
+    s1 = Schema.parse(t1);
+    s2 = Schema.parse(t2);
+    s3 = Schema.applyAliases(s1,s2);
+    assertFalse(s2 == s3);
+    assertEquals(s2, s3);
+  }
+
+  private static void check(String schemaJson, String defaultJson,
+                            Object defaultValue) throws Exception {
+    check(schemaJson, defaultJson, defaultValue, true);
+  }
+  private static void check(String schemaJson, String defaultJson,
+                            Object defaultValue, boolean induce)
+    throws Exception {
+    check(schemaJson, induce);
+    checkDefault(schemaJson, defaultJson, defaultValue);
+  }
+
+  private static void check(String jsonSchema, boolean induce)
+    throws Exception {
+    Schema schema = Schema.parse(jsonSchema);
+    checkProp(schema);
+    Object reuse = null;
+    for (Object datum : new RandomData(schema, COUNT)) {
+
+      if (induce) {
+        Schema induced = GenericData.get().induce(datum);
+        assertEquals("Induced schema does not match.", schema, induced);
+      }
+        
+      assertTrue("Datum does not validate against schema "+datum,
+                 GenericData.get().validate(schema, datum));
+
+      checkBinary(schema, datum,
+                  new GenericDatumWriter<Object>(),
+                  new GenericDatumReader<Object>(), null);
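+      // the second pass feeds the previously decoded object back in as
+      // scratch space, exercising the DatumReader reuse path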
+      reuse = checkBinary(schema, datum,
+          new GenericDatumWriter<Object>(),
+          new GenericDatumReader<Object>(), reuse);
+      checkDirectBinary(schema, datum,
+                  new GenericDatumWriter<Object>(),
+                  new GenericDatumReader<Object>());
+      checkBlockingBinary(schema, datum,
+                  new GenericDatumWriter<Object>(),
+                  new GenericDatumReader<Object>());
+      checkJson(schema, datum,
+                  new GenericDatumWriter<Object>(),
+                  new GenericDatumReader<Object>());
+
+      // Check that we can generate the code for every schema we see.
+      TestSpecificCompiler.assertCompiles(schema, false);
+
+      // Check that we can read/write the json of every schema we see.
+      checkBinaryJson(jsonSchema);
+    }
+  }
+
+  private static void checkProp(Schema s0) throws Exception {
+    if(s0.getType().equals(Schema.Type.UNION)) return; // unions have no props
+    assertEquals(null, s0.getProp("foo"));
+    Schema s1 = Schema.parse(s0.toString());
+    s1.addProp("foo", "bar");
+    assertEquals("bar", s1.getProp("foo"));
+    assertFalse(s0.equals(s1));
+    Schema s2 = Schema.parse(s1.toString());
+    assertEquals("bar", s2.getProp("foo"));
+    assertEquals(s1, s2);
+    assertFalse(s0.equals(s2));
+  }
+  
+  public static void checkBinary(Schema schema, Object datum,
+                                 DatumWriter<Object> writer,
+                                 DatumReader<Object> reader)
+    throws IOException {
+    checkBinary(schema, datum, writer, reader, null);
+  }
+  
+  public static Object checkBinary(Schema schema, Object datum,
+                                 DatumWriter<Object> writer,
+                                 DatumReader<Object> reader,
+                                 Object reuse)
+    throws IOException {
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    writer.setSchema(schema);
+    Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
+    writer.write(datum, encoder);
+    encoder.flush();
+    byte[] data = out.toByteArray();
+
+    reader.setSchema(schema);
+        
+    Object decoded =
+      reader.read(reuse, DecoderFactory.get().binaryDecoder(
+          data, null));
+      
+    assertEquals("Decoded data does not match.", datum, decoded);
+    return decoded;
+  }
+
+  public static void checkDirectBinary(Schema schema, Object datum,
+      DatumWriter<Object> writer, DatumReader<Object> reader)
+      throws IOException {
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    writer.setSchema(schema);
+    Encoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);
+    writer.write(datum, encoder);
+    // no flush for direct
+    byte[] data = out.toByteArray();
+
+    reader.setSchema(schema);
+
+    Object decoded = reader.read(null, DecoderFactory.get()
+        .binaryDecoder(data, null));
+
+    assertEquals("Decoded data does not match.", datum, decoded);
+  }
+
+  public static void checkBlockingBinary(Schema schema, Object datum,
+      DatumWriter<Object> writer, DatumReader<Object> reader)
+      throws IOException {
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    writer.setSchema(schema);
+    Encoder encoder = EncoderFactory.get().blockingBinaryEncoder(out, null);
+    writer.write(datum, encoder);
+    encoder.flush();
+    byte[] data = out.toByteArray();
+
+    reader.setSchema(schema);
+
+    Object decoded = reader.read(null, DecoderFactory.get()
+        .binaryDecoder(data, null));
+
+    assertEquals("Decoded data does not match.", datum, decoded);
+  }
+
+  private static void checkJson(Schema schema, Object datum,
+                                DatumWriter<Object> writer,
+                                DatumReader<Object> reader)
+    throws IOException {
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    Encoder encoder = EncoderFactory.get().jsonEncoder(schema, out);
+    writer.setSchema(schema);
+    writer.write(datum, encoder);
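+    // the datum is written twice on purpose: the decoder below reads the
+    // stream twice, first into a fresh object and then reusing it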
+    writer.write(datum, encoder);
+    encoder.flush();
+    byte[] data = out.toByteArray();
+
+    reader.setSchema(schema);
+    Decoder decoder = DecoderFactory.get().jsonDecoder(schema,
+        new ByteArrayInputStream(data));
+    Object decoded = reader.read(null, decoder);
+    assertEquals("Decoded data does not match.", datum, decoded);
+
+    decoded = reader.read(decoded, decoder);
+    assertEquals("Decoded data does not match.", datum, decoded);
+  }
+
+  private static void checkJson(Schema schema, Object datum,
+                                String json) throws Exception {
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    Encoder encoder = EncoderFactory.get().jsonEncoder(schema, out);
+    DatumWriter<Object> writer = new GenericDatumWriter<Object>();
+    writer.setSchema(schema);
+    writer.write(datum, encoder);
+    encoder.flush();
+    byte[] data = out.toByteArray();
+
+    String encoded = new String(data, "UTF-8");
+    assertEquals("Encoded data does not match.", json, encoded);
+
+    DatumReader<Object> reader = new GenericDatumReader<Object>();
+    reader.setSchema(schema);
+    Object decoded = reader.read(null, DecoderFactory.get()
+        .jsonDecoder(schema, new ByteArrayInputStream(data)));
+      
+    assertEquals("Decoded data does not match.", datum, decoded);
+  }
+
+  public static void checkBinaryJson(String json) throws Exception {
+    Object node = Json.parseJson(json);
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    DatumWriter<Object> writer = new Json.ObjectWriter();
+    Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
+    encoder = EncoderFactory.get().validatingEncoder(Json.SCHEMA, encoder);
+    writer.write(node, encoder);
+    encoder.flush();
+    byte[] bytes = out.toByteArray();
+
+    DatumReader<Object> reader = new Json.ObjectReader();
+    Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
+    decoder = DecoderFactory.get().validatingDecoder(Json.SCHEMA, decoder);
+    Object decoded = reader.read(null, decoder);
+
+    assertEquals("Decoded json does not match.", Json.toString(node), Json.toString(decoded));
+  }
+
+  private static final Schema ACTUAL =            // an empty record schema
+    Schema.parse("{\"type\":\"record\", \"name\":\"Foo\", \"fields\":[]}");
+
+  private static void checkDefault(String schemaJson, String defaultJson,
+                                   Object defaultValue) throws Exception {
+    String recordJson =
+      "{\"type\":\"record\", \"name\":\"Foo\", \"fields\":[{\"name\":\"f\", "
+    +"\"type\":"+schemaJson+", "
+    +"\"default\":"+defaultJson+"}]}";
+    Schema expected = Schema.parse(recordJson);
+    DatumReader<Object> in = new GenericDatumReader<Object>(ACTUAL, expected);
+    GenericData.Record record = (GenericData.Record)
+      in.read(null, DecoderFactory.get().binaryDecoder(
+          new byte[0], null));
+    assertEquals("Wrong default.", defaultValue, record.get("f"));
+    assertEquals("Wrong toString", expected, Schema.parse(expected.toString()));
+  }
+
+  private static void checkValidateDefaults(String schemaJson, String defaultJson) {
+    try {
+      Schema.Parser parser = new Schema.Parser();
+      parser.setValidateDefaults(true);
+      String recordJson =
+          "{\"type\":\"record\", \"name\":\"Foo\", \"fields\":[{\"name\":\"f\", "
+              +"\"type\":"+schemaJson+", "
+              +"\"default\":"+defaultJson+"}]}";
+      parser.parse(recordJson);
+      fail("Schema of type " + schemaJson + " should not have default " + defaultJson);
+    } catch (AvroTypeException e) {
+      return;
+    }
+  }
+
+  @Test(expected=AvroTypeException.class)
+  public void testNoDefaultField() throws Exception {
+    Schema expected =
+      Schema.parse("{\"type\":\"record\", \"name\":\"Foo\", \"fields\":"+
+                   "[{\"name\":\"f\", \"type\": \"string\"}]}");
+    DatumReader<Object> in = new GenericDatumReader<Object>(ACTUAL, expected);
+    in.read(null, DecoderFactory.get().binaryDecoder(
+        new ByteArrayInputStream(new byte[0]), null));
+  }
+
+  @Test
+  public void testEnumMismatch() throws Exception {
+    Schema actual = Schema.parse
+      ("{\"type\":\"enum\",\"name\":\"E\",\"symbols\":[\"X\",\"Y\"]}");
+    Schema expected = Schema.parse
+      ("{\"type\":\"enum\",\"name\":\"E\",\"symbols\":[\"Y\",\"Z\"]}");
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    DatumWriter<Object> writer = new GenericDatumWriter<Object>(actual);
+    Encoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);
+    writer.write(new GenericData.EnumSymbol(actual, "Y"), encoder);
+    writer.write(new GenericData.EnumSymbol(actual, "X"), encoder);
+    encoder.flush();
+    byte[] data = out.toByteArray();
+    Decoder decoder = DecoderFactory.get().binaryDecoder(
+        data, null);
+    DatumReader<String> in = new GenericDatumReader<String>(actual, expected);
+    assertEquals("Wrong value", new GenericData.EnumSymbol(expected, "Y"),
+                 in.read(null, decoder));
+    try {
+      in.read(null, decoder);
+      fail("Should have thrown exception.");
+    } catch (AvroTypeException e) {
+      // expected
+    }
+  }
+
+  @Test(expected=AvroTypeException.class)
+  public void testRecordWithPrimitiveName() {
+    Schema.parse("{\"type\":\"record\", \"name\":\"string\", \"fields\": []}");
+  }
+  
+  @Test(expected=AvroTypeException.class)
+  public void testEnumWithPrimitiveName() {
+    Schema.parse("{\"type\":\"enum\", \"name\":\"null\", \"symbols\": [\"A\"]}");
+  }
+  
+  private static Schema enumSchema() {
+    return Schema.parse("{ \"type\": \"enum\", \"name\": \"e\", "
+        + "\"symbols\": [\"a\", \"b\"]}");
+  }
+
+  @Test(expected=AvroRuntimeException.class)
+  public void testImmutability1() {
+    Schema s = enumSchema();
+    s.addProp("p1", "1");
+    s.addProp("p1", "2");
+  }
+  
+  @Test(expected=AvroRuntimeException.class)
+  public void testImmutability2() {
+    Schema s = enumSchema();
+    s.addProp("p1", (String)null);
+  }
+
+  private static List<String> lockedArrayList() {
+    return new Schema.LockableArrayList<String>(Arrays.asList(new String[] {
+        "a", "b", "c" })).lock();
+  }
+
+  @Test(expected=IllegalStateException.class)
+  public void testLockedArrayList1() {
+    lockedArrayList().add("p");
+  }
+
+  @Test(expected=IllegalStateException.class)
+  public void testLockedArrayList2() {
+    lockedArrayList().remove("a");
+  }
+
+  @Test(expected=IllegalStateException.class)
+  public void testLockedArrayList3() {
+    lockedArrayList().addAll(Arrays.asList(new String[] { "p" }));
+  }
+
+  @Test(expected=IllegalStateException.class)
+  public void testLockedArrayList4() {
+    lockedArrayList().addAll(0,
+        Arrays.asList(new String[] { "p" }));
+  }
+
+  @Test(expected=IllegalStateException.class)
+  public void testLockedArrayList5() {
+    lockedArrayList().
+      removeAll(Arrays.asList(new String[] { "a" }));
+  }
+
+  @Test(expected=IllegalStateException.class)
+  public void testLockedArrayList6() {
+    lockedArrayList().
+      retainAll(Arrays.asList(new String[] { "a" }));
+  }
+
+  @Test(expected=IllegalStateException.class)
+  public void testLockedArrayList7() {
+    lockedArrayList().clear();
+  }
+
+  @Test(expected=IllegalStateException.class)
+  public void testLockedArrayList8() {
+    lockedArrayList().iterator().remove();
+  }
+
+
+  @Test(expected=IllegalStateException.class)
+  public void testLockedArrayList9() {
+    Iterator<String> it = lockedArrayList().iterator();
+    it.next();
+    it.remove();
+  }
+
+  @Test(expected=IllegalStateException.class)
+  public void testLockedArrayList10() {
+    lockedArrayList().remove(1);
+  }
+
+  @Test
+  public void testNames_GetWithInheritedNamespace() {
+    Schema schema = Schema.create(Type.STRING);
+    Schema.Names names = new Schema.Names("space");
+    names.put(new Schema.Name("Name", "space"), schema);
+
+    assertEquals(schema, names.get(new Schema.Name("Name", "space")));
+    assertEquals(schema, names.get("Name"));
+  }
+
+  @Test
+  public void testNames_GetWithNullNamespace() {
+    Schema schema = Schema.create(Type.STRING);
+    Schema.Names names = new Schema.Names("space");
+    names.put(new Schema.Name("Name", ""), schema);
+
+    assertEquals(schema, names.get(new Schema.Name("Name", "")));
+    assertEquals(schema, names.get("Name"));
+  }
+
+  @Test
+  public void testNames_GetNotFound() {
+    Schema.Names names = new Schema.Names("space");
+    names.put(new Schema.Name("Name", "otherspace"), Schema.create(Type.STRING));
+
+    assertNull(names.get("Name"));
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/compiler/specific/TestSpecificCompiler.java b/lang/java/ipc/src/test/java/org/apache/avro/compiler/specific/TestSpecificCompiler.java
new file mode 100644
index 0000000..34a3601
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/compiler/specific/TestSpecificCompiler.java
@@ -0,0 +1,742 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.compiler.specific;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertNotNull;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+
+import javax.tools.JavaCompiler;
+import javax.tools.StandardJavaFileManager;
+import javax.tools.ToolProvider;
+import javax.tools.JavaCompiler.CompilationTask;
+
+import org.apache.avro.AvroTestUtil;
+import org.apache.avro.Protocol;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.TestProtocolParsing;
+import org.apache.avro.TestSchema;
+import org.apache.avro.TestAnnotation;
+import org.apache.avro.generic.GenericData.StringType;
+
+import org.apache.avro.test.Simple;
+import org.apache.avro.test.TestRecord;
+import org.apache.avro.test.MD5;
+import org.apache.avro.test.Kind;
+
+import org.apache.avro.compiler.specific.SpecificCompiler.OutputFile;
+import org.junit.Test;
+
+public class TestSpecificCompiler {
+  static final String PROTOCOL = "" +
+        "{ \"protocol\": \"default\",\n" +
+        "  \"types\":\n" +
+        "    [\n" +
+        "      {\n" +
+        "       \"name\": \"finally\",\n" +
+        "       \"type\": \"error\",\n" +
+        "       \"fields\": [{\"name\": \"catch\", \"type\": \"boolean\"}]\n" +
+        "      }\n" +
+        "    ],\n" +
+        "  \"messages\": { \"goto\":\n" +
+        "    { \"request\": [{\"name\": \"break\", \"type\": \"string\"}],\n" +
+        "      \"response\": \"string\",\n" +
+        "      \"errors\": [\"finally\"]\n" +
+        "    }" +
+        "   }\n" +
+        "}\n";
+
+  @Test
+  public void testEsc() {
+    assertEquals("\\\"", SpecificCompiler.javaEscape("\""));
+  }
+
+  @Test
+  public void testMakePath() {
+    assertEquals("foo/bar/Baz.java".replace("/", File.separator), SpecificCompiler.makePath("Baz", "foo.bar"));
+    assertEquals("baz.java", SpecificCompiler.makePath("baz", ""));
+  }
+
+  @Test
+  public void testPrimitiveSchemaGeneratesNothing() {
+    assertEquals(0, new SpecificCompiler(Schema.parse("\"double\"")).compile().size());
+  }
+
+  @Test
+  public void testSimpleEnumSchema() throws IOException {
+    Collection<OutputFile> outputs = new SpecificCompiler(Schema.parse(TestSchema.BASIC_ENUM_SCHEMA)).compile();
+    assertEquals(1, outputs.size());
+    OutputFile o = outputs.iterator().next();
+    assertEquals(o.path, "Test.java");
+    assertTrue(o.contents.contains("public enum Test"));
+    assertCompilesWithJavaCompiler(outputs);
+  }
+
+  @Test
+  public void testMangleIfReserved() {
+    assertEquals("foo", SpecificCompiler.mangle("foo"));
+    assertEquals("goto$", SpecificCompiler.mangle("goto"));
+  }
+
+  @Test
+  public void testManglingForProtocols() throws IOException {
+    Collection<OutputFile> c =
+      new SpecificCompiler(Protocol.parse(PROTOCOL)).compile();
+    Iterator<OutputFile> i = c.iterator();
+    String errType = i.next().contents;
+    String protocol = i.next().contents;
+
+    assertTrue(errType.contains("public class finally$ extends org.apache.avro.specific.SpecificExceptionBase"));
+    assertTrue(errType.contains("public boolean catch$;"));
+
+    assertTrue(protocol.contains("java.lang.CharSequence goto$(java.lang.CharSequence break$)"));
+    assertTrue(protocol.contains("public interface default$"));
+    assertTrue(protocol.contains("throws org.apache.avro.AvroRemoteException, finally$"));
+    
+    assertCompilesWithJavaCompiler(c);
+
+  }
+
+  private static final String SCHEMA =
+      "{ \"name\": \"volatile\", \"type\": \"record\", " +
+      "  \"fields\": [ {\"name\": \"package\", \"type\": \"string\" }," +
+      "                {\"name\": \"data\", \"type\": \"int\" }," +
+      "                {\"name\": \"value\", \"type\": \"int\" }," +
+      "                {\"name\": \"defaultValue\", \"type\": \"int\" }," +
+      "                {\"name\": \"other\", \"type\": \"int\" }," +
+      "                {\"name\": \"short\", \"type\": \"volatile\" } ] }";
+
+
+  @Test
+  public void testManglingForRecords() throws IOException {
+    Collection<OutputFile> c =
+      new SpecificCompiler(Schema.parse(SCHEMA)).compile();
+    assertEquals(1, c.size());
+    String contents = c.iterator().next().contents;
+
+    assertTrue(contents.contains("public java.lang.CharSequence package$;"));
+    assertTrue(contents.contains("class volatile$ extends"));
+    assertTrue(contents.contains("volatile$ short$;"));
+    
+    assertCompilesWithJavaCompiler(c);
+  }
+
+  @Test
+  public void testManglingForEnums() throws IOException {
+    String enumSchema = "" +
+      "{ \"name\": \"instanceof\", \"type\": \"enum\"," +
+      "  \"symbols\": [\"new\", \"super\", \"switch\"] }";
+    Collection<OutputFile> c =
+      new SpecificCompiler(Schema.parse(enumSchema)).compile();
+    assertEquals(1, c.size());
+    String contents = c.iterator().next().contents;
+
+    assertTrue(contents.contains("new$"));
+    
+    assertCompilesWithJavaCompiler(c);
+  }
+
+  @Test
+  public void testSchemaSplit() throws IOException {
+    SpecificCompiler compiler = new SpecificCompiler(Schema.parse(SCHEMA));
+    compiler.maxStringChars = 10;
+    Collection<OutputFile> files = compiler.compile();
+    assertCompilesWithJavaCompiler(files);
+  }
+
+  @Test
+  public void testProtocolSplit() throws IOException {
+    SpecificCompiler compiler = new SpecificCompiler(Protocol.parse(PROTOCOL));
+    compiler.maxStringChars = 10;
+    Collection<OutputFile> files = compiler.compile();
+    assertCompilesWithJavaCompiler(files);
+  }
+
+  @Test
+  public void testSchemaWithDocs() {
+    Collection<OutputFile> outputs = new SpecificCompiler(
+        Schema.parse(TestSchema.SCHEMA_WITH_DOC_TAGS)).compile();
+    assertEquals(3, outputs.size());
+    int count = 0;
+    for (OutputFile o : outputs) {
+      if (o.path.endsWith("outer_record.java")) {
+        count++;
+        assertTrue(o.contents.contains("/** This is not a world record. */"));
+        assertTrue(o.contents.contains("/** Inner Fixed */"));
+        assertTrue(o.contents.contains("/** Inner Enum */"));
+        assertTrue(o.contents.contains("/** Inner String */"));
+      }
+      if (o.path.endsWith("very_inner_fixed.java")) {
+        count++;
+        assertTrue(o.contents.contains("/** Very Inner Fixed */"));
+        assertTrue(o.contents.contains("@org.apache.avro.specific.FixedSize(1)"));
+      }
+      if (o.path.endsWith("very_inner_enum.java")) {
+        count++;
+        assertTrue(o.contents.contains("/** Very Inner Enum */"));
+      }
+    }
+ 
+    assertEquals(3, count);
+  }
+  
+  @Test
+  public void testProtocolWithDocs() throws IOException {
+    Protocol protocol = TestProtocolParsing.getSimpleProtocol();
+    Collection<OutputFile> out = new SpecificCompiler(protocol).compile();
+    assertEquals(6, out.size());
+    int count = 0;
+    for (OutputFile o : out) {
+      if (o.path.endsWith("Simple.java")) {
+        count++;
+        assertTrue(o.contents.contains("/** Protocol used for testing. */"));
+        assertTrue(o.contents.contains("* Send a greeting"));
+      }
+    }
+    assertEquals("Missed generated protocol!", 1, count);
+  }
+  
+  @Test
+  public void testNeedCompile() throws IOException, InterruptedException {
+    String schema = "" +
+      "{ \"name\": \"Foo\", \"type\": \"record\", " +
+      "  \"fields\": [ {\"name\": \"package\", \"type\": \"string\" }," +
+      "                {\"name\": \"short\", \"type\": \"Foo\" } ] }";
+    File inputFile = AvroTestUtil.tempFile(getClass(), "input.avsc");
+    FileWriter fw = new FileWriter(inputFile);
+    fw.write(schema);
+    fw.close();
+    
+    File outputDir = new File(System.getProperty("test.dir", "target/test") + 
+      System.getProperty("file.separator") + "test_need_compile");
+    File outputFile = new File(outputDir, "Foo.java");
+    outputFile.delete();
+    assertTrue(!outputFile.exists());
+    outputDir.delete();
+    assertTrue(!outputDir.exists());
+    SpecificCompiler.compileSchema(inputFile, outputDir);
+    assertTrue(outputDir.exists());
+    assertTrue(outputFile.exists());
+
+    long lastModified = outputFile.lastModified();
+    Thread.sleep(1000);  // file-timestamp granularity doesn't seem to go below 1 sec
+    SpecificCompiler.compileSchema(inputFile, outputDir);
+    assertEquals(lastModified, outputFile.lastModified());
+    
+    fw = new FileWriter(inputFile);
+    fw.write(schema);
+    fw.close();
+    SpecificCompiler.compileSchema(inputFile, outputDir);
+    assertTrue(lastModified != outputFile.lastModified());
+  }
+  
+  /**
+   * Creates a record with the given name, error status, and fields.
+   * @param name the name of the schema.
+   * @param isError true if the schema represents an error; false otherwise.
+   * @param fields the field(s) to add to the schema.
+   * @return the schema.
+   */
+  private Schema createRecord(String name, 
+      boolean isError, Field... fields) {
+    Schema record = Schema.createRecord(name, null, null, isError);
+    record.setFields(Arrays.asList(fields));
+    return record;
+  }
+  
+  @Test
+  public void generateGetMethod() {
+    Field height = new Field("height", Schema.create(Type.INT), null, null);
+    Field Height = new Field("Height", Schema.create(Type.INT), null, null);
+    Field height_and_width = 
+        new Field("height_and_width", Schema.create(Type.STRING), null, null);
+    Field message = 
+        new Field("message", Schema.create(Type.STRING), null, null);
+    Field Message = 
+        new Field("Message", Schema.create(Type.STRING), null, null);
+    Field cause = 
+        new Field("cause", Schema.create(Type.STRING), null, null);
+    Field clasz = 
+        new Field("class", Schema.create(Type.STRING), null, null);
+    Field schema = 
+        new Field("schema", Schema.create(Type.STRING), null, null);
+    Field Schema$ = 
+        new Field("Schema", Schema.create(Type.STRING), null, null);
+    
+    assertEquals("getHeight", SpecificCompiler.generateGetMethod(
+        createRecord("test", false, height), height));
+    
+    assertEquals("getHeightAndWidth", SpecificCompiler.generateGetMethod(
+        createRecord("test", false, height_and_width), height_and_width));
+  
+    assertEquals("getMessage", SpecificCompiler.generateGetMethod(
+        createRecord("test", false, message), message));
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    assertEquals("getMessage$", SpecificCompiler.generateGetMethod(
+        createRecord("test", true, message), message));
+ 
+    assertEquals("getCause", SpecificCompiler.generateGetMethod(
+        createRecord("test", false, cause), cause));
+    cause = new Field("cause", Schema.create(Type.STRING), null, null);
+    assertEquals("getCause$", SpecificCompiler.generateGetMethod(
+        createRecord("test", true, cause), cause));
+
+    
+    assertEquals("getClass$", SpecificCompiler.generateGetMethod(
+        createRecord("test", false, clasz), clasz));
+    clasz = new Field("class", Schema.create(Type.STRING), null, null);
+    assertEquals("getClass$", SpecificCompiler.generateGetMethod(
+        createRecord("test", true, clasz), clasz));
+
+    assertEquals("getSchema$", SpecificCompiler.generateGetMethod(
+        createRecord("test", false, schema), schema));
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    assertEquals("getSchema$", SpecificCompiler.generateGetMethod(
+        createRecord("test", true, schema), schema));
+
+    height = new Field("height", Schema.create(Type.INT), null, null);
+    Height = new Field("Height", Schema.create(Type.INT), null, null);
+    assertEquals("getHeight", SpecificCompiler.generateGetMethod(
+        createRecord("test", false, Height), Height));
+    
+    height = new Field("height", Schema.create(Type.INT), null, null);
+    Height = new Field("Height", Schema.create(Type.INT), null, null);
+    assertEquals("getHeight$0", SpecificCompiler.generateGetMethod(
+        createRecord("test", false, height, Height), height));
+    
+    height = new Field("height", Schema.create(Type.INT), null, null);
+    Height = new Field("Height", Schema.create(Type.INT), null, null);
+    assertEquals("getHeight$1", SpecificCompiler.generateGetMethod(
+        createRecord("test", false, height, Height), Height));
+    
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    Message = new Field("Message", Schema.create(Type.STRING), null, null);
+    assertEquals("getMessage$", SpecificCompiler.generateGetMethod(
+        createRecord("test", true, Message), Message));
+    
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    Message = new Field("Message", Schema.create(Type.STRING), null, null);
+    assertEquals("getMessage$0", SpecificCompiler.generateGetMethod(
+        createRecord("test", true, message, Message), message));
+    
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    Message = new Field("Message", Schema.create(Type.STRING), null, null);
+    assertEquals("getMessage$1", SpecificCompiler.generateGetMethod(
+        createRecord("test", true, message, Message), Message));
+    
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
+    assertEquals("getSchema$", SpecificCompiler.generateGetMethod(
+        createRecord("test", false, Schema$), Schema$));
+    
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
+    assertEquals("getSchema$0", SpecificCompiler.generateGetMethod(
+        createRecord("test", false, schema, Schema$), schema));
+    
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
+    assertEquals("getSchema$1", SpecificCompiler.generateGetMethod(
+        createRecord("test", false, schema, Schema$), Schema$));
+  }
+
+  @Test
+  public void generateSetMethod() {
+    Field height = new Field("height", Schema.create(Type.INT), null, null);
+    Field Height = new Field("Height", Schema.create(Type.INT), null, null);
+    Field height_and_width = 
+        new Field("height_and_width", Schema.create(Type.STRING), null, null);
+    Field message = 
+        new Field("message", Schema.create(Type.STRING), null, null);
+    Field Message = 
+        new Field("Message", Schema.create(Type.STRING), null, null);
+    Field cause = 
+        new Field("cause", Schema.create(Type.STRING), null, null);
+    Field clasz = 
+        new Field("class", Schema.create(Type.STRING), null, null);
+    Field schema = 
+        new Field("schema", Schema.create(Type.STRING), null, null);
+    Field Schema$ = 
+        new Field("Schema", Schema.create(Type.STRING), null, null);
+    
+    assertEquals("setHeight", SpecificCompiler.generateSetMethod(
+        createRecord("test", false, height), height));
+    
+    assertEquals("setHeightAndWidth", SpecificCompiler.generateSetMethod(
+        createRecord("test", false, height_and_width), height_and_width));
+  
+    assertEquals("setMessage", SpecificCompiler.generateSetMethod(
+        createRecord("test", false, message), message));
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    assertEquals("setMessage$", SpecificCompiler.generateSetMethod(
+        createRecord("test", true, message), message));
+ 
+    assertEquals("setCause", SpecificCompiler.generateSetMethod(
+        createRecord("test", false, cause), cause));
+    cause = new Field("cause", Schema.create(Type.STRING), null, null);
+    assertEquals("setCause$", SpecificCompiler.generateSetMethod(
+        createRecord("test", true, cause), cause));
+
+    
+    assertEquals("setClass$", SpecificCompiler.generateSetMethod(
+        createRecord("test", false, clasz), clasz));
+    clasz = new Field("class", Schema.create(Type.STRING), null, null);
+    assertEquals("setClass$", SpecificCompiler.generateSetMethod(
+        createRecord("test", true, clasz), clasz));
+
+    assertEquals("setSchema$", SpecificCompiler.generateSetMethod(
+        createRecord("test", false, schema), schema));
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    assertEquals("setSchema$", SpecificCompiler.generateSetMethod(
+        createRecord("test", true, schema), schema));
+
+    height = new Field("height", Schema.create(Type.INT), null, null);
+    Height = new Field("Height", Schema.create(Type.INT), null, null);
+    assertEquals("setHeight", SpecificCompiler.generateSetMethod(
+        createRecord("test", false, Height), Height));
+    
+    height = new Field("height", Schema.create(Type.INT), null, null);
+    Height = new Field("Height", Schema.create(Type.INT), null, null);
+    assertEquals("setHeight$0", SpecificCompiler.generateSetMethod(
+        createRecord("test", false, height, Height), height));
+    
+    height = new Field("height", Schema.create(Type.INT), null, null);
+    Height = new Field("Height", Schema.create(Type.INT), null, null);
+    assertEquals("setHeight$1", SpecificCompiler.generateSetMethod(
+        createRecord("test", false, height, Height), Height));
+    
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    Message = new Field("Message", Schema.create(Type.STRING), null, null);
+    assertEquals("setMessage$", SpecificCompiler.generateSetMethod(
+        createRecord("test", true, Message), Message));
+    
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    Message = new Field("Message", Schema.create(Type.STRING), null, null);
+    assertEquals("setMessage$0", SpecificCompiler.generateSetMethod(
+        createRecord("test", true, message, Message), message));
+    
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    Message = new Field("Message", Schema.create(Type.STRING), null, null);
+    assertEquals("setMessage$1", SpecificCompiler.generateSetMethod(
+        createRecord("test", true, message, Message), Message));
+    
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
+    assertEquals("setSchema$", SpecificCompiler.generateSetMethod(
+        createRecord("test", false, Schema$), Schema$));
+    
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
+    assertEquals("setSchema$0", SpecificCompiler.generateSetMethod(
+        createRecord("test", false, schema, Schema$), schema));
+    
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
+    assertEquals("setSchema$1", SpecificCompiler.generateSetMethod(
+        createRecord("test", false, schema, Schema$), Schema$));
+  }
+  
+  @Test
+  public void generateHasMethod() {
+    Field height = new Field("height", Schema.create(Type.INT), null, null);
+    Field Height = new Field("Height", Schema.create(Type.INT), null, null);
+    Field height_and_width = 
+        new Field("height_and_width", Schema.create(Type.STRING), null, null);
+    Field message = 
+        new Field("message", Schema.create(Type.STRING), null, null);
+    Field Message = 
+        new Field("Message", Schema.create(Type.STRING), null, null);
+    Field cause = 
+        new Field("cause", Schema.create(Type.STRING), null, null);
+    Field clasz = 
+        new Field("class", Schema.create(Type.STRING), null, null);
+    Field schema = 
+        new Field("schema", Schema.create(Type.STRING), null, null);
+    Field Schema$ = 
+        new Field("Schema", Schema.create(Type.STRING), null, null);
+    
+    assertEquals("hasHeight", SpecificCompiler.generateHasMethod(
+        createRecord("test", false, height), height));
+    
+    assertEquals("hasHeightAndWidth", SpecificCompiler.generateHasMethod(
+        createRecord("test", false, height_and_width), height_and_width));
+  
+    assertEquals("hasMessage", SpecificCompiler.generateHasMethod(
+        createRecord("test", false, message), message));
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    assertEquals("hasMessage$", SpecificCompiler.generateHasMethod(
+        createRecord("test", true, message), message));
+ 
+    assertEquals("hasCause", SpecificCompiler.generateHasMethod(
+        createRecord("test", false, cause), cause));
+    cause = new Field("cause", Schema.create(Type.STRING), null, null);
+    assertEquals("hasCause$", SpecificCompiler.generateHasMethod(
+        createRecord("test", true, cause), cause));
+
+    
+    assertEquals("hasClass$", SpecificCompiler.generateHasMethod(
+        createRecord("test", false, clasz), clasz));
+    clasz = new Field("class", Schema.create(Type.STRING), null, null);
+    assertEquals("hasClass$", SpecificCompiler.generateHasMethod(
+        createRecord("test", true, clasz), clasz));
+
+    assertEquals("hasSchema$", SpecificCompiler.generateHasMethod(
+        createRecord("test", false, schema), schema));
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    assertEquals("hasSchema$", SpecificCompiler.generateHasMethod(
+        createRecord("test", true, schema), schema));
+
+    height = new Field("height", Schema.create(Type.INT), null, null);
+    Height = new Field("Height", Schema.create(Type.INT), null, null);
+    assertEquals("hasHeight", SpecificCompiler.generateHasMethod(
+        createRecord("test", false, Height), Height));
+    
+    height = new Field("height", Schema.create(Type.INT), null, null);
+    Height = new Field("Height", Schema.create(Type.INT), null, null);
+    assertEquals("hasHeight$0", SpecificCompiler.generateHasMethod(
+        createRecord("test", false, height, Height), height));
+    
+    height = new Field("height", Schema.create(Type.INT), null, null);
+    Height = new Field("Height", Schema.create(Type.INT), null, null);
+    assertEquals("hasHeight$1", SpecificCompiler.generateHasMethod(
+        createRecord("test", false, height, Height), Height));
+    
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    Message = new Field("Message", Schema.create(Type.STRING), null, null);
+    assertEquals("hasMessage$", SpecificCompiler.generateHasMethod(
+        createRecord("test", true, Message), Message));
+    
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    Message = new Field("Message", Schema.create(Type.STRING), null, null);
+    assertEquals("hasMessage$0", SpecificCompiler.generateHasMethod(
+        createRecord("test", true, message, Message), message));
+    
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    Message = new Field("Message", Schema.create(Type.STRING), null, null);
+    assertEquals("hasMessage$1", SpecificCompiler.generateHasMethod(
+        createRecord("test", true, message, Message), Message));
+    
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
+    assertEquals("hasSchema$", SpecificCompiler.generateHasMethod(
+        createRecord("test", false, Schema$), Schema$));
+    
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
+    assertEquals("hasSchema$0", SpecificCompiler.generateHasMethod(
+        createRecord("test", false, schema, Schema$), schema));
+    
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
+    assertEquals("hasSchema$1", SpecificCompiler.generateHasMethod(
+        createRecord("test", false, schema, Schema$), Schema$));
+  }
+  
+  @Test
+  public void generateClearMethod() {
+    Field height = new Field("height", Schema.create(Type.INT), null, null);
+    Field Height = new Field("Height", Schema.create(Type.INT), null, null);
+    Field height_and_width = 
+        new Field("height_and_width", Schema.create(Type.STRING), null, null);
+    Field message = 
+        new Field("message", Schema.create(Type.STRING), null, null);
+    Field Message = 
+        new Field("Message", Schema.create(Type.STRING), null, null);
+    Field cause = 
+        new Field("cause", Schema.create(Type.STRING), null, null);
+    Field clasz = 
+        new Field("class", Schema.create(Type.STRING), null, null);
+    Field schema = 
+        new Field("schema", Schema.create(Type.STRING), null, null);
+    Field Schema$ = 
+        new Field("Schema", Schema.create(Type.STRING), null, null);
+    
+    assertEquals("clearHeight", SpecificCompiler.generateClearMethod(
+        createRecord("test", false, height), height));
+    
+    assertEquals("clearHeightAndWidth", SpecificCompiler.generateClearMethod(
+        createRecord("test", false, height_and_width), height_and_width));
+  
+    assertEquals("clearMessage", SpecificCompiler.generateClearMethod(
+        createRecord("test", false, message), message));
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    assertEquals("clearMessage$", SpecificCompiler.generateClearMethod(
+        createRecord("test", true, message), message));
+ 
+    assertEquals("clearCause", SpecificCompiler.generateClearMethod(
+        createRecord("test", false, cause), cause));
+    cause = new Field("cause", Schema.create(Type.STRING), null, null);
+    assertEquals("clearCause$", SpecificCompiler.generateClearMethod(
+        createRecord("test", true, cause), cause));
+
+    
+    assertEquals("clearClass$", SpecificCompiler.generateClearMethod(
+        createRecord("test", false, clasz), clasz));
+    clasz = new Field("class", Schema.create(Type.STRING), null, null);
+    assertEquals("clearClass$", SpecificCompiler.generateClearMethod(
+        createRecord("test", true, clasz), clasz));
+
+    assertEquals("clearSchema$", SpecificCompiler.generateClearMethod(
+        createRecord("test", false, schema), schema));
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    assertEquals("clearSchema$", SpecificCompiler.generateClearMethod(
+        createRecord("test", true, schema), schema));
+
+    height = new Field("height", Schema.create(Type.INT), null, null);
+    Height = new Field("Height", Schema.create(Type.INT), null, null);
+    assertEquals("clearHeight", SpecificCompiler.generateClearMethod(
+        createRecord("test", false, Height), Height));
+    
+    height = new Field("height", Schema.create(Type.INT), null, null);
+    Height = new Field("Height", Schema.create(Type.INT), null, null);
+    assertEquals("clearHeight$0", SpecificCompiler.generateClearMethod(
+        createRecord("test", false, height, Height), height));
+    
+    height = new Field("height", Schema.create(Type.INT), null, null);
+    Height = new Field("Height", Schema.create(Type.INT), null, null);
+    assertEquals("clearHeight$1", SpecificCompiler.generateClearMethod(
+        createRecord("test", false, height, Height), Height));
+    
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    Message = new Field("Message", Schema.create(Type.STRING), null, null);
+    assertEquals("clearMessage$", SpecificCompiler.generateClearMethod(
+        createRecord("test", true, Message), Message));
+    
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    Message = new Field("Message", Schema.create(Type.STRING), null, null);
+    assertEquals("clearMessage$0", SpecificCompiler.generateClearMethod(
+        createRecord("test", true, message, Message), message));
+    
+    message = new Field("message", Schema.create(Type.STRING), null, null);
+    Message = new Field("Message", Schema.create(Type.STRING), null, null);
+    assertEquals("clearMessage$1", SpecificCompiler.generateClearMethod(
+        createRecord("test", true, message, Message), Message));
+    
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
+    assertEquals("clearSchema$", SpecificCompiler.generateClearMethod(
+        createRecord("test", false, Schema$), Schema$));
+    
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
+    assertEquals("clearSchema$0", SpecificCompiler.generateClearMethod(
+        createRecord("test", false, schema, Schema$), schema));
+    
+    schema = new Field("schema", Schema.create(Type.STRING), null, null);
+    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
+    assertEquals("clearSchema$1", SpecificCompiler.generateClearMethod(
+        createRecord("test", false, schema, Schema$), Schema$));
+  }
+
+  @Test public void testAnnotations() throws Exception {
+    // an interface generated for a protocol
+    assertNotNull(Simple.class.getAnnotation(TestAnnotation.class));
+    // a class generated for a record
+    assertNotNull(TestRecord.class.getAnnotation(TestAnnotation.class));
+    // a class generated for a fixed
+    assertNotNull(MD5.class.getAnnotation(TestAnnotation.class));
+    // a class generated for an enum
+    assertNotNull(Kind.class.getAnnotation(TestAnnotation.class));
+
+    // a field
+    assertNotNull(TestRecord.class.getField("name")
+                  .getAnnotation(TestAnnotation.class));
+    // a method
+    assertNotNull(Simple.class.getMethod("ack")
+                  .getAnnotation(TestAnnotation.class));
+  }
+
+  @Test
+  public void testAliases() throws IOException {
+    Schema s = Schema.parse
+      ("{\"name\":\"X\",\"type\":\"record\",\"aliases\":[\"Y\"],\"fields\":["
+       +"{\"name\":\"f\",\"type\":\"int\",\"aliases\":[\"g\"]}]}");
+    SpecificCompiler compiler = new SpecificCompiler(s);
+    compiler.setStringType(StringType.valueOf("String"));
+    Collection<OutputFile> outputs = compiler.compile();
+    assertEquals(1, outputs.size());
+    OutputFile o = outputs.iterator().next();
+    assertEquals(o.path, "X.java");
+    assertTrue(o.contents.contains("[\\\"Y\\\"]"));
+    assertTrue(o.contents.contains("[\\\"g\\\"]"));
+  }
+
+  /**
+   * Checks that a schema passes through the SpecificCompiler, and,
+   * optionally, uses the system's Java compiler to check
+   * that the generated code is valid.
+   */
+  public static void assertCompiles(Schema schema, boolean useJavaCompiler)
+  throws IOException {
+    Collection<OutputFile> outputs = new SpecificCompiler(schema).compile();
+    assertTrue(null != outputs);
+    if (useJavaCompiler) {
+      assertCompilesWithJavaCompiler(outputs);
+    }
+  }
+  
+  /**
+   * Checks that a protocol passes through the SpecificCompiler,
+   * and, optionally, uses the system's Java compiler to check
+   * that the generated code is valid.
+   */
+  public static void assertCompiles(Protocol protocol, boolean useJavaCompiler)
+  throws IOException {
+    Collection<OutputFile> outputs = new SpecificCompiler(protocol).compile();
+    assertTrue(null != outputs);
+    if (useJavaCompiler) {
+      assertCompilesWithJavaCompiler(outputs);
+    }
+  }
+  
+  /** Uses the system's Java compiler to actually compile the generated code. */
+  static void assertCompilesWithJavaCompiler(Collection<OutputFile> outputs) 
+  throws IOException {
+    if (outputs.isEmpty()) {
+      return;               // Nothing to compile!
+    }
+    File dstDir = AvroTestUtil.tempFile(TestSpecificCompiler.class, "realCompiler");
+    List<File> javaFiles = new ArrayList<File>();
+    for (OutputFile o : outputs) {
+      javaFiles.add(o.writeToDestination(null, dstDir));
+    }
+
+    JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
+    StandardJavaFileManager fileManager = 
+      compiler.getStandardFileManager(null, null, null);
+    
+    CompilationTask cTask = compiler.getTask(null, fileManager, null, null, 
+        null,
+        fileManager.getJavaFileObjects(
+            javaFiles.toArray(new File[javaFiles.size()])));
+    assertTrue(cTask.call());
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/generic/TestDeepCopy.java b/lang/java/ipc/src/test/java/org/apache/avro/generic/TestDeepCopy.java
new file mode 100644
index 0000000..0cf36f5
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/generic/TestDeepCopy.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.generic;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.avro.Foo;
+import org.apache.avro.Interop;
+import org.apache.avro.Kind;
+import org.apache.avro.MD5;
+import org.apache.avro.Node;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.specific.SpecificData;
+import org.junit.Test;
+
+/** Unit test for performing a deep copy of an object with a schema */
+public class TestDeepCopy {
+  @Test
+  public void testDeepCopy() {
+    // Set all non-default fields in an Interop instance:
+    Interop.Builder interopBuilder = Interop.newBuilder();
+    interopBuilder.setArrayField(
+        Arrays.asList(new Double[] { 1.1, 1.2, 1.3, 1.4 }));
+    interopBuilder.setBoolField(true);
+    interopBuilder.setBytesField(ByteBuffer.wrap(new byte[] { 1, 2, 3, 4 }));
+    interopBuilder.setDoubleField(3.14d);
+    interopBuilder.setEnumField(Kind.B);
+    interopBuilder.setFixedField(new MD5(new byte[] { 
+        4, 3, 2, 1, 4, 3, 2, 1, 4, 3, 2, 1, 4, 3, 2, 1 }));
+    interopBuilder.setFloatField(6.022f);
+    interopBuilder.setIntField(32);
+    interopBuilder.setLongField(64L);
+    
+    Map<java.lang.String,org.apache.avro.Foo> map = 
+      new HashMap<java.lang.String,org.apache.avro.Foo>(1);
+    map.put("foo", Foo.newBuilder().setLabel("bar").build());
+    interopBuilder.setMapField(map);
+    
+    interopBuilder.setNullField(null);
+    
+    Node.Builder rootBuilder = Node.newBuilder().setLabel("/");
+    Node.Builder homeBuilder = Node.newBuilder().setLabel("home");
+    homeBuilder.setChildren(new ArrayList<Node>(0));
+    rootBuilder.setChildren(Arrays.asList(new Node[] { homeBuilder.build() }));
+    interopBuilder.setRecordField(rootBuilder.build());
+    
+    interopBuilder.setStringField("Hello");
+    interopBuilder.setUnionField(Arrays.asList(new ByteBuffer[] {
+        ByteBuffer.wrap(new byte[] { 1, 2 }) }));
+    
+    Interop interop = interopBuilder.build();
+    
+    // Verify that deepCopy works for all fields:
+    for (Field field : Interop.SCHEMA$.getFields()) {
+      // Original field and deep copy should be equivalent:
+      if (interop.get(field.pos()) instanceof ByteBuffer) {
+        assertTrue(Arrays.equals(((ByteBuffer)interop.get(field.pos())).array(),
+            ((ByteBuffer)GenericData.get().deepCopy(field.schema(), 
+                interop.get(field.pos()))).array()));
+      }
+      else {
+        assertEquals(interop.get(field.pos()),
+            SpecificData.get().deepCopy(
+                field.schema(), interop.get(field.pos())));
+      }
+      
+      // Original field and deep copy should be different instances:
+      if ((field.schema().getType() != Type.ENUM)
+           && (field.schema().getType() != Type.NULL)
+           && (field.schema().getType() != Type.BOOLEAN)
+           && (field.schema().getType() != Type.INT)
+           && (field.schema().getType() != Type.LONG)
+           && (field.schema().getType() != Type.FLOAT)
+           && (field.schema().getType() != Type.DOUBLE)
+           && (field.schema().getType() != Type.STRING)) {
+        assertFalse("Field " + field.name() + " is same instance in deep copy",
+            interop.get(field.pos()) == 
+              GenericData.get().deepCopy(
+                  field.schema(), interop.get(field.pos())));
+      }
+    }
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/io/Perf.java b/lang/java/ipc/src/test/java/org/apache/avro/io/Perf.java
new file mode 100644
index 0000000..762a215
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/io/Perf.java
@@ -0,0 +1,1752 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.lang.reflect.Array;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.avro.FooBarSpecificRecord;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.TypeEnum;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.reflect.ReflectDatumWriter;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.specific.SpecificDatumWriter;
+import org.apache.avro.specific.SpecificRecordBase;
+import org.apache.avro.util.Utf8;
+
+
+/**
+ * Performance tests for various low level operations of
+ * Avro encoding and decoding.
+ */
+public class Perf {
+  private static final int COUNT = 250000; // needs to be a multiple of 4
+  private static final int CYCLES = 800;
+  
+  /**
+   * Use a fixed seed for random number generation
+   * to allow for better cross-run comparisons.
+   */
+  private static final long SEED = 19781210;
+
+  protected static Random newRandom() {
+    return new Random(SEED);
+  }
+  
+  private static class TestDescriptor {
+    Class<? extends Test> test;
+    String param;
+    TestDescriptor(Class<? extends Test> test, String param) {
+      this.test = test;
+      this.param = param;
+    }
+    void add(List<TestDescriptor> typeList) {
+      ALL_TESTS.put(param, this);
+      typeList.add(this);
+    }
+  }
+  
+  private static final List<TestDescriptor> BASIC = new ArrayList<TestDescriptor>();
+  private static final List<TestDescriptor> RECORD = new ArrayList<TestDescriptor>();
+  private static final List<TestDescriptor> GENERIC = new ArrayList<TestDescriptor>();
+  private static final List<TestDescriptor> GENERIC_ONETIME = new ArrayList<TestDescriptor>();
+  private static final List<TestDescriptor> SPECIFIC = new ArrayList<TestDescriptor>();
+  private static final List<TestDescriptor> REFLECT = new ArrayList<TestDescriptor>();
+  private static final LinkedHashMap<String, TestDescriptor> ALL_TESTS;
+  private static final LinkedHashMap<String, List<TestDescriptor>> BATCHES;
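+  // ALL_TESTS maps each individual flag (e.g. "-i") to its test descriptor,
+  // while BATCHES maps a batch flag (e.g. "-basic") to a whole group of tests.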
+  static {
+    ALL_TESTS = new LinkedHashMap<String, TestDescriptor>();
+    BATCHES = new LinkedHashMap<String, List<TestDescriptor>>();
+    BATCHES.put("-basic", BASIC);
+    new TestDescriptor(IntTest.class, "-i").add(BASIC);
+    new TestDescriptor(SmallLongTest.class, "-ls").add(BASIC);
+    new TestDescriptor(LongTest.class, "-l").add(BASIC);
+    new TestDescriptor(FloatTest.class, "-f").add(BASIC);
+    new TestDescriptor(DoubleTest.class, "-d").add(BASIC);
+    new TestDescriptor(BoolTest.class, "-b").add(BASIC);
+    new TestDescriptor(BytesTest.class, "-by").add(BASIC);
+    new TestDescriptor(StringTest.class, "-s").add(BASIC);
+    new TestDescriptor(ArrayTest.class, "-a").add(BASIC);
+    new TestDescriptor(MapTest.class, "-m").add(BASIC);
+    BATCHES.put("-record", RECORD);
+    new TestDescriptor(RecordTest.class, "-R").add(RECORD);
+    new TestDescriptor(ValidatingRecord.class, "-Rv").add(RECORD);
+    new TestDescriptor(ResolvingRecord.class, "-Rr").add(RECORD);
+    new TestDescriptor(RecordWithDefault.class, "-Rd").add(RECORD);
+    new TestDescriptor(RecordWithOutOfOrder.class, "-Ro").add(RECORD);
+    new TestDescriptor(RecordWithPromotion.class, "-Rp").add(RECORD);
+    BATCHES.put("-generic", GENERIC);
+    new TestDescriptor(GenericTest.class, "-G").add(GENERIC);
+    new TestDescriptor(GenericStrings.class, "-Gs").add(GENERIC);
+    new TestDescriptor(GenericNested.class, "-Gn").add(GENERIC);
+    new TestDescriptor(GenericNestedFake.class, "-Gf").add(GENERIC);
+    new TestDescriptor(GenericWithDefault.class, "-Gd").add(GENERIC);
+    new TestDescriptor(GenericWithOutOfOrder.class, "-Go").add(GENERIC);
+    new TestDescriptor(GenericWithPromotion.class, "-Gp").add(GENERIC);
+    BATCHES.put("-generic-onetime", GENERIC_ONETIME);
+    new TestDescriptor(GenericOneTimeDecoderUse.class, "-Gotd").add(GENERIC_ONETIME);
+    new TestDescriptor(GenericOneTimeReaderUse.class, "-Gotr").add(GENERIC_ONETIME);
+    new TestDescriptor(GenericOneTimeUse.class, "-Got").add(GENERIC_ONETIME);
+    BATCHES.put("-specific", SPECIFIC);
+    new TestDescriptor(FooBarSpecificRecordTest.class, "-Sf").add(SPECIFIC);
+    BATCHES.put("-reflect", REFLECT);
+    new TestDescriptor(ReflectRecordTest.class, "-REFr").add(REFLECT);
+    new TestDescriptor(ReflectBigRecordTest.class, "-REFbr").add(REFLECT);
+    new TestDescriptor(ReflectFloatTest.class, "-REFf").add(REFLECT);
+    new TestDescriptor(ReflectDoubleTest.class, "-REFd").add(REFLECT);
+    new TestDescriptor(ReflectIntArrayTest.class, "-REFia").add(REFLECT);
+    new TestDescriptor(ReflectLongArrayTest.class, "-REFla").add(REFLECT);
+    new TestDescriptor(ReflectDoubleArrayTest.class, "-REFda").add(REFLECT);
+    new TestDescriptor(ReflectFloatArrayTest.class, "-REFfa").add(REFLECT);
+    new TestDescriptor(ReflectNestedFloatArrayTest.class, "-REFnf").add(REFLECT);
+    new TestDescriptor(ReflectNestedObjectArrayTest.class, "-REFno").add(REFLECT);
+    new TestDescriptor(ReflectNestedLargeFloatArrayTest.class, "-REFnlf").add(REFLECT);
+    new TestDescriptor(ReflectNestedLargeFloatArrayBlockedTest.class, "-REFnlfb").add(REFLECT);
+  }
+  
+  private static void usage() {
+    StringBuilder usage = new StringBuilder("Usage: Perf { -nowrite | -noread | ");
+    StringBuilder details = new StringBuilder();
+    details.append(" -nowrite   (do not execute write tests)\n");
+    details.append(" -noread   (do not execute write tests)\n");
+    for (Map.Entry<String, List<TestDescriptor>> entry : BATCHES.entrySet()) {
+      List<TestDescriptor> lt = entry.getValue();
+      String param = entry.getKey();
+      String paramName = param.substring(1);
+      usage.append(param).append(" | ");
+      details.append(" ").append(param).append("   (executes all ").append(paramName).append(" tests):\n");
+      for (TestDescriptor t : lt) {
+        usage.append(t.param).append(" | ");
+        details.append("      ").append(t.param).append("  (").append(t.test.getSimpleName()).append(")\n");
+      }
+    }
+    usage.setLength(usage.length() - 2);
+    usage.append("}\n");
+    System.out.println(usage.toString());
+    System.out.print(details.toString());
+  }
+  
+  public static void main(String[] args) throws Exception {
+    List<Test> tests = new ArrayList<Test>();
+    boolean writeTests = true;
+    boolean readTests = true;
+    for (String a : args) {
+      TestDescriptor t = ALL_TESTS.get(a);
+      if (null != t) {
+        tests.add(t.test.newInstance());
+        continue;
+      }
+      List<TestDescriptor> lt = BATCHES.get(a);
+      if (null != lt) {
+        for (TestDescriptor td : lt) {
+          tests.add(td.test.newInstance());
+        }
+        continue;
+      }
+      if ("-nowrite".equals(a)) {
+        writeTests = false;
+        continue;
+      }
+      if ("-noread".equals(a)) {
+        readTests = false;
+        continue;
+      }
+      usage();
+      System.exit(1);
+    }
+    if (tests.isEmpty()) {
+      for (Map.Entry<String, TestDescriptor> entry : ALL_TESTS.entrySet()) {
+        TestDescriptor t = entry.getValue();
+        Test test = t.test.newInstance();
+        tests.add(test);
+      }
+    }
+    System.out.println("Executing tests: \n" + tests +  "\n readTests:" +
+        readTests + "\n writeTests:" + writeTests + "\n cycles=" + CYCLES);
+    
+    for (int k = 0; k < tests.size(); k++) {
+      Test t = tests.get(k);
+      try {
+        // get everything to compile once
+        t.init();
+        if (t.isReadTest()) {
+          t.readTest();
+        }
+        if (t.isWriteTest()) {
+          t.writeTest();
+        }
+        t.reset();
+      } catch (Exception e) {
+        System.out.println("Failed to execute test: " + t.getClass().getSimpleName());
+        throw e;
+      }
+    }
+    
+    printHeader();
+
+    for (int k = 0; k < tests.size(); k++) {
+      Test t = tests.get(k);
+      // warmup JVM
+      t.init();
+      if (t.isReadTest() && readTests) {
+        for (int i = 0; i < t.cycles/2; i++) {
+          t.readTest();
+        }
+      }
+      if (t.isWriteTest() && writeTests) {
+        for (int i = 0; i < t.cycles/2; i++) {
+          t.writeTest();
+        }
+      }
+      t.reset();
+      // test
+      long s = 0;
+      System.gc();
+      t.init();
+      if (t.isReadTest() && readTests) {
+        for (int i = 0; i < t.cycles; i++) {
+          s += t.readTest();
+        }
+        printResult(s, t, t.name + "Read");
+      }
+      s = 0;
+      if (t.isWriteTest() && writeTests) {
+        for (int i = 0; i < t.cycles; i++) {
+          s += t.writeTest();
+        }
+        printResult(s, t, t.name + "Write");
+      }
+      t.reset();
+    }
+  }
+  
+  private static final void printHeader() {
+    String header = String.format(
+        "%60s     time    M entries/sec   M bytes/sec  bytes/cycle",
+        "test name");
+    System.out.println(header);
+  }
+  
+  private static final void printResult(long s, Test t, String name) {
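+    // s arrives in nanoseconds; after dividing by 1000 it is in microseconds,
+    // so entries/s and bytes/s print directly as millions per second, and
+    // s/1000 prints milliseconds.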
+    s /= 1000;
+    double entries = (t.cycles * (double) t.count);
+    double bytes = t.cycles * (double) t.encodedSize;
+    StringBuilder result = new StringBuilder();
+    result.append(String.format("%42s: %6d ms  ", name, (s/1000)));
+    result.append(String.format("%10.3f   %11.3f   %11d", 
+        (entries / s), (bytes/ s),  t.encodedSize));
+    System.out.println(result.toString());
+  }
+  
+  private abstract static class Test {
+
+    /**
+     * Name of the test.
+     */
+    public final String name;
+    /** Number of entries processed per cycle. */
+    public final int count;
+    /** Number of cycles to run. */
+    public final int cycles;
+    /** Size in bytes of one encoded batch of source data. */
+    public long encodedSize = 0;
+    protected boolean isReadTest = true;
+    protected boolean isWriteTest = true;
+    static DecoderFactory decoder_factory = new DecoderFactory();
+    static EncoderFactory encoder_factory = new EncoderFactory();
+    
+    public Test(String name, int cycles, int count) {
+      this.name = name;
+      this.cycles = cycles;
+      this.count = count;
+    }
+
+    /**
+     * Reads data from a Decoder and returns the time taken in nanoseconds.
+     */
+    abstract long readTest() throws IOException;
+    
+    /**
+     * Writes data to an Encoder and returns the time taken in nanoseconds.
+     */
+    abstract long writeTest() throws IOException;
+    
+    final boolean isWriteTest() {
+      return isWriteTest;
+    }
+    
+    final boolean isReadTest() {
+      return isReadTest;
+    }
+ 
+    /** Initializes data for the read and write tests. */
+    abstract void init() throws IOException;
+
+    /** Clears generated data arrays and other large objects created during initialization. */
+    abstract void reset();
+    
+    @Override
+    public String toString() {
+      return this.getClass().getSimpleName();
+    }
+       
+  }
+  
+  /** A basic test writes data for a simple schema directly to an encoder, or
+   * reads it back from a pre-encoded byte array.  It does not use
+   * GenericDatumReader or any higher-level constructs, just manual
+   * serialization.
+   */
+  private static abstract class BasicTest extends Test {
+    protected final Schema schema;
+    protected byte[] data;
+    BasicTest(String name, String json) throws IOException {
+      this(name, json, 1);
+    }
+    BasicTest(String name, String json, int factor) throws IOException {
+      super(name, CYCLES, COUNT/factor);
+      this.schema = new Schema.Parser().parse(json);
+    }
+
+    @Override
+    public final long readTest() throws IOException {
+      long t = System.nanoTime();
+      Decoder d = getDecoder();
+      readInternal(d);
+      return (System.nanoTime() - t);
+    }
+    
+    @Override
+    public final long writeTest() throws IOException {
+      long t = System.nanoTime();
+      Encoder e = getEncoder();
+      writeInternal(e);
+      e.flush();
+      return (System.nanoTime() - t);
+    }
+    
+    protected Decoder getDecoder() throws IOException {
+      return newDecoder();
+    }
+    
+    private Encoder getEncoder() throws IOException {
+      return newEncoder(getOutputStream());
+    }
+
+    protected Decoder newDecoder() {
+      return decoder_factory.binaryDecoder(data, null);
+    }
+    
+    protected Encoder newEncoder(ByteArrayOutputStream out) throws IOException {
+      Encoder e = encoder_factory.binaryEncoder(out, null);
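+//    Swap in one of the alternatives below to benchmark other encoder
+//    implementations: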
+//    Encoder e = encoder_factory.directBinaryEncoder(out, null);
+//    Encoder e = encoder_factory.blockingBinaryEncoder(out, null);
+//    Encoder e = new LegacyBinaryEncoder(out);
+      return e;
+    }
+
+    private ByteArrayOutputStream getOutputStream() {
+      return new ByteArrayOutputStream((int)(encodedSize > 0 ? encodedSize : count));
+    }
+    
+    @Override
+    void init() throws IOException {
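+      // Generate the source data and encode it once up front: read tests
+      // decode this buffer, and encodedSize feeds the bytes/sec and
+      // bytes/cycle columns of the report.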
+      genSourceData();
+      ByteArrayOutputStream baos = getOutputStream();
+      Encoder e = newEncoder(baos);
+      writeInternal(e);
+      e.flush();
+      data = baos.toByteArray();
+      encodedSize = data.length;
+      //System.out.println(this.getClass().getSimpleName() + " encodedSize=" + encodedSize);
+    }
+
+    abstract void genSourceData();
+    abstract void readInternal(Decoder d) throws IOException;
+    abstract void writeInternal(Encoder e) throws IOException;
+  }
+  
+  static class IntTest extends BasicTest {
+    protected int[] sourceData = null;
+    public IntTest() throws IOException {
+      this("Int", "{ \"type\": \"int\"} ");
+    }
+
+    private IntTest(String name, String schema) throws IOException {
+      super(name, schema);
+    }
+
+    @Override
+    void genSourceData() {
+      Random r = newRandom();
+      sourceData = new int[count];
+      for (int i = 0; i < sourceData.length; i+=4) {
+        sourceData[i] = r.nextInt(50); // fits in 1 byte
+        sourceData[i+1] = r.nextInt(5000); // fits in 2 bytes
+        sourceData[i+2] = r.nextInt(500000); // fits in 3 bytes
+        sourceData[i+3] = r.nextInt(150000000); // most in 4, some in 5
+      }
+    }
+   
+    @Override
+    void readInternal(Decoder d) throws IOException {
+      for (int i = 0; i < count/4; i++) {
+        d.readInt();
+        d.readInt();
+        d.readInt();
+        d.readInt();
+      }
+    }
+
+    @Override
+    void writeInternal(Encoder e) throws IOException {
+      for (int i = 0; i < sourceData.length; i+=4) {
+        e.writeInt(sourceData[i]);
+        e.writeInt(sourceData[i+1]);
+        e.writeInt(sourceData[i+2]);
+        e.writeInt(sourceData[i+3]);
+      }
+    }
+  
+    @Override
+    void reset() {
+      sourceData = null;
+      data = null;
+    }
+  }
+
+  // This is the same data as IntTest, but read and written using readLong/writeLong.
+  static class SmallLongTest extends IntTest {
+    public SmallLongTest() throws IOException {
+      super("SmallLong", "{ \"type\": \"long\"} ");
+    }
+
+    @Override
+    void readInternal(Decoder d) throws IOException {
+      for (int i = 0; i < count/4; i++) {
+        d.readLong();
+        d.readLong();
+        d.readLong();
+        d.readLong();
+      }
+    }
+
+    @Override
+    void writeInternal(Encoder e) throws IOException {
+      for (int i = 0; i < sourceData.length; i+=4) {
+        e.writeLong(sourceData[i]);
+        e.writeLong(sourceData[i+1]);
+        e.writeLong(sourceData[i+2]);
+        e.writeLong(sourceData[i+3]);
+      }
+    }
+  }
+ 
+  // this tests reading Longs that are sometimes very large
+  static class LongTest extends BasicTest {
+    private long[] sourceData = null;
+    public LongTest() throws IOException {
+      super("Long", "{ \"type\": \"long\"} ");
+    }
+    
+    @Override
+    void genSourceData() {
+      Random r = newRandom();
+      sourceData = new long[count];
+      for (int i = 0; i < sourceData.length; i+=4) {
+        sourceData[i] = r.nextLong() % 0x7FL; // half fit in 1, half in 2 
+        sourceData[i+1] = r.nextLong() % 0x1FFFFFL; // half fit in <=3, half in 4
+        sourceData[i+2] = r.nextLong() % 0x3FFFFFFFFL; // half in <=5, half in 6
+        sourceData[i+3] = r.nextLong() % 0x1FFFFFFFFFFFFL; // half in <=8, half in 9 
+      }
+      // make the last 16 values full-size longs
+      for (int i = sourceData.length - 16; i < sourceData.length; i ++) {
+        sourceData[i] = r.nextLong();
+      }
+    }
+   
+    @Override
+    void readInternal(Decoder d) throws IOException {
+      for (int i = 0; i < count/4; i++) {
+        d.readLong();
+        d.readLong();
+        d.readLong();
+        d.readLong();
+      }
+    }
+
+    @Override
+    void writeInternal(Encoder e) throws IOException {
+      for (int i = 0; i < sourceData.length;i+=4) {
+        e.writeLong(sourceData[i]);
+        e.writeLong(sourceData[i+1]);
+        e.writeLong(sourceData[i+2]);
+        e.writeLong(sourceData[i+3]);
+      }
+    }
+  
+    @Override
+    void reset() {
+      sourceData = null;
+      data = null;
+    }
+  }
+  
+  static class FloatTest extends BasicTest {
+    float[] sourceData = null;
+    public FloatTest() throws IOException {
+      this("Float", "{ \"type\": \"float\"} ");
+    }
+    public FloatTest(String name, String schema) throws IOException {
+      super(name, schema);
+    }
+
+    @Override
+    void genSourceData() {
+      Random r = newRandom();
+      sourceData = new float[count];
+      for (int i = 0; i < sourceData.length;) {
+        sourceData[i++] = r.nextFloat(); 
+      }
+    }
+   
+    @Override
+    void readInternal(Decoder d) throws IOException {
+      for (int i = 0; i < count; i+=4) {
+        d.readFloat();
+        d.readFloat();
+        d.readFloat();
+        d.readFloat();
+      }
+    }
+
+    @Override
+    void writeInternal(Encoder e) throws IOException {
+      for (int i = 0; i < sourceData.length;i+=4) {
+        e.writeFloat(sourceData[i]);
+        e.writeFloat(sourceData[i+1]);
+        e.writeFloat(sourceData[i+2]);
+        e.writeFloat(sourceData[i+3]);
+      }
+    }
+  
+    @Override
+    void reset() {
+      sourceData = null;
+      data = null;
+    }
+  }
+
+  static class DoubleTest extends BasicTest {
+    double[] sourceData = null;
+    public DoubleTest() throws IOException {
+      super("Double", "{ \"type\": \"double\"} ");
+    }
+    
+    @Override
+    void genSourceData() {
+      Random r = newRandom();
+      sourceData = new double[count];
+      for (int i = 0; i < sourceData.length;) {
+        sourceData[i++] = r.nextDouble(); 
+      }
+    }
+   
+    @Override
+    void readInternal(Decoder d) throws IOException {
+      for (int i = 0; i < count; i+=4) {
+        d.readDouble();
+        d.readDouble();
+        d.readDouble();
+        d.readDouble();
+      }
+    }
+
+    @Override
+    void writeInternal(Encoder e) throws IOException {
+      for (int i = 0; i < sourceData.length;i+=4) {
+        e.writeDouble(sourceData[i]);
+        e.writeDouble(sourceData[i+1]);
+        e.writeDouble(sourceData[i+2]);
+        e.writeDouble(sourceData[i+3]);
+      }
+    }
+  
+    @Override
+    void reset() {
+      sourceData = null;
+      data = null;
+    }
+  }
+  
+  static class BoolTest extends BasicTest {
+    boolean[] sourceData = null;
+    public BoolTest() throws IOException {
+      super("Boolean", "{ \"type\": \"boolean\"} ");
+    }
+    
+    @Override
+    void genSourceData() {
+      Random r = newRandom();
+      sourceData = new boolean[count];
+      for (int i = 0; i < sourceData.length;) {
+        sourceData[i++] = r.nextBoolean(); 
+      }
+    }
+   
+    @Override
+    void readInternal(Decoder d) throws IOException {
+      for (int i = 0; i < count/4; i++) {
+        d.readBoolean();
+        d.readBoolean();
+        d.readBoolean();
+        d.readBoolean();
+      }
+    }
+
+    @Override
+    void writeInternal(Encoder e) throws IOException {
+      for (int i = 0; i < sourceData.length;i+=4) {
+        e.writeBoolean(sourceData[i]);
+        e.writeBoolean(sourceData[i+1]);
+        e.writeBoolean(sourceData[i+2]);
+        e.writeBoolean(sourceData[i+3]);
+      }
+    }
+  
+    @Override
+    void reset() {
+      sourceData = null;
+      data = null;
+    }
+  }
+  
+  static class BytesTest extends BasicTest {
+    byte[][] sourceData = null;
+    public BytesTest() throws IOException {
+      super("Bytes", "{ \"type\": \"bytes\"} ", 5);
+    }
+    
+    @Override
+    void genSourceData() {
+      Random r = newRandom();
+      sourceData = new byte[count][];
+      for (int i = 0; i < sourceData.length;) {
+        byte[] data = new byte[r.nextInt(70)];
+        r.nextBytes(data);
+        sourceData[i++] = data; 
+      }
+    }
+   
+    @Override
+    void readInternal(Decoder d) throws IOException {
+      ByteBuffer bb = ByteBuffer.allocate(70);
+      for (int i = 0; i < count/4; i++) {
+        d.readBytes(bb);
+        d.readBytes(bb);
+        d.readBytes(bb);
+        d.readBytes(bb);
+      }
+    }
+
+    @Override
+    void writeInternal(Encoder e) throws IOException {
+      for (int i = 0; i < sourceData.length;i+=4) {
+        e.writeBytes(sourceData[i]);
+        e.writeBytes(sourceData[i+1]);
+        e.writeBytes(sourceData[i+2]);
+        e.writeBytes(sourceData[i+3]);
+      }
+    }
+  
+    @Override
+    void reset() {
+      sourceData = null;
+      data = null;
+    }
+  }
+  
+  private static String randomString(Random r) {
+    char[] data = new char[r.nextInt(70)];
+    for (int j = 0; j < data.length; j++) {
+      data[j] = (char)('a' + r.nextInt('z'-'a'));
+    }
+    return new String(data);
+  }
+
+  static class StringTest extends BasicTest {
+    String[] sourceData = null;
+    public StringTest() throws IOException {
+      super("String", "{ \"type\": \"string\"} ", 5);
+    }
+    
+    @Override
+    void genSourceData() {
+      Random r = newRandom();
+      sourceData = new String[count];
+      for (int i = 0; i < sourceData.length;)
+        sourceData[i++] = randomString(r);
+    }
+   
+    @Override
+    void readInternal(Decoder d) throws IOException {
+      Utf8 utf = new Utf8();
+      for (int i = 0; i < count/4; i++) {
+        d.readString(utf).toString();
+        d.readString(utf).toString();
+        d.readString(utf).toString();
+        d.readString(utf).toString();
+      }
+    }
+
+    @Override
+    void writeInternal(Encoder e) throws IOException {
+      for (int i = 0; i < sourceData.length;i+=4) {
+        e.writeString(sourceData[i]);
+        e.writeString(sourceData[i+1]);
+        e.writeString(sourceData[i+2]);
+        e.writeString(sourceData[i+3]);
+      }
+    }
+  
+    @Override
+    void reset() {
+      sourceData = null;
+      data = null;
+    }
+  }
+  
+  static class ArrayTest extends FloatTest {
+    public ArrayTest() throws IOException {
+      super("Array",
+          "{ \"type\": \"array\", \"items\": " +
+          " { \"type\": \"record\", \"name\":\"Foo\", \"fields\": " +
+          "  [{\"name\":\"bar\", \"type\":" +
+          "    {\"type\": \"array\", \"items\": " +
+          "     { \"type\": \"record\", \"name\":\"Vals\", \"fields\": [" +
+          "      {\"name\":\"f1\", \"type\":\"float\"}," +
+          "      {\"name\":\"f2\", \"type\":\"float\"}," +
+          "      {\"name\":\"f3\", \"type\":\"float\"}," +
+          "      {\"name\":\"f4\", \"type\":\"float\"}]" +
+          "     }" +
+          "    }" +
+          "   }]}}");
+    }
+   
+    @Override
+    void readInternal(Decoder d) throws IOException {
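+      // Mirrors writeInternal: an outer array holding a single Foo record
+      // whose "bar" field is an inner array of Vals records, four floats each.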
+      d.readArrayStart();
+      for (long i = d.readArrayStart(); i != 0; i = d.arrayNext()) {
+        for (long j = 0; j < i; j++) {
+          d.readFloat();
+          d.readFloat();
+          d.readFloat();
+          d.readFloat();
+        }
+      }
+      d.arrayNext();
+    }
+
+    @Override
+    void writeInternal(Encoder e) throws IOException {
+      int items = sourceData.length/4;
+      e.writeArrayStart();
+      e.setItemCount(1);
+      e.startItem();
+      e.writeArrayStart();
+      e.setItemCount(items);
+      for (int i = 0; i < sourceData.length;i+=4) {
+        e.startItem();
+        e.writeFloat(sourceData[i]);
+        e.writeFloat(sourceData[i+1]);
+        e.writeFloat(sourceData[i+2]);
+        e.writeFloat(sourceData[i+3]);
+      }
+      e.writeArrayEnd();
+      e.writeArrayEnd();
+    }
+  }
+  
+  static class MapTest extends FloatTest {
+    public MapTest() throws IOException {
+      super("Map", "{ \"type\": \"map\", \"values\": " +
+          "  { \"type\": \"record\", \"name\":\"Vals\", \"fields\": [" +
+          "   {\"name\":\"f1\", \"type\":\"float\"}," +
+          "   {\"name\":\"f2\", \"type\":\"float\"}," +
+          "   {\"name\":\"f3\", \"type\":\"float\"}," +
+          "   {\"name\":\"f4\", \"type\":\"float\"}]" +
+          "  }} ");
+    }
+   
+    @Override
+    void readInternal(Decoder d) throws IOException {
+      Utf8 key = new Utf8();
+      for (long i = d.readMapStart(); i != 0; i = d.mapNext()) {
+        for (long j = 0; j < i; j++) {
+          key = d.readString(key);
+          d.readFloat();
+          d.readFloat();
+          d.readFloat();
+          d.readFloat();
+        }
+      }
+    }
+
+    @Override
+    void writeInternal(Encoder e) throws IOException {
+      int items = sourceData.length/4;
+      e.writeMapStart();
+      e.setItemCount(items);
+      Utf8 foo = new Utf8("foo");
+      for (int i = 0; i < sourceData.length;i+=4) {
+        e.startItem();
+        e.writeString(foo);
+        e.writeFloat(sourceData[i]);
+        e.writeFloat(sourceData[i+1]);
+        e.writeFloat(sourceData[i+2]);
+        e.writeFloat(sourceData[i+3]);
+      }
+      e.writeMapEnd();
+    }
+  }
+  
+  private static final String RECORD_SCHEMA = 
+    "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
+    + "{ \"name\": \"f1\", \"type\": \"double\" },\n"
+    + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
+    + "{ \"name\": \"f3\", \"type\": \"double\" },\n"
+    + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
+    + "{ \"name\": \"f5\", \"type\": \"int\" },\n"
+    + "{ \"name\": \"f6\", \"type\": \"int\" }\n"
+    + "] }";
+  
+  private static final String NESTED_RECORD_SCHEMA =
+    "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
+    + "{ \"name\": \"f1\", \"type\": \n" +
+        "{ \"type\": \"record\", \"name\": \"D\", \"fields\": [\n" +
+          "{\"name\": \"dbl\", \"type\": \"double\" }]\n" +
+        "} },\n"
+    + "{ \"name\": \"f2\", \"type\": \"D\" },\n"
+    + "{ \"name\": \"f3\", \"type\": \"D\" },\n"
+    + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
+    + "{ \"name\": \"f5\", \"type\": \"int\" },\n"
+    + "{ \"name\": \"f6\", \"type\": \"int\" }\n"
+    + "] }";
+  
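+  /** Plain Java holder mirroring RECORD_SCHEMA; RecordTest reads and writes
+   *  its six fields by hand. */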
+  private static class Rec {
+    double f1;
+    double f2;
+    double f3;
+    int f4;
+    int f5;
+    int f6;
+    Rec() { }
+    Rec(Random r) {
+      f1 = r.nextDouble();
+      f2 = r.nextDouble();
+      f3 = r.nextDouble();
+      f4 = r.nextInt();
+      f5 = r.nextInt();
+      f6 = r.nextInt();
+    }
+  }
+
+  static class RecordTest extends BasicTest {
+    Rec[] sourceData = null;
+    public RecordTest() throws IOException {
+      this("Record");
+    }
+    public RecordTest(String name) throws IOException {
+      super(name, RECORD_SCHEMA, 6);
+    }
+    @Override
+    void genSourceData() {
+      Random r = newRandom();
+      sourceData = new Rec[count];
+      for (int i = 0; i < sourceData.length; i++) {
+        sourceData[i] = new Rec(r); 
+      }
+    }
+    @Override
+    void readInternal(Decoder d) throws IOException {
+      for (int i = 0; i < count; i++) {
+        d.readDouble();
+        d.readDouble();
+        d.readDouble();
+        d.readInt();
+        d.readInt();
+        d.readInt();
+      }
+    }
+    @Override
+    void writeInternal(Encoder e) throws IOException {
+      for (int i = 0; i < sourceData.length; i++) {
+        Rec r = sourceData[i];
+        e.writeDouble(r.f1);
+        e.writeDouble(r.f2);
+        e.writeDouble(r.f3);
+        e.writeInt(r.f4);
+        e.writeInt(r.f5);
+        e.writeInt(r.f6);
+      }
+    }
+    @Override
+    void reset() {
+      sourceData = null;
+      data = null;
+    }
+  }
+  
+  static class ValidatingRecord extends RecordTest {
+    ValidatingRecord() throws IOException {
+      super("ValidatingRecord");
+    }
+    @Override
+    protected Decoder getDecoder() throws IOException {
+      return new ValidatingDecoder(schema, super.getDecoder());
+    }
+    @Override
+    protected Encoder newEncoder(ByteArrayOutputStream out) throws IOException {
+      return encoder_factory.validatingEncoder(schema, super.newEncoder(out));  
+    }
+  }
+  
+  static class ResolvingRecord extends RecordTest {
+    public ResolvingRecord() throws IOException {
+      super("ResolvingRecord");
+      isWriteTest = false;
+    }
+    @Override
+    protected Decoder getDecoder() throws IOException {
+      return new ResolvingDecoder(schema, schema, super.getDecoder());
+    }
+  }
+
+  private static final String RECORD_SCHEMA_WITH_DEFAULT =
+    "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
+    + "{ \"name\": \"f1\", \"type\": \"double\" },\n"
+    + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
+    + "{ \"name\": \"f3\", \"type\": \"double\" },\n"
+    + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
+    + "{ \"name\": \"f5\", \"type\": \"int\" },\n"
+    + "{ \"name\": \"f6\", \"type\": \"int\" },\n"
+    + "{ \"name\": \"f7\", \"type\": \"string\", "
+      + "\"default\": \"undefined\" },\n"
+    + "{ \"name\": \"f8\", \"type\": \"string\","
+      + "\"default\": \"undefined\" }\n"
+    + "] }";
+  
+  private static final String RECORD_SCHEMA_WITH_OUT_OF_ORDER =
+    "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
+    + "{ \"name\": \"f1\", \"type\": \"double\" },\n"
+    + "{ \"name\": \"f3\", \"type\": \"double\" },\n"
+    + "{ \"name\": \"f5\", \"type\": \"int\" },\n"
+    + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
+    + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
+    + "{ \"name\": \"f6\", \"type\": \"int\" }\n"
+    + "] }";
+
+  private static final String RECORD_SCHEMA_WITH_PROMOTION =
+    "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
+    + "{ \"name\": \"f1\", \"type\": \"double\" },\n"
+    + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
+    + "{ \"name\": \"f3\", \"type\": \"double\" },\n"
+    + "{ \"name\": \"f4\", \"type\": \"long\" },\n"
+    + "{ \"name\": \"f5\", \"type\": \"long\" },\n"
+    + "{ \"name\": \"f6\", \"type\": \"long\" }\n"
+    + "] }";
+
+
+  /**
+   * Tests the performance of schema resolution when the reader schema
+   * introduces fields with default values.
+   */
+  static class RecordWithDefault extends RecordTest {
+    private final Schema readerSchema;
+    public RecordWithDefault() throws IOException {
+      super("RecordWithDefault");
+      readerSchema = new Schema.Parser().parse(RECORD_SCHEMA_WITH_DEFAULT);
+      isWriteTest = false;
+    }
+    @Override
+    protected Decoder getDecoder() throws IOException {
+      return new ResolvingDecoder(schema, readerSchema, super.getDecoder());
+    }
+    @Override
+    protected void readInternal(Decoder d) throws IOException {
+      ResolvingDecoder r = (ResolvingDecoder) d;
+      Field[] ff = r.readFieldOrder();
+      for (int i = 0; i < count; i++) {
+        for (int j = 0; j < ff.length; j++) {
+          Field f = ff[j];
+          switch (f.pos()) {
+          case 0:
+          case 1:
+          case 2:
+            r.readDouble();
+            break;
+          case 3:
+          case 4:
+          case 5:
+            r.readInt();
+            break;
+          case 6:
+          case 7:
+            r.readString(null);
+            break;
+          }
+        }
+      }
+    }
+  }
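+
+  // Editor's sketch (not part of upstream): with RECORD_SCHEMA as the writer
+  // schema and RECORD_SCHEMA_WITH_DEFAULT as the reader schema, fields f7 and
+  // f8 are never on the wire; the ResolvingDecoder synthesizes them from
+  // their declared "undefined" defaults. A hypothetical per-record check:
+  static void checkDefaultsSketch(ResolvingDecoder r) throws IOException {
+    for (Field f : r.readFieldOrder()) {
+      switch (f.pos()) {
+      case 0: case 1: case 2:
+        r.readDouble();
+        break;
+      case 3: case 4: case 5:
+        r.readInt();
+        break;
+      default:
+        if (!"undefined".equals(r.readString(null).toString())) {
+          throw new AssertionError("expected default for " + f.name());
+        }
+      }
+    }
+  }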
+  
+  /**
+   * Tests the performance of resolving a change in field order.
+   */
+  static class RecordWithOutOfOrder extends RecordTest {
+    private final Schema readerSchema;
+    public RecordWithOutOfOrder() throws IOException {
+      super("RecordWithOutOfOrder");
+      readerSchema = new Schema.Parser().parse(RECORD_SCHEMA_WITH_OUT_OF_ORDER);
+      isWriteTest = false;
+    }
+    @Override
+    protected Decoder getDecoder() throws IOException {
+      return new ResolvingDecoder(schema, readerSchema, super.getDecoder());
+    }
+    @Override
+    protected void readInternal(Decoder d) throws IOException {
+      ResolvingDecoder r = (ResolvingDecoder) d;
+      Field[] ff = r.readFieldOrder();
+      for (int i = 0; i < count; i++) {
+        for (int j = 0; j < ff.length; j++) {
+          Field f = ff[j];
+          switch (f.pos()) {
+          case 0:
+          case 1:
+          case 3:
+            r.readDouble();
+            break;
+          case 2:
+          case 4:
+          case 5:
+            r.readInt();
+            break;
+          }
+        }
+      }
+    }
+  }
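+
+  // Editor's note (not in upstream): readFieldOrder() returns the reader's
+  // fields arranged in the writer's on-the-wire order, so the switch on
+  // f.pos() above maps each wire position back to the reader's field type.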
+  
+  /**
+   * Tests the performance of resolving a type promotion.
+   */
+  static class RecordWithPromotion extends RecordTest {
+    private final Schema readerSchema;
+    public RecordWithPromotion() throws IOException {
+      super("RecordWithPromotion");
+      readerSchema = new Schema.Parser().parse(RECORD_SCHEMA_WITH_PROMOTION);
+      isWriteTest = false;
+    }
+    @Override
+    protected Decoder getDecoder() throws IOException {
+      return new ResolvingDecoder(schema, readerSchema, super.getDecoder());
+    }
+    @Override
+    protected void readInternal(Decoder d) throws IOException {
+      ResolvingDecoder r = (ResolvingDecoder) d;
+      Field[] ff = r.readFieldOrder();
+      for (int i = 0; i < count; i++) {
+        for (int j = 0; j < ff.length; j++) {
+          Field f = ff[j];
+          switch (f.pos()) {
+          case 0:
+          case 1:
+          case 2:
+            r.readDouble();
+            break;
+          case 3:
+          case 4:
+          case 5:
+            r.readLong();
+            break;
+          }
+        }
+      }
+    }
+  }
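+
+  // Editor's note (not in upstream): Avro schema resolution promotes the
+  // writer's int to the reader's long, which is why readInternal above calls
+  // readLong() for f4-f6 even though the data was written with writeInt().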
+  
+  static class GenericTest extends BasicTest {
+    GenericRecord[] sourceData = null;
+    protected final GenericDatumReader<Object> reader;
+    public GenericTest() throws IOException {
+      this("Generic");
+    }
+    protected GenericTest(String name) throws IOException {
+      this(name, RECORD_SCHEMA);
+    }
+    protected GenericTest(String name, String writerSchema) throws IOException {
+      super(name, writerSchema, 12);
+      reader = newReader();
+    }
+    protected GenericDatumReader<Object> getReader() {
+      return reader;
+    }
+    protected GenericDatumReader<Object> newReader() {
+      return new GenericDatumReader<Object>(schema);
+    }
+    @Override
+    void genSourceData() {
+      Random r = newRandom();
+      sourceData = new GenericRecord[count];
+      for (int i = 0; i < sourceData.length; i++) {
+        GenericRecord rec = new GenericData.Record(schema);
+        rec.put(0, r.nextDouble());
+        rec.put(1, r.nextDouble());
+        rec.put(2, r.nextDouble());
+        rec.put(3, r.nextInt());
+        rec.put(4, r.nextInt());
+        rec.put(5, r.nextInt());
+        sourceData[i] = rec; 
+      }
+    }
+    @Override
+    void readInternal(Decoder d) throws IOException {
+      for (int i = 0; i < count; i++) {
+        getReader().read(null, d);
+      }
+    }
+    @Override
+    void writeInternal(Encoder e) throws IOException {
+      GenericDatumWriter<Object> writer = new GenericDatumWriter<Object>(schema);
+      for (int i = 0; i < sourceData.length; i++) {
+        GenericRecord rec = sourceData[i];
+        writer.write(rec, e);
+      }
+    }
+    @Override
+    void reset() {
+      sourceData = null;
+      data = null;
+    }
+  }
+  
+  private static final String GENERIC_STRINGS = 
+    "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
+    + "{ \"name\": \"f1\", \"type\": \"string\" },\n"
+    + "{ \"name\": \"f2\", \"type\": \"string\" },\n"
+    + "{ \"name\": \"f3\", \"type\": \"string\" }\n"
+    + "] }";
+  
+  static class GenericStrings extends GenericTest {
+    public GenericStrings() throws IOException {
+      super("GenericStrings", GENERIC_STRINGS);
+    }
+    @Override
+    void genSourceData() {
+      Random r = newRandom();
+      sourceData = new GenericRecord[count];
+      for (int i = 0; i < sourceData.length; i++) {
+        GenericRecord rec = new GenericData.Record(schema);
+        rec.put(0, randomString(r));
+        rec.put(1, randomString(r));
+        rec.put(2, randomString(r));
+        sourceData[i] = rec; 
+      }
+    }
+  }
+
+  static class GenericNested extends GenericTest {
+    public GenericNested() throws IOException {
+      super("GenericNested_", NESTED_RECORD_SCHEMA);
+    }
+    @Override
+    void genSourceData() {
+      sourceData = generateGenericNested(schema, count);
+    }
+  }
+  static GenericRecord[] generateGenericNested(Schema schema, int count) {
+    Random r = newRandom();
+    GenericRecord[] sourceData = new GenericRecord[count];
+    Schema doubleSchema = schema.getFields().get(0).schema();
+    for (int i = 0; i < sourceData.length; i++) {
+      GenericRecord rec = new GenericData.Record(schema);
+      GenericRecord inner;
+      inner = new GenericData.Record(doubleSchema);
+      inner.put(0, r.nextDouble());
+      rec.put(0, inner);
+      inner = new GenericData.Record(doubleSchema);
+      inner.put(0, r.nextDouble());
+      rec.put(1, inner);
+      inner = new GenericData.Record(doubleSchema);
+      inner.put(0, r.nextDouble());
+      rec.put(2, inner);
+      rec.put(3, r.nextInt());
+      rec.put(4, r.nextInt());
+      rec.put(5, r.nextInt());
+      sourceData[i] = rec; 
+    }
+    return sourceData;
+  }
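+
+  // Editor's note (not in upstream): GenericData.Record is populated here by
+  // field position via put(int, Object); put(String, Object) by field name
+  // would be equivalent but pays a name lookup on every call.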
+  
+  static class GenericNestedFake extends BasicTest {
+    // Reads and writes generic data, but without using
+    // GenericDatumReader or GenericDatumWriter.
+    GenericRecord[] sourceData = null;
+    public GenericNestedFake() throws IOException {
+      super("GenericNestedFake_", NESTED_RECORD_SCHEMA, 12);
+    }
+    @Override
+    void readInternal(Decoder d) throws IOException {
+      Schema doubleSchema = schema.getFields().get(0).schema();
+      for (int i = 0; i < count; i++) {
+        GenericRecord rec = new GenericData.Record(schema);
+        GenericRecord inner;
+        inner = new GenericData.Record(doubleSchema);
+        inner.put(0, d.readDouble());
+        rec.put(0, inner);
+        inner = new GenericData.Record(doubleSchema);
+        inner.put(0, d.readDouble());
+        rec.put(1, inner);
+        inner = new GenericData.Record(doubleSchema);
+        inner.put(0, d.readDouble());
+        rec.put(2, inner);
+        rec.put(3, d.readInt());
+        rec.put(4, d.readInt());
+        rec.put(5, d.readInt());
+      }
+    }
+    @Override
+    void writeInternal(Encoder e) throws IOException {
+      for (int i = 0; i < sourceData.length; i++) {
+        GenericRecord rec = sourceData[i];
+        GenericRecord inner;
+        inner = (GenericRecord)rec.get(0);
+        e.writeDouble((Double)inner.get(0));
+        inner = (GenericRecord)rec.get(1);
+        e.writeDouble((Double)inner.get(0));
+        inner = (GenericRecord)rec.get(2);
+        e.writeDouble((Double)inner.get(0));
+        e.writeInt((Integer)rec.get(3));
+        e.writeInt((Integer)rec.get(4));
+        e.writeInt((Integer)rec.get(5));
+      }
+    }
+    @Override
+    void genSourceData() {
+      sourceData = generateGenericNested(schema, count);
+    }
+    @Override
+    void reset() {
+      data = null;
+      sourceData = null;
+    }
+    
+  }
+
+  private static abstract class GenericResolving extends GenericTest {
+    protected GenericResolving(String name) throws IOException {
+      super(name);
+      isWriteTest = false;
+    }
+    @Override
+    protected GenericDatumReader<Object> newReader() {
+      return new GenericDatumReader<Object>(schema, getReaderSchema());
+    }
+    protected abstract Schema getReaderSchema();
+  }
+
+  static class GenericWithDefault extends GenericResolving {
+    GenericWithDefault() throws IOException {
+      super("GenericWithDefault_");
+    }
+    @Override
+    protected Schema getReaderSchema() {
+      return new Schema.Parser().parse(RECORD_SCHEMA_WITH_DEFAULT);
+    }
+  }
+
+  static class GenericWithOutOfOrder extends GenericResolving {
+    GenericWithOutOfOrder() throws IOException {
+      super("GenericWithOutOfOrder_");
+    }
+    @Override
+    protected Schema getReaderSchema() {
+      return new Schema.Parser().parse(RECORD_SCHEMA_WITH_OUT_OF_ORDER);
+    }
+  }
+
+  static class GenericWithPromotion extends GenericResolving {
+    GenericWithPromotion() throws IOException {
+      super("GenericWithPromotion_");
+    }
+    @Override
+    protected Schema getReaderSchema() {
+      return new Schema.Parser().parse(RECORD_SCHEMA_WITH_PROMOTION);
+    }
+  }
+  
+  static class GenericOneTimeDecoderUse extends GenericTest {
+    public GenericOneTimeDecoderUse() throws IOException {
+      super("GenericOneTimeDecoderUse_");
+      isWriteTest = false;
+    }
+    @Override
+    protected Decoder getDecoder() {
+      return newDecoder();
+    }
+  }
+
+  static class GenericOneTimeReaderUse extends GenericTest {
+    public GenericOneTimeReaderUse() throws IOException {
+      super("GenericOneTimeReaderUse_");
+      isWriteTest = false;
+    }
+    @Override
+    protected GenericDatumReader<Object> getReader() {
+      return newReader();
+    }
+  }
+
+  static class GenericOneTimeUse extends GenericTest {
+    public GenericOneTimeUse() throws IOException {
+      super("GenericOneTimeUse_");
+      isWriteTest = false;
+    }
+    @Override
+    protected GenericDatumReader<Object> getReader() {
+      return newReader();
+    }
+    @Override
+    protected Decoder getDecoder() {
+      return newDecoder();
+    }
+  }
+
+  static abstract class SpecificTest<T extends SpecificRecordBase> extends BasicTest {
+    protected final SpecificDatumReader<T> reader;
+    protected final SpecificDatumWriter<T> writer;
+    private Object[] sourceData;
+
+    protected SpecificTest(String name, String writerSchema) throws IOException {
+      super(name, writerSchema, 48);
+      reader = newReader();
+      writer = newWriter();
+    }
+    protected SpecificDatumReader<T> getReader() {
+      return reader;
+    }
+    protected SpecificDatumWriter<T> getWriter() {
+      return writer;
+    }
+    protected SpecificDatumReader<T> newReader() {
+      return new SpecificDatumReader<T>(schema);
+    }
+    protected SpecificDatumWriter<T> newWriter() {
+      return new SpecificDatumWriter<T>(schema);
+    }
+    @Override
+    void genSourceData() {
+      Random r = newRandom();
+      sourceData = new Object[count];
+      for (int i = 0; i < sourceData.length; i++) {
+        sourceData[i] = genSingleRecord(r);
+      }
+    }
+
+    protected abstract T genSingleRecord(Random r);
+
+    @Override
+    void readInternal(Decoder d) throws IOException {
+      for (int i = 0; i < count; i++) {
+        getReader().read(null, d);
+      }
+    }
+    @Override
+    void writeInternal(Encoder e) throws IOException {
+      for (int i = 0; i < sourceData.length; i++) {
+        @SuppressWarnings("unchecked")
+        T rec = (T) sourceData[i];
+        getWriter().write(rec, e);
+      }
+    }
+    @Override
+    void reset() {
+      sourceData = null;
+      data = null;
+    }
+  }
+
+  static class FooBarSpecificRecordTest extends
+      SpecificTest<FooBarSpecificRecord> {
+    public FooBarSpecificRecordTest() throws IOException {
+      super("FooBarSpecificRecordTest", FooBarSpecificRecord.SCHEMA$.toString());
+    }
+
+    @Override
+    protected FooBarSpecificRecord genSingleRecord(Random r) {
+      TypeEnum[] typeEnums = TypeEnum.values();
+      List<Integer> relatedIds = new ArrayList<Integer>(10);
+      for (int i = 0; i < 10; i++) {
+        relatedIds.add(r.nextInt());
+      }
+
+      try {
+        return FooBarSpecificRecord.newBuilder().setId(r.nextInt())
+            .setName(randomString(r))
+            .setNicknames(Arrays.asList(randomString(r), randomString(r)))
+            .setTypeEnum(typeEnums[r.nextInt(typeEnums.length)])
+            .setRelatedids(relatedIds).build();
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
+
+  static abstract class ReflectTest<T> extends BasicTest {
+    T[] sourceData = null;
+    ReflectDatumReader<T> reader;
+    ReflectDatumWriter<T> writer;
+    Class<T> clazz;
+
+    @SuppressWarnings("unchecked")
+    ReflectTest(String name, T sample, int factor) throws IOException {
+      super(name, ReflectData.get().getSchema(sample.getClass()).toString(), factor);
+      clazz = (Class<T>) sample.getClass();
+      reader = new ReflectDatumReader<T>(schema);
+      writer = new ReflectDatumWriter<T>(schema);
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    protected final void genSourceData() {
+      Random r = newRandom();
+      sourceData = (T[]) Array.newInstance(clazz, count);
+      for (int i = 0; i < sourceData.length; i++) {
+        sourceData[i] = createDatum(r);
+      }
+    }
+
+    protected abstract T createDatum(Random r);
+
+    @Override
+    protected final void readInternal(Decoder d) throws IOException {
+      for (int i = 0; i < count; i++) {
+        reader.read(null, d);
+      }
+    }
+
+    @Override
+    protected final void writeInternal(Encoder e) throws IOException {
+      for (int i = 0; i < sourceData.length; i++) {
+        writer.write(sourceData[i], e);
+      }
+    }
+
+    @Override
+    protected final void reset() {
+      sourceData = null;
+      data = null;
+    }
+  }
+
+  static class ReflectRecordTest extends ReflectTest<Rec> {
+    ReflectRecordTest() throws IOException {
+      super("ReflectRecord", new Rec(), 12);
+    }
+
+    @Override
+    protected Rec createDatum(Random r) {
+      return new Rec(r);
+    }
+  }
+  
+  static class ReflectFloatTest extends ReflectTest<float[]> {
+    ReflectFloatTest() throws IOException {
+      super("ReflectFloat", new float[0], COUNT);
+    }
+
+    @Override
+    protected float[] createDatum(Random r) {
+      return populateFloatArray(r, COUNT / count);
+    }
+  }
+
+  static class ReflectDoubleTest extends ReflectTest<double[]> {
+    ReflectDoubleTest() throws IOException {
+      super("ReflectDouble", new double[0], COUNT);
+    }
+
+    @Override
+    protected double[] createDatum(Random r) {
+      return populateDoubleArray(r, COUNT / count);
+    }
+  }
+
+  static class ReflectFloatArrayTest extends ReflectTest<float[]> {
+    ReflectFloatArrayTest() throws IOException {
+      super("ReflectFloatArray", new float[0], 10);
+    }
+
+    @Override
+    protected float[] createDatum(Random r) {
+      return populateFloatArray(r, false);
+    }
+  }
+
+  static class ReflectDoubleArrayTest extends ReflectTest<double[]> {
+    ReflectDoubleArrayTest() throws IOException {
+      super("ReflectDoubleArray", new double[0], 20);
+    }
+
+    @Override
+    protected double[] createDatum(Random r) {
+      return populateDoubleArray(r);
+    }
+  }
+  
+  static class ReflectIntArrayTest extends ReflectTest<int[]> {
+    ReflectIntArrayTest() throws IOException {
+      super("ReflectIntArray", new int[0], 12);
+    }
+
+    @Override
+    protected int[] createDatum(Random r) {
+      return populateIntArray(r);
+    }
+  }
+  
+  static class ReflectLongArrayTest extends ReflectTest<long[]> {
+    ReflectLongArrayTest() throws IOException {
+      super("ReflectLongArray", new long[0], 24);
+    }
+
+    @Override
+    protected long[] createDatum(Random r) {
+      return populateLongArray(r);
+    }
+  }
+
+
+  static class ReflectNestedObjectArrayTest extends
+      ReflectTest<ReflectNestedObjectArrayTest.Foo> {
+    ReflectNestedObjectArrayTest() throws IOException {
+      super("ReflectNestedObjectArray", new Foo(new Random()), 50);
+    }
+
+    @Override
+    protected Foo createDatum(Random r) {
+      return new Foo(r);
+    }
+
+    public static class Foo {
+      Vals[] bar;
+
+      Foo() {
+      }
+      
+      Foo(Random r) {
+        bar = new Vals[smallArraySize(r)];
+        for (int i = 0; i < bar.length; i++) {
+          bar[i] = new Vals(r);
+        }
+      }
+    }
+
+    static class Vals {
+      float f1;
+      float f2;
+      float f3;
+      float f4;
+
+      Vals(){
+      }
+      
+      Vals(Random r) {
+        this.f1 = r.nextFloat();
+        this.f2 = r.nextFloat();
+        this.f3 = r.nextFloat();
+        this.f4 = r.nextFloat();
+      }
+    }
+
+  }
+
+  public static class FloatFoo {
+    float[] floatBar;
+    
+    FloatFoo() {
+    }
+
+    FloatFoo(Random r, boolean large) {
+      floatBar = populateFloatArray(r, large);
+    }
+  }
+
+  // average of 8, between 1 and 15
+  private static int smallArraySize(Random r) {
+    return r.nextInt(15) + 1;
+  }
+
+  // average of 64, between 16 and 112
+  private static int largeArraySize(Random r) {
+    return r.nextInt(97) + 16;
+  }
+
+  static float[] populateFloatArray(Random r, boolean large) {
+    int size = large ? largeArraySize(r) : smallArraySize(r);
+    return populateFloatArray(r, size);
+  }
+  
+  static float[] populateFloatArray(Random r, int size) {
+    float[] result = new float[size];
+    for (int i = 0; i < result.length; i++) {
+      result[i] = r.nextFloat();
+    }
+    return result;
+  }
+  
+  static double[] populateDoubleArray(Random r) {
+    return populateDoubleArray(r, smallArraySize(r));
+  }
+  
+  static double[] populateDoubleArray(Random r, int size) {
+    double[] result = new double[size];
+    for (int i = 0; i < result.length; i++) {
+      result[i] = r.nextDouble();
+    }
+    return result;
+  }
+
+  static int[] populateIntArray(Random r) {
+    int size = smallArraySize(r);
+    int[] result = new int[size];
+    for (int i = 0; i < result.length; i++) {
+      result[i] = r.nextInt();
+    }
+    return result;
+  }
+  
+  static long[] populateLongArray(Random r) {
+    int size = smallArraySize(r);
+    long[] result = new long[size];
+    for (int i = 0; i < result.length; i++) {
+      result[i] = r.nextLong();
+    }
+    return result;
+  }
+  
+  static class ReflectNestedFloatArrayTest extends ReflectTest<FloatFoo> {
+    public ReflectNestedFloatArrayTest() throws IOException {
+      super("ReflectNestedFloatArray", new FloatFoo(new Random(), false), 10);
+    }
+
+    @Override
+    protected FloatFoo createDatum(Random r) {
+      return new FloatFoo(r, false);
+    }
+  }
+
+  static class ReflectNestedLargeFloatArrayTest extends ReflectTest<FloatFoo> {
+    public ReflectNestedLargeFloatArrayTest() throws IOException {
+      super("ReflectNestedLargeFloatArray", new FloatFoo(new Random(), true),
+          60);
+    }
+
+    @Override
+    protected FloatFoo createDatum(Random r) {
+      return new FloatFoo(r, true);
+    }
+
+  }
+  
+  static class ReflectNestedLargeFloatArrayBlockedTest extends ReflectTest<FloatFoo> {
+    public ReflectNestedLargeFloatArrayBlockedTest() throws IOException {
+      super("ReflectNestedLargeFloatArrayBlocked", new FloatFoo(new Random(), true),
+          60);
+    }
+
+    @Override
+    protected FloatFoo createDatum(Random r) {
+      return new FloatFoo(r, true);
+    }
+    
+    @Override
+    protected Encoder newEncoder(ByteArrayOutputStream out) throws IOException {
+      return new EncoderFactory().configureBlockSize(254).blockingBinaryEncoder(out, null);
+    }
+
+  }
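+
+  // Editor's note (not in upstream): configureBlockSize(254) above caps the
+  // blocking binary encoder's buffer at 254 bytes, so the large float arrays
+  // are written as many small length-prefixed blocks rather than a single
+  // item run, stressing the block bookkeeping paths.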
+
+  @SuppressWarnings("unused")
+  private static class Rec1 {
+    double d1;
+    double d11;
+    float f2;
+    float f22;
+    int f3;
+    int f33;
+    long f4;
+    long f44;
+    byte f5;
+    byte f55;
+    short f6;
+    short f66;
+
+    Rec1() {
+    }
+    
+    Rec1(Random r) {
+      d1 = r.nextDouble();
+      d11 = r.nextDouble();
+      f2 = r.nextFloat();
+      f22 = r.nextFloat();
+      f3 = r.nextInt();
+      f33 = r.nextInt();
+      f4 = r.nextLong();
+      f44 = r.nextLong();
+      f5 = (byte) r.nextInt();
+      f55 = (byte) r.nextInt();
+      f6 = (short) r.nextInt();
+      f66 = (short) r.nextInt();
+    }
+  }
+
+  static class ReflectBigRecordTest extends ReflectTest<Rec1> {
+    public ReflectBigRecordTest() throws IOException {
+      super("ReflectBigRecord", new Rec1(new Random()), 20);
+    }
+
+    @Override
+    protected Rec1 createDatum(Random r) {
+      return new Rec1(r);
+    }
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/NettyTransceiverWhenFailsToConnect.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/NettyTransceiverWhenFailsToConnect.java
new file mode 100644
index 0000000..6eefc2d
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/NettyTransceiverWhenFailsToConnect.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc;
+
+import org.jboss.netty.channel.ChannelFactory;
+import org.jboss.netty.channel.ChannelPipeline;
+import org.jboss.netty.channel.socket.SocketChannel;
+import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.ServerSocket;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Verifies that when the NettyTransceiver fails to connect, it cleans up the
+ * Netty channel that it created.
+ */
+public class NettyTransceiverWhenFailsToConnect {
+
+    @Test(expected = IOException.class)
+    public void testNettyTransceiverReleasesNettyChannelOnFailingToConnect() throws Exception {
+        ServerSocket serverSocket = null;
+        LastChannelRememberingChannelFactory socketChannelFactory = null;
+
+        try {
+            serverSocket = new ServerSocket(0);
+            socketChannelFactory = new LastChannelRememberingChannelFactory();
+
+            try {
+                new NettyTransceiver(
+                        new InetSocketAddress(serverSocket.getLocalPort()),
+                        socketChannelFactory,
+                        1L
+                );
+            } finally {
+                assertEquals("expected that the channel opened by the transceiver is closed",
+                        false, socketChannelFactory.lastChannel.isOpen());
+            }
+        } finally {
+
+            if (serverSocket != null) {
+                // closing the server socket will actually free up the open channel in the
+                // transceiver, which would have hung otherwise (pre AVRO-1407)
+                serverSocket.close();
+            }
+
+            if (socketChannelFactory != null) {
+                socketChannelFactory.releaseExternalResources();
+            }
+        }
+    }
+
+    class LastChannelRememberingChannelFactory extends NioClientSocketChannelFactory implements ChannelFactory {
+
+        volatile SocketChannel lastChannel;
+
+        @Override
+        public SocketChannel newChannel(ChannelPipeline pipeline) {
+            return lastChannel = super.newChannel(pipeline);
+        }
+    }
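+
+    // Editor's note (not in upstream): the factory above remembers each
+    // channel it creates so that the test can assert, in its finally block,
+    // that the failed NettyTransceiver constructor closed it (the AVRO-1407
+    // regression).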
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestLocalTransceiver.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestLocalTransceiver.java
new file mode 100644
index 0000000..09f7078
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestLocalTransceiver.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+
+import org.apache.avro.AvroRemoteException;
+import org.apache.avro.Protocol;
+import org.apache.avro.Protocol.Message;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.ipc.generic.GenericRequestor;
+import org.apache.avro.ipc.generic.GenericResponder;
+import org.apache.avro.util.Utf8;
+import org.junit.Test;
+
+public class TestLocalTransceiver {
+
+  Protocol protocol = Protocol.parse("" + "{\"protocol\": \"Minimal\", "
+      + "\"messages\": { \"m\": {"
+      + "   \"request\": [{\"name\": \"x\", \"type\": \"string\"}], "
+      + "   \"response\": \"string\"} } }");
+
+  static class TestResponder extends GenericResponder {
+    public TestResponder(Protocol local) {
+      super(local);
+    }
+
+    @Override
+    public Object respond(Message message, Object request)
+        throws AvroRemoteException {
+      assertEquals(new Utf8("hello"), ((GenericRecord) request).get("x"));
+      return new Utf8("there");
+    }
+
+  }
+
+  @Test
+  public void testSingleRpc() throws IOException {
+    Transceiver t = new LocalTransceiver(new TestResponder(protocol));
+    GenericRecord params = new GenericData.Record(protocol.getMessages().get(
+        "m").getRequest());
+    params.put("x", new Utf8("hello"));
+    GenericRequestor r = new GenericRequestor(protocol, t);
+    
+    for (int x = 0; x < 5; x++)
+      assertEquals(new Utf8("there"), r.request("m", params));
+  }
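+
+  // Editor's note (not in upstream): LocalTransceiver short-circuits the
+  // wire entirely; each request is handed to the responder in-process, so
+  // this test exercises request/response serialization without sockets.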
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServer.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServer.java
new file mode 100644
index 0000000..a39de4c
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServer.java
@@ -0,0 +1,180 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import static org.junit.Assert.assertEquals;
+
+import java.net.InetSocketAddress;
+import java.net.Socket;
+import java.net.UnknownHostException;
+import java.nio.charset.Charset;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+import org.junit.Assert;
+
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.ipc.specific.SpecificResponder;
+import org.apache.avro.test.Mail;
+import org.apache.avro.test.Message;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestNettyServer {
+  static final long CONNECT_TIMEOUT_MILLIS = 2000; // 2 sec
+  private static Server server;
+  private static Transceiver transceiver;
+  private static Mail proxy;
+  private static MailImpl mailService;
+
+  public static class MailImpl implements Mail {
+
+    private CountDownLatch allMessages = new CountDownLatch(5);
+    
+    // in this simple example just return details of the message
+    public String send(Message message) {
+      return "Sent message to ["+ message.getTo().toString() + 
+          "] from [" + message.getFrom().toString() + "] with body [" + 
+          message.getBody().toString() + "]";
+    }
+    
+    public void fireandforget(Message message) {
+      allMessages.countDown();
+    }
+    
+    private void awaitMessages() throws InterruptedException {
+      allMessages.await(2, TimeUnit.SECONDS);
+    }
+    
+    private void assertAllMessagesReceived() {
+      assertEquals(0, allMessages.getCount());
+    }
+
+    public void reset() {
+      allMessages = new CountDownLatch(5);      
+    }
+  }
+  
+  @BeforeClass
+  public static void initializeConnections() throws Exception {
+    // start server
+    System.out.println("starting server...");
+    mailService = new MailImpl();
+    Responder responder = new SpecificResponder(Mail.class, mailService);
+    server = initializeServer(responder);
+    server.start();
+  
+    int serverPort = server.getPort();
+    System.out.println("server port : " + serverPort);
+
+    transceiver = initializeTransceiver(serverPort);
+    proxy = SpecificRequestor.getClient(Mail.class, transceiver);
+  }
+  
+  protected static Server initializeServer(Responder responder) {
+    return new NettyServer(responder, new InetSocketAddress(0));
+  }
+  
+  protected static Transceiver initializeTransceiver(int serverPort) throws IOException {
+    return new NettyTransceiver(new InetSocketAddress(
+        serverPort), CONNECT_TIMEOUT_MILLIS);
+  }
+  
+  @AfterClass
+  public static void tearDownConnections() throws Exception {
+    transceiver.close();
+    server.close();
+  }
+
+  @Test
+  public void testRequestResponse() throws Exception {
+    for (int x = 0; x < 5; x++) {
+      verifyResponse(proxy.send(createMessage()));
+    }
+  }
+
+  private void verifyResponse(String result) {
+    Assert.assertEquals(
+        "Sent message to [wife] from [husband] with body [I love you!]",
+        result.toString());
+  }
+  
+  @Test
+  public void testOneway() throws Exception {
+    for (int x = 0; x < 5; x++) {
+      proxy.fireandforget(createMessage());
+    }
+    mailService.awaitMessages();
+    mailService.assertAllMessagesReceived();
+  }
+  
+  @Test
+  public void testMixtureOfRequests() throws Exception {
+    mailService.reset();
+    for (int x = 0; x < 5; x++) {
+      Message createMessage = createMessage();
+      proxy.fireandforget(createMessage);
+      verifyResponse(proxy.send(createMessage));
+    }
+    mailService.awaitMessages();
+    mailService.assertAllMessagesReceived();
+
+  }
+
+  @Test
+  public void testConnectionsCount() throws Exception {
+    Transceiver transceiver2 = new NettyTransceiver(new InetSocketAddress(
+            server.getPort()), CONNECT_TIMEOUT_MILLIS);
+    Mail proxy2 = SpecificRequestor.getClient(Mail.class, transceiver2);
+    proxy.fireandforget(createMessage());
+    proxy2.fireandforget(createMessage());
+    Assert.assertEquals(2, ((NettyServer) server).getNumActiveConnections());
+    transceiver2.close();
+    Assert.assertEquals(1, ((NettyServer) server).getNumActiveConnections());
+  }
+
+  private Message createMessage() {
+    Message msg = Message.newBuilder().
+      setTo("wife").
+      setFrom("husband").
+      setBody("I love you!").
+      build();
+    return msg;
+  }
+
+  // send a malformed request (HTTP) to the NettyServer port
+  @Test
+  public void testBadRequest() throws IOException {
+    int port = server.getPort();
+    String msg = "GET /status HTTP/1.1\n\n";
+    InetSocketAddress sockAddr = new InetSocketAddress("127.0.0.1", port);
+    Socket sock = new Socket();
+    sock.connect(sockAddr);
+    OutputStream out = sock.getOutputStream();
+    out.write(msg.getBytes(Charset.forName("UTF-8")));
+    out.flush();
+    byte[] buf = new byte[2048];
+    int bytesRead = sock.getInputStream().read(buf);
+    Assert.assertTrue("Connection should have been closed", bytesRead == -1);
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerConcurrentExecution.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerConcurrentExecution.java
new file mode 100644
index 0000000..6d3fd1e
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerConcurrentExecution.java
@@ -0,0 +1,199 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Executors;
+
+import org.apache.avro.AvroRemoteException;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.ipc.specific.SpecificResponder;
+import org.apache.avro.test.Simple;
+import org.apache.avro.test.TestError;
+import org.apache.avro.test.TestRecord;
+import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
+import org.jboss.netty.handler.execution.ExecutionHandler;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Verifies that RPCs executed by different client threads using the same
+ * NettyTransceiver will execute concurrently. The test follows these steps:
+ * 1. Execute the {@link org.apache.avro.test.Simple#add(int, int)} RPC to
+ *    complete the Avro IPC handshake.
+ * 2a. In a background thread, wait for the waitLatch.
+ * 3a. In the main thread, invoke
+ *    {@link org.apache.avro.test.Simple#hello(String)} with the argument
+ *    "wait". This causes the SimpleImpl running on the server to count down
+ *    the wait latch, which unblocks the background thread and allows it to
+ *    proceed. After counting down the latch, this call blocks, waiting for
+ *    {@link org.apache.avro.test.Simple#ack()} to be invoked.
+ * 2b. The background thread wakes up because the waitLatch has been counted
+ *     down. Now we know that some thread is executing inside hello(String).
+ *     Next, execute {@link org.apache.avro.test.Simple#ack()} in the
+ *     background thread, which allows the thread executing hello(String)
+ *     to return.
+ * 3b. The thread executing hello(String) on the server unblocks (since ack()
+ *     has been called), allowing hello(String) to return.
+ * 4. If control returns to the main thread, we know that two RPCs
+ *    (hello(String) and ack()) were executing concurrently.
+ */
+public class TestNettyServerConcurrentExecution {
+  private Server server;
+  private Transceiver transceiver;
+  
+  @After
+  public void cleanUpAfter() throws Exception {
+    try {
+      if (transceiver != null) {
+        transceiver.close();
+      }
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    try {
+      if (server != null) {
+        server.close();
+      }
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+  }
+  
+  @Test(timeout=30000)
+  public void test() throws Exception {
+    final CountDownLatch waitLatch = new CountDownLatch(1);
+    server = new NettyServer(
+        new SpecificResponder(Simple.class, new SimpleImpl(waitLatch)), 
+        new InetSocketAddress(0), 
+        new NioServerSocketChannelFactory
+          (Executors.newCachedThreadPool(), Executors.newCachedThreadPool()), 
+        new ExecutionHandler(Executors.newCachedThreadPool()));
+    server.start();
+    
+    transceiver = new NettyTransceiver(new InetSocketAddress(
+        server.getPort()), TestNettyServer.CONNECT_TIMEOUT_MILLIS);
+    
+    // 1. Create the RPC proxy, and establish the handshake:
+    final Simple.Callback simpleClient = 
+        SpecificRequestor.getClient(Simple.Callback.class, transceiver);
+    SpecificRequestor.getRemote(simpleClient);    // force handshake
+    
+    /*
+     * 2a. In a background thread, wait for the Client.hello("wait") call to be
+     *    received by the server, then:
+     * 2b. Execute the Client.ack() RPC, which will unblock the 
+     *     Client.hello("wait") call, allowing it to return to the main thread.
+     */
+    new Thread() {
+      @Override
+      public void run() {
+        setName(TestNettyServerConcurrentExecution.class.getSimpleName() + 
+            "Ack Thread");
+        try {
+          // Step 2a:
+          waitLatch.await();
+          
+          // Step 2b:
+          simpleClient.ack();
+        } catch (InterruptedException e) {
+          e.printStackTrace();
+        }
+      }
+    }.start();
+    
+    /*
+     * 3. Execute the Client.hello("wait") RPC, which will block until the
+     *    Client.ack() call has completed in the background thread.
+     */
+    String response = simpleClient.hello("wait");
+    
+    // 4. If control reaches here, both RPCs have executed concurrently
+    Assert.assertEquals("wait", response); 
+  }
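+
+  // Editor's sketch (not part of upstream): the latch handshake used above,
+  // in miniature and independent of Avro. The main thread can only get past
+  // entered.await() once the worker has started, and the worker can only
+  // finish after the main thread releases it, proving the two overlapped.
+  static void latchHandshakeSketch() throws InterruptedException {
+    final CountDownLatch entered = new CountDownLatch(1);
+    final CountDownLatch release = new CountDownLatch(1);
+    Thread worker = new Thread() {
+      @Override
+      public void run() {
+        entered.countDown();            // signal: we are inside
+        try {
+          release.await();              // block until released
+        } catch (InterruptedException e) {
+          Thread.currentThread().interrupt();
+        }
+      }
+    };
+    worker.start();
+    entered.await();                    // worker is now provably running
+    release.countDown();                // let it finish
+    worker.join();
+  }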
+
+  /**
+   * Implementation of the Simple interface for use with this unit test.
+   * If {@link #hello(String)} is called with "wait" as its argument,  
+   * {@link #waitLatch} will be counted down, and {@link #hello(String)} will 
+   * block until {@link #ack()} has been invoked.
+   */
+  private static class SimpleImpl implements Simple {
+    private final CountDownLatch waitLatch;
+    private final CountDownLatch ackLatch = new CountDownLatch(1);
+    
+    /**
+     * Creates a SimpleImpl that uses the given CountDownLatch.
+     * @param waitLatch the CountDownLatch to use in {@link #hello(String)}.
+     */
+    public SimpleImpl(final CountDownLatch waitLatch) {
+      this.waitLatch = waitLatch;
+    }
+    
+    @Override
+    public int add(int arg1, int arg2) throws AvroRemoteException {
+      // Step 1:
+      return arg1 + arg2;
+    }
+    
+    @Override
+    public String hello(String greeting) throws AvroRemoteException {
+      if (greeting.equals("wait")) {
+        try {
+          // Step 3a:
+          waitLatch.countDown();
+          
+          // Step 3b:
+          ackLatch.await();
+        } catch (InterruptedException e) {
+          Thread.currentThread().interrupt();
+          return e.toString();
+        }
+      }
+      return greeting;
+    }
+    
+    @Override
+    public void ack() {
+      // Step 2b:
+      ackLatch.countDown();
+    }
+    
+    // All RPCs below this line are irrelevant to this test:
+    
+    @Override
+    public TestRecord echo(TestRecord record) throws AvroRemoteException {
+      return record;
+    }
+
+    @Override
+    public ByteBuffer echoBytes(ByteBuffer data) throws AvroRemoteException {
+      return data;
+    }
+
+    @Override
+    public Void error() throws AvroRemoteException, TestError {
+      throw new TestError("TestError");
+    }
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCallbacks.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCallbacks.java
new file mode 100644
index 0000000..78816c0
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCallbacks.java
@@ -0,0 +1,691 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
+
+import org.apache.avro.AvroRemoteException;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.ipc.specific.SpecificResponder;
+import org.apache.avro.test.Simple;
+import org.apache.avro.test.TestError;
+import org.apache.avro.test.TestRecord;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * Tests asynchronous RPCs with Netty.
+ */
+public class TestNettyServerWithCallbacks {
+  private static Server server;
+  private static Transceiver transceiver;
+  private static Simple.Callback simpleClient;
+  private static final AtomicBoolean ackFlag = new AtomicBoolean(false);
+  private static final AtomicReference<CountDownLatch> ackLatch = 
+    new AtomicReference<CountDownLatch>(new CountDownLatch(1));
+  private static Simple simpleService = new SimpleImpl(ackFlag);
+  
+  @BeforeClass
+  public static void initializeConnections() throws Exception {
+    // start server
+    Responder responder = new SpecificResponder(Simple.class, simpleService);
+    server = new NettyServer(responder, new InetSocketAddress(0));
+    server.start();
+  
+    int serverPort = server.getPort();
+    System.out.println("server port : " + serverPort);
+
+    transceiver = new NettyTransceiver(new InetSocketAddress(
+        serverPort), TestNettyServer.CONNECT_TIMEOUT_MILLIS);
+    simpleClient = SpecificRequestor.getClient(Simple.Callback.class, transceiver);
+  }
+  
+  @AfterClass
+  public static void tearDownConnections() throws Exception {
+    if (transceiver != null) {
+      transceiver.close();
+    }
+    if (server != null) {
+      server.close();
+    }
+  }
+  
+  @Test
+  public void greeting() throws Exception {
+    // Test synchronous RPC:
+    Assert.assertEquals("Hello, how are you?", simpleClient.hello("how are you?"));
+    
+    // Test asynchronous RPC (future):
+    CallFuture<String> future1 = new CallFuture<String>();
+    simpleClient.hello("World!", future1);
+    Assert.assertEquals("Hello, World!", future1.get(2, TimeUnit.SECONDS));
+    Assert.assertNull(future1.getError());
+    
+    // Test asynchronous RPC (callback):
+    final CallFuture<String> future2 = new CallFuture<String>();
+    simpleClient.hello("what's up?", new Callback<String>() {
+      @Override
+      public void handleResult(String result) {
+        future2.handleResult(result);
+      }
+      @Override
+      public void handleError(Throwable error) {
+        future2.handleError(error);
+      }
+    });
+    Assert.assertEquals("Hello, what's up?", future2.get(2, TimeUnit.SECONDS));
+    Assert.assertNull(future2.getError());
+  }
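+
+  // Editor's note (not in upstream): CallFuture implements Callback, which is
+  // why future1 can be passed directly to hello(...) above; the anonymous
+  // Callback delegating to future2 exists only to exercise the explicit
+  // callback path.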
+  
+  @Test
+  public void echo() throws Exception {
+    TestRecord record = TestRecord.newBuilder().setHash(
+        new org.apache.avro.test.MD5(
+            new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8 })).
+        setKind(org.apache.avro.test.Kind.FOO).
+        setName("My Record").build();
+    
+    // Test synchronous RPC:
+    Assert.assertEquals(record, simpleClient.echo(record));
+    
+    // Test asynchronous RPC (future):
+    CallFuture<TestRecord> future1 = new CallFuture<TestRecord>();
+    simpleClient.echo(record, future1);
+    Assert.assertEquals(record, future1.get(2, TimeUnit.SECONDS));
+    Assert.assertNull(future1.getError());
+    
+    // Test asynchronous RPC (callback):
+    final CallFuture<TestRecord> future2 = new CallFuture<TestRecord>();
+    simpleClient.echo(record, new Callback<TestRecord>() {
+      @Override
+      public void handleResult(TestRecord result) {
+        future2.handleResult(result);
+      }
+      @Override
+      public void handleError(Throwable error) {
+        future2.handleError(error);
+      }
+    });
+    Assert.assertEquals(record, future2.get(2, TimeUnit.SECONDS));
+    Assert.assertNull(future2.getError());
+  }
+  
+  @Test
+  public void add() throws Exception {
+    // Test synchronous RPC:
+    Assert.assertEquals(8, simpleClient.add(2, 6));
+    
+    // Test asynchronous RPC (future):
+    CallFuture<Integer> future1 = new CallFuture<Integer>();
+    simpleClient.add(8, 8, future1);
+    Assert.assertEquals(new Integer(16), future1.get(2, TimeUnit.SECONDS));
+    Assert.assertNull(future1.getError());
+    
+    // Test asynchronous RPC (callback):
+    final CallFuture<Integer> future2 = new CallFuture<Integer>();
+    simpleClient.add(512, 256, new Callback<Integer>() {
+      @Override
+      public void handleResult(Integer result) {
+        future2.handleResult(result);
+      }
+      @Override
+      public void handleError(Throwable error) {
+        future2.handleError(error);
+      }
+    });
+    Assert.assertEquals(new Integer(768), future2.get(2, TimeUnit.SECONDS));
+    Assert.assertNull(future2.getError());
+  }
+  
+  @Test
+  public void echoBytes() throws Exception {
+    ByteBuffer byteBuffer = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 });
+    
+    // Test synchronous RPC:
+    Assert.assertEquals(byteBuffer, simpleClient.echoBytes(byteBuffer));
+    
+    // Test asynchronous RPC (future):
+    CallFuture<ByteBuffer> future1 = new CallFuture<ByteBuffer>();
+    simpleClient.echoBytes(byteBuffer, future1);
+    Assert.assertEquals(byteBuffer, future1.get(2, TimeUnit.SECONDS));
+    Assert.assertNull(future1.getError());
+    
+    // Test asynchronous RPC (callback):
+    final CallFuture<ByteBuffer> future2 = new CallFuture<ByteBuffer>();
+    simpleClient.echoBytes(byteBuffer, new Callback<ByteBuffer>() {
+      @Override
+      public void handleResult(ByteBuffer result) {
+        future2.handleResult(result);
+      }
+      @Override
+      public void handleError(Throwable error) {
+        future2.handleError(error);
+      }
+    });
+    Assert.assertEquals(byteBuffer, future2.get(2, TimeUnit.SECONDS));
+    Assert.assertNull(future2.getError());
+  }
+  
+  @Test
+  public void error() throws IOException, InterruptedException, TimeoutException {
+    // Test synchronous RPC:
+    try {
+      simpleClient.error();
+      Assert.fail("Expected " + TestError.class.getCanonicalName());
+    } catch (TestError e) {
+      // Expected
+    } catch (AvroRemoteException e) {
+      e.printStackTrace();
+      Assert.fail("Unexpected error: " + e.toString());
+    }
+    
+    // Test asynchronous RPC (future):
+    CallFuture<Void> future = new CallFuture<Void>();
+    simpleClient.error(future);
+    try {
+      future.get(2, TimeUnit.SECONDS);
+      Assert.fail("Expected " + TestError.class.getCanonicalName() + " to be thrown");
+    } catch (ExecutionException e) {
+      Assert.assertTrue("Expected " + TestError.class.getCanonicalName(), 
+          e.getCause() instanceof TestError);
+    }
+    Assert.assertNotNull(future.getError());
+    Assert.assertTrue("Expected " + TestError.class.getCanonicalName(), 
+        future.getError() instanceof TestError);
+    Assert.assertNull(future.getResult());
+    
+    // Test asynchronous RPC (callback):
+    final CountDownLatch latch = new CountDownLatch(1);
+    final AtomicReference<Throwable> errorRef = new AtomicReference<Throwable>();
+    simpleClient.error(new Callback<Void>() {
+      @Override
+      public void handleResult(Void result) {
+        Assert.fail("Expected " + TestError.class.getCanonicalName());
+      }
+      @Override
+      public void handleError(Throwable error) {
+        errorRef.set(error);
+        latch.countDown();
+      }
+    });
+    Assert.assertTrue("Timed out waiting for error", latch.await(2, TimeUnit.SECONDS));
+    Assert.assertNotNull(errorRef.get());
+    Assert.assertTrue(errorRef.get() instanceof TestError);
+  }
+  
+  @Test
+  public void ack() throws Exception {
+    simpleClient.ack();
+    ackLatch.get().await(2, TimeUnit.SECONDS);
+    Assert.assertTrue("Expected ack flag to be set", ackFlag.get());
+    
+    ackLatch.set(new CountDownLatch(1));
+    simpleClient.ack();
+    ackLatch.get().await(2, TimeUnit.SECONDS);
+    Assert.assertFalse("Expected ack flag to be cleared", ackFlag.get());
+  }
+  
+  @Test
+  public void testSendAfterChannelClose() throws Exception {
+    // Start up a second server so that closing the server doesn't 
+    // interfere with the other unit tests:
+    Server server2 = new NettyServer(new SpecificResponder(Simple.class, simpleService), 
+        new InetSocketAddress(0));
+    server2.start();
+    try {
+      int serverPort = server2.getPort();
+      System.out.println("server2 port : " + serverPort);
+
+      Transceiver transceiver2 = new NettyTransceiver(new InetSocketAddress(
+          serverPort), TestNettyServer.CONNECT_TIMEOUT_MILLIS);
+      try {
+        Simple.Callback simpleClient2 = 
+          SpecificRequestor.getClient(Simple.Callback.class, transceiver2);
+
+        // Verify that connection works:
+        Assert.assertEquals(3, simpleClient2.add(1, 2));
+        
+        // Try again with callbacks:
+        CallFuture<Integer> addFuture = new CallFuture<Integer>();
+        simpleClient2.add(1, 2, addFuture);
+        Assert.assertEquals(new Integer(3), addFuture.get());
+
+        // Shut down server:
+        server2.close();
+        Thread.sleep(1000L);
+
+        // Send a new RPC, and verify that it throws an Exception that 
+        // can be detected by the client:
+        boolean ioeCaught = false;
+        try {
+          simpleClient2.add(1, 2);
+          Assert.fail("Send after server close should have thrown Exception");
+        } catch (AvroRemoteException e) {
+          ioeCaught = e.getCause() instanceof IOException;
+          Assert.assertTrue("Expected IOException", ioeCaught);
+        } catch (Exception e) {
+          e.printStackTrace();
+          throw e;
+        }
+        Assert.assertTrue("Expected IOException", ioeCaught);
+        
+        // Send a new RPC with callback, and verify that the correct Exception 
+        // is thrown:
+        ioeCaught = false;
+        try {
+          addFuture = new CallFuture<Integer>();
+          simpleClient2.add(1, 2, addFuture);
+          addFuture.get();
+          Assert.fail("Send after server close should have thrown Exception");
+        } catch (IOException e) {
+          ioeCaught = true;
+        } catch (Exception e) {
+          e.printStackTrace();
+          throw e;
+        }
+        Assert.assertTrue("Expected IOException", ioeCaught);
+      } finally {
+        transceiver2.close();
+      }
+    } finally {
+      server2.close();
+    }
+  }
+  
+  @Test
+  public void cancelPendingRequestsOnTransceiverClose() throws Exception {
+    // Start up a second server so that closing the server doesn't 
+    // interfere with the other unit tests:
+    BlockingSimpleImpl blockingSimpleImpl = new BlockingSimpleImpl();
+    Server server2 = new NettyServer(new SpecificResponder(Simple.class, 
+        blockingSimpleImpl), new InetSocketAddress(0));
+    server2.start();
+    try {
+      int serverPort = server2.getPort();
+      System.out.println("server2 port : " + serverPort);
+
+      CallFuture<Integer> addFuture = new CallFuture<Integer>();
+      Transceiver transceiver2 = new NettyTransceiver(new InetSocketAddress(
+          serverPort), TestNettyServer.CONNECT_TIMEOUT_MILLIS);
+      try {        
+        Simple.Callback simpleClient2 = 
+          SpecificRequestor.getClient(Simple.Callback.class, transceiver2);
+        
+        // The first call has to block for the handshake:
+        Assert.assertEquals(3, simpleClient2.add(1, 2));
+        
+        // Now acquire the semaphore so that the server will block:
+        blockingSimpleImpl.acquireRunPermit();
+        simpleClient2.add(1, 2, addFuture);
+      } finally {
+        // When the transceiver is closed, the CallFuture should get 
+        // an IOException
+        transceiver2.close();
+      }
+      boolean ioeThrown = false;
+      try {
+        addFuture.get();
+      } catch (ExecutionException e) {
+        ioeThrown = e.getCause() instanceof IOException;
+        Assert.assertTrue(e.getCause() instanceof IOException);
+      } catch (Exception e) {
+        e.printStackTrace();
+        Assert.fail("Unexpected Exception: " + e.toString());
+      }
+      Assert.assertTrue("Expected IOException to be thrown", ioeThrown);
+    } finally {
+      blockingSimpleImpl.releaseRunPermit();
+      server2.close();
+    }
+  }
+  
+  @Test
+  public void cancelPendingRequestsAfterChannelCloseByServerShutdown() throws Exception {
+    // The purpose of this test is to verify that a client doesn't stay
+    // blocked when a server is unexpectedly killed (or when for some
+    // other reason the channel is suddenly closed) while the server
+    // was in the process of handling a request (thus after it received
+    // the request, and before it returned the response).
+
+    // Start up a second server so that closing the server doesn't
+    // interfere with the other unit tests:
+    BlockingSimpleImpl blockingSimpleImpl = new BlockingSimpleImpl();
+    Server server2 = new NettyServer(new SpecificResponder(Simple.class,
+        blockingSimpleImpl), new InetSocketAddress(0));
+    server2.start();
+
+    Transceiver transceiver2 = null;
+
+    try {
+      int serverPort = server2.getPort();
+      System.out.println("server2 port : " + serverPort);
+
+      transceiver2 = new NettyTransceiver(new InetSocketAddress(
+          serverPort), TestNettyServer.CONNECT_TIMEOUT_MILLIS);
+
+      final Simple.Callback simpleClient2 =
+          SpecificRequestor.getClient(Simple.Callback.class, transceiver2);
+
+      // Acquire the method-enter permit, which will be released by the
+      // server method once we call it
+      blockingSimpleImpl.acquireEnterPermit();
+
+      // Acquire the run permit, to avoid that the server method returns immediately
+      blockingSimpleImpl.acquireRunPermit();
+
+      Thread t = new Thread(new Runnable() {
+        @Override
+        public void run() {
+          try {
+            simpleClient2.add(3, 4);
+            Assert.fail("Expected an exception");
+          } catch (Exception e) {
+            // expected
+          }
+        }
+      });
+      
+      // Start client call
+      t.start();
+      
+      // Wait until method is entered on the server side
+      blockingSimpleImpl.acquireEnterPermit();
+      
+      // The server side method is now blocked waiting on the run permit
+      // (= is busy handling the request)
+      
+      // Stop the server
+      server2.close();
+      
+      // With the server gone, we expect the client to get some exception and exit
+      // Wait for client thread to exit
+      t.join(10000);
+      
+      Assert.assertFalse("Client request should not be blocked on server shutdown", t.isAlive());
+      
+    } finally {
+      blockingSimpleImpl.releaseRunPermit();
+      server2.close();
+      if (transceiver2 != null)
+        transceiver2.close();
+    }
+  }
+  
+  @Test
+  public void clientReconnectAfterServerRestart() throws Exception {
+    // Start up a second server so that closing the server doesn't 
+    // interfere with the other unit tests:
+    SimpleImpl simpleImpl = new BlockingSimpleImpl();
+    Server server2 = new NettyServer(new SpecificResponder(Simple.class, 
+        simpleImpl), new InetSocketAddress(0));
+    try {      
+      server2.start();
+      int serverPort = server2.getPort();
+      System.out.println("server2 port : " + serverPort);
+
+      // Initialize a client, and establish a connection to the server:
+      Transceiver transceiver2 = new NettyTransceiver(new InetSocketAddress(
+          serverPort), TestNettyServer.CONNECT_TIMEOUT_MILLIS);
+      Simple.Callback simpleClient2 = 
+          SpecificRequestor.getClient(Simple.Callback.class, transceiver2);
+      Assert.assertEquals(3, simpleClient2.add(1, 2));
+      
+      // Restart the server:
+      server2.close();
+      try {
+        simpleClient2.add(2, -1);
+        Assert.fail("Client should not be able to invoke RPCs " +
+            "because server is no longer running");
+      } catch (Exception e) {
+        // Expected since server is no longer running
+      }
+      Thread.sleep(2000L);
+      server2 = new NettyServer(new SpecificResponder(Simple.class, 
+          simpleImpl), new InetSocketAddress(serverPort));
+      server2.start();
+      
+      // Invoke an RPC using the same client, which should reestablish the 
+      // connection to the server:
+      Assert.assertEquals(3, simpleClient2.add(1, 2));
+    } finally {
+      server2.close();
+    }
+  }
+
+  @Ignore
+  @Test
+  public void performanceTest() throws Exception {
+    final int threadCount = 8;
+    final long runTimeMillis = 10 * 1000L;
+    ExecutorService threadPool = Executors.newFixedThreadPool(threadCount);
+    
+    System.out.println("Running performance test for " + runTimeMillis + "ms...");
+    final AtomicLong rpcCount = new AtomicLong(0L);
+    final AtomicBoolean runFlag = new AtomicBoolean(true);
+    final CountDownLatch startLatch = new CountDownLatch(threadCount);
+    for (int ii = 0; ii < threadCount; ii++) {
+      threadPool.submit(new Runnable() {
+        @Override
+        public void run() {
+          try {
+            startLatch.countDown();
+            startLatch.await(2, TimeUnit.SECONDS);
+            while (runFlag.get()) {
+              rpcCount.incrementAndGet();
+              Assert.assertEquals("Hello, World!", simpleClient.hello("World!"));
+            }
+          } catch (Exception e) {
+            e.printStackTrace();
+          }
+        }
+      });
+    }
+    
+    startLatch.await(2, TimeUnit.SECONDS);
+    Thread.sleep(runTimeMillis);
+    runFlag.set(false);
+    threadPool.shutdown();
+    Assert.assertTrue("Timed out shutting down thread pool", threadPool.awaitTermination(2, TimeUnit.SECONDS));
+    System.out.println("Completed " + rpcCount.get() + " RPCs in " + runTimeMillis + 
+        "ms => " + (((double)rpcCount.get() / (double)runTimeMillis) * 1000) + " RPCs/sec, " + 
+        ((double)runTimeMillis / (double)rpcCount.get()) + " ms/RPC.");
+  }
+  
+  /**
+   * Implementation of the Simple interface.
+   */
+  private static class SimpleImpl implements Simple {
+    private final AtomicBoolean ackFlag;
+    
+    /**
+     * Creates a SimpleImpl.
+     * @param ackFlag the AtomicBoolean to toggle when ack() is called.
+     */
+    public SimpleImpl(final AtomicBoolean ackFlag) {
+      this.ackFlag = ackFlag;
+    }
+    
+    @Override
+    public String hello(String greeting) throws AvroRemoteException {
+      return "Hello, " + greeting;
+    }
+
+    @Override
+    public TestRecord echo(TestRecord record) throws AvroRemoteException {
+      return record;
+    }
+
+    @Override
+    public int add(int arg1, int arg2) throws AvroRemoteException {
+      return arg1 + arg2;
+    }
+
+    @Override
+    public ByteBuffer echoBytes(ByteBuffer data) throws AvroRemoteException {
+      return data;
+    }
+
+    @Override
+    public Void error() throws AvroRemoteException, TestError {
+      throw TestError.newBuilder().setMessage$("Test Message").build();
+    }
+
+    @Override
+    synchronized public void ack() {
+      ackFlag.set(!ackFlag.get());
+      ackLatch.get().countDown();
+    }
+  }
+  
+  /**
+   * A SimpleImpl that requires a semaphore permit before executing any method.
+   */
+  private static class BlockingSimpleImpl extends SimpleImpl {
+    /** Semaphore that is released when the method is entered. */
+    private final Semaphore enterSemaphore = new Semaphore(1);
+    /** Semaphore that must be acquired for the method to run and exit. */
+    private final Semaphore runSemaphore = new Semaphore(1);
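+    // Handshake used by the tests: before issuing an RPC the test thread may
+    // drain these semaphores; each service method below first releases the
+    // enter permit (signalling that the request arrived) and then blocks on
+    // the run permit until the test allows it to complete.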
+    
+    /**
+     * Creates a BlockingSimpleImpl.
+     */
+    public BlockingSimpleImpl() {
+      super(new AtomicBoolean());
+    }
+    
+    @Override
+    public String hello(String greeting) throws AvroRemoteException {
+      releaseEnterPermit();
+      acquireRunPermit();
+      try {
+        return super.hello(greeting);
+      } finally {
+        releaseRunPermit();
+      }
+    }
+
+    @Override
+    public TestRecord echo(TestRecord record) throws AvroRemoteException {
+      releaseEnterPermit();
+      acquireRunPermit();
+      try {
+        return super.echo(record);
+      } finally {
+        releaseRunPermit();
+      }
+    }
+
+    @Override
+    public int add(int arg1, int arg2) throws AvroRemoteException {
+      releaseEnterPermit();
+      acquireRunPermit();
+      try {
+        return super.add(arg1, arg2);
+      } finally {
+        releaseRunPermit();
+      }
+    }
+
+    @Override
+    public ByteBuffer echoBytes(ByteBuffer data) throws AvroRemoteException {
+      releaseEnterPermit();
+      acquireRunPermit();
+      try {
+        return super.echoBytes(data);
+      } finally {
+        releaseRunPermit();
+      }
+    }
+
+    @Override
+    public Void error() throws AvroRemoteException, TestError {
+      releaseEnterPermit();
+      acquireRunPermit();
+      try {
+        return super.error();
+      } finally {
+        releaseRunPermit();
+      }
+    }
+
+    @Override
+    public void ack() {
+      releaseEnterPermit();
+      acquireRunPermit();
+      try {
+        super.ack();
+      } finally {
+        releaseRunPermit();
+      }
+    }
+    
+    /**
+     * Acquires a single permit from the semaphore.
+     */
+    public void acquireRunPermit() {
+      try {
+        runSemaphore.acquire();
+      } catch (InterruptedException e) {
+        Thread.currentThread().interrupt();
+        throw new RuntimeException(e);
+      }
+    }
+    
+    /**
+     * Releases a single permit to the semaphore.
+     */
+    public void releaseRunPermit() {
+      runSemaphore.release();
+    }
+
+    /**
+     * Acquires a single permit from the semaphore.
+     */
+    public void acquireEnterPermit() {
+      try {
+        enterSemaphore.acquire();
+      } catch (InterruptedException e) {
+        Thread.currentThread().interrupt();
+        throw new RuntimeException(e);
+      }
+    }
+
+    /**
+     * Releases a single permit to the semaphore.
+     */
+    public void releaseEnterPermit() {
+      enterSemaphore.release();
+    }
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCompression.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCompression.java
new file mode 100644
index 0000000..98dc9e6
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithCompression.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.concurrent.Executors;
+
+import org.jboss.netty.channel.ChannelFactory;
+import org.jboss.netty.channel.ChannelPipeline;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.channel.Channels;
+import org.jboss.netty.channel.socket.SocketChannel;
+import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
+import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
+import org.jboss.netty.handler.codec.compression.ZlibDecoder;
+import org.jboss.netty.handler.codec.compression.ZlibEncoder;
+
+public class TestNettyServerWithCompression extends TestNettyServer {
+
+  protected static Server initializeServer(Responder responder) {
+    ChannelFactory channelFactory = new NioServerSocketChannelFactory(
+        Executors.newCachedThreadPool(),
+        Executors.newCachedThreadPool()
+    );
+    return new NettyServer(responder, new InetSocketAddress(0),
+        channelFactory, new CompressionChannelPipelineFactory(),
+        null);
+  }
+  
+  protected static Transceiver initializeTransceiver(int serverPort) throws IOException {
+    return new NettyTransceiver(new InetSocketAddress(serverPort),
+        new CompressionChannelFactory(),
+        CONNECT_TIMEOUT_MILLIS);
+  }
+
+  /**
+   * Factory of Compression-enabled client channels
+   */
+  private static class CompressionChannelFactory extends NioClientSocketChannelFactory {
+    public CompressionChannelFactory() {
+      super(Executors.newCachedThreadPool(), Executors.newCachedThreadPool());
+    }
+
+    @Override
+    public SocketChannel newChannel(ChannelPipeline pipeline) {
+      try {
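+        // Put the zlib codecs at the head of the pipeline so inbound bytes
+        // are inflated, and outbound bytes deflated, before any other
+        // handler sees them; 6 is zlib's default compression level.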
+        ZlibEncoder encoder = new ZlibEncoder(6);
+        pipeline.addFirst("deflater", encoder);
+        pipeline.addFirst("inflater", new ZlibDecoder());
+        return super.newChannel(pipeline);
+      } catch (Exception ex) {
+        throw new RuntimeException("Cannot create Compression channel", ex);
+      }
+    }
+  }
+
+  /**
+   * Factory of Compression-enabled server worker channel pipelines
+   */
+  private static class CompressionChannelPipelineFactory
+      implements ChannelPipelineFactory {
+
+    @Override
+    public ChannelPipeline getPipeline() throws Exception {
+      ChannelPipeline pipeline = Channels.pipeline();
+      ZlibEncoder encoder = new ZlibEncoder(6);
+      pipeline.addFirst("deflater", encoder);
+      pipeline.addFirst("inflater", new ZlibDecoder());
+      return pipeline;
+    }
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithSSL.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithSSL.java
new file mode 100644
index 0000000..1611c01
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyServerWithSSL.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.security.KeyStore;
+import java.security.Security;
+import java.security.cert.X509Certificate;
+import java.util.concurrent.Executors;
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLEngine;
+import javax.net.ssl.TrustManager;
+import javax.net.ssl.X509TrustManager;
+
+import org.jboss.netty.channel.ChannelFactory;
+import org.jboss.netty.channel.ChannelPipeline;
+import org.jboss.netty.channel.ChannelPipelineFactory;
+import org.jboss.netty.channel.Channels;
+import org.jboss.netty.channel.socket.SocketChannel;
+import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
+import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
+import org.jboss.netty.handler.ssl.SslHandler;
+
+public class TestNettyServerWithSSL extends TestNettyServer {
+  public static final String TEST_CERTIFICATE = "servercert.p12";
+  public static final String TEST_CERTIFICATE_PASSWORD = "s3cret";
+  
+  protected static Server initializeServer(Responder responder) {
+    ChannelFactory channelFactory = new NioServerSocketChannelFactory(
+        Executors.newCachedThreadPool(),
+        Executors.newCachedThreadPool()
+    );
+    return new NettyServer(responder, new InetSocketAddress(0),
+        channelFactory, new SSLChannelPipelineFactory(),
+        null);
+  }
+  
+  protected static Transceiver initializeTransceiver(int serverPort) throws IOException {
+    return new NettyTransceiver(new InetSocketAddress(serverPort),
+        new SSLChannelFactory(),
+        CONNECT_TIMEOUT_MILLIS);
+  }
+
+  /**
+   * Factory of SSL-enabled client channels
+   */
+  private static class SSLChannelFactory extends NioClientSocketChannelFactory {
+    public SSLChannelFactory() {
+      super(Executors.newCachedThreadPool(), Executors.newCachedThreadPool());
+    }
+
+    @Override
+    public SocketChannel newChannel(ChannelPipeline pipeline) {
+      try {
+        SSLContext sslContext = SSLContext.getInstance("TLS");
+        sslContext.init(null, new TrustManager[]{new BogusTrustManager()},
+                        null);
+        SSLEngine sslEngine = sslContext.createSSLEngine();
+        sslEngine.setUseClientMode(true);
+        pipeline.addFirst("ssl", new SslHandler(sslEngine));
+        return super.newChannel(pipeline);
+      } catch (Exception ex) {
+        throw new RuntimeException("Cannot create SSL channel", ex);
+      }
+    }
+  }
+
+  /**
+   * Bogus trust manager accepting any certificate
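+   * (test use only: accepting every certificate disables TLS authentication)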
+   */
+  private static class BogusTrustManager implements X509TrustManager {
+    @Override
+    public void checkClientTrusted(X509Certificate[] certs, String s) {
+      // nothing
+    }
+
+    @Override
+    public void checkServerTrusted(X509Certificate[] certs, String s) {
+      // nothing
+    }
+
+    @Override
+    public X509Certificate[] getAcceptedIssuers() {
+      return new X509Certificate[0];
+    }
+  }
+
+  /**
+   * Factory of SSL-enabled server worker channel pipelines
+   */
+  private static class SSLChannelPipelineFactory
+      implements ChannelPipelineFactory {
+
+    private SSLContext createServerSSLContext() {
+      try {
+        KeyStore ks = KeyStore.getInstance("PKCS12");
+        ks.load(
+            TestNettyServer.class.getResource(TEST_CERTIFICATE).openStream(),
+            TEST_CERTIFICATE_PASSWORD.toCharArray());
+
+        // Set up key manager factory to use our key store
+        KeyManagerFactory kmf = KeyManagerFactory.getInstance(getAlgorithm());
+        kmf.init(ks, TEST_CERTIFICATE_PASSWORD.toCharArray());
+
+        SSLContext serverContext = SSLContext.getInstance("TLS");
+        serverContext.init(kmf.getKeyManagers(), null, null);
+        return serverContext;
+      } catch (Exception e) {
+        throw new Error("Failed to initialize the server-side SSLContext", e);
+      }
+    }
+
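+    /**
+     * Returns the key manager algorithm configured via the JSSE
+     * "ssl.KeyManagerFactory.algorithm" security property, falling back
+     * to SunX509 when the property is unset.
+     */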
+    private String getAlgorithm() {
+      String algorithm = Security.getProperty(
+          "ssl.KeyManagerFactory.algorithm");
+      if (algorithm == null) {
+        algorithm = "SunX509";
+      }
+      return algorithm;
+    }
+
+    @Override
+    public ChannelPipeline getPipeline() throws Exception {
+      ChannelPipeline pipeline = Channels.pipeline();
+      SSLEngine sslEngine = createServerSSLContext().createSSLEngine();
+      sslEngine.setUseClientMode(false);
+      pipeline.addLast("ssl", new SslHandler(sslEngine));
+      return pipeline;
+    }
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyTransceiverWhenServerStops.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyTransceiverWhenServerStops.java
new file mode 100644
index 0000000..b6a5c71
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestNettyTransceiverWhenServerStops.java
@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc;
+
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.ipc.specific.SpecificResponder;
+import org.apache.avro.test.Mail;
+import org.apache.avro.test.Message;
+import org.junit.Test;
+
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.junit.Assert.fail;
+
+public class TestNettyTransceiverWhenServerStops {
+//  @Test                                           // disabled: flaky test!
+  public void testNettyTransceiverWhenServerStops() throws Exception {
+    Mail mailService = new TestNettyServer.MailImpl();
+    Responder responder = new SpecificResponder(Mail.class, mailService);
+    NettyServer server = new NettyServer(responder, new InetSocketAddress(0));
+    server.start();
+
+    int serverPort = server.getPort();
+
+    final NettyTransceiver transceiver = new NettyTransceiver(new InetSocketAddress(serverPort), 60000L);
+    final Mail mail = SpecificRequestor.getClient(Mail.class, transceiver);
+
+    final AtomicInteger successes = new AtomicInteger();
+    final AtomicInteger failures = new AtomicInteger();
+    final AtomicBoolean quitOnFailure = new AtomicBoolean();
+    List<Thread> threads = new ArrayList<Thread>();
+
+    // Start a bunch of client threads that use the transceiver to send messages
+    for (int i = 0; i < 100; i++) {
+      Thread thread = new Thread(new Runnable() {
+        @Override
+        public void run() {
+          while (true) {
+            try {
+              mail.send(createMessage());
+              successes.incrementAndGet();
+            } catch (Exception e) {
+              failures.incrementAndGet();
+              if (quitOnFailure.get()) {
+                return;
+              }
+            }
+          }
+        }
+      });
+      threads.add(thread);
+      thread.start();
+    }
+
+    // Be sure the threads are running: wait until we get a good deal of successes
+    while (successes.get() < 10000) {
+      Thread.sleep(50);
+    }
+
+    // Now stop the server
+    server.close();
+
+    // Server is stopped: successes should not increase anymore: wait until we're in that situation
+    while (true) {
+      int previousSuccesses = successes.get();
+      Thread.sleep(500);
+      if (previousSuccesses == successes.get()) {
+        break;
+      }
+    }
+
+    // Start the server again
+    server.start();
+
+    // This part of the test is not solved by the current patch: it shows
+    // that when you stop and restart a server, client requests do not resume
+    // immediately; they stay blocked until the timeout passed to the
+    // NettyTransceiver expires (if I understand correctly).
+    long now = System.currentTimeMillis();
+    /*
+      System.out.println("Waiting on requests to continue");
+      int previousSuccesses = successes.get();
+      while (true) {
+      Thread.sleep(500);
+      if (successes.get() > previousSuccesses) {
+      break;
+      }
+      if (System.currentTimeMillis() - now > 5000) {
+      System.out.println("FYI: requests don't continue immediately...");
+      break;
+      }
+      }
+    */
+
+    // Stop our client, we would expect this to go on immediately
+    System.out.println("Stopping transceiver");
+    quitOnFailure.set(true);
+    now = System.currentTimeMillis();
+    transceiver.close(); // Without the patch, this close seems to hang forever
+
+    // Wait for all threads to quit
+    for (Thread thread : threads) {
+      thread.join();
+    }
+    if (System.currentTimeMillis() - now > 10000) {
+      fail("Stopping NettyTransceiver and waiting for client threads to quit took too long.");
+    } else {
+      System.out.println("Stopping NettyTransceiver and waiting for client threads to quit took "
+                         + (System.currentTimeMillis() - now) + " ms");
+    }
+  }
+
+  private Message createMessage() {
+    Message msg = Message.newBuilder().
+      setTo("wife").
+      setFrom("husband").
+      setBody("I love you!").
+      build();
+    return msg;
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestRpcPluginOrdering.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestRpcPluginOrdering.java
new file mode 100644
index 0000000..d816fa5
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestRpcPluginOrdering.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.avro.AvroRemoteException;
+import org.apache.avro.ipc.LocalTransceiver;
+import org.apache.avro.ipc.RPCContext;
+import org.apache.avro.ipc.RPCPlugin;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.ipc.specific.SpecificResponder;
+import org.apache.avro.test.Mail;
+import org.apache.avro.test.Message;
+import org.junit.Test;
+
+public class TestRpcPluginOrdering {
+
+  private static AtomicInteger orderCounter = new AtomicInteger();
+  
+  public class OrderPlugin extends RPCPlugin {
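+    // Each hook asserts its position in the lifecycle of a single RPC:
+    // clientStartConnect(0), clientSendRequest(1), serverConnecting(2),
+    // serverReceiveRequest(3), serverSendResponse(4), clientFinishConnect(5),
+    // clientReceiveResponse(6).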
+
+    public void clientStartConnect(RPCContext context) {
+      assertEquals(0, orderCounter.getAndIncrement());
+    }
+    
+    public void clientSendRequest(RPCContext context) {
+      assertEquals(1, orderCounter.getAndIncrement());
+    }
+    
+    public void clientReceiveResponse(RPCContext context) {
+      assertEquals(6, orderCounter.getAndIncrement());
+    }
+
+    public void clientFinishConnect(RPCContext context) {
+      assertEquals(5, orderCounter.getAndIncrement());
+    }
+
+    public void serverConnecting(RPCContext context) {
+      assertEquals(2, orderCounter.getAndIncrement());
+    }
+
+    public void serverReceiveRequest(RPCContext context) {
+      assertEquals(3, orderCounter.getAndIncrement());
+    }
+
+    public void serverSendResponse(RPCContext context) {
+      assertEquals(4, orderCounter.getAndIncrement());
+    }
+  }
+  
+  @Test
+  public void testRpcPluginOrdering() throws Exception {
+    OrderPlugin plugin = new OrderPlugin();
+    
+    SpecificResponder responder = new SpecificResponder(Mail.class, new TestMailImpl());
+    SpecificRequestor requestor = new SpecificRequestor(Mail.class, new LocalTransceiver(responder));
+    responder.addRPCPlugin(plugin);
+    requestor.addRPCPlugin(plugin);
+    
+    Mail client = SpecificRequestor.getClient(Mail.class, requestor);
+    Message message = createTestMessage();
+    client.send(message);
+  }
+
+  private Message createTestMessage() {
+    Message message = Message.newBuilder().
+      setTo("me at test.com").
+      setFrom("you at test.com").
+      setBody("plugin testing").
+      build();
+    return message;
+  }
+  
+  private static class TestMailImpl implements Mail {
+    public String send(Message message) throws AvroRemoteException {
+      return "Received";
+    }
+    public void fireandforget(Message message) {
+    }
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslAnonymous.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslAnonymous.java
new file mode 100644
index 0000000..68b40bf
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslAnonymous.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import org.apache.avro.ipc.generic.GenericRequestor;
+import org.apache.avro.TestProtocolGeneric;
+import org.apache.avro.ipc.reflect.ReflectRequestor;
+import org.apache.avro.ipc.reflect.ReflectResponder;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestSaslAnonymous extends TestProtocolGeneric {
+
+  private static final Logger LOG =
+    LoggerFactory.getLogger(TestSaslAnonymous.class);
+
+  @Before
+  public void testStartServer() throws Exception {
+    if (server != null) return;
+    server = new SaslSocketServer(new TestResponder(),new InetSocketAddress(0));
+    server.start();
+    client = new SaslSocketTransceiver(new InetSocketAddress(server.getPort()));
+    requestor = new GenericRequestor(PROTOCOL, client);
+  }
+
+  @Override public void testHandshake() throws IOException {}
+  @Override public void testResponseChange() throws IOException {}
+
+  public interface ProtoInterface {
+    byte[] test(byte[] b);
+  }
+
+  // test big enough to fill socket output buffer
+  @Test
+  public void test64kRequest() throws Exception {
+    SaslSocketServer s = new SaslSocketServer
+      (new ReflectResponder(ProtoInterface.class, new ProtoInterface() {
+        public byte[] test(byte[] b) { return b; }
+      }), new InetSocketAddress(0));
+    s.start();
+    SaslSocketTransceiver client =
+      new SaslSocketTransceiver(new InetSocketAddress(s.getPort()));
+    ProtoInterface proxy =
+      (ProtoInterface)ReflectRequestor.getClient(ProtoInterface.class, client);
+    
+    byte[] result = proxy.test(new byte[64*1024]);
+    
+    client.close();
+    s.close();
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslDigestMd5.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslDigestMd5.java
new file mode 100644
index 0000000..cef4f77
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslDigestMd5.java
@@ -0,0 +1,172 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.avro.ipc;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.net.InetSocketAddress;
+
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.callback.NameCallback;
+import javax.security.auth.callback.PasswordCallback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.sasl.AuthorizeCallback;
+import javax.security.sasl.RealmCallback;
+import javax.security.sasl.Sasl;
+import javax.security.sasl.SaslClient;
+import javax.security.sasl.SaslException;
+
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.ipc.generic.GenericRequestor;
+import org.apache.avro.TestProtocolGeneric;
+import org.apache.avro.util.Utf8;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+
+public class TestSaslDigestMd5 extends TestProtocolGeneric {
+
+  private static final Logger LOG =
+    LoggerFactory.getLogger(TestSaslDigestMd5.class);
+
+  private static final String HOST = "localhost";
+  private static final String SERVICE = "avro-test";
+  private static final String PRINCIPAL = "avro-test-principal";
+  private static final String PASSWORD = "super secret password";
+  private static final String REALM = "avro-test-realm";
+
+  private static final String DIGEST_MD5_MECHANISM = "DIGEST-MD5";
+  private static final Map<String, String> DIGEST_MD5_PROPS =
+    new HashMap<String, String>();
+
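+  // The property naming the DIGEST-MD5 realm differs between the IBM and
+  // Sun/Oracle SASL providers, so pick it based on the running JVM.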
+  static {
+    DIGEST_MD5_PROPS.put(Sasl.QOP, "auth-int");
+    if (System.getProperty("java.vendor").contains("IBM")) {
+      DIGEST_MD5_PROPS.put("com.ibm.security.sasl.digest.realm", REALM);
+    } else {
+      DIGEST_MD5_PROPS.put("com.sun.security.sasl.digest.realm", REALM);
+    }
+  }
+
+  private static class TestSaslCallbackHandler implements CallbackHandler {
+    @Override
+    public void handle(Callback[] callbacks)
+      throws IOException, UnsupportedCallbackException {
+      for (Callback c : callbacks) {
+        if (c instanceof NameCallback) {
+          ((NameCallback) c).setName(PRINCIPAL);
+        } else if (c instanceof PasswordCallback) {
+          ((PasswordCallback) c).setPassword(PASSWORD.toCharArray());
+        } else if (c instanceof AuthorizeCallback) {
+          ((AuthorizeCallback) c).setAuthorized(true);
+        } else if (c instanceof RealmCallback) {
+          ((RealmCallback) c).setText(REALM);
+        } else {
+          throw new UnsupportedCallbackException(c);
+        }
+      }
+    }
+  }
+
+  @Before
+  public void testStartServer() throws Exception {
+    if (server != null) return;
+    server = new SaslSocketServer
+      (new TestResponder(), new InetSocketAddress(0), DIGEST_MD5_MECHANISM,
+       SERVICE, HOST, DIGEST_MD5_PROPS, new TestSaslCallbackHandler());
+    server.start();
+    SaslClient saslClient = Sasl.createSaslClient
+      (new String[]{DIGEST_MD5_MECHANISM}, PRINCIPAL, SERVICE, HOST,
+       DIGEST_MD5_PROPS, new TestSaslCallbackHandler());
+    client = new SaslSocketTransceiver(new InetSocketAddress(server.getPort()),
+                                       saslClient);
+    requestor = new GenericRequestor(PROTOCOL, client);
+  }
+
+  @Test(expected=SaslException.class)
+  public void testAnonymousClient() throws Exception {
+    Server s = new SaslSocketServer
+      (new TestResponder(), new InetSocketAddress(0), DIGEST_MD5_MECHANISM,
+       SERVICE, HOST, DIGEST_MD5_PROPS, new TestSaslCallbackHandler());
+    s.start();
+    Transceiver c =
+      new SaslSocketTransceiver(new InetSocketAddress(s.getPort()));
+    GenericRequestor requestor = new GenericRequestor(PROTOCOL, c);
+    GenericRecord params = 
+      new GenericData.Record(PROTOCOL.getMessages().get("hello").getRequest());
+    params.put("greeting", "bob");
+    Utf8 response = (Utf8)requestor.request("hello", params);
+    assertEquals(new Utf8("goodbye"), response);
+    s.close();
+    c.close();
+  }
+
+
+  private static class WrongPasswordCallbackHandler implements CallbackHandler {
+    @Override
+    public void handle(Callback[] callbacks)
+      throws IOException, UnsupportedCallbackException {
+      for (Callback c : callbacks) {
+        if (c instanceof NameCallback) {
+          ((NameCallback) c).setName(PRINCIPAL);
+        } else if (c instanceof PasswordCallback) {
+          ((PasswordCallback) c).setPassword("wrong".toCharArray());
+        } else if (c instanceof AuthorizeCallback) {
+          ((AuthorizeCallback) c).setAuthorized(true);
+        } else if (c instanceof RealmCallback) {
+          ((RealmCallback) c).setText(REALM);
+        } else {
+          throw new UnsupportedCallbackException(c);
+        }
+      }
+    }
+  }
+
+  @Test(expected=SaslException.class)
+  public void testWrongPassword() throws Exception {
+    Server s = new SaslSocketServer
+      (new TestResponder(), new InetSocketAddress(0), DIGEST_MD5_MECHANISM,
+       SERVICE, HOST, DIGEST_MD5_PROPS, new TestSaslCallbackHandler());
+    s.start();
+    SaslClient saslClient = Sasl.createSaslClient
+      (new String[]{DIGEST_MD5_MECHANISM}, PRINCIPAL, SERVICE, HOST,
+       DIGEST_MD5_PROPS, new WrongPasswordCallbackHandler());
+    Transceiver c = new SaslSocketTransceiver
+      (new InetSocketAddress(s.getPort()), saslClient);
+    GenericRequestor requestor = new GenericRequestor(PROTOCOL, c);
+    GenericRecord params = 
+      new GenericData.Record(PROTOCOL.getMessages().get("hello").getRequest());
+    params.put("greeting", "bob");
+    Utf8 response = (Utf8)requestor.request("hello", params);
+    assertEquals(new Utf8("goodbye"), response);
+    s.close();
+    c.close();
+  }
+
+  @Override public void testHandshake() throws IOException {}
+  @Override public void testResponseChange() throws IOException {}
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/specific/TestSpecificRequestor.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/specific/TestSpecificRequestor.java
new file mode 100644
index 0000000..ac8cfe2
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/specific/TestSpecificRequestor.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc.specific;
+
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.net.URL;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Protocol;
+import org.apache.avro.ipc.HttpTransceiver;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestSpecificRequestor {
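+  // These tests verify that java.lang.Object methods invoked on a requestor
+  // proxy are answered locally: the HttpTransceiver below points at a server
+  // that is never started, so the calls can only succeed if no remote
+  // request is attempted.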
+  public interface SampleSpecificProtocol {
+    public static final Protocol PROTOCOL = Protocol.parse("{\"protocol\":\"SampleSpecificProtocol\",\"namespace\":\"org.apache.avro.ipc.specific\",\"types\":[],\"messages\":{}}");
+  }
+
+  static Object proxy;
+
+  @BeforeClass
+  public static void initializeProxy() throws Exception {
+    HttpTransceiver client = new HttpTransceiver(new URL("http://localhost"));
+    SpecificRequestor requestor = new SpecificRequestor(SampleSpecificProtocol.class, client);
+    proxy = SpecificRequestor.getClient(SampleSpecificProtocol.class, requestor);
+  }
+
+  @Test
+  public void testHashCode() throws IOException {
+    try {
+      proxy.hashCode();
+    } catch (AvroRuntimeException e) {
+      fail(e.getMessage());
+    }
+  }
+
+  @Test
+  public void testEquals() throws IOException {
+    try {
+      proxy.equals(proxy);
+    } catch (AvroRuntimeException e) {
+      fail(e.getMessage());
+    }
+  }
+
+  @Test
+  public void testToString() throws IOException {
+    try {
+      proxy.toString();
+    } catch (AvroRuntimeException e) {
+      fail(e.getMessage());
+    }
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/FakeTicks.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/FakeTicks.java
new file mode 100644
index 0000000..442890e
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/FakeTicks.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc.stats;
+
+import org.apache.avro.ipc.stats.Stopwatch.Ticks;
+
+/** Implements Ticks with manual time-winding. */
+class FakeTicks implements Ticks {
+  long time = 0;
+
+  @Override
+  public long ticks() {
+    return time;
+  }
+
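+  /** Advances the fake clock by the given number of nanoseconds. */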
+  public void passTime(long nanos) {
+    time += nanos;
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/StatsPluginOverhead.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/StatsPluginOverhead.java
new file mode 100644
index 0000000..3228f20
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/StatsPluginOverhead.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc.stats;
+
+import java.io.IOException;
+import java.net.URL;
+
+import org.apache.avro.AvroRemoteException;
+import org.apache.avro.Protocol;
+import org.apache.avro.Protocol.Message;
+import org.apache.avro.ipc.HttpServer;
+import org.apache.avro.ipc.HttpTransceiver;
+import org.apache.avro.ipc.Responder;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.generic.GenericRequestor;
+import org.apache.avro.ipc.generic.GenericResponder;
+
+/**
+ * Naively measures overhead of using the stats plugin.
+ *
+ * The API used is the generic one.
+ * The protocol is the "null" protocol: null is sent
+ * and returned.
+ */
+public class StatsPluginOverhead {
+  /** Number of RPCs per iteration. */
+  private static final int COUNT = 100000;
+  private static final Protocol NULL_PROTOCOL = Protocol.parse(
+      "{\"protocol\": \"null\", "
+      + "\"messages\": { \"null\": {"
+      + "   \"request\": [], "
+      + "   \"response\": \"null\"} } }");
+
+  private static class IdentityResponder extends GenericResponder {
+    public IdentityResponder(Protocol local) {
+      super(local);
+    }
+
+    @Override
+    public Object respond(Message message, Object request)
+        throws AvroRemoteException {
+      return request;
+    }
+  }
+
+  public static void main(String[] args) throws Exception {
+    double with = sendRpcs(true)/1000000000.0;
+    double without = sendRpcs(false)/1000000000.0;
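+    // Both figures are elapsed wall-clock seconds (nanoTime / 1e9), so
+    // COUNT/with is RPCs per second and 1000*with/COUNT is ms per RPC.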
+
+    System.out.println(String.format(
+        "Overhead: %f%%.  RPC/s: %f (with) vs %f (without).  " +
+        "RPC time (ms): %f vs %f",
+        100*(with - without)/(without),
+        COUNT/with,
+        COUNT/without,
+        1000*with/COUNT,
+        1000*without/COUNT));
+  }
+
+  /** Sends RPCs and returns nanos elapsed. */
+  private static long sendRpcs(boolean withPlugin) throws IOException {
+    HttpServer server = createServer(withPlugin);
+    Transceiver t =
+      new HttpTransceiver(new URL("http://127.0.0.1:"+server.getPort()+"/"));
+    GenericRequestor requestor = new GenericRequestor(NULL_PROTOCOL, t);
+
+    long now = System.nanoTime();
+    for (int i = 0; i < COUNT; ++i) {
+      requestor.request("null", null);
+    }
+    long elapsed = System.nanoTime() - now;
+    t.close();
+    server.close();
+    return elapsed;
+  }
+
+  /** Starts an Avro server. */
+  private static HttpServer createServer(boolean withPlugin)
+      throws IOException {
+    Responder r = new IdentityResponder(NULL_PROTOCOL);
+    if (withPlugin) {
+      r.addRPCPlugin(new StatsPlugin());
+    }
+    // Start Avro server
+    HttpServer server = new HttpServer(r, 0);
+    server.start();
+    return server;
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestHistogram.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestHistogram.java
new file mode 100644
index 0000000..0bdd700
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestHistogram.java
@@ -0,0 +1,142 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc.stats;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.TreeSet;
+
+import org.apache.avro.ipc.stats.Histogram.Entry;
+import org.apache.avro.ipc.stats.Histogram.Segmenter;
+import org.junit.Test;
+
+public class TestHistogram {
+
+  @Test
+  public void testBasicOperation() {
+    Segmenter<String, Integer> s = new Histogram.TreeMapSegmenter<Integer>(
+        new TreeSet<Integer>(Arrays.asList(0, 1, 2, 4, 8, 16)));
+
+    Histogram<String, Integer> h = new Histogram<String, Integer>(s);
+
+    for(int i = 0; i < 20; ++i) {
+      h.add(i);
+    }
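+    // 0..19 land in the buckets as [0,1)={0}, [1,2)={1}, [2,4)={2,3},
+    // [4,8)={4..7}, [8,16)={8..15}, [16,infinity)={16..19}.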
+    assertEquals(20, h.getCount());
+    assertArrayEquals(new int[] { 1, 1, 2, 4, 8, 4 }, h.getHistogram());
+
+    assertEquals("[0,1)=1;[1,2)=1;[2,4)=2;[4,8)=4;[8,16)=8;[16,infinity)=4", h.toString());
+    
+    String[] correctBucketLabels = {
+        "[0,1)", "[1,2)", "[2,4)", "[4,8)", "[8,16)", "[16,infinity)"};
+    
+    // test bucket iterator
+    int pos = 0;
+    Iterator<String> it = h.getSegmenter().getBuckets();
+    while (it.hasNext()) {
+      assertEquals(correctBucketLabels[pos], it.next());
+      pos = pos + 1;
+    }
+    assertEquals(correctBucketLabels.length, pos);
+    
+    List<String> labels = h.getSegmenter().getBucketLabels();
+    assertEquals(correctBucketLabels.length, labels.size());
+    if (labels.size() == correctBucketLabels.length) {
+      for (int i = 0; i < labels.size(); i++) {
+        assertEquals(correctBucketLabels[i], labels.get(i));
+      }
+    }
+
+    String[] correctBoundaryLabels = {
+        "0", "1", "2", "4", "8", "16"};
+    List<String> boundaryLabels = h.getSegmenter().getBoundaryLabels();
+
+    assertEquals(correctBoundaryLabels.length, boundaryLabels.size());
+    if (boundaryLabels.size() == correctBoundaryLabels.length) {
+      for (int i = 0; i < boundaryLabels.size(); i++) {
+        assertEquals(correctBoundaryLabels[i], boundaryLabels.get(i));
+      }
+      }
+    }
+    
+    List<Entry<String>> entries = new ArrayList<Entry<String>>();
+    for (Entry<String> entry : h.entries()) {
+      entries.add(entry);
+    }
+    assertEquals("[0,1)", entries.get(0).bucket);
+    assertEquals(4, entries.get(5).count);
+    assertEquals(6, entries.size());
+    
+    h.add(1010);
+    h.add(9191);
+    List<Integer> recent = h.getRecentAdditions();
+    assertTrue(recent.contains(1010));
+    assertTrue(recent.contains(9191));
+    
+  }
+
+  @Test(expected=Histogram.SegmenterException.class)
+  public void testBadValue() {
+    Segmenter<String, Long> s = new Histogram.TreeMapSegmenter<Long>(
+        new TreeSet<Long>(Arrays.asList(0L, 1L, 2L, 4L, 8L, 16L)));
+
+    Histogram<String, Long> h = new Histogram<String, Long>(s);
+    h.add(-1L);
+  }
+
+  /** Only has one bucket */
+  static class SingleBucketSegmenter implements Segmenter<String, Float >{
+    @Override
+    public Iterator<String> getBuckets() {
+      return Arrays.asList("X").iterator();
+    }
+    
+    public List<String> getBoundaryLabels() {
+      return Arrays.asList("X");
+    }
+    
+    public List<String> getBucketLabels() {
+      return Arrays.asList("X");
+    }
+
+    @Override
+    public int segment(Float value) { return 0; }
+
+    @Override
+    public int size() { return 1; }
+
+  }
+
+  @Test
+  public void testFloatHistogram() {
+    FloatHistogram<String> h = new FloatHistogram<String>(new SingleBucketSegmenter());
+    h.add(12.0f);
+    h.add(10.0f);
+    h.add(20.0f);
+
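+    // mean = (12 + 10 + 20) / 3 = 14; unbiased standard deviation =
+    // sqrt(((-2)^2 + (-4)^2 + 6^2) / (3 - 1)) = sqrt(28) ~= 5.2915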
+    assertEquals(3, h.getCount());
+    assertEquals(14.0f, h.getMean(), 0.0001);
+    assertEquals(5.291f, h.getUnbiasedStdDev(), 0.001);
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestStatsPluginAndServlet.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestStatsPluginAndServlet.java
new file mode 100644
index 0000000..eb234a5
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestStatsPluginAndServlet.java
@@ -0,0 +1,218 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc.stats;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.io.StringWriter;
+import java.net.URL;
+import java.nio.ByteBuffer;
+import java.util.Random;
+
+import javax.servlet.UnavailableException;
+
+import org.apache.avro.AvroRemoteException;
+import org.apache.avro.Protocol;
+import org.apache.avro.Protocol.Message;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.ipc.HttpServer;
+import org.apache.avro.ipc.HttpTransceiver;
+import org.apache.avro.ipc.LocalTransceiver;
+import org.apache.avro.ipc.RPCContext;
+import org.apache.avro.ipc.Responder;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.generic.GenericRequestor;
+import org.apache.avro.ipc.generic.GenericResponder;
+import org.junit.Test;
+import org.mortbay.log.Log;
+
+public class TestStatsPluginAndServlet {
+  Protocol protocol = Protocol.parse("" + "{\"protocol\": \"Minimal\", "
+      + "\"messages\": { \"m\": {"
+      + "   \"request\": [{\"name\": \"x\", \"type\": \"int\"}], "
+      + "   \"response\": \"int\"} } }");
+  Message message = protocol.getMessages().get("m");
+
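+  /** One millisecond expressed in nanoseconds (Ticks count nanoseconds). */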
+  private static final long MS = 1000*1000L;
+
+  /** Returns an HTML string. */
+  private String generateServletResponse(StatsPlugin statsPlugin)
+      throws IOException {
+    StatsServlet servlet;
+    try {
+      servlet = new StatsServlet(statsPlugin);
+    } catch (UnavailableException e1) {
+      throw new IOException("Unable to create StatsServlet", e1);
+    }
+    StringWriter w = new StringWriter();
+    try {
+      servlet.writeStats(w);
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+    String o = w.toString();
+    return o;
+  }
+
+  /** Expects 0 and returns 1. */
+  static class TestResponder extends GenericResponder {
+    public TestResponder(Protocol local) {
+      super(local);
+    }
+
+    @Override
+    public Object respond(Message message, Object request)
+        throws AvroRemoteException {
+      assertEquals(0, ((GenericRecord) request).get("x"));
+      return 1;
+    }
+
+  }
+
+  private void makeRequest(Transceiver t) throws IOException {
+    GenericRecord params = new GenericData.Record(protocol.getMessages().get(
+        "m").getRequest());
+    params.put("x", 0);
+    GenericRequestor r = new GenericRequestor(protocol, t);
+    assertEquals(1, r.request("m", params));
+  }
+
+  @Test
+  public void testFullServerPath() throws IOException {
+    Responder r = new TestResponder(protocol);
+    StatsPlugin statsPlugin = new StatsPlugin();
+    r.addRPCPlugin(statsPlugin);
+    Transceiver t = new LocalTransceiver(r);
+
+    for (int i = 0; i < 10; ++i) {
+      makeRequest(t);
+    }
+
+    String o = generateServletResponse(statsPlugin);
+    assertTrue(o.contains("10 calls"));
+  }
+
+  @Test
+  public void testMultipleRPCs() throws IOException {
+    FakeTicks t = new FakeTicks();
+    StatsPlugin statsPlugin = new StatsPlugin(t, StatsPlugin.LATENCY_SEGMENTER,
+        StatsPlugin.PAYLOAD_SEGMENTER);
+    RPCContext context1 = makeContext();
+    RPCContext context2 = makeContext();
+    statsPlugin.serverReceiveRequest(context1);
+    t.passTime(100*MS); // first takes 100ms
+    statsPlugin.serverReceiveRequest(context2);
+    String r = generateServletResponse(statsPlugin);
+    // Check in-progress RPCs
+    assertTrue(r.contains("m: 0ms"));
+    assertTrue(r.contains("m: 100ms"));
+    statsPlugin.serverSendResponse(context1);
+    t.passTime(900*MS); // second takes 900ms
+    statsPlugin.serverSendResponse(context2);
+    r = generateServletResponse(statsPlugin);
+    assertTrue(r.contains("Average: 500.0ms"));
+  }
+
+  @Test
+  public void testPayloadSize() throws IOException {
+    Responder r = new TestResponder(protocol);
+    StatsPlugin statsPlugin = new StatsPlugin();
+    r.addRPCPlugin(statsPlugin);
+    Transceiver t = new LocalTransceiver(r);
+    makeRequest(t);
+
+    String resp = generateServletResponse(statsPlugin);
+    assertTrue(resp.contains("Average: 2.0"));
+  }
+
+  private RPCContext makeContext() {
+    RPCContext context = new RPCContext();
+    context.setMessage(message);
+    return context;
+  }
+
+  /** Sleeps as requested. */
+  private static class SleepyResponder extends GenericResponder {
+    public SleepyResponder(Protocol local) {
+      super(local);
+    }
+
+    @Override
+    public Object respond(Message message, Object request)
+        throws AvroRemoteException {
+      try {
+        Thread.sleep((Long)((GenericRecord)request).get("millis"));
+      } catch (InterruptedException e) {
+        throw new AvroRemoteException(e);
+      }
+      return null;
+    }
+  }
+
+  /**
+   * Demo program for using RPC stats. By default it generates client RPC
+   * requests itself. Alternatively, the avro-tools rpcsend command can be
+   * used (as below) to trigger RPCs.
+   * <pre>
+   * java -jar build/avro-tools-*.jar rpcsend '{"protocol":"sleepy","namespace":null,"types":[],"messages":{"sleep":{"request":[{"name":"millis","type":"long"}],"response":"null"}}}' sleep localhost 7002 '{"millis": 20000}'
+   * </pre>
+   * @param args
+   * @throws Exception
+   */
+  public static void main(String[] args) throws Exception {
+    if (args.length == 0) {
+      args = new String[] { "7002", "7003" };
+    }
+    Protocol protocol = Protocol.parse("{\"protocol\": \"sleepy\", "
+        + "\"messages\": { \"sleep\": {"
+        + "   \"request\": [{\"name\": \"millis\", \"type\": \"long\"}," +
+          "{\"name\": \"data\", \"type\": \"bytes\"}], "
+        + "   \"response\": \"null\"} } }");
+    Log.info("Using protocol: " + protocol.toString());
+    Responder r = new SleepyResponder(protocol);
+    StatsPlugin p = new StatsPlugin();
+    r.addRPCPlugin(p);
+
+    // Start Avro server
+    HttpServer avroServer = new HttpServer(r, Integer.parseInt(args[0]));
+    avroServer.start();
+
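+    // Expose the collected stats over HTTP (port 8080 here) so they can be
+    // watched in a browser while the demo runs; StatsServer presumably wraps
+    // StatsServlet in its own small web server.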
+    StatsServer ss = new StatsServer(p, 8080);
+    
+    HttpTransceiver trans = new HttpTransceiver(
+        new URL("http://localhost:" + Integer.parseInt(args[0])));
+    GenericRequestor req = new GenericRequestor(protocol, trans); 
+
+    while(true) {
+      Thread.sleep(1000);
+      GenericRecord params = new GenericData.Record(protocol.getMessages().get(
+        "sleep").getRequest());
+      Random rand = new Random();
+      params.put("millis", Math.abs(rand.nextLong()) % 1000);
+      int payloadSize = Math.abs(rand.nextInt()) % 10000;
+      byte[] payload = new byte[payloadSize];
+      rand.nextBytes(payload);
+      params.put("data", ByteBuffer.wrap(payload));
+      req.request("sleep", params);
+    }
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestStopwatch.java b/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestStopwatch.java
new file mode 100644
index 0000000..62f1580
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/ipc/stats/TestStopwatch.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.ipc.stats;
+
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.Test;
+
+public class TestStopwatch {
+  @Test
+  public void testNormal() {
+    FakeTicks f = new FakeTicks();
+    Stopwatch s = new Stopwatch(f);
+    f.passTime(10);
+    s.start();
+    f.passTime(20);
+    assertEquals(20, s.elapsedNanos());
+    f.passTime(40);
+    s.stop();
+    f.passTime(80);
+    assertEquals(60, s.elapsedNanos());
+  }
+
+  @Test(expected=IllegalStateException.class)
+  public void testNotStarted1() {
+    FakeTicks f = new FakeTicks();
+    Stopwatch s = new Stopwatch(f);
+    s.elapsedNanos();
+  }
+
+  @Test(expected=IllegalStateException.class)
+  public void testNotStarted2() {
+    FakeTicks f = new FakeTicks();
+    Stopwatch s = new Stopwatch(f);
+    s.stop();
+  }
+
+  @Test(expected=IllegalStateException.class)
+  public void testTwiceStarted() {
+    FakeTicks f = new FakeTicks();
+    Stopwatch s = new Stopwatch(f);
+    s.start();
+    s.start();
+  }
+
+  @Test(expected=IllegalStateException.class)
+  public void testTwiceStopped() {
+    FakeTicks f = new FakeTicks();
+    Stopwatch s = new Stopwatch(f);
+    s.start();
+    s.stop();
+    s.stop();
+  }
+
+  @Test
+  public void testSystemStopwatch() {
+    Stopwatch s = new Stopwatch(Stopwatch.SYSTEM_TICKS);
+    s.start();
+    s.stop();
+    assertTrue(s.elapsedNanos() >= 0);
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificBuilderTree.java b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificBuilderTree.java
new file mode 100644
index 0000000..a0ad619
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificBuilderTree.java
@@ -0,0 +1,257 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import org.apache.avro.test.http.*;
+import org.junit.Test;
+
+import java.util.ArrayList;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class TestSpecificBuilderTree {
+
+  private Request.Builder createPartialBuilder() {
+    Request.Builder requestBuilder = Request.newBuilder();
+    requestBuilder.setTimestamp(1234567890);
+
+    requestBuilder
+      .getConnectionBuilder()
+        .setNetworkType(NetworkType.IPv4);
+
+    requestBuilder
+      .getHttpRequestBuilder()
+        .getUserAgentBuilder()
+          .setUseragent("Chrome 123")
+          .setId("Foo");
+
+    requestBuilder
+      .getHttpRequestBuilder()
+        .getURIBuilder()
+          .setMethod(HttpMethod.GET)
+          .setPath("/index.html");
+
+    if (!requestBuilder
+           .getHttpRequestBuilder()
+             .getURIBuilder()
+               .hasParameters()) {
+      requestBuilder
+        .getHttpRequestBuilder()
+          .getURIBuilder()
+            .setParameters(new ArrayList<QueryParameter>());
+    }
+
+    requestBuilder
+      .getHttpRequestBuilder()
+        .getURIBuilder()
+          .getParameters()
+            .add(QueryParameter.newBuilder().setName("Foo").setValue("Bar").build());
+
+    return requestBuilder;
+  }
+
+  @Test(expected = org.apache.avro.AvroRuntimeException.class)
+  public void failOnIncompleteTree() {
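+    // The partial builder never fills in required fields (e.g. the connection's
+    // networkAddress), so build() must fail validation.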
+    Request.Builder requestBuilder = createPartialBuilder();
+    Request request = requestBuilder.build();
+    fail("Should NEVER get here");
+  }
+
+  @Test
+  public void copyBuilder() {
+    Request.Builder requestBuilder1 = createPartialBuilder();
+
+    Request.Builder requestBuilder2 = Request.newBuilder(requestBuilder1);
+
+    requestBuilder1
+      .getConnectionBuilder()
+        .setNetworkAddress("1.1.1.1");
+
+    requestBuilder2
+      .getConnectionBuilder()
+        .setNetworkAddress("2.2.2.2");
+
+    requestBuilder2
+      .getHttpRequestBuilder()
+        .getUserAgentBuilder()
+          .setId("Bar");
+
+    Request request1 = requestBuilder1.build();
+    Request request2 = requestBuilder2.build();
+
+    assertEquals(NetworkType.IPv4,  request1.getConnection().getNetworkType());
+    assertEquals("1.1.1.1",         request1.getConnection().getNetworkAddress());
+    assertEquals("Chrome 123",      request1.getHttpRequest().getUserAgent().getUseragent());
+    assertEquals("Foo",             request1.getHttpRequest().getUserAgent().getId());
+    assertEquals(HttpMethod.GET,    request1.getHttpRequest().getURI().getMethod());
+    assertEquals("/index.html",     request1.getHttpRequest().getURI().getPath());
+    assertEquals(1,                 request1.getHttpRequest().getURI().getParameters().size());
+    assertEquals("Foo",             request1.getHttpRequest().getURI().getParameters().get(0).getName());
+    assertEquals("Bar",             request1.getHttpRequest().getURI().getParameters().get(0).getValue());
+
+    assertEquals(NetworkType.IPv4,  request2.getConnection().getNetworkType());
+    assertEquals("2.2.2.2",         request2.getConnection().getNetworkAddress());
+    assertEquals("Chrome 123",      request2.getHttpRequest().getUserAgent().getUseragent());
+    assertEquals("Bar",             request2.getHttpRequest().getUserAgent().getId());
+    assertEquals(HttpMethod.GET,    request2.getHttpRequest().getURI().getMethod());
+    assertEquals("/index.html",     request2.getHttpRequest().getURI().getPath());
+    assertEquals(1,                 request2.getHttpRequest().getURI().getParameters().size());
+    assertEquals("Foo",             request2.getHttpRequest().getURI().getParameters().get(0).getName());
+    assertEquals("Bar",             request2.getHttpRequest().getURI().getParameters().get(0).getValue());
+  }
+
+  @Test
+  public void createBuilderFromInstance(){
+    Request.Builder requestBuilder1 = createPartialBuilder();
+    requestBuilder1
+      .getConnectionBuilder()
+        .setNetworkAddress("1.1.1.1");
+
+    Request request1 = requestBuilder1.build();
+
+    Request.Builder requestBuilder2 = Request.newBuilder(request1);
+
+    requestBuilder2
+      .getConnectionBuilder()
+        .setNetworkAddress("2.2.2.2");
+
+    requestBuilder2
+      .getHttpRequestBuilder()
+        .getUserAgentBuilder()
+          .setId("Bar");
+
+    requestBuilder2
+      .getHttpRequestBuilder()
+        .getURIBuilder()
+          .setMethod(HttpMethod.POST);
+
+    requestBuilder2
+      .getHttpRequestBuilder()
+        .getUserAgentBuilder()
+          .setUseragent("Firefox 456");
+
+    Request request2 = requestBuilder2.build();
+
+    assertEquals(NetworkType.IPv4,  request1.getConnection().getNetworkType());
+    assertEquals("1.1.1.1",         request1.getConnection().getNetworkAddress());
+    assertEquals("Chrome 123",      request1.getHttpRequest().getUserAgent().getUseragent());
+    assertEquals("Foo",             request1.getHttpRequest().getUserAgent().getId());
+    assertEquals(HttpMethod.GET,    request1.getHttpRequest().getURI().getMethod());
+    assertEquals("/index.html",     request1.getHttpRequest().getURI().getPath());
+    assertEquals(1,                 request1.getHttpRequest().getURI().getParameters().size());
+    assertEquals("Foo",             request1.getHttpRequest().getURI().getParameters().get(0).getName());
+    assertEquals("Bar",             request1.getHttpRequest().getURI().getParameters().get(0).getValue());
+
+    assertEquals(NetworkType.IPv4,  request2.getConnection().getNetworkType());
+    assertEquals("2.2.2.2",         request2.getConnection().getNetworkAddress());
+    assertEquals("Firefox 456",     request2.getHttpRequest().getUserAgent().getUseragent());
+    assertEquals("Bar",             request2.getHttpRequest().getUserAgent().getId());
+    assertEquals(HttpMethod.POST,   request2.getHttpRequest().getURI().getMethod());
+    assertEquals("/index.html",     request2.getHttpRequest().getURI().getPath());
+    assertEquals(1,                 request2.getHttpRequest().getURI().getParameters().size());
+    assertEquals("Foo",             request2.getHttpRequest().getURI().getParameters().get(0).getName());
+    assertEquals("Bar",             request2.getHttpRequest().getURI().getParameters().get(0).getValue());
+  }
+
+  private Request.Builder createLastOneTestsBuilder() {
+    Request.Builder requestBuilder = Request.newBuilder();
+    requestBuilder.setTimestamp(1234567890);
+
+    requestBuilder
+      .getConnectionBuilder()
+        .setNetworkType(NetworkType.IPv4)
+        .setNetworkAddress("1.1.1.1");
+
+    return requestBuilder;
+  }
+
+  @Test
+  public void lastOneWins_Setter() {
+    Request.Builder requestBuilder = createLastOneTestsBuilder();
+
+    requestBuilder
+      .getHttpRequestBuilder()
+        .getURIBuilder()
+          .setMethod(HttpMethod.GET)
+          .setPath("/index.html");
+
+    requestBuilder
+      .getHttpRequestBuilder()
+        .getUserAgentBuilder()
+          .setUseragent("Chrome 123")
+          .setId("Foo");
+
+    HttpRequest httpRequest = HttpRequest.newBuilder()
+            .setUserAgent(new UserAgent("Bar","Firefox 321"))
+            .setURI(HttpURI.newBuilder()
+                    .setMethod(HttpMethod.POST)
+                    .setPath("/login.php")
+                    .build())
+            .build();
+
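+    // Setting the complete HttpRequest after populating the sub-builders means
+    // the directly-set value wins ("last one wins").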
+    Request request = requestBuilder.setHttpRequest(httpRequest).build();
+
+    assertEquals(NetworkType.IPv4,  request.getConnection().getNetworkType());
+    assertEquals("1.1.1.1",         request.getConnection().getNetworkAddress());
+    assertEquals(0,                 request.getHttpRequest().getURI().getParameters().size());
+
+    assertEquals("Firefox 321",     request.getHttpRequest().getUserAgent().getUseragent());
+    assertEquals("Bar",             request.getHttpRequest().getUserAgent().getId());
+    assertEquals(HttpMethod.POST,   request.getHttpRequest().getURI().getMethod());
+    assertEquals("/login.php",      request.getHttpRequest().getURI().getPath());
+  }
+
+  @Test
+  public void lastOneWins_Builder() {
+    Request.Builder requestBuilder = createLastOneTestsBuilder();
+
+    HttpRequest httpRequest = HttpRequest.newBuilder()
+            .setUserAgent(new UserAgent("Bar", "Firefox 321"))
+            .setURI(HttpURI.newBuilder()
+                    .setMethod(HttpMethod.POST)
+                    .setPath("/login.php")
+                    .build())
+            .build();
+    requestBuilder.setHttpRequest(httpRequest);
+
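+    // Here the order is reversed: the sub-builder calls below come after
+    // setHttpRequest(), so their values win instead.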
+    requestBuilder
+      .getHttpRequestBuilder()
+        .getURIBuilder()
+          .setMethod(HttpMethod.GET)
+          .setPath("/index.html");
+
+    requestBuilder
+      .getHttpRequestBuilder()
+        .getUserAgentBuilder()
+          .setUseragent("Chrome 123")
+          .setId("Foo");
+
+    Request request = requestBuilder.build();
+
+    assertEquals(NetworkType.IPv4,  request.getConnection().getNetworkType());
+    assertEquals("1.1.1.1",         request.getConnection().getNetworkAddress());
+    assertEquals("Chrome 123",      request.getHttpRequest().getUserAgent().getUseragent());
+    assertEquals("Foo",             request.getHttpRequest().getUserAgent().getId());
+    assertEquals(0,                 request.getHttpRequest().getURI().getParameters().size());
+
+    assertEquals(HttpMethod.GET,    request.getHttpRequest().getURI().getMethod());
+    assertEquals("/index.html",     request.getHttpRequest().getURI().getPath());
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificData.java b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificData.java
new file mode 100644
index 0000000..d1a0377
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificData.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.ArrayList;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+
+import org.apache.avro.FooBarSpecificRecord;
+import org.apache.avro.TypeEnum;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.junit.Test;
+import org.junit.Assert;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.util.Utf8;
+
+import org.apache.avro.TestSchema;
+import org.apache.avro.test.TestRecord;
+import org.apache.avro.test.MD5;
+import org.apache.avro.test.Kind;
+import org.apache.avro.test.Reserved;
+
+public class TestSpecificData {
+  
+  /** Make sure that even with nulls, hashCode() doesn't throw NPE. */
+  @Test
+  public void testHashCode() {
+    new TestRecord().hashCode();
+    SpecificData.get().hashCode(null, TestRecord.SCHEMA$);
+  }
+
+  /** Make sure that even with nulls, toString() doesn't throw NPE. */
+  @Test
+  public void testToString() {
+    new TestRecord().toString();
+  }
+
+  private static class X {
+    public Map<String,String> map;
+  }
+
+  @Test
+  public void testGetMapSchema() throws Exception {
+    SpecificData.get().getSchema(X.class.getField("map").getGenericType());
+  }
+
+  /** Test nesting of specific data within generic. */
+  @Test
+  public void testSpecificWithinGeneric() throws Exception {
+    // define a record with a field that's a generated TestRecord
+    Schema schema = Schema.createRecord("Foo", "", "x.y.z", false);
+    List<Schema.Field> fields = new ArrayList<Schema.Field>();
+    fields.add(new Schema.Field("f", TestRecord.SCHEMA$, "", null));
+    schema.setFields(fields);
+
+    // create a generic instance of this record
+    TestRecord nested = new TestRecord();
+    nested.setName("foo");
+    nested.setKind(Kind.BAR);
+    nested.setHash(new MD5(new byte[]{0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5}));
+    GenericData.Record record = new GenericData.Record(schema);
+    record.put("f", nested);
+
+    // test that this instance can be written & re-read
+    TestSchema.checkBinary(schema, record,
+                           new SpecificDatumWriter<Object>(),
+                           new SpecificDatumReader<Object>());
+
+    TestSchema.checkDirectBinary(schema, record,
+        new SpecificDatumWriter<Object>(),
+        new SpecificDatumReader<Object>());
+
+    TestSchema.checkBlockingBinary(schema, record,
+        new SpecificDatumWriter<Object>(),
+        new SpecificDatumReader<Object>());
+  }
+
+  @Test public void testGetClassSchema() throws Exception {
+    Assert.assertEquals(TestRecord.getClassSchema(), TestRecord.SCHEMA$);
+    Assert.assertEquals(MD5.getClassSchema(), MD5.SCHEMA$);
+    Assert.assertEquals(Kind.getClassSchema(), Kind.SCHEMA$);
+  }
+
+  @Test
+  public void testSpecificRecordToString() throws IOException {
+    FooBarSpecificRecord foo = FooBarSpecificRecord.newBuilder()
+      .setId(123)
+      .setName("foo")
+      .setNicknames(Arrays.asList("bar"))
+      .setRelatedids(Arrays.asList(1, 2, 3))
+      .setTypeEnum(TypeEnum.c)
+      .build();
+
+    String json = foo.toString();
+    JsonFactory factory = new JsonFactory();
+    JsonParser parser = factory.createJsonParser(json);
+    ObjectMapper mapper = new ObjectMapper();
+
+    // will throw exception if string is not parsable json
+    mapper.readTree(parser);
+  }
+
+  @Test public void testExternalizeable() throws Exception {
+    TestRecord before = new TestRecord();
+    before.setName("foo");
+    before.setKind(Kind.BAR);
+    before.setHash(new MD5(new byte[]{0,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5}));
+    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
+    ObjectOutputStream out = new ObjectOutputStream(bytes);
+    out.writeObject(before);
+    out.close();
+
+    ObjectInputStream in =
+      new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()));
+    TestRecord after = (TestRecord)in.readObject();
+
+    Assert.assertEquals(before, after);
+
+  }
+
+  @Test public void testReservedEnumSymbol() throws Exception {
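+    // "default" is a Java reserved word, so the generated enum constant is
+    // mangled to default$; createEnum must still resolve the unmangled name.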
+    Assert.assertEquals(Reserved.default$,
+                        SpecificData.get().createEnum("default",
+                                                      Reserved.SCHEMA$));
+  }
+
+}
+
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificDatumReader.java b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificDatumReader.java
new file mode 100644
index 0000000..4b73de7
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificDatumReader.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.specific;
+
+import static org.junit.Assert.*;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.Arrays;
+import java.util.HashMap;
+
+import org.apache.avro.FooBarSpecificRecord;
+import org.apache.avro.FooBarSpecificRecord.Builder;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.junit.Test;
+
+import test.StringablesRecord;
+
+public class TestSpecificDatumReader {
+
+  public static byte[] serializeRecord(FooBarSpecificRecord fooBarSpecificRecord) throws IOException {
+    SpecificDatumWriter<FooBarSpecificRecord> datumWriter = 
+        new SpecificDatumWriter<FooBarSpecificRecord>(FooBarSpecificRecord.SCHEMA$);
+    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+    Encoder encoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
+    datumWriter.write(fooBarSpecificRecord, encoder);
+    encoder.flush();
+    return byteArrayOutputStream.toByteArray();
+  }
+
+  public static byte[] serializeRecord(StringablesRecord stringablesRecord) throws IOException {
+    SpecificDatumWriter<StringablesRecord> datumWriter =
+      new SpecificDatumWriter<StringablesRecord>(StringablesRecord.SCHEMA$);
+    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+    Encoder encoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
+    datumWriter.write(stringablesRecord, encoder);
+    encoder.flush();
+    return byteArrayOutputStream.toByteArray();
+  }
+
+  @Test
+  public void testRead() throws IOException {
+    Builder newBuilder = FooBarSpecificRecord.newBuilder();
+    newBuilder.setId(42);
+    newBuilder.setName("foo");
+    newBuilder.setNicknames(Arrays.asList("bar"));
+    newBuilder.setRelatedids(Arrays.asList(1,2,3));
+    FooBarSpecificRecord specificRecord = newBuilder.build();
+    
+    byte[] recordBytes = serializeRecord(specificRecord);
+    
+    Decoder decoder = DecoderFactory.get().binaryDecoder(recordBytes, null);
+    SpecificDatumReader<FooBarSpecificRecord> specificDatumReader = new SpecificDatumReader<FooBarSpecificRecord>(FooBarSpecificRecord.SCHEMA$);
+    FooBarSpecificRecord deserialized = new FooBarSpecificRecord();
+    specificDatumReader.read(deserialized, decoder);
+    
+    assertEquals(specificRecord, deserialized);
+  }
+
+  @Test
+  public void testStringables() throws IOException {
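+    // BigDecimal and BigInteger are "stringable" types: Avro serializes them
+    // as strings via toString() and rebuilds them with the String constructor.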
+    StringablesRecord.Builder newBuilder = StringablesRecord.newBuilder();
+    newBuilder.setValue(new BigDecimal("42.11"));
+    HashMap<String, BigDecimal> mapWithBigDecimalElements = new HashMap<String, BigDecimal>();
+    mapWithBigDecimalElements.put("test", new BigDecimal("11.11"));
+    newBuilder.setMapWithBigDecimalElements(mapWithBigDecimalElements);
+    HashMap<BigInteger, String> mapWithBigIntKeys = new HashMap<BigInteger, String>();
+    mapWithBigIntKeys.put(BigInteger.ONE, "test");
+    newBuilder.setMapWithBigIntKeys(mapWithBigIntKeys);
+    StringablesRecord stringablesRecord = newBuilder.build();
+
+    byte[] recordBytes = serializeRecord(stringablesRecord);
+
+    Decoder decoder = DecoderFactory.get().binaryDecoder(recordBytes, null);
+    SpecificDatumReader<StringablesRecord> specificDatumReader =
+      new SpecificDatumReader<StringablesRecord>(StringablesRecord.SCHEMA$);
+    StringablesRecord deserialized = new StringablesRecord();
+    specificDatumReader.read(deserialized, decoder);
+
+    assertEquals(stringablesRecord, deserialized);
+
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificDatumWriter.java b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificDatumWriter.java
new file mode 100644
index 0000000..d863891
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificDatumWriter.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.io.JsonEncoder;
+import org.apache.avro.test.Kind;
+import org.apache.avro.test.TestRecordWithUnion;
+import org.junit.Test;
+
+public class TestSpecificDatumWriter {
+  @Test
+  public void testResolveUnion() throws IOException {
+    final SpecificDatumWriter<TestRecordWithUnion> writer = new SpecificDatumWriter<TestRecordWithUnion>();
+    Schema schema = TestRecordWithUnion.SCHEMA$;
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, out);
+
+    writer.setSchema(schema);
+
+    TestRecordWithUnion c = TestRecordWithUnion.newBuilder().
+      setKind(Kind.BAR).setValue("rab").build();
+    writer.write(c, encoder);
+    encoder.flush();
+    out.close();
+
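+    // Avro's JSON encoding labels each union branch with its type name, hence
+    // the {"org.apache.avro.test.Kind": ...} and {"string": ...} wrappers below.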
+    String expectedJson = String.format(
+        "{'kind':{'org.apache.avro.test.Kind':'%s'},'value':{'string':'%s'}}",
+        c.getKind().toString(), c.getValue()).replace('\'', '"');
+
+    assertEquals(expectedJson, out.toString("UTF-8"));
+  }
+
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificErrorBuilder.java b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificErrorBuilder.java
new file mode 100644
index 0000000..598a22a
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificErrorBuilder.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import org.apache.avro.test.errors.TestError;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Unit test for the SpecificErrorBuilderBase class.
+ */
+public class TestSpecificErrorBuilder {
+  @Test
+  public void testSpecificErrorBuilder() {
+    TestError.Builder testErrorBuilder = TestError.newBuilder().
+      setValue("value").setCause(new NullPointerException()).
+      setMessage$("message$");
+    
+    // Test has methods
+    Assert.assertTrue(testErrorBuilder.hasValue());
+    Assert.assertNotNull(testErrorBuilder.getValue());
+    Assert.assertTrue(testErrorBuilder.hasCause());
+    Assert.assertNotNull(testErrorBuilder.getCause());
+    Assert.assertTrue(testErrorBuilder.hasMessage$());
+    Assert.assertNotNull(testErrorBuilder.getMessage$());
+    
+    TestError testError = testErrorBuilder.build();
+    Assert.assertEquals("value", testError.getValue());
+    Assert.assertEquals("value", testError.getMessage());
+    Assert.assertEquals("message$", testError.getMessage$());
+    
+    // Test copy constructor
+    Assert.assertEquals(testErrorBuilder, 
+        TestError.newBuilder(testErrorBuilder));
+    Assert.assertEquals(testErrorBuilder, TestError.newBuilder(testError));
+    
+    TestError error = new TestError("value", new NullPointerException());
+    error.setMessage$("message");
+    Assert.assertEquals(error,
+        TestError.newBuilder().setValue("value").
+          setCause(new NullPointerException()).setMessage$("message").build());
+    
+    // Test clear
+    testErrorBuilder.clearValue();
+    Assert.assertFalse(testErrorBuilder.hasValue());
+    Assert.assertNull(testErrorBuilder.getValue());
+    testErrorBuilder.clearCause();
+    Assert.assertFalse(testErrorBuilder.hasCause());
+    Assert.assertNull(testErrorBuilder.getCause());
+    testErrorBuilder.clearMessage$();
+    Assert.assertFalse(testErrorBuilder.hasMessage$());
+    Assert.assertNull(testErrorBuilder.getMessage$());
+  }
+  
+  @Test(expected=org.apache.avro.AvroRuntimeException.class)
+  public void attemptToSetNonNullableFieldToNull() {
+    TestError.newBuilder().setMessage$(null);
+  }
+}
diff --git a/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificRecordBuilder.java b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificRecordBuilder.java
new file mode 100644
index 0000000..a94b498
--- /dev/null
+++ b/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificRecordBuilder.java
@@ -0,0 +1,248 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import org.junit.Assert;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Foo;
+import org.apache.avro.Interop;
+import org.apache.avro.Kind;
+import org.apache.avro.MD5;
+import org.apache.avro.Node;
+import org.apache.avro.ipc.specific.PageView;
+import org.apache.avro.ipc.specific.Person;
+import org.apache.avro.ipc.specific.ProductPage;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * Unit test for the SpecificRecordBuilder class.
+ */
+public class TestSpecificRecordBuilder {
+  @Test
+  public void testSpecificBuilder() {
+    // Create a new builder, and leave some fields with default values empty:
+    Person.Builder builder = Person.newBuilder().setName("James Gosling").setYearOfBirth(1955).setState("CA");
+    Assert.assertTrue(builder.hasName());
+    Assert.assertEquals("James Gosling", builder.getName().toString());
+    Assert.assertTrue(builder.hasYearOfBirth());
+    Assert.assertEquals(new Integer(1955), builder.getYearOfBirth());
+    Assert.assertFalse(builder.hasCountry());
+    Assert.assertNull(builder.getCountry());
+    Assert.assertTrue(builder.hasState());
+    Assert.assertEquals("CA", builder.getState().toString());
+    Assert.assertFalse(builder.hasFriends());
+    Assert.assertNull(builder.getFriends());
+    Assert.assertFalse(builder.hasLanguages());
+    Assert.assertNull(builder.getLanguages());
+    
+    Person person = builder.build();
+    Assert.assertEquals("James Gosling", person.getName().toString());
+    Assert.assertEquals(new Integer(1955), person.getYearOfBirth());
+    Assert.assertEquals("US", person.getCountry().toString());  // country should default to "US"
+    Assert.assertEquals("CA", person.getState().toString());
+    Assert.assertNotNull(person.getFriends());  // friends should default to an empty list
+    Assert.assertEquals(0, person.getFriends().size());
+    Assert.assertNotNull(person.getLanguages()); // Languages should now be "English" and "Java"
+    Assert.assertEquals(2, person.getLanguages().size());
+    Assert.assertEquals("English", person.getLanguages().get(0).toString());
+    Assert.assertEquals("Java", person.getLanguages().get(1).toString());
+    
+    // Test copy constructors:
+    Assert.assertEquals(builder, Person.newBuilder(builder));
+    Assert.assertEquals(person, Person.newBuilder(person).build());
+    
+    Person.Builder builderCopy = Person.newBuilder(person);
+    Assert.assertEquals("James Gosling", builderCopy.getName().toString());
+    Assert.assertEquals(new Integer(1955), builderCopy.getYearOfBirth());
+    Assert.assertEquals("US", builderCopy.getCountry().toString());  // country should default to "US"
+    Assert.assertEquals("CA", builderCopy.getState().toString());
+    Assert.assertNotNull(builderCopy.getFriends());  // friends should default to an empty list
+    Assert.assertEquals(0, builderCopy.getFriends().size());
+    
+    // Test clearing fields:
+    builderCopy.clearFriends().clearCountry();
+    Assert.assertFalse(builderCopy.hasFriends());
+    Assert.assertFalse(builderCopy.hasCountry());
+    Assert.assertNull(builderCopy.getFriends());
+    Assert.assertNull(builderCopy.getCountry());
+    Person person2 = builderCopy.build();
+    Assert.assertNotNull(person2.getFriends());
+    Assert.assertTrue(person2.getFriends().isEmpty());
+  }
+  
+  @Test
+  public void testUnions() {
+    long datetime = 1234L;
+    String product = "widget";
+    PageView p = PageView.newBuilder()
+      .setDatetime(1234L)
+      .setPageContext(ProductPage.newBuilder()
+          .setProduct(product)
+          .build())
+      .build();
+    Assert.assertEquals(datetime, p.getDatetime().longValue());
+    Assert.assertEquals(ProductPage.class, p.getPageContext().getClass());
+    Assert.assertEquals(product, ((ProductPage)p.getPageContext()).getProduct());
+    
+    PageView p2 = PageView.newBuilder(p).build();
+    
+    Assert.assertEquals(datetime, p2.getDatetime().longValue());
+    Assert.assertEquals(ProductPage.class, p2.getPageContext().getClass());
+    Assert.assertEquals(product, ((ProductPage)p2.getPageContext()).getProduct());
+    
+    Assert.assertEquals(p, p2);
+    
+  }
+
+  @Test
+  public void testInterop() {
+    Interop interop = Interop.newBuilder()
+        .setNullField(null)
+        .setArrayField(Arrays.asList(new Double[] { 3.14159265, 6.022 }))
+        .setBoolField(true)
+        .setBytesField(ByteBuffer.allocate(4).put(new byte[] { 3, 2, 1, 0 }))
+        .setDoubleField(1.41421)
+        .setEnumField(Kind.C)
+        .setFixedField(new MD5(
+            new byte[] { 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3 }))
+        .setFloatField(1.61803f)
+        .setIntField(64)
+        .setLongField(1024)
+        .setMapField(Collections.singletonMap("Foo1", new Foo()))
+        .setRecordField(new Node())
+        .setStringField("MyInterop")
+        .setUnionField(2.71828)
+        .build();
+    
+    Interop copy = Interop.newBuilder(interop).build();
+    Assert.assertEquals(interop.getArrayField().size(), copy.getArrayField().size());
+    Assert.assertEquals(interop.getArrayField(), copy.getArrayField());
+    Assert.assertEquals(interop.getBoolField(), copy.getBoolField());
+    Assert.assertEquals(interop.getBytesField(), copy.getBytesField());
+    Assert.assertEquals(interop.getDoubleField(), copy.getDoubleField());
+    Assert.assertEquals(interop.getEnumField(), copy.getEnumField());
+    Assert.assertEquals(interop.getFixedField(), copy.getFixedField());
+    Assert.assertEquals(interop.getFloatField(), copy.getFloatField());
+    Assert.assertEquals(interop.getIntField(), copy.getIntField());
+    Assert.assertEquals(interop.getLongField(), copy.getLongField());
+    Assert.assertEquals(interop.getMapField(), copy.getMapField());
+    Assert.assertEquals(interop.getRecordField(), copy.getRecordField());
+    Assert.assertEquals(interop.getStringField(), copy.getStringField());
+    Assert.assertEquals(interop.getUnionField(), copy.getUnionField());
+    Assert.assertEquals(interop, copy);
+  }
+  
+  @Test(expected=org.apache.avro.AvroRuntimeException.class)
+  public void attemptToSetNonNullableFieldToNull() {
+    Person.newBuilder().setName(null);
+  }
+
+  @Test(expected=org.apache.avro.AvroRuntimeException.class)
+  public void buildWithoutSettingRequiredFields1() {
+    Person.newBuilder().build();
+  }
+
+  @Test
+  public void buildWithoutSettingRequiredFields2() {
+    // Omit required non-primitive field
+    try {
+      Person.newBuilder().setYearOfBirth(1900).setState("MA").build();
+      Assert.fail("Should have thrown " + AvroRuntimeException.class.getCanonicalName());
+    } catch (AvroRuntimeException e) {
+      // Exception should mention that the 'name' field has not been set
+      Assert.assertTrue(e.getMessage().contains("name"));
+    }
+  }
+
+  @Test
+  public void buildWithoutSettingRequiredFields3() {
+    // Omit required primitive field
+    try {
+      Person.newBuilder().setName("Anon").setState("CA").build();
+      Assert.fail("Should have thrown " + AvroRuntimeException.class.getCanonicalName());
+    } catch (AvroRuntimeException e) {
+      // Exception should mention that the 'year_of_birth' field has not been set
+      Assert.assertTrue(e.getMessage().contains("year_of_birth"));
+    }
+  }
+
+  @Ignore
+  @Test
+  public void testBuilderPerformance() {
+    int count = 1000000;
+    List<Person> friends = new ArrayList<Person>(0);
+    List<String> languages = new ArrayList<String>(Arrays.asList(new String[] { "English", "Java" }));
+    long startTimeNanos = System.nanoTime();
+    for (int ii = 0; ii < count; ii++) {
+      Person.newBuilder().setName("James Gosling").setYearOfBirth(1955).setCountry("US").setState("CA").setFriends(friends).
+        setLanguages(languages).build();
+    }
+    long durationNanos = System.nanoTime() - startTimeNanos;
+    double durationMillis = durationNanos / 1e6d;
+    System.out.println("Built " + count + " records in " + durationMillis + "ms (" + 
+        (count / (durationMillis / 1000d)) + " records/sec, " + (durationMillis / count) + 
+        "ms/record");
+  }
+  
+  @Ignore
+  @Test
+  public void testBuilderPerformanceWithDefaultValues() {
+    int count = 1000000;
+    long startTimeNanos = System.nanoTime();
+    for (int ii = 0; ii < count; ii++) {
+      Person.newBuilder().setName("James Gosling").setYearOfBirth(1955).setState("CA").build();
+    }
+    long durationNanos = System.nanoTime() - startTimeNanos;
+    double durationMillis = durationNanos / 1e6d;
+    System.out.println("Built " + count + " records in " + durationMillis + "ms (" + 
+        (count / (durationMillis / 1000d)) + " records/sec, " + (durationMillis / count) + 
+        "ms/record");
+  }
+
+  @Ignore
+  @Test
+  @SuppressWarnings("deprecation")
+  public void testManualBuildPerformance() {
+    int count = 1000000;
+    List<Person> friends = new ArrayList<Person>(0);
+    List<String> languages = new ArrayList<String>(Arrays.asList(new String[] { "English", "Java" }));
+    long startTimeNanos = System.nanoTime();
+    for (int ii = 0; ii < count; ii++) {
+      Person person = new Person();
+      person.name = "James Gosling";
+      person.year_of_birth = 1955;
+      person.state = "CA";
+      person.country = "US";
+      person.friends = friends;
+      person.languages = languages;
+    }
+    long durationNanos = System.nanoTime() - startTimeNanos;
+    double durationMillis = durationNanos / 1e6d;
+    System.out.println("Built " + count + " records in " + durationMillis + "ms (" + 
+        (count / (durationMillis / 1000d)) + " records/sec, " + (durationMillis / count) + 
+        "ms/record");
+  }
+}
diff --git a/lang/java/ipc/src/test/keystore b/lang/java/ipc/src/test/keystore
new file mode 100644
index 0000000..a13beae
Binary files /dev/null and b/lang/java/ipc/src/test/keystore differ
diff --git a/lang/java/ipc/src/test/resources/org/apache/avro/ipc/servercert.p12 b/lang/java/ipc/src/test/resources/org/apache/avro/ipc/servercert.p12
new file mode 100644
index 0000000..7911ce6
Binary files /dev/null and b/lang/java/ipc/src/test/resources/org/apache/avro/ipc/servercert.p12 differ
diff --git a/lang/java/ipc/src/test/truststore b/lang/java/ipc/src/test/truststore
new file mode 100644
index 0000000..e442bdc
Binary files /dev/null and b/lang/java/ipc/src/test/truststore differ
diff --git a/lang/java/mapred/pom.xml b/lang/java/mapred/pom.xml
new file mode 100644
index 0000000..55f0239
--- /dev/null
+++ b/lang/java/mapred/pom.xml
@@ -0,0 +1,205 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <artifactId>avro-parent</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.8.0</version>
+    <relativePath>../</relativePath>
+  </parent>
+
+  <artifactId>avro-mapred</artifactId>
+
+  <name>Apache Avro Mapred API</name>
+  <description>An org.apache.hadoop.mapred compatible API for using Avro Serialization in Hadoop</description>
+  <packaging>bundle</packaging>
+
+  <properties>
+    <osgi.import>
+      !org.apache.avro.mapred*,
+      org.apache.avro*;version="${project.version}",
+      org.apache.hadoop*,
+      *
+    </osgi.import>
+    <osgi.export>
+      org.apache.avro.mapred*;version="${project.version}",
+      org.apache.avro.hadoop*;version="${project.version}",
+    </osgi.export>
+  </properties>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>${project.groupId}</groupId>
+        <artifactId>avro-maven-plugin</artifactId>
+        <version>${project.version}</version>
+        <executions>
+          <execution>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>schema</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>test-schemas</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>schema</goal>
+              <goal>protocol</goal>
+            </goals>
+            <!-- compile only test protocols and schemas and place them in the classpath. -->
+            <configuration>
+              <sourceDirectory></sourceDirectory>
+              <testSourceDirectory>${parent.project.basedir}/../../../../share/test/schemas/</testSourceDirectory>
+              <testOutputDirectory>${project.build.directory}/generated-test-sources/java</testOutputDirectory>
+            </configuration>
+          </execution>
+          <execution>
+            <id>mapred-schema</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>protocol</goal>
+            </goals>
+            <configuration>
+              <stringType>String</stringType>
+              <includes>
+                <include>**/mapred/tether/*.avpr</include>
+              </includes>
+              <sourceDirectory>${parent.project.basedir}/../../../../share/schemas/</sourceDirectory>
+              <outputDirectory>${project.build.directory}/generated-sources/java</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>main</id>
+            <goals><goal>jar</goal></goals>
+            <phase>package</phase>
+          </execution>
+          <execution>
+            <id>with-classifier</id>
+            <goals><goal>jar</goal></goals>
+            <phase>package</phase>
+            <configuration>
+              <classifier>${envClassifier}</classifier>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro-ipc</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro-ipc</artifactId>
+      <classifier>tests</classifier>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>     
+      <groupId>org.easymock</groupId>
+      <artifactId>easymock</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.hamcrest</groupId>
+      <artifactId>hamcrest-library</artifactId>
+      <version>${hamcrest.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-core-asl</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-codec</groupId>
+      <artifactId>commons-codec</artifactId>
+      <version>${commons-codec.version}</version>
+    </dependency>
+  </dependencies>
+  
+  <profiles>
+     <profile>
+      <id>hadoop1</id>
+      <activation>
+        <property>
+          <name>hadoop.version</name>
+          <value>1</value>
+        </property>
+      </activation>
+      <properties>
+        <envClassifier>hadoop1</envClassifier>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <!-- hadoop's execution environment provides its own jars, usurping any others.
+            So we should not include it here -->
+          <scope>provided</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop2</id>
+      <activation>
+        <property>
+          <name>!hadoop.version</name> <!-- if no hadoop.version is set -->
+        </property>
+      </activation>
+      <properties>
+        <envClassifier>hadoop2</envClassifier>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client</artifactId>
+          <!-- hadoop's execution environment provides its own jars, usurping any others.
+            So we should not include it here -->
+          <scope>provided</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
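+  <!-- Editor's note (assumption): given the property-based activation above,
+       building against Hadoop 1 is selected with e.g.
+         mvn -Dhadoop.version=1 package
+       while leaving the property unset activates the hadoop2 profile. -->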
+
+</project>
+
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/HadoopCodecFactory.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/HadoopCodecFactory.java
new file mode 100644
index 0000000..1810208
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/HadoopCodecFactory.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.hadoop.file;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.file.CodecFactory;
+
+/**
+ * Encapsulates the ability to specify and configure an Avro compression codec
+ * from a given Hadoop codec defined with the configuration parameter:
+ * mapred.output.compression.codec
+ *
+ * Currently there are four codecs registered by default:
+ * <ul>
+ *   <li>{@code org.apache.hadoop.io.compress.DeflateCodec} will map to {@code deflate}</li>
+ *   <li>{@code org.apache.hadoop.io.compress.SnappyCodec} will map to {@code snappy}</li>
+ *   <li>{@code org.apache.hadoop.io.compress.BZip2Codec} will map to {@code bzip2}</li>
+ *   <li>{@code org.apache.hadoop.io.compress.GZipCodec} will map to {@code deflate}</li>
+ * </ul>
+ */
+public class HadoopCodecFactory {
+
+  private static final Map<String, String> HADOOP_AVRO_NAME_MAP =
+      new HashMap<String, String>();
+ 
+  static {
+    HADOOP_AVRO_NAME_MAP.put("org.apache.hadoop.io.compress.DeflateCodec", "deflate");
+    HADOOP_AVRO_NAME_MAP.put("org.apache.hadoop.io.compress.SnappyCodec", "snappy");
+    HADOOP_AVRO_NAME_MAP.put("org.apache.hadoop.io.compress.BZip2Codec", "bzip2");
+    HADOOP_AVRO_NAME_MAP.put("org.apache.hadoop.io.compress.GZipCodec", "deflate");
+  }
+  
+  /** Maps a hadoop codec name into a CodecFactory.
+  *
+  * Currently there are four hadoop codecs registered:
+  * <ul>
+  *   <li>{@code org.apache.hadoop.io.compress.DeflateCodec} will map to {@code deflate}</li>
+  *   <li>{@code org.apache.hadoop.io.compress.SnappyCodec} will map to {@code snappy}</li>
+  *   <li>{@code org.apache.hadoop.io.compress.BZip2Codec} will map to {@code bzip2}</li>
+  *   <li>{@code org.apache.hadoop.io.compress.GZipCodec} will map to {@code deflate}</li>
+  * </ul>
+  */
+  public static CodecFactory fromHadoopString(String hadoopCodecClass) {
+
+    CodecFactory o = null;
+    try {
+      String avroCodec = HADOOP_AVRO_NAME_MAP.get(hadoopCodecClass);
+      if (avroCodec != null) {
+        o = CodecFactory.fromString(avroCodec);
+      }
+    } catch (Exception e) {
+      throw new AvroRuntimeException("Unrecognized hadoop codec: " + hadoopCodecClass, e);
+    }
+    return o;
+  }
+  
+  public static String getAvroCodecName(String hadoopCodecClass) {
+    return HADOOP_AVRO_NAME_MAP.get(hadoopCodecClass);
+  }
+}
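+
+/* A minimal usage sketch (editor's note): the class-name string would normally
+ * come from the job's "mapred.output.compression.codec" setting.
+ *
+ *   CodecFactory factory = HadoopCodecFactory.fromHadoopString(
+ *       "org.apache.hadoop.io.compress.SnappyCodec");  // yields the "snappy" codec
+ *   // fromHadoopString(...) returns null when the class has no registered mapping.
+ */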
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/SortedKeyValueFile.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/SortedKeyValueFile.java
new file mode 100644
index 0000000..f7a41bf
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/file/SortedKeyValueFile.java
@@ -0,0 +1,633 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.file;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.TreeMap;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.hadoop.util.AvroCharSequenceComparator;
+import org.apache.avro.hadoop.io.AvroKeyValue;
+import org.apache.avro.mapred.FsInput;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A SortedKeyValueFile is an indexed Avro container file of KeyValue records
+ * sorted by key.
+ *
+ * <p>The SortedKeyValueFile is a directory with two files, named 'data' and
+ * 'index'. The 'data' file is an ordinary Avro container file with
+ * records. Each record has exactly two fields, 'key' and 'value'. The keys are
+ * sorted lexicographically. The 'index' file is a small Avro container file
+ * mapping keys in the 'data' file to their byte positions. The index file is
+ * intended to fit in memory, so it should remain small. There is one entry in
+ * the index file for each data block in the Avro container file.</p>
+ *
+ * <p>SortedKeyValueFile is to Avro container file as MapFile is to
+ * SequenceFile.</p>
+ */
+public class SortedKeyValueFile {
+  private static final Logger LOG = LoggerFactory.getLogger(SortedKeyValueFile.class);
+
+  /** The name of the data file within the SortedKeyValueFile directory. */
+  public static final String DATA_FILENAME = "data";
+
+  /** The name of the index file within the SortedKeyValueFile directory. */
+  public static final String INDEX_FILENAME = "index";
+
+  /**
+   * Reads a SortedKeyValueFile by loading the key index into memory.
+   *
+   * <p>When doing a lookup, this reader finds the correct block in the data file using
+   * the key index. It performs a single disk seek to the block and loads the entire block
+   * into memory. The block is scanned until the key is found or is determined not to
+   * exist.</p>
+   *
+   * @param <K> The key type.
+   * @param <V> The value type.
+   */
+  public static class Reader<K, V> implements Closeable, Iterable<AvroKeyValue<K, V>> {
+    /** The index from key to its byte offset into the data file. */
+    private final NavigableMap<K, Long> mIndex;
+
+    /** The reader for the data file. */
+    private final DataFileReader<GenericRecord> mDataFileReader;
+
+    /** The key schema for the data file. */
+    private final Schema mKeySchema;
+
+    /** The model for the data. */
+    private GenericData model;
+
+    /** A class to encapsulate the options of a Reader. */
+    public static class Options {
+      /** The configuration. */
+      private Configuration mConf;
+
+      /** The path to the SortedKeyValueFile to read. */
+      private Path mPath;
+
+      /** The reader schema for the key. */
+      private Schema mKeySchema;
+
+      /** The reader schema for the value. */
+      private Schema mValueSchema;
+
+      /** The model for the data. */
+      private GenericData model = SpecificData.get();
+
+      /**
+       * Sets the configuration.
+       *
+       * @param conf The configuration.
+       * @return This options instance.
+       */
+      public Options withConfiguration(Configuration conf) {
+        mConf = conf;
+        return this;
+      }
+
+      /**
+       * Gets the configuration.
+       *
+       * @return The configuration.
+       */
+      public Configuration getConfiguration() {
+        return mConf;
+      }
+
+      /**
+       * Sets the input path.
+       *
+       * @param path The input path.
+       * @return This options instance.
+       */
+      public Options withPath(Path path) {
+        mPath = path;
+        return this;
+      }
+
+      /**
+       * Gets the input path.
+       *
+       * @return The input path.
+       */
+      public Path getPath() {
+        return mPath;
+      }
+
+      /**
+       * Sets the reader schema for the key.
+       *
+       * @param keySchema The reader schema for the key.
+       * @return This options instance.
+       */
+      public Options withKeySchema(Schema keySchema) {
+        mKeySchema = keySchema;
+        return this;
+      }
+
+      /**
+       * Gets the reader schema for the key.
+       *
+       * @return The reader schema for the key.
+       */
+      public Schema getKeySchema() {
+        return mKeySchema;
+      }
+
+      /**
+       * Sets the reader schema for the value.
+       *
+       * @param valueSchema The reader schema for the value.
+       * @return This options instance.
+       */
+      public Options withValueSchema(Schema valueSchema) {
+        mValueSchema = valueSchema;
+        return this;
+      }
+
+      /**
+       * Gets the reader schema for the value.
+       *
+       * @return The reader schema for the value.
+       */
+      public Schema getValueSchema() {
+        return mValueSchema;
+      }
+
+      /** Set the data model. */
+      public Options withDataModel(GenericData model) {
+        this.model = model;
+        return this;
+      }
+
+      /** Return the data model. */
+      public GenericData getDataModel() {
+        return model;
+      }
+
+    }
+
+    /**
+     * Constructs a reader.
+     *
+     * @param options The options.
+     * @throws IOException If there is an error.
+     */
+    public Reader(Options options) throws IOException {
+      mKeySchema = options.getKeySchema();
+      this.model = options.getDataModel();
+
+      // Load the whole index file into memory.
+      Path indexFilePath = new Path(options.getPath(), INDEX_FILENAME);
+      LOG.debug("Loading the index from " + indexFilePath);
+      mIndex = loadIndexFile(options.getConfiguration(), indexFilePath, mKeySchema);
+
+      // Open the data file.
+      Path dataFilePath = new Path(options.getPath(), DATA_FILENAME);
+      LOG.debug("Loading the data file " + dataFilePath);
+      Schema recordSchema = AvroKeyValue.getSchema(mKeySchema, options.getValueSchema());
+      DatumReader<GenericRecord> datumReader =
+        model.createDatumReader(recordSchema);
+      mDataFileReader = new DataFileReader<GenericRecord>(
+          new FsInput(dataFilePath, options.getConfiguration()), datumReader);
+    }
+
+    /**
+     * Gets the first value associated with a given key, or null if it is not found.
+     *
+     * <p>This method will move the current position in the file to the record immediately
+     * following the requested key.</p>
+     *
+     * @param key The key to look up.
+     * @return The value associated with the key, or null if not found.
+     * @throws IOException If there is an error.
+     */
+    public V get(K key) throws IOException {
+      // Look up the entry in the index.
+      LOG.debug("Looking up key " + key + " in the index.");
+      Map.Entry<K, Long> indexEntry = mIndex.floorEntry(key);
+      if (null == indexEntry) {
+        LOG.debug("Key " + key + " was not found in the index (it is before the first entry)");
+        return null;
+      }
+      LOG.debug("Key was found in the index, seeking to syncpoint " + indexEntry.getValue());
+
+      // Seek to the data block that would contain the entry.
+      mDataFileReader.seek(indexEntry.getValue());
+
+      // Scan from this position of the file until we find it or pass it.
+      Iterator<AvroKeyValue<K, V>> iter = iterator();
+      while (iter.hasNext()) {
+        AvroKeyValue<K, V> record = iter.next();
+        int comparison = model.compare(record.getKey(), key, mKeySchema);
+        if (0 == comparison) {
+          // We've found it!
+          LOG.debug("Found record for key " + key);
+          return record.getValue();
+        }
+        if (comparison > 0) {
+          // We've passed it.
+          LOG.debug("Searched beyond the point where key " + key + " would appear in the file.");
+          return null;
+        }
+      }
+
+      // We've reached the end of the file.
+      LOG.debug("Searched to the end of the file but did not find key " + key);
+      return null;
+    }
+
+    /**
+     * Returns an iterator starting at the current position in the file.
+     *
+     * <p>Use the get() method to move the current position.</p>
+     *
+     * <p>Note that this iterator is shared with other clients of the file; it does not
+     * contain a separate pointer into the file.</p>
+     *
+     * @return An iterator.
+     */
+    public Iterator<AvroKeyValue<K, V>> iterator() {
+      return new AvroKeyValue.Iterator<K, V>(mDataFileReader.iterator());
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public void close() throws IOException {
+      mDataFileReader.close();
+    }
+
+    /**
+     * Loads an index file into an in-memory map, from key to file offset in bytes.
+     *
+     * @param conf The configuration.
+     * @param path The path to the index file.
+     * @param keySchema The reader schema for the key.
+     * @throws IOException If there is an error.
+     */
+    private <K> NavigableMap<K, Long> loadIndexFile(
+        Configuration conf, Path path, Schema keySchema) throws IOException {
+      DatumReader<GenericRecord> datumReader = model.createDatumReader(
+          AvroKeyValue.getSchema(keySchema, Schema.create(Schema.Type.LONG)));
+      DataFileReader<GenericRecord> fileReader = new DataFileReader<GenericRecord>(
+          new FsInput(path, conf), datumReader);
+
+      NavigableMap<K, Long> index;
+      if (Schema.create(Schema.Type.STRING).equals(keySchema)) {
+        // Because Avro STRING types are mapped to the Java CharSequence interface, which
+        // does not extend Comparable, we need to specify a special
+        // CharSequence comparator if the key type is a string.  This hack only fixes the
+        // problem for primitive string types.  If, for example, you tried to use a record
+        // type as the key, any string fields inside of it would not be compared correctly
+        // against java.lang.Strings.
+        index = new TreeMap<K, Long>(new AvroCharSequenceComparator<K>());
+      } else {
+        index = new TreeMap<K, Long>();
+      }
+      try {
+        for (GenericRecord genericRecord : fileReader) {
+          AvroKeyValue<K, Long> indexRecord = new AvroKeyValue<K, Long>(genericRecord);
+          index.put(indexRecord.getKey(), indexRecord.getValue());
+        }
+      } finally {
+        fileReader.close();
+      }
+      return index;
+    }
+  }
+
+  /**
+   * Writes a SortedKeyValueFile.
+   *
+   * @param <K> The key type.
+   * @param <V> The value type.
+   */
+  public static class Writer<K, V> implements Closeable {
+    /** The key schema. */
+    private final Schema mKeySchema;
+
+    /** The value schema. */
+    private final Schema mValueSchema;
+
+    /** The schema of the data file records. */
+    private final Schema mRecordSchema;
+
+    /** The schema of the index file records. */
+    private final Schema mIndexSchema;
+
+    /** The model for the data. */
+    private GenericData model;
+
+    /** The writer for the data file. */
+    private final DataFileWriter<GenericRecord> mDataFileWriter;
+
+    /** The writer for the index file. */
+    private final DataFileWriter<GenericRecord> mIndexFileWriter;
+
+    /** We store an indexed key for every mIndexInterval records written to the data file. */
+    private final int mIndexInterval;
+
+    /** The number of records written to the file so far. */
+    private long mRecordsWritten;
+
+    /** The most recent key that was appended to the file, or null. */
+    private K mPreviousKey;
+
+    /** A class to encapsulate the various options of a SortedKeyValueFile.Writer. */
+    public static class Options {
+      /** The key schema. */
+      private Schema mKeySchema;
+
+      /** The value schema. */
+      private Schema mValueSchema;
+
+      /** The configuration. */
+      private Configuration mConf;
+
+      /** The path to the output file. */
+      private Path mPath;
+
+      /** The number of records between indexed entries. */
+      private int mIndexInterval = 128;
+
+      /** The model for the data. */
+      private GenericData model = SpecificData.get();
+
+      /** The compression codec for the data. */
+      private CodecFactory codec = CodecFactory.nullCodec();
+
+      /**
+       * Sets the key schema.
+       *
+       * @param keySchema The key schema.
+       * @return This options instance.
+       */
+      public Options withKeySchema(Schema keySchema) {
+        mKeySchema = keySchema;
+        return this;
+      }
+
+      /**
+       * Gets the key schema.
+       *
+       * @return The key schema.
+       */
+      public Schema getKeySchema() {
+        return mKeySchema;
+      }
+
+      /**
+       * Sets the value schema.
+       *
+       * @param valueSchema The value schema.
+       * @return This options instance.
+       */
+      public Options withValueSchema(Schema valueSchema) {
+        mValueSchema = valueSchema;
+        return this;
+      }
+
+      /**
+       * Gets the value schema.
+       *
+       * @return The value schema.
+       */
+      public Schema getValueSchema() {
+        return mValueSchema;
+      }
+
+      /**
+       * Sets the configuration.
+       *
+       * @param conf The configuration.
+       * @return This options instance.
+       */
+      public Options withConfiguration(Configuration conf) {
+        mConf = conf;
+        return this;
+      }
+
+      /**
+       * Gets the configuration.
+       *
+       * @return The configuration.
+       */
+      public Configuration getConfiguration() {
+        return mConf;
+      }
+
+      /**
+       * Sets the output path.
+       *
+       * @param path The output path.
+       * @return This options instance.
+       */
+      public Options withPath(Path path) {
+        mPath = path;
+        return this;
+      }
+
+      /**
+       * Gets the output path.
+       *
+       * @return The output path.
+       */
+      public Path getPath() {
+        return mPath;
+      }
+
+      /**
+       * Sets the index interval.
+       *
+       * <p>If the index interval is N, then every N records will be indexed into the
+       * index file.</p>
+       *
+       * @param indexInterval The index interval.
+       * @return This options instance.
+       */
+      public Options withIndexInterval(int indexInterval) {
+        mIndexInterval = indexInterval;
+        return this;
+      }
+
+      /**
+       * Gets the index interval.
+       *
+       * @return The index interval.
+       */
+      public int getIndexInterval() {
+        return mIndexInterval;
+      }
+
+      /** Set the data model. */
+      public Options withDataModel(GenericData model) {
+        this.model = model;
+        return this;
+      }
+
+      /** Return the data model. */
+      public GenericData getDataModel() {
+        return model;
+      }
+
+      /** Set the compression codec. */
+      public Options withCodec(String codec) {
+          this.codec = CodecFactory.fromString(codec);
+          return this;
+      }
+
+      /** Set the compression codec. */
+      public Options withCodec(CodecFactory codec) {
+          this.codec = codec;
+          return this;
+      }
+
+      /** Return the compression codec. */
+      public CodecFactory getCodec() {
+          return this.codec;
+      }
+    }
+
+    /**
+     * Creates a writer for a new file.
+     *
+     * @param options The options.
+     * @throws IOException If there is an error.
+     */
+    public Writer(Options options) throws IOException {
+      this.model = options.getDataModel();
+
+      if (null == options.getConfiguration()) {
+        throw new IllegalArgumentException("Configuration may not be null");
+      }
+
+      FileSystem fileSystem = options.getPath().getFileSystem(options.getConfiguration());
+
+      // Save the key and value schemas.
+      mKeySchema = options.getKeySchema();
+      if (null == mKeySchema) {
+        throw new IllegalArgumentException("Key schema may not be null");
+      }
+      mValueSchema = options.getValueSchema();
+      if (null == mValueSchema) {
+        throw new IllegalArgumentException("Value schema may not be null");
+      }
+
+      // Save the index interval.
+      mIndexInterval = options.getIndexInterval();
+
+      // Create the directory.
+      if (!fileSystem.mkdirs(options.getPath())) {
+        throw new IOException(
+            "Unable to create directory for SortedKeyValueFile: " + options.getPath());
+      }
+      LOG.debug("Created directory " + options.getPath());
+
+      // Open a writer for the data file.
+      Path dataFilePath = new Path(options.getPath(), DATA_FILENAME);
+      LOG.debug("Creating writer for avro data file: " + dataFilePath);
+      mRecordSchema = AvroKeyValue.getSchema(mKeySchema, mValueSchema);
+      DatumWriter<GenericRecord> datumWriter =
+        model.createDatumWriter(mRecordSchema);
+      OutputStream dataOutputStream = fileSystem.create(dataFilePath);
+      mDataFileWriter = new DataFileWriter<GenericRecord>(datumWriter)
+          .setSyncInterval(1 << 20)  // Set the auto-sync interval sufficiently large, since
+                                     // we will manually sync every mIndexInterval records.
+          .setCodec(options.getCodec())
+          .create(mRecordSchema, dataOutputStream);
+
+      // Open a writer for the index file.
+      Path indexFilePath = new Path(options.getPath(), INDEX_FILENAME);
+      LOG.debug("Creating writer for avro index file: " + indexFilePath);
+      mIndexSchema = AvroKeyValue.getSchema(mKeySchema, Schema.create(Schema.Type.LONG));
+      DatumWriter<GenericRecord> indexWriter =
+        model.createDatumWriter(mIndexSchema);
+      OutputStream indexOutputStream = fileSystem.create(indexFilePath);
+      mIndexFileWriter = new DataFileWriter<GenericRecord>(indexWriter)
+          .create(mIndexSchema, indexOutputStream);
+    }
+
+    /**
+     * Appends a record to the SortedKeyValueFile.
+     *
+     * @param key The key.
+     * @param value The value.
+     * @throws IOException If there is an error.
+     */
+    public void append(K key, V value) throws IOException {
+      // Make sure the keys are inserted in sorted order.
+      if (null != mPreviousKey && model.compare(key, mPreviousKey, mKeySchema) < 0) {
+        throw new IllegalArgumentException("Records must be inserted in sorted key order."
+            + " Attempted to insert key " + key + " after " + mPreviousKey + ".");
+      }
+      mPreviousKey = model.deepCopy(mKeySchema, key);
+
+      // Construct the data record.
+      AvroKeyValue<K, V> dataRecord
+          = new AvroKeyValue<K, V>(new GenericData.Record(mRecordSchema));
+      dataRecord.setKey(key);
+      dataRecord.setValue(value);
+
+      // Index it if necessary.
+      if (0 == mRecordsWritten++ % mIndexInterval) {
+        // Force a sync to the data file writer, which closes the current data block (if
+        // nonempty) and reports the current position in the file.
+        long position = mDataFileWriter.sync();
+
+        // Construct the record to put in the index.
+        AvroKeyValue<K, Long> indexRecord
+            = new AvroKeyValue<K, Long>(new GenericData.Record(mIndexSchema));
+        indexRecord.setKey(key);
+        indexRecord.setValue(position);
+        mIndexFileWriter.append(indexRecord.get());
+      }
+
+      // Write it to the data file.
+      mDataFileWriter.append(dataRecord.get());
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public void close() throws IOException {
+      mIndexFileWriter.close();
+      mDataFileWriter.close();
+    }
+  }
+}
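
To make the Writer/Reader option builders above concrete, here is a minimal
round-trip sketch. The Configuration is a plain default and the output
directory /tmp/example-skvf is hypothetical; keys use Utf8 because string
data is read back from the container file as Utf8:

    import org.apache.avro.Schema;
    import org.apache.avro.hadoop.file.SortedKeyValueFile;
    import org.apache.avro.util.Utf8;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    public class SortedKeyValueFileExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Schema stringSchema = Schema.create(Schema.Type.STRING);
        Path dir = new Path("/tmp/example-skvf");  // hypothetical output directory

        SortedKeyValueFile.Writer<CharSequence, CharSequence> writer =
            new SortedKeyValueFile.Writer<CharSequence, CharSequence>(
                new SortedKeyValueFile.Writer.Options()
                    .withConfiguration(conf)
                    .withPath(dir)
                    .withKeySchema(stringSchema)
                    .withValueSchema(stringSchema)
                    .withIndexInterval(4));  // index every 4th record
        try {
          // Keys must be appended in sorted order.
          writer.append(new Utf8("apple"), "red");
          writer.append(new Utf8("banana"), "yellow");
        } finally {
          writer.close();
        }

        SortedKeyValueFile.Reader<CharSequence, CharSequence> reader =
            new SortedKeyValueFile.Reader<CharSequence, CharSequence>(
                new SortedKeyValueFile.Reader.Options()
                    .withConfiguration(conf)
                    .withPath(dir)
                    .withKeySchema(stringSchema)
                    .withValueSchema(stringSchema));
        try {
          // One disk seek to the right block, then a scan within it.
          System.out.println(reader.get(new Utf8("banana")));  // yellow
        } finally {
          reader.close();
        }
      }
    }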
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroDatumConverter.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroDatumConverter.java
new file mode 100644
index 0000000..2dfe240
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroDatumConverter.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import org.apache.avro.Schema;
+
+/**
+ * Converts a Java object into an Avro datum.
+ *
+ * @param <INPUT> The type of the input Java object to convert.
+ * @param <OUTPUT> The type of the Avro datum to convert to.
+ */
+public abstract class AvroDatumConverter<INPUT, OUTPUT> {
+  public abstract OUTPUT convert(INPUT input);
+
+  /**
+   * Gets the writer schema that should be used to serialize the output Avro datum.
+   *
+   * @return The writer schema for the output Avro datum.
+   */
+  public abstract Schema getWriterSchema();
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroDatumConverterFactory.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroDatumConverterFactory.java
new file mode 100644
index 0000000..18836b9
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroDatumConverterFactory.java
@@ -0,0 +1,368 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import java.nio.ByteBuffer;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericFixed;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.avro.mapred.AvroWrapper;
+import org.apache.avro.mapreduce.AvroJob;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.ByteWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.JobConf;
+
+/**
+ * Constructs converters that turn objects (usually from the output of a MR job) into Avro
+ * data that can be serialized.
+ *
+ * <p>Currently, only the following types have implemented converters:
+ *   <ul>
+ *     <li>AvroKey</li>
+ *     <li>AvroValue</li>
+ *     <li>BooleanWritable</li>
+ *     <li>BytesWritable</li>
+ *     <li>ByteWritable</li>
+ *     <li>DoubleWritable</li>
+ *     <li>FloatWritable</li>
+ *     <li>IntWritable</li>
+ *     <li>LongWritable</li>
+ *     <li>NullWritable</li>
+ *     <li>Text</li>
+ *   </ul>
+ * </p>
+ */
+public class AvroDatumConverterFactory extends Configured {
+  /**
+   * Creates a new <code>AvroDatumConverterFactory</code> instance.
+   *
+   * @param conf The job configuration.
+   */
+  public AvroDatumConverterFactory(Configuration conf) {
+    super(conf);
+  }
+
+  /**
+   * Creates a converter that turns objects of type <code>inputClass</code> into Avro data.
+   *
+   * @param inputClass The type of input data to convert.
+   * @return A converter that turns objects of type <code>inputClass</code> into Avro data.
+   */
+  @SuppressWarnings("unchecked")
+  public <IN, OUT> AvroDatumConverter<IN, OUT> create(Class<IN> inputClass) {
+    boolean isMapOnly = ((JobConf)getConf()).getNumReduceTasks() == 0;
+    if (AvroKey.class.isAssignableFrom(inputClass)) {
+      Schema schema;
+      if (isMapOnly) {
+        schema = AvroJob.getMapOutputKeySchema(getConf());
+        if (null == schema) {
+          schema = AvroJob.getOutputKeySchema(getConf());
+        }
+      }
+      else {
+        schema = AvroJob.getOutputKeySchema(getConf());
+      }
+      if (null == schema) {
+        throw new IllegalStateException(
+            "Writer schema for output key was not set. Use AvroJob.setOutputKeySchema().");
+      }
+      return (AvroDatumConverter<IN, OUT>) new AvroWrapperConverter(schema);
+    }
+    if (AvroValue.class.isAssignableFrom(inputClass)) {
+      Schema schema;
+      if (isMapOnly) {
+        schema = AvroJob.getMapOutputValueSchema(getConf());
+        if (null == schema) {
+          schema = AvroJob.getOutputValueSchema(getConf());
+        }
+      }
+      else {
+        schema = AvroJob.getOutputValueSchema(getConf());
+      }
+      if (null == schema) {
+        throw new IllegalStateException(
+            "Writer schema for output value was not set. Use AvroJob.setOutputValueSchema().");
+      }
+      return (AvroDatumConverter<IN, OUT>) new AvroWrapperConverter(schema);
+    }
+    if (BooleanWritable.class.isAssignableFrom(inputClass)) {
+      return (AvroDatumConverter<IN, OUT>) new BooleanWritableConverter();
+    }
+    if (BytesWritable.class.isAssignableFrom(inputClass)) {
+      return (AvroDatumConverter<IN, OUT>) new BytesWritableConverter();
+    }
+    if (ByteWritable.class.isAssignableFrom(inputClass)) {
+      return (AvroDatumConverter<IN, OUT>) new ByteWritableConverter();
+    }
+    if (DoubleWritable.class.isAssignableFrom(inputClass)) {
+      return (AvroDatumConverter<IN, OUT>) new DoubleWritableConverter();
+    }
+    if (FloatWritable.class.isAssignableFrom(inputClass)) {
+      return (AvroDatumConverter<IN, OUT>) new FloatWritableConverter();
+    }
+    if (IntWritable.class.isAssignableFrom(inputClass)) {
+      return (AvroDatumConverter<IN, OUT>) new IntWritableConverter();
+    }
+    if (LongWritable.class.isAssignableFrom(inputClass)) {
+      return (AvroDatumConverter<IN, OUT>) new LongWritableConverter();
+    }
+    if (NullWritable.class.isAssignableFrom(inputClass)) {
+      return (AvroDatumConverter<IN, OUT>) new NullWritableConverter();
+    }
+    if (Text.class.isAssignableFrom(inputClass)) {
+      return (AvroDatumConverter<IN, OUT>) new TextConverter();
+    }
+
+    throw new UnsupportedOperationException("Unsupported input type: " + inputClass.getName());
+  }
+
+  /** Converts AvroWrappers into their wrapped Avro data. */
+  public static class AvroWrapperConverter extends AvroDatumConverter<AvroWrapper<?>, Object> {
+    private final Schema mSchema;
+
+    public AvroWrapperConverter(Schema schema) {
+      mSchema = schema;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Object convert(AvroWrapper<?> input) {
+      return input.datum();
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Schema getWriterSchema() {
+      return mSchema;
+    }
+  }
+
+  /** Converts BooleanWritables into Booleans. */
+  public static class BooleanWritableConverter
+      extends AvroDatumConverter<BooleanWritable, Boolean> {
+    private final Schema mSchema;
+
+    /** Constructor. */
+    public BooleanWritableConverter() {
+      mSchema = Schema.create(Schema.Type.BOOLEAN);
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Boolean convert(BooleanWritable input) {
+      return input.get();
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Schema getWriterSchema() {
+      return mSchema;
+    }
+  }
+
+  /** Converts BytesWritables into ByteBuffers. */
+  public static class BytesWritableConverter
+      extends AvroDatumConverter<BytesWritable, ByteBuffer> {
+    private final Schema mSchema;
+
+    /** Constructor. */
+    public BytesWritableConverter() {
+      mSchema = Schema.create(Schema.Type.BYTES);
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public ByteBuffer convert(BytesWritable input) {
+      return ByteBuffer.wrap(input.getBytes());
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Schema getWriterSchema() {
+      return mSchema;
+    }
+  }
+
+  /** Converts ByteWritables into GenericFixed of size 1. */
+  public static class ByteWritableConverter
+      extends AvroDatumConverter<ByteWritable, GenericFixed> {
+    private final Schema mSchema;
+
+    /** Constructor. */
+    public ByteWritableConverter() {
+      mSchema = Schema.createFixed("Byte", "A single byte", "org.apache.avro.mapreduce", 1);
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public GenericFixed convert(ByteWritable input) {
+      return new GenericData.Fixed(mSchema, new byte[] { input.get() });
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Schema getWriterSchema() {
+      return mSchema;
+    }
+  }
+
+  /** Converts DoubleWritables into Doubles. */
+  public static class DoubleWritableConverter extends AvroDatumConverter<DoubleWritable, Double> {
+    private final Schema mSchema;
+
+    /** Constructor. */
+    public DoubleWritableConverter() {
+      mSchema = Schema.create(Schema.Type.DOUBLE);
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Double convert(DoubleWritable input) {
+      return input.get();
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Schema getWriterSchema() {
+      return mSchema;
+    }
+  }
+
+  /** Converts FloatWritables into Floats. */
+  public static class FloatWritableConverter extends AvroDatumConverter<FloatWritable, Float> {
+    private final Schema mSchema;
+
+    /** Constructor. */
+    public FloatWritableConverter() {
+      mSchema = Schema.create(Schema.Type.FLOAT);
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Float convert(FloatWritable input) {
+      return input.get();
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Schema getWriterSchema() {
+      return mSchema;
+    }
+  }
+
+  /** Converts IntWritables into Integers. */
+  public static class IntWritableConverter extends AvroDatumConverter<IntWritable, Integer> {
+    private final Schema mSchema;
+
+    /** Constructor. */
+    public IntWritableConverter() {
+      mSchema = Schema.create(Schema.Type.INT);
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Integer convert(IntWritable input) {
+      return input.get();
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Schema getWriterSchema() {
+      return mSchema;
+    }
+  }
+
+  /** Converts LongWritables into Longs. */
+  public static class LongWritableConverter extends AvroDatumConverter<LongWritable, Long> {
+    private final Schema mSchema;
+
+    /** Constructor. */
+    public LongWritableConverter() {
+      mSchema = Schema.create(Schema.Type.LONG);
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Long convert(LongWritable input) {
+      return input.get();
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Schema getWriterSchema() {
+      return mSchema;
+    }
+  }
+
+  /** Converts NullWritables into Nulls. */
+  public static class NullWritableConverter extends AvroDatumConverter<NullWritable, Object> {
+    private final Schema mSchema;
+
+    /** Constructor. */
+    public NullWritableConverter() {
+      mSchema = Schema.create(Schema.Type.NULL);
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Object convert(NullWritable input) {
+      return null;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Schema getWriterSchema() {
+      return mSchema;
+    }
+  }
+
+  /** Converts Text into CharSequences. */
+  public static class TextConverter extends AvroDatumConverter<Text, CharSequence> {
+    private final Schema mSchema;
+
+    /** Constructor. */
+    public TextConverter() {
+      mSchema = Schema.create(Schema.Type.STRING);
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public CharSequence convert(Text input) {
+      return input.toString();
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public Schema getWriterSchema() {
+      return mSchema;
+    }
+  }
+}
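
A minimal sketch of the factory above. Note that create() casts the
configuration to JobConf internally, so the factory must be constructed with
one:

    import org.apache.avro.Schema;
    import org.apache.avro.hadoop.io.AvroDatumConverter;
    import org.apache.avro.hadoop.io.AvroDatumConverterFactory;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.mapred.JobConf;

    public class ConverterExample {
      public static void main(String[] args) {
        AvroDatumConverterFactory factory =
            new AvroDatumConverterFactory(new JobConf());

        // IntWritable has a registered converter; no Avro schema configuration needed.
        AvroDatumConverter<IntWritable, Integer> converter =
            factory.create(IntWritable.class);

        Integer datum = converter.convert(new IntWritable(42));  // plain Integer 42
        Schema writerSchema = converter.getWriterSchema();       // the "int" schema
        System.out.println(datum + " : " + writerSchema);
      }
    }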
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroDeserializer.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroDeserializer.java
new file mode 100644
index 0000000..c11cd1c
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroDeserializer.java
@@ -0,0 +1,138 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.BinaryDecoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.mapred.AvroWrapper;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.hadoop.io.serializer.Deserializer;
+
+/**
+ * Deserializes AvroWrapper objects within Hadoop.
+ *
+ * <p>Keys and values containing Avro types are more efficiently serialized
+ * outside of the WritableSerialization model, so they are wrapped in {@link
+ * org.apache.avro.mapred.AvroWrapper} objects and deserialization is handled
+ * by this class.</p>
+ *
+ * <p>MapReduce jobs that use AvroWrapper objects as keys or values need to be
+ * configured with {@link AvroSerialization}.  Use {@link
+ * org.apache.avro.mapreduce.AvroJob} to help with Job configuration.</p>
+ *
+ * @param <T> The type of Avro wrapper.
+ * @param <D> The Java type of the Avro data being wrapped.
+ */
+public abstract class AvroDeserializer<T extends AvroWrapper<D>, D> implements Deserializer<T> {
+  /** The Avro writer schema for deserializing. */
+  private final Schema mWriterSchema;
+
+  /** The Avro reader schema for deserializing. */
+  private final Schema mReaderSchema;
+
+  /** The Avro datum reader for deserializing. */
+  final DatumReader<D> mAvroDatumReader;
+
+  /** An Avro binary decoder for deserializing. */
+  private BinaryDecoder mAvroDecoder;
+
+  /**
+   * Constructor.
+   *
+   * @param writerSchema The Avro writer schema for the data to deserialize.
+   * @param readerSchema The Avro reader schema for the data to deserialize (may be null).
+   */
+  protected AvroDeserializer(Schema writerSchema, Schema readerSchema,
+                             ClassLoader classLoader) {
+    mWriterSchema = writerSchema;
+    mReaderSchema = null != readerSchema ? readerSchema : writerSchema;
+    mAvroDatumReader = new ReflectDatumReader<D>(mWriterSchema, mReaderSchema,
+                                                 new ReflectData(classLoader));
+  }
+
+  /**
+   * Constructor.
+   *
+   * @param writerSchema The Avro writer schema for the data to deserialize.
+   * @param readerSchema The Avro reader schema for the data to deserialize (may be null).
+   * @param datumReader The Avro datum reader to use for deserialization.
+   */
+  protected AvroDeserializer(Schema writerSchema, Schema readerSchema,
+                             DatumReader<D> datumReader) {
+    mWriterSchema = writerSchema;
+    mReaderSchema = null != readerSchema ? readerSchema : writerSchema;
+    mAvroDatumReader = datumReader;
+  }
+
+  /**
+   * Gets the writer schema used for deserializing.
+   *
+   * @return The writer schema.
+   */
+  public Schema getWriterSchema() {
+    return mWriterSchema;
+  }
+
+  /**
+   * Gets the reader schema used for deserializing.
+   *
+   * @return The reader schema.
+   */
+  public Schema getReaderSchema() {
+    return mReaderSchema;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void open(InputStream inputStream) throws IOException {
+    mAvroDecoder = DecoderFactory.get().directBinaryDecoder(inputStream, mAvroDecoder);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public T deserialize(T avroWrapperToReuse) throws IOException {
+    // Create a new Avro wrapper if there isn't one to reuse.
+    if (null == avroWrapperToReuse) {
+      avroWrapperToReuse = createAvroWrapper();
+    }
+
+    // Deserialize the Avro datum from the input stream.
+    avroWrapperToReuse.datum(mAvroDatumReader.read(avroWrapperToReuse.datum(), mAvroDecoder));
+    return avroWrapperToReuse;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void close() throws IOException {
+    mAvroDecoder.inputStream().close();
+  }
+
+  /**
+   * Creates a new empty <code>T</code> (extends AvroWrapper) instance.
+   *
+   * @return A new empty <code>T</code> instance.
+   */
+  protected abstract T createAvroWrapper();
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroKeyComparator.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroKeyComparator.java
new file mode 100644
index 0000000..9cb7af7
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroKeyComparator.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.io.BinaryData;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapreduce.AvroJob;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.io.RawComparator;
+
+/**
+ * The {@link org.apache.hadoop.io.RawComparator} used by jobs configured with
+ * {@link org.apache.avro.mapreduce.AvroJob}.
+ *
+ * <p>Compares AvroKeys output from the map phase for sorting.</p>
+ */
+public class AvroKeyComparator<T> extends Configured implements RawComparator<AvroKey<T>> {
+  /** The schema of the Avro data in the key to compare. */
+  private Schema mSchema;
+  private GenericData mDataModel;
+
+  /** {@inheritDoc} */
+  @Override
+  public void setConf(Configuration conf) {
+    super.setConf(conf);
+    if (null != conf) {
+      // The MapReduce framework will be using this comparator to sort AvroKey objects
+      // output from the map phase, so use the schema defined for the map output key
+      // and the data model non-raw compare() implementation.
+      mSchema = AvroJob.getMapOutputKeySchema(conf);
+      mDataModel = AvroSerialization.createDataModel(conf);
+    }
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+    return BinaryData.compare(b1, s1, b2, s2, mSchema);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public int compare(AvroKey<T> x, AvroKey<T> y) {
+    return mDataModel.compare(x.datum(), y.datum(), mSchema);
+  }
+}
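
The raw compare() above works because Avro binary encodings compare in datum
order without deserialization. A minimal sketch, with a hand-rolled encode()
helper (not part of this API) producing the byte arrays:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.io.BinaryData;
    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.EncoderFactory;

    public class BinaryCompareExample {
      static byte[] encode(Integer value, Schema schema) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder enc = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<Integer>(schema).write(value, enc);
        enc.flush();
        return out.toByteArray();
      }

      public static void main(String[] args) throws IOException {
        Schema schema = Schema.create(Schema.Type.INT);
        byte[] one = encode(1, schema);
        byte[] two = encode(2, schema);
        // Compares the encoded datums directly; negative here, since 1 < 2.
        System.out.println(BinaryData.compare(one, 0, two, 0, schema));
      }
    }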
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroKeyDeserializer.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroKeyDeserializer.java
new file mode 100644
index 0000000..dd36639
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroKeyDeserializer.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroWrapper;
+
+/**
+ * Deserializes AvroKey objects within Hadoop.
+ *
+ * @param <D> The java type of the avro data to deserialize.
+ *
+ * @see AvroDeserializer
+ */
+public class AvroKeyDeserializer<D> extends AvroDeserializer<AvroWrapper<D>, D> {
+  /**
+   * Constructor.
+   *
+   * @param writerSchema The Avro writer schema for the data to deserialize.
+   * @param readerSchema The Avro reader schema for the data to deserialize.
+   */
+  public AvroKeyDeserializer(Schema writerSchema, Schema readerSchema,
+                             ClassLoader classLoader) {
+    super(writerSchema, readerSchema, classLoader);
+  }
+
+  /**
+   * Constructor.
+   *
+   * @param writerSchema The Avro writer schema for the data to deserialize.
+   * @param readerSchema The Avro reader schema for the data to deserialize.
+   * @param datumReader The Avro datum reader to use for deserialization.
+   */
+  public AvroKeyDeserializer(Schema writerSchema, Schema readerSchema,
+                             DatumReader<D> datumReader) {
+    super(writerSchema, readerSchema, datumReader);
+  }
+
+  /**
+   * Creates a new empty <code>AvroKey</code> instance.
+   *
+   * @return A new empty AvroKey.
+   */
+  @Override
+  protected AvroWrapper<D> createAvroWrapper() {
+    return new AvroKey<D>(null);
+  }
+}
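
A minimal round-trip sketch for the deserializer above; the hand-encoded
input stands in for what the matching serializer side would produce in a
real job:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import org.apache.avro.Schema;
    import org.apache.avro.hadoop.io.AvroKeyDeserializer;
    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.EncoderFactory;
    import org.apache.avro.mapred.AvroWrapper;
    import org.apache.avro.reflect.ReflectDatumWriter;

    public class KeyDeserializerExample {
      public static void main(String[] args) throws IOException {
        Schema schema = Schema.create(Schema.Type.INT);

        // Hand-encode a datum as Avro binary.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder enc = EncoderFactory.get().directBinaryEncoder(out, null);
        new ReflectDatumWriter<Integer>(schema).write(42, enc);
        enc.flush();

        // Read it back through the Hadoop Deserializer interface.
        AvroKeyDeserializer<Integer> deserializer = new AvroKeyDeserializer<Integer>(
            schema, schema, Thread.currentThread().getContextClassLoader());
        deserializer.open(new ByteArrayInputStream(out.toByteArray()));
        AvroWrapper<Integer> key = deserializer.deserialize(null);
        System.out.println(key.datum());  // 42
        deserializer.close();
      }
    }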
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroKeyValue.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroKeyValue.java
new file mode 100644
index 0000000..1dff5c9
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroKeyValue.java
@@ -0,0 +1,158 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import java.util.Arrays;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericRecord;
+
+/**
+ * A helper object for working with the Avro generic records that are used to store key/value
+ * pairs in an Avro container file.
+ *
+ * @param <K> The java type for the key.
+ * @param <V> The java type for the value.
+ */
+public class AvroKeyValue<K, V> {
+  /** The name of the key value pair generic record. */
+  public static final String KEY_VALUE_PAIR_RECORD_NAME = "KeyValuePair";
+
+  /** The namespace of the key value pair generic record. */
+  public static final String KEY_VALUE_PAIR_RECORD_NAMESPACE = "org.apache.avro.mapreduce";
+
+  /** The name of the generic record field containing the key. */
+  public static final String KEY_FIELD = "key";
+
+  /** The name of the generic record field containing the value. */
+  public static final String VALUE_FIELD = "value";
+
+  /** The key/value generic record wrapped by this class. */
+  private final GenericRecord mKeyValueRecord;
+
+  /**
+   * Wraps a GenericRecord that is a key value pair.
+   */
+  public AvroKeyValue(GenericRecord keyValueRecord) {
+    mKeyValueRecord = keyValueRecord;
+  }
+
+  /**
+   * Gets the wrapped key/value GenericRecord.
+   *
+   * @return The key/value Avro generic record.
+   */
+  public GenericRecord get() {
+    return mKeyValueRecord;
+  }
+
+  /**
+   * Read the key.
+   *
+   * @return The key from the key/value generic record.
+   */
+  @SuppressWarnings("unchecked")
+  public K getKey() {
+    return (K) mKeyValueRecord.get(KEY_FIELD);
+  }
+
+  /**
+   * Read the value.
+   *
+   * @return The value from the key/value generic record.
+   */
+  @SuppressWarnings("unchecked")
+  public V getValue() {
+    return (V) mKeyValueRecord.get(VALUE_FIELD);
+  }
+
+  /**
+   * Sets the key.
+   *
+   * @param key The key.
+   */
+  public void setKey(K key) {
+    mKeyValueRecord.put(KEY_FIELD, key);
+  }
+
+  /**
+   * Sets the value.
+   *
+   * @param value The value.
+   */
+  public void setValue(V value) {
+    mKeyValueRecord.put(VALUE_FIELD, value);
+  }
+
+  /**
+   * Creates a KeyValuePair generic record schema.
+   *
+   * @return A schema for a generic record with two fields: 'key' and 'value'.
+   */
+  public static Schema getSchema(Schema keySchema, Schema valueSchema) {
+    Schema schema = Schema.createRecord(
+        KEY_VALUE_PAIR_RECORD_NAME, "A key/value pair", KEY_VALUE_PAIR_RECORD_NAMESPACE, false);
+    schema.setFields(Arrays.asList(
+        new Schema.Field(KEY_FIELD, keySchema, "The key", null),
+        new Schema.Field(VALUE_FIELD, valueSchema, "The value", null)));
+    return schema;
+  }
+
+  /**
+   * A wrapper for iterators over GenericRecords that are known to be KeyValue records.
+   *
+   * @param <K> The key type.
+   * @param <V> The value type.
+   */
+  public static class Iterator<K, V> implements java.util.Iterator<AvroKeyValue<K, V>> {
+    /** The wrapped iterator. */
+    private final java.util.Iterator<? extends GenericRecord> mGenericIterator;
+
+    /**
+     * Constructs an iterator over key-value map entries out of a generic iterator.
+     *
+     * @param genericIterator An iterator over some generic record entries.
+     */
+    public Iterator(java.util.Iterator<? extends GenericRecord> genericIterator) {
+      mGenericIterator = genericIterator;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public boolean hasNext() {
+      return mGenericIterator.hasNext();
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public AvroKeyValue<K, V> next() {
+      GenericRecord genericRecord = mGenericIterator.next();
+      if (null == genericRecord) {
+        return null;
+      }
+      return new AvroKeyValue<K, V>(genericRecord);
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public void remove() {
+      mGenericIterator.remove();
+    }
+  }
+}
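
A minimal sketch of building and reading one key/value record with the
helper above; the field values are arbitrary:

    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.avro.hadoop.io.AvroKeyValue;

    public class KeyValueExample {
      public static void main(String[] args) {
        // Build the two-field KeyValuePair record schema: string key, long value.
        Schema kvSchema = AvroKeyValue.getSchema(
            Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.LONG));

        AvroKeyValue<CharSequence, Long> kv =
            new AvroKeyValue<CharSequence, Long>(new GenericData.Record(kvSchema));
        kv.setKey("clicks");
        kv.setValue(1024L);

        GenericRecord record = kv.get();        // the underlying generic record
        System.out.println(record);             // {"key": "clicks", "value": 1024}
        System.out.println(kv.getValue() + 1);  // 1025
      }
    }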
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSequenceFile.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSequenceFile.java
new file mode 100644
index 0000000..73ab045
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSequenceFile.java
@@ -0,0 +1,735 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.io.SequenceFile.Metadata;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.util.Progressable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A wrapper around a Hadoop {@link org.apache.hadoop.io.SequenceFile} that
+ * also supports reading and writing Avro data.
+ *
+ * <p>The vanilla Hadoop <code>SequenceFile</code> contains a <i>header</i>
+ * followed by a sequence of <i>records</i>.  A <i>record</i> consists of a
+ * <i>key</i> and a <i>value</i>.  The <i>key</i> and <i>value</i> must
+ * either:</p>
+ *
+ * <ul>
+ *   <li>implement the <code>Writable</code> interface, or</li>
+ *   <li>be accepted by a <code>Serialization</code> registered with the
+ *       <code>SerializationFactory</code>.</li>
+ * </ul>
+ *
+ * <p>Since Avro data are Plain Old Java Objects (e.g., <code>Integer</code>
+ * for data with schema <i>"int"</i>), they do not implement <i>Writable</i>.
+ * Furthermore, a {@link org.apache.hadoop.io.serializer.Serialization} implementation
+ * cannot determine whether an object instance of type
+ * <code>CharSequence</code> that also implements <code>Writable</code> should
+ * be serialized using Avro or WritableSerialization.</p>
+ *
+ * <p>The solution implemented in <code>AvroSequenceFile</code> is to:</p>
+ *
+ * <ul>
+ *   <li>wrap Avro key data in an <code>AvroKey</code> object,</li>
+ *   <li>wrap Avro value data in an <code>AvroValue</code> object,</li>
+ *   <li>configure and register <code>AvroSerialization</code> with the
+ *       <code>SerializationFactory</code>, which will accept only objects that are instances
+ *       of either <code>AvroKey</code> or <code>AvroValue</code>, and</li>
+ *   <li>store the Avro key and value schemas in the SequenceFile <i>header</i>.</li>
+ * </ul>
+ */
+public class AvroSequenceFile {
+  private static final Logger LOG = LoggerFactory.getLogger(AvroSequenceFile.class);
+
+  /** The SequenceFile.Metadata field for the Avro key writer schema. */
+  public static final Text METADATA_FIELD_KEY_SCHEMA = new Text("avro.key.schema");
+
+  /** The SequenceFile.Metadata field for the Avro value writer schema. */
+  public static final Text METADATA_FIELD_VALUE_SCHEMA = new Text("avro.value.schema");
+
+  /** Constructor disabled for this container class. */
+  private AvroSequenceFile() {}
+
+  /**
+   * Creates a writer from a set of options.
+   *
+   * <p>Since there are different implementations of <code>Writer</code> depending on the
+   * compression type, this method constructs the appropriate subclass depending on the
+   * compression type given in the <code>options</code>.</p>
+   *
+   * @param options The options for the writer.
+   * @return A new writer instance.
+   * @throws IOException If the writer cannot be created.
+   */
+  public static SequenceFile.Writer createWriter(Writer.Options options) throws IOException {
+    return SequenceFile.createWriter(
+        options.getFileSystem(), options.getConfigurationWithAvroSerialization(),
+        options.getOutputPath(), options.getKeyClass(), options.getValueClass(),
+        options.getBufferSizeBytes(), options.getReplicationFactor(),
+        options.getBlockSizeBytes(), 
+        options.getCompressionType(), options.getCompressionCodec(),
+        options.getProgressable(), options.getMetadataWithAvroSchemas());
+  }
+
+  /**
+   * A writer for an uncompressed SequenceFile that supports Avro data.
+   */
+  public static class Writer extends SequenceFile.Writer {
+    /**
+     * A helper class to encapsulate the options that can be used to construct a Writer.
+     */
+    public static class Options {
+      /** The default write buffer size in bytes. */
+      public static final int DEFAULT_BUFFER_SIZE_BYTES = 4096;
+
+      /**
+       * A magic value representing the default for buffer size, block size, and
+       * replication factor.
+       */
+      private static final short DEFAULT = -1;
+
+      private FileSystem mFileSystem;
+      private Configuration mConf;
+      private Path mOutputPath;
+      private Class<?> mKeyClass;
+      private Schema mKeyWriterSchema;
+      private Class<?> mValueClass;
+      private Schema mValueWriterSchema;
+      private int mBufferSizeBytes;
+      private short mReplicationFactor;
+      private long mBlockSizeBytes;
+      private Progressable mProgressable;
+      private CompressionType mCompressionType;
+      private CompressionCodec mCompressionCodec;
+      private Metadata mMetadata;
+
+      /**
+       * Creates a new <code>Options</code> instance with default values.
+       */
+      public Options() {
+        mBufferSizeBytes = DEFAULT;
+        mReplicationFactor = DEFAULT;
+        mBlockSizeBytes = DEFAULT;
+        mCompressionType = CompressionType.NONE;
+        mMetadata = new Metadata();
+      }
+
+      /**
+       * Sets the filesystem the SequenceFile should be written to.
+       *
+       * @param fileSystem The filesystem.
+       * @return This options instance.
+       */
+      public Options withFileSystem(FileSystem fileSystem) {
+        if (null == fileSystem) {
+          throw new IllegalArgumentException("Filesystem may not be null");
+        }
+        mFileSystem = fileSystem;
+        return this;
+      }
+
+      /**
+       * Sets the Hadoop configuration.
+       *
+       * @param conf The configuration.
+       * @return This options instance.
+       */
+      public Options withConfiguration(Configuration conf) {
+        if (null == conf) {
+          throw new IllegalArgumentException("Configuration may not be null");
+        }
+        mConf = conf;
+        return this;
+      }
+
+      /**
+       * Sets the output path for the SequenceFile.
+       *
+       * @param outputPath The output path.
+       * @return This options instance.
+       */
+      public Options withOutputPath(Path outputPath) {
+        if (null == outputPath) {
+          throw new IllegalArgumentException("Output path may not be null");
+        }
+        mOutputPath = outputPath;
+        return this;
+      }
+
+      /**
+       * Sets the class of the key records to be written.
+       *
+       * <p>If the keys will be Avro data, use {@link
+       * #withKeySchema(org.apache.avro.Schema)} to specify the writer schema.  The key
+       * class will be automatically set to {@link org.apache.avro.mapred.AvroKey}.</p>
+       *
+       * @param keyClass The key class.
+       * @return This options instance.
+       */
+      public Options withKeyClass(Class<?> keyClass) {
+        if (null == keyClass) {
+          throw new IllegalArgumentException("Key class may not be null");
+        }
+        mKeyClass = keyClass;
+        return this;
+      }
+
+      /**
+       * Sets the writer schema of the key records when using Avro data.
+       *
+       * <p>The key class will automatically be set to {@link
+       * org.apache.avro.mapred.AvroKey}, so there is no need to call {@link
+       * #withKeyClass(Class)} when using this method.</p>
+       *
+       * @param keyWriterSchema The writer schema for the keys.
+       * @return This options instance.
+       */
+      public Options withKeySchema(Schema keyWriterSchema) {
+        if (null == keyWriterSchema) {
+          throw new IllegalArgumentException("Key schema may not be null");
+        }
+        withKeyClass(AvroKey.class);
+        mKeyWriterSchema = keyWriterSchema;
+        return this;
+      }
+
+      /**
+       * Sets the class of the value records to be written.
+       *
+       * <p>If the values will be Avro data, use {@link
+       * #withValueSchema(org.apache.avro.Schema)} to specify the writer schema.  The value
+       * class will be automatically set to {@link org.apache.avro.mapred.AvroValue}.</p>
+       *
+       * @param valueClass The value class.
+       * @return This options instance.
+       */
+      public Options withValueClass(Class<?> valueClass) {
+        if (null == valueClass) {
+          throw new IllegalArgumentException("Value class may not be null");
+        }
+        mValueClass = valueClass;
+        return this;
+      }
+
+      /**
+       * Sets the writer schema of the value records when using Avro data.
+       *
+       * <p>The value class will automatically be set to {@link
+       * org.apache.avro.mapred.AvroValue}, so there is no need to call {@link
+       * #withValueClass(Class)} when using this method.</p>
+       *
+       * @param valueWriterSchema The writer schema for the values.
+       * @return This options instance.
+       */
+      public Options withValueSchema(Schema valueWriterSchema) {
+        if (null == valueWriterSchema) {
+          throw new IllegalArgumentException("Value schema may not be null");
+        }
+        withValueClass(AvroValue.class);
+        mValueWriterSchema = valueWriterSchema;
+        return this;
+      }
+
+      /**
+       * Sets the write buffer size in bytes.
+       *
+       * @param bytes The desired buffer size.
+       * @return This options instance.
+       */
+      public Options withBufferSizeBytes(int bytes) {
+        if (bytes < 0) {
+          throw new IllegalArgumentException("Buffer size may not be negative");
+        }
+        mBufferSizeBytes = bytes;
+        return this;
+      }
+
+      /**
+       * Sets the desired replication factor for the file.
+       *
+       * @param replicationFactor The replication factor.
+       * @return This options instance.
+       */
+      public Options withReplicationFactor(short replicationFactor) {
+        if (replicationFactor <= 0) {
+          throw new IllegalArgumentException("Replication factor must be positive");
+        }
+        mReplicationFactor = replicationFactor;
+        return this;
+      }
+
+      /**
+       * Sets the desired size of the file blocks.
+       *
+       * @param bytes The desired block size in bytes.
+       * @return This options instance.
+       */
+      public Options withBlockSizeBytes(long bytes) {
+        if (bytes <= 0) {
+          throw new IllegalArgumentException("Block size must be positive");
+        }
+        mBlockSizeBytes = bytes;
+        return this;
+      }
+
+      /**
+       * Sets an object to report progress to.
+       *
+       * @param progressable A progressable object to track progress.
+       * @return This options instance.
+       */
+      public Options withProgressable(Progressable progressable) {
+        mProgressable = progressable;
+        return this;
+      }
+
+      /**
+       * Sets the type of compression.
+       *
+       * @param compressionType The type of compression for the output file.
+       * @return This options instance.
+       */
+      public Options withCompressionType(CompressionType compressionType) {
+        mCompressionType = compressionType;
+        return this;
+      }
+
+      /**
+       * Sets the compression codec to use when compression is enabled.
+       *
+       * @param compressionCodec The compression codec.
+       * @return This options instance.
+       */
+      public Options withCompressionCodec(CompressionCodec compressionCodec) {
+        mCompressionCodec = compressionCodec;
+        return this;
+      }
+
+      /**
+       * Sets the metadata that should be stored in the file <i>header</i>.
+       *
+       * @param metadata The file metadata.
+       * @return This options instance.
+       */
+      public Options withMetadata(Metadata metadata) {
+        if (null == metadata) {
+          throw new IllegalArgumentException("Metadata may not be null");
+        }
+        mMetadata = metadata;
+        return this;
+      }
+
+      /**
+       * Gets the filesystem the SequenceFile should be written to.
+       *
+       * @return The file system to write to.
+       */
+      public FileSystem getFileSystem() {
+        if (null == mFileSystem) {
+          throw new RuntimeException("Must call Options.withFileSystem()");
+        }
+        return mFileSystem;
+      }
+
+      /**
+       * Gets the Hadoop configuration.
+       *
+       * @return The Hadoop configuration.
+       */
+      public Configuration getConfiguration() {
+        return mConf;
+      }
+
+      /**
+       * Gets the Hadoop configuration with Avro serialization registered.
+       *
+       * @return The Hadoop configuration.
+       */
+      public Configuration getConfigurationWithAvroSerialization() {
+        Configuration conf = getConfiguration();
+        if (null == conf) {
+          throw new RuntimeException("Must call Options.withConfiguration()");
+        }
+
+        Configuration confWithAvro = new Configuration(conf);
+        if (null != mKeyWriterSchema) {
+          AvroSerialization.setKeyWriterSchema(confWithAvro, mKeyWriterSchema);
+        }
+        if (null != mValueWriterSchema) {
+          AvroSerialization.setValueWriterSchema(confWithAvro, mValueWriterSchema);
+        }
+        AvroSerialization.addToConfiguration(confWithAvro);
+        return confWithAvro;
+      }
+
+      /**
+       * Gets the output path for the sequence file.
+       *
+       * @return The output path.
+       */
+      public Path getOutputPath() {
+        if (null == mOutputPath) {
+          throw new RuntimeException("Must call Options.withOutputPath()");
+        }
+        return mOutputPath;
+      }
+
+      /**
+       * Gets the class of the key records.
+       *
+       * @return The key class.
+       */
+      public Class<?> getKeyClass() {
+        if (null == mKeyClass) {
+          throw new RuntimeException(
+              "Must call Options.withKeyClass() or Options.withKeySchema()");
+        }
+        return mKeyClass;
+      }
+
+      /**
+       * Gets the class of the value records.
+       *
+       * @return The value class.
+       */
+      public Class<?> getValueClass() {
+        if (null == mValueClass) {
+          throw new RuntimeException(
+              "Must call Options.withValueClass() or Options.withValueSchema()");
+        }
+        return mValueClass;
+      }
+
+      /**
+       * Gets the desired size of the buffer used when flushing records to disk.
+       *
+       * @return The buffer size in bytes.
+       */
+      public int getBufferSizeBytes() {
+        if (DEFAULT == mBufferSizeBytes) {
+          return getConfiguration().getInt("io.file.buffer.size", DEFAULT_BUFFER_SIZE_BYTES);
+        }
+        return mBufferSizeBytes;
+      }
+
+      /**
+       * Gets the desired number of replicas to store for each block of the file.
+       *
+       * @return The replication factor for the blocks of the file.
+       */
+      public short getReplicationFactor() {
+        if (DEFAULT == mReplicationFactor) {
+          return getFileSystem().getDefaultReplication();
+        }
+        return mReplicationFactor;
+      }
+
+      /**
+       * Gets the desired size of the file blocks.
+       *
+       * @return The size of a file block in bytes.
+       */
+      public long getBlockSizeBytes() {
+        if (DEFAULT == mBlockSizeBytes) {
+          return getFileSystem().getDefaultBlockSize();
+        }
+        return mBlockSizeBytes;
+      }
+
+      /**
+       * Gets the object to report progress to.
+       *
+       * @return A progressable object to track progress.
+       */
+      public Progressable getProgressable() {
+        return mProgressable;
+      }
+
+      /**
+       * Gets the type of compression.
+       *
+       * @return The compression type.
+       */
+      public CompressionType getCompressionType() {
+        return mCompressionType;
+      }
+
+      /**
+       * Gets the compression codec.
+       *
+       * @return The compression codec.
+       */
+      public CompressionCodec getCompressionCodec() {
+        return mCompressionCodec;
+      }
+
+      /**
+       * Gets the SequenceFile metadata to store in the <i>header</i>.
+       *
+       * @return The metadata header.
+       */
+      public Metadata getMetadata() {
+        return mMetadata;
+      }
+
+      /**
+       * Gets the metadata to store in the file header, which includes
+       * any necessary Avro writer schemas.
+       *
+       * @return The metadata header with Avro writer schemas if Avro data is being written.
+       */
+      private Metadata getMetadataWithAvroSchemas() {
+        // mMetadata was initialized in the constructor, and cannot be set to null.
+        assert null != mMetadata;
+
+        if (null != mKeyWriterSchema) {
+          mMetadata.set(METADATA_FIELD_KEY_SCHEMA, new Text(mKeyWriterSchema.toString()));
+        }
+        if (null != mValueWriterSchema) {
+          mMetadata.set(METADATA_FIELD_VALUE_SCHEMA, new Text(mValueWriterSchema.toString()));
+        }
+        return mMetadata;
+      }
+    }
+
+    /**
+     * Creates a new <code>Writer</code> to a SequenceFile that supports Avro data.
+     *
+     * @param options The writer options.
+     * @throws IOException If the writer cannot be initialized.
+     */
+    public Writer(Options options) throws IOException {
+      super(options.getFileSystem(), options.getConfigurationWithAvroSerialization(),
+          options.getOutputPath(), options.getKeyClass(), options.getValueClass(),
+          options.getBufferSizeBytes(), options.getReplicationFactor(),
+          options.getBlockSizeBytes(), options.getProgressable(),
+          options.getMetadataWithAvroSchemas());
+    }
+  }
+
+  /**
+   * A reader for SequenceFiles that may contain Avro data.
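+   *
+   * <p>A minimal usage sketch (the <code>fs</code> and <code>conf</code> values are
+   * assumed to be supplied by the caller):</p>
+   *
+   * <pre>
+   *   AvroSequenceFile.Reader reader = new AvroSequenceFile.Reader(
+   *       new AvroSequenceFile.Reader.Options()
+   *           .withFileSystem(fs)
+   *           .withInputPath(new Path("/tmp/output.seq"))
+   *           .withConfiguration(conf));
+   * </pre>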
+   */
+  public static class Reader extends SequenceFile.Reader {
+    /**
+     * A helper class to encapsulate the options that can be used to construct a Reader.
+     */
+    public static class Options {
+      private FileSystem mFileSystem;
+      private Path mInputPath;
+      private Configuration mConf;
+      private Schema mKeyReaderSchema;
+      private Schema mValueReaderSchema;
+
+      /**
+       * Sets the filesystem the SequenceFile should be read from.
+       *
+       * @param fileSystem The filesystem.
+       * @return This options instance.
+       */
+      public Options withFileSystem(FileSystem fileSystem) {
+        if (null == fileSystem) {
+          throw new IllegalArgumentException("Filesystem may not be null");
+        }
+        mFileSystem = fileSystem;
+        return this;
+      }
+
+      /**
+       * Sets the input path for the SequenceFile.
+       *
+       * @param inputPath The input path.
+       * @return This options instance.
+       */
+      public Options withInputPath(Path inputPath) {
+        if (null == inputPath) {
+          throw new IllegalArgumentException("Input path may not be null");
+        }
+        mInputPath = inputPath;
+        return this;
+      }
+
+      /**
+       * Sets the Hadoop configuration.
+       *
+       * @param conf The configuration.
+       * @return This options instance.
+       */
+      public Options withConfiguration(Configuration conf) {
+        if (null == conf) {
+          throw new IllegalArgumentException("Configuration may not be null");
+        }
+        mConf = conf;
+        return this;
+      }
+
+      /**
+       * Sets the reader schema of the key records when using Avro data.
+       *
+       * <p>If not set, the writer schema will be used as the reader schema.</p>
+       *
+       * @param keyReaderSchema The reader schema for the keys.
+       * @return This options instance.
+       */
+      public Options withKeySchema(Schema keyReaderSchema) {
+        mKeyReaderSchema = keyReaderSchema;
+        return this;
+      }
+
+      /**
+       * Sets the reader schema of the value records when using Avro data.
+       *
+       * <p>If not set, the writer schema will be used as the reader schema.</p>
+       *
+       * @param valueReaderSchema The reader schema for the values.
+       * @return This options instance.
+       */
+      public Options withValueSchema(Schema valueReaderSchema) {
+        mValueReaderSchema = valueReaderSchema;
+        return this;
+      }
+
+      /**
+       * Gets the filesystem the SequenceFile should be read from.
+       *
+       * @return The file system to read from.
+       */
+      public FileSystem getFileSystem() {
+        if (null == mFileSystem) {
+          throw new RuntimeException("Must call Options.withFileSystem()");
+        }
+        return mFileSystem;
+      }
+
+      /**
+       * Gets the input path for the sequence file.
+       *
+       * @return The input path.
+       */
+      public Path getInputPath() {
+        if (null == mInputPath) {
+          throw new RuntimeException("Must call Options.withInputPath()");
+        }
+        return mInputPath;
+      }
+
+      /**
+       * Gets the Hadoop configuration.
+       *
+       * @return The Hadoop configuration.
+       */
+      public Configuration getConfiguration() {
+        return mConf;
+      }
+
+      /**
+       * Gets the Hadoop configuration with Avro serialization registered.
+       *
+       * @return The Hadoop configuration.
+       * @throws IOException If there is an error configuring Avro serialization.
+       */
+      public Configuration getConfigurationWithAvroSerialization() throws IOException {
+        Configuration conf = getConfiguration();
+        if (null == conf) {
+          throw new RuntimeException("Must call Options.withConfiguration()");
+        }
+
+        // Configure schemas and add Avro serialization to the configuration.
+        Configuration confWithAvro = new Configuration(conf);
+        AvroSerialization.addToConfiguration(confWithAvro);
+
+        // Read the metadata header from the SequenceFile to get the writer schemas.
+        Metadata metadata = AvroSequenceFile.getMetadata(
+            getFileSystem(), getInputPath(), confWithAvro);
+
+        // Set the key schema if present in the metadata.
+        Text keySchemaText = metadata.get(METADATA_FIELD_KEY_SCHEMA);
+        if (null != keySchemaText) {
+          LOG.debug("Using key writer schema from SequenceFile metadata: "
+              + keySchemaText.toString());
+          AvroSerialization.setKeyWriterSchema(
+              confWithAvro, Schema.parse(keySchemaText.toString()));
+          if (null != mKeyReaderSchema) {
+            AvroSerialization.setKeyReaderSchema(confWithAvro, mKeyReaderSchema);
+          }
+        }
+
+        // Set the value schema if present in the metadata.
+        Text valueSchemaText = metadata.get(METADATA_FIELD_VALUE_SCHEMA);
+        if (null != valueSchemaText) {
+          LOG.debug("Using value writer schema from SequenceFile metadata: "
+              + valueSchemaText.toString());
+          AvroSerialization.setValueWriterSchema(
+              confWithAvro, Schema.parse(valueSchemaText.toString()));
+          if (null != mValueReaderSchema) {
+            AvroSerialization.setValueReaderSchema(confWithAvro, mValueReaderSchema);
+          }
+        }
+        return confWithAvro;
+      }
+    }
+
+    /**
+     * Creates a new <code>Reader</code> from a SequenceFile that supports Avro data.
+     *
+     * @param options The reader options.
+     * @throws IOException If the reader cannot be initialized.
+     */
+    public Reader(Options options) throws IOException {
+      super(options.getFileSystem(), options.getInputPath(),
+          options.getConfigurationWithAvroSerialization());
+    }
+  }
+
+  /**
+   * Open and read just the metadata header from a SequenceFile.
+   *
+   * @param fs The FileSystem the SequenceFile is on.
+   * @param path The path to the file.
+   * @param conf The Hadoop configuration.
+   * @return The metadata header.
+   * @throws IOException If the metadata cannot be read from the file.
+   */
+  private static Metadata getMetadata(FileSystem fs, Path path, Configuration conf)
+      throws IOException {
+    SequenceFile.Reader metadataReader = null;
+    try {
+      metadataReader = new SequenceFile.Reader(fs, path, conf);
+      return metadataReader.getMetadata();
+    } finally {
+      if (null != metadataReader) {
+        metadataReader.close();
+      }
+    }
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSerialization.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSerialization.java
new file mode 100644
index 0000000..dda7543
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSerialization.java
@@ -0,0 +1,266 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import java.lang.reflect.Constructor;
+import java.util.Collection;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.avro.mapred.AvroWrapper;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.io.serializer.Deserializer;
+import org.apache.hadoop.io.serializer.Serialization;
+import org.apache.hadoop.io.serializer.Serializer;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * The {@link org.apache.hadoop.io.serializer.Serialization} used by jobs configured with
+ * {@link org.apache.avro.mapreduce.AvroJob}.
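+ *
+ * <p>A configuration sketch using the static setters defined below (the schema
+ * values are assumed to be defined by the application):</p>
+ *
+ * <pre>
+ *   Configuration conf = new Configuration();
+ *   AvroSerialization.setKeyWriterSchema(conf, keySchema);
+ *   AvroSerialization.setValueWriterSchema(conf, valueSchema);
+ *   AvroSerialization.addToConfiguration(conf);
+ * </pre>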
+ *
+ * @param <T> The Java type of the Avro data to serialize.
+ */
+public class AvroSerialization<T> extends Configured implements Serialization<AvroWrapper<T>> {
+  /** Conf key for the writer schema of the AvroKey datum being serialized/deserialized. */
+  private static final String CONF_KEY_WRITER_SCHEMA = "avro.serialization.key.writer.schema";
+
+  /** Conf key for the reader schema of the AvroKey datum being serialized/deserialized. */
+  private static final String CONF_KEY_READER_SCHEMA = "avro.serialization.key.reader.schema";
+
+  /** Conf key for the writer schema of the AvroValue datum being serialized/deserialized. */
+  private static final String CONF_VALUE_WRITER_SCHEMA = "avro.serialization.value.writer.schema";
+
+  /** Conf key for the reader schema of the AvroValue datum being serialized/deserialized. */
+  private static final String CONF_VALUE_READER_SCHEMA = "avro.serialization.value.reader.schema";
+
+  /** Conf key for the data model implementation class. */
+  private static final String CONF_DATA_MODEL = "avro.serialization.data.model";
+
+  /** {@inheritDoc} */
+  @Override
+  public boolean accept(Class<?> c) {
+    return AvroKey.class.isAssignableFrom(c) || AvroValue.class.isAssignableFrom(c);
+  }
+
+  /**
+   * Gets an object capable of deserializing the output from a Mapper.
+   *
+   * @param c The class to get a deserializer for.
+   * @return A deserializer for objects of class <code>c</code>.
+   */
+  @Override
+  public Deserializer<AvroWrapper<T>> getDeserializer(Class<AvroWrapper<T>> c) {
+    Configuration conf = getConf();
+    GenericData dataModel = createDataModel(conf);
+    if (AvroKey.class.isAssignableFrom(c)) {
+      Schema writerSchema = getKeyWriterSchema(conf);
+      Schema readerSchema = getKeyReaderSchema(conf);
+      DatumReader<T> datumReader = (readerSchema != null)
+        ? dataModel.createDatumReader(writerSchema, readerSchema)
+        : dataModel.createDatumReader(writerSchema);
+      return new AvroKeyDeserializer<T>(writerSchema, readerSchema, datumReader);
+    } else if (AvroValue.class.isAssignableFrom(c)) {
+      Schema writerSchema = getValueWriterSchema(conf);
+      Schema readerSchema = getValueReaderSchema(conf);
+      DatumReader<T> datumReader = (readerSchema != null)
+        ? dataModel.createDatumReader(writerSchema, readerSchema)
+        : dataModel.createDatumReader(writerSchema);
+      return new AvroValueDeserializer<T>(writerSchema, readerSchema, datumReader);
+    } else {
+      throw new IllegalStateException("Only AvroKey and AvroValue are supported.");
+    }
+  }
+
+  /**
+   * Gets an object capable of serializing output from a Mapper.
+   *
+   * @param c The class to get a serializer for.
+   * @return A serializer for objects of class <code>c</code>.
+   */
+  @Override
+  public Serializer<AvroWrapper<T>> getSerializer(Class<AvroWrapper<T>> c) {
+    Configuration conf = getConf();
+    Schema schema;
+    if (AvroKey.class.isAssignableFrom(c)) {
+      schema = getKeyWriterSchema(conf);
+    } else if (AvroValue.class.isAssignableFrom(c)) {
+      schema = getValueWriterSchema(conf);
+    } else {
+      throw new IllegalStateException("Only AvroKey and AvroValue are supported.");
+    }
+    GenericData dataModel = createDataModel(conf);
+    DatumWriter<T> datumWriter = dataModel.createDatumWriter(schema);
+    return new AvroSerializer<T>(schema, datumWriter);
+  }
+
+  /**
+   * Adds the AvroSerialization scheme to the configuration, so SerializationFactory
+   * instances constructed from the given configuration will be aware of it.
+   *
+   * @param conf The configuration to add AvroSerialization to.
+   */
+  public static void addToConfiguration(Configuration conf) {
+    Collection<String> serializations = conf.getStringCollection("io.serializations");
+    if (!serializations.contains(AvroSerialization.class.getName())) {
+      serializations.add(AvroSerialization.class.getName());
+      conf.setStrings("io.serializations",
+          serializations.toArray(new String[serializations.size()]));
+    }
+  }
+
+  /**
+   * Sets the writer schema of the AvroKey datum that is being serialized/deserialized.
+   *
+   * @param conf The configuration.
+   * @param schema The Avro key schema.
+   */
+  public static void setKeyWriterSchema(Configuration conf, Schema schema) {
+    if (null == schema) {
+      throw new IllegalArgumentException("Writer schema may not be null");
+    }
+    conf.set(CONF_KEY_WRITER_SCHEMA, schema.toString());
+  }
+
+  /**
+   * Sets the reader schema of the AvroKey datum that is being serialized/deserialized.
+   *
+   * @param conf The configuration.
+   * @param schema The Avro key schema.
+   */
+  public static void setKeyReaderSchema(Configuration conf, Schema schema) {
+    conf.set(CONF_KEY_READER_SCHEMA, schema.toString());
+  }
+
+  /**
+   * Sets the writer schema of the AvroValue datum that is being serialized/deserialized.
+   *
+   * @param conf The configuration.
+   * @param schema The Avro value schema.
+   */
+  public static void setValueWriterSchema(Configuration conf, Schema schema) {
+    if (null == schema) {
+      throw new IllegalArgumentException("Writer schema may not be null");
+    }
+    conf.set(CONF_VALUE_WRITER_SCHEMA, schema.toString());
+  }
+
+  /**
+   * Sets the reader schema of the AvroValue datum that is being serialized/deserialized.
+   *
+   * @param conf The configuration.
+   * @param schema The Avro value schema.
+   */
+  public static void setValueReaderSchema(Configuration conf, Schema schema) {
+    conf.set(CONF_VALUE_READER_SCHEMA, schema.toString());
+  }
+
+  /**
+   * Sets the data model class for de/serialization.
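+   *
+   * <p>For example, to use the specific (generated) record representation (a sketch;
+   * any {@link GenericData} subclass exposing a <code>ClassLoader</code> constructor
+   * should work here):</p>
+   *
+   * <pre>
+   *   AvroSerialization.setDataModelClass(conf, org.apache.avro.specific.SpecificData.class);
+   * </pre>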
+   *
+   * @param conf The configuration.
+   * @param modelClass The data model class.
+   */
+  public static void setDataModelClass(Configuration conf, Class<? extends GenericData> modelClass) {
+    conf.setClass(CONF_DATA_MODEL, modelClass, GenericData.class);
+  }
+
+  /**
+   * Gets the writer schema of the AvroKey datum that is being serialized/deserialized.
+   *
+   * @param conf The configuration.
+   * @return The Avro key writer schema, or null if none was set.
+   */
+  public static Schema getKeyWriterSchema(Configuration conf) {
+    String json = conf.get(CONF_KEY_WRITER_SCHEMA);
+    return null == json ? null : Schema.parse(json);
+  }
+
+  /**
+   * Gets the reader schema of the AvroKey datum that is being serialized/deserialized.
+   *
+   * @param conf The configuration.
+   * @return The Avro key reader schema, or null if none was set.
+   */
+  public static Schema getKeyReaderSchema(Configuration conf) {
+    String json = conf.get(CONF_KEY_READER_SCHEMA);
+    return null == json ? null : Schema.parse(json);
+  }
+
+  /**
+   * Gets the writer schema of the AvroValue datum that is being serialized/deserialized.
+   *
+   * @param conf The configuration.
+   * @return The Avro value writer schema, or null if none was set.
+   */
+  public static Schema getValueWriterSchema(Configuration conf) {
+    String json = conf.get(CONF_VALUE_WRITER_SCHEMA);
+    return null == json ? null : Schema.parse(json);
+  }
+
+  /**
+   * Gets the reader schema of the AvroValue datum that is being serialized/deserialized.
+   *
+   * @param conf The configuration.
+   * @return The Avro value reader schema, or null if none was set.
+   */
+  public static Schema getValueReaderSchema(Configuration conf) {
+    String json = conf.get(CONF_VALUE_READER_SCHEMA);
+    return null == json ? null : Schema.parse(json);
+  }
+
+  /**
+   * Gets the data model class for de/serialization.
+   *
+   * @param conf The configuration.
+   * @return The data model class, or {@link ReflectData} if none was set.
+   */
+  public static Class<? extends GenericData> getDataModelClass(Configuration conf) {
+    return conf.getClass(CONF_DATA_MODEL, ReflectData.class, GenericData.class);
+  }
+
+  private static GenericData newDataModelInstance(Class<? extends GenericData> modelClass, Configuration conf) {
+    GenericData dataModel;
+    try {
+      Constructor<? extends GenericData> ctor = modelClass.getDeclaredConstructor(ClassLoader.class);
+      ctor.setAccessible(true);
+      dataModel = ctor.newInstance(conf.getClassLoader());
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+    ReflectionUtils.setConf(dataModel, conf);
+    return dataModel;
+  }
+
+  /**
+   * Gets an instance of data model implementation, defaulting to
+   * {@link ReflectData} if not explicitly specified.
+   *
+   * @param conf The job configuration.
+   * @return Instance of the job data model implementation.
+   */
+  public static GenericData createDataModel(Configuration conf) {
+    Class<? extends GenericData> modelClass = getDataModelClass(conf);
+    return newDataModelInstance(modelClass, conf);
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSerializer.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSerializer.java
new file mode 100644
index 0000000..560c37f
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroSerializer.java
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.mapred.AvroWrapper;
+import org.apache.avro.reflect.ReflectDatumWriter;
+import org.apache.hadoop.io.serializer.Serializer;
+
+/**
+ * Serializes AvroWrapper objects within Hadoop.
+ *
+ * <p>Keys and values containing Avro types are more efficiently serialized outside of the
+ * WritableSerialization model, so they are wrapped in {@link
+ * org.apache.avro.mapred.AvroWrapper} objects and serialization is handled by this
+ * class.</p>
+ *
+ * <p>MapReduce jobs that use AvroWrapper objects as keys or values need to be configured
+ * with {@link AvroSerialization}.  Use {@link
+ * org.apache.avro.mapreduce.AvroJob} to help with Job configuration.</p>
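+ *
+ * <p>A sketch of standalone use, outside the Hadoop serialization framework (the
+ * <code>outputStream</code> value is assumed to be supplied by the caller):</p>
+ *
+ * <pre>
+ *   AvroSerializer&lt;CharSequence&gt; serializer
+ *       = new AvroSerializer&lt;CharSequence&gt;(Schema.create(Schema.Type.STRING));
+ *   serializer.open(outputStream);
+ *   serializer.serialize(new AvroKey&lt;CharSequence&gt;("a datum"));
+ *   serializer.close();
+ * </pre>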
+ *
+ * @param <T> The Java type of the Avro data.
+ */
+public class AvroSerializer<T> implements Serializer<AvroWrapper<T>> {
+  /**
+   * The block size for the Avro encoder.
+   *
+   * This number was copied from the AvroSerialization of org.apache.avro.mapred in Avro 1.5.1.
+   *
+   * TODO(gwu): Do some benchmarking with different numbers here to see if it is important.
+   */
+  private static final int AVRO_ENCODER_BLOCK_SIZE_BYTES = 512;
+
+  /** A factory for creating Avro datum encoders. */
+  private static final EncoderFactory mEncoderFactory
+      = new EncoderFactory().configureBlockSize(AVRO_ENCODER_BLOCK_SIZE_BYTES);
+
+  /** The writer schema for the data to serialize. */
+  private final Schema mWriterSchema;
+
+  /** The Avro datum writer for serializing. */
+  private final DatumWriter<T> mAvroDatumWriter;
+
+  /** The Avro encoder for serializing. */
+  private BinaryEncoder mAvroEncoder;
+
+  /** The output stream for serializing. */
+  private OutputStream mOutputStream;
+
+  /**
+   * Constructor.
+   *
+   * @param writerSchema The writer schema for the Avro data being serialized.
+   */
+  public AvroSerializer(Schema writerSchema) {
+    if (null == writerSchema) {
+      throw new IllegalArgumentException("Writer schema may not be null");
+    }
+    mWriterSchema = writerSchema;
+    mAvroDatumWriter = new ReflectDatumWriter<T>(writerSchema);
+  }
+
+  /**
+   * Constructor.
+   *
+   * @param writerSchema The writer schema for the Avro data being serialized.
+   * @param datumWriter The datum writer to use for serialization.
+   */
+  public AvroSerializer(Schema writerSchema, DatumWriter<T> datumWriter) {
+    if (null == writerSchema) {
+      throw new IllegalArgumentException("Writer schema may not be null");
+    }
+    mWriterSchema = writerSchema;
+    mAvroDatumWriter = datumWriter;
+  }
+
+  /**
+   * Gets the writer schema being used for serialization.
+   *
+   * @return The writer schema.
+   */
+  public Schema getWriterSchema() {
+    return mWriterSchema;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void open(OutputStream outputStream) throws IOException {
+    mOutputStream = outputStream;
+    mAvroEncoder = mEncoderFactory.binaryEncoder(outputStream, mAvroEncoder);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void serialize(AvroWrapper<T> avroWrapper) throws IOException {
+    mAvroDatumWriter.write(avroWrapper.datum(), mAvroEncoder);
+    // This would be a lot faster if the Serializer interface had a flush() method and the
+    // Hadoop framework called it when needed.  For now, we'll have to flush on every record.
+    mAvroEncoder.flush();
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void close() throws IOException {
+    mOutputStream.close();
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroValueDeserializer.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroValueDeserializer.java
new file mode 100644
index 0000000..370f944
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/io/AvroValueDeserializer.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.avro.mapred.AvroWrapper;
+
+/**
+ * Deserializes AvroValue objects within Hadoop.
+ *
+ * @param <D> The Java type of the Avro data to deserialize.
+ *
+ * @see AvroDeserializer
+ */
+public class AvroValueDeserializer<D> extends AvroDeserializer<AvroWrapper<D>, D> {
+  /**
+   * Constructor.
+   *
+   * @param writerSchema The Avro writer schema for the data to deserialize.
+   * @param readerSchema The Avro reader schema for the data to deserialize.
+   * @param classLoader The class loader to use when loading classes during deserialization.
+   */
+  public AvroValueDeserializer(Schema writerSchema, Schema readerSchema,
+                               ClassLoader classLoader) {
+    super(writerSchema, readerSchema, classLoader);
+  }
+
+  /**
+   * Constructor.
+   *
+   * @param writerSchema The Avro writer schema for the data to deserialize.
+   * @param readerSchema The Avro reader schema for the data to deserialize.
+   * @param datumReader The Avro datum reader to use for deserialization.
+   */
+  public AvroValueDeserializer(Schema writerSchema, Schema readerSchema,
+                               DatumReader<D> datumReader) {
+    super(writerSchema, readerSchema, datumReader);
+  }
+
+  /**
+   * Creates a new empty <code>AvroValue</code> instance.
+   *
+   * @return a new empty AvroValue.
+   */
+  @Override
+  protected AvroWrapper<D> createAvroWrapper() {
+    return new AvroValue<D>(null);
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/hadoop/util/AvroCharSequenceComparator.java b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/util/AvroCharSequenceComparator.java
new file mode 100644
index 0000000..2a1c972
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/hadoop/util/AvroCharSequenceComparator.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.util;
+
+import java.util.Comparator;
+
+/**
+ * Compares Avro string data (data with schema <i>"string"</i>).
+ *
+ * <p>The only case where comparing Avro objects does not work using their natural order
+ * is when the schema is <i>"string"</i>.  The Avro string schema maps to the Java
+ * <code>CharSequence</code> interface, which does not define <code>equals</code>,
+ * <code>hashCode</code>, or <code>compareTo</code>.</p>
+ *
+ * <p>Using this comparator enables comparisons between <code>String</code> and
+ * <code>Utf8</code> objects that are both valid when working with Avro strings.</p>
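+ *
+ * <p>For instance, a sketch of a sorted map keyed by Avro strings:</p>
+ *
+ * <pre>
+ *   SortedMap&lt;CharSequence, Integer&gt; counts
+ *       = new TreeMap&lt;CharSequence, Integer&gt;(AvroCharSequenceComparator.INSTANCE);
+ *   counts.put(new Utf8("apple"), 1);
+ *   counts.get("apple");  // Found: Utf8 and String keys compare equal here.
+ * </pre>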
+ *
+ * @param <T> The type of object to compare.
+ */
+public class AvroCharSequenceComparator<T> implements Comparator<T> {
+  /** A singleton instance. */
+  public static final AvroCharSequenceComparator<CharSequence> INSTANCE
+      = new AvroCharSequenceComparator<CharSequence>();
+
+  /** {@inheritDoc} */
+  @Override
+  public int compare(T o1, T o2) {
+    if (!(o1 instanceof CharSequence) || !(o2 instanceof CharSequence)) {
+      throw new RuntimeException(
+          "Attempted use of AvroCharSequenceComparator on non-CharSequence objects: "
+          + o1.getClass().getName() + " and " + o2.getClass().getName());
+    }
+    return compareCharSequence((CharSequence) o1, (CharSequence) o2);
+  }
+
+  /**
+   * Compares the CharSequences <code>o1</code> and <code>o2</code>.
+   *
+   * @param o1 The left charsequence.
+   * @param o2 The right charsequence.
+   * @return a negative integer, zero, or a positive integer if the first argument is
+   *     less than, equal to, or greater than the second, respectively.
+   */
+  private int compareCharSequence(CharSequence o1, CharSequence o2) {
+    for (int i = 0; i < Math.max(o1.length(), o2.length()); i++) {
+      int charComparison = compareCharacter(o1, o2, i);
+      if (0 != charComparison) {
+        return charComparison;
+      }
+    }
+    return 0;
+  }
+
+  /**
+   * Compares the characters of <code>o1</code> and <code>o2</code> at index <code>index</code>.
+   *
+   * @param o1 The left charsequence.
+   * @param o2 The right charsequence.
+   * @param index The zero-based index into the charsequences to compare.
+   * @return a negative integer, zero, or a positive integer if the first argument is
+   *     less than, equal to, or greater than the second, respectively.
+   */
+  private int compareCharacter(CharSequence o1, CharSequence o2, int index) {
+    if (index < o1.length() && index < o2.length()) {
+      return Character.valueOf(o1.charAt(index)).compareTo(Character.valueOf(o2.charAt(index)));
+    }
+    if (index >= o1.length() && index >= o2.length()) {
+      return 0;
+    }
+    return o1.length() - o2.length();
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextInputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextInputFormat.java
new file mode 100644
index 0000000..ca7dab8
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextInputFormat.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+
+/**
+ * An {@link org.apache.hadoop.mapred.InputFormat} for Avro data files, which
+ * converts each datum to string form in the input key. The input value is
+ * always empty. The string representation is
+ * <a href="http://www.json.org/">JSON</a>.
+ * <p>
+ * This {@link org.apache.hadoop.mapred.InputFormat} is useful for applications
+ * that wish to process Avro data using tools like MapReduce Streaming.
+ * 
+ * By default, when pointed at a directory, this will silently skip over any
+ * files in it that do not have a .avro extension. To instead include all files,
+ * set the avro.mapred.ignore.inputs.without.extension property to false.
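+ *
+ * <p>For example, to process every file regardless of extension (a sketch;
+ * <code>jobConf</code> is the job's JobConf):</p>
+ *
+ * <pre>
+ *   jobConf.setBoolean("avro.mapred.ignore.inputs.without.extension", false);
+ * </pre>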
+ */
+public class AvroAsTextInputFormat extends FileInputFormat<Text, Text> {
+
+  @Override
+  protected FileStatus[] listStatus(JobConf job) throws IOException {
+    if (job.getBoolean(AvroInputFormat.IGNORE_FILES_WITHOUT_EXTENSION_KEY,
+        AvroInputFormat.IGNORE_INPUTS_WITHOUT_EXTENSION_DEFAULT)) {
+      List<FileStatus> result = new ArrayList<FileStatus>();
+      for (FileStatus file : super.listStatus(job))
+        if (file.getPath().getName().endsWith(AvroOutputFormat.EXT))
+          result.add(file);
+      return result.toArray(new FileStatus[0]);
+    } else {
+      return super.listStatus(job);
+    }
+  }
+  
+  @Override
+  public RecordReader<Text, Text>
+    getRecordReader(InputSplit split, JobConf job, Reporter reporter)
+    throws IOException {
+    reporter.setStatus(split.toString());
+    return new AvroAsTextRecordReader(job, (FileSplit) split);
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextRecordReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextRecordReader.java
new file mode 100644
index 0000000..517b472
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroAsTextRecordReader.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RecordReader;
+
+class AvroAsTextRecordReader<T> implements RecordReader<Text, Text> {
+
+  private FileReader<T> reader;
+  private T datum;
+  private long start;
+  private long end;
+
+  public AvroAsTextRecordReader(JobConf job, FileSplit split)
+    throws IOException {
+    this(DataFileReader.openReader
+         (new FsInput(split.getPath(), job), new GenericDatumReader<T>()), split);
+  }
+
+  protected AvroAsTextRecordReader(FileReader<T> reader, FileSplit split)
+    throws IOException {
+    this.reader = reader;
+    reader.sync(split.getStart());                    // sync to start
+    this.start = reader.tell();
+    this.end = split.getStart() + split.getLength();
+  }
+
+  public Text createKey() {
+    return new Text();
+  }
+  
+  public Text createValue() {
+    return new Text();
+  }
+    
+  public boolean next(Text key, Text ignore) throws IOException {
+    if (!reader.hasNext() || reader.pastSync(end))
+      return false;
+    datum = reader.next(datum);
+    if (datum instanceof ByteBuffer) {
+      ByteBuffer b = (ByteBuffer) datum;
+      if (b.hasArray()) {
+        int offset = b.arrayOffset();
+        int start = b.position();
+        int length = b.remaining();
+        // Note: Text.set() takes a length as its third argument, not an end offset.
+        key.set(b.array(), offset + start, length);
+      } else {
+        byte[] bytes = new byte[b.remaining()];
+        b.duplicate().get(bytes);
+        key.set(bytes);
+      }
+    } else {
+      key.set(GenericData.get().toString(datum));
+    }
+    return true;
+  }
+  
+  public float getProgress() throws IOException {
+    if (end == start) {
+      return 0.0f;
+    } else {
+      return Math.min(1.0f, (getPos() - start) / (float)(end - start));
+    }
+  }
+  
+  public long getPos() throws IOException {
+    return reader.tell();
+  }
+
+  public void close() throws IOException { reader.close(); }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroCollector.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroCollector.java
new file mode 100644
index 0000000..ac4d506
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroCollector.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configured;
+
+/**
+ * A collector for map and reduce output.
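+ *
+ * <p>Implementations of {@link AvroMapper} and {@link AvroReducer} receive a
+ * collector for their output, e.g. (a sketch, following the word-count pattern
+ * from the Avro MapReduce documentation):</p>
+ *
+ * <pre>
+ *   public void map(Utf8 line, AvroCollector&lt;Pair&lt;Utf8, Long&gt;&gt; collector,
+ *                   Reporter reporter) throws IOException {
+ *     collector.collect(new Pair&lt;Utf8, Long&gt;(line, 1L));
+ *   }
+ * </pre>
+ */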
+public abstract class AvroCollector<T> extends Configured {
+  public abstract void collect(T datum) throws IOException;
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroInputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroInputFormat.java
new file mode 100644
index 0000000..252339a
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroInputFormat.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.RecordReader;
+
+/**
+ * An {@link org.apache.hadoop.mapred.InputFormat} for Avro data files.
+ * 
+ * By default, when pointed at a directory, this will silently skip over any
+ * files in it that do not have a .avro extension. To instead include all files,
+ * set the avro.mapred.ignore.inputs.without.extension property to false.
+ */
+public class AvroInputFormat<T>
+  extends FileInputFormat<AvroWrapper<T>, NullWritable> {
+
+  /** Whether to silently ignore input files without the .avro extension */
+  public static final String IGNORE_FILES_WITHOUT_EXTENSION_KEY =
+      "avro.mapred.ignore.inputs.without.extension";
+  
+  /** Default of whether to silently ignore input files without the .avro
+   * extension. */
+  public static final boolean IGNORE_INPUTS_WITHOUT_EXTENSION_DEFAULT = true;
+  
+  @Override
+  protected FileStatus[] listStatus(JobConf job) throws IOException {
+    if (job.getBoolean(IGNORE_FILES_WITHOUT_EXTENSION_KEY,
+        IGNORE_INPUTS_WITHOUT_EXTENSION_DEFAULT)) {
+      List<FileStatus> result = new ArrayList<FileStatus>();
+      for (FileStatus file : super.listStatus(job))
+        if (file.getPath().getName().endsWith(AvroOutputFormat.EXT))
+          result.add(file);
+      return result.toArray(new FileStatus[0]);
+    } else {
+      return super.listStatus(job);
+    }
+  }
+
+  @Override
+  public RecordReader<AvroWrapper<T>, NullWritable>
+    getRecordReader(InputSplit split, JobConf job, Reporter reporter)
+    throws IOException {
+    reporter.setStatus(split.toString());
+    return new AvroRecordReader<T>(job, (FileSplit)split);
+  }
+
+}
+
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroJob.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroJob.java
new file mode 100644
index 0000000..21f130c
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroJob.java
@@ -0,0 +1,254 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.util.Collection;
+import java.lang.reflect.Constructor;
+import java.net.URLEncoder;
+import java.io.UnsupportedEncodingException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.lib.IdentityMapper;
+import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.specific.SpecificData;
+
+/**
+ * Setters to configure jobs for Avro data.
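+ *
+ * <p>A typical job setup sketch (the schema values are assumed to be defined
+ * by the application):</p>
+ *
+ * <pre>
+ *   JobConf job = new JobConf();
+ *   AvroJob.setInputSchema(job, inputSchema);
+ *   AvroJob.setMapOutputSchema(job, Pair.getPairSchema(keySchema, valueSchema));
+ *   AvroJob.setOutputSchema(job, outputSchema);
+ * </pre>
+ */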
+public class AvroJob {
+  private AvroJob() {}                            // no public ctor
+
+  static final String MAPPER = "avro.mapper";
+  static final String COMBINER = "avro.combiner";
+  static final String REDUCER = "avro.reducer";
+
+  /** The configuration key for a job's input schema. */
+  public static final String INPUT_SCHEMA = "avro.input.schema";
+  /** The configuration key for a job's intermediate schema. */
+  public static final String MAP_OUTPUT_SCHEMA = "avro.map.output.schema";
+  /** The configuration key for a job's output schema. */
+  public static final String OUTPUT_SCHEMA = "avro.output.schema";
+  /** The configuration key for a job's output compression codec.
+   *  This takes one of the strings registered in {@link org.apache.avro.file.CodecFactory} */
+  public static final String OUTPUT_CODEC = "avro.output.codec";
+  /** The configuration key prefix for text output metadata. */
+  public static final String TEXT_PREFIX = "avro.meta.text.";
+  /** The configuration key prefix for binary output metadata. */
+  public static final String BINARY_PREFIX = "avro.meta.binary.";
+  /** The configuration key for reflection-based input representation. */
+  public static final String INPUT_IS_REFLECT = "avro.input.is.reflect";
+  /** The configuration key for reflection-based map output representation. */
+  public static final String MAP_OUTPUT_IS_REFLECT = "avro.map.output.is.reflect";
+  /** The configuration key for the data model implementation class. */
+  private static final String CONF_DATA_MODEL = "avro.serialization.data.model";
+
+  /** Configure a job's map input schema. */
+  public static void setInputSchema(JobConf job, Schema s) {
+    job.set(INPUT_SCHEMA, s.toString());
+    configureAvroInput(job);
+  }
+
+  /** Return a job's map input schema. */
+  public static Schema getInputSchema(Configuration job) {
+    String schemaString = job.get(INPUT_SCHEMA);
+    return schemaString != null ? Schema.parse(schemaString) : null;
+  }
+
+  /** Configure a job's map output schema.  The map output schema defaults to
+   * the output schema and need only be specified when it differs.  It must
+   * be a {@link Pair} schema. */
+  public static void setMapOutputSchema(JobConf job, Schema s) {
+    job.set(MAP_OUTPUT_SCHEMA, s.toString());
+    configureAvroShuffle(job);
+  }
+
+  /** Return a job's map output key schema. */
+  public static Schema getMapOutputSchema(Configuration job) {
+    return Schema.parse(job.get(MAP_OUTPUT_SCHEMA, job.get(OUTPUT_SCHEMA)));
+  }
+
+  /** Configure a job's output schema.  Unless this is a map-only job, this
+   * must be a {@link Pair} schema. */
+  public static void setOutputSchema(JobConf job, Schema s) {
+    job.set(OUTPUT_SCHEMA, s.toString());
+    configureAvroOutput(job);
+  }
+
+  /** Configure a job's output compression codec. */
+  public static void setOutputCodec(JobConf job, String codec) {
+    job.set(OUTPUT_CODEC, codec);
+  }
+
+  /** Add metadata to job output files.*/
+  public static void setOutputMeta(JobConf job, String key, String value) {
+    job.set(TEXT_PREFIX+key, value);
+  }
+  /** Add metadata to job output files.*/
+  public static void setOutputMeta(JobConf job, String key, long value) {
+    job.set(TEXT_PREFIX+key, Long.toString(value));
+  }
+  /** Add metadata to job output files.*/
+  public static void setOutputMeta(JobConf job, String key, byte[] value) {
+    try {
+      job.set(BINARY_PREFIX+key,
+              URLEncoder.encode(new String(value, "ISO-8859-1"),
+                                "ISO-8859-1"));
+    } catch (UnsupportedEncodingException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  /** Indicate that a job's input files are in SequenceFile format.*/
+  public static void setInputSequenceFile(JobConf job) {
+    job.setInputFormat(SequenceFileInputFormat.class);
+  }
+
+  /** Indicate that all of a job's data should use the reflect representation. */
+  public static void setReflect(JobConf job) {
+    setInputReflect(job);
+    setMapOutputReflect(job);
+  }
+  
+  /** Indicate that a job's input data should use reflect representation.*/
+  public static void setInputReflect(JobConf job) {
+    job.setBoolean(INPUT_IS_REFLECT, true);
+  }
+  
+  /** Indicate that a job's map output data should use reflect representation.*/
+  public static void setMapOutputReflect(JobConf job) {
+    job.setBoolean(MAP_OUTPUT_IS_REFLECT, true);
+  }
+
+  /** Return a job's output schema. */
+  public static Schema getOutputSchema(Configuration job) {
+    return Schema.parse(job.get(OUTPUT_SCHEMA));
+  }
+
+  private static void configureAvroInput(JobConf job) {
+    if (job.get("mapred.input.format.class") == null)
+      job.setInputFormat(AvroInputFormat.class);
+
+    if (job.getMapperClass() == IdentityMapper.class)
+      job.setMapperClass(HadoopMapper.class);
+
+    configureAvroShuffle(job);
+  }
+
+  private static void configureAvroOutput(JobConf job) {
+    if (job.get("mapred.output.format.class") == null)
+      job.setOutputFormat(AvroOutputFormat.class);
+
+    if (job.getReducerClass() == IdentityReducer.class)
+      job.setReducerClass(HadoopReducer.class);
+
+    job.setOutputKeyClass(AvroWrapper.class);
+    configureAvroShuffle(job);
+  }
+
+  private static void configureAvroShuffle(JobConf job) {
+    job.setOutputKeyComparatorClass(AvroKeyComparator.class);
+    job.setMapOutputKeyClass(AvroKey.class);
+    job.setMapOutputValueClass(AvroValue.class);
+
+    // add AvroSerialization to io.serializations
+    Collection<String> serializations =
+      job.getStringCollection("io.serializations");
+    if (!serializations.contains(AvroSerialization.class.getName())) {
+      serializations.add(AvroSerialization.class.getName());
+      job.setStrings("io.serializations",
+                     serializations.toArray(new String[0]));
+    }
+  }
+
+  /** Configure a job's mapper implementation. */
+  public static void setMapperClass(JobConf job,
+                                    Class<? extends AvroMapper> c) {
+    job.set(MAPPER, c.getName());
+  }
+
+  /** Configure a job's combiner implementation. */
+  public static void setCombinerClass(JobConf job,
+                                      Class<? extends AvroReducer> c) {
+    job.set(COMBINER, c.getName());
+    job.setCombinerClass(HadoopCombiner.class);
+  }
+
+  /** Configure a job's reducer implementation. */
+  public static void setReducerClass(JobConf job,
+                                     Class<? extends AvroReducer> c) {
+    job.set(REDUCER, c.getName());
+  }
+
+  /** Configure a job's data model implementation class. */
+  public static void setDataModelClass(JobConf job, Class<? extends GenericData> modelClass) {
+    job.setClass(CONF_DATA_MODEL, modelClass, GenericData.class);
+  }
+  
+  /** Return the job's data model implementation class. */
+  public static Class<? extends GenericData> getDataModelClass(Configuration conf) {
+    return (Class<? extends GenericData>) conf.getClass(
+        CONF_DATA_MODEL, ReflectData.class, GenericData.class);
+  }
+
+  private static GenericData newDataModelInstance(Class<? extends GenericData> modelClass, Configuration conf) {
+    GenericData dataModel;
+    try {
+      Constructor<? extends GenericData> ctor = modelClass.getDeclaredConstructor(ClassLoader.class);
+      ctor.setAccessible(true);
+      dataModel = ctor.newInstance(conf.getClassLoader());
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+    ReflectionUtils.setConf(dataModel, conf);
+    return dataModel;
+  }
+
+  public static GenericData createDataModel(Configuration conf) {
+    return newDataModelInstance(getDataModelClass(conf), conf);
+  }
+
+  public static GenericData createInputDataModel(Configuration conf) {
+    String className = conf.get(CONF_DATA_MODEL, null);
+    Class<? extends GenericData> modelClass;
+    if (className != null) {
+      modelClass = getDataModelClass(conf);
+    } else if (conf.getBoolean(INPUT_IS_REFLECT, false)) {
+      modelClass = ReflectData.class;
+    } else {
+      modelClass = SpecificData.class;
+    }
+    return newDataModelInstance(modelClass, conf);
+  }
+
+  public static GenericData createMapOutputDataModel(Configuration conf) {
+    String className = conf.get(CONF_DATA_MODEL, null);
+    Class<? extends GenericData> modelClass;
+    if (className != null) {
+      modelClass = getDataModelClass(conf);
+    } else if (conf.getBoolean(MAP_OUTPUT_IS_REFLECT, false)) {
+      modelClass = ReflectData.class;
+    } else {
+      modelClass = SpecificData.class;
+    }
+    return newDataModelInstance(modelClass, conf);
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroKey.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroKey.java
new file mode 100644
index 0000000..a5eceee
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroKey.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+/** The wrapper of keys for jobs configured with {@link AvroJob}. */
+public class AvroKey<T> extends AvroWrapper<T> {
+  /** Construct an {@link AvroKey} wrapping no key. */
+  public AvroKey() { this(null); }
+
+  /** Wrap a key. */
+  public AvroKey(T datum) { super(datum); }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroKeyComparator.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroKeyComparator.java
new file mode 100644
index 0000000..886f4a7
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroKeyComparator.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import org.apache.hadoop.io.RawComparator;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.conf.Configuration;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.BinaryData;
+import org.apache.avro.reflect.ReflectData;
+
+/** The {@link RawComparator} used by jobs configured with {@link AvroJob}. */
+public class AvroKeyComparator<T>
+  extends Configured implements RawComparator<AvroWrapper<T>> {
+
+  private Schema schema;
+
+  @Override
+  public void setConf(Configuration conf) {
+    super.setConf(conf);
+    if (conf != null)
+      schema = Pair.getKeySchema(AvroJob.getMapOutputSchema(conf));
+  }
+
+  public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+    return BinaryData.compare(b1, s1, l1, b2, s2, l2, schema);
+  }
+
+  public int compare(AvroWrapper<T> x, AvroWrapper<T> y) {
+    return ReflectData.get().compare(x.datum(), y.datum(), schema);
+  }
+
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMapper.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMapper.java
new file mode 100644
index 0000000..f4af97f
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMapper.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobConfigurable;
+import org.apache.hadoop.mapred.Reporter;
+
+/** A mapper for Avro data.
+ *
+ * <p>Applications subclass this class and pass their subclass to {@link
+ * AvroJob#setMapperClass(JobConf, Class)}, overriding {@link #map(Object, AvroCollector, Reporter)}.
+ */
+public class AvroMapper<IN, OUT> extends Configured implements JobConfigurable, Closeable {
+
+  /** Called with each map input datum.  By default, collects inputs unchanged (an identity map). */
+  @SuppressWarnings("unchecked")
+  public void map(IN datum, AvroCollector<OUT> collector, Reporter reporter)
+    throws IOException {
+    collector.collect((OUT)datum);
+  }
+
+
+  /** Subclasses can override this as desired. */
+  @Override
+  public void close() throws IOException {
+    // no op
+  }
+
+  /** Subclasses can override this as desired. */
+  @Override
+  public void configure(JobConf jobConf) {
+    // no op
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleInputs.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleInputs.java
new file mode 100644
index 0000000..bdf6a5e
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleInputs.java
@@ -0,0 +1,218 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.mapred;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.avro.Schema;
+import org.apache.avro.SchemaParseException;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobConf;
+
+/**
+ * This class supports Avro-MapReduce jobs that have multiple input paths with
+ * a different {@link Schema} and {@link AvroMapper} for each path.
+ *
+ * <p>
+ * Usage:
+ * </p>
+ * <p>
+ * <strong>Case 1: (ReflectData based inputs)</strong>
+ * </p>
+ * <pre>
+ * // Enable ReflectData usage across job.
+ * AvroJob.setReflect(job);
+ *
+ * Schema type1Schema = ReflectData.get().getSchema(Type1Record.class);
+ * AvroMultipleInputs.addInputPath(job, inputPath1, Type1AvroMapper.class, type1Schema);
+ * </pre>
+ *
+ * Where Type1AvroMapper would be implemented as
+ * <pre>
+ *  class Type1AvroMapper extends AvroMapper<Type1Record, Pair<ComparingKeyRecord, CommonValueRecord>>
+ * </pre>
+ *
+ * <pre>
+ * Schema type2Schema = ReflectData.get().getSchema(Type2Record.class);
+ * AvroMultipleInputs.addInputPath(job, inputPath2, Type2AvroMapper.class, type2Schema);
+ * </pre>
+ *
+ * Where Type2AvroMapper would be implemented as
+ * <pre>
+ *  class Type2AvroMapper extends AvroMapper<Type2Record, Pair<ComparingKeyRecord, CommonValueRecord>>
+ * </pre>
+ *
+ * <p>
+ * <strong>Case 2: (SpecificData based inputs)</strong>
+ * </p>
+ *
+ * <pre>
+ * Schema type1Schema = Type1Record.SCHEMA$;
+ * AvroMultipleInputs.addInputPath(job, inputPath1, Type1AvroMapper.class, type1Schema);
+ * </pre>
+ *
+ * Where Type1AvroMapper would be implemented as
+ * <pre>
+ *  class Type1AvroMapper extends AvroMapper<Type1Record, Pair<ComparingKeyRecord, CommonValueRecord>>
+ * </pre>
+ *
+ * <pre>
+ * Schema type2Schema = Type2Record.SCHEMA$;
+ * AvroMultipleInputs.addInputPath(job, inputPath2, Type2AvroMapper.class, type2Schema);
+ * </pre>
+ *
+ * Where Type2AvroMapper would be implemented as
+ * <pre>
+ *  class Type2AvroMapper extends AvroMapper<Type2Record, Pair<ComparingKeyRecord, CommonValueRecord>>
+ * </pre>
+ *
+ * <p>
+ * <strong>Note on InputFormat:</strong>
+ *   The InputFormat used will always be {@link AvroInputFormat} when using this class.
+ * </p>
+ * <p>
+ * <strong>Note on collector outputs:</strong>
+ *   When using this class, you must ensure that all the mapper
+ *   implementations involved emit the same key and value
+ *   record types, as set by {@link AvroJob#setOutputSchema(JobConf, Schema)}
+ *   or {@link AvroJob#setMapOutputSchema(JobConf, Schema)}.
+ * </p>
+ */
+public class AvroMultipleInputs {
+  private static final String schemaKey =
+      "avro.mapreduce.input.multipleinputs.dir.schemas";
+  private static final String mappersKey =
+      "avro.mapreduce.input.multipleinputs.dir.mappers";
+  /**
+   * Add a {@link Path} with a custom {@link Schema} to the list of
+   * inputs for the map-reduce job.
+   *
+   * @param conf The configuration of the job
+   * @param path {@link Path} to be added to the list of inputs for the job
+   * @param inputSchema {@link Schema} to use for this path
+   */
+  private static void addInputPath(JobConf conf, Path path,
+      Schema inputSchema) {
+
+    String schemaMapping = path.toString() + ";"
+       + toBase64(inputSchema.toString());
+
+    String schemas = conf.get(schemaKey);
+    conf.set(schemaKey,
+        schemas == null ? schemaMapping : schemas + ","
+            + schemaMapping);
+
+    conf.setInputFormat(DelegatingInputFormat.class);
+  }
+
+  /**
+   * Add a {@link Path} with a custom {@link Schema} and
+   * {@link AvroMapper} to the list of inputs for the map-reduce job.
+   *
+   * @param conf The configuration of the job
+   * @param path {@link Path} to be added to the list of inputs for the job
+   * @param mapperClass {@link AvroMapper} class to use for this path
+   * @param inputSchema {@link Schema} to use for this path
+   */
+  public static void addInputPath(JobConf conf, Path path,
+      Class<? extends AvroMapper> mapperClass,
+      Schema inputSchema) {
+
+    addInputPath(conf, path, inputSchema);
+
+    String mapperMapping = path.toString() + ";" + mapperClass.getName();
+    String mappers = conf.get(mappersKey);
+    conf.set(mappersKey, mappers == null ? mapperMapping
+       : mappers + "," + mapperMapping);
+
+    conf.setMapperClass(DelegatingMapper.class);
+  }
+
+  /**
+   * Retrieves a map of {@link Path}s to the {@link AvroMapper} class that
+   * should be used for them.
+   *
+   * @param conf The configuration of the job
+   * @see #addInputPath(JobConf, Path, Class, Schema)
+   * @return A map of paths-to-mappers for the job
+   */
+  @SuppressWarnings("unchecked")
+  static Map<Path, Class<? extends AvroMapper>> getMapperTypeMap(JobConf conf) {
+    if (conf.get(mappersKey) == null) {
+      return Collections.emptyMap();
+    }
+    Map<Path, Class<? extends AvroMapper>> m = new HashMap<Path, Class<? extends AvroMapper>>();
+    String[] pathMappings = conf.get(mappersKey).split(",");
+    for (String pathMapping : pathMappings) {
+      String[] split = pathMapping.split(";");
+      Class<? extends AvroMapper> mapClass;
+      try {
+       mapClass = (Class<? extends AvroMapper>) conf.getClassByName(split[1]);
+      } catch (ClassNotFoundException e) {
+       throw new RuntimeException(e);
+      }
+      m.put(new Path(split[0]), mapClass);
+    }
+    return m;
+  }
+
+  /**
+   * Retrieves a map of {@link Path}s to the {@link Schema} that
+   * should be used for them.
+   *
+   * @param conf The configuration of the job
+   * @see #addInputPath(JobConf, Path, Class, Schema)
+   * @return A map of paths to schemas for the job
+   */
+  static Map<Path, Schema> getInputSchemaMap(JobConf conf) {
+    if (conf.get(schemaKey) == null) {
+      return Collections.emptyMap();
+    }
+    Map<Path, Schema> m = new HashMap<Path, Schema>();
+    String[] schemaMappings =
+        conf.get(schemaKey).split(",");
+    Schema.Parser schemaParser = new Schema.Parser();
+    for (String schemaMapping : schemaMappings) {
+      String[] split = schemaMapping.split(";");
+      String schemaString = fromBase64(split[1]);
+      Schema inputSchema;
+      try {
+       inputSchema = schemaParser.parse(schemaString);
+      } catch (SchemaParseException e) {
+       throw new RuntimeException(e);
+      }
+      m.put(new Path(split[0]), inputSchema);
+    }
+    return m;
+  }
+
+  private static String toBase64(String rawString) {
+    Base64 codec = new Base64();
+    return new String(codec.encode(rawString.getBytes()));
+  }
+
+  private static String fromBase64(String base64String) {
+    Base64 codec = new Base64();
+    return new String(codec.decode(base64String.getBytes()));
+  }
+
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleOutputs.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleOutputs.java
new file mode 100644
index 0000000..a1b4a1c
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroMultipleOutputs.java
@@ -0,0 +1,626 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.StringTokenizer;
+import java.util.List;
+import java.util.Set;
+import java.util.HashMap;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Collections;
+
+import org.apache.hadoop.mapred.OutputFormat;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.RecordWriter;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.util.Progressable;
+import org.apache.avro.Schema;
+
+import org.apache.hadoop.io.NullWritable;
+
+
+
+/**
+ * The AvroMultipleOutputs class simplifies writing Avro output data
+ * to multiple outputs.
+ *
+ * <p>
+ * Case one: writing to additional outputs other than the job default output.
+ *
+ * Each additional output, or named output, may be configured with its own
+ * <code>Schema</code> and <code>OutputFormat</code>.
+ * A named output can be a single file or a multi file. The latter is referred
+ * to as a multi named output, which is an unbounded set of files all sharing
+ * the same <code>Schema</code>.
+ * </p>
+ * <p>
+ * Case two: writing data to different files whose names are supplied by the
+ * user.
+ * </p>
+ *
+ * <p>
+ * AvroMultipleOutputs supports counters; by default they are disabled. The
+ * counters group is the {@link AvroMultipleOutputs} class name, and the names
+ * of the counters are the same as the output names. These count the number of
+ * records written to each output name. For multi named outputs the name of
+ * the counter is the concatenation of the named output, an underscore '_',
+ * and the multiname.
+ * </p>
+ * 
+ * Usage pattern for job submission:
+ * <pre>
+ *
+ * JobConf job = new JobConf();
+ *
+ * FileInputFormat.setInputPath(job, inDir);
+ * FileOutputFormat.setOutputPath(job, outDir);
+ *
+ * AvroJob.setMapperClass(job, MyAvroMapper.class);
+ * AvroJob.setReducerClass(job, MyAvroReducer.class);
+ * ...
+ *  
+ * Schema schema;
+ * ...
+ * // Defines additional single output 'avro1' for the job
+ * AvroMultipleOutputs.addNamedOutput(job, "avro1", AvroOutputFormat.class,
+ * schema);
+ *
+ * // Defines additional output 'avro2' with different schema for the job
+ * AvroMultipleOutputs.addNamedOutput(job, "avro2",
+ *   AvroOutputFormat.class,
+ *   null); // if the schema is null, the job's default output schema is used
+ * ...
+ *
+ * job.waitForCompletion(true);
+ * ...
+ * </pre>
+ * <p>
+ * Usage in Reducer:
+ * <pre>
+ * 
+ * public class MyAvroReducer extends
+ *   AvroReducer<K, V, OUT> {
+ * private AvroMultipleOutputs amos;
+ *
+ * public void configure(JobConf conf) {
+ * ...
+ * amos = new AvroMultipleOutputs(conf);
+ * }
+ *
+ * public void reduce(K key, Iterable<V> values,
+ * AvroCollector<OUT> collector, Reporter reporter)
+ * throws IOException {
+ * ...
+ * amos.collect("avro1", reporter, datum);
+ * amos.getCollector("avro2", "A", reporter).collect(datum);
+ * // creates a file "testavrofile", writing datum with the given schema and
+ * // reusing the other settings (output format, etc.) of named output "avro1"
+ * amos.collect("avro1", reporter, schema, datum, "testavrofile");
+ * amos.collect("avro1", reporter, schema, datum, "testavrofile1");
+ * ...
+ * }
+ *
+ * public void close() throws IOException {
+ * amos.close();
+ * ...
+ * }
+ *
+ * }
+ * </pre>
+ */
+
+public class AvroMultipleOutputs {
+
+  private static final String NAMED_OUTPUTS = "mo.namedOutputs";
+
+  private static final String MO_PREFIX = "mo.namedOutput.";
+
+  private static final String FORMAT = ".avro";
+  private static final String MULTI = ".multi";
+
+  private static final String COUNTERS_ENABLED = "mo.counters";
+ 
+ 
+  /**
+   * Counters group used by the counters of MultipleOutputs.
+   */
+  private static final String COUNTERS_GROUP = AvroMultipleOutputs.class.getName();
+
+  /**
+   * Checks whether a named output is already defined or not.
+   *
+   * @param conf           job conf
+   * @param namedOutput    named output name
+   * @param alreadyDefined if true, fail when the named output is already
+   *                       defined; if false, fail when it is not defined
+   * @throws IllegalArgumentException if the check fails, depending on the
+   *                                  value of the 'alreadyDefined' parameter
+   */
+  private static void checkNamedOutput(JobConf conf, String namedOutput,
+                                       boolean alreadyDefined) {
+    List<String> definedChannels = getNamedOutputsList(conf);
+    if (alreadyDefined && definedChannels.contains(namedOutput)) {
+      throw new IllegalArgumentException("Named output '" + namedOutput +
+        "' already alreadyDefined");
+    } else if (!alreadyDefined && !definedChannels.contains(namedOutput)) {
+      throw new IllegalArgumentException("Named output '" + namedOutput +
+        "' not defined");
+    }
+  }
+
+  /**
+   * Checks if a named output name is a valid token.
+   *
+   * @param namedOutput named output Name
+   * @throws IllegalArgumentException if the output name is not valid.
+   */
+  private static void checkTokenName(String namedOutput) {
+    if (namedOutput == null || namedOutput.length() == 0) {
+      throw new IllegalArgumentException(
+        "Name cannot be NULL or empty");
+    }
+    for (char ch : namedOutput.toCharArray()) {
+      if ((ch >= 'A') && (ch <= 'Z')) {
+        continue;
+      }
+      if ((ch >= 'a') && (ch <= 'z')) {
+        continue;
+      }
+      if ((ch >= '0') && (ch <= '9')) {
+        continue;
+      }
+      throw new IllegalArgumentException(
+        "Name cannot have a '" + ch + "' char");
+    }
+  }
+
+  /**
+   * Checks if a named output name is valid.
+   *
+   * @param namedOutput named output Name
+   * @throws IllegalArgumentException if the output name is not valid.
+   */
+  private static void checkNamedOutputName(String namedOutput) {
+    checkTokenName(namedOutput);
+    // name cannot be the name used for the default output
+    if (namedOutput.equals("part")) {
+      throw new IllegalArgumentException(
+        "Named output name cannot be 'part'");
+    }
+  }
+
+  /**
+   * Returns list of channel names.
+   *
+   * @param conf job conf
+   * @return list of channel names
+   */
+  public static List<String> getNamedOutputsList(JobConf conf) {
+    List<String> names = new ArrayList<String>();
+    StringTokenizer st = new StringTokenizer(conf.get(NAMED_OUTPUTS, ""), " ");
+    while (st.hasMoreTokens()) {
+      names.add(st.nextToken());
+    }
+    return names;
+  }
+
+
+  /**
+   * Returns whether a named output is multi.
+   *
+   * @param conf        job conf
+   * @param namedOutput named output
+   * @return <code>true</code> if the named output is multi, <code>false</code>
+   *         if it is single
+   * @throws IllegalArgumentException if the named output is not defined
+   */
+  public static boolean isMultiNamedOutput(JobConf conf, String namedOutput) {
+    checkNamedOutput(conf, namedOutput, false);
+    return conf.getBoolean(MO_PREFIX + namedOutput + MULTI, false);
+  }
+
+  /**
+   * Returns the named output OutputFormat.
+   *
+   * @param conf        job conf
+   * @param namedOutput named output
+   * @return namedOutput OutputFormat
+   */
+  public static Class<? extends OutputFormat> getNamedOutputFormatClass(
+    JobConf conf, String namedOutput) {
+    checkNamedOutput(conf, namedOutput, false);
+    return conf.getClass(MO_PREFIX + namedOutput + FORMAT, null,
+      OutputFormat.class);
+  }
+
+  /**
+   * Adds a named output for the job.
+   * <p/>
+   *
+   * @param conf              job conf to add the named output
+   * @param namedOutput       named output name; it must consist of letters
+   *                          and numbers only and cannot be the word 'part',
+   *                          which is reserved for the default output.
+   * @param outputFormatClass OutputFormat class.
+   * @param schema            Schema to use for this namedOutput
+   */
+  public static void addNamedOutput(JobConf conf, String namedOutput,
+                                Class<? extends OutputFormat> outputFormatClass,
+                                Schema schema) {
+    addNamedOutput(conf, namedOutput, false, outputFormatClass, schema);
+  }
+
+  /**
+   * Adds a multi named output for the job.
+   * <p/>
+   *
+   * @param conf              job conf to add the named output
+   * @param namedOutput       named output name; it must consist of letters
+   *                          and numbers only and cannot be the word 'part',
+   *                          which is reserved for the default output.
+   * @param outputFormatClass OutputFormat class.
+   * @param schema            Schema to use for this namedOutput
+   */
+  public static void addMultiNamedOutput(JobConf conf, String namedOutput,
+                               Class<? extends OutputFormat> outputFormatClass,
+                               Schema schema) {
+    addNamedOutput(conf, namedOutput, true, outputFormatClass, schema);
+  }
+
+  /**
+   * Adds a named output for the job.
+   * <p/>
+   *
+   * @param conf              job conf to add the named output
+   * @param namedOutput       named output name; it must consist of letters
+   *                          and numbers only and cannot be the word 'part',
+   *                          which is reserved for the default output.
+   * @param multi             indicates if the named output is multi
+   * @param outputFormatClass OutputFormat class.
+   * @param schema            Schema to use for this namedOutput
+   */
+  private static void addNamedOutput(JobConf conf, String namedOutput,
+                               boolean multi,
+                               Class<? extends OutputFormat> outputFormatClass,
+                               Schema schema) {
+    checkNamedOutputName(namedOutput);
+    checkNamedOutput(conf, namedOutput, true);
+    if (schema != null)
+      conf.set(MO_PREFIX + namedOutput + ".schema", schema.toString());
+    conf.set(NAMED_OUTPUTS, conf.get(NAMED_OUTPUTS, "") + " " + namedOutput);
+    conf.setClass(MO_PREFIX + namedOutput + FORMAT, outputFormatClass,
+      OutputFormat.class);
+    conf.setBoolean(MO_PREFIX + namedOutput + MULTI, multi);
+  }
+
+  /**
+   * Enables or disables counters for the named outputs.
+   * <p/>
+   * By default these counters are disabled.
+   * <p/>
+   * MultipleOutputs supports counters, by default the are disabled.
+   * The counters group is the {@link AvroMultipleOutputs} class name.
+   * </p>
+   * The names of the counters are the same as the named outputs. For multi
+   * named outputs the name of the counter is the concatenation of the named
+   * output, and underscore '_' and the multiname.
+   *
+   * @param conf    job conf to enableadd the named output.
+   * @param enabled indicates if the counters will be enabled or not.
+   */
+  public static void setCountersEnabled(JobConf conf, boolean enabled) {
+    conf.setBoolean(COUNTERS_ENABLED, enabled);
+  }
+
+  /**
+   * Returns whether the counters for the named outputs are enabled or not.
+   * <p/>
+   * By default these counters are disabled.
+   * <p/>
+   * The counters group is the {@link AvroMultipleOutputs} class name.
+   * The names of the counters are the same as the named outputs. For multi
+   * named outputs the name of the counter is the concatenation of the named
+   * output, an underscore '_', and the multiname.
+   *
+   * @param conf    job conf the counters setting is read from.
+   * @return TRUE if the counters are enabled, FALSE if they are disabled.
+   */
+  public static boolean getCountersEnabled(JobConf conf) {
+    return conf.getBoolean(COUNTERS_ENABLED, false);
+  }
+
+  // instance code, to be used from Mapper/Reducer code
+
+  private JobConf conf;
+  private OutputFormat outputFormat;
+  private Set<String> namedOutputs;
+  private Map<String, RecordWriter> recordWriters;
+  private boolean countersEnabled;
+
+  /**
+   * Creates and initializes multiple named outputs support; it should be
+   * instantiated in the Mapper/Reducer configure method.
+   *
+   * @param job the job configuration object
+   */
+  public AvroMultipleOutputs(JobConf job) {
+    this.conf = job;
+    outputFormat = new InternalFileOutputFormat();
+    namedOutputs = Collections.unmodifiableSet(
+      new HashSet<String>(AvroMultipleOutputs.getNamedOutputsList(job)));
+    recordWriters = new HashMap<String, RecordWriter>();
+    countersEnabled = getCountersEnabled(job);
+  }
+
+  /**
+   * Returns an iterator over the defined named outputs.
+   *
+   * @return an iterator over the defined named outputs
+   */
+  public Iterator<String> getNamedOutputs() {
+    return namedOutputs.iterator();
+  }
+
+
+  // By being synchronized, this can be used with a
+  // MultithreadedMapRunner.
+  private synchronized RecordWriter getRecordWriter(String namedOutput,
+                                                    String baseFileName,
+                                                    final Reporter reporter,
+                                                    Schema schema)
+    throws IOException {
+    RecordWriter writer = recordWriters.get(baseFileName);
+    if (writer == null) {
+      if (countersEnabled && reporter == null) {
+        throw new IllegalArgumentException(
+          "Counters are enabled, Reporter cannot be NULL");
+      }
+      if (schema != null)
+        conf.set(MO_PREFIX + namedOutput + ".schema", schema.toString());
+      JobConf jobConf = new JobConf(conf);
+      jobConf.set(InternalFileOutputFormat.CONFIG_NAMED_OUTPUT, namedOutput);
+      FileSystem fs = FileSystem.get(conf);
+      writer = outputFormat.getRecordWriter(fs, jobConf, baseFileName, reporter);
+
+      if (countersEnabled) {
+        if (reporter == null) {
+          throw new IllegalArgumentException(
+            "Counters are enabled, Reporter cannot be NULL");
+        }
+        writer = new RecordWriterWithCounter(writer, baseFileName, reporter);
+      }
+      recordWriters.put(baseFileName, writer);
+    }
+    return writer;
+  }
+
+  private static class RecordWriterWithCounter implements RecordWriter {
+    private RecordWriter writer;
+    private String counterName;
+    private Reporter reporter;
+
+    public RecordWriterWithCounter(RecordWriter writer, String counterName,
+                                   Reporter reporter) {
+      this.writer = writer;
+      this.counterName = counterName;
+      this.reporter = reporter;
+    }
+
+    @SuppressWarnings({"unchecked"})
+    public void write(Object key, Object value) throws IOException {
+      reporter.incrCounter(COUNTERS_GROUP, counterName, 1);
+      writer.write(key, value);
+    }
+
+    public void close(Reporter reporter) throws IOException {
+      writer.close(reporter);
+    }
+  }
+  
+  /**
+   * Collects output for a named output using its default schema.
+   * <p/>
+   *
+   * @param namedOutput the named output name
+   * @param reporter    the reporter
+   * @param datum       output data
+   * @throws IOException thrown if output collector could not be created
+   */
+  public void collect(String namedOutput, Reporter reporter, Object datum) throws IOException {
+    getCollector(namedOutput, reporter).collect(datum);
+  }
+  
+  /**
+   * OutputCollector with custom schema.
+   * <p/>
+   *
+   * @param namedOutput the named output name (this will be the output file name)
+   * @param reporter    the reporter
+   * @param datum       output data
+   * @param schema      schema to use for this output
+   * @throws IOException thrown if output collector could not be created
+  */
+  public void collect(String namedOutput, Reporter reporter, Schema schema, Object datum) throws IOException {
+    getCollector(namedOutput, reporter, schema).collect(datum);
+  }
+  
+  /**
+   * OutputCollector with custom schema and file name.
+   * <p/>
+   *
+   * @param namedOutput the named output name
+   * @param reporter    the reporter
+   * @param schema      schema to use for this output
+   * @param datum       output data
+   * @param baseOutputPath output file name to use.
+   * @throws IOException thrown if output collector could not be created
+   */
+  public void collect(String namedOutput, Reporter reporter, Schema schema, Object datum, String baseOutputPath) throws IOException {
+    getCollector(namedOutput, null, reporter, baseOutputPath, schema).collect(datum);
+  }
+  
+  /**
+   * Gets the output collector for a named output.
+   * <p/>
+   *
+   * @param namedOutput the named output name
+   * @param reporter    the reporter
+   * @return the output collector for the given named output
+   * @throws IOException thrown if output collector could not be created
+   * @deprecated Use {@link #collect} method for collecting output
+   */
+  @SuppressWarnings({"unchecked", "rawtypes"})
+  public AvroCollector getCollector(String namedOutput, Reporter reporter)
+    throws IOException {
+    return getCollector(namedOutput, null, reporter, namedOutput, null);
+  }
+
+  @SuppressWarnings("rawtypes")
+  private AvroCollector getCollector(String namedOutput, Reporter reporter, Schema schema)
+      throws IOException {
+    return getCollector(namedOutput, null, reporter, namedOutput, schema);
+  }
+  
+  /**
+   * Gets the output collector for a named output.
+   * <p/>
+   *
+   * @param namedOutput the named output name
+   * @param reporter    the reporter
+   * @param multiName   the multiname 
+   * @return the output collector for the given named output
+   * @throws IOException thrown if output collector could not be created
+   */
+  @SuppressWarnings("rawtypes")
+  public AvroCollector getCollector(String namedOutput, String multiName, Reporter reporter)
+      throws IOException {
+    return getCollector(namedOutput, multiName, reporter, namedOutput, null);
+  }
+
+  @SuppressWarnings("rawtypes")
+  private AvroCollector getCollector(String namedOutput, Schema schema, Reporter reporter, String baseFileName)
+      throws IOException {
+    return getCollector(namedOutput, null, reporter, baseFileName, schema);
+  }
+  
+  /**
+   * Gets the output collector for a multi named output.
+   * <p/>
+   *
+   * @param namedOutput the named output name
+   * @param multiName   the multi name part
+   * @param reporter    the reporter
+   * @return the output collector for the given named output
+   * @throws IOException thrown if output collector could not be created
+   */
+  @SuppressWarnings({"unchecked"})
+  private AvroCollector getCollector(String namedOutput, String multiName,
+                                     Reporter reporter, String baseOutputFileName, Schema schema)
+    throws IOException {
+
+    checkNamedOutputName(namedOutput);
+    if (!namedOutputs.contains(namedOutput)) {
+      throw new IllegalArgumentException("Undefined named output '" +
+        namedOutput + "'");
+    }
+    boolean multi = isMultiNamedOutput(conf, namedOutput);
+
+    if (!multi && multiName != null) {
+      throw new IllegalArgumentException("Name output '" + namedOutput +
+        "' has not been defined as multi");
+    }
+    if (multi) {
+      checkTokenName(multiName);
+    }
+
+    String baseFileName = (multi) ? namedOutput + "_" + multiName : baseOutputFileName;
+
+    final RecordWriter writer =
+      getRecordWriter(namedOutput, baseFileName, reporter,schema);
+
+    return new AvroCollector() {
+      @SuppressWarnings({"unchecked"})
+      public void collect(Object key) throws IOException {
+        AvroWrapper wrapper = new AvroWrapper(key);
+        writer.write(wrapper, NullWritable.get());
+      }
+
+      public void collect(Object key, Object value) throws IOException {
+        writer.write(key, value);
+      }
+    };
+  }
+
+  /**
+   * Closes all the opened named outputs.
+   * <p/>
+   * If overridden, subclasses must invoke <code>super.close()</code> at the
+   * end of their <code>close()</code>.
+   *
+   * @throws java.io.IOException thrown if any of the MultipleOutput files
+   *                             could not be closed properly.
+   */
+  public void close() throws IOException {
+    for (RecordWriter writer : recordWriters.values()) {
+      writer.close(null);
+    }
+  }
+  
+  private static class InternalFileOutputFormat extends FileOutputFormat<Object, Object> {
+    public static final String CONFIG_NAMED_OUTPUT = "mo.config.namedOutput";
+
+    @SuppressWarnings({"unchecked", "deprecation"})
+    public RecordWriter<Object, Object> getRecordWriter(FileSystem fs, JobConf job, String baseFileName, Progressable arg3) throws IOException {
+      String nameOutput = job.get(CONFIG_NAMED_OUTPUT, null);
+      String fileName = getUniqueName(job, baseFileName);
+      Schema schema = null;
+      String schemastr = job.get(MO_PREFIX + nameOutput + ".schema", null);
+      if (schemastr != null)
+        schema = Schema.parse(schemastr);
+      JobConf outputConf = new JobConf(job);
+      outputConf.setOutputFormat(getNamedOutputFormatClass(job, nameOutput));
+      boolean isMapOnly = job.getNumReduceTasks() == 0;
+      if (schema != null) {
+        if (isMapOnly)
+          AvroJob.setMapOutputSchema(outputConf, schema);
+        else
+          AvroJob.setOutputSchema(outputConf, schema);
+      }
+      OutputFormat outputFormat = outputConf.getOutputFormat();
+      return outputFormat.getRecordWriter(fs, outputConf, fileName, arg3);
+    }
+  }
+}
+
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroOutputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroOutputFormat.java
new file mode 100644
index 0000000..2a681cd
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroOutputFormat.java
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.util.Map;
+import java.net.URLDecoder;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.RecordWriter;
+import org.apache.hadoop.util.Progressable;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.hadoop.file.HadoopCodecFactory;
+
+import static org.apache.avro.file.DataFileConstants.DEFAULT_SYNC_INTERVAL;
+import static org.apache.avro.file.DataFileConstants.DEFLATE_CODEC;
+import static org.apache.avro.file.DataFileConstants.XZ_CODEC;
+import static org.apache.avro.file.CodecFactory.DEFAULT_DEFLATE_LEVEL;
+import static org.apache.avro.file.CodecFactory.DEFAULT_XZ_LEVEL;
+
+/**
+ * An {@link org.apache.hadoop.mapred.OutputFormat} for Avro data files.
+ * <p/>
+ * You can specify various options using Job Configuration properties.
+ * Look at the fields in {@link AvroJob} as well as this class to get
+ * an overview of the supported options.
+ */
+public class AvroOutputFormat <T>
+  extends FileOutputFormat<AvroWrapper<T>, NullWritable> {
+
+  /** The file name extension for avro data files. */
+  public final static String EXT = ".avro";
+
+  /** The configuration key for Avro deflate level. */
+  public static final String DEFLATE_LEVEL_KEY = "avro.mapred.deflate.level";
+
+  /** The configuration key for Avro XZ level. */
+  public static final String XZ_LEVEL_KEY = "avro.mapred.xz.level";
+
+  /** The configuration key for Avro sync interval. */
+  public static final String SYNC_INTERVAL_KEY = "avro.mapred.sync.interval";
+
+  /** Enable output compression using the deflate codec and specify its level.*/
+  public static void setDeflateLevel(JobConf job, int level) {
+    FileOutputFormat.setCompressOutput(job, true);
+    job.setInt(DEFLATE_LEVEL_KEY, level);
+  }
+
+  /** Set the sync interval to be used by the underlying {@link DataFileWriter}.*/
+  public static void setSyncInterval(JobConf job, int syncIntervalInBytes) {
+    job.setInt(SYNC_INTERVAL_KEY, syncIntervalInBytes);
+  }
+  
+  static <T> void configureDataFileWriter(DataFileWriter<T> writer,
+      JobConf job) throws UnsupportedEncodingException {
+    
+    CodecFactory factory = getCodecFactory(job);
+    
+    if (factory != null) {
+      writer.setCodec(factory);  
+    }
+    
+    writer.setSyncInterval(job.getInt(SYNC_INTERVAL_KEY, DEFAULT_SYNC_INTERVAL));
+
+    // copy metadata from job
+    for (Map.Entry<String,String> e : job) {
+      if (e.getKey().startsWith(AvroJob.TEXT_PREFIX))
+        writer.setMeta(e.getKey().substring(AvroJob.TEXT_PREFIX.length()),
+                       e.getValue());
+      if (e.getKey().startsWith(AvroJob.BINARY_PREFIX))
+        writer.setMeta(e.getKey().substring(AvroJob.BINARY_PREFIX.length()),
+                       URLDecoder.decode(e.getValue(), "ISO-8859-1")
+                       .getBytes("ISO-8859-1"));
+    }
+  }
+
+  /** Selects the compression codec to use from the JobConf.
+   * The order of selection is as follows:
+   * <ul>
+   *   <li>If mapred.output.compress is false, no compression is used</li>
+   *   <li>Otherwise, use avro.output.codec if set</li>
+   *   <li>Otherwise, use mapred.output.compression.codec if set, mapped
+   *       to its Avro equivalent</li>
+   *   <li>Otherwise, default to the deflate codec</li>
+   * </ul>
+   */
+  static CodecFactory getCodecFactory(JobConf job) {
+    CodecFactory factory = null;
+    
+    if (FileOutputFormat.getCompressOutput(job)) {
+      int deflateLevel = job.getInt(DEFLATE_LEVEL_KEY, DEFAULT_DEFLATE_LEVEL);
+      int xzLevel = job.getInt(XZ_LEVEL_KEY, DEFAULT_XZ_LEVEL);
+      String codecName = job.get(AvroJob.OUTPUT_CODEC);
+      
+      if (codecName == null) {
+        String codecClassName = job.get("mapred.output.compression.codec", null);
+        String avroCodecName = HadoopCodecFactory.getAvroCodecName(codecClassName);
+        if (codecClassName != null && avroCodecName != null) {
+          factory = HadoopCodecFactory.fromHadoopString(codecClassName);
+          job.set(AvroJob.OUTPUT_CODEC, avroCodecName);
+          return factory;
+        } else {
+          return CodecFactory.deflateCodec(deflateLevel);
+        }
+      } else {
+        if (codecName.equals(DEFLATE_CODEC)) {
+          factory = CodecFactory.deflateCodec(deflateLevel);
+        } else if (codecName.equals(XZ_CODEC)) {
+          factory = CodecFactory.xzCodec(xzLevel);
+        } else {
+          factory = CodecFactory.fromString(codecName);
+        }
+      }
+    }
+    
+    return factory;
+  }
+
+  @Override
+  public RecordWriter<AvroWrapper<T>, NullWritable>
+    getRecordWriter(FileSystem ignore, JobConf job,
+                    String name, Progressable prog)
+    throws IOException {
+
+    boolean isMapOnly = job.getNumReduceTasks() == 0;
+    Schema schema = isMapOnly
+      ? AvroJob.getMapOutputSchema(job)
+      : AvroJob.getOutputSchema(job);
+    GenericData dataModel = AvroJob.createDataModel(job);
+
+    final DataFileWriter<T> writer =
+      new DataFileWriter<T>(dataModel.createDatumWriter(null));
+    
+    configureDataFileWriter(writer, job);
+
+    Path path = FileOutputFormat.getTaskOutputPath(job, name+EXT);
+    writer.create(schema, path.getFileSystem(job).create(path));
+
+    return new RecordWriter<AvroWrapper<T>, NullWritable>() {
+        public void write(AvroWrapper<T> wrapper, NullWritable ignore)
+          throws IOException {
+          writer.append(wrapper.datum());
+        }
+        public void close(Reporter reporter) throws IOException {
+          writer.close();
+        }
+      };
+  }
+
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroRecordReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroRecordReader.java
new file mode 100644
index 0000000..c173d05
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroRecordReader.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.RecordReader;
+
+import org.apache.avro.file.FileReader;
+import org.apache.avro.file.DataFileReader;
+
+/** A {@link RecordReader} for Avro data files. */
+public class AvroRecordReader<T>
+  implements RecordReader<AvroWrapper<T>, NullWritable> {
+
+  private FileReader<T> reader;
+  private long start;
+  private long end;
+
+  public AvroRecordReader(JobConf job, FileSplit split)
+    throws IOException {
+    this(DataFileReader.openReader
+         (new FsInput(split.getPath(), job),
+          AvroJob.createInputDataModel(job)
+          .createDatumReader(AvroJob.getInputSchema(job))),
+         split);
+  }
+
+  protected AvroRecordReader(FileReader<T> reader, FileSplit split)
+    throws IOException {
+    this.reader = reader;
+    reader.sync(split.getStart());                    // sync to start
+    this.start = reader.tell();
+    this.end = split.getStart() + split.getLength();
+  }
+
+  public AvroWrapper<T> createKey() {
+    return new AvroWrapper<T>(null);
+  }
+  
+  public NullWritable createValue() { return NullWritable.get(); }
+    
+  public boolean next(AvroWrapper<T> wrapper, NullWritable ignore)
+    throws IOException {
+    if (!reader.hasNext() || reader.pastSync(end))
+      return false;
+    wrapper.datum(reader.next(wrapper.datum()));
+    return true;
+  }
+  
+  public float getProgress() throws IOException {
+    if (end == start) {
+      return 0.0f;
+    } else {
+      return Math.min(1.0f, (getPos() - start) / (float)(end - start));
+    }
+  }
+  
+  public long getPos() throws IOException {
+    return reader.tell();
+  }
+
+  public void close() throws IOException { reader.close(); }
+  
+}
+
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroReducer.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroReducer.java
new file mode 100644
index 0000000..d7c0103
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroReducer.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobConfigurable;
+import org.apache.hadoop.mapred.Reporter;
+
+/** A reducer for Avro data.
+ *
+ * <p>Applications should subclass this class and pass their subclass to {@link
+ * AvroJob#setReducerClass(JobConf, Class)} and perhaps {@link AvroJob#setCombinerClass(JobConf, Class)}.
+ * Subclasses override {@link #reduce(Object, Iterable, AvroCollector, Reporter)}.
+ */
+
+public class AvroReducer<K,V,OUT> extends Configured implements JobConfigurable, Closeable {
+
+  private Pair<K,V> outputPair;
+
+  /** Called with all map output values with a given key.  By default, pairs
+   * key with each value, collecting {@link Pair} instances. */
+  @SuppressWarnings("unchecked")
+  public void reduce(K key, Iterable<V> values,
+                     AvroCollector<OUT> collector,
+                     Reporter reporter) throws IOException {
+    if (outputPair == null)
+      outputPair = new Pair<K,V>(AvroJob.getOutputSchema(getConf()));
+    for (V value : values) {
+      outputPair.set(key, value);
+      collector.collect((OUT)outputPair);
+    }
+  }
+
+  /** Subclasses can override this as desired. */
+  @Override
+  public void close() throws IOException {
+    // no op
+  }
+
+  /** Subclasses can override this as desired. */
+  @Override
+  public void configure(JobConf jobConf) {
+    // no op
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroSerialization.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroSerialization.java
new file mode 100644
index 0000000..92501bf
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroSerialization.java
@@ -0,0 +1,140 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import org.apache.hadoop.io.serializer.Serialization;
+import org.apache.hadoop.io.serializer.Deserializer;
+import org.apache.hadoop.io.serializer.Serializer;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.BinaryDecoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.EncoderFactory;
+
+/** The {@link Serialization} used by jobs configured with {@link AvroJob}. */
+public class AvroSerialization<T> extends Configured 
+  implements Serialization<AvroWrapper<T>> {
+
+  public boolean accept(Class<?> c) {
+    return AvroWrapper.class.isAssignableFrom(c);
+  }
+  
+  /** Returns the specified map output deserializer.  Defaults to the final
+   * output deserializer if no map output schema was specified. */
+  public Deserializer<AvroWrapper<T>> getDeserializer(Class<AvroWrapper<T>> c) {
+    Configuration conf = getConf();
+    boolean isKey = AvroKey.class.isAssignableFrom(c);
+    Schema schema = isKey
+      ? Pair.getKeySchema(AvroJob.getMapOutputSchema(conf))
+      : Pair.getValueSchema(AvroJob.getMapOutputSchema(conf));
+    GenericData dataModel = AvroJob.createMapOutputDataModel(conf);
+    DatumReader<T> datumReader = dataModel.createDatumReader(schema);
+    return new AvroWrapperDeserializer(datumReader, isKey);
+  }
+  
+  private static final DecoderFactory FACTORY = DecoderFactory.get();
+
+  private class AvroWrapperDeserializer
+    implements Deserializer<AvroWrapper<T>> {
+
+    private DatumReader<T> reader;
+    private BinaryDecoder decoder;
+    private boolean isKey;
+    
+    public AvroWrapperDeserializer(DatumReader<T> reader, boolean isKey) {
+      this.reader = reader;
+      this.isKey = isKey;
+    }
+    
+    public void open(InputStream in) {
+      this.decoder = FACTORY.directBinaryDecoder(in, decoder);
+    }
+    
+    public AvroWrapper<T> deserialize(AvroWrapper<T> wrapper)
+      throws IOException {
+      T datum = reader.read(wrapper == null ? null : wrapper.datum(), decoder);
+      if (wrapper == null) {
+        wrapper = isKey? new AvroKey<T>(datum) : new AvroValue<T>(datum);
+      } else {
+        wrapper.datum(datum);
+      }
+      return wrapper;
+    }
+
+    public void close() throws IOException {
+      decoder.inputStream().close();
+    }
+    
+  }
+  
+  /** Returns the specified output serializer. */
+  public Serializer<AvroWrapper<T>> getSerializer(Class<AvroWrapper<T>> c) {
+    // AvroWrapper used for final output, AvroKey or AvroValue for map output
+    boolean isFinalOutput = c.equals(AvroWrapper.class);
+    Configuration conf = getConf();
+    Schema schema = isFinalOutput
+      ? AvroJob.getOutputSchema(conf)
+      : (AvroKey.class.isAssignableFrom(c)
+         ? Pair.getKeySchema(AvroJob.getMapOutputSchema(conf))
+         : Pair.getValueSchema(AvroJob.getMapOutputSchema(conf)));
+    GenericData dataModel = AvroJob.createDataModel(conf);
+    return new AvroWrapperSerializer(dataModel.createDatumWriter(schema));
+  }
+
+  private class AvroWrapperSerializer implements Serializer<AvroWrapper<T>> {
+
+    private DatumWriter<T> writer;
+    private OutputStream out;
+    private BinaryEncoder encoder;
+    
+    public AvroWrapperSerializer(DatumWriter<T> writer) {
+      this.writer = writer;
+    }
+
+    public void open(OutputStream out) {
+      this.out = out;
+      this.encoder = new EncoderFactory().configureBlockSize(512)
+          .binaryEncoder(out, null);
+    }
+
+    public void serialize(AvroWrapper<T> wrapper) throws IOException {
+      writer.write(wrapper.datum(), encoder);
+      // would be a lot faster if the Serializer interface had a flush()
+      // method and the Hadoop framework called it when needed rather
+      // than for every record.
+      encoder.flush();
+    }
+
+    public void close() throws IOException {
+      out.close();
+    }
+
+  }
+
+}
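
The class javadoc above implies that AvroJob registers this serialization
when a job is configured through it; when wiring a JobConf by hand, the
class would have to be appended to Hadoop's io.serializations list. A
minimal sketch, assuming an existing Configuration named conf:

    // Sketch: append AvroSerialization to the serializations Hadoop
    // consults when (de)serializing shuffle data.
    java.util.Collection<String> serializations =
        conf.getStringCollection("io.serializations");
    if (!serializations.contains(AvroSerialization.class.getName())) {
      serializations.add(AvroSerialization.class.getName());
      conf.setStrings("io.serializations",
                      serializations.toArray(new String[0]));
    }
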
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java
new file mode 100644
index 0000000..ef1fae9
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroTextOutputFormat.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import static org.apache.avro.mapred.AvroOutputFormat.EXT;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.reflect.ReflectDatumWriter;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RecordWriter;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.util.Progressable;
+
+/** The equivalent of {@link org.apache.hadoop.mapred.TextOutputFormat} for
+ * writing to Avro Data Files with a <code>"bytes"</code> schema. */
+public class AvroTextOutputFormat<K, V> extends FileOutputFormat<K, V> {
+
+  private static final String UTF8 = "UTF-8";
+
+  @Override
+  public RecordWriter<K, V>
+    getRecordWriter(FileSystem ignore, JobConf job,
+                    String name, Progressable prog)
+    throws IOException {
+
+    Schema schema = Schema.create(Schema.Type.BYTES);
+    
+    final byte[] keyValueSeparator =
+      job.get("mapreduce.output.textoutputformat.separator", "\t").getBytes(UTF8);
+
+    final DataFileWriter<ByteBuffer> writer =
+      new DataFileWriter<ByteBuffer>(new ReflectDatumWriter<ByteBuffer>());
+
+    AvroOutputFormat.configureDataFileWriter(writer, job);
+
+    Path path = FileOutputFormat.getTaskOutputPath(job, name+EXT);
+    writer.create(schema, path.getFileSystem(job).create(path));
+
+    return new AvroTextRecordWriter(writer, keyValueSeparator);
+  }
+  
+  class AvroTextRecordWriter implements RecordWriter<K, V> {
+    private final DataFileWriter<ByteBuffer> writer;
+    private final byte[] keyValueSeparator;
+    
+    public AvroTextRecordWriter(DataFileWriter<ByteBuffer> writer,
+        byte[] keyValueSeparator) {
+      this.writer = writer;
+      this.keyValueSeparator = keyValueSeparator;
+    }
+    
+    public void write(K key, V value) throws IOException {
+      boolean nullKey = key == null || key instanceof NullWritable;
+      boolean nullValue = value == null || value instanceof NullWritable;
+      if (nullKey && nullValue) {
+        return;
+      } else if (!nullKey && nullValue) {
+        writer.append(toByteBuffer(key));
+      } else if (nullKey && !nullValue) {
+        writer.append(toByteBuffer(value));
+      } else {
+        writer.append(toByteBuffer(key, keyValueSeparator, value));
+      }
+    }
+    
+    public void close(Reporter reporter) throws IOException {
+      writer.close();
+    }
+    
+    private ByteBuffer toByteBuffer(Object o) throws IOException {
+      if (o instanceof Text) {
+        Text to = (Text) o;
+        return ByteBuffer.wrap(to.getBytes(), 0, to.getLength());
+      } else {
+        return ByteBuffer.wrap(o.toString().getBytes(UTF8));
+      }
+    }
+    
+    private ByteBuffer toByteBuffer(Object key, byte[] sep, Object value)
+        throws IOException {
+      byte[] keyBytes, valBytes;
+      int keyLength, valLength;
+      if (key instanceof Text) {
+        Text tkey = (Text) key;
+        keyBytes = tkey.getBytes();
+        keyLength = tkey.getLength();
+      } else {
+        keyBytes = key.toString().getBytes(UTF8);
+        keyLength = keyBytes.length;
+      }
+      if (value instanceof Text) {
+        Text tval = (Text) value;
+        valBytes = tval.getBytes();
+        valLength = tval.getLength();
+      } else {
+        valBytes = value.toString().getBytes(UTF8);
+        valLength = valBytes.length;
+      }
+      ByteBuffer buf = ByteBuffer.allocate(keyLength + sep.length + valLength);
+      buf.put(keyBytes, 0, keyLength);
+      buf.put(sep);
+      buf.put(valBytes, 0, valLength);
+      buf.rewind();
+      return buf;
+    }
+
+  }
+
+}
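
A minimal job setup using this format might look like the sketch below
(the output path is hypothetical; the separator property is the one read
by getRecordWriter() above):

    // Sketch: write text key/value output as an Avro data file of
    // "bytes" records, with key and value separated by a tab.
    JobConf job = new JobConf();
    job.setOutputFormat(AvroTextOutputFormat.class);
    job.set("mapreduce.output.textoutputformat.separator", "\t");
    FileOutputFormat.setOutputPath(job, new Path("/out"));
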
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroUtf8InputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroUtf8InputFormat.java
new file mode 100644
index 0000000..ac91109
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroUtf8InputFormat.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+
+import org.apache.avro.util.Utf8;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.CompressionCodecFactory;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobConfigurable;
+import org.apache.hadoop.mapred.LineRecordReader;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+
+/**
+ * An {@link org.apache.hadoop.mapred.InputFormat} for text files.
+ * Each line is a {@link Utf8} key; values are null.
+ */
+public class AvroUtf8InputFormat
+  extends FileInputFormat<AvroWrapper<Utf8>, NullWritable>
+  implements JobConfigurable {
+
+  static class Utf8LineRecordReader implements
+    RecordReader<AvroWrapper<Utf8>, NullWritable> {
+
+    private LineRecordReader lineRecordReader;
+    
+    private LongWritable currentKeyHolder = new LongWritable();
+    private Text currentValueHolder = new Text();
+    
+    public Utf8LineRecordReader(Configuration job, 
+        FileSplit split) throws IOException {
+      this.lineRecordReader = new LineRecordReader(job, split);
+    }
+    
+    public void close() throws IOException {
+      lineRecordReader.close();
+    }
+
+    public long getPos() throws IOException {
+      return lineRecordReader.getPos();
+    }
+
+    public float getProgress() throws IOException {
+      return lineRecordReader.getProgress();
+    }
+
+    public boolean next(AvroWrapper<Utf8> key, NullWritable value)
+      throws IOException {
+      boolean success = lineRecordReader.next(currentKeyHolder,
+          currentValueHolder);
+      if (success) {
+        key.datum(new Utf8(currentValueHolder.getBytes())
+            .setLength(currentValueHolder.getLength()));
+      } else {
+        key.datum(null);
+      }
+      return success;
+    }
+
+    @Override
+    public AvroWrapper<Utf8> createKey() {
+      return new AvroWrapper<Utf8>(null);
+    }
+
+    @Override
+    public NullWritable createValue() {
+      return NullWritable.get();
+    }
+
+  }
+
+  private CompressionCodecFactory compressionCodecs = null;
+
+  public void configure(JobConf conf) {
+    compressionCodecs = new CompressionCodecFactory(conf);
+  }
+
+  protected boolean isSplitable(FileSystem fs, Path file) {
+    return compressionCodecs.getCodec(file) == null;
+  }
+
+  @Override
+  public RecordReader<AvroWrapper<Utf8>, NullWritable>
+    getRecordReader(InputSplit split, JobConf job, Reporter reporter)
+    throws IOException {
+
+    reporter.setStatus(split.toString());
+    return new Utf8LineRecordReader(job, (FileSplit) split);
+  }
+
+}
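
Paired with an AvroMapper, each input line arrives as the Utf8 map input
datum. The LineWordMapper below is a hypothetical sketch, not part of
this commit, tokenizing such lines into Pair<Utf8,Long> counts:

    import java.io.IOException;
    import org.apache.avro.mapred.AvroCollector;
    import org.apache.avro.mapred.AvroMapper;
    import org.apache.avro.mapred.Pair;
    import org.apache.avro.util.Utf8;
    import org.apache.hadoop.mapred.Reporter;

    // Hypothetical example: emit a count of 1 for each word on a line.
    public class LineWordMapper
        extends AvroMapper<Utf8, Pair<Utf8, Long>> {
      @Override
      public void map(Utf8 line, AvroCollector<Pair<Utf8, Long>> collector,
                      Reporter reporter) throws IOException {
        for (String word : line.toString().split("\\s+"))
          if (!word.isEmpty())
            collector.collect(new Pair<Utf8, Long>(new Utf8(word), 1L));
      }
    }
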
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroValue.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroValue.java
new file mode 100644
index 0000000..223a2d7
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroValue.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+/** The wrapper of values for jobs configured with {@link AvroJob}. */
+public class AvroValue<T> extends AvroWrapper<T> {
+  /** Construct an {@link AvroValue} wrapping no value. */
+  public AvroValue() { this(null); }
+
+  /** Wrap a value. */
+  public AvroValue(T datum) { super(datum); }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroWrapper.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroWrapper.java
new file mode 100644
index 0000000..12c4d9e
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/AvroWrapper.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+/** The wrapper of data for jobs configured with {@link AvroJob}. */
+public class AvroWrapper<T> {
+  private T datum;
+
+  /** Construct an {@link AvroWrapper} wrapping no datum. */
+  public AvroWrapper() { this(null); }
+
+  /** Wrap a datum. */
+  public AvroWrapper(T datum) { this.datum = datum; }
+
+  /** Return the wrapped datum. */
+  public T datum() { return datum; }
+
+  /** Set the wrapped datum. */
+  public void datum(T datum) { this.datum = datum; }
+  
+  public int hashCode() {
+    return (datum == null) ? 0 : datum.hashCode();
+  }
+
+  public boolean equals(Object obj) {
+    if (this == obj)
+      return true;
+    if (obj == null)
+      return false;
+    if (getClass() != obj.getClass())
+      return false;
+    AvroWrapper that = (AvroWrapper)obj;
+    if (this.datum == null) {
+      if (that.datum != null)
+        return false;
+    } else if (!datum.equals(that.datum))
+      return false;
+    return true;
+  }
+    
+  /** Get the wrapped datum as JSON. */
+  @Override
+  public String toString() {
+    return datum.toString();
+  }
+}
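
Note that equals() compares both the wrapped datum and the concrete
wrapper class, so key and value wrappers never compare equal even when
they hold the same datum; a small sketch:

    // AvroKey and AvroValue are distinct classes, so getClass() differs.
    AvroKey<String> k = new AvroKey<String>("x");
    assert k.equals(new AvroKey<String>("x"));      // same class and datum
    assert !k.equals(new AvroValue<String>("x"));   // different class
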
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/DelegatingInputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/DelegatingInputFormat.java
new file mode 100644
index 0000000..1358e3c
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/DelegatingInputFormat.java
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.avro.Schema;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * An {@link InputFormat} that delegates read behavior for each path
+ * based on the path's associated Avro schema.
+ * @see AvroMultipleInputs#addInputPath(JobConf, Path, Class, Schema)
+ */
+class DelegatingInputFormat<K, V> implements InputFormat<K, V> {
+
+  public InputSplit[] getSplits(JobConf conf, int numSplits) throws IOException {
+
+    JobConf confCopy = new JobConf(conf);
+    List<InputSplit> splits = new ArrayList<InputSplit>();
+
+    Map<Path, Class<? extends AvroMapper>> mapperMap = AvroMultipleInputs
+       .getMapperTypeMap(conf);
+    Map<Path, Schema> schemaMap = AvroMultipleInputs
+        .getInputSchemaMap(conf);
+    Map<Schema, List<Path>> schemaPaths
+        = new HashMap<Schema, List<Path>>();
+
+    // First, build a map of Schemas to Paths
+    for (Entry<Path, Schema> entry : schemaMap.entrySet()) {
+      if (!schemaPaths.containsKey(entry.getValue())) {
+        schemaPaths.put(entry.getValue(), new LinkedList<Path>());
+      }
+
+      schemaPaths.get(entry.getValue()).add(entry.getKey());
+    }
+
+    for (Entry<Schema, List<Path>> schemaEntry :
+        schemaPaths.entrySet()) {
+      Schema schema = schemaEntry.getKey();
+      InputFormat format = (InputFormat) ReflectionUtils.newInstance(
+         AvroInputFormat.class, conf);
+      List<Path> paths = schemaEntry.getValue();
+
+      Map<Class<? extends AvroMapper>, List<Path>> mapperPaths
+          = new HashMap<Class<? extends AvroMapper>, List<Path>>();
+
+      // Now, for each set of paths that have a common Schema, build
+      // a map of Mappers to the paths they're used for
+      for (Path path : paths) {
+       Class<? extends AvroMapper> mapperClass = mapperMap.get(path);
+       if (!mapperPaths.containsKey(mapperClass)) {
+         mapperPaths.put(mapperClass, new LinkedList<Path>());
+       }
+
+       mapperPaths.get(mapperClass).add(path);
+      }
+
+      // Now each set of paths that has a common InputFormat and Mapper can
+      // be added to the same job, and split together.
+      for (Entry<Class<? extends AvroMapper>, List<Path>> mapEntry : mapperPaths
+         .entrySet()) {
+       paths = mapEntry.getValue();
+       Class<? extends AvroMapper> mapperClass = mapEntry.getKey();
+
+       if (mapperClass == null) {
+         mapperClass = (Class<? extends AvroMapper>) conf.getMapperClass();
+       }
+
+       FileInputFormat.setInputPaths(confCopy, paths.toArray(new Path[paths
+           .size()]));
+
+       // Get splits for each input path and tag with InputFormat
+       // and Mapper types by wrapping in a TaggedInputSplit.
+       InputSplit[] pathSplits = format.getSplits(confCopy, numSplits);
+       for (InputSplit pathSplit : pathSplits) {
+         splits.add(new TaggedInputSplit(pathSplit, conf, format.getClass(),
+             mapperClass, schema));
+       }
+      }
+    }
+
+    return splits.toArray(new InputSplit[splits.size()]);
+  }
+
+  @SuppressWarnings("unchecked")
+  public RecordReader<K, V> getRecordReader(InputSplit split, JobConf conf,
+      Reporter reporter) throws IOException {
+
+    // Find the Schema and then build the RecordReader from the
+    // TaggedInputSplit.
+
+    TaggedInputSplit taggedInputSplit = (TaggedInputSplit) split;
+    Schema schema = taggedInputSplit.getSchema();
+    AvroJob.setInputSchema(conf, schema);
+    InputFormat<K, V> inputFormat = (InputFormat<K, V>) ReflectionUtils
+       .newInstance(taggedInputSplit.getInputFormatClass(), conf);
+    return inputFormat.getRecordReader(taggedInputSplit.getInputSplit(), conf,
+       reporter);
+  }
+}
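
Per the @see reference above, inputs are tagged through
AvroMultipleInputs; a sketch with hypothetical paths, schemas, and
mapper classes:

    // Sketch: give each input path its own schema and AvroMapper;
    // DelegatingInputFormat then routes each split accordingly.
    AvroMultipleInputs.addInputPath(job, new Path("/data/clicks"),
                                    ClickMapper.class, clickSchema);
    AvroMultipleInputs.addInputPath(job, new Path("/data/views"),
                                    ViewMapper.class, viewSchema);
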
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/DelegatingMapper.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/DelegatingMapper.java
new file mode 100644
index 0000000..64392e1
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/DelegatingMapper.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * A {@link Mapper} that delegates the mapping of each path to one of
+ * several other mappers.  Similar to {@link HadoopMapper}, but
+ * instantiates the mapper class in the map() call instead of during
+ * configure(), since the class is determined by the input split.
+ *
+ * @see AvroMultipleInputs#addInputPath(JobConf, Path, Class, Schema)
+ */
+class DelegatingMapper<IN,OUT,K,V,KO,VO> extends MapReduceBase
+implements Mapper<AvroWrapper<IN>,NullWritable,KO,VO>
+{
+  AvroMapper<IN, OUT> mapper;
+  JobConf conf;
+  boolean isMapOnly;
+  AvroCollector<OUT> out;
+
+  public void configure(JobConf conf) {
+    this.conf = conf;
+    this.isMapOnly = conf.getNumReduceTasks() == 0;
+  }
+
+  @Override
+  public void map(AvroWrapper<IN> wrapper, NullWritable value,
+      OutputCollector<KO, VO> collector, Reporter reporter)
+          throws IOException {
+    if (mapper == null) {
+      TaggedInputSplit is = (TaggedInputSplit) reporter.getInputSplit();
+      Class<? extends AvroMapper> mapperClass = is.getMapperClass();
+      mapper = (AvroMapper<IN,OUT>)
+          ReflectionUtils.newInstance(mapperClass, conf);
+    }
+    if (out == null)
+      out = new MapCollector<OUT,K,V,KO,VO>(collector, isMapOnly);
+    mapper.map(wrapper.datum(), out, reporter);
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/FsInput.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/FsInput.java
new file mode 100644
index 0000000..9125168
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/FsInput.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FSDataInputStream;
+
+import org.apache.avro.file.SeekableInput;
+
+/** Adapt an {@link FSDataInputStream} to {@link SeekableInput}. */
+public class FsInput implements Closeable, SeekableInput {
+  private final FSDataInputStream stream;
+  private final long len;
+
+  /** Construct given a path and a configuration. */
+  public FsInput(Path path, Configuration conf) throws IOException {
+    this(path, path.getFileSystem(conf));
+  }
+
+  /** Construct given a path and a {@code FileSystem}. */
+  public FsInput(Path path, FileSystem fileSystem) throws IOException {
+    this.len = fileSystem.getFileStatus(path).getLen();
+    this.stream = fileSystem.open(path);
+  }
+
+  @Override
+  public long length() {
+    return len;
+  }
+
+  @Override
+  public int read(byte[] b, int off, int len) throws IOException {
+    return stream.read(b, off, len);
+  }
+
+  @Override
+  public void seek(long p) throws IOException {
+    stream.seek(p);
+  }
+
+  @Override
+  public long tell() throws IOException {
+    return stream.getPos();
+  }
+
+  @Override
+  public void close() throws IOException {
+    stream.close();
+  }
+}
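
This adapter is what lets the Avro data file reader operate on files in
HDFS; a minimal sketch, assuming a Configuration named conf and a
hypothetical path:

    // Sketch: iterate the records of an Avro data file stored in HDFS
    // by adapting the filesystem stream through FsInput.
    SeekableInput input = new FsInput(new Path("/data/events.avro"), conf);
    DatumReader<GenericRecord> datumReader =
        new GenericDatumReader<GenericRecord>();
    DataFileReader<GenericRecord> fileReader =
        new DataFileReader<GenericRecord>(input, datumReader);
    try {
      for (GenericRecord record : fileReader)
        System.out.println(record);
    } finally {
      fileReader.close();
    }
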
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopCombiner.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopCombiner.java
new file mode 100644
index 0000000..5f914fb
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopCombiner.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/** Bridge between a {@link org.apache.hadoop.mapred.Reducer} and an {@link
+ * AvroReducer} used when combining.  When combining, map output pairs must be
+ * split before they're collected. */
+class HadoopCombiner<K,V>
+  extends HadoopReducerBase<K,V,Pair<K,V>,AvroKey<K>,AvroValue<V>> {
+
+  @Override @SuppressWarnings("unchecked")
+  protected AvroReducer<K,V,Pair<K,V>> getReducer(JobConf conf) {
+    return ReflectionUtils.newInstance
+      (conf.getClass(AvroJob.COMBINER, AvroReducer.class, AvroReducer.class),
+       conf);
+  }
+
+  private class PairCollector extends AvroCollector<Pair<K,V>> {
+    private final AvroKey<K> keyWrapper = new AvroKey<K>(null);
+    private final AvroValue<V> valueWrapper = new AvroValue<V>(null);
+    private OutputCollector<AvroKey<K>,AvroValue<V>> collector;
+  
+    public PairCollector(OutputCollector<AvroKey<K>,AvroValue<V>> collector) {
+      this.collector = collector;
+    }
+
+    public void collect(Pair<K,V> datum) throws IOException {
+      keyWrapper.datum(datum.key());              // split the Pair
+      valueWrapper.datum(datum.value());
+      collector.collect(keyWrapper, valueWrapper);
+    }
+  }
+
+  @Override
+  protected AvroCollector<Pair<K,V>>
+    getCollector(OutputCollector<AvroKey<K>,AvroValue<V>> collector) {
+    return new PairCollector(collector);
+  }
+
+}
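
Because combiner output must be re-split into key/value pairs just like
map output, an AvroReducer whose output type is the Pair<K,V> of its
inputs can serve as both reducer and combiner; continuing the
hypothetical WordCountReducer sketch from earlier:

    // Sketch: reuse the same summing reducer as the combiner.
    AvroJob.setReducerClass(job, WordCountReducer.class);
    AvroJob.setCombinerClass(job, WordCountReducer.class);
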
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopMapper.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopMapper.java
new file mode 100644
index 0000000..35f11d6
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopMapper.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/** Bridge between a {@link org.apache.hadoop.mapred.Mapper} and an {@link
+ * AvroMapper}.  Outputs are written directly when a job is map-only, but are
+ * otherwise assumed to be pairs that are split. */
+class HadoopMapper<IN,OUT,K,V,KO,VO> extends MapReduceBase
+  implements Mapper<AvroWrapper<IN>, NullWritable, KO, VO> {
+    
+  private AvroMapper<IN,OUT> mapper;
+  private MapCollector<OUT,K,V,KO,VO> out;
+  private boolean isMapOnly;
+
+  @Override @SuppressWarnings("unchecked")
+  public void configure(JobConf conf) {
+    this.mapper =
+      ReflectionUtils.newInstance
+      (conf.getClass(AvroJob.MAPPER, AvroMapper.class, AvroMapper.class),
+       conf);
+    this.isMapOnly = conf.getNumReduceTasks() == 0;
+  }
+
+  @Override
+  public void map(AvroWrapper<IN> wrapper, NullWritable value, 
+                  OutputCollector<KO,VO> collector, 
+                  Reporter reporter) throws IOException {
+    if (this.out == null)
+      this.out = new MapCollector<OUT,K,V,KO,VO>(collector, isMapOnly);
+    mapper.map(wrapper.datum(), out, reporter);
+  }
+
+  @Override
+  public void close() throws IOException {
+    this.mapper.close();
+  }
+
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducer.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducer.java
new file mode 100644
index 0000000..2d78252
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducer.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/** Bridge between a {@link org.apache.hadoop.mapred.Reducer} and an {@link
+ * AvroReducer}. */
+class HadoopReducer<K,V,OUT>
+  extends HadoopReducerBase<K,V, OUT, AvroWrapper<OUT>, NullWritable> {
+
+  @Override @SuppressWarnings("unchecked")
+  protected AvroReducer<K,V,OUT> getReducer(JobConf conf) {
+    return ReflectionUtils.newInstance
+      (conf.getClass(AvroJob.REDUCER, AvroReducer.class, AvroReducer.class),
+       conf);
+  }
+
+  private class ReduceCollector extends AvroCollector<OUT> {
+    private final AvroWrapper<OUT> wrapper = new AvroWrapper<OUT>(null);
+    private OutputCollector<AvroWrapper<OUT>, NullWritable> out;
+
+    public ReduceCollector(OutputCollector<AvroWrapper<OUT>,NullWritable> out) {
+      this.out = out;
+    }
+
+    public void collect(OUT datum) throws IOException {
+      wrapper.datum(datum);
+      out.collect(wrapper, NullWritable.get());
+    }
+  }
+
+  @Override
+  protected AvroCollector<OUT>
+    getCollector(OutputCollector<AvroWrapper<OUT>, NullWritable> collector) {
+    return new ReduceCollector(collector);
+  }
+
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducerBase.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducerBase.java
new file mode 100644
index 0000000..6874969
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/HadoopReducerBase.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.Reducer;
+
+abstract class HadoopReducerBase<K,V,OUT,KO,VO> extends MapReduceBase
+  implements Reducer<AvroKey<K>, AvroValue<V>, KO, VO> {
+  
+  private AvroReducer<K,V,OUT> reducer;
+  private AvroCollector<OUT> collector;
+  
+  protected abstract AvroReducer<K,V,OUT> getReducer(JobConf conf);
+  protected abstract AvroCollector<OUT> getCollector(OutputCollector<KO,VO> c);
+
+  @Override
+  public void configure(JobConf conf) {
+    this.reducer = getReducer(conf);
+  }
+
+  class ReduceIterable implements Iterable<V>, Iterator<V> {
+    private Iterator<AvroValue<V>> values;
+    public boolean hasNext() { return values.hasNext(); }
+    public V next() { return values.next().datum(); }
+    public void remove() { throw new UnsupportedOperationException(); }
+    public Iterator<V> iterator() { return this; }
+  }
+  private ReduceIterable reduceIterable = new ReduceIterable();
+
+  @Override
+  public final void reduce(AvroKey<K> key, Iterator<AvroValue<V>> values,
+                           OutputCollector<KO, VO> out, 
+                           Reporter reporter) throws IOException {
+    if (this.collector == null) 
+      this.collector = getCollector(out);
+    reduceIterable.values = values;
+    reducer.reduce(key.datum(), reduceIterable, collector, reporter);
+  }
+
+  @Override
+  public void close() throws IOException {
+    this.reducer.close();
+  }
+}
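
ReduceIterable returns itself from iterator(), so the values passed to
AvroReducer.reduce() can be traversed only once per call; reducers must
aggregate in a single pass, as in this hypothetical sketch:

    // Hypothetical example: accumulate sum and count in one traversal
    // rather than iterating the values twice.
    public class AveragingReducer
        extends AvroReducer<Utf8, Long, Pair<Utf8, Double>> {
      @Override
      public void reduce(Utf8 key, Iterable<Long> values,
                         AvroCollector<Pair<Utf8, Double>> collector,
                         Reporter reporter) throws IOException {
        long sum = 0, n = 0;
        for (long v : values) { sum += v; n++; }  // single pass
        collector.collect(
            new Pair<Utf8, Double>(key, n == 0 ? 0.0 : (double) sum / n));
      }
    }
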
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/MapCollector.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/MapCollector.java
new file mode 100644
index 0000000..3a07574
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/MapCollector.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.OutputCollector;
+
+@SuppressWarnings("unchecked")
+class MapCollector<OUT,K,V,KO,VO> extends AvroCollector<OUT> {
+  private final AvroWrapper<OUT> wrapper = new AvroWrapper<OUT>(null);
+  private final AvroKey<K> keyWrapper = new AvroKey<K>(null);
+  private final AvroValue<V> valueWrapper = new AvroValue<V>(null);
+  private OutputCollector<KO,VO> collector;
+  private boolean isMapOnly;
+
+  public MapCollector(OutputCollector<KO,VO> collector, boolean isMapOnly) {
+    this.collector = collector;
+    this.isMapOnly = isMapOnly;
+  }
+
+  public void collect(OUT datum) throws IOException {
+    if (isMapOnly) {
+      wrapper.datum(datum);
+      collector.collect((KO)wrapper, (VO)NullWritable.get());
+    } else {
+      // split a pair
+      Pair<K,V> pair = (Pair<K,V>)datum;
+      keyWrapper.datum(pair.key());
+      valueWrapper.datum(pair.value());
+      collector.collect((KO)keyWrapper, (VO)valueWrapper);
+    }
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/Pair.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/Pair.java
new file mode 100644
index 0000000..010b08d
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/Pair.java
@@ -0,0 +1,533 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.WeakHashMap;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.Schema;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericContainer;
+import org.apache.avro.generic.IndexedRecord;
+import org.apache.avro.specific.SpecificData.SchemaConstructable;
+import org.apache.avro.reflect.ReflectData;
+
+/** A key/value pair. */
+public class Pair<K,V>
+  implements IndexedRecord, Comparable<Pair>, SchemaConstructable {
+
+  private static final String PAIR = Pair.class.getName();
+  private static final String KEY = "key";
+  private static final String VALUE = "value";
+
+  private Schema schema;
+  private K key;
+  private V value;
+
+  public Pair(Schema schema) {
+    checkIsPairSchema(schema);
+    this.schema = schema;
+  }
+
+  public Pair(K key, Schema keySchema, V value, Schema valueSchema) {
+    this.schema = getPairSchema(keySchema, valueSchema);
+    this.key = key;
+    this.value = value;
+  }
+
+  private static void checkIsPairSchema(Schema schema) {
+    if (!PAIR.equals(schema.getFullName()))
+      throw new IllegalArgumentException("Not a Pair schema: "+schema);
+  }
+
+  /** Return a pair's key schema. */
+  public static Schema getKeySchema(Schema pair) {
+    checkIsPairSchema(pair);
+    return pair.getField(KEY).schema();
+  }
+
+  /** Return a pair's value schema. */
+  public static Schema getValueSchema(Schema pair) {
+    checkIsPairSchema(pair);
+    return pair.getField(VALUE).schema();
+  }
+
+  private static final Map<Schema,Map<Schema,Schema>> SCHEMA_CACHE = 
+    new WeakHashMap<Schema,Map<Schema,Schema>>();
+
+  /** Get a pair schema. */
+  public static Schema getPairSchema(Schema key, Schema value) {
+    Map<Schema,Schema> valueSchemas;
+    synchronized (SCHEMA_CACHE) {
+      valueSchemas = SCHEMA_CACHE.get(key);
+      if (valueSchemas == null) {
+        valueSchemas = new WeakHashMap<Schema,Schema>();
+        SCHEMA_CACHE.put(key, valueSchemas);
+      }
+      Schema result;
+      result = valueSchemas.get(value);
+      if (result == null) {
+        result = makePairSchema(key, value);
+        valueSchemas.put(value, result);
+      }
+      return result;
+    }
+  }
+
+  private static Schema makePairSchema(Schema key, Schema value) {
+    Schema pair = Schema.createRecord(PAIR, null, null, false);
+    List<Field> fields = new ArrayList<Field>();
+    fields.add(new Field(KEY, key, "", null));
+    fields.add(new Field(VALUE, value, "", null, Field.Order.IGNORE));
+    pair.setFields(fields);
+    return pair;
+  }
+
+  @Override public Schema getSchema() { return schema; }
+
+  /** Get the key. */
+  public K key() { return key; }
+  /** Set the key. */
+  public void key(K key) { this.key = key; }
+
+  /** Get the value. */
+  public V value() { return value; }
+  /** Set the value. */
+  public void value(V value) { this.value = value; }
+
+  /** Set both the key and value. */
+  public void set(K key, V value) { this.key = key; this.value = value; }
+
+  @Override public boolean equals(Object o) {
+    if (o == this) return true;                 // identical object
+    if (!(o instanceof Pair)) return false;     // not a pair
+    Pair that = (Pair)o;
+    if (!this.schema.equals(that.schema))
+      return false;                             // not the same schema
+    return this.compareTo(that) == 0;
+  }
+  @Override public int hashCode() {
+    return GenericData.get().hashCode(this, schema);
+  }
+  @Override public int compareTo(Pair that) {
+    return GenericData.get().compare(this, that, schema);
+  }
+  @Override public String toString() {
+    return GenericData.get().toString(this);
+  }
+
+  @Override
+  public Object get(int i) {
+    switch (i) {
+    case 0: return key;
+    case 1: return value;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index: "+i);
+    } 
+  }
+
+  @Override @SuppressWarnings("unchecked")
+  public void put(int i, Object o) {
+    switch (i) {
+    case 0: this.key = (K)o;    break;
+    case 1: this.value = (V)o;  break;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index: "+i);
+    } 
+  }
+
+  private static final Schema STRING_SCHEMA = Schema.create(Type.STRING);
+  private static final Schema BYTES_SCHEMA = Schema.create(Type.BYTES);
+  private static final Schema INT_SCHEMA = Schema.create(Type.INT);
+  private static final Schema LONG_SCHEMA = Schema.create(Type.LONG);
+  private static final Schema FLOAT_SCHEMA = Schema.create(Type.FLOAT);
+  private static final Schema DOUBLE_SCHEMA = Schema.create(Type.DOUBLE);
+  private static final Schema NULL_SCHEMA = Schema.create(Type.NULL);
+
+  @SuppressWarnings("unchecked")
+  public Pair(Object key, Object value) {
+    this((K)key, getSchema(key), (V)value, getSchema(value));
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Object key, GenericContainer value) {
+    this((K)key, getSchema(key), (V)value, value.getSchema());
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Object key, CharSequence value) {
+    this((K)key, getSchema(key), (V)value, STRING_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Object key, ByteBuffer value) {
+    this((K)key, getSchema(key), (V)value, BYTES_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Object key, Integer value) {
+    this((K)key, getSchema(key), (V)value, INT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Object key, Long value) {
+    this((K)key, getSchema(key), (V)value, LONG_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Object key, Float value) {
+    this((K)key, getSchema(key), (V)value, FLOAT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Object key, Double value) {
+    this((K)key, getSchema(key), (V)value, DOUBLE_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Object key, Void value) {
+    this((K)key, getSchema(key), (V)value, NULL_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(GenericContainer key, Object value) {
+    this((K)key, key.getSchema(), (V)value, getSchema(value));
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(GenericContainer key, GenericContainer value) {
+    this((K)key, key.getSchema(), (V)value, value.getSchema());
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(GenericContainer key, CharSequence value) {
+    this((K)key, key.getSchema(), (V)value, STRING_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(GenericContainer key, ByteBuffer value) {
+    this((K)key, key.getSchema(), (V)value, BYTES_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(GenericContainer key, Integer value) {
+    this((K)key, key.getSchema(), (V)value, INT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(GenericContainer key, Long value) {
+    this((K)key, key.getSchema(), (V)value, LONG_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(GenericContainer key, Float value) {
+    this((K)key, key.getSchema(), (V)value, FLOAT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(GenericContainer key, Double value) {
+    this((K)key, key.getSchema(), (V)value, DOUBLE_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(GenericContainer key, Void value) {
+    this((K)key, key.getSchema(), (V)value, NULL_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(CharSequence key, Object value) {
+    this((K)key, STRING_SCHEMA, (V)value, getSchema(value));
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(CharSequence key, GenericContainer value) {
+    this((K)key, STRING_SCHEMA, (V)value, value.getSchema());
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(CharSequence key, CharSequence value) {
+    this((K)key, STRING_SCHEMA, (V)value, STRING_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(CharSequence key, ByteBuffer value) {
+    this((K)key, STRING_SCHEMA, (V)value, BYTES_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(CharSequence key, Integer value) {
+    this((K)key, STRING_SCHEMA, (V)value, INT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(CharSequence key, Long value) {
+    this((K)key, STRING_SCHEMA, (V)value, LONG_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(CharSequence key, Float value) {
+    this((K)key, STRING_SCHEMA, (V)value, FLOAT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(CharSequence key, Double value) {
+    this((K)key, STRING_SCHEMA, (V)value, DOUBLE_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(CharSequence key, Void value) {
+    this((K)key, STRING_SCHEMA, (V)value, NULL_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(ByteBuffer key, Object value) {
+    this((K)key, BYTES_SCHEMA, (V)value, getSchema(value));
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(ByteBuffer key, GenericContainer value) {
+    this((K)key, BYTES_SCHEMA, (V)value, value.getSchema());
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(ByteBuffer key, CharSequence value) {
+    this((K)key, BYTES_SCHEMA, (V)value, STRING_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(ByteBuffer key, ByteBuffer value) {
+    this((K)key, BYTES_SCHEMA, (V)value, BYTES_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(ByteBuffer key, Integer value) {
+    this((K)key, BYTES_SCHEMA, (V)value, INT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(ByteBuffer key, Long value) {
+    this((K)key, BYTES_SCHEMA, (V)value, LONG_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(ByteBuffer key, Float value) {
+    this((K)key, BYTES_SCHEMA, (V)value, FLOAT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(ByteBuffer key, Double value) {
+    this((K)key, BYTES_SCHEMA, (V)value, DOUBLE_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(ByteBuffer key, Void value) {
+    this((K)key, BYTES_SCHEMA, (V)value, NULL_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Integer key, Object value) {
+    this((K)key, INT_SCHEMA, (V)value, getSchema(value));
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Integer key, GenericContainer value) {
+    this((K)key, INT_SCHEMA, (V)value, value.getSchema());
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Integer key, CharSequence value) {
+    this((K)key, INT_SCHEMA, (V)value, STRING_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Integer key, ByteBuffer value) {
+    this((K)key, INT_SCHEMA, (V)value, BYTES_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Integer key, Integer value) {
+    this((K)key, INT_SCHEMA, (V)value, INT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Integer key, Long value) {
+    this((K)key, INT_SCHEMA, (V)value, LONG_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Integer key, Float value) {
+    this((K)key, INT_SCHEMA, (V)value, FLOAT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Integer key, Double value) {
+    this((K)key, INT_SCHEMA, (V)value, DOUBLE_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Integer key, Void value) {
+    this((K)key, INT_SCHEMA, (V)value, NULL_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Long key, Object value) {
+    this((K)key, LONG_SCHEMA, (V)value, getSchema(value));
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Long key, GenericContainer value) {
+    this((K)key, LONG_SCHEMA, (V)value, value.getSchema());
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Long key, CharSequence value) {
+    this((K)key, LONG_SCHEMA, (V)value, STRING_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Long key, ByteBuffer value) {
+    this((K)key, LONG_SCHEMA, (V)value, BYTES_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Long key, Integer value) {
+    this((K)key, LONG_SCHEMA, (V)value, INT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Long key, Long value) {
+    this((K)key, LONG_SCHEMA, (V)value, LONG_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Long key, Float value) {
+    this((K)key, LONG_SCHEMA, (V)value, FLOAT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Long key, Double value) {
+    this((K)key, LONG_SCHEMA, (V)value, DOUBLE_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Long key, Void value) {
+    this((K)key, LONG_SCHEMA, (V)value, NULL_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Float key, Object value) {
+    this((K)key, FLOAT_SCHEMA, (V)value, getSchema(value));
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Float key, GenericContainer value) {
+    this((K)key, FLOAT_SCHEMA, (V)value, value.getSchema());
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Float key, CharSequence value) {
+    this((K)key, FLOAT_SCHEMA, (V)value, STRING_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Float key, ByteBuffer value) {
+    this((K)key, FLOAT_SCHEMA, (V)value, BYTES_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Float key, Integer value) {
+    this((K)key, FLOAT_SCHEMA, (V)value, INT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Float key, Long value) {
+    this((K)key, FLOAT_SCHEMA, (V)value, LONG_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Float key, Float value) {
+    this((K)key, FLOAT_SCHEMA, (V)value, FLOAT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Float key, Double value) {
+    this((K)key, FLOAT_SCHEMA, (V)value, DOUBLE_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Float key, Void value) {
+    this((K)key, FLOAT_SCHEMA, (V)value, NULL_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Double key, Object value) {
+    this((K)key, DOUBLE_SCHEMA, (V)value, getSchema(value));
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Double key, GenericContainer value) {
+    this((K)key, DOUBLE_SCHEMA, (V)value, value.getSchema());
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Double key, CharSequence value) {
+    this((K)key, DOUBLE_SCHEMA, (V)value, STRING_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Double key, ByteBuffer value) {
+    this((K)key, DOUBLE_SCHEMA, (V)value, BYTES_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Double key, Integer value) {
+    this((K)key, DOUBLE_SCHEMA, (V)value, INT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Double key, Long value) {
+    this((K)key, DOUBLE_SCHEMA, (V)value, LONG_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Double key, Float value) {
+    this((K)key, DOUBLE_SCHEMA, (V)value, FLOAT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Double key, Double value) {
+    this((K)key, DOUBLE_SCHEMA, (V)value, DOUBLE_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Double key, Void value) {
+    this((K)key, DOUBLE_SCHEMA, (V)value, NULL_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Void key, Object value) {
+    this((K)key, NULL_SCHEMA, (V)value, getSchema(value));
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Void key, GenericContainer value) {
+    this((K)key, NULL_SCHEMA, (V)value, value.getSchema());
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Void key, CharSequence value) {
+    this((K)key, NULL_SCHEMA, (V)value, STRING_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Void key, ByteBuffer value) {
+    this((K)key, NULL_SCHEMA, (V)value, BYTES_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Void key, Integer value) {
+    this((K)key, NULL_SCHEMA, (V)value, INT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Void key, Long value) {
+    this((K)key, NULL_SCHEMA, (V)value, LONG_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Void key, Float value) {
+    this((K)key, NULL_SCHEMA, (V)value, FLOAT_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Void key, Double value) {
+    this((K)key, NULL_SCHEMA, (V)value, DOUBLE_SCHEMA);
+  }
+  @SuppressWarnings("unchecked")
+  public Pair(Void key, Void value) {
+    this((K)key, NULL_SCHEMA, (V)value, NULL_SCHEMA);
+  }
+
+  private static Schema getSchema(Object o) {
+    try {
+      return ReflectData.get().getSchema(o.getClass());
+    } catch (AvroRuntimeException e) {
+      throw new AvroRuntimeException
+        ("Cannot infer schema for: " + o.getClass()
+         + ". Must create Pair with explicit key and value schemas.", e);
+    }
+  }
+
+  // private static final String[][] TABLE = new String[][] {
+  //   {"Object", "getSchema({0})"},
+  //   {"GenericContainer", "{0}.getSchema()"},
+  //   {"CharSequence", "STRING_SCHEMA"},
+  //   {"ByteBuffer", "BYTES_SCHEMA"},
+  //   {"Integer", "INT_SCHEMA"},
+  //   {"Long", "LONG_SCHEMA"},
+  //   {"Float", "FLOAT_SCHEMA"},
+  //   {"Double", "DOUBLE_SCHEMA"},
+  //   {"Void", "NULL_SCHEMA"},
+  // };
+  
+  // private static String f(String pattern, String value) {
+  //   return java.text.MessageFormat.format(pattern, value);
+  // }
+  
+  // public static void main(String... args) throws Exception {
+  //   StringBuffer b = new StringBuffer();
+  //   for (String[] k : TABLE) {
+  //     for (String[] v : TABLE) {
+  //       b.append("@SuppressWarnings(\"unchecked\")\n");
+  //       b.append("public Pair("+k[0]+" key, "+v[0]+" value) {\n");
+  //       b.append("  this((K)key, "+f(k[1],"key")
+  //                +", (V)value, "+f(v[1],"value")+");\n");
+  //       b.append("}\n");
+  //     }
+  //   }
+  //   System.out.println(b);
+  // }
+
+
+}
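
(Aside: a minimal sketch of how these generated convenience constructors are
typically used; not part of the patch. It assumes avro-mapred on the classpath
and that Pair exposes a getSchema() accessor, as the reader code later in this
patch suggests.)

    // Overload resolution picks Pair(CharSequence, Long), so STRING_SCHEMA and
    // LONG_SCHEMA are selected without being passed explicitly.
    Pair<CharSequence,Long> p = new Pair<CharSequence,Long>("count", 1L);
    System.out.println(p.getSchema());  // pair schema: string key, long value
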
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileInputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileInputFormat.java
new file mode 100644
index 0000000..f3bf5b8
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileInputFormat.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.RecordReader;
+
+/** An {@link org.apache.hadoop.mapred.InputFormat} for sequence files. */
+public class SequenceFileInputFormat<K,V>
+  extends FileInputFormat<AvroWrapper<Pair<K,V>>, NullWritable> {
+
+  @Override
+  public RecordReader<AvroWrapper<Pair<K,V>>, NullWritable>
+      getRecordReader(InputSplit split, JobConf job, Reporter reporter)
+    throws IOException {
+    reporter.setStatus(split.toString());
+    return new SequenceFileRecordReader<K,V>(job, (FileSplit)split);
+  }
+
+}
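
(Aside: a fragment sketching how this input format is wired into a job; not
part of the patch. The input path and the key/value schemas are assumptions,
and the usual imports are omitted.)

    JobConf job = new JobConf();
    job.setInputFormat(SequenceFileInputFormat.class);
    FileInputFormat.setInputPaths(job, new Path("/data/input"));   // hypothetical path
    // Declare the pair schema matching the sequence file's Writable types.
    AvroJob.setInputSchema(job,
        Pair.getPairSchema(Schema.create(Schema.Type.LONG),        // assumed key type
                           Schema.create(Schema.Type.STRING)));    // assumed value type
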
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileReader.java
new file mode 100644
index 0000000..83c9de1
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileReader.java
@@ -0,0 +1,247 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.NoSuchElementException;
+import java.net.URI;
+import java.lang.reflect.Type;
+
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+
+import org.apache.avro.Schema;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.reflect.ReflectData;
+
+/** A {@link FileReader} for sequence files. */
+@SuppressWarnings(value="unchecked")
+public class SequenceFileReader<K,V> implements FileReader<Pair<K,V>> {
+  private SequenceFile.Reader reader;
+  private Schema schema;
+  private boolean ready = false;            // true iff done & key are current
+  private boolean done = false;             // true iff at EOF
+  private Writable key, spareKey, value;
+
+  private Converter<K> keyConverter =
+    new Converter<K>() { public K convert(Writable o) { return (K)o; } };
+
+  private Converter<V> valConverter =
+    new Converter<V>() { public V convert(Writable o) { return (V)o; } };
+
+  public SequenceFileReader(File file) throws IOException {
+    this(file.toURI(), new Configuration());
+  }
+
+  public SequenceFileReader(URI uri, Configuration c) throws IOException {
+    this(new SequenceFile.Reader(FileSystem.get(uri, c),
+                                 new Path(uri.toString()), c), c);
+  }
+
+  public SequenceFileReader(SequenceFile.Reader reader, Configuration conf) {
+    this.reader = reader;
+    this.schema =
+      Pair.getPairSchema(WritableData.get().getSchema(reader.getKeyClass()),
+                         WritableData.get().getSchema(reader.getValueClass()));
+    this.key =
+      (Writable)ReflectionUtils.newInstance(reader.getKeyClass(), conf);
+    this.spareKey =
+      (Writable)ReflectionUtils.newInstance(reader.getKeyClass(), conf);
+    this.value =
+      (Writable)ReflectionUtils.newInstance(reader.getValueClass(), conf);
+
+    if (WRITABLE_CONVERTERS.containsKey(reader.getKeyClass()))
+      keyConverter = WRITABLE_CONVERTERS.get(reader.getKeyClass());
+    if (WRITABLE_CONVERTERS.containsKey(reader.getValueClass()))
+      valConverter = WRITABLE_CONVERTERS.get(reader.getValueClass());
+  }
+
+  @Override public void close() throws IOException { reader.close(); }
+
+  @Override public void remove() { throw new UnsupportedOperationException(); }
+
+  @Override public Iterator<Pair<K,V>> iterator() { return this; }
+
+  @Override public Schema getSchema() { return schema; }
+
+  private void prepare() throws IOException {
+    if (ready) return;
+    this.done = !reader.next(key);
+    ready = true;
+  }
+
+  @Override public boolean hasNext() {
+    try {
+      prepare();
+      return !done;
+    } catch (IOException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  @Override public Pair<K,V> next() {
+    try {
+      return next(null);
+    } catch (IOException e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  @Override public Pair<K,V> next(Pair<K,V> reuse) throws IOException {
+    prepare();
+    if (!hasNext())
+      throw new NoSuchElementException();
+
+    Pair<K,V> result = reuse;
+    if (result == null)
+      result = new Pair<K,V>(schema);
+
+    result.key(keyConverter.convert(key));
+    reader.getCurrentValue(value);
+    result.value(valConverter.convert(value));
+
+    // swap key and spareKey
+    Writable k = key;
+    key = spareKey;
+    spareKey = k;
+
+    ready = false;
+
+    return result;
+  }
+
+  @Override public void sync(long position) throws IOException {
+    if (position > reader.getPosition())
+      reader.sync(position);
+    ready = false;
+  }
+  
+  @Override public boolean pastSync(long position) throws IOException {
+    return reader.getPosition() >= position && reader.syncSeen();
+  }
+
+  @Override public long tell() throws IOException { return reader.getPosition(); }
+
+  private static final Map<Type,Schema> WRITABLE_SCHEMAS =
+    new HashMap<Type,Schema>();
+  static {
+    WRITABLE_SCHEMAS.put(NullWritable.class,
+                         Schema.create(Schema.Type.NULL));
+    WRITABLE_SCHEMAS.put(BooleanWritable.class,
+                         Schema.create(Schema.Type.BOOLEAN));
+    WRITABLE_SCHEMAS.put(IntWritable.class,
+                         Schema.create(Schema.Type.INT));
+    WRITABLE_SCHEMAS.put(LongWritable.class,
+                         Schema.create(Schema.Type.LONG));
+    WRITABLE_SCHEMAS.put(FloatWritable.class,
+                         Schema.create(Schema.Type.FLOAT));
+    WRITABLE_SCHEMAS.put(DoubleWritable.class,
+                         Schema.create(Schema.Type.DOUBLE));
+    WRITABLE_SCHEMAS.put(BytesWritable.class,
+                         Schema.create(Schema.Type.BYTES));
+    WRITABLE_SCHEMAS.put(Text.class,
+                         Schema.create(Schema.Type.STRING));
+  }
+
+  private static class WritableData extends ReflectData {
+    private static final WritableData INSTANCE = new WritableData();
+    protected WritableData() {}
+    
+    /** Return the singleton instance. */
+    public static WritableData get() { return INSTANCE; }
+
+    @Override public Schema getSchema(java.lang.reflect.Type type) {
+      if (WRITABLE_SCHEMAS.containsKey(type))
+        return WRITABLE_SCHEMAS.get(type);
+      else
+        return super.getSchema(type);
+    }
+  }
+
+  private interface Converter<T> {
+    T convert(Writable o);
+  }
+  
+  private static final Map<Type,Converter> WRITABLE_CONVERTERS =
+    new HashMap<Type,Converter>();
+  static {
+    WRITABLE_CONVERTERS.put
+      (NullWritable.class,
+       new Converter<Void>() {
+        public Void convert(Writable o) { return null; }
+      });
+    WRITABLE_CONVERTERS.put
+      (BooleanWritable.class,
+       new Converter<Boolean>() {
+        public Boolean convert(Writable o) {return ((BooleanWritable)o).get();}
+      });
+    WRITABLE_CONVERTERS.put
+      (IntWritable.class,
+       new Converter<Integer>() {
+        public Integer convert(Writable o) { return ((IntWritable)o).get(); }
+      });
+    WRITABLE_CONVERTERS.put
+      (LongWritable.class,
+       new Converter<Long>() {
+        public Long convert(Writable o) { return ((LongWritable)o).get(); }
+      });
+    WRITABLE_CONVERTERS.put
+      (FloatWritable.class,
+       new Converter<Float>() {
+        public Float convert(Writable o) { return ((FloatWritable)o).get(); }
+      });
+    WRITABLE_CONVERTERS.put
+      (DoubleWritable.class,
+       new Converter<Double>() {
+        public Double convert(Writable o) { return ((DoubleWritable)o).get(); }
+      });
+    WRITABLE_CONVERTERS.put
+      (BytesWritable.class,
+       new Converter<ByteBuffer>() {
+        public ByteBuffer convert(Writable o) {
+          BytesWritable b = (BytesWritable)o;
+          return ByteBuffer.wrap(b.getBytes(), 0, b.getLength());
+        }
+      });
+    WRITABLE_CONVERTERS.put
+      (Text.class,
+       new Converter<String>() {
+        public String convert(Writable o) { return o.toString(); }
+      });
+  }
+
+
+}
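
(Aside: a sketch of reading a sequence file directly through this reader; not
part of the patch. The file path is hypothetical and assumes LongWritable keys
and Text values, which the converter table above maps to Long and String.)

    import java.io.File;
    import org.apache.avro.mapred.Pair;
    import org.apache.avro.mapred.SequenceFileReader;

    public class ReadSeqFile {
      public static void main(String[] args) throws Exception {
        // Hypothetical local file with LongWritable keys and Text values.
        SequenceFileReader<Long,CharSequence> reader =
          new SequenceFileReader<Long,CharSequence>(new File("/tmp/data.seq"));
        try {
          System.out.println(reader.getSchema()); // pair schema from the Writable classes
          for (Pair<Long,CharSequence> pair : reader)
            System.out.println(pair.key() + "\t" + pair.value());
        } finally {
          reader.close();
        }
      }
    }
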
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileRecordReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileRecordReader.java
new file mode 100644
index 0000000..693f34e
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/SequenceFileRecordReader.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileSplit;
+
+/** A {@link org.apache.hadoop.mapred.RecordReader} for sequence files. */
+public class SequenceFileRecordReader<K,V> extends AvroRecordReader<Pair<K,V>> {
+
+  public SequenceFileRecordReader(JobConf job, FileSplit split)
+    throws IOException {
+    super(new SequenceFileReader<K,V>(split.getPath().toUri(), job),
+          split);
+  }
+  
+}
+
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/TaggedInputSplit.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/TaggedInputSplit.java
new file mode 100644
index 0000000..eb2422d
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/TaggedInputSplit.java
@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * An {@link InputSplit} that tags another InputSplit with extra data for use
+ * by {@link DelegatingInputFormat}s and {@link DelegatingMapper}s.
+ */
+class TaggedInputSplit implements Configurable, InputSplit {
+
+  private Class<? extends InputSplit> inputSplitClass;
+
+  private InputSplit inputSplit;
+
+  private Class<? extends InputFormat> inputFormatClass;
+
+  private Class<? extends AvroMapper> mapperClass;
+
+  private Schema schema;
+
+  private Schema.Parser schemaParser = new Schema.Parser();
+
+  private Configuration conf;
+
+  public TaggedInputSplit() {
+    // Default constructor.
+  }
+
+  /**
+   * Creates a new TaggedInputSplit.
+   *
+   * @param inputSplit The InputSplit to be tagged
+   * @param conf The configuration to use
+   * @param inputFormatClass The InputFormat class to use for this job
+   * @param mapperClass The Mapper class to use for this job
+   * @param inputSchema The schema to use for records read from this split
+   */
+  public TaggedInputSplit(InputSplit inputSplit, Configuration conf,
+      Class<? extends InputFormat> inputFormatClass,
+      Class<? extends AvroMapper> mapperClass,
+      Schema inputSchema) {
+    this.inputSplitClass = inputSplit.getClass();
+    this.inputSplit = inputSplit;
+    this.conf = conf;
+    this.inputFormatClass = inputFormatClass;
+    this.mapperClass = mapperClass;
+    this.schema = inputSchema;
+  }
+
+  /**
+   * Retrieves the original InputSplit.
+   *
+   * @return The InputSplit that was tagged
+   */
+  public InputSplit getInputSplit() {
+    return inputSplit;
+  }
+
+  /**
+   * Retrieves the InputFormat class to use for this split.
+   *
+   * @return The InputFormat class to use
+   */
+  public Class<? extends InputFormat> getInputFormatClass() {
+    return inputFormatClass;
+  }
+
+  /**
+   * Retrieves the Mapper class to use for this split.
+   *
+   * @return The Mapper class to use
+   */
+  public Class<? extends AvroMapper> getMapperClass() {
+    return mapperClass;
+  }
+
+  /**
+   * Retrieves the Schema to use for this split.
+   *
+   * @return The schema for record readers to use
+   */
+  public Schema getSchema() {
+    return schema;
+  }
+
+  public long getLength() throws IOException {
+    return inputSplit.getLength();
+  }
+
+  public String[] getLocations() throws IOException {
+    return inputSplit.getLocations();
+  }
+
+  @SuppressWarnings("unchecked")
+  public void readFields(DataInput in) throws IOException {
+    inputSplitClass = (Class<? extends InputSplit>) readClass(in);
+    inputSplit = (InputSplit) ReflectionUtils
+       .newInstance(inputSplitClass, conf);
+    inputSplit.readFields(in);
+    inputFormatClass = (Class<? extends InputFormat>) readClass(in);
+    mapperClass = (Class<? extends AvroMapper>) readClass(in);
+    String schemaString = Text.readString(in);
+    schema = schemaParser.parse(schemaString);
+  }
+
+  private Class<?> readClass(DataInput in) throws IOException {
+    String className = Text.readString(in);
+    try {
+      return conf.getClassByName(className);
+    } catch (ClassNotFoundException e) {
+      throw new RuntimeException("readObject can't find class", e);
+    }
+  }
+
+  public void write(DataOutput out) throws IOException {
+    Text.writeString(out, inputSplitClass.getName());
+    inputSplit.write(out);
+    Text.writeString(out, inputFormatClass.getName());
+    Text.writeString(out, mapperClass.getName());
+    Text.writeString(out, schema.toString());
+  }
+
+  public Configuration getConf() {
+    return conf;
+  }
+
+  public void setConf(Configuration conf) {
+    this.conf = conf;
+  }
+
+  @Override
+  public String toString() {
+    return inputSplit.toString();
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/package.html b/lang/java/mapred/src/main/java/org/apache/avro/mapred/package.html
new file mode 100644
index 0000000..d497f20
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/package.html
@@ -0,0 +1,123 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+Run <a href="http://hadoop.apache.org/">Hadoop</a> MapReduce jobs over
+Avro data, with map and reduce functions written in Java.
+
+<p>Avro data files do not contain key/value pairs as expected by
+  Hadoop's MapReduce API, but rather just a sequence of values.  This
+  package therefore provides a layer on top of Hadoop's MapReduce API.</p>
+
+<p>In all cases, input and output paths are set and jobs are submitted
+  as with standard Hadoop jobs:
+ <ul>
+   <li>Specify input files with {@link
+   org.apache.hadoop.mapred.FileInputFormat#setInputPaths}</li>
+   <li>Specify an output directory with {@link
+   org.apache.hadoop.mapred.FileOutputFormat#setOutputPath}</li>
+   <li>Run your job with {@link org.apache.hadoop.mapred.JobClient#runJob}</li>
+ </ul>
+</p>
+
+<p>For jobs whose input and output are Avro data files:
+ <ul>
+   <li>Call {@link org.apache.avro.mapred.AvroJob#setInputSchema} and
+   {@link org.apache.avro.mapred.AvroJob#setOutputSchema} with your
+   job's input and output schemas.</li>
+   <li>Subclass {@link org.apache.avro.mapred.AvroMapper} and specify
+   this as your job's mapper with {@link
+   org.apache.avro.mapred.AvroJob#setMapperClass}</li>
+   <li>Subclass {@link org.apache.avro.mapred.AvroReducer} and specify
+   this as your job's reducer and perhaps combiner, with {@link
+   org.apache.avro.mapred.AvroJob#setReducerClass} and {@link
+   org.apache.avro.mapred.AvroJob#setCombinerClass}</li>
+ </ul>
+</p>
+
+<p>For jobs whose input is an Avro data file and which use an {@link
+  org.apache.avro.mapred.AvroMapper}, but whose reducer is a non-Avro
+  {@link org.apache.hadoop.mapred.Reducer} and whose output is a
+  non-Avro format:
+ <ul>
+   <li>Call {@link org.apache.avro.mapred.AvroJob#setInputSchema} with your
+   job's input schema.</li>
+   <li>Subclass {@link org.apache.avro.mapred.AvroMapper} and specify
+   this as your job's mapper with {@link
+   org.apache.avro.mapred.AvroJob#setMapperClass}</li>
+   <li>Implement {@link org.apache.hadoop.mapred.Reducer} and specify
+   your job's reducer with {@link
+   org.apache.hadoop.mapred.JobConf#setReducerClass}.  The input key
+   and value types should be {@link org.apache.avro.mapred.AvroKey} and {@link
+   org.apache.avro.mapred.AvroValue}.</li>
+   <li>Optionally implement {@link org.apache.hadoop.mapred.Reducer} and
+   specify your job's combiner with {@link
+   org.apache.hadoop.mapred.JobConf#setCombinerClass}.  You will be unable to
+   re-use the same Reducer class as the Combiner, as the Combiner needs its
+   input and output keys to be {@link org.apache.avro.mapred.AvroKey}, and its
+   input and output values to be {@link org.apache.avro.mapred.AvroValue}.</li>
+   <li>Specify your job's output key and value types with {@link
+   org.apache.hadoop.mapred.JobConf#setOutputKeyClass} and {@link
+   org.apache.hadoop.mapred.JobConf#setOutputValueClass}.</li>
+   <li>Specify your job's output format with {@link
+   org.apache.hadoop.mapred.JobConf#setOutputFormat}.</li>
+ </ul>
+</p>
+
+<p>For jobs whose input is a non-Avro data file and which use a
+  non-Avro {@link org.apache.hadoop.mapred.Mapper}, but whose reducer
+  is an {@link org.apache.avro.mapred.AvroReducer} and whose output is
+  an Avro data file:
+ <ul>
+   <li>Set your input file format with {@link
+   org.apache.hadoop.mapred.JobConf#setInputFormat}.</li>
+   <li>Implement {@link org.apache.hadoop.mapred.Mapper} and specify
+   your job's mapper with {@link
+   org.apache.hadoop.mapred.JobConf#setMapperClass}.  The output key
+   and value types should be {@link org.apache.avro.mapred.AvroKey} and
+   {@link org.apache.avro.mapred.AvroValue}.</li>
+   <li>Subclass {@link org.apache.avro.mapred.AvroReducer} and specify
+   this as your job's reducer and perhaps combiner, with {@link
+   org.apache.avro.mapred.AvroJob#setReducerClass} and {@link
+   org.apache.avro.mapred.AvroJob#setCombinerClass}</li>
+   <li>Call {@link org.apache.avro.mapred.AvroJob#setOutputSchema} with your
+   job's output schema.</li>
+ </ul>
+</p>
+
+<p>For jobs whose input is a non-Avro data file and which use a
+  non-Avro {@link org.apache.hadoop.mapred.Mapper} and no reducer,
+  i.e., a <i>map-only</i> job:
+ <ul>
+   <li>Set your input file format with {@link
+   org.apache.hadoop.mapred.JobConf#setInputFormat}.</li>
+   <li>Implement {@link org.apache.hadoop.mapred.Mapper} and specify
+   your job's mapper with {@link
+   org.apache.hadoop.mapred.JobConf#setMapperClass}.  The output key
+   and value types should be {@link org.apache.avro.mapred.AvroWrapper} and
+   {@link org.apache.hadoop.io.NullWritable}.</li>
+   <li>Call {@link
+   org.apache.hadoop.mapred.JobConf#setNumReduceTasks(int)} with zero.</li>
+   <li>Call {@link org.apache.avro.mapred.AvroJob#setOutputSchema} with your
+   job's output schema.</li>
+ </ul>
+</p>
+
+</body>
+</html>
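
(Aside: a sketch pulling the Avro-in/Avro-out recipe above together; not part
of the patch. WordCountMapper, WordCountReducer, and the paths are assumptions.)

    import java.io.IOException;
    import org.apache.avro.Schema;
    import org.apache.avro.mapred.AvroJob;
    import org.apache.avro.mapred.Pair;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.FileInputFormat;
    import org.apache.hadoop.mapred.FileOutputFormat;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;

    public class AvroWordCountDriver {
      public static void main(String[] args) throws IOException {
        JobConf job = new JobConf();
        AvroJob.setInputSchema(job, Schema.create(Schema.Type.STRING));
        AvroJob.setOutputSchema(job,
            Pair.getPairSchema(Schema.create(Schema.Type.STRING),
                               Schema.create(Schema.Type.INT)));
        AvroJob.setMapperClass(job, WordCountMapper.class);   // extends AvroMapper (assumed)
        AvroJob.setReducerClass(job, WordCountReducer.class); // extends AvroReducer (assumed)
        FileInputFormat.setInputPaths(job, new Path("/data/in"));
        FileOutputFormat.setOutputPath(job, new Path("/data/out"));
        JobClient.runJob(job);
      }
    }
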
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherData.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherData.java
new file mode 100644
index 0000000..6365745
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherData.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.nio.ByteBuffer;
+
+/** A wrapper for a ByteBuffer containing binary-encoded data. */
+class TetherData {
+  private int count = 1;                          // only used for task input
+  private ByteBuffer buffer;
+
+  public TetherData() {}
+  public TetherData(ByteBuffer buffer) { this.buffer = buffer; }
+
+  /** Return the count of records in the buffer.  Used for task input only. */
+  public int count() { return count; }
+
+  /** Set the count of records in the buffer.  Used for task input only. */
+  public void count(int count) { this.count = count; }
+    
+  /** Return the buffer. */
+  public ByteBuffer buffer() { return buffer; }
+
+  /** Set the buffer. */
+  public void buffer(ByteBuffer buffer) { this.buffer = buffer; }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherInputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherInputFormat.java
new file mode 100644
index 0000000..de0ee26
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherInputFormat.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.RecordReader;
+
+import org.apache.avro.mapred.AvroInputFormat;
+import org.apache.avro.mapred.AvroOutputFormat;
+
+/**
+ * An {@link org.apache.hadoop.mapred.InputFormat} for tethered Avro input.
+ * 
+ * By default, when pointed at a directory, this will silently skip over any
+ * files in it that do not have the .avro extension. To instead include all files,
+ * set the avro.mapred.ignore.inputs.without.extension property to false.
+ */
+class TetherInputFormat
+  extends FileInputFormat<TetherData, NullWritable> {
+
+  @Override
+  protected FileStatus[] listStatus(JobConf job) throws IOException {
+    if (job.getBoolean(AvroInputFormat.IGNORE_FILES_WITHOUT_EXTENSION_KEY,
+        AvroInputFormat.IGNORE_INPUTS_WITHOUT_EXTENSION_DEFAULT)) {
+      List<FileStatus> result = new ArrayList<FileStatus>();
+      for (FileStatus file : super.listStatus(job))
+        if (file.getPath().getName().endsWith(AvroOutputFormat.EXT))
+          result.add(file);
+      return result.toArray(new FileStatus[0]);
+    } else {
+      return super.listStatus(job);
+    }
+  }
+
+  @Override
+  public RecordReader<TetherData, NullWritable>
+    getRecordReader(InputSplit split, JobConf job, Reporter reporter)
+    throws IOException {
+    reporter.setStatus(split.toString());
+    return new TetherRecordReader(job, (FileSplit)split);
+  }
+
+}
+
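
(Aside: a fragment sketching how to disable the extension filter described in
the class comment; not part of the patch. Here job is an existing JobConf, and
the constant is the one referenced in listStatus above.)

    // Include input files even when they lack the .avro extension.
    job.setBoolean(AvroInputFormat.IGNORE_FILES_WITHOUT_EXTENSION_KEY, false);
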
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherJob.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherJob.java
new file mode 100644
index 0000000..169699f
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherJob.java
@@ -0,0 +1,171 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.filecache.DistributedCache;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RunningJob;
+
+/** Constructs and submits tether jobs. This may be used as an API-based
+ *  method to launch tether jobs. */
+ at SuppressWarnings("deprecation")
+public class TetherJob extends Configured {
+
+  public static final String TETHER_EXEC="avro.tether.executable";
+  public static final String TETHER_EXEC_ARGS="avro.tether.executable_args";
+  public static final String TETHER_EXEC_CACHED="avro.tether.executable_cached";
+  public static final String TETHER_PROTOCOL="avro.tether.protocol";
+  
+  /** Get the URI of the application's executable. */
+  public static URI getExecutable(JobConf job) {
+    try {
+      return new URI(job.get(TETHER_EXEC));
+    } catch (URISyntaxException e) {
+      throw new RuntimeException(e);
+    }
+  }
+  
+  /** Set the URI for the application's executable. Normally this is in HDFS. */
+  public static void setExecutable(JobConf job, File executable) {
+    setExecutable(job,executable, new ArrayList<String>(),false);
+  }
+  
+  /**
+   * Set the URI for the application's executable (i.e., the program to run in a
+   * subprocess that provides the mapper/reducer).
+   * @param job - job configuration
+   * @param executable - the URI of the executable
+   * @param args - list of additional arguments; null if there are no arguments
+   * @param cached - if true, the executable URI is cached using DistributedCache;
+   *               if false it is not cached, e.g., because the file is already
+   *               stored on each local file system or on an NFS share
+   */
+  public static void setExecutable(JobConf job, File executable, List<String> args, boolean cached) {
+    job.set(TETHER_EXEC, executable.toString());
+    if (args != null) {
+      StringBuilder sb = new StringBuilder();
+      for (String a : args) {
+        sb.append(a);
+        sb.append('\n');
+      }
+      job.set(TETHER_EXEC_ARGS, sb.toString());
+    }
+    job.set(TETHER_EXEC_CACHED, Boolean.toString(cached));
+  }
+
+  /**
+   * Extract from the job configuration an instance of the
+   * {@link TetheredProcess.Protocol} enumeration representing the protocol
+   * to use for the communication.
+   * @param job - job configuration
+   * @return the protocol to use between the parent process and the subprocess
+   */
+  public static TetheredProcess.Protocol getProtocol(JobConf job) {
+
+    if (job.get(TetherJob.TETHER_PROTOCOL)==null) {
+      return TetheredProcess.Protocol.NONE;
+    } else if (job.get(TetherJob.TETHER_PROTOCOL).equals("http")) {
+      return TetheredProcess.Protocol.HTTP;
+    } else if (job.get(TetherJob.TETHER_PROTOCOL).equals("sasl")) {
+      return TetheredProcess.Protocol.SASL;
+    } else {
+      throw new RuntimeException("Unknown value for protocol: " +job.get(TetherJob.TETHER_PROTOCOL));
+    }
+
+  }
+
+  /** Submit a job to the map/reduce cluster. All of the necessary
+   * modifications to the job to run under tether are made to the
+   * configuration.
+   */
+  public static RunningJob runJob(JobConf job) throws IOException {
+    setupTetherJob(job);
+    return JobClient.runJob(job);
+  }
+
+  /** Submit a job to the Map-Reduce framework. */
+  public static RunningJob submitJob(JobConf conf) throws IOException {
+    setupTetherJob(conf);
+    return new JobClient(conf).submitJob(conf);
+  }
+  
+  /**
+   * Sets the transport protocol (e.g., http or sasl) used to communicate
+   * between the parent and the subprocess.
+   *
+   * @param job - job configuration
+   * @param proto - string identifying the protocol; currently "http" or "sasl"
+   */
+  public static void setProtocol(JobConf job, String proto) throws IOException {
+    proto = proto.trim().toLowerCase();
+
+    if (!(proto.equals("http") || proto.equals("sasl"))) {
+      throw new IOException("protocol must be 'http' or 'sasl'");
+    }
+
+    job.set(TETHER_PROTOCOL,proto);
+
+  }
+
+  private static void setupTetherJob(JobConf job) throws IOException {
+    job.setMapRunnerClass(TetherMapRunner.class);
+    job.setPartitionerClass(TetherPartitioner.class);
+    job.setReducerClass(TetherReducer.class);
+
+    job.setInputFormat(TetherInputFormat.class);
+    job.setOutputFormat(TetherOutputFormat.class);
+
+    job.setOutputKeyClass(TetherData.class);
+    job.setOutputKeyComparatorClass(TetherKeyComparator.class);
+    job.setMapOutputValueClass(NullWritable.class);
+
+    // set the map output key class to TetherData
+    job.setMapOutputKeyClass(TetherData.class);
+    
+    // if protocol isn't set
+    if (job.getStrings(TETHER_PROTOCOL)==null) {
+      job.set(TETHER_PROTOCOL, "sasl");
+    }
+
+    // add TetherKeySerialization to io.serializations
+    Collection<String> serializations =
+      job.getStringCollection("io.serializations");
+    if (!serializations.contains(TetherKeySerialization.class.getName())) {
+      serializations.add(TetherKeySerialization.class.getName());
+      job.setStrings("io.serializations",
+                     serializations.toArray(new String[0]));
+    }
+
+    // determine whether the executable should be added to the cache.
+    if (job.getBoolean(TETHER_EXEC_CACHED,false)){
+      DistributedCache.addCacheFile(getExecutable(job), job);
+    }
+  }
+
+}
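
(Aside: a sketch of launching a tether job through this API; not part of the
patch. The executable path and the input/output paths are assumptions.)

    import java.io.File;
    import java.io.IOException;
    import org.apache.avro.mapred.tether.TetherJob;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.FileInputFormat;
    import org.apache.hadoop.mapred.FileOutputFormat;
    import org.apache.hadoop.mapred.JobConf;

    public class TetherDriver {
      public static void main(String[] args) throws IOException {
        JobConf job = new JobConf();
        FileInputFormat.setInputPaths(job, new Path("/data/in"));
        FileOutputFormat.setOutputPath(job, new Path("/data/out"));
        // Hypothetical executable that speaks the tether protocol.
        TetherJob.setExecutable(job, new File("/usr/local/bin/word_count_task"));
        TetherJob.setProtocol(job, "http");  // optional; "sasl" is the default
        TetherJob.runJob(job);
      }
    }
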
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherKeyComparator.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherKeyComparator.java
new file mode 100644
index 0000000..f1b74b0
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherKeyComparator.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.io.RawComparator;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.conf.Configuration;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.BinaryData;
+import org.apache.avro.mapred.AvroJob;
+
+/** The {@link RawComparator} used by jobs configured with {@link TetherJob}. */
+class TetherKeyComparator
+  extends Configured implements RawComparator<TetherData> {
+
+  private Schema schema;
+
+  @Override
+  public void setConf(Configuration conf) {
+    super.setConf(conf);
+    if (conf != null)
+      schema = AvroJob.getMapOutputSchema(conf);
+  }
+
+  @Override
+  public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
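+    // The encoded key begins with the length prefix that writeBytes() emits in
+    // TetherKeySerialization; skipLong() steps over it so only the key bytes
+    // are compared.  Returning -1 instead of 0 below means equal keys are
+    // never collapsed into a single group.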
+    int diff = BinaryData.compare(b1, BinaryData.skipLong(b1, s1), l1,
+                                  b2, BinaryData.skipLong(b2, s2), l2,
+                                  schema);
+    return diff == 0 ? -1 : diff;
+  }
+
+  @Override
+  public int compare(TetherData x, TetherData y) {
+    ByteBuffer b1 = x.buffer(), b2 = y.buffer();
+    int diff = BinaryData.compare(b1.array(), b1.position(), 
+                                  b2.array(), b2.position(),
+                                  schema);
+    return diff == 0 ? -1 : diff;
+  }
+
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherKeySerialization.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherKeySerialization.java
new file mode 100644
index 0000000..b91053e
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherKeySerialization.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import org.apache.hadoop.io.serializer.Serialization;
+import org.apache.hadoop.io.serializer.Deserializer;
+import org.apache.hadoop.io.serializer.Serializer;
+import org.apache.hadoop.conf.Configured;
+
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.BinaryDecoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.EncoderFactory;
+
+/** A {@link Serialization} for {@link TetherData}. */
+class TetherKeySerialization
+  extends Configured implements Serialization<TetherData> {
+
+  public boolean accept(Class<?> c) {
+    return TetherData.class.isAssignableFrom(c);
+  }
+  
+  public Deserializer<TetherData> getDeserializer(Class<TetherData> c) {
+    return new TetherDataDeserializer();
+  }
+  
+  private static final DecoderFactory FACTORY = DecoderFactory.get();
+
+  private class TetherDataDeserializer implements Deserializer<TetherData> {
+    private BinaryDecoder decoder;
+    
+    public void open(InputStream in) {
+      this.decoder = FACTORY.directBinaryDecoder(in, decoder);
+    }
+    
+    public TetherData deserialize(TetherData datum) throws IOException {
+      if (datum == null) datum = new TetherData();
+      datum.buffer(decoder.readBytes(datum.buffer()));
+      return datum;
+    }
+
+    public void close() throws IOException {
+      decoder.inputStream().close();
+    }
+  }
+  
+  public Serializer<TetherData> getSerializer(Class<TetherData> c) {
+    return new TetherDataSerializer();
+  }
+
+  private class TetherDataSerializer implements Serializer<TetherData> {
+
+    private OutputStream out;
+    private BinaryEncoder encoder;
+    
+    public void open(OutputStream out) {
+      this.out = out;
+      this.encoder = EncoderFactory.get().directBinaryEncoder(out, encoder);
+    }
+
+    public void serialize(TetherData datum) throws IOException {
+      encoder.writeBytes(datum.buffer());
+      encoder.flush(); //Flush shouldn't be required. Might be a bug in AVRO.
+    }
+
+    public void close() throws IOException {
+      encoder.flush();
+      out.close();
+    }
+
+  }
+
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherMapRunner.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherMapRunner.java
new file mode 100644
index 0000000..c8b335f
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherMapRunner.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapRunner;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.Counters.Counter;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.avro.mapred.AvroJob;
+
+class TetherMapRunner
+  extends MapRunner<TetherData, NullWritable, TetherData, NullWritable> {
+
+  private static final Logger LOG = LoggerFactory.getLogger(TetherMapRunner.class);
+
+  private JobConf job;
+  private TetheredProcess process;
+
+  public void configure(JobConf job) {
+    this.job = job;
+  }
+
+  @SuppressWarnings("unchecked")
+  public void run(RecordReader<TetherData, NullWritable> recordReader,
+                  OutputCollector<TetherData, NullWritable> collector,
+                  Reporter reporter) throws IOException {
+    try {
+      // start tethered process
+      process = new TetheredProcess(job, collector, reporter);
+
+      // configure it
+      LOG.info("send configure to subprocess for map task");
+      process.inputClient.configure
+        (TaskType.MAP, 
+         job.get(AvroJob.INPUT_SCHEMA),
+         AvroJob.getMapOutputSchema(job).toString());
+         
+      LOG.info("send partitions to subprocess for map task");
+      process.inputClient.partitions(job.getNumReduceTasks());
+
+      // run map
+      Counter inputRecordCounter =
+        reporter.getCounter("org.apache.hadoop.mapred.Task$Counter",
+                            "MAP_INPUT_RECORDS");
+      TetherData data = new TetherData();
+      while (recordReader.next(data, NullWritable.get())) {
+        process.inputClient.input(data.buffer(), data.count());
+        inputRecordCounter.increment(data.count()-1);
+        if (process.outputService.isFinished())
+          break;
+      }
+      LOG.info("send complete to subprocess for map task");
+      process.inputClient.complete();
+
+      // wait for completion
+      if (process.outputService.waitForFinish())
+        throw new IOException("Task failed: "+process.outputService.error());
+
+    } catch (Throwable t) {                       // send abort
+      LOG.warn("Task failed", t);
+      process.inputClient.abort();
+      throw new IOException("Task failed: "+t, t);
+
+    } finally {                                   // clean up
+      if (process != null)
+        process.close();
+    }
+  }
+  
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputFormat.java
new file mode 100644
index 0000000..8365938
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputFormat.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.RecordWriter;
+import org.apache.hadoop.util.Progressable;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.mapred.AvroJob;
+import org.apache.avro.mapred.AvroOutputFormat;
+
+/** An {@link org.apache.hadoop.mapred.OutputFormat} for Avro data files. */
+class TetherOutputFormat
+  extends FileOutputFormat<TetherData, NullWritable> {
+
+  /** Enable output compression using the deflate codec and specify its level. */
+  public static void setDeflateLevel(JobConf job, int level) {
+    FileOutputFormat.setCompressOutput(job, true);
+    job.setInt(AvroOutputFormat.DEFLATE_LEVEL_KEY, level);
+  }
+
+  @SuppressWarnings("unchecked")
+  public RecordWriter<TetherData, NullWritable>
+    getRecordWriter(FileSystem ignore, JobConf job,
+                    String name, Progressable prog)
+    throws IOException {
+
+    Schema schema = AvroJob.getOutputSchema(job);
+    
+    final DataFileWriter<Object> writer =
+      new DataFileWriter<Object>(new GenericDatumWriter<Object>());
+
+    if (FileOutputFormat.getCompressOutput(job)) {
+      int level = job.getInt(AvroOutputFormat.DEFLATE_LEVEL_KEY,
+                             CodecFactory.DEFAULT_DEFLATE_LEVEL);
+      writer.setCodec(CodecFactory.deflateCodec(level));
+    }
+
+    Path path =
+      FileOutputFormat.getTaskOutputPath(job, name+AvroOutputFormat.EXT);
+    writer.create(schema, path.getFileSystem(job).create(path));
+
+    return new RecordWriter<TetherData, NullWritable>() {
+        public void write(TetherData datum, NullWritable ignore)
+          throws IOException {
+          writer.appendEncoded(datum.buffer());
+        }
+        public void close(Reporter reporter) throws IOException {
+          writer.close();
+        }
+      };
+  }
+
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputService.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputService.java
new file mode 100644
index 0000000..e58ff1f
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherOutputService.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+class TetherOutputService implements OutputProtocol {
+  private Reporter reporter;
+  private OutputCollector<TetherData, NullWritable> collector;
+  private int inputPort;
+  private boolean complete;
+  private String error;
+
+  private static final Logger LOG = LoggerFactory.getLogger(TetherOutputService.class);
+
+  // timeout in milliseconds when waiting for messages from the subprocess.
+  // what is a good value?
+  public static final long TIMEOUT = 10 * 1000;
+  public TetherOutputService(OutputCollector<TetherData,NullWritable> collector,
+                             Reporter reporter) {
+    this.reporter = reporter;
+    this.collector = collector;
+  }
+
+  public synchronized void configure(int inputPort) {
+    LOG.info("got input port from child: inputport="+inputPort);
+    this.inputPort = inputPort;
+    notify();
+  }
+
+  public synchronized int inputPort() throws Exception {
+    if (inputPort==0) {
+      LOG.info("waiting for input port from child");
+      wait(TIMEOUT);
+    }
+
+    if (inputPort==0) {
+      LOG.error("Parent process timed out waiting for subprocess to send input port. Check the job log files for more info.");
+      throw new Exception("Parent process timed out waiting for subprocess to send input port");
+    }
+    return inputPort;
+  }
+
+  public void output(ByteBuffer datum) {
+    try {
+      collector.collect(new TetherData(datum), NullWritable.get());
+    } catch (Throwable e) {
+      LOG.warn("Error: "+e, e);
+      synchronized (this) {
+        error = e.toString();
+      }
+    }
+  }
+
+  public void outputPartitioned(int partition, ByteBuffer datum) {
+    TetherPartitioner.setNextPartition(partition);
+    output(datum);
+  }
+
+  public void status(String message) { reporter.setStatus(message); }
+
+
+  public void count(String group, String name, long amount) {
+    reporter.getCounter(group, name).increment(amount);
+  }
+
+  public synchronized void fail(String message) {
+    LOG.warn("Failing: "+message);
+    error = message;
+    notify();
+  }
+
+  public synchronized void complete() {
+    LOG.info("got task complete");
+    complete = true;
+    notify();
+  }
+
+  public synchronized boolean isFinished() {
+    return complete || (error != null);
+  }
+
+  public String error() { return error; }
+
+  public synchronized boolean waitForFinish() throws InterruptedException {
+    while (!isFinished())
+      wait();
+    return error != null;
+  }
+
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherPartitioner.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherPartitioner.java
new file mode 100644
index 0000000..ff0c619
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherPartitioner.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Partitioner;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.BinaryData;
+import org.apache.avro.mapred.AvroJob;
+
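+/**
+ * Partitioner for tethered jobs.  If the subprocess explicitly chose a
+ * partition for the next record (via outputPartitioned), that cached value
+ * is used; otherwise the partition is derived from the binary hash of the
+ * datum under the map output schema.
+ */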
+class TetherPartitioner implements Partitioner<TetherData, NullWritable> {
+  
+  private static final ThreadLocal<Integer> CACHE = new ThreadLocal<Integer>();
+
+  private Schema schema;
+
+  public void configure(JobConf job) {
+    schema = AvroJob.getMapOutputSchema(job);
+  }
+
+  static void setNextPartition(int newValue) {
+    CACHE.set(newValue);
+  }
+
+  public int getPartition(TetherData key, NullWritable value,
+                          int numPartitions) {
+    Integer result = CACHE.get();
+    if (result != null)                           // return cached value
+      return result;
+
+    ByteBuffer b = key.buffer();
+    int p = b.position();
+    int hashCode = BinaryData.hashCode(b.array(), p, b.limit()-p, schema);
+    if (hashCode < 0)
+      hashCode = -hashCode;
+    return hashCode % numPartitions;
+  }
+
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherRecordReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherRecordReader.java
new file mode 100644
index 0000000..33c06a8
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherRecordReader.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.RecordReader;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.mapred.AvroJob;
+import org.apache.avro.mapred.FsInput;
+
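+/**
+ * RecordReader for tethered jobs.  Rather than decoding individual records,
+ * it hands whole Avro data file blocks (and their record counts) to the
+ * framework as TetherData, to be forwarded to the subprocess.
+ */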
+class TetherRecordReader
+  implements RecordReader<TetherData, NullWritable> {
+
+  private FsInput in;
+  private DataFileReader<Object> reader;
+  private long start;
+  private long end;
+
+  public TetherRecordReader(JobConf job, FileSplit split)
+    throws IOException {
+    this.in = new FsInput(split.getPath(), job);
+    this.reader =
+      new DataFileReader<Object>(in, new GenericDatumReader<Object>());
+
+    reader.sync(split.getStart());                    // sync to start
+    this.start = in.tell();
+    this.end = split.getStart() + split.getLength();
+
+    job.set(AvroJob.INPUT_SCHEMA, reader.getSchema().toString());
+  }
+
+  public Schema getSchema() { return reader.getSchema(); }
+
+  public TetherData createKey() { return new TetherData(); }
+  
+  public NullWritable createValue() { return NullWritable.get(); }
+    
+  public boolean next(TetherData data, NullWritable ignore)
+    throws IOException {
+    if (!reader.hasNext() || reader.pastSync(end))
+      return false;
+    data.buffer(reader.nextBlock());
+    data.count((int)reader.getBlockCount());
+    return true;
+  }
+  
+  public float getProgress() throws IOException {
+    if (end == start) {
+      return 0.0f;
+    } else {
+      return Math.min(1.0f, (in.tell() - start) / (float)(end - start));
+    }
+  }
+  
+  public long getPos() throws IOException {
+    return in.tell();
+  }
+
+  public void close() throws IOException { reader.close(); }
+  
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherReducer.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherReducer.java
new file mode 100644
index 0000000..35b4231
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetherReducer.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+
+import org.apache.avro.mapred.AvroJob;
+
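+/**
+ * Reducer for tethered jobs.  Lazily starts the tethered subprocess on the
+ * first call to reduce(), forwards each block of data to it, and signals
+ * completion or abort in close().
+ */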
+class TetherReducer
+  implements Reducer<TetherData,NullWritable,TetherData,NullWritable> {
+
+  private JobConf job;
+  private TetheredProcess process;
+  private boolean error;
+
+  public void configure(JobConf job) {
+    this.job = job;
+  }
+
+  public void reduce(TetherData datum, Iterator<NullWritable> ignore, 
+                     OutputCollector<TetherData, NullWritable> collector,
+                     Reporter reporter) throws IOException {
+    try {
+      if (process == null) {
+        process = new TetheredProcess(job, collector, reporter);
+        process.inputClient.configure
+          (TaskType.REDUCE,
+           AvroJob.getMapOutputSchema(job).toString(),
+           AvroJob.getOutputSchema(job).toString());
+      }
+      process.inputClient.input(datum.buffer(), datum.count());
+    } catch (IOException e) {
+      error = true;
+      throw e;
+    } catch (Exception e) {
+      error = true;
+      throw new IOException(e);
+    }
+  }
+
+  /**
+   * Handle the end of the input by closing down the application.
+   */
+  public void close() throws IOException {
+    if (process == null) return;
+    try {
+      if (error)
+        process.inputClient.abort();
+      else
+        process.inputClient.complete();
+      process.outputService.waitForFinish();
+    } catch (InterruptedException e) {
+      throw new IOException(e);
+    } finally {
+      process.close();
+    }
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetheredProcess.java b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetheredProcess.java
new file mode 100644
index 0000000..8ad8e8b
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/TetheredProcess.java
@@ -0,0 +1,231 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.io.IOException;
+import java.io.File;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.HashMap;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.TaskAttemptID;
+import org.apache.hadoop.mapred.TaskLog;
+import org.apache.hadoop.filecache.DistributedCache;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileUtil;
+
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.Server;
+import org.apache.avro.ipc.SaslSocketServer;
+import org.apache.avro.ipc.SaslSocketTransceiver;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.ipc.specific.SpecificResponder;
+import org.apache.avro.ipc.HttpServer;
+import org.apache.avro.ipc.HttpTransceiver;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
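+/**
+ * Manages the tethered subprocess for a task: starts an output server (HTTP
+ * or SASL socket) that the subprocess reports back to, launches the
+ * executable with the required environment, and opens a client transceiver
+ * for sending input to the subprocess.
+ */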
+class TetheredProcess  {
+
+  static final Logger LOG = LoggerFactory.getLogger(TetheredProcess.class);
+
+  private JobConf job;
+
+  TetherOutputService outputService;
+  Server outputServer;
+  Process subprocess;
+  Transceiver clientTransceiver;
+  InputProtocol inputClient;
+
+  /**
+   * Enumeration defining which transport protocol to use for communication
+   * between the map/reduce Java daemons and the tethered process.
+   */
+  public enum Protocol {HTTP,SASL,NONE};
+
+  //which protocol we are using
+  Protocol proto;
+
+  public TetheredProcess(JobConf job,
+                          OutputCollector<TetherData, NullWritable> collector,
+                          Reporter reporter) throws Exception {
+    try {
+      // start server
+      this.outputService = new TetherOutputService(collector, reporter);
+
+      proto=TetherJob.getProtocol(job);
+
+      InetSocketAddress iaddress;
+      switch (proto) {
+      case SASL:
+        iaddress=new InetSocketAddress(0);
+        this.outputServer = new SaslSocketServer
+            (new SpecificResponder(OutputProtocol.class, outputService),
+                iaddress);
+        break;
+      case HTTP:
+        iaddress=new InetSocketAddress(0);
+        // set it up for HTTP
+        this.outputServer = new HttpServer
+            (new SpecificResponder(OutputProtocol.class, outputService),
+                iaddress.getPort());
+        break;
+      case NONE:
+      default:
+        throw new RuntimeException("No transport protocol was specified in the job configuraiton");
+      }
+
+      outputServer.start();
+      
+      // start sub-process, connecting back to server
+      this.subprocess = startSubprocess(job);
+
+      // check if the process has exited -- is there a better way to do this?
+      boolean hasexited = false;
+      try {
+        // exitValue throws an exception if process hasn't exited
+        this.subprocess.exitValue();
+        hasexited = true;
+      } catch (IllegalThreadStateException e) {
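+        // Expected when the subprocess is still running; ignore.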
+      }
+      if (hasexited) {
+        LOG.error("Could not start subprocess");
+        throw new RuntimeException("Could not start subprocess");
+      }
+      // open client, connecting to sub-process
+      switch (proto) {
+      case SASL:
+        this.clientTransceiver = new SaslSocketTransceiver(new InetSocketAddress(outputService.inputPort()));
+        break;
+      case HTTP:
+        this.clientTransceiver = new HttpTransceiver(new URL("http://127.0.0.1:" + outputService.inputPort()));
+        break;
+      default:
+        throw new RuntimeException("Error: code to handle this protocol is not implemented");
+      }
+
+      this.inputClient =
+        SpecificRequestor.getClient(InputProtocol.class, clientTransceiver);
+
+
+    } catch (Exception t) {
+      close();
+      throw t;
+    }
+  }
+
+  public void close() {
+    if (clientTransceiver != null)
+      try {
+        clientTransceiver.close();
+      } catch (IOException e) {}                  // ignore
+    if (subprocess != null)
+      subprocess.destroy();
+    if (outputServer != null)
+      outputServer.close();
+  }
+
+  private Process startSubprocess(JobConf job)
+    throws IOException, InterruptedException {
+    // get the executable command
+    List<String> command = new ArrayList<String>();
+
+    String executable;
+    if (job.getBoolean(TetherJob.TETHER_EXEC_CACHED, false)) {
+      // we want to use the cached executable
+      Path[] localFiles = DistributedCache.getLocalCacheFiles(job);
+      if (localFiles == null) {                     // until MAPREDUCE-476
+        URI[] files = DistributedCache.getCacheFiles(job);
+        localFiles = new Path[] { new Path(files[0].toString()) };
+      }
+      executable = localFiles[0].toString();
+      FileUtil.chmod(executable, "a+x");
+    } else {
+      executable = job.get(TetherJob.TETHER_EXEC);
+    }
+
+    command.add(executable);
+
+    // Add the executable arguments.  We assume the arguments are separated by
+    // newlines, so we split the argument string on newlines and add each token
+    // to the command.  We need to do it this way because
+    // TaskLog.captureOutAndError will put quote marks around each argument, so
+    // if we passed a single string containing all arguments it would be quoted
+    // incorrectly.
+    String args = job.get(TetherJob.TETHER_EXEC_ARGS);
+
+    // args might be null if TETHER_EXEC_ARGS wasn't set.
+    if (args != null) {
+      String[] aparams = args.split("\n");
+      for (int i = 0; i < aparams.length; i++) {
+        aparams[i] = aparams[i].trim();
+        if (aparams[i].length() > 0) {
+          command.add(aparams[i]);
+        }
+      }
+    }
+
+    if (System.getProperty("hadoop.log.dir") == null
+        && System.getenv("HADOOP_LOG_DIR") != null)
+      System.setProperty("hadoop.log.dir", System.getenv("HADOOP_LOG_DIR"));
+
+    // wrap the command in a stdout/stderr capture
+    TaskAttemptID taskid = TaskAttemptID.forName(job.get("mapred.task.id"));
+    File stdout = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDOUT);
+    File stderr = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDERR);
+    long logLength = TaskLog.getTaskLogLength(job);
+    command = TaskLog.captureOutAndError(null, command, stdout, stderr, logLength, false);
+    stdout.getParentFile().mkdirs();
+    stderr.getParentFile().mkdirs();
+
+    // add output server's port to env
+    Map<String, String> env = new HashMap<String,String>();
+    env.put("AVRO_TETHER_OUTPUT_PORT",
+            Integer.toString(outputServer.getPort()));
+
+    // add an environment variable to specify what protocol to use for communication
+    env.put("AVRO_TETHER_PROTOCOL", job.get(TetherJob.TETHER_PROTOCOL));
+
+    // print an info message about the command
+    String imsg="";
+    for (int i=0; i<command.size();i++) {
+      imsg=command.get(i)+" ";
+    }
+    LOG.info("TetheredProcess.startSubprocess: command: "+imsg);
+    LOG.info("Tetheredprocess.startSubprocess: stdout logged to: " + stdout.toString()) ;
+    LOG.info("Tetheredprocess.startSubprocess: stderr logged to: " + stderr.toString()) ;
+
+    // start child process
+    ProcessBuilder builder = new ProcessBuilder(command);
+    builder.environment().putAll(env);
+    return builder.start();
+  }
+  
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/package.html b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/package.html
new file mode 100644
index 0000000..0f6fce2
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapred/tether/package.html
@@ -0,0 +1,32 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+Run <a href="http://hadoop.apache.org/">Hadoop</a> MapReduce jobs over
+Avro data, with map and reduce functions run in a sub-process.  This
+permits MapReduce programs over Avro data in languages besides Java.
+<p>
+Each language will provide a framework to permit easy implementation
+of MapReduce programs in that language.  Currently only a Java
+framework has been implemented, for test purposes, so this feature is
+not yet useful.
+<p>
+This is still an experimental API, subject to change.
+</body>
+</html>
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroJob.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroJob.java
new file mode 100644
index 0000000..6d3639e
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroJob.java
@@ -0,0 +1,208 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.hadoop.io.AvroKeyComparator;
+import org.apache.avro.hadoop.io.AvroSerialization;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.Job;
+
+/**
+ * Utility methods for configuring jobs that work with Avro.
+ *
+ * <p>When using Avro data as MapReduce keys and values, data must be wrapped in a
+ * suitable AvroWrapper implementation.  MapReduce keys must be wrapped in an AvroKey
+ * object, and MapReduce values must be wrapped in an AvroValue object.</p>
+ *
+ * <p>Suppose you would like to write a line count mapper that reads from a text file. If
+ * instead of using a Text and IntWritable output value, you would like to use Avro data
+ * with a schema of <i>"string"</i> and <i>"int"</i>, respectively, you may parameterize
+ * your mapper with {@code AvroKey<CharSequence>} and {@code AvroValue<Integer>}
+ * types.  Then, use the <code>setMapOutputKeySchema()</code> and
+ * <code>setMapOutputValueSchema()</code> methods to set writer schemas for the records
+ * you will generate.</p>
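+ *
+ * <p>For illustration only (the class and variable names below are hypothetical,
+ * not part of this API), the job setup and such a mapper might look like:</p>
+ *
+ * <pre>
+ * Job job = new Job();
+ * AvroJob.setMapOutputKeySchema(job, Schema.create(Schema.Type.STRING));
+ * AvroJob.setMapOutputValueSchema(job, Schema.create(Schema.Type.INT));
+ *
+ * public class LineCountMapper
+ *     extends Mapper<LongWritable, Text, AvroKey<CharSequence>, AvroValue<Integer>> {
+ *   protected void map(LongWritable lineNum, Text line, Context context)
+ *       throws IOException, InterruptedException {
+ *     context.write(new AvroKey<CharSequence>(line.toString()),
+ *                   new AvroValue<Integer>(1));
+ *   }
+ * }
+ * </pre>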
+ */
+public final class AvroJob {
+  /** Disable the constructor for this utility class. */
+  private AvroJob() {}
+
+  /** Configuration key for the input key schema. */
+  private static final String CONF_INPUT_KEY_SCHEMA = "avro.schema.input.key";
+
+  /** Configuration key for the input value schema. */
+  private static final String CONF_INPUT_VALUE_SCHEMA = "avro.schema.input.value";
+
+  /** Configuration key for the output key schema. */
+  private static final String CONF_OUTPUT_KEY_SCHEMA = "avro.schema.output.key";
+
+  /** Configuration key for the output value schema. */
+  private static final String CONF_OUTPUT_VALUE_SCHEMA = "avro.schema.output.value";
+
+  /** The configuration key for a job's output compression codec.
+   *  This takes one of the strings registered in {@link org.apache.avro.file.CodecFactory} */
+  public static final String CONF_OUTPUT_CODEC = "avro.output.codec";
+
+  /**
+   * Sets the job input key schema.
+   *
+   * @param job The job to configure.
+   * @param schema The input key schema.
+   */
+  public static void setInputKeySchema(Job job, Schema schema) {
+    job.getConfiguration().set(CONF_INPUT_KEY_SCHEMA, schema.toString());
+  }
+
+  /**
+   * Sets the job input value schema.
+   *
+   * @param job The job to configure.
+   * @param schema The input value schema.
+   */
+  public static void setInputValueSchema(Job job, Schema schema) {
+    job.getConfiguration().set(CONF_INPUT_VALUE_SCHEMA, schema.toString());
+  }
+
+  /**
+   * Sets the map output key schema.
+   *
+   * @param job The job to configure.
+   * @param schema The map output key schema.
+   */
+  public static void setMapOutputKeySchema(Job job, Schema schema) {
+    job.setMapOutputKeyClass(AvroKey.class);
+    job.setGroupingComparatorClass(AvroKeyComparator.class);
+    job.setSortComparatorClass(AvroKeyComparator.class);
+    AvroSerialization.setKeyWriterSchema(job.getConfiguration(), schema);
+    AvroSerialization.setKeyReaderSchema(job.getConfiguration(), schema);
+    AvroSerialization.addToConfiguration(job.getConfiguration());
+  }
+
+  /**
+   * Sets the map output value schema.
+   *
+   * @param job The job to configure.
+   * @param schema The map output value schema.
+   */
+  public static void setMapOutputValueSchema(Job job, Schema schema) {
+    job.setMapOutputValueClass(AvroValue.class);
+    AvroSerialization.setValueWriterSchema(job.getConfiguration(), schema);
+    AvroSerialization.setValueReaderSchema(job.getConfiguration(), schema);
+    AvroSerialization.addToConfiguration(job.getConfiguration());
+  }
+
+  /**
+   * Sets the job output key schema.
+   *
+   * @param job The job to configure.
+   * @param schema The job output key schema.
+   */
+  public static void setOutputKeySchema(Job job, Schema schema) {
+    job.setOutputKeyClass(AvroKey.class);
+    job.getConfiguration().set(CONF_OUTPUT_KEY_SCHEMA, schema.toString());
+  }
+
+  /**
+   * Sets the job output value schema.
+   *
+   * @param job The job to configure.
+   * @param schema The job output value schema.
+   */
+  public static void setOutputValueSchema(Job job, Schema schema) {
+    job.setOutputValueClass(AvroValue.class);
+    job.getConfiguration().set(CONF_OUTPUT_VALUE_SCHEMA, schema.toString());
+  }
+
+  /**
+   * Sets the job data model class.
+   *
+   * @param job The job to configure.
+   * @param modelClass The job data model class.
+   */
+  public static void setDataModelClass(Job job, Class<? extends GenericData> modelClass) {
+    AvroSerialization.setDataModelClass(job.getConfiguration(), modelClass);
+  }
+
+  /**
+   * Gets the job input key schema.
+   *
+   * @param conf The job configuration.
+   * @return The job input key schema, or null if not set.
+   */
+  public static Schema getInputKeySchema(Configuration conf) {
+    String schemaString = conf.get(CONF_INPUT_KEY_SCHEMA);
+    return schemaString != null ? Schema.parse(schemaString) : null;
+  }
+
+  /**
+   * Gets the job input value schema.
+   *
+   * @param conf The job configuration.
+   * @return The job input value schema, or null if not set.
+   */
+  public static Schema getInputValueSchema(Configuration conf) {
+    String schemaString = conf.get(CONF_INPUT_VALUE_SCHEMA);
+    return schemaString != null ? Schema.parse(schemaString) : null;
+  }
+
+  /**
+   * Gets the map output key schema.
+   *
+   * @param conf The job configuration.
+   * @return The map output key schema, or null if not set.
+   */
+  public static Schema getMapOutputKeySchema(Configuration conf) {
+    return AvroSerialization.getKeyWriterSchema(conf);
+  }
+
+  /**
+   * Gets the map output value schema.
+   *
+   * @param conf The job configuration.
+   * @return The map output value schema, or null if not set.
+   */
+  public static Schema getMapOutputValueSchema(Configuration conf) {
+    return AvroSerialization.getValueWriterSchema(conf);
+  }
+
+  /**
+   * Gets the job output key schema.
+   *
+   * @param conf The job configuration.
+   * @return The job output key schema, or null if not set.
+   */
+  public static Schema getOutputKeySchema(Configuration conf) {
+    String schemaString = conf.get(CONF_OUTPUT_KEY_SCHEMA);
+    return schemaString != null ? Schema.parse(schemaString) : null;
+  }
+
+  /**
+   * Gets the job output value schema.
+   *
+   * @param conf The job configuration.
+   * @return The job output value schema, or null if not set.
+   */
+  public static Schema getOutputValueSchema(Configuration conf) {
+    String schemaString = conf.get(CONF_OUTPUT_VALUE_SCHEMA);
+    return schemaString != null ? Schema.parse(schemaString) : null;
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyInputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyInputFormat.java
new file mode 100644
index 0000000..f95b144
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyInputFormat.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A MapReduce InputFormat that can handle Avro container files.
+ *
+ * <p>Keys are AvroKey wrapper objects that contain the Avro data.  Since Avro
+ * container files store only records (not key/value pairs), the value from
+ * this InputFormat is a NullWritable.</p>
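+ *
+ * <p>A minimal, illustrative configuration (the schema here is an example only;
+ * the reader schema is optional) might be:</p>
+ *
+ * <pre>
+ * Job job = new Job();
+ * job.setInputFormatClass(AvroKeyInputFormat.class);
+ * // Optional: request a reader schema different from the writer schema.
+ * AvroJob.setInputKeySchema(job, Schema.create(Schema.Type.STRING));
+ * </pre>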
+ */
+public class AvroKeyInputFormat<T> extends FileInputFormat<AvroKey<T>, NullWritable> {
+  private static final Logger LOG = LoggerFactory.getLogger(AvroKeyInputFormat.class);
+
+  /** {@inheritDoc} */
+  @Override
+  public RecordReader<AvroKey<T>, NullWritable> createRecordReader(
+      InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
+    Schema readerSchema = AvroJob.getInputKeySchema(context.getConfiguration());
+    if (null == readerSchema) {
+      LOG.warn("Reader schema was not set. Use AvroJob.setInputKeySchema() if desired.");
+      LOG.info("Using a reader schema equal to the writer schema.");
+    }
+    return new AvroKeyRecordReader<T>(readerSchema);
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyOutputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyOutputFormat.java
new file mode 100644
index 0000000..4c86112
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyOutputFormat.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.hadoop.io.AvroSerialization;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+/**
+ * FileOutputFormat for writing Avro container files.
+ *
+ * <p>Since Avro container files only contain records (not key/value pairs), this output
+ * format ignores the value.</p>
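+ *
+ * <p>A minimal, illustrative configuration (the schema is an example only)
+ * might be:</p>
+ *
+ * <pre>
+ * Job job = new Job();
+ * job.setOutputFormatClass(AvroKeyOutputFormat.class);
+ * AvroJob.setOutputKeySchema(job, Schema.create(Schema.Type.STRING));
+ * </pre>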
+ *
+ * @param <T> The (java) type of the Avro data to write.
+ */
+public class AvroKeyOutputFormat<T> extends AvroOutputFormatBase<AvroKey<T>, NullWritable> {
+  /** A factory for creating record writers. */
+  private final RecordWriterFactory mRecordWriterFactory;
+
+  /**
+   * Constructor.
+   */
+  public AvroKeyOutputFormat() {
+    this(new RecordWriterFactory());
+  }
+
+  /**
+   * Constructor.
+   *
+   * @param recordWriterFactory A factory for creating record writers.
+   */
+  protected AvroKeyOutputFormat(RecordWriterFactory recordWriterFactory) {
+    mRecordWriterFactory = recordWriterFactory;
+  }
+
+  /**
+   * A factory for creating record writers.
+   *
+   * @param <T> The java type of the avro record to write.
+   */
+  protected static class RecordWriterFactory<T> {
+    /**
+     * Creates a new record writer instance.
+     *
+     * @param writerSchema The writer schema for the records to write.
+     * @param compressionCodec The compression type for the writer file.
+     * @param outputStream The target output stream for the records.
+     * @param syncInterval The sync interval for the writer file.
+     */
+    protected RecordWriter<AvroKey<T>, NullWritable> create(
+        Schema writerSchema, GenericData dataModel, CodecFactory compressionCodec,
+        OutputStream outputStream, int syncInterval) throws IOException {
+      return new AvroKeyRecordWriter<T>(writerSchema, dataModel, compressionCodec, outputStream, syncInterval);
+    }
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  @SuppressWarnings("unchecked")
+  public RecordWriter<AvroKey<T>, NullWritable> getRecordWriter(TaskAttemptContext context)
+      throws IOException {
+    Configuration conf = context.getConfiguration();
+    // Get the writer schema.
+    Schema writerSchema = AvroJob.getOutputKeySchema(conf);
+    boolean isMapOnly = context.getNumReduceTasks() == 0;
+    if (isMapOnly) {
+      Schema mapOutputSchema = AvroJob.getMapOutputKeySchema(conf);
+      if (mapOutputSchema != null) {
+        writerSchema = mapOutputSchema;
+      }
+    }
+    if (null == writerSchema) {
+      throw new IOException(
+          "AvroKeyOutputFormat requires an output schema. Use AvroJob.setOutputKeySchema().");
+    }
+
+    GenericData dataModel = AvroSerialization.createDataModel(conf);
+
+    return mRecordWriterFactory.create
+      (writerSchema, dataModel, getCompressionCodec(context),
+       getAvroFileOutputStream(context), getSyncInterval(context));
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyRecordReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyRecordReader.java
new file mode 100644
index 0000000..4b87582
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyRecordReader.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.hadoop.io.NullWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Reads records from an input split representing a chunk of an Avro container file.
+ *
+ * @param <T> The (java) type of data in Avro container file.
+ */
+public class AvroKeyRecordReader<T> extends AvroRecordReaderBase<AvroKey<T>, NullWritable, T> {
+  private static final Logger LOG = LoggerFactory.getLogger(AvroKeyRecordReader.class);
+
+  /** A reusable object to hold records of the Avro container file. */
+  private final AvroKey<T> mCurrentRecord;
+
+  /**
+   * Constructor.
+   *
+   * @param readerSchema The reader schema to use for the records in the Avro container file.
+   */
+  public AvroKeyRecordReader(Schema readerSchema) {
+    super(readerSchema);
+    mCurrentRecord = new AvroKey<T>(null);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public boolean nextKeyValue() throws IOException, InterruptedException {
+    boolean hasNext = super.nextKeyValue();
+    mCurrentRecord.datum(getCurrentRecord());
+    return hasNext;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public AvroKey<T> getCurrentKey() throws IOException, InterruptedException {
+    return mCurrentRecord;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public NullWritable getCurrentValue() throws IOException, InterruptedException {
+    return NullWritable.get();
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyRecordWriter.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyRecordWriter.java
new file mode 100644
index 0000000..ce7bc58
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyRecordWriter.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileConstants;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+/**
+ * Writes Avro records to an Avro container file output stream.
+ *
+ * @param <T> The Java type of the Avro data to write.
+ */
+public class AvroKeyRecordWriter<T> extends RecordWriter<AvroKey<T>, NullWritable> implements Syncable {
+  /** A writer for the Avro container file. */
+  private final DataFileWriter<T> mAvroFileWriter;
+
+  /**
+   * Constructor.
+   *
+   * @param writerSchema The writer schema for the records in the Avro container file.
+   * @param dataModel The data model used to create the datum writer.
+   * @param compressionCodec A compression codec factory for the Avro container file.
+   * @param outputStream The output stream to write the Avro container file to.
+   * @param syncInterval The sync interval for the Avro container file.
+   * @throws IOException If the record writer cannot be opened.
+   */
+  public AvroKeyRecordWriter(Schema writerSchema, GenericData dataModel,
+      CodecFactory compressionCodec, OutputStream outputStream, int syncInterval) throws IOException {
+    // Create an Avro container file and a writer to it.
+    mAvroFileWriter = new DataFileWriter<T>(dataModel.createDatumWriter(writerSchema));
+    mAvroFileWriter.setCodec(compressionCodec);
+    mAvroFileWriter.setSyncInterval(syncInterval);
+    mAvroFileWriter.create(writerSchema, outputStream);
+  }
+
+  /**
+   * Constructor.
+   *
+   * @param writerSchema The writer schema for the records in the Avro container file.
+   * @param dataModel The data model used to create the datum writer.
+   * @param compressionCodec A compression codec factory for the Avro container file.
+   * @param outputStream The output stream to write the Avro container file to.
+   * @throws IOException If the record writer cannot be opened.
+   */
+  public AvroKeyRecordWriter(Schema writerSchema, GenericData dataModel,
+      CodecFactory compressionCodec, OutputStream outputStream) throws IOException {
+    this(writerSchema, dataModel, compressionCodec, outputStream, 
+        DataFileConstants.DEFAULT_SYNC_INTERVAL);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void write(AvroKey<T> record, NullWritable ignore) throws IOException {
+    mAvroFileWriter.append(record.datum());
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void close(TaskAttemptContext context) throws IOException {
+    mAvroFileWriter.close();
+  }
+  
+  /** {@inheritDoc} */
+  @Override
+  public long sync() throws IOException {
+    return mAvroFileWriter.sync();
+  }  
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueInputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueInputFormat.java
new file mode 100644
index 0000000..f2d73c3
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueInputFormat.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A MapReduce InputFormat that reads from Avro container files of key/value generic records.
+ *
+ * <p>Avro container files that contain generic records with the two fields 'key' and
+ * 'value' are expected.  The contents of the 'key' field will be used as the job input
+ * key, and the contents of the 'value' field will be used as the job input value.</p>
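+ *
+ * <p>Conceptually, each entry of such a container file is a generic record of
+ * roughly the following shape (the field types here are examples only):</p>
+ *
+ * <pre>
+ * {"type": "record", "name": "KeyValuePair", "fields": [
+ *   {"name": "key",   "type": "string"},
+ *   {"name": "value", "type": "long"}
+ * ]}
+ * </pre>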
+ *
+ * @param <K> The type of the Avro key to read.
+ * @param <V> The type of the Avro value to read.
+ */
+public class AvroKeyValueInputFormat<K, V> extends FileInputFormat<AvroKey<K>, AvroValue<V>> {
+  private static final Logger LOG = LoggerFactory.getLogger(AvroKeyValueInputFormat.class);
+
+  /** {@inheritDoc} */
+  @Override
+  public RecordReader<AvroKey<K>, AvroValue<V>> createRecordReader(
+      InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
+    Schema keyReaderSchema = AvroJob.getInputKeySchema(context.getConfiguration());
+    if (null == keyReaderSchema) {
+      LOG.warn("Key reader schema was not set. Use AvroJob.setInputKeySchema() if desired.");
+      LOG.info("Using a key reader schema equal to the writer schema.");
+    }
+    Schema valueReaderSchema = AvroJob.getInputValueSchema(context.getConfiguration());
+    if (null == valueReaderSchema) {
+      LOG.warn("Value reader schema was not set. Use AvroJob.setInputValueSchema() if desired.");
+      LOG.info("Using a value reader schema equal to the writer schema.");
+    }
+    return new AvroKeyValueRecordReader<K, V>(keyReaderSchema, valueReaderSchema);
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueOutputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueOutputFormat.java
new file mode 100644
index 0000000..b844abd
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueOutputFormat.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.hadoop.io.AvroDatumConverter;
+import org.apache.avro.hadoop.io.AvroDatumConverterFactory;
+import org.apache.avro.hadoop.io.AvroSerialization;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+/**
+ * FileOutputFormat for writing Avro container files of key/value pairs.
+ *
+ * <p>Since Avro container files can only contain records (not key/value pairs), this
+ * output format puts the key and value into an Avro generic record with two fields, named
+ * 'key' and 'value'.</p>
+ *
+ * <p>The keys and values given to this output format may be Avro objects wrapped in
+ * <code>AvroKey</code> or <code>AvroValue</code> objects.  The basic Writable types are
+ * also supported (e.g., IntWritable, Text); they will be converted to their corresponding
+ * Avro types.</p>
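+ *
+ * <p>A minimal, illustrative configuration (the schemas are examples only)
+ * might be:</p>
+ *
+ * <pre>
+ * Job job = new Job();
+ * job.setOutputFormatClass(AvroKeyValueOutputFormat.class);
+ * AvroJob.setOutputKeySchema(job, Schema.create(Schema.Type.STRING));
+ * AvroJob.setOutputValueSchema(job, Schema.create(Schema.Type.INT));
+ * </pre>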
+ *
+ * @param <K> The type of key. If an Avro type, it must be wrapped in an <code>AvroKey</code>.
+ * @param <V> The type of value. If an Avro type, it must be wrapped in an <code>AvroValue</code>.
+ */
+public class AvroKeyValueOutputFormat<K, V> extends AvroOutputFormatBase<K, V> {
+  /** {@inheritDoc} */
+  @Override
+  @SuppressWarnings("unchecked")
+  public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException {
+    Configuration conf = context.getConfiguration();
+
+    AvroDatumConverterFactory converterFactory = new AvroDatumConverterFactory(conf);
+
+    AvroDatumConverter<K, ?> keyConverter = converterFactory.create(
+        (Class<K>) context.getOutputKeyClass());
+    AvroDatumConverter<V, ?> valueConverter = converterFactory.create(
+        (Class<V>) context.getOutputValueClass());
+
+    GenericData dataModel = AvroSerialization.createDataModel(conf);
+
+    return new AvroKeyValueRecordWriter<K, V>(keyConverter, valueConverter,
+        dataModel, getCompressionCodec(context), getAvroFileOutputStream(context),
+        getSyncInterval(context));
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordReader.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordReader.java
new file mode 100644
index 0000000..64d427c
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordReader.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.hadoop.io.AvroKeyValue;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+
+/**
+ * Reads Avro generic records from an Avro container file, where the records contain two
+ * fields: 'key' and 'value'.
+ *
+ * <p>The contents of the 'key' field will be parsed into an AvroKey object. The contents
+ * of the 'value' field will be parsed into an AvroValue object.</p>
+ *
+ * @param <K> The type of the Avro key to read.
+ * @param <V> The type of the Avro value to read.
+ */
+public class AvroKeyValueRecordReader<K, V>
+    extends AvroRecordReaderBase<AvroKey<K>, AvroValue<V>, GenericRecord> {
+  /** The current key the reader is on. */
+  private final AvroKey<K> mCurrentKey;
+
+  /** The current value the reader is on. */
+  private final AvroValue<V> mCurrentValue;
+
+  /**
+   * Constructor.
+   *
+   * @param keyReaderSchema The reader schema for the key within the generic record.
+   * @param valueReaderSchema The reader schema for the value within the generic record.
+   */
+  public AvroKeyValueRecordReader(Schema keyReaderSchema, Schema valueReaderSchema) {
+    super(AvroKeyValue.getSchema(keyReaderSchema, valueReaderSchema));
+    mCurrentKey = new AvroKey<K>(null);
+    mCurrentValue = new AvroValue<V>(null);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public boolean nextKeyValue() throws IOException, InterruptedException {
+    boolean hasNext = super.nextKeyValue();
+    if (hasNext) {
+      AvroKeyValue<K, V> avroKeyValue = new AvroKeyValue<K, V>(getCurrentRecord());
+      mCurrentKey.datum(avroKeyValue.getKey());
+      mCurrentValue.datum(avroKeyValue.getValue());
+    } else {
+      mCurrentKey.datum(null);
+      mCurrentValue.datum(null);
+    }
+    return hasNext;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public AvroKey<K> getCurrentKey() throws IOException, InterruptedException {
+    return mCurrentKey;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public AvroValue<V> getCurrentValue() throws IOException, InterruptedException {
+    return mCurrentValue;
+  }
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordWriter.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordWriter.java
new file mode 100644
index 0000000..71baa41
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroKeyValueRecordWriter.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileConstants;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.hadoop.io.AvroDatumConverter;
+import org.apache.avro.hadoop.io.AvroKeyValue;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+/**
+ * Writes key/value pairs to an Avro container file.
+ *
+ * <p>Each entry in the Avro container file will be a generic record with two fields,
+ * named 'key' and 'value'.  The input types may be basic Writable objects like Text or
+ * IntWritable, or they may be AvroWrapper subclasses (AvroKey or AvroValue).  Writable
+ * objects will be converted to their corresponding Avro types when written to the generic
+ * record key/value pair.</p>
+ *
+ * @param <K> The type of key to write.
+ * @param <V> The type of value to write.
+ */
+public class AvroKeyValueRecordWriter<K, V> extends RecordWriter<K, V> implements Syncable {
+  /** A writer for the Avro container file. */
+  private final DataFileWriter<GenericRecord> mAvroFileWriter;
+
+  /** The writer schema for the generic record entries of the Avro container file. */
+  private final Schema mKeyValuePairSchema;
+
+  /** A reusable Avro generic record for writing key/value pairs to the file. */
+  private final AvroKeyValue<Object, Object> mOutputRecord;
+
+  /** A helper object that converts the input key to an Avro datum. */
+  private final AvroDatumConverter<K, ?> mKeyConverter;
+
+  /** A helper object that converts the input value to an Avro datum. */
+  private final AvroDatumConverter<V, ?> mValueConverter;
+
+  /**
+   * Constructor.
+   *
+   * @param keyConverter A key to Avro datum converter.
+   * @param valueConverter A value to Avro datum converter.
+   * @param dataModel The data model for key and value.
+   * @param compressionCodec A compression codec factory for the Avro container file.
+   * @param outputStream The output stream to write the Avro container file to.
+   * @param syncInterval The sync interval for the Avro container file.
+   * @throws IOException If the record writer cannot be opened.
+   */
+  public AvroKeyValueRecordWriter(AvroDatumConverter<K, ?> keyConverter,
+      AvroDatumConverter<V, ?> valueConverter, GenericData dataModel,
+      CodecFactory compressionCodec, OutputStream outputStream, int syncInterval) throws IOException {
+    // Create the generic record schema for the key/value pair.
+    mKeyValuePairSchema = AvroKeyValue.getSchema(
+        keyConverter.getWriterSchema(), valueConverter.getWriterSchema());
+
+    // Create an Avro container file and a writer to it.
+    mAvroFileWriter = new DataFileWriter<GenericRecord>(
+        dataModel.createDatumWriter(mKeyValuePairSchema));
+    mAvroFileWriter.setCodec(compressionCodec);
+    mAvroFileWriter.setSyncInterval(syncInterval);
+    mAvroFileWriter.create(mKeyValuePairSchema, outputStream);
+
+    // Keep a reference to the converters.
+    mKeyConverter = keyConverter;
+    mValueConverter = valueConverter;
+
+    // Create a reusable output record.
+    mOutputRecord = new AvroKeyValue<Object, Object>(new GenericData.Record(mKeyValuePairSchema));
+  }
+
+  /**
+   * Constructor.
+   *
+   * @param keyConverter A key to Avro datum converter.
+   * @param valueConverter A value to Avro datum converter.
+   * @param dataModel The data model for key and value.
+   * @param compressionCodec A compression codec factory for the Avro container file.
+   * @param outputStream The output stream to write the Avro container file to.
+   * @throws IOException If the record writer cannot be opened.
+   */
+  public AvroKeyValueRecordWriter(AvroDatumConverter<K, ?> keyConverter,
+      AvroDatumConverter<V, ?> valueConverter, GenericData dataModel,
+      CodecFactory compressionCodec, OutputStream outputStream) throws IOException {
+    this(keyConverter, valueConverter, dataModel, compressionCodec, outputStream, 
+        DataFileConstants.DEFAULT_SYNC_INTERVAL);
+  }
+  
+  /**
+   * Gets the writer schema for the key/value pair generic record.
+   *
+   * @return The writer schema used for entries of the Avro container file.
+   */
+  public Schema getWriterSchema() {
+    return mKeyValuePairSchema;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void write(K key, V value) throws IOException {
+    mOutputRecord.setKey(mKeyConverter.convert(key));
+    mOutputRecord.setValue(mValueConverter.convert(value));
+    mAvroFileWriter.append(mOutputRecord.get());
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void close(TaskAttemptContext context) throws IOException {
+    mAvroFileWriter.close();
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public long sync() throws IOException {
+    return mAvroFileWriter.sync();
+  }  
+}
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java
new file mode 100644
index 0000000..9db8c68
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroMultipleOutputs.java
@@ -0,0 +1,600 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.StringTokenizer;
+
+import org.apache.avro.Schema;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.OutputFormat;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskInputOutputContext;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * The AvroMultipleOutputs class simplifies writing Avro output data
+ * to multiple outputs.
+ *
+ * <p>
+ * Case one: writing to additional outputs other than the job default output.
+ *
+ * Each additional output, or named output, may be configured with its own
+ * <code>Schema</code> and <code>OutputFormat</code>.
+ * </p>
+ * <p>
+ * Case two: writing data to different files whose names are provided by the user.
+ * </p>
+ * 
+ * <p>
+ * AvroMultipleOutputs supports counters; by default they are disabled. The
+ * counters group is the {@link AvroMultipleOutputs} class name. The names of the
+ * counters are the same as the output names, and they count the number of
+ * records written to each output name.
+ * </p>
+ * 
+ * Usage pattern for job submission:
+ * <pre>
+ *
+ * Job job = new Job();
+ *
+ * FileInputFormat.setInputPaths(job, inDir);
+ * FileOutputFormat.setOutputPath(job, outDir);
+ *
+ * job.setMapperClass(MyAvroMapper.class);
+ * job.setReducerClass(MyAvroReducer.class);
+ * ...
+ *  
+ * Schema schema;
+ * ...
+ * // Defines an additional output 'avro1' for the job
+ * AvroMultipleOutputs.addNamedOutput(job, "avro1", AvroKeyValueOutputFormat.class,
+ *   keySchema, valueSchema);  // valueSchema may be null if only the key
+ *                             // is to be written to the file by the RecordWriter
+ *
+ * // Defines an additional output 'avro2' with a different schema for the job
+ * AvroMultipleOutputs.addNamedOutput(job, "avro2",
+ *   AvroKeyOutputFormat.class,
+ *   schema, null);
+ * ...
+ *
+ * job.waitForCompletion(true);
+ * ...
+ * </pre>
+ * <p>
+ * Usage in Reducer:
+ * <pre>
+ * 
+ * public class MyAvroReducer extends
+ *   Reducer<K, V, T, NullWritable> {
+ * private AvroMultipleOutputs amos;
+ *
+ *
+ * public void setup(Context context) {
+ * ...
+ * amos = new AvroMultipleOutputs(context);
+ * }
+ *
+ * public void reduce(K key, Iterable<V> values, Context context)
+ * throws IOException, InterruptedException {
+ * ...
+ * amos.write("avro1", datum, NullWritable.get());
+ * amos.write("avro2", datum, NullWritable.get());
+ * amos.write("avro3", datum); // the value defaults to NullWritable
+ * ...
+ * }
+ *
+ * public void cleanup(Context context) throws IOException, InterruptedException {
+ * amos.close();
+ * ...
+ * }
+ *
+ * }
+ * </pre>
+ */
+public class AvroMultipleOutputs {
+
+  private static final String MULTIPLE_OUTPUTS = "avro.mapreduce.multipleoutputs";
+
+  private static final String MO_PREFIX = 
+    "avro.mapreduce.multipleoutputs.namedOutput.";
+
+  private static final String FORMAT = ".format";
+  private static final String COUNTERS_ENABLED = 
+    "avro.mapreduce.multipleoutputs.counters";
+
+  /**
+   * Counters group used by the counters of MultipleOutputs.
+   */
+  private static final String COUNTERS_GROUP = AvroMultipleOutputs.class.getName();
+  
+  /**
+   * Cache for the taskContexts
+   */
+  private Map<String, TaskAttemptContext> taskContexts = new HashMap<String, TaskAttemptContext>();
+
+  /**
+   * Checks if a named output name is a valid token.
+   *
+   * @param namedOutput named output Name
+   * @throws IllegalArgumentException if the output name is not valid.
+   */
+  private static void checkTokenName(String namedOutput) {
+    if (namedOutput == null || namedOutput.length() == 0) {
+      throw new IllegalArgumentException(
+        "Name cannot be NULL or empty");
+    }
+    for (char ch : namedOutput.toCharArray()) {
+      if ((ch >= 'A') && (ch <= 'Z')) {
+        continue;
+      }
+      if ((ch >= 'a') && (ch <= 'z')) {
+        continue;
+      }
+      if ((ch >= '0') && (ch <= '9')) {
+        continue;
+      }
+      throw new IllegalArgumentException(
+        "Name cannot have a '" + ch + "' char");
+    }
+  }
+
+  /**
+   * Checks if the output name is valid.
+   *
+   * The name cannot be the name used for the default output ('part').
+   *
+   * @param outputPath base output name
+   * @throws IllegalArgumentException if the output name is not valid.
+   */
+  private static void checkBaseOutputPath(String outputPath) {
+    if (outputPath.equals("part")) {
+      throw new IllegalArgumentException("output name cannot be 'part'");
+    }
+  }
+  
+  /**
+   * Checks if a named output name is valid.
+   *
+   * @param namedOutput named output Name
+   * @throws IllegalArgumentException if the output name is not valid.
+   */
+  private static void checkNamedOutputName(JobContext job,
+      String namedOutput, boolean alreadyDefined) {
+    checkTokenName(namedOutput);
+    checkBaseOutputPath(namedOutput);
+    List<String> definedChannels = getNamedOutputsList(job);
+    if (alreadyDefined && definedChannels.contains(namedOutput)) {
+      throw new IllegalArgumentException("Named output '" + namedOutput +
+        "' already defined");
+    } else if (!alreadyDefined && !definedChannels.contains(namedOutput)) {
+      throw new IllegalArgumentException("Named output '" + namedOutput +
+        "' not defined");
+    }
+  }
+
+  // Returns list of channel names.
+  private static List<String> getNamedOutputsList(JobContext job) {
+    List<String> names = new ArrayList<String>();
+    StringTokenizer st = new StringTokenizer(
+      job.getConfiguration().get(MULTIPLE_OUTPUTS, ""), " ");
+    while (st.hasMoreTokens()) {
+      names.add(st.nextToken());
+    }
+    return names;
+  }
+
+  // Returns the named output OutputFormat.
+  @SuppressWarnings("unchecked")
+  private static Class<? extends OutputFormat<?, ?>> getNamedOutputFormatClass(
+    JobContext job, String namedOutput) {
+    return (Class<? extends OutputFormat<?, ?>>)
+      job.getConfiguration().getClass(MO_PREFIX + namedOutput + FORMAT, null,
+      OutputFormat.class);
+  }
+
+  /**
+   * Adds a named output for the job.
+   * <p/>
+   *
+   * @param job               job to add the named output to
+   * @param namedOutput       named output name; it has to be a word, letters
+   *                          and numbers only, and cannot be the word 'part',
+   *                          as that is reserved for the default output.
+   * @param outputFormatClass OutputFormat class.
+   * @param keySchema         Schema for the key.
+   */
+  @SuppressWarnings("unchecked")
+  public static void addNamedOutput(Job job, String namedOutput,
+      Class<? extends OutputFormat> outputFormatClass,
+      Schema keySchema) {
+    addNamedOutput(job, namedOutput, outputFormatClass, keySchema, null);
+  }
+
+  /**
+   * Adds a named output for the job.
+   * <p/>
+   *
+   * @param job               job to add the named output to
+   * @param namedOutput       named output name; it has to be a word, letters
+   *                          and numbers only, and cannot be the word 'part',
+   *                          as that is reserved for the default output.
+   * @param outputFormatClass OutputFormat class.
+   * @param keySchema         Schema for the key.
+   * @param valueSchema       Schema for the value (used by AvroKeyValueOutputFormat; may be null).
+   */
+  @SuppressWarnings("unchecked")
+  public static void addNamedOutput(Job job, String namedOutput,
+      Class<? extends OutputFormat> outputFormatClass,
+      Schema keySchema, Schema valueSchema) {
+    checkNamedOutputName(job, namedOutput, true);
+    Configuration conf = job.getConfiguration();
+    conf.set(MULTIPLE_OUTPUTS,
+      conf.get(MULTIPLE_OUTPUTS, "") + " " + namedOutput);
+    conf.setClass(MO_PREFIX + namedOutput + FORMAT, outputFormatClass,
+      OutputFormat.class);
+    conf.set(MO_PREFIX + namedOutput + ".keyschema", keySchema.toString());
+    if (valueSchema != null) {
+      conf.set(MO_PREFIX + namedOutput + ".valueschema", valueSchema.toString());
+    }
+  }
+
+  /**
+   * Enables or disables counters for the named outputs.
+   * 
+   * The counters group is the {@link AvroMultipleOutputs} class name.
+   * The names of the counters are the same as the named outputs. These
+   * counters count the number of records written to each output name.
+   * By default these counters are disabled.
+   *
+   * @param job    job  to enable counters
+   * @param enabled indicates if the counters will be enabled or not.
+   */
+  public static void setCountersEnabled(Job job, boolean enabled) {
+    job.getConfiguration().setBoolean(COUNTERS_ENABLED, enabled);
+  }
+
+  /**
+   * Returns whether the counters for the named outputs are enabled.
+   * By default these counters are disabled.
+   *
+   * @param job    the job 
+   * @return TRUE if the counters are enabled, FALSE if they are disabled.
+   */
+  public static boolean getCountersEnabled(JobContext job) {
+    return job.getConfiguration().getBoolean(COUNTERS_ENABLED, false);
+  }
+
+  /**
+   * Wraps RecordWriter to increment counters. 
+   */
+  @SuppressWarnings("unchecked")
+  private static class RecordWriterWithCounter extends RecordWriter {
+    private RecordWriter writer;
+    private String counterName;
+    private TaskInputOutputContext context;
+
+    public RecordWriterWithCounter(RecordWriter writer, String counterName,
+                                   TaskInputOutputContext context) {
+      this.writer = writer;
+      this.counterName = counterName;
+      this.context = context;
+    }
+
+    @SuppressWarnings({"unchecked"})
+    public void write(Object key, Object value) 
+        throws IOException, InterruptedException {
+      context.getCounter(COUNTERS_GROUP, counterName).increment(1);
+      writer.write(key, value);
+    }
+
+    public void close(TaskAttemptContext context) 
+        throws IOException, InterruptedException {
+      writer.close(context);
+    }
+  }
+
+  // instance code, to be used from Mapper/Reducer code
+
+  private TaskInputOutputContext<?, ?, ?, ?> context;
+  private Set<String> namedOutputs;
+  private Map<String, RecordWriter<?, ?>> recordWriters;
+  private boolean countersEnabled;
+  
+  /**
+   * Creates and initializes multiple outputs support.
+   * It should be instantiated in the Mapper/Reducer setup method.
+   *
+   * @param context the TaskInputOutputContext object
+   */
+  public AvroMultipleOutputs(
+      TaskInputOutputContext<?, ?, ?, ?> context) {
+    this.context = context;
+    namedOutputs = Collections.unmodifiableSet(
+      new HashSet<String>(AvroMultipleOutputs.getNamedOutputsList(context)));
+    recordWriters = new HashMap<String, RecordWriter<?, ?>>();
+    countersEnabled = getCountersEnabled(context);
+  }
+
+  /**
+   * Writes the key to the namedOutput; the value is NullWritable.
+   *
+   * Output path is a unique file generated for the namedOutput.
+   * For example, {namedOutput}-(m|r)-{part-number}
+   *
+   * @param namedOutput the named output name
+   * @param key         the key (the value written is NullWritable)
+   */
+  @SuppressWarnings("unchecked")
+  public void write(String namedOutput, Object key)
+      throws IOException, InterruptedException {
+    write(namedOutput, key, NullWritable.get(), namedOutput);
+  }
+
+
+
+  /**
+   * Writes key and value to the namedOutput.
+   *
+   * Output path is a unique file generated for the namedOutput.
+   * For example, {namedOutput}-(m|r)-{part-number}
+   * 
+   * @param namedOutput the named output name
+   * @param key         the key
+   * @param value       the value
+   */
+  @SuppressWarnings("unchecked")
+  public void write(String namedOutput, Object key, Object value)
+      throws IOException, InterruptedException {
+    write(namedOutput, key, value, namedOutput);
+  }
+
+  /**
+   * Writes key and value to baseOutputPath using the namedOutput.
+   *
+   * @param namedOutput    the named output name
+   * @param key            the key
+   * @param value          the value
+   * @param baseOutputPath base-output path to write the record to.
+   * Note: the framework will generate a unique filename for the baseOutputPath.
+   */
+  @SuppressWarnings("unchecked")
+  public void write(String namedOutput, Object key, Object value,
+      String baseOutputPath) throws IOException, InterruptedException {
+    checkNamedOutputName(context, namedOutput, false);
+    checkBaseOutputPath(baseOutputPath);
+    if (!namedOutputs.contains(namedOutput)) {
+      throw new IllegalArgumentException("Undefined named output '" +
+        namedOutput + "'");
+    }
+    TaskAttemptContext taskContext = getContext(namedOutput);
+    getRecordWriter(taskContext, baseOutputPath).write(key, value);
+  }
+
+  /**
+   * Writes key and value to an output file name.
+   *
+   * Gets the record writer from the job's output format.
+   * The job's output format should be a FileOutputFormat.
+   * 
+   * @param key       the key
+   * @param value     the value
+   * @param baseOutputPath base-output path to write the record to.
+   * Note: the framework will generate a unique filename for the baseOutputPath.
+   */
+  public void write(Object key, Object value, String baseOutputPath)
+      throws IOException, InterruptedException {
+    write(key, value, null, null, baseOutputPath);
+  }
+  
+  /**
+   * Writes key and value to an output file name.
+   *
+   * Gets the record writer from the job's output format. The job's output format
+   * should be a FileOutputFormat.
+   *
+   * @param key   the key
+   * @param value the value
+   * @param keySchema   key schema to use
+   * @param valSchema   value schema to use
+   * @param baseOutputPath base-output path to write the record to. Note: the framework
+   *          will generate a unique filename for the baseOutputPath.
+   */
+  @SuppressWarnings("unchecked")
+  public void write(Object key, Object value, Schema keySchema,
+      Schema valSchema, String baseOutputPath) throws IOException,
+      InterruptedException {
+    checkBaseOutputPath(baseOutputPath);
+    Job job = new Job(context.getConfiguration());
+    setSchema(job, keySchema, valSchema);
+    TaskAttemptContext taskContext = createTaskAttemptContext(job.getConfiguration(), context.getTaskAttemptID());
+    getRecordWriter(taskContext, baseOutputPath).write(key, value);
+  }
+
+  /**
+   * Gets the record writer from the job's output format; the job's output format
+   * should be a FileOutputFormat. If the record writer implements Syncable, this
+   * forces the end of the current block, emitting a synchronization marker, and
+   * returns the current position as a value that may be passed to
+   * DataFileReader.seek(long); otherwise it returns -1.
+   *
+   * @param namedOutput   the namedOutput
+   * @param baseOutputPath base-output path to write the record to. Note: the framework
+   *          will generate a unique filename for the baseOutputPath.
+   */
+  @SuppressWarnings("unchecked")
+  public long sync(String namedOutput, String baseOutputPath) throws IOException, InterruptedException {
+    checkNamedOutputName(context, namedOutput, false);
+    checkBaseOutputPath(baseOutputPath);
+    if (!namedOutputs.contains(namedOutput)) {
+      throw new IllegalArgumentException("Undefined named output '" + namedOutput + "'");
+    }
+    TaskAttemptContext taskContext = getContext(namedOutput);
+    RecordWriter recordWriter = getRecordWriter(taskContext, baseOutputPath);
+    long position = -1;
+    if (recordWriter instanceof Syncable) {
+      Syncable syncableWriter = (Syncable) recordWriter;
+      position = syncableWriter.sync();
+    }
+    return position;
+  }
+
+  // By being synchronized, AvroMultipleOutputs can be used with a
+  // MultithreadedMapper.
+  @SuppressWarnings("unchecked")
+  private synchronized RecordWriter getRecordWriter(
+      TaskAttemptContext taskContext, String baseFileName) 
+      throws IOException, InterruptedException {
+    
+    // look for record-writer in the cache
+    RecordWriter writer = recordWriters.get(baseFileName);
+    
+    // If not in cache, create a new one
+    if (writer == null) {
+      // get the record writer from context output format
+      //FileOutputFormat.setOutputName(taskContext, baseFileName);
+      taskContext.getConfiguration().set("avro.mo.config.namedOutput",baseFileName);
+      try {
+        writer = ((OutputFormat) ReflectionUtils.newInstance(
+          taskContext.getOutputFormatClass(), taskContext.getConfiguration()))
+          .getRecordWriter(taskContext);
+      } catch (ClassNotFoundException e) {
+        throw new IOException(e);
+      }
+ 
+      // if counters are enabled, wrap the writer with context 
+      // to increment counters 
+      if (countersEnabled) {
+        writer = new RecordWriterWithCounter(writer, baseFileName, context);
+      }
+      
+      // add the record-writer to the cache
+      recordWriters.put(baseFileName, writer);
+    }
+    return writer;
+  }
+
+  private void setSchema(Job job, Schema keySchema, Schema valSchema) {
+
+    boolean isMaponly = job.getNumReduceTasks() == 0;
+    if (keySchema != null) {
+      if (isMaponly)
+        AvroJob.setMapOutputKeySchema(job, keySchema);
+      else
+        AvroJob.setOutputKeySchema(job, keySchema);
+    }
+    if (valSchema != null) {
+      if (isMaponly)
+        AvroJob.setMapOutputValueSchema(job, valSchema);
+      else
+        AvroJob.setOutputValueSchema(job, valSchema);
+    }
+
+  }
+
+  // Create a TaskAttemptContext for the named output, with the
+  // output format and output key/value types set in the context.
+  @SuppressWarnings("deprecation")
+  private TaskAttemptContext getContext(String nameOutput) throws IOException {
+
+    TaskAttemptContext taskContext = taskContexts.get(nameOutput);
+
+    if (taskContext != null) {
+      return taskContext;
+    }
+
+    // The following trick leverages the instantiation of a record writer via
+    // the job thus supporting arbitrary output formats.
+    Job job = new Job(context.getConfiguration());
+    job.setOutputFormatClass(getNamedOutputFormatClass(context, nameOutput));
+    Schema keySchema = null, valSchema = null;
+    if (job.getConfiguration().get(MO_PREFIX + nameOutput + ".keyschema", null) != null)
+      keySchema = Schema.parse(job.getConfiguration().get(
+          MO_PREFIX + nameOutput + ".keyschema"));
+    if (job.getConfiguration().get(MO_PREFIX + nameOutput + ".valueschema",
+        null) != null)
+      valSchema = Schema.parse(job.getConfiguration().get(
+          MO_PREFIX + nameOutput + ".valueschema"));
+    setSchema(job, keySchema, valSchema);
+    taskContext = createTaskAttemptContext(
+      job.getConfiguration(), context.getTaskAttemptID());
+    
+    taskContexts.put(nameOutput, taskContext);
+    
+    return taskContext;
+  }
+  
+  private TaskAttemptContext createTaskAttemptContext(Configuration conf, 
+      TaskAttemptID taskId) {
+    // Use reflection since the context types changed incompatibly between 1.0
+    // and 2.0.
+    try {
+      Class<?> c = getTaskAttemptContextClass();
+      Constructor<?> cons = c.getConstructor(Configuration.class,
+          TaskAttemptID.class);
+      return (TaskAttemptContext) cons.newInstance(conf, taskId);
+    } catch (Exception e) {
+      throw new IllegalStateException(e);
+    }
+  }
+  
+  private Class<?> getTaskAttemptContextClass() {
+    try {
+      return Class.forName(
+          "org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
+    } catch (Exception e) {
+      try {
+        return Class.forName(
+            "org.apache.hadoop.mapreduce.TaskAttemptContext");
+      } catch (Exception ex) {
+        throw new IllegalStateException(ex);
+      }
+    }
+  }
+  
+  /**
+   * Closes all the opened outputs.
+   *
+   * This should be called from the cleanup method of a map/reduce task.
+   * If overridden, subclasses must invoke <code>super.close()</code> at the
+   * end of their <code>close()</code>.
+   *
+   */
+  @SuppressWarnings("unchecked")
+  public void close() throws IOException, InterruptedException {
+    for (RecordWriter writer : recordWriters.values()) {
+      writer.close(context);
+    }
+  }
+}
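
A hedged sketch of the baseOutputPath variant in a reducer, complementing the Javadoc example above. The named output 'avro1' is assumed to have been registered with addNamedOutput at job setup; the 'bykey/' prefix and record types are illustrative, and the class is assumed to live in the same package as AvroMultipleOutputs:

    import java.io.IOException;

    import org.apache.avro.mapred.AvroKey;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Reducer;

    public class RoutingReducerSketch
        extends Reducer<Text, Text, AvroKey<CharSequence>, NullWritable> {
      private AvroMultipleOutputs amos;

      @Override
      protected void setup(Context context) {
        amos = new AvroMultipleOutputs(context);
      }

      @Override
      protected void reduce(Text key, Iterable<Text> values, Context context)
          throws IOException, InterruptedException {
        for (Text value : values) {
          // The framework appends a unique -r-NNNNN suffix to the base path,
          // so records for each key land under their own file prefix.
          amos.write("avro1", new AvroKey<CharSequence>(value.toString()),
              NullWritable.get(), "bykey/" + key.toString());
        }
      }

      @Override
      protected void cleanup(Context context) throws IOException, InterruptedException {
        amos.close();
      }
    }
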
+
+ 
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroOutputFormatBase.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroOutputFormatBase.java
new file mode 100644
index 0000000..5f77190
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroOutputFormatBase.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
+
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileConstants;
+import org.apache.avro.hadoop.file.HadoopCodecFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+
+/**
+ * Abstract base class for output formats that write Avro container files.
+ *
+ * @param <K> The type of key to write.
+ * @param <V> The type of value to write.
+ */
+public abstract class AvroOutputFormatBase<K, V> extends FileOutputFormat<K, V> {
+  
+  /**
+   * Gets the configured compression codec from the task context.
+   *
+   * @param context The task attempt context.
+   * @return The compression codec to use for the output Avro container file.
+   */
+  protected static CodecFactory getCompressionCodec(TaskAttemptContext context) {
+    if (FileOutputFormat.getCompressOutput(context)) {
+      // Default to deflate compression.
+      int deflateLevel = context.getConfiguration().getInt(
+          org.apache.avro.mapred.AvroOutputFormat.DEFLATE_LEVEL_KEY,
+          CodecFactory.DEFAULT_DEFLATE_LEVEL);
+      int xzLevel = context.getConfiguration().getInt(
+              org.apache.avro.mapred.AvroOutputFormat.XZ_LEVEL_KEY,
+              CodecFactory.DEFAULT_XZ_LEVEL);
+      
+      String outputCodec = context.getConfiguration()
+        .get(AvroJob.CONF_OUTPUT_CODEC);
+
+      if (outputCodec == null) {
+        String compressionCodec = context.getConfiguration().get("mapred.output.compression.codec");
+        String avroCodecName = HadoopCodecFactory.getAvroCodecName(compressionCodec);
+        if ( avroCodecName != null){
+          context.getConfiguration().set(AvroJob.CONF_OUTPUT_CODEC, avroCodecName);
+          return HadoopCodecFactory.fromHadoopString(compressionCodec);
+        } else {
+          return CodecFactory.deflateCodec(deflateLevel);
+        }
+      } else if (DataFileConstants.DEFLATE_CODEC.equals(outputCodec)) {
+        return CodecFactory.deflateCodec(deflateLevel);
+      } else if (DataFileConstants.XZ_CODEC.equals(outputCodec)) {
+        return CodecFactory.xzCodec(xzLevel);
+      } else {
+        return CodecFactory.fromString(outputCodec);
+      }
+
+    }
+
+    // No compression.
+    return CodecFactory.nullCodec();
+  }
+
+  /**
+   * Gets the target output stream where the Avro container file should be written.
+   *
+   * @param context The task attempt context.
+   * @return The target output stream.
+   * @throws IOException If the output stream cannot be created.
+   */
+  protected OutputStream getAvroFileOutputStream(TaskAttemptContext context) throws IOException {
+    Path path = new Path(((FileOutputCommitter) getOutputCommitter(context)).getWorkPath(),
+        getUniqueFile(context,
+            context.getConfiguration().get("avro.mo.config.namedOutput", "part"),
+            org.apache.avro.mapred.AvroOutputFormat.EXT));
+    return path.getFileSystem(context.getConfiguration()).create(path);
+  }
+
+  /**
+   * Gets the configured sync interval from the task context.
+   *
+   * @param context The task attempt context.
+   * @return The sync interval to use for the output Avro container file.
+   */
+  protected static int getSyncInterval(TaskAttemptContext context) {
+    return context.getConfiguration().getInt(
+          org.apache.avro.mapred.AvroOutputFormat.SYNC_INTERVAL_KEY,
+          DataFileConstants.DEFAULT_SYNC_INTERVAL);
+  }
+
+}
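
A hedged sketch of how a concrete subclass might combine the three helpers above into a record writer. The converter setup is an illustrative assumption, not the actual output format shipped in this patch, and AvroKeyValueRecordWriter is assumed to be in the same package:

    import java.io.IOException;

    import org.apache.avro.generic.GenericData;
    import org.apache.avro.hadoop.io.AvroDatumConverter;
    import org.apache.avro.hadoop.io.AvroDatumConverterFactory;
    import org.apache.avro.hadoop.io.AvroSerialization;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.RecordWriter;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;

    public class SketchAvroKeyValueOutputFormat<K, V> extends AvroOutputFormatBase<K, V> {
      @Override
      @SuppressWarnings("unchecked")
      public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException {
        Configuration conf = context.getConfiguration();
        AvroDatumConverterFactory factory = new AvroDatumConverterFactory(conf);
        AvroDatumConverter<K, ?> keyConverter =
            factory.create((Class<K>) context.getOutputKeyClass());
        AvroDatumConverter<V, ?> valueConverter =
            factory.create((Class<V>) context.getOutputValueClass());
        GenericData dataModel = AvroSerialization.createDataModel(conf);
        return new AvroKeyValueRecordWriter<K, V>(keyConverter, valueConverter, dataModel,
            getCompressionCodec(context),        // codec resolved from the job config
            getAvroFileOutputStream(context),    // stream into the task's work path
            getSyncInterval(context));           // container-file sync interval
      }
    }
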
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroRecordReaderBase.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroRecordReaderBase.java
new file mode 100644
index 0000000..82d0a8a
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroRecordReaderBase.java
@@ -0,0 +1,185 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.SeekableInput;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.hadoop.io.AvroSerialization;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.mapred.FsInput;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Abstract base class for <code>RecordReader</code>s that read Avro container files.
+ *
+ * @param <K> The type of key the record reader should generate.
+ * @param <V> The type of value the record reader should generate.
+ * @param <T> The type of the entries within the Avro container file being read.
+ */
+public abstract class AvroRecordReaderBase<K, V, T> extends RecordReader<K, V> {
+  private static final Logger LOG = LoggerFactory.getLogger(AvroRecordReaderBase.class);
+
+  /** The reader schema for the records within the input Avro container file. */
+  private final Schema mReaderSchema;
+
+  /** The current record from the Avro container file being read. */
+  private T mCurrentRecord;
+
+  /** A reader for the Avro container file containing the current input split. */
+  private DataFileReader<T> mAvroFileReader;
+
+  /**
+   * The byte offset into the Avro container file where the first block that fits
+   * completely within the current input split begins.
+   */
+  private long mStartPosition;
+
+  /** The byte offset into the Avro container file where the current input split ends. */
+  private long mEndPosition;
+
+  /**
+   * Constructor.
+   *
+   * @param readerSchema The reader schema for the records of the Avro container file.
+   */
+  protected AvroRecordReaderBase(Schema readerSchema) {
+    mReaderSchema = readerSchema;
+    mCurrentRecord = null;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void initialize(InputSplit inputSplit, TaskAttemptContext context)
+      throws IOException, InterruptedException {
+    if (!(inputSplit instanceof FileSplit)) {
+      throw new IllegalArgumentException("Only compatible with FileSplits.");
+    }
+    FileSplit fileSplit = (FileSplit) inputSplit;
+
+    // Open a seekable input stream to the Avro container file.
+    SeekableInput seekableFileInput
+        = createSeekableInput(context.getConfiguration(), fileSplit.getPath());
+
+    // Wrap the seekable input stream in an Avro DataFileReader.
+    Configuration conf = context.getConfiguration();
+    GenericData dataModel = AvroSerialization.createDataModel(conf);
+    DatumReader<T> datumReader = dataModel.createDatumReader(mReaderSchema);
+    mAvroFileReader = createAvroFileReader(seekableFileInput, datumReader);
+
+    // Initialize the start and end offsets into the file based on the boundaries of the
+    // input split we're responsible for.  We will read the first block that begins
+    // after the input split start boundary.  We will read up to but not including the
+    // first block that starts after input split end boundary.
+
+    // Sync to the closest block/record boundary just after beginning of our input split.
+    mAvroFileReader.sync(fileSplit.getStart());
+
+    // Initialize the start position to the beginning of the first block of the input split.
+    mStartPosition = mAvroFileReader.previousSync();
+
+    // Initialize the end position to the end of the input split (this isn't necessarily
+    // on a block boundary, so using it for reporting progress will be approximate).
+    mEndPosition = fileSplit.getStart() + fileSplit.getLength();
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public boolean nextKeyValue() throws IOException, InterruptedException {
+    assert null != mAvroFileReader;
+
+    if (mAvroFileReader.hasNext() && !mAvroFileReader.pastSync(mEndPosition)) {
+      mCurrentRecord = mAvroFileReader.next(mCurrentRecord);
+      return true;
+    }
+    return false;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public float getProgress() throws IOException, InterruptedException {
+    assert null != mAvroFileReader;
+
+    if (mEndPosition == mStartPosition) {
+      // Trivial empty input split.
+      return 0.0f;
+    }
+    long bytesRead = mAvroFileReader.previousSync() - mStartPosition;
+    long bytesTotal = mEndPosition - mStartPosition;
+    LOG.debug("Progress: bytesRead=" + bytesRead + ", bytesTotal=" + bytesTotal);
+    return Math.min(1.0f, (float) bytesRead / (float) bytesTotal);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void close() throws IOException {
+    if (null != mAvroFileReader) {
+      try {
+        mAvroFileReader.close();
+      } finally {
+        mAvroFileReader = null;
+      }
+    }
+  }
+
+  /**
+   * Gets the current record read from the Avro container file.
+   *
+   * <p>Calling <code>nextKeyValue()</code> moves this to the next record.</p>
+   *
+   * @return The current Avro record (may be null if no record has been read).
+   */
+  protected T getCurrentRecord() {
+    return mCurrentRecord;
+  }
+
+  /**
+   * Creates a seekable input stream to an Avro container file.
+   *
+   * @param conf The hadoop configuration.
+   * @param path The path to the avro container file.
+   * @return A seekable input stream over the file.
+   * @throws IOException If there is an error reading from the path.
+   */
+  protected SeekableInput createSeekableInput(Configuration conf, Path path)
+      throws IOException {
+    return new FsInput(path, conf);
+  }
+
+  /**
+   * Creates an Avro container file reader from a seekable input stream.
+   *
+   * @param input The input containing the Avro container file.
+   * @param datumReader The reader to use for the individual records in the Avro container file.
+   * @return A DataFileReader over the container file.
+   * @throws IOException If there is an error reading from the input stream.
+   */
+  protected DataFileReader<T> createAvroFileReader(
+      SeekableInput input, DatumReader<T> datumReader) throws IOException {
+    return new DataFileReader<T>(input, datumReader);
+  }
+}
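
A minimal sketch of a concrete subclass built on the base class above, exposing each record as an AvroKey with a NullWritable value. This mirrors the usual key-only pattern but is an illustrative assumption, not code from this patch:

    import java.io.IOException;

    import org.apache.avro.Schema;
    import org.apache.avro.mapred.AvroKey;
    import org.apache.hadoop.io.NullWritable;

    public class SketchAvroKeyRecordReader<T>
        extends AvroRecordReaderBase<AvroKey<T>, NullWritable, T> {
      private final AvroKey<T> mCurrentKey = new AvroKey<T>(null);

      public SketchAvroKeyRecordReader(Schema readerSchema) {
        super(readerSchema);
      }

      @Override
      public boolean nextKeyValue() throws IOException, InterruptedException {
        boolean hasNext = super.nextKeyValue();
        // Re-wrap whatever record the base class just read (null at end of input).
        mCurrentKey.datum(getCurrentRecord());
        return hasNext;
      }

      @Override
      public AvroKey<T> getCurrentKey() {
        return mCurrentKey;
      }

      @Override
      public NullWritable getCurrentValue() {
        return NullWritable.get();
      }
    }
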
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroSequenceFileInputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroSequenceFileInputFormat.java
new file mode 100644
index 0000000..2fc8a37
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroSequenceFileInputFormat.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.avro.hadoop.io.AvroSequenceFile;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
+
+/**
+ * An input format for reading from AvroSequenceFiles (sequence files that support Avro data).
+ *
+ * @param <K> The input key type.
+ * @param <V> The input value type.
+ */
+public class AvroSequenceFileInputFormat<K, V> extends SequenceFileInputFormat<K, V> {
+  /** {@inheritDoc} */
+  @Override
+  public RecordReader<K, V> createRecordReader(InputSplit inputSplit, TaskAttemptContext context)
+      throws IOException {
+    return new AvroSequenceFileRecordReader();
+  }
+
+  /**
+   * Reads records from a SequenceFile that supports Avro data.
+   *
+   * <p>This class is based on Hadoop's SequenceFileRecordReader, modified to construct an
+   * AvroSequenceFile.Reader instead of a SequenceFile.Reader.</p>
+   */
+  protected class AvroSequenceFileRecordReader extends RecordReader<K, V> {
+    private SequenceFile.Reader mReader;
+    private long mStart;
+    private long mEnd;
+    private boolean mHasMoreData;
+    private K mCurrentKey;
+    private V mCurrentValue;
+
+    /** {@inheritDoc} */
+    @Override
+    public void initialize(InputSplit split, TaskAttemptContext context)
+        throws IOException, InterruptedException {
+      FileSplit fileSplit = (FileSplit) split;
+      Configuration conf = context.getConfiguration();
+      Path path = fileSplit.getPath();
+      FileSystem fs = path.getFileSystem(conf);
+
+      // Configure the SequenceFile reader.
+      AvroSequenceFile.Reader.Options options = new AvroSequenceFile.Reader.Options()
+          .withFileSystem(fs)
+          .withInputPath(path)
+          .withConfiguration(conf);
+      Schema keySchema = AvroJob.getInputKeySchema(conf);
+      if (null != keySchema) {
+        options.withKeySchema(keySchema);
+      }
+      Schema valueSchema = AvroJob.getInputValueSchema(conf);
+      if (null != valueSchema) {
+        options.withValueSchema(valueSchema);
+      }
+
+      mReader = new AvroSequenceFile.Reader(options);
+      mEnd = fileSplit.getStart() + fileSplit.getLength();
+
+      if (fileSplit.getStart() > mReader.getPosition()) {
+        // Sync to the beginning of the input split.
+        mReader.sync(fileSplit.getStart());
+      }
+
+      mStart = mReader.getPosition();
+      mHasMoreData = mStart < mEnd;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    @SuppressWarnings("unchecked")
+    public boolean nextKeyValue() throws IOException, InterruptedException {
+      if (!mHasMoreData) {
+        return false;
+      }
+      long pos = mReader.getPosition();
+      mCurrentKey = (K) mReader.next(mCurrentKey);
+      if (null == mCurrentKey || (pos >= mEnd && mReader.syncSeen())) {
+        mHasMoreData = false;
+        mCurrentKey = null;
+        mCurrentValue = null;
+      } else {
+        mCurrentValue = (V) mReader.getCurrentValue(mCurrentValue);
+      }
+      return mHasMoreData;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public K getCurrentKey() {
+      return mCurrentKey;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public V getCurrentValue() {
+      return mCurrentValue;
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public float getProgress() throws IOException {
+      if (mEnd == mStart) {
+        return 0.0f;
+      } else {
+        return Math.min(1.0f, (mReader.getPosition() - mStart) / (float)(mEnd - mStart));
+      }
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public synchronized void close() throws IOException {
+      mReader.close();
+    }
+  }
+}
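
A hedged driver-side sketch of wiring this input format into a job; the input path and the STRING/LONG schemas are placeholder assumptions, and AvroJob is assumed to be the companion configuration class in the same package:

    import org.apache.avro.Schema;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

    public class SketchSequenceFileInputDriver {
      public static void main(String[] args) throws Exception {
        Job job = new Job();
        job.setInputFormatClass(AvroSequenceFileInputFormat.class);
        FileInputFormat.setInputPaths(job, new Path("/data/events.seq"));
        // Declare the datum schemas of the AvroKey/AvroValue entries so the
        // record reader can decode them; STRING and LONG are placeholders.
        AvroJob.setInputKeySchema(job, Schema.create(Schema.Type.STRING));
        AvroJob.setInputValueSchema(job, Schema.create(Schema.Type.LONG));
        // ... configure mapper/reducer and output, then submit the job.
      }
    }
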
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroSequenceFileOutputFormat.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroSequenceFileOutputFormat.java
new file mode 100644
index 0000000..064c5ce
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/AvroSequenceFileOutputFormat.java
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.hadoop.io.AvroSequenceFile;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.DefaultCodec;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.util.ReflectionUtils;
+
+/**
+ * A sequence file output format that knows how to write AvroKeys and AvroValues in
+ * addition to Writables.
+ *
+ * @param <K> The job output key type (may be a Writable, AvroKey).
+ * @param <V> The job output value type (may be a Writable, AvroValue).
+ */
+public class AvroSequenceFileOutputFormat<K, V> extends FileOutputFormat<K, V> {
+  /** Configuration key for storing the type of compression for the target sequence file. */
+  private static final String CONF_COMPRESSION_TYPE = "mapred.output.compression.type";
+
+  /** The default compression type for the target sequence file. */
+  private static final CompressionType DEFAULT_COMPRESSION_TYPE = CompressionType.RECORD;
+
+  /** {@inheritDoc} */
+  @Override
+  public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context)
+      throws IOException, InterruptedException {
+    Configuration conf = context.getConfiguration();
+
+    // Configure compression if requested.
+    CompressionCodec codec = null;
+    CompressionType compressionType = CompressionType.NONE;
+    if (getCompressOutput(context)) {
+      // Find the kind of compression to do.
+      compressionType = getOutputCompressionType(conf);
+
+      // Find the right codec.
+      Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
+      codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
+    }
+
+    // Get the path of the output file.
+    Path outputFile = getDefaultWorkFile(context, "");
+    FileSystem fs = outputFile.getFileSystem(conf);
+
+    // Configure the writer.
+    AvroSequenceFile.Writer.Options options = new AvroSequenceFile.Writer.Options()
+        .withFileSystem(fs)
+        .withConfiguration(conf)
+        .withOutputPath(outputFile)
+        .withKeyClass(context.getOutputKeyClass())
+        .withValueClass(context.getOutputValueClass())
+        .withProgressable(context)
+        .withCompressionType(compressionType)
+        .withCompressionCodec(codec);
+    Schema keySchema = AvroJob.getOutputKeySchema(conf);
+    if (null != keySchema) {
+      options.withKeySchema(keySchema);
+    }
+    Schema valueSchema = AvroJob.getOutputValueSchema(conf);
+    if (null != valueSchema) {
+      options.withValueSchema(valueSchema);
+    }
+    final SequenceFile.Writer out = AvroSequenceFile.createWriter(options);
+
+    return new RecordWriter<K, V>() {
+      @Override
+      public void write(K key, V value) throws IOException {
+        out.append(key, value);
+      }
+
+      @Override
+      public void close(TaskAttemptContext context) throws IOException {
+        out.close();
+      }
+    };
+  }
+
+  /**
+   * Sets the type of compression for the output sequence file.
+   *
+   * @param job The job configuration.
+   * @param compressionType The compression type for the target sequence file.
+   */
+  public static void setOutputCompressionType(Job job, CompressionType compressionType) {
+    setCompressOutput(job, true);
+    job.getConfiguration().set(CONF_COMPRESSION_TYPE, compressionType.name());
+  }
+
+  /**
+   * Gets type of compression for the output sequence file.
+   *
+   * @param conf The job configuration.
+   * @return The compression type.
+   */
+  public static CompressionType getOutputCompressionType(Configuration conf) {
+    String typeName = conf.get(CONF_COMPRESSION_TYPE, DEFAULT_COMPRESSION_TYPE.name());
+    return CompressionType.valueOf(typeName);
+  }
+}
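
A hedged driver-side sketch of configuring a job to use this output format with a Writable key and an Avro value; the output path, classes, and schema are placeholder assumptions:

    import org.apache.avro.Schema;
    import org.apache.avro.mapred.AvroValue;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile.CompressionType;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class SketchSequenceFileOutputDriver {
      public static void main(String[] args) throws Exception {
        Job job = new Job();
        job.setOutputFormatClass(AvroSequenceFileOutputFormat.class);
        FileOutputFormat.setOutputPath(job, new Path("/out"));
        // Writable key, Avro value: only the value needs a declared schema.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(AvroValue.class);
        AvroJob.setOutputValueSchema(job, Schema.create(Schema.Type.LONG));
        // Request block compression through the helper defined above.
        AvroSequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);
        // ... configure mapper/reducer, then submit the job.
      }
    }
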
diff --git a/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/Syncable.java b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/Syncable.java
new file mode 100644
index 0000000..fe1b48f
--- /dev/null
+++ b/lang/java/mapred/src/main/java/org/apache/avro/mapreduce/Syncable.java
@@ -0,0 +1,32 @@
+package org.apache.avro.mapreduce;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import java.io.IOException;
+
+public interface Syncable {
+
+  /**
+   * Return the current position as a value that may be passed to DataFileReader.seek(long).
+   * Forces the end of the current block, emitting a synchronization marker.
+   *
+   * @throws IOException - if an error occurred while attempting to sync.
+   */
+  long sync() throws IOException;
+}
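
To make the sync/seek contract concrete, a small self-contained sketch (not part of this patch) using DataFileWriter.sync(), which this interface mirrors, together with DataFileReader.seek(long); the file path and schema are placeholder assumptions:

    import java.io.File;

    import org.apache.avro.Schema;
    import org.apache.avro.file.DataFileReader;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericDatumReader;
    import org.apache.avro.generic.GenericDatumWriter;

    public class SyncSeekSketch {
      public static void main(String[] args) throws Exception {
        Schema schema = Schema.create(Schema.Type.STRING);
        File file = new File("/tmp/sync-demo.avro");  // placeholder path

        DataFileWriter<CharSequence> writer =
            new DataFileWriter<CharSequence>(new GenericDatumWriter<CharSequence>(schema));
        writer.create(schema, file);
        writer.append("before");
        long mark = writer.sync();  // end the current block, remember its position
        writer.append("after");
        writer.close();

        DataFileReader<CharSequence> reader = new DataFileReader<CharSequence>(
            file, new GenericDatumReader<CharSequence>(schema));
        reader.seek(mark);                  // jump straight to the marked block
        System.out.println(reader.next());  // prints "after"
        reader.close();
      }
    }
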
diff --git a/lang/java/mapred/src/test/avro/TextStats.avsc b/lang/java/mapred/src/test/avro/TextStats.avsc
new file mode 100644
index 0000000..b9f2ab8
--- /dev/null
+++ b/lang/java/mapred/src/test/avro/TextStats.avsc
@@ -0,0 +1,9 @@
+{
+  "namespace": "org.apache.avro.mapreduce",
+  "type": "record",
+  "name": "TextStats",
+  "fields": [
+    { "name": "name", "type": "string", "default": "" },
+    { "name": "count", "type": "int", "default": 0 }
+  ]
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestHadoopCodecFactory.java b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestHadoopCodecFactory.java
new file mode 100644
index 0000000..af340d8
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestHadoopCodecFactory.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.hadoop.file;
+
+import org.apache.avro.file.CodecFactory;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+public class TestHadoopCodecFactory {
+  
+  @Test
+  public void testHadoopCodecFactoryDeflate(){
+    CodecFactory hadoopDeflateCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.DeflateCodec");
+    CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate");
+    assertEquals(avroDeflateCodec.getClass(), hadoopDeflateCodec.getClass());
+  }
+  
+  @Test
+  public void testHadoopCodecFactorySnappy(){
+    CodecFactory hadoopSnappyCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.SnappyCodec");
+    CodecFactory avroSnappyCodec = CodecFactory.fromString("snappy");
+    assertEquals(avroSnappyCodec.getClass(), hadoopSnappyCodec.getClass());
+  }
+  
+  @Test
+  public void testHadoopCodecFactoryBZip2(){
+    CodecFactory hadoopBZip2Codec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.BZip2Codec");
+    CodecFactory avroBZip2Codec = CodecFactory.fromString("bzip2");
+    assertEquals(avroBZip2Codec.getClass(), hadoopBZip2Codec.getClass());
+  }
+  
+  @Test
+  public void testHadoopCodecFactoryGZip(){
+    // Hadoop's GZipCodec maps to Avro's deflate codec.
+    CodecFactory hadoopGzipCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.GZipCodec");
+    CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate");
+    assertEquals(avroDeflateCodec.getClass(), hadoopGzipCodec.getClass());
+  }
+  
+  @Test
+  public void testHadoopCodecFactoryFail(){
+    // An unknown Hadoop codec class yields no Avro codec.
+    CodecFactory unknownCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.FooCodec");
+    assertNull(unknownCodec);
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestSortedKeyValueFile.java b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestSortedKeyValueFile.java
new file mode 100644
index 0000000..74f65ac
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestSortedKeyValueFile.java
@@ -0,0 +1,350 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.file;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.hadoop.io.AvroKeyValue;
+import org.apache.avro.mapred.FsInput;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.util.Utf8;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TestSortedKeyValueFile {
+  private static final Logger LOG = LoggerFactory.getLogger(TestSortedKeyValueFile.class);
+
+  @Rule
+  public TemporaryFolder mTempDir = new TemporaryFolder();
+
+  @Test(expected=IllegalArgumentException.class)
+  public void testWriteOutOfSortedOrder() throws IOException {
+    LOG.debug("Writing some records to a SortedKeyValueFile...");
+
+    Configuration conf = new Configuration();
+    SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options()
+        .withKeySchema(Schema.create(Schema.Type.STRING))
+        .withValueSchema(Schema.create(Schema.Type.STRING))
+        .withConfiguration(conf)
+        .withPath(new Path(mTempDir.getRoot().getPath(), "myfile"))
+        .withIndexInterval(2);  // Index every other record.
+
+    SortedKeyValueFile.Writer<CharSequence, CharSequence> writer
+        = new SortedKeyValueFile.Writer<CharSequence, CharSequence>(options);
+
+    Utf8 key = new Utf8();                        // re-use key, to test copied
+
+    try {
+      writer.append(key.set("banana"), "Banana");
+      writer.append(key.set("apple"), "Apple");  // Ruh, roh!
+    } finally {
+      writer.close();
+    }
+  }
+
+  @Test
+  public void testNamedCodecs() throws IOException {
+    Configuration conf = new Configuration();
+    Path myfile = new Path(mTempDir.getRoot().getPath(), "myfile");
+    Schema key = Schema.create(Schema.Type.STRING);
+    Schema value = Schema.create(Schema.Type.STRING);
+    Schema recordSchema = AvroKeyValue.getSchema(key, value);
+    DatumReader<GenericRecord> datumReader = SpecificData.get().createDatumReader(recordSchema);
+    DataFileReader<GenericRecord> reader;
+
+    SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options()
+        .withKeySchema(key)
+        .withValueSchema(value)
+        .withConfiguration(conf)
+        .withPath(myfile);
+
+    SortedKeyValueFile.Writer<CharSequence, CharSequence> writer;
+
+    for(String codec : new String[]{"null", "deflate", "snappy", "bzip2"}) {
+        LOG.debug("Using " + codec + "codec for a SortedKeyValueFile...");
+
+        options.withCodec(codec);
+
+        writer = new SortedKeyValueFile.Writer<CharSequence, CharSequence>(options);
+        writer.close();
+
+        reader = new DataFileReader<GenericRecord>(
+            new FsInput(new Path(myfile,SortedKeyValueFile.DATA_FILENAME), conf),
+            datumReader);
+
+        assertEquals(codec, reader.getMetaString("avro.codec"));
+        reader.close();
+    }
+  }
+
+  @Test
+  public void testDeflateClassCodec() throws IOException {
+    Configuration conf = new Configuration();
+    Path myfile = new Path(mTempDir.getRoot().getPath(), "myfile");
+    Schema key = Schema.create(Schema.Type.STRING);
+    Schema value = Schema.create(Schema.Type.STRING);
+    Schema recordSchema = AvroKeyValue.getSchema(key, value);
+    DatumReader<GenericRecord> datumReader = SpecificData.get().createDatumReader(recordSchema);
+    DataFileReader<GenericRecord> reader;
+
+    LOG.debug("Using CodecFactory.deflateCodec() for a SortedKeyValueFile...");
+    SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options()
+        .withKeySchema(key)
+        .withValueSchema(value)
+        .withConfiguration(conf)
+        .withPath(myfile)
+        .withCodec(CodecFactory.deflateCodec(9));
+
+    SortedKeyValueFile.Writer<CharSequence, CharSequence> writer =
+        new SortedKeyValueFile.Writer<CharSequence, CharSequence>(options);
+    writer.close();
+
+    reader = new DataFileReader<GenericRecord>(
+        new FsInput(new Path(myfile,SortedKeyValueFile.DATA_FILENAME), conf),
+        datumReader);
+
+    assertEquals("deflate", reader.getMetaString("avro.codec"));
+    reader.close();
+  }
+
+  @Test
+  public void testBadCodec() throws IOException {
+    LOG.debug("Using a bad codec for a SortedKeyValueFile...");
+
+    try {
+      new SortedKeyValueFile.Writer.Options().withCodec("foobar");
+      fail("Expected AvroRuntimeException for an unrecognized codec");
+    } catch (AvroRuntimeException e) {
+      assertEquals("Unrecognized codec: foobar", e.getMessage());
+    }
+  }
+
+  @Test
+  public void testWriter() throws IOException {
+    LOG.debug("Writing some records to a SortedKeyValueFile...");
+
+    Configuration conf = new Configuration();
+    SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options()
+        .withKeySchema(Schema.create(Schema.Type.STRING))
+        .withValueSchema(Schema.create(Schema.Type.STRING))
+        .withConfiguration(conf)
+        .withPath(new Path(mTempDir.getRoot().getPath(), "myfile"))
+        .withIndexInterval(2);  // Index every other record.
+
+    SortedKeyValueFile.Writer<CharSequence, CharSequence> writer
+        = new SortedKeyValueFile.Writer<CharSequence, CharSequence>(options);
+
+    try {
+      writer.append("apple", "Apple");  // Will be indexed.
+      writer.append("banana", "Banana");
+      writer.append("carrot", "Carrot");  // Will be indexed.
+      writer.append("durian", "Durian");
+    } finally {
+      writer.close();
+    }
+
+
+    LOG.debug("Checking the generated directory...");
+    File directory = new File(mTempDir.getRoot().getPath(), "myfile");
+    assertTrue(directory.exists());
+
+
+    LOG.debug("Checking the generated index file...");
+    File indexFile = new File(directory, SortedKeyValueFile.INDEX_FILENAME);
+    DatumReader<GenericRecord> indexReader = new GenericDatumReader<GenericRecord>(
+        AvroKeyValue.getSchema(options.getKeySchema(), Schema.create(Schema.Type.LONG)));
+    FileReader<GenericRecord> indexFileReader = DataFileReader.openReader(indexFile, indexReader);
+
+    List<AvroKeyValue<CharSequence, Long>> indexRecords
+        = new ArrayList<AvroKeyValue<CharSequence, Long>>();
+    try {
+      for (GenericRecord indexRecord : indexFileReader) {
+        indexRecords.add(new AvroKeyValue<CharSequence, Long>(indexRecord));
+      }
+    } finally {
+      indexFileReader.close();
+    }
+
+    assertEquals(2, indexRecords.size());
+    assertEquals("apple", indexRecords.get(0).getKey().toString());
+    LOG.debug("apple's position in the file: " + indexRecords.get(0).getValue());
+    assertEquals("carrot", indexRecords.get(1).getKey().toString());
+    LOG.debug("carrot's position in the file: " + indexRecords.get(1).getValue());
+
+    LOG.debug("Checking the generated data file...");
+    File dataFile = new File(directory, SortedKeyValueFile.DATA_FILENAME);
+    DatumReader<GenericRecord> dataReader = new GenericDatumReader<GenericRecord>(
+        AvroKeyValue.getSchema(options.getKeySchema(), options.getValueSchema()));
+    DataFileReader<GenericRecord> dataFileReader
+        = new DataFileReader<GenericRecord>(dataFile, dataReader);
+
+    try {
+      dataFileReader.seek(indexRecords.get(0).getValue());
+      assertTrue(dataFileReader.hasNext());
+      AvroKeyValue<CharSequence, CharSequence> appleRecord
+          = new AvroKeyValue<CharSequence, CharSequence>(dataFileReader.next());
+      assertEquals("apple", appleRecord.getKey().toString());
+      assertEquals("Apple", appleRecord.getValue().toString());
+
+      dataFileReader.seek(indexRecords.get(1).getValue());
+      assertTrue(dataFileReader.hasNext());
+      AvroKeyValue<CharSequence, CharSequence> carrotRecord
+          = new AvroKeyValue<CharSequence, CharSequence>(dataFileReader.next());
+      assertEquals("carrot", carrotRecord.getKey().toString());
+      assertEquals("Carrot", carrotRecord.getValue().toString());
+
+      assertTrue(dataFileReader.hasNext());
+      AvroKeyValue<CharSequence, CharSequence> durianRecord
+          = new AvroKeyValue<CharSequence, CharSequence>(dataFileReader.next());
+      assertEquals("durian", durianRecord.getKey().toString());
+      assertEquals("Durian", durianRecord.getValue().toString());
+    } finally {
+      dataFileReader.close();
+    }
+  }
+
+  @Test
+  public void testReader() throws IOException {
+    Configuration conf = new Configuration();
+    SortedKeyValueFile.Writer.Options writerOptions = new SortedKeyValueFile.Writer.Options()
+        .withKeySchema(Schema.create(Schema.Type.STRING))
+        .withValueSchema(Schema.create(Schema.Type.STRING))
+        .withConfiguration(conf)
+        .withPath(new Path(mTempDir.getRoot().getPath(), "myfile"))
+        .withIndexInterval(2);  // Index every other record.
+
+    SortedKeyValueFile.Writer<CharSequence, CharSequence> writer
+        = new SortedKeyValueFile.Writer<CharSequence, CharSequence>(writerOptions);
+
+    try {
+      writer.append("apple", "Apple");  // Will be indexed.
+      writer.append("banana", "Banana");
+      writer.append("carrot", "Carrot");  // Will be indexed.
+      writer.append("durian", "Durian");
+    } finally {
+      writer.close();
+    }
+
+    LOG.debug("Reading the file back using a reader...");
+    SortedKeyValueFile.Reader.Options readerOptions = new SortedKeyValueFile.Reader.Options()
+        .withKeySchema(Schema.create(Schema.Type.STRING))
+        .withValueSchema(Schema.create(Schema.Type.STRING))
+        .withConfiguration(conf)
+        .withPath(new Path(mTempDir.getRoot().getPath(), "myfile"));
+
+    SortedKeyValueFile.Reader<CharSequence, CharSequence> reader
+        = new SortedKeyValueFile.Reader<CharSequence, CharSequence>(readerOptions);
+
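+    // get() looks up a key through the index; keys that were never written
+    // (before, between, or after the stored keys) come back as null.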
+    try {
+      assertEquals("Carrot", reader.get("carrot").toString());
+      assertEquals("Banana", reader.get("banana").toString());
+      assertNull(reader.get("a-vegetable"));
+      assertNull(reader.get("beet"));
+      assertNull(reader.get("zzz"));
+    } finally {
+      reader.close();
+    }
+  }
+
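+  // A minimal Comparable type with no Avro-generated code, used below to
+  // exercise SortedKeyValueFile with a reflect-based data model.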
+  public static class Stringy implements Comparable<Stringy> {
+    private String s;
+    public Stringy() {}
+    public Stringy(String s) { this.s = s; }
+    @Override public String toString() { return s; }
+    @Override public int hashCode() { return s.hashCode(); }
+    @Override public boolean equals(Object that) {
+      return this.s.equals(that.toString());
+    }
+    @Override public int compareTo(Stringy that) {
+      return this.s.compareTo(that.s);
+    }
+  }
+
+  @Test public void testAlternateModel() throws Exception {
+    LOG.debug("Writing some reflect records...");
+
+    ReflectData model = ReflectData.get();
+
+    Configuration conf = new Configuration();
+    SortedKeyValueFile.Writer.Options options
+      = new SortedKeyValueFile.Writer.Options()
+      .withKeySchema(model.getSchema(Stringy.class))
+      .withValueSchema(model.getSchema(Stringy.class))
+      .withConfiguration(conf)
+      .withPath(new Path(mTempDir.getRoot().getPath(), "reflect"))
+      .withDataModel(model)
+      .withIndexInterval(2);
+
+    SortedKeyValueFile.Writer<Stringy,Stringy> writer
+        = new SortedKeyValueFile.Writer<Stringy,Stringy>(options);
+
+    try {
+      writer.append(new Stringy("apple"), new Stringy("Apple"));
+      writer.append(new Stringy("banana"), new Stringy("Banana"));
+      writer.append(new Stringy("carrot"), new Stringy("Carrot"));
+      writer.append(new Stringy("durian"), new Stringy("Durian"));
+    } finally {
+      writer.close();
+    }
+
+    LOG.debug("Reading the file back using a reader...");
+    SortedKeyValueFile.Reader.Options readerOptions =
+      new SortedKeyValueFile.Reader.Options()
+      .withKeySchema(model.getSchema(Stringy.class))
+      .withValueSchema(model.getSchema(Stringy.class))
+      .withConfiguration(conf)
+      .withPath(new Path(mTempDir.getRoot().getPath(), "reflect"))
+      .withDataModel(model);
+
+    SortedKeyValueFile.Reader<Stringy,Stringy> reader
+      = new SortedKeyValueFile.Reader<Stringy,Stringy>(readerOptions);
+
+    try {
+      assertEquals(new Stringy("Carrot"), reader.get(new Stringy("carrot")));
+      assertEquals(new Stringy("Banana"), reader.get(new Stringy("banana")));
+      assertNull(reader.get(new Stringy("a-vegetable")));
+      assertNull(reader.get(new Stringy("beet")));
+      assertNull(reader.get(new Stringy("zzz")));
+    } finally {
+      reader.close();
+    }
+
+  }
+
+
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroDatumConverterFactory.java b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroDatumConverterFactory.java
new file mode 100644
index 0000000..92e45b5
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroDatumConverterFactory.java
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericFixed;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.avro.mapreduce.AvroJob;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.ByteWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.junit.Before;
+import org.junit.Test;
+
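+/**
+ * Verifies that AvroDatumConverterFactory returns a converter for each
+ * supported input type: Avro wrapper classes pass their datum through, while
+ * Hadoop Writable types are unwrapped to the corresponding Avro value.
+ */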
+public class TestAvroDatumConverterFactory {
+  private Job mJob;
+  private AvroDatumConverterFactory mFactory;
+
+  @Before
+  public void setup() throws IOException {
+    mJob = new Job();
+    mFactory = new AvroDatumConverterFactory(mJob.getConfiguration());
+  }
+
+  @Test
+  public void testConvertAvroKey() throws IOException {
+    AvroJob.setOutputKeySchema(mJob, Schema.create(Schema.Type.STRING));
+
+    AvroKey<CharSequence> avroKey = new AvroKey<CharSequence>("foo");
+    @SuppressWarnings("unchecked")
+    AvroDatumConverter<AvroKey<CharSequence>, ?> converter = mFactory.create(
+        (Class<AvroKey<CharSequence>>) avroKey.getClass());
+    assertEquals("foo", converter.convert(avroKey).toString());
+  }
+
+  @Test
+  public void testConvertAvroValue() throws IOException {
+    AvroJob.setOutputValueSchema(mJob, Schema.create(Schema.Type.INT));
+
+    AvroValue<Integer> avroValue = new AvroValue<Integer>(42);
+    @SuppressWarnings("unchecked")
+    AvroDatumConverter<AvroValue<Integer>, Integer> converter = mFactory.create(
+        (Class<AvroValue<Integer>>) avroValue.getClass());
+    assertEquals(42, converter.convert(avroValue).intValue());
+  }
+
+  @Test
+  public void testConvertBooleanWritable() {
+    AvroDatumConverter<BooleanWritable, Boolean> converter
+        = mFactory.create(BooleanWritable.class);
+    assertEquals(true, converter.convert(new BooleanWritable(true)).booleanValue());
+  }
+
+  @Test
+  public void testConvertBytesWritable() {
+    AvroDatumConverter<BytesWritable, ByteBuffer> converter = mFactory.create(BytesWritable.class);
+    ByteBuffer bytes = converter.convert(new BytesWritable(new byte[] { 1, 2, 3 }));
+    assertEquals(1, bytes.get(0));
+    assertEquals(2, bytes.get(1));
+    assertEquals(3, bytes.get(2));
+  }
+
+  @Test
+  public void testConvertByteWritable() {
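+    // Avro represents a single byte as a fixed(1), so the converter yields a
+    // GenericFixed whose first byte is the original value.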
+    AvroDatumConverter<ByteWritable, GenericFixed> converter = mFactory.create(ByteWritable.class);
+    assertEquals(42, converter.convert(new ByteWritable((byte) 42)).bytes()[0]);
+  }
+
+  @Test
+  public void testConvertDoubleWritable() {
+    AvroDatumConverter<DoubleWritable, Double> converter = mFactory.create(DoubleWritable.class);
+    assertEquals(2.0, converter.convert(new DoubleWritable(2.0)).doubleValue(), 0.00001);
+  }
+
+  @Test
+  public void testConvertFloatWritable() {
+    AvroDatumConverter<FloatWritable, Float> converter = mFactory.create(FloatWritable.class);
+    assertEquals(2.2f, converter.convert(new FloatWritable(2.2f)).floatValue(), 0.00001);
+  }
+
+  @Test
+  public void testConvertIntWritable() {
+    AvroDatumConverter<IntWritable, Integer> converter = mFactory.create(IntWritable.class);
+    assertEquals(2, converter.convert(new IntWritable(2)).intValue());
+  }
+
+  @Test
+  public void testConvertLongWritable() {
+    AvroDatumConverter<LongWritable, Long> converter = mFactory.create(LongWritable.class);
+    assertEquals(123L, converter.convert(new LongWritable(123L)).longValue());
+  }
+
+  @Test
+  public void testConvertNullWritable() {
+    AvroDatumConverter<NullWritable, Object> converter = mFactory.create(NullWritable.class);
+    assertNull(converter.convert(NullWritable.get()));
+  }
+
+  @Test
+  public void testConvertText() {
+    AvroDatumConverter<Text, CharSequence> converter = mFactory.create(Text.class);
+    assertEquals("foo", converter.convert(new Text("foo")).toString());
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroKeyDeserializer.java b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroKeyDeserializer.java
new file mode 100644
index 0000000..92dc129
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroKeyDeserializer.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import static org.junit.Assert.*;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.mapred.AvroWrapper;
+import org.junit.Test;
+
+public class TestAvroKeyDeserializer {
+  @Test
+  public void testDeserialize() throws IOException {
+    // Create a deserializer.
+    Schema writerSchema = Schema.create(Schema.Type.STRING);
+    Schema readerSchema = Schema.create(Schema.Type.STRING);
+    ClassLoader classLoader = this.getClass().getClassLoader();
+    AvroKeyDeserializer<CharSequence> deserializer
+      = new AvroKeyDeserializer<CharSequence>(writerSchema, readerSchema,
+                                              classLoader);
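+
+    // The writer schema describes the serialized bytes; the reader schema is
+    // what the consumer wants, and Avro resolves any differences between them.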
+
+    // Check the schemas.
+    assertEquals(writerSchema, deserializer.getWriterSchema());
+    assertEquals(readerSchema, deserializer.getReaderSchema());
+
+    // Write some records to deserialize.
+    DatumWriter<CharSequence> datumWriter = new GenericDatumWriter<CharSequence>(writerSchema);
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    Encoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null);
+    datumWriter.write("record1", encoder);
+    datumWriter.write("record2", encoder);
+    encoder.flush();
+
+    // Deserialize the records.
+    ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
+    deserializer.open(inputStream);
+    AvroWrapper<CharSequence> record = null;
+
+    record = deserializer.deserialize(record);
+    assertEquals("record1", record.datum().toString());
+
+    record = deserializer.deserialize(record);
+    assertEquals("record2", record.datum().toString());
+
+    deserializer.close();
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSequenceFile.java b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSequenceFile.java
new file mode 100644
index 0000000..44298b2
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSequenceFile.java
@@ -0,0 +1,210 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+public class TestAvroSequenceFile {
+  // Disable checkstyle for this variable.  It must be public to work with JUnit @Rule.
+  // CHECKSTYLE:OFF
+  @Rule
+  public TemporaryFolder mTempDir = new TemporaryFolder();
+  // CHECKSTYLE:ON
+
+  /** Tests that reading and writing avro data works. */
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testReadAvro() throws IOException {
+    Path sequenceFilePath = new Path(new File(mTempDir.getRoot(), "output.seq").getPath());
+
+    writeSequenceFile(sequenceFilePath, AvroKey.class, AvroValue.class,
+        Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.INT),
+        new AvroKey<CharSequence>("one"), new AvroValue<Integer>(1),
+        new AvroKey<CharSequence>("two"), new AvroValue<Integer>(2));
+
+    Configuration conf = new Configuration();
+    FileSystem fs = FileSystem.get(conf);
+    AvroSequenceFile.Reader.Options options = new AvroSequenceFile.Reader.Options()
+        .withFileSystem(fs)
+        .withInputPath(sequenceFilePath)
+        .withKeySchema(Schema.create(Schema.Type.STRING))
+        .withValueSchema(Schema.create(Schema.Type.INT))
+        .withConfiguration(conf);
+    SequenceFile.Reader reader = new AvroSequenceFile.Reader(options);
+
+    AvroKey<CharSequence> key = new AvroKey<CharSequence>();
+    AvroValue<Integer> value = new AvroValue<Integer>();
+
+    // Read the first record.
+    key = (AvroKey<CharSequence>) reader.next(key);
+    assertNotNull(key);
+    assertEquals("one", key.datum().toString());
+    value = (AvroValue<Integer>) reader.getCurrentValue(value);
+    assertNotNull(value);
+    assertEquals(1, value.datum().intValue());
+
+    // Read the second record.
+    key = (AvroKey<CharSequence>) reader.next(key);
+    assertNotNull(key);
+    assertEquals("two", key.datum().toString());
+    value = (AvroValue<Integer>) reader.getCurrentValue(value);
+    assertNotNull(value);
+    assertEquals(2, value.datum().intValue());
+
+    assertNull("Should be no more records.", reader.next(key));
+  }
+
+  /** Tests that reading and writing avro records without a reader schema works. */
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testReadAvroWithoutReaderSchemas() throws IOException {
+    Path sequenceFilePath = new Path(new File(mTempDir.getRoot(), "output.seq").getPath());
+
+    writeSequenceFile(sequenceFilePath, AvroKey.class, AvroValue.class,
+        Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.INT),
+        new AvroKey<CharSequence>("one"), new AvroValue<Integer>(1),
+        new AvroKey<CharSequence>("two"), new AvroValue<Integer>(2));
+
+    Configuration conf = new Configuration();
+    FileSystem fs = FileSystem.get(conf);
+    AvroSequenceFile.Reader.Options options = new AvroSequenceFile.Reader.Options()
+        .withFileSystem(fs)
+        .withInputPath(sequenceFilePath)
+        .withConfiguration(conf);
+    SequenceFile.Reader reader = new AvroSequenceFile.Reader(options);
+
+    AvroKey<CharSequence> key = new AvroKey<CharSequence>();
+    AvroValue<Integer> value = new AvroValue<Integer>();
+
+    // Read the first record.
+    key = (AvroKey<CharSequence>) reader.next(key);
+    assertNotNull(key);
+    assertEquals("one", key.datum().toString());
+    value = (AvroValue<Integer>) reader.getCurrentValue(value);
+    assertNotNull(value);
+    assertEquals(1, value.datum().intValue());
+
+    // Read the second record.
+    key = (AvroKey<CharSequence>) reader.next(key);
+    assertNotNull(key);
+    assertEquals("two", key.datum().toString());
+    value = (AvroValue<Integer>) reader.getCurrentValue(value);
+    assertNotNull(value);
+    assertEquals(2, value.datum().intValue());
+
+    assertNull("Should be no more records.", reader.next(key));
+  }
+
+  /** Tests that reading and writing ordinary Writables still works. */
+  @Test
+  public void testReadWritables() throws IOException {
+    Path sequenceFilePath = new Path(new File(mTempDir.getRoot(), "output.seq").getPath());
+
+    writeSequenceFile(sequenceFilePath, Text.class, IntWritable.class, null, null,
+        new Text("one"), new IntWritable(1),
+        new Text("two"), new IntWritable(2));
+
+    Configuration conf = new Configuration();
+    FileSystem fs = FileSystem.get(conf);
+    AvroSequenceFile.Reader.Options options = new AvroSequenceFile.Reader.Options()
+        .withFileSystem(fs)
+        .withInputPath(sequenceFilePath)
+        .withConfiguration(conf);
+    SequenceFile.Reader reader = new AvroSequenceFile.Reader(options);
+
+    Text key = new Text();
+    IntWritable value = new IntWritable();
+
+    // Read the first record.
+    assertTrue(reader.next(key));
+    assertEquals("one", key.toString());
+    reader.getCurrentValue(value);
+    assertNotNull(value);
+    assertEquals(1, value.get());
+
+    // Read the second record.
+    assertTrue(reader.next(key));
+    assertEquals("two", key.toString());
+    reader.getCurrentValue(value);
+    assertNotNull(value);
+    assertEquals(2, value.get());
+
+    assertFalse("Should be no more records.", reader.next(key));
+  }
+
+  /**
+   * Writes a sequence file of records.
+   *
+   * @param file The target file path.
+   * @param keyClass The Writable class of the key, used when no key schema is given.
+   * @param valueClass The Writable class of the value, used when no value schema is given.
+   * @param keySchema The schema of the key if using Avro, else null.
+   * @param valueSchema The schema of the value if using Avro, else null.
+   * @param records <i>key1</i>, <i>value1</i>, <i>key2</i>, <i>value2</i>, ...
+   */
+  private void writeSequenceFile(Path file, Class<?> keyClass, Class<?> valueClass,
+      Schema keySchema, Schema valueSchema, Object... records) throws IOException {
+    // Make sure the key/value records have an even size.
+    if (0 != records.length % 2) {
+      throw new IllegalArgumentException("Expected a value for each key record.");
+    }
+
+    // Open an AvroSequenceFile writer.
+    Configuration conf = new Configuration();
+    FileSystem fs = FileSystem.get(conf);
+    AvroSequenceFile.Writer.Options options = new AvroSequenceFile.Writer.Options()
+        .withFileSystem(fs)
+        .withConfiguration(conf)
+        .withOutputPath(file);
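+    // Prefer Avro schemas when they were supplied; otherwise fall back to
+    // plain Writable key/value classes so ordinary sequence files still work.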
+    if (null != keySchema) {
+      options.withKeySchema(keySchema);
+    } else {
+      options.withKeyClass(keyClass);
+    }
+    if (null != valueSchema) {
+      options.withValueSchema(valueSchema);
+    } else {
+      options.withValueClass(valueClass);
+    }
+    SequenceFile.Writer writer = new AvroSequenceFile.Writer(options);
+
+    // Write some records.
+    for (int i = 0; i < records.length; i += 2) {
+      writer.append(records[i], records[i + 1]);
+    }
+
+    // Close the file.
+    writer.close();
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSerialization.java b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSerialization.java
new file mode 100644
index 0000000..8bfd673
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSerialization.java
@@ -0,0 +1,178 @@
+/**
+ * Licensed to Odiago, Inc. under one or more contributor license
+ * agreements.  See the NOTICE file distributed with this work for
+ * additional information regarding copyright ownership.  Odiago, Inc.
+ * licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import static org.junit.Assert.*;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.avro.Schema;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.util.Utf8;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.avro.mapred.AvroWrapper;
+import org.apache.avro.mapreduce.AvroJob;
+import org.apache.hadoop.io.serializer.Deserializer;
+import org.apache.hadoop.io.serializer.Serializer;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.junit.Test;
+import org.junit.Assert;
+
+public class TestAvroSerialization {
+  @Test
+  public void testAccept() {
+    AvroSerialization<CharSequence> serialization = new AvroSerialization<CharSequence>();
+
+    assertTrue(serialization.accept(AvroKey.class));
+    assertTrue(serialization.accept(AvroValue.class));
+    assertFalse(serialization.accept(AvroWrapper.class));
+    assertFalse(serialization.accept(String.class));
+  }
+
+  @Test
+  public void testGetSerializerForKey() throws IOException {
+    // Set the writer schema in the job configuration.
+    Schema writerSchema = Schema.create(Schema.Type.STRING);
+    Job job = new Job();
+    AvroJob.setMapOutputKeySchema(job, writerSchema);
+
+    // Get a serializer from the configuration.
+    AvroSerialization serialization
+        = ReflectionUtils.newInstance(AvroSerialization.class, job.getConfiguration());
+    @SuppressWarnings("unchecked")
+    Serializer<AvroWrapper> serializer = serialization.getSerializer(AvroKey.class);
+    assertTrue(serializer instanceof AvroSerializer);
+    AvroSerializer avroSerializer = (AvroSerializer) serializer;
+
+    // Check that the writer schema is set correctly on the serializer.
+    assertEquals(writerSchema, avroSerializer.getWriterSchema());
+  }
+
+  @Test
+  public void testGetSerializerForValue() throws IOException {
+    // Set the writer schema in the job configuration.
+    Schema writerSchema = Schema.create(Schema.Type.STRING);
+    Job job = new Job();
+    AvroJob.setMapOutputValueSchema(job, writerSchema);
+
+    // Get a serializer from the configuration.
+    AvroSerialization serialization
+        = ReflectionUtils.newInstance(AvroSerialization.class, job.getConfiguration());
+    @SuppressWarnings("unchecked")
+    Serializer<AvroWrapper> serializer = serialization.getSerializer(AvroValue.class);
+    assertTrue(serializer instanceof AvroSerializer);
+    AvroSerializer avroSerializer = (AvroSerializer) serializer;
+
+    // Check that the writer schema is set correctly on the serializer.
+    assertEquals(writerSchema, avroSerializer.getWriterSchema());
+  }
+
+  @Test
+  public void testGetDeserializerForKey() throws IOException {
+    // Set the reader schema in the job configuration.
+    Schema readerSchema = Schema.create(Schema.Type.STRING);
+    Job job = new Job();
+    AvroJob.setMapOutputKeySchema(job, readerSchema);
+
+    // Get a deserializer from the configuration.
+    AvroSerialization serialization
+        = ReflectionUtils.newInstance(AvroSerialization.class, job.getConfiguration());
+    @SuppressWarnings("unchecked")
+    Deserializer<AvroWrapper> deserializer = serialization.getDeserializer(AvroKey.class);
+    assertTrue(deserializer instanceof AvroKeyDeserializer);
+    AvroKeyDeserializer avroDeserializer = (AvroKeyDeserializer) deserializer;
+
+    // Check that the reader schema is set correctly on the deserializer.
+    assertEquals(readerSchema, avroDeserializer.getReaderSchema());
+  }
+
+  @Test
+  public void testGetDeserializerForValue() throws IOException {
+    // Set the reader schema in the job configuration.
+    Schema readerSchema = Schema.create(Schema.Type.STRING);
+    Job job = new Job();
+    AvroJob.setMapOutputValueSchema(job, readerSchema);
+
+    // Get a deserializer from the configuration.
+    AvroSerialization serialization
+        = ReflectionUtils.newInstance(AvroSerialization.class, job.getConfiguration());
+    @SuppressWarnings("unchecked")
+    Deserializer<AvroWrapper> deserializer = serialization.getDeserializer(AvroValue.class);
+    assertTrue(deserializer instanceof AvroValueDeserializer);
+    AvroValueDeserializer avroDeserializer = (AvroValueDeserializer) deserializer;
+
+    // Check that the reader schema is set correctly on the deserializer.
+    assertEquals(readerSchema, avroDeserializer.getReaderSchema());
+  }
+
+  @Test public void testClassPath() throws Exception {
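+    // The deserializer should build its ReflectData with the classloader from
+    // the Configuration, so reflect classes resolve on the job's classpath.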
+    Configuration conf = new Configuration();
+    ClassLoader loader = conf.getClass().getClassLoader();
+    AvroSerialization serialization = new AvroSerialization();
+    serialization.setConf(conf);
+    AvroDeserializer des =
+      (AvroDeserializer)serialization.getDeserializer(AvroKey.class);
+    ReflectData data =
+      (ReflectData)((ReflectDatumReader)des.mAvroDatumReader).getData();
+    Assert.assertEquals(loader, data.getClassLoader());
+  }
+
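+  /**
+   * Serializes a datum through AvroSerialization and reads it back, returning
+   * the round-tripped value so callers can check which Java type the
+   * configured data model produced.
+   */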
+  private <T, O> O roundTrip(Schema schema, T data, Class<? extends GenericData> modelClass) throws IOException {
+    Job job = new Job();
+    AvroJob.setMapOutputKeySchema(job, schema);
+    if (modelClass != null)
+      AvroJob.setDataModelClass(job, modelClass);
+    AvroSerialization serialization =
+      ReflectionUtils.newInstance(AvroSerialization.class, job.getConfiguration());
+    Serializer<AvroKey<T>> serializer = serialization.getSerializer(AvroKey.class);
+    Deserializer<AvroKey<O>> deserializer = serialization.getDeserializer(AvroKey.class);
+
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    serializer.open(baos);
+    serializer.serialize(new AvroKey<T>(data));
+    serializer.close();
+
+    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
+    deserializer.open(bais);
+    AvroKey<O> result = null;
+    result = deserializer.deserialize(result);
+    deserializer.close();
+
+    return result.datum();
+  }
+
+  @Test
+  public void testRoundTrip() throws Exception {
+    Schema schema = Schema.create(Schema.Type.STRING);
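+    // The default (reflect) data model decodes Avro strings as String, while
+    // GenericData decodes them as Utf8.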
+    assertTrue(roundTrip(schema, "record", null) instanceof String);
+    assertTrue(roundTrip(schema, "record", GenericData.class) instanceof Utf8);
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSerializer.java b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSerializer.java
new file mode 100644
index 0000000..0f3806d
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSerializer.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import static org.junit.Assert.*;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.mapred.AvroKey;
+import org.junit.Test;
+
+public class TestAvroSerializer {
+  @Test
+  public void testSerialize() throws IOException {
+    // Create a serializer.
+    Schema writerSchema = Schema.create(Schema.Type.STRING);
+    AvroSerializer<CharSequence> serializer = new AvroSerializer<CharSequence>(writerSchema);
+
+    // Check the writer schema.
+    assertEquals(writerSchema, serializer.getWriterSchema());
+
+    // Serialize two records, 'record1' and 'record2'.
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    serializer.open(outputStream);
+    serializer.serialize(new AvroKey<CharSequence>("record1"));
+    serializer.serialize(new AvroKey<CharSequence>("record2"));
+    serializer.close();
+
+    // Make sure the records were serialized correctly.
+    ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
+    Schema readerSchema = Schema.create(Schema.Type.STRING);
+    DatumReader<CharSequence> datumReader = new GenericDatumReader<CharSequence>(readerSchema);
+    Decoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
+    CharSequence record = null;
+
+    record = datumReader.read(record, decoder);
+    assertEquals("record1", record.toString());
+
+    record = datumReader.read(record, decoder);
+    assertEquals("record2", record.toString());
+
+    inputStream.close();
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroValueDeserializer.java b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroValueDeserializer.java
new file mode 100644
index 0000000..ddbd831
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroValueDeserializer.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.io;
+
+import static org.junit.Assert.*;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.mapred.AvroWrapper;
+import org.junit.Test;
+
+public class TestAvroValueDeserializer {
+  @Test
+  public void testDeserialize() throws IOException {
+    // Create a deserializer.
+    Schema writerSchema = Schema.create(Schema.Type.STRING);
+    Schema readerSchema = Schema.create(Schema.Type.STRING);
+    ClassLoader classLoader = this.getClass().getClassLoader();
+    AvroValueDeserializer<CharSequence> deserializer
+      = new AvroValueDeserializer<CharSequence>(writerSchema, readerSchema,
+                                                classLoader);
+
+    // Check the schemas.
+    assertEquals(writerSchema, deserializer.getWriterSchema());
+    assertEquals(readerSchema, deserializer.getReaderSchema());
+
+    // Write some records to deserialize.
+    DatumWriter<CharSequence> datumWriter = new GenericDatumWriter<CharSequence>(writerSchema);
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    Encoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null);
+    datumWriter.write("record1", encoder);
+    datumWriter.write("record2", encoder);
+    encoder.flush();
+
+    // Deserialize the records.
+    ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
+    deserializer.open(inputStream);
+    AvroWrapper<CharSequence> record = null;
+
+    record = deserializer.deserialize(record);
+    assertEquals("record1", record.datum().toString());
+
+    record = deserializer.deserialize(record);
+    assertEquals("record2", record.datum().toString());
+
+    deserializer.close();
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/hadoop/util/TestAvroCharSequenceComparator.java b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/util/TestAvroCharSequenceComparator.java
new file mode 100644
index 0000000..bf17a0e
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/hadoop/util/TestAvroCharSequenceComparator.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.hadoop.util;
+
+import static org.junit.Assert.*;
+import static org.hamcrest.Matchers.*;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import org.apache.avro.util.Utf8;
+
+public class TestAvroCharSequenceComparator {
+  private AvroCharSequenceComparator<CharSequence> mComparator;
+
+  @Before
+  public void setup() {
+    mComparator = new AvroCharSequenceComparator<CharSequence>();
+  }
+
+  @Test
+  public void testCompareString() {
+    assertEquals(0, mComparator.compare("", ""));
+    assertThat(mComparator.compare("", "a"), lessThan(0));
+    assertThat(mComparator.compare("a", ""), greaterThan(0));
+
+    assertEquals(0, mComparator.compare("a", "a"));
+    assertThat(mComparator.compare("a", "b"), lessThan(0));
+    assertThat(mComparator.compare("b", "a"), greaterThan(0));
+
+    assertEquals(0, mComparator.compare("ab", "ab"));
+    assertThat(mComparator.compare("a", "aa"), lessThan(0));
+    assertThat(mComparator.compare("aa", "a"), greaterThan(0));
+
+    assertThat(mComparator.compare("abc", "abcdef"), lessThan(0));
+    assertThat(mComparator.compare("abcdef", "abc"), greaterThan(0));
+  }
+
+  @Test
+  public void testCompareUtf8() {
+    assertEquals(0, mComparator.compare(new Utf8(""), new Utf8("")));
+    assertThat(mComparator.compare(new Utf8(""), new Utf8("a")), lessThan(0));
+    assertThat(mComparator.compare(new Utf8("a"), new Utf8("")), greaterThan(0));
+
+    assertEquals(0, mComparator.compare(new Utf8("a"), new Utf8("a")));
+    assertThat(mComparator.compare(new Utf8("a"), new Utf8("b")), lessThan(0));
+    assertThat(mComparator.compare(new Utf8("b"), new Utf8("a")), greaterThan(0));
+
+    assertEquals(0, mComparator.compare(new Utf8("ab"), new Utf8("ab")));
+    assertThat(mComparator.compare(new Utf8("a"), new Utf8("aa")), lessThan(0));
+    assertThat(mComparator.compare(new Utf8("aa"), new Utf8("a")), greaterThan(0));
+
+    assertThat(mComparator.compare(new Utf8("abc"), new Utf8("abcdef")), lessThan(0));
+    assertThat(mComparator.compare(new Utf8("abcdef"), new Utf8("abc")), greaterThan(0));
+  }
+
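+  // The comparator must give a consistent order across CharSequence
+  // implementations, so mixed Utf8-vs-String comparisons should mirror the
+  // String-only cases above.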
+  @Test
+  public void testCompareUtf8ToString() {
+    assertEquals(0, mComparator.compare(new Utf8(""), ""));
+    assertThat(mComparator.compare(new Utf8(""), "a"), lessThan(0));
+    assertThat(mComparator.compare(new Utf8("a"), ""), greaterThan(0));
+
+    assertEquals(0, mComparator.compare(new Utf8("a"), "a"));
+    assertThat(mComparator.compare(new Utf8("a"), "b"), lessThan(0));
+    assertThat(mComparator.compare(new Utf8("b"), "a"), greaterThan(0));
+
+    assertEquals(0, mComparator.compare(new Utf8("ab"), "ab"));
+    assertThat(mComparator.compare(new Utf8("a"), "aa"), lessThan(0));
+    assertThat(mComparator.compare(new Utf8("aa"), "a"), greaterThan(0));
+
+    assertThat(mComparator.compare(new Utf8("abc"), "abcdef"), lessThan(0));
+    assertThat(mComparator.compare(new Utf8("abcdef"), "abc"), greaterThan(0));
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java
new file mode 100644
index 0000000..81d35ff
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.File;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.junit.Assert;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.JobConf;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestAvroInputFormat {
+  
+  private static final String TEST_DIR = System.getProperty("test.dir", ".") +
+      File.separator + TestAvroInputFormat.class.getName();
+  private JobConf conf;
+  private FileSystem fs;
+  private Path inputDir;
+  
+  @Before
+  public void setUp() throws Exception {
+    conf = new JobConf();
+    fs = FileSystem.getLocal(conf);
+    inputDir = new Path(TEST_DIR);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    fs.delete(inputDir, true);
+  }
+  
+  @SuppressWarnings("rawtypes")
+  @Test
+  public void testIgnoreFilesWithoutExtension() throws Exception {
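+    // AvroInputFormat only lists *.avro inputs unless
+    // IGNORE_FILES_WITHOUT_EXTENSION_KEY is set to false; the two halves of
+    // this test verify both behaviors.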
+    fs.mkdirs(inputDir);
+    Path avroFile = new Path(inputDir, "somefile.avro");
+    Path textFile = new Path(inputDir, "someotherfile.txt");
+    fs.create(avroFile).close();
+    fs.create(textFile).close();
+    
+    FileInputFormat.setInputPaths(conf, inputDir);
+
+    AvroInputFormat inputFormat = new AvroInputFormat();
+    FileStatus[] statuses = inputFormat.listStatus(conf);
+    Assert.assertEquals(1, statuses.length);
+    Assert.assertEquals("somefile.avro", statuses[0].getPath().getName());
+    
+    conf.setBoolean(AvroInputFormat.IGNORE_FILES_WITHOUT_EXTENSION_KEY, false);
+    statuses = inputFormat.listStatus(conf);
+    Assert.assertEquals(2, statuses.length);
+    Set<String> names = new HashSet<String>();
+    names.add(statuses[0].getPath().getName());
+    names.add(statuses[1].getPath().getName());
+    Assert.assertTrue(names.contains("somefile.avro"));
+    Assert.assertTrue(names.contains("someotherfile.txt"));
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleInputs.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleInputs.java
new file mode 100644
index 0000000..278d5c7
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleInputs.java
@@ -0,0 +1,296 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+import java.io.File;
+import java.io.InputStream;
+import java.io.FileInputStream;
+import java.io.BufferedInputStream;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.avro.Schema;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumWriter;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class TestAvroMultipleInputs {
+
+  /** The input-1 record. */
+  public static class NamesRecord {
+    private int id = -1;
+    private CharSequence name = "";
+
+    public NamesRecord() {}
+
+    public NamesRecord(int id, CharSequence name) {
+      this.id = id;
+      this.name = name;
+    }
+
+    @Override
+    public String toString() {
+      return id + "\t" + name;
+    }
+  }
+
+  /** The input-2 record. */
+  public static class BalancesRecord {
+    private int id = -1;
+    private long balance = 0L;
+
+    public BalancesRecord() {}
+
+    public BalancesRecord(int id, long balance) {
+      this.id = id;
+      this.balance = balance;
+    }
+
+    @Override
+    public String toString() {
+      return id + "\t" + balance;
+    }
+  }
+
+  /** The map output key record. */
+  public static class KeyRecord {
+    private int id = -1;
+
+    public KeyRecord() {}
+
+    public KeyRecord(int id) {
+      this.id = id;
+    }
+
+    @Override
+    public String toString() {
+      return Integer.toString(id);
+    }
+  }
+
+  /** The common map output value record.
+   *  Carries a tag specifying which source
+   *  record type it came from.
+   */
+  public static class JoinableRecord {
+    private int id = -1;
+    private CharSequence name = "";
+    private long balance = 0L;
+    private CharSequence recType = "";
+
+    public JoinableRecord() {}
+
+    public JoinableRecord(
+        CharSequence recType,
+        int id,
+        CharSequence name,
+        long balance) {
+      this.id = id;
+      this.recType = recType;
+      this.name = name;
+      this.balance = balance;
+    }
+
+    @Override
+    public String toString() {
+      return recType.toString();
+    }
+  }
+
+  /** The output, combined record. */
+  public static class CompleteRecord {
+    private int id = -1;
+    private CharSequence name = "";
+    private long balance = 0L;
+
+    public CompleteRecord() {}
+
+    public CompleteRecord(int id, CharSequence name, long balance) {
+      this.name = name;
+      this.id = id;
+      this.balance = balance;
+    }
+
+    void setId(int id) { this.id = id; }
+
+    void setName(CharSequence name) { this.name = name; }
+
+    void setBalance(long balance) { this.balance = balance; }
+
+    @Override
+    public String toString() {
+      return id + "\t" + name + "\t" + balance;
+    }
+  }
+
+  public static class NamesMapImpl
+    extends AvroMapper<NamesRecord, Pair<KeyRecord, JoinableRecord>> {
+
+    @Override
+    public void map(
+        NamesRecord nameRecord,
+        AvroCollector<Pair<KeyRecord, JoinableRecord>> collector,
+        Reporter reporter) throws IOException {
+      collector.collect(
+          new Pair<KeyRecord, JoinableRecord>(
+              new KeyRecord(nameRecord.id),
+              new JoinableRecord(nameRecord.getClass().getName(),
+                  nameRecord.id, nameRecord.name, -1L)));
+    }
+
+  }
+
+  public static class BalancesMapImpl
+    extends AvroMapper<BalancesRecord, Pair<KeyRecord, JoinableRecord>> {
+
+    @Override
+    public void map(
+        BalancesRecord balanceRecord,
+        AvroCollector<Pair<KeyRecord, JoinableRecord>> collector,
+        Reporter reporter) throws IOException {
+      collector.collect(
+          new Pair<KeyRecord, JoinableRecord>(
+              new KeyRecord(balanceRecord.id),
+              new JoinableRecord(balanceRecord.getClass().getName(),
+                  balanceRecord.id, "", balanceRecord.balance)));
+    }
+
+  }
+
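+  /** Performs a reduce-side join: merges the tagged records that share an id
+   *  into a single CompleteRecord, taking the name from the NamesRecord side
+   *  and the balance from the BalancesRecord side. */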
+  public static class ReduceImpl
+    extends AvroReducer<KeyRecord, JoinableRecord, CompleteRecord> {
+
+    @Override
+    public void reduce(KeyRecord id, Iterable<JoinableRecord> joinables,
+                       AvroCollector<CompleteRecord> collector,
+                       Reporter reporter) throws IOException {
+      CompleteRecord rec = new CompleteRecord();
+      for (JoinableRecord joinable : joinables) {
+        rec.setId(joinable.id);
+        if (joinable.recType.toString().contains("NamesRecord")) {
+          rec.setName(joinable.name);
+        } else {
+          rec.setBalance(joinable.balance);
+        }
+      }
+      collector.collect(rec);
+    }
+
+  }
+
+  @Test
+  public void testJob() throws Exception {
+    JobConf job = new JobConf();
+    String dir = System.getProperty("test.dir", ".") +
+        "target/testAvroMultipleInputs";
+    Path inputPath1 = new Path(dir + "/in1");
+    Path inputPath2 = new Path(dir + "/in2");
+    Path outputPath = new Path(dir + "/out");
+
+    outputPath.getFileSystem(job).delete(outputPath, true);
+    inputPath1.getFileSystem(job).delete(inputPath1, true);
+    inputPath2.getFileSystem(job).delete(inputPath2, true);
+
+    writeNamesFiles(new File(inputPath1.toUri().getPath()));
+    writeBalancesFiles(new File(inputPath2.toUri().getPath()));
+
+    job.setJobName("multiple-inputs-join");
+    AvroMultipleInputs.addInputPath(job, inputPath1, NamesMapImpl.class,
+        ReflectData.get().getSchema(NamesRecord.class));
+    AvroMultipleInputs.addInputPath(job, inputPath2, BalancesMapImpl.class,
+        ReflectData.get().getSchema(BalancesRecord.class));
+
+    Schema keySchema = ReflectData.get().getSchema(KeyRecord.class);
+    Schema valueSchema = ReflectData.get().getSchema(JoinableRecord.class);
+    AvroJob.setMapOutputSchema(job,
+        Pair.getPairSchema(keySchema, valueSchema));
+    AvroJob.setOutputSchema(job,
+        ReflectData.get().getSchema(CompleteRecord.class));
+
+    AvroJob.setReducerClass(job, ReduceImpl.class);
+    job.setNumReduceTasks(1);
+
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    AvroJob.setReflect(job);
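+    // setReflect makes the job use ReflectData for (de)serialization, so the
+    // plain POJOs above work without generated classes.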
+
+    JobClient.runJob(job);
+
+    validateCompleteFile(new File(new File(dir, "out"), "part-00000.avro"));
+  }
+
+  /**
+   * Writes a "names.avro" file with five sequential <id, name> pairs.
+   */
+  private void writeNamesFiles(File dir) throws IOException {
+    DatumWriter<NamesRecord> writer = new ReflectDatumWriter<NamesRecord>();
+    DataFileWriter<NamesRecord> out = new DataFileWriter<NamesRecord>(writer);
+    File namesFile = new File(dir+"/names.avro");
+    dir.mkdirs();
+    out.create(ReflectData.get().getSchema(NamesRecord.class), namesFile);
+    for (int i=0; i < 5; i++)
+      out.append(new NamesRecord(i, "record"+i));
+    out.close();
+  }
+
+  /**
+   * Writes a "balances.avro" file with five sequential <id, balance> pairs.
+   */
+  private void writeBalancesFiles(File dir) throws IOException {
+    DatumWriter<BalancesRecord> writer =
+        new ReflectDatumWriter<BalancesRecord>();
+    DataFileWriter<BalancesRecord> out =
+        new DataFileWriter<BalancesRecord>(writer);
+    File namesFile = new File(dir+"/balances.avro");
+    dir.mkdirs();
+    out.create(ReflectData.get().getSchema(BalancesRecord.class), namesFile);
+    for (int i=0; i < 5; i++)
+      out.append(new BalancesRecord(i, (long) i+100));
+    out.close();
+  }
+
+  private void validateCompleteFile(File file) throws Exception {
+    DatumReader<CompleteRecord> reader =
+        new ReflectDatumReader<CompleteRecord>();
+    InputStream in = new BufferedInputStream(new FileInputStream(file));
+    DataFileStream<CompleteRecord> records =
+        new DataFileStream<CompleteRecord>(in,reader);
+    int numRecs = 0;
+    for (CompleteRecord rec : records) {
+      assertEquals(numRecs, rec.id);
+      assertEquals(rec.id + 100, rec.balance);
+      assertEquals("record" + rec.id, rec.name.toString());
+      numRecs++;
+    }
+    records.close();
+    assertEquals(5, numRecs);
+  }
+
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java
new file mode 100644
index 0000000..e520c87
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java
@@ -0,0 +1,345 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+import java.util.StringTokenizer;
+
+import org.junit.Assert;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.Reporter;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+import java.io.UnsupportedEncodingException;
+import java.util.Locale;
+
+import org.apache.hadoop.io.Text;
+import org.apache.avro.Schema;
+import org.apache.avro.util.Utf8;
+import org.junit.Test;
+
+public class TestAvroMultipleOutputs {
+
+  private static final String UTF8 = "UTF-8";
+
+  public static class MapImpl extends AvroMapper<Utf8, Pair<Utf8, Long>> {
+    private AvroMultipleOutputs amos;
+
+    public void configure(JobConf job) {
+      this.amos = new AvroMultipleOutputs(job);
+    }
+
+    @Override
+    public void map(Utf8 text, AvroCollector<Pair<Utf8, Long>> collector,
+                    Reporter reporter) throws IOException {
+      StringTokenizer tokens = new StringTokenizer(text.toString());
+      while (tokens.hasMoreTokens()) {
+        String tok = tokens.nextToken();
+        collector.collect(new Pair<Utf8, Long>(new Utf8(tok), 1L));
+        amos.getCollector("myavro2", reporter)
+          .collect(new Pair<Utf8, Long>(new Utf8(tok), 1L).toString());
+      }
+    }
+
+    public void close() throws IOException {
+      amos.close();
+    }
+
+  }
+  
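+  // Word-count reducer that exercises the AvroMultipleOutputs collect
+  // overloads: a named-output collector, collect by name, and collect with
+  // an explicit schema and base output file name.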
+  public static class ReduceImpl
+    extends AvroReducer<Utf8, Long, Pair<Utf8, Long> > {
+    private AvroMultipleOutputs amos;
+    
+    public void configure(JobConf job) {
+      this.amos = new AvroMultipleOutputs(job);
+    }
+
+    @Override
+    public void reduce(Utf8 word, Iterable<Long> counts,
+                       AvroCollector<Pair<Utf8,Long>> collector,
+                       Reporter reporter) throws IOException {
+      long sum = 0;
+      for (long count : counts)
+        sum += count;
+      Pair<Utf8,Long> outputValue = new Pair<Utf8,Long>(word, sum);
+      amos.getCollector("myavro", reporter).collect(outputValue);
+      amos.collect("myavro1", reporter, outputValue.toString());
+      amos.collect("myavro", reporter,
+                   new Pair<Utf8,Long>(new Utf8(""), 0L).getSchema(),
+                   outputValue, "testavrofile");
+      amos.collect("myavro", reporter, Schema.create(Schema.Type.STRING),
+                   outputValue.toString(), "testavrofile1");
+      collector.collect(new Pair<Utf8,Long>(word, sum));
+    }
+    public void close() throws IOException {
+      amos.close();
+    }
+  }
+
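+  // The projection tests read files produced by testJob, so the sub-tests
+  // are invoked from a single @Test method to guarantee their order.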
+  @Test public void runTestsInOrder() throws Exception {
+    testJob();
+    testProjection();
+    testProjection_newmethods();
+    testProjection_newmethods_1();
+    testProjection1();
+    testJob_noreducer();
+    testProjection_noreducer();
+  }
+  
+  @SuppressWarnings("deprecation")
+  public void testJob() throws Exception {
+    JobConf job = new JobConf();
+    
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path outputPath = new Path(dir + "/out");
+    
+    outputPath.getFileSystem(job).delete(outputPath);
+    WordCountUtil.writeLinesFile();
+    
+    job.setJobName("AvroMultipleOutputs");
+    
+    AvroJob.setInputSchema(job, Schema.create(Schema.Type.STRING));
+    AvroJob.setOutputSchema(job,
+                            new Pair<Utf8,Long>(new Utf8(""), 0L).getSchema());
+    
+    AvroJob.setMapperClass(job, MapImpl.class);        
+    AvroJob.setReducerClass(job, ReduceImpl.class);
+    
+    FileInputFormat.setInputPaths(job, new Path(dir + "/in"));
+    FileOutputFormat.setOutputPath(job, outputPath);
+    FileOutputFormat.setCompressOutput(job, false);
+    AvroMultipleOutputs.addNamedOutput(job,"myavro",AvroOutputFormat.class, new Pair<Utf8,Long>(new Utf8(""), 0L).getSchema());
+    AvroMultipleOutputs.addNamedOutput(job,"myavro1",AvroOutputFormat.class, Schema.create(Schema.Type.STRING));
+    AvroMultipleOutputs.addNamedOutput(job,"myavro2",AvroOutputFormat.class, Schema.create(Schema.Type.STRING));   
+    WordCountUtil.setMeta(job);
+
+    JobClient.runJob(job);
+    
+    WordCountUtil.validateCountsFile();
+  }
+  
+  @SuppressWarnings("deprecation")
+  public void testProjection() throws Exception {
+    JobConf job = new JobConf();
+    
+    Integer defaultRank = Integer.valueOf(-1);
+    
+    String jsonSchema = 
+      "{\"type\":\"record\"," +
+      "\"name\":\"org.apache.avro.mapred.Pair\","+
+      "\"fields\": [ " + 
+        "{\"name\":\"rank\", \"type\":\"int\", \"default\": -1}," +
+        "{\"name\":\"value\", \"type\":\"long\"}" + 
+      "]}";
+    
+    Schema readerSchema = Schema.parse(jsonSchema);
+    
+    AvroJob.setInputSchema(job, readerSchema);
+    
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path inputPath = new Path(dir + "/out" + "/myavro-r-00000.avro");
+    FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
+    FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
+
+    
+    AvroRecordReader<Pair<Integer, Long>> recordReader = new AvroRecordReader<Pair<Integer, Long>>(job, fileSplit);
+    
+    AvroWrapper<Pair<Integer, Long>> inputPair = new AvroWrapper<Pair<Integer, Long>>(null);
+    NullWritable ignore = NullWritable.get();
+    
+    long sumOfCounts = 0;
+    long numOfCounts = 0;
+    while(recordReader.next(inputPair, ignore)) {
+      Assert.assertEquals((Integer)inputPair.datum().get(0), defaultRank);
+      sumOfCounts += (Long) inputPair.datum().get(1);
+      numOfCounts++;
+    }
+    
+    Assert.assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
+    
+    long actualSumOfCounts = 0;
+    for(Long count : WordCountUtil.COUNTS.values()) {
+      actualSumOfCounts += count;
+    }
+    
+    Assert.assertEquals(sumOfCounts, actualSumOfCounts);
+
+  }
+  
+  @SuppressWarnings("deprecation")
+  public void testProjection_newmethods() throws Exception {
+    JobConf job = new JobConf();
+    
+    Integer defaultRank = Integer.valueOf(-1);
+    
+    String jsonSchema = 
+      "{\"type\":\"record\"," +
+      "\"name\":\"org.apache.avro.mapred.Pair\","+
+      "\"fields\": [ " + 
+        "{\"name\":\"rank\", \"type\":\"int\", \"default\": -1}," +
+        "{\"name\":\"value\", \"type\":\"long\"}" + 
+      "]}";
+    
+    Schema readerSchema = Schema.parse(jsonSchema);
+    
+    AvroJob.setInputSchema(job, readerSchema);
+    
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path inputPath = new Path(dir + "/out" + "/testavrofile-r-00000.avro");
+    FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
+    FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
+
+    
+    AvroRecordReader<Pair<Integer, Long>> recordReader = new AvroRecordReader<Pair<Integer, Long>>(job, fileSplit);
+    
+    AvroWrapper<Pair<Integer, Long>> inputPair = new AvroWrapper<Pair<Integer, Long>>(null);
+    NullWritable ignore = NullWritable.get();
+    
+    long sumOfCounts = 0;
+    long numOfCounts = 0;
+    while(recordReader.next(inputPair, ignore)) {
+      Assert.assertEquals((Integer)inputPair.datum().get(0), defaultRank);
+      sumOfCounts += (Long) inputPair.datum().get(1);
+      numOfCounts++;
+    }
+    
+    Assert.assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
+    
+    long actualSumOfCounts = 0;
+    for(Long count : WordCountUtil.COUNTS.values()) {
+      actualSumOfCounts += count;
+    }
+    
+    Assert.assertEquals(sumOfCounts, actualSumOfCounts);
+
+  }
+  
+
+  @SuppressWarnings("deprecation")
+  // Test for a differnt schema output
+  public void testProjection1() throws Exception {
+    JobConf job = new JobConf();
+    Schema readerSchema = Schema.create(Schema.Type.STRING);
+    AvroJob.setInputSchema(job, readerSchema);
+
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path inputPath = new Path(dir + "/out" + "/myavro1-r-00000.avro");
+    FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
+    FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
+    AvroWrapper<Utf8> inputPair = new AvroWrapper<Utf8>(null);
+    NullWritable ignore = NullWritable.get();
+    AvroRecordReader<Utf8> recordReader = new AvroRecordReader<Utf8>(job, fileSplit);
+    long sumOfCounts = 0;
+    long numOfCounts = 0;
+    while(recordReader.next(inputPair, ignore)) {
+        sumOfCounts += Long.parseLong(inputPair.datum().toString().split(":")[2].replace("}","").trim());
+        numOfCounts++;
+    }
+    Assert.assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
+    long actualSumOfCounts = 0;
+    for(Long count : WordCountUtil.COUNTS.values()) {
+     actualSumOfCounts += count;
+    }
+    Assert.assertEquals(sumOfCounts, actualSumOfCounts);
+  }
+  
+  @SuppressWarnings("deprecation")
+  // Test for a differnt schema output
+  public void testProjection_newmethods_1() throws Exception {
+    JobConf job = new JobConf();
+    Schema readerSchema = Schema.create(Schema.Type.STRING);
+    AvroJob.setInputSchema(job, readerSchema);
+
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path inputPath = new Path(dir + "/out" + "/testavrofile1-r-00000.avro");
+    FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
+    FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
+    AvroWrapper<Utf8> inputPair = new AvroWrapper<Utf8>(null);
+    NullWritable ignore = NullWritable.get();
+    AvroRecordReader<Utf8> recordReader = new AvroRecordReader<Utf8>(job, fileSplit);
+    long sumOfCounts = 0;
+    long numOfCounts = 0;
+    while(recordReader.next(inputPair, ignore)) {
+        sumOfCounts += Long.parseLong(inputPair.datum().toString().split(":")[2].replace("}","").trim());
+        numOfCounts++;
+    }
+    Assert.assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
+    long actualSumOfCounts = 0;
+    for(Long count : WordCountUtil.COUNTS.values()) {
+     actualSumOfCounts += count;
+    }
+    Assert.assertEquals(sumOfCounts, actualSumOfCounts);
+  }
+
+  @SuppressWarnings("deprecation")
+  public void testJob_noreducer() throws Exception {
+    JobConf job = new JobConf();
+    job.setNumReduceTasks(0);
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path outputPath = new Path(dir + "/out");
+
+    outputPath.getFileSystem(job).delete(outputPath);
+    WordCountUtil.writeLinesFile();
+
+    job.setJobName("AvroMultipleOutputs_noreducer");
+
+    AvroJob.setInputSchema(job, Schema.create(Schema.Type.STRING));
+    AvroJob.setOutputSchema(job,
+                            new Pair<Utf8,Long>(new Utf8(""), 0L).getSchema());
+
+    AvroJob.setMapperClass(job, MapImpl.class);
+
+    FileInputFormat.setInputPaths(job, new Path(dir + "/in"));
+    FileOutputFormat.setOutputPath(job, outputPath);
+    FileOutputFormat.setCompressOutput(job, false);
+    AvroMultipleOutputs.addNamedOutput(job,"myavro2",AvroOutputFormat.class, Schema.create(Schema.Type.STRING));
+    JobClient.runJob(job);
+  }
+  
+  public void testProjection_noreducer() throws Exception {
+    JobConf job = new JobConf();
+    long onel = 1;
+    Schema readerSchema = Schema.create(Schema.Type.STRING);
+    AvroJob.setInputSchema(job, readerSchema);
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path inputPath = new Path(dir + "/out" + "/myavro2-m-00000.avro");
+    FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
+    FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
+    AvroRecordReader<Utf8> recordReader_new = new AvroRecordReader<Utf8>(job, fileSplit);
+    AvroWrapper<Utf8> inputPair_new = new AvroWrapper<Utf8>(null);
+    NullWritable ignore = NullWritable.get();
+    long testl = 0;
+    while (recordReader_new.next(inputPair_new, ignore)) {
+      testl = Long.parseLong(inputPair_new.datum().toString().split(":")[2].replace("}", "").trim());
+      Assert.assertEquals(onel, testl);
+    }
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroOutputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroOutputFormat.java
new file mode 100644
index 0000000..6de4710
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroOutputFormat.java
@@ -0,0 +1,142 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.mapred;
+
+import java.io.UnsupportedEncodingException;
+
+import junit.framework.TestCase;
+
+import org.apache.avro.file.CodecFactory;
+import org.apache.hadoop.mapred.JobConf;
+import org.junit.Test;
+
+public class TestAvroOutputFormat extends TestCase {
+  @Test
+  public void testSetSyncInterval() {
+    JobConf jobConf = new JobConf();
+    int newSyncInterval = 100000;
+    AvroOutputFormat.setSyncInterval(jobConf, newSyncInterval);
+
+    assertEquals(newSyncInterval, jobConf.getInt(
+        AvroOutputFormat.SYNC_INTERVAL_KEY, -1));
+  }
+  
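+  // With compression disabled, no codec factory should be returned, no
+  // matter which codec properties are set.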
+  @Test
+  public void testNoCodec() throws UnsupportedEncodingException {
+    JobConf job = new JobConf();
+    assertTrue(AvroOutputFormat.getCodecFactory(job) == null);
+    
+    job = new JobConf();
+    job.set("mapred.output.compress", "false");
+    job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.BZip2Codec");
+    assertTrue(AvroOutputFormat.getCodecFactory(job) == null);
+    
+    job = new JobConf();
+    job.set("mapred.output.compress", "false");
+    job.set(AvroJob.OUTPUT_CODEC, "bzip2");
+    assertTrue(AvroOutputFormat.getCodecFactory(job) == null);
+  }
+  
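+  // Each of the following tests checks that a Hadoop compression codec
+  // class name (or an explicit Avro codec name) maps to the matching Avro
+  // CodecFactory.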
+  @Test
+  public void testBZip2CodecUsingHadoopClass() throws UnsupportedEncodingException {
+    CodecFactory avroBZip2Codec = CodecFactory.fromString("bzip2");
+    
+    JobConf job = new JobConf();
+    job.set("mapred.output.compress", "true");
+    job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.BZip2Codec");
+    CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
+    assertTrue(factory != null);
+    assertTrue(factory.getClass().equals(avroBZip2Codec.getClass()));    
+  }
+  
+  @Test
+  public void testBZip2CodecUsingAvroCodec() throws UnsupportedEncodingException {
+    CodecFactory avroBZip2Codec = CodecFactory.fromString("bzip2");
+    
+    JobConf job = new JobConf();
+    job.set("mapred.output.compress", "true");
+    job.set(AvroJob.OUTPUT_CODEC, "bzip2");
+    CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
+    assertTrue(factory != null);
+    assertTrue(factory.getClass().equals(avroBZip2Codec.getClass()));    
+  }
+  
+  @Test
+  public void testDeflateCodecUsingHadoopClass() throws UnsupportedEncodingException {
+    CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate");
+    
+    JobConf job = new JobConf();
+    job.set("mapred.output.compress", "true");
+    job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.DeflateCodec");
+    CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
+    assertTrue(factory != null);
+    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));    
+  }
+  
+  @Test
+  public void testDeflateCodecUsingAvroCodec() throws UnsupportedEncodingException {
+    CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate");
+    
+    JobConf job = new JobConf();
+    job.set("mapred.output.compress", "true");
+    job.set(AvroJob.OUTPUT_CODEC, "deflate");
+    CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
+    assertTrue(factory != null);
+    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));    
+  }
+  
+  @Test
+  public void testSnappyCodecUsingHadoopClass() throws UnsupportedEncodingException {
+    CodecFactory avroSnappyCodec = CodecFactory.fromString("snappy");
+    
+    JobConf job = new JobConf();
+    job.set("mapred.output.compress", "true");
+    job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.SnappyCodec");
+    CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
+    assertTrue(factory != null);
+    assertTrue(factory.getClass().equals(avroSnappyCodec.getClass()));    
+  }
+  
+  @Test
+  public void testSnappyCodecUsingAvroCodec() throws UnsupportedEncodingException {
+    CodecFactory avroSnappyCodec = CodecFactory.fromString("snappy");
+    
+    JobConf job = new JobConf();
+    job.set("mapred.output.compress", "true");
+    job.set(AvroJob.OUTPUT_CODEC, "snappy");
+    CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
+    assertTrue(factory != null);
+    assertTrue(factory.getClass().equals(avroSnappyCodec.getClass()));    
+  }
+  
+  @Test
+  public void testGZipCodecUsingHadoopClass() throws UnsupportedEncodingException {
+    CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate");
+    
+    JobConf job = new JobConf();
+    job.set("mapred.output.compress", "true");
+    job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GZipCodec");
+    CodecFactory factory = AvroOutputFormat.getCodecFactory(job);
+    assertTrue(factory != null);
+    assertTrue(factory.getClass().equals(avroDeflateCodec.getClass()));    
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java
new file mode 100644
index 0000000..264251a
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+import java.io.File;
+import java.io.UnsupportedEncodingException;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.RecordWriter;
+import org.junit.Test;
+
+public class TestAvroTextOutputFormat {
+  
+  private static final String UTF8 = "UTF-8";
+
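+  // Null and NullWritable keys/values are skipped: only the non-null side
+  // of a pair is written, and both sides are joined by the tab delimiter
+  // when both are present.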
+  @Test
+  public void testAvroTextRecordWriter() throws Exception {
+    File file = new File(System.getProperty("test.dir", "."), "writer");
+    Schema schema = Schema.create(Schema.Type.BYTES);
+    DatumWriter<ByteBuffer> datumWriter =
+      new GenericDatumWriter<ByteBuffer>(schema);
+    DataFileWriter<ByteBuffer> fileWriter =
+      new DataFileWriter<ByteBuffer>(datumWriter);
+    fileWriter.create(schema, file);
+    RecordWriter<Object, Object> rw = new AvroTextOutputFormat<Object, Object>()
+      .new AvroTextRecordWriter(fileWriter, "\t".getBytes(UTF8));
+    
+    rw.write(null, null);
+    rw.write(null, NullWritable.get());
+    rw.write(NullWritable.get(), null);
+    rw.write(NullWritable.get(), NullWritable.get());
+    
+    rw.write("k1", null);
+    rw.write("k2", NullWritable.get());
+
+    rw.write(null, "v1");
+    rw.write(NullWritable.get(), "v2");
+
+    rw.write("k3", "v3");
+    rw.write(new Text("k4"), new Text("v4"));
+    
+    rw.close(null);
+
+    DatumReader<ByteBuffer> reader = new GenericDatumReader<ByteBuffer>();
+    DataFileReader<ByteBuffer> fileReader =
+      new DataFileReader<ByteBuffer>(file, reader);
+    assertEquals("k1", asString(fileReader.next()));
+    assertEquals("k2", asString(fileReader.next()));
+    assertEquals("v1", asString(fileReader.next()));
+    assertEquals("v2", asString(fileReader.next()));
+    assertEquals("k3\tv3", asString(fileReader.next()));
+    assertEquals("k4\tv4", asString(fileReader.next()));
+    assertFalse("End", fileReader.hasNext());
+  }
+  
+  private String asString(ByteBuffer buf) throws UnsupportedEncodingException {
+    byte[] b = new byte[buf.remaining()];
+    buf.get(b);
+    return new String(b, UTF8);
+  }
+
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java
new file mode 100644
index 0000000..7273bdd
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.junit.Test;
+
+public class TestAvroTextSort {
+  
+  /**
+   * Run the identity job on a "bytes" Avro file using AvroAsTextInputFormat
+   * and AvroTextOutputFormat to produce a sorted "bytes" Avro file.
+   */
+  @Test
+  public void testSort() throws Exception {
+    JobConf job = new JobConf();
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path outputPath = new Path(dir + "/out");
+    
+    outputPath.getFileSystem(job).delete(outputPath);
+    WordCountUtil.writeLinesBytesFile();
+    
+    job.setInputFormat(AvroAsTextInputFormat.class);
+    job.setOutputFormat(AvroTextOutputFormat.class);
+    job.setOutputKeyClass(Text.class);
+    
+    FileInputFormat.setInputPaths(job, new Path(dir + "/in"));
+    FileOutputFormat.setOutputPath(job, outputPath);
+    
+    JobClient.runJob(job);
+    
+    WordCountUtil.validateSortedFile();
+  }
+
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroWrapper.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroWrapper.java
new file mode 100644
index 0000000..4fd71c7
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroWrapper.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import static org.junit.Assert.*;
+
+import org.junit.Test;
+
+public class TestAvroWrapper {
+  @Test
+  public void testToString() {
+    String datum = "my string";
+    AvroWrapper<CharSequence> wrapper = new AvroWrapper<CharSequence>(datum);
+    assertEquals(datum, wrapper.toString());
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestGenericJob.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestGenericJob.java
new file mode 100644
index 0000000..5dcbb6c
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestGenericJob.java
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.mapred;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.generic.GenericData;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.TextInputFormat;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+@SuppressWarnings("deprecation")
+public class TestGenericJob {
+  private static final String dir =
+    System.getProperty("test.dir", ".") + "target/testGenericJob";
+
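+  // Builds a "Container" record whose single field is an array of a union
+  // of five inner record schemas.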
+  private static Schema createSchema() {
+    List<Field> fields = new ArrayList<Schema.Field>();
+
+    fields.add(new Field("Optional", createArraySchema(), "",
+                         new ArrayList<Object>()));
+
+    Schema recordSchema =
+      Schema.createRecord("Container", "", "org.apache.avro.mapred", false);
+    recordSchema.setFields(fields);
+    return recordSchema;
+  }
+
+  private static Schema createArraySchema() {
+    List<Schema> schemas = new ArrayList<Schema>();
+    for (int i = 0; i < 5; i++) {
+      schemas.add(createInnerSchema("optional_field_" + i));
+    }
+        
+    Schema unionSchema = Schema.createUnion(schemas);
+    return Schema.createArray(unionSchema);
+  }
+
+  private static Schema createInnerSchema(String name) {
+    Schema innerrecord = Schema.createRecord(name, "", "", false);
+    innerrecord.setFields
+      (Arrays.asList(new Field(name, Schema.create(Type.LONG), "", 0L)));
+    return innerrecord;
+  }
+
+  @Before
+  public void setup() throws IOException {
+    // needed to satisfy the framework only - input ignored in mapper
+    File indir = new File(dir);
+    indir.mkdirs();
+    File infile = new File(dir + "/in");
+    RandomAccessFile file = new RandomAccessFile(infile, "rw");
+    // add some data so framework actually calls our mapper
+    file.writeChars("aa bb cc\ndd ee ff\n");
+    file.close();
+  }
+    
+  @After
+  public void tearDown() throws IOException {
+    FileUtil.fullyDelete(new File(dir));
+  }
+
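+  // Maps each input line to a Pair<Long, Container> wrapped for Avro
+  // output; the line's content is ignored beyond triggering the mapper.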
+  static class AvroTestConverter
+    extends MapReduceBase
+    implements Mapper<LongWritable, Text,
+               AvroWrapper<Pair<Long, GenericData.Record>>, NullWritable> {
+      
+    public void map(LongWritable key, Text value, 
+                    OutputCollector<AvroWrapper<Pair<Long,GenericData.Record>>,NullWritable> out, 
+                    Reporter reporter) throws IOException {
+      GenericData.Record optional_entry =
+        new GenericData.Record(createInnerSchema("optional_field_1"));
+      optional_entry.put("optional_field_1", 0l);
+      GenericData.Array<GenericData.Record> array =
+        new GenericData.Array<GenericData.Record>(1, createArraySchema());
+      array.add(optional_entry);
+
+      GenericData.Record container = new GenericData.Record(createSchema());
+      container.put("Optional", array);
+
+      out.collect(new AvroWrapper<Pair<Long,GenericData.Record>>
+                  (new Pair<Long,GenericData.Record>(key.get(), container)),
+                  NullWritable.get());
+    }
+  }  
+
+
+  @Test
+  public void testJob() throws Exception {
+    JobConf job = new JobConf();
+    Path outputPath = new Path(dir + "/out");
+    outputPath.getFileSystem(job).delete(outputPath);
+        
+    job.setInputFormat(TextInputFormat.class);
+    FileInputFormat.setInputPaths(job, dir + "/in");
+        
+    job.setMapperClass(AvroTestConverter.class);
+    job.setNumReduceTasks(0);
+
+    FileOutputFormat.setOutputPath(job, outputPath);
+    System.out.println(createSchema());
+    AvroJob.setOutputSchema(job,
+                            Pair.getPairSchema(Schema.create(Schema.Type.LONG),
+                                               createSchema()));
+    job.setOutputFormat(AvroOutputFormat.class);
+
+    JobClient.runJob(job);
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestPair.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestPair.java
new file mode 100644
index 0000000..e7110f5
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestPair.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.util.ArrayList;
+
+import org.apache.avro.AvroRuntimeException;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+public class TestPair {
+
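+  // Pair cannot infer an Avro schema for a raw, untyped Collection element,
+  // so construction should fail with an AvroRuntimeException.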
+  @Test public void testCollectionFailure() throws Exception {
+    try {
+      new Pair("foo", new ArrayList());
+    } catch (AvroRuntimeException e) {
+      assertTrue(e.getMessage().startsWith("Cannot infer schema"));
+      return;
+    }
+    fail("Expected an AvroRuntimeException");
+  }
+
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestReflectJob.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestReflectJob.java
new file mode 100644
index 0000000..e8a63f1
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestReflectJob.java
@@ -0,0 +1,159 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+import java.io.File;
+import java.io.InputStream;
+import java.io.FileInputStream;
+import java.io.BufferedInputStream;
+import java.util.StringTokenizer;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.Reporter;
+
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumWriter;
+import org.apache.avro.reflect.ReflectDatumReader;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+public class TestReflectJob {
+
+  /** The input class. */
+  public static class Text {
+    private String text = "";
+    public Text() {}
+    public Text(String text) { this.text = text; }
+    public String toString() { return text; }
+  }
+
+  /** The intermediate data class. */
+  public static class Count {
+    private long count;
+    public Count() {}
+    public Count(long count) { this.count = count; }
+  }
+
+  /** The output class. */
+  public static class WordCount {
+    private String word;
+    private long count;
+    public WordCount() {}
+    public WordCount(String word, long count) {
+      this.word = word;
+      this.count = count;
+    }
+  }
+
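+  /** Tokenizes each Text line into (Text, Count(1)) pairs. */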
+  public static class MapImpl extends AvroMapper<Text, Pair<Text,Count>> {
+    @Override
+    public void map(Text text, AvroCollector<Pair<Text,Count>> collector,
+                    Reporter reporter) throws IOException {
+      StringTokenizer tokens = new StringTokenizer(text.toString());
+      while (tokens.hasMoreTokens())
+        collector.collect(new Pair<Text,Count>(new Text(tokens.nextToken()),
+                                               new Count(1L)));
+    }
+  }
+  
+  public static class ReduceImpl
+    extends AvroReducer<Text, Count, WordCount> {
+    @Override
+    public void reduce(Text word, Iterable<Count> counts,
+                       AvroCollector<WordCount> collector,
+                       Reporter reporter) throws IOException {
+      long sum = 0;
+      for (Count count : counts)
+        sum += count.count;
+      collector.collect(new WordCount(word.text, sum));
+    }
+  }    
+
+  @Test
+  @SuppressWarnings("deprecation")
+  public void testJob() throws Exception {
+    JobConf job = new JobConf();
+    String dir = System.getProperty("test.dir", ".") + "target/testReflectJob";
+    Path inputPath = new Path(dir + "/in");
+    Path outputPath = new Path(dir + "/out");
+
+    outputPath.getFileSystem(job).delete(outputPath);
+    inputPath.getFileSystem(job).delete(inputPath);
+
+    writeLinesFile(new File(dir+"/in"));
+    
+    job.setJobName("reflect");
+    
+    AvroJob.setInputSchema(job, ReflectData.get().getSchema(Text.class));
+    AvroJob.setMapOutputSchema
+      (job, new Pair(new Text(""), new Count(0L)).getSchema());
+    AvroJob.setOutputSchema(job, ReflectData.get().getSchema(WordCount.class));
+    
+    AvroJob.setMapperClass(job, MapImpl.class);        
+    //AvroJob.setCombinerClass(job, ReduceImpl.class);
+    AvroJob.setReducerClass(job, ReduceImpl.class);
+    
+    FileInputFormat.setInputPaths(job, inputPath);
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    AvroJob.setReflect(job);                      // use reflection
+
+    JobClient.runJob(job);
+    
+    validateCountsFile(new File(new File(dir, "out"), "part-00000.avro"));
+  }
+
+  private void writeLinesFile(File dir) throws IOException {
+    DatumWriter<Text> writer = new ReflectDatumWriter<Text>();
+    DataFileWriter<Text> out = new DataFileWriter<Text>(writer);
+    File linesFile = new File(dir+"/lines.avro");
+    dir.mkdirs();
+    out.create(ReflectData.get().getSchema(Text.class), linesFile);
+    for (String line : WordCountUtil.LINES)
+      out.append(new Text(line));
+    out.close();
+  }
+  
+  private void validateCountsFile(File file) throws Exception {
+    DatumReader<WordCount> reader = new ReflectDatumReader<WordCount>();
+    InputStream in = new BufferedInputStream(new FileInputStream(file));
+    DataFileStream<WordCount> counts = new DataFileStream<WordCount>(in,reader);
+    int numWords = 0;
+    for (WordCount wc : counts) {
+      assertEquals(wc.word,
+                   WordCountUtil.COUNTS.get(wc.word),
+                   (Long)wc.count);
+      numWords++;
+    }
+    in.close();
+    assertEquals(WordCountUtil.COUNTS.size(), numWords);
+  }
+
+
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java
new file mode 100644
index 0000000..1a3c966
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java
@@ -0,0 +1,250 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.mapred;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+import java.io.File;
+import java.net.URI;
+import java.util.Iterator;
+
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.util.Utf8;
+
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestSequenceFileReader {
+  private static final int COUNT =
+    Integer.parseInt(System.getProperty("test.count", "10"));
+  private static final File DIR
+    = new File(System.getProperty("test.dir", "."));
+  private static final File FILE = new File(DIR, "test.seq");
+
+  private static final Schema SCHEMA
+    = Pair.getPairSchema(Schema.create(Schema.Type.LONG),
+                         Schema.create(Schema.Type.STRING));
+
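+  // Writes a plain Hadoop SequenceFile of (LongWritable, Text) pairs; the
+  // tests below read it back through the Avro mapred APIs.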
+  @BeforeClass
+  public static void testWriteSequenceFile() throws IOException {
+    FILE.delete();
+    Configuration c = new Configuration();
+    URI uri = FILE.toURI();
+    SequenceFile.Writer writer
+      = new SequenceFile.Writer(FileSystem.get(uri, c), c,
+                                new Path(uri.toString()),
+                                LongWritable.class, Text.class);
+    final LongWritable key = new LongWritable();
+    final Text val = new Text();
+    for (int i = 0; i < COUNT; ++i) {
+      key.set(i);
+      val.set(Integer.toString(i));
+      writer.append(key, val);
+    }
+    writer.close();
+  }
+
+  @Test
+  public void testReadSequenceFile() throws Exception {
+    checkFile(new SequenceFileReader<Long,CharSequence>(FILE));
+  }
+
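+  // Asserts that the reader yields exactly COUNT pairs of (i, toString(i)).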
+  public void checkFile(FileReader<Pair<Long,CharSequence>> reader) throws Exception {
+    long i = 0;
+    for (Pair<Long,CharSequence> p : reader) {
+      assertEquals((Long)i, p.key());
+      assertEquals(Long.toString(i), p.value().toString());
+      i++;
+    }
+    assertEquals(COUNT, i);
+    reader.close();
+  }
+
+  @Test
+  public void testSequenceFileInputFormat() throws Exception {
+    JobConf job = new JobConf();
+    Path output = new Path(System.getProperty("test.dir",".")+"/seq-out");
+
+    output.getFileSystem(job).delete(output);
+    
+    // configure input for Avro from sequence file
+    AvroJob.setInputSequenceFile(job);
+    FileInputFormat.setInputPaths(job, FILE.toURI().toString());
+    AvroJob.setInputSchema(job, SCHEMA);
+
+    // mapper is default, identity
+    // reducer is default, identity
+
+    // configure output for avro
+    AvroJob.setOutputSchema(job, SCHEMA);
+    FileOutputFormat.setOutputPath(job, output);
+    
+    JobClient.runJob(job);
+
+    checkFile(new DataFileReader<Pair<Long,CharSequence>>
+              (new File(output.toString()+"/part-00000.avro"),
+               new SpecificDatumReader<Pair<Long,CharSequence>>()));
+  }
+
+  private static class NonAvroMapper
+    extends MapReduceBase
+    implements Mapper<LongWritable,Text,AvroKey<Long>,AvroValue<Utf8>> {
+    
+    public void map(LongWritable key, Text value, 
+                  OutputCollector<AvroKey<Long>,AvroValue<Utf8>> out, 
+                  Reporter reporter) throws IOException {
+      out.collect(new AvroKey<Long>(key.get()),
+                  new AvroValue<Utf8>(new Utf8(value.toString())));
+    }
+  }
+
+  @Test
+  public void testNonAvroMapper() throws Exception {
+    JobConf job = new JobConf();
+    Path output = new Path(System.getProperty("test.dir",".")+"/seq-out");
+
+    output.getFileSystem(job).delete(output);
+    
+    // configure input for non-Avro sequence file
+    job.setInputFormat(SequenceFileInputFormat.class);
+    FileInputFormat.setInputPaths(job, FILE.toURI().toString());
+
+    // use a hadoop mapper that emits Avro output
+    job.setMapperClass(NonAvroMapper.class);
+
+    // reducer is default, identity
+
+    // configure output for avro
+    FileOutputFormat.setOutputPath(job, output);
+    AvroJob.setOutputSchema(job, SCHEMA);
+
+    JobClient.runJob(job);
+
+    checkFile(new DataFileReader<Pair<Long,CharSequence>>
+              (new File(output.toString()+"/part-00000.avro"),
+               new SpecificDatumReader<Pair<Long,CharSequence>>()));
+  }
+
+  private static class NonAvroOnlyMapper
+    extends MapReduceBase
+    implements Mapper<LongWritable,Text,AvroWrapper<Pair<Long,Utf8>>,NullWritable> {
+    
+    public void map(LongWritable key, Text value, 
+                    OutputCollector<AvroWrapper<Pair<Long,Utf8>>,NullWritable> out, 
+                    Reporter reporter) throws IOException {
+      out.collect(new AvroWrapper<Pair<Long,Utf8>>(new Pair<Long,Utf8>(key.get(), new Utf8(value.toString()))),
+                  NullWritable.get());
+    }
+  }
+
+  @Test
+  public void testNonAvroMapOnly() throws Exception {
+    JobConf job = new JobConf();
+    Path output = new Path(System.getProperty("test.dir",".")+"/seq-out");
+
+    output.getFileSystem(job).delete(output);
+    
+
+    // configure input for non-Avro sequence file
+    job.setInputFormat(SequenceFileInputFormat.class);
+    FileInputFormat.setInputPaths(job, FILE.toURI().toString());
+
+    // use a hadoop mapper that emits Avro output
+    job.setMapperClass(NonAvroOnlyMapper.class);
+
+    // configure output for avro
+    job.setNumReduceTasks(0);                     // map-only
+    FileOutputFormat.setOutputPath(job, output);
+    AvroJob.setOutputSchema(job, SCHEMA);
+
+    JobClient.runJob(job);
+
+    checkFile(new DataFileReader<Pair<Long,CharSequence>>
+              (new File(output.toString()+"/part-00000.avro"),
+               new SpecificDatumReader<Pair<Long,CharSequence>>()));
+  }
+
+  private static class NonAvroReducer
+    extends MapReduceBase
+    implements Reducer<AvroKey<Long>,AvroValue<Utf8>,LongWritable,Text> {
+    
+    public void reduce(AvroKey<Long> key, Iterator<AvroValue<Utf8>> values,
+                       OutputCollector<LongWritable, Text> out, 
+                       Reporter reporter) throws IOException {
+      while (values.hasNext()) {
+        AvroValue<Utf8> value = values.next();
+        out.collect(new LongWritable(key.datum()),
+                    new Text(value.datum().toString()));
+      }
+    }
+  }
+
+  @Test
+  public void testNonAvroReducer() throws Exception {
+    JobConf job = new JobConf();
+    Path output = new Path(System.getProperty("test.dir",".")+"/seq-out");
+
+    output.getFileSystem(job).delete(output);
+    
+    // configure input for Avro from sequence file
+    AvroJob.setInputSequenceFile(job);
+    AvroJob.setInputSchema(job, SCHEMA);
+    FileInputFormat.setInputPaths(job, FILE.toURI().toString());
+
+    // mapper is default, identity
+
+    // use a hadoop reducer that consumes Avro input
+    AvroJob.setMapOutputSchema(job, SCHEMA);
+    job.setReducerClass(NonAvroReducer.class);
+
+    // configure output for non-Avro SequenceFile
+    job.setOutputFormat(SequenceFileOutputFormat.class);
+    FileOutputFormat.setOutputPath(job, output);
+
+    // output key/value classes are default, LongWritable/Text
+
+    JobClient.runJob(job);
+
+    checkFile(new SequenceFileReader<Long,CharSequence>
+              (new File(output.toString()+"/part-00000")));
+  }
+
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWeather.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWeather.java
new file mode 100644
index 0000000..c32c403
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWeather.java
@@ -0,0 +1,191 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+import java.io.File;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.Reporter;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.file.DataFileReader;
+import static org.apache.avro.file.DataFileConstants.SNAPPY_CODEC;
+import static org.junit.Assert.assertEquals;
+
+import org.junit.After;
+import org.junit.Test;
+
+import test.Weather;
+
+/** Tests mapred API with a specific record. */
+public class TestWeather {
+
+  private static final AtomicInteger mapCloseCalls = new AtomicInteger();
+  private static final AtomicInteger mapConfigureCalls = new AtomicInteger();
+  private static final AtomicInteger reducerCloseCalls = new AtomicInteger();
+  private static final AtomicInteger reducerConfigureCalls = new AtomicInteger();
+
+
+  @After
+  public void tearDown() {
+    mapCloseCalls.set(0);
+    mapConfigureCalls.set(0);
+    reducerCloseCalls.set(0);
+    reducerConfigureCalls.set(0);
+  }
+
+  /** Uses default mapper with no reduces for a map-only identity job. */
+  @Test
+  @SuppressWarnings("deprecation")
+  public void testMapOnly() throws Exception {
+    JobConf job = new JobConf();
+    String inDir = System.getProperty("share.dir","../../../share")+"/test/data";
+    Path input = new Path(inDir+"/weather.avro");
+    Path output = new Path(System.getProperty("test.dir","target/test")+"/weather-ident");
+    
+    output.getFileSystem(job).delete(output);
+    
+    job.setJobName("identity map weather");
+    
+    AvroJob.setInputSchema(job, Weather.SCHEMA$);
+    AvroJob.setOutputSchema(job, Weather.SCHEMA$);
+
+    FileInputFormat.setInputPaths(job, input);
+    FileOutputFormat.setOutputPath(job, output);
+    FileOutputFormat.setCompressOutput(job, true);
+    
+    job.setNumReduceTasks(0);                     // map-only
+
+    JobClient.runJob(job);
+
+    // check output is correct
+    DatumReader<Weather> reader = new SpecificDatumReader<Weather>();
+    DataFileReader<Weather> check = new DataFileReader<Weather>
+      (new File(inDir+"/weather.avro"), reader);
+    DataFileReader<Weather> sorted = new DataFileReader<Weather>
+      (new File(output.toString()+"/part-00000.avro"), reader);
+
+    for (Weather w : sorted)
+      assertEquals(check.next(), w);
+
+    check.close();
+    sorted.close();
+  }
+
+  // maps input Weather to Pair<Weather,Void>, to sort by Weather
+  public static class SortMapper extends AvroMapper<Weather,Pair<Weather,Void>>{
+    @Override
+    public void map(Weather w, AvroCollector<Pair<Weather,Void>> collector,
+                      Reporter reporter) throws IOException {
+      collector.collect(new Pair<Weather,Void>(w, (Void)null));
+    }
+
+    @Override
+    public void close() throws IOException {
+      mapCloseCalls.incrementAndGet();
+    }
+
+    @Override
+    public void configure(JobConf jobConf) {
+      mapConfigureCalls.incrementAndGet();
+    }
+  }
+
+  // output keys only, since values are empty
+  public static class SortReducer
+    extends AvroReducer<Weather, Void, Weather> {
+    @Override
+    public void reduce(Weather w, Iterable<Void> ignore,
+                       AvroCollector<Weather> collector,
+                       Reporter reporter) throws IOException {
+      collector.collect(w);
+    }
+
+    @Override
+    public void close() throws IOException {
+      reducerCloseCalls.incrementAndGet();
+    }
+
+    @Override
+    public void configure(JobConf jobConf) {
+      reducerConfigureCalls.incrementAndGet();
+    }
+  }
+
+  @Test
+  @SuppressWarnings("deprecation")
+  public void testSort() throws Exception {
+    JobConf job = new JobConf();
+    String inDir = System.getProperty("share.dir","../../../share")+"/test/data";
+    Path input = new Path(inDir+"/weather.avro");
+    Path output = new Path(System.getProperty("test.dir","target/test")+"/weather-sort");
+    
+    output.getFileSystem(job).delete(output);
+    
+    job.setJobName("sort weather");
+    
+    AvroJob.setInputSchema(job, Weather.SCHEMA$);
+    AvroJob.setMapOutputSchema
+      (job, Pair.getPairSchema(Weather.SCHEMA$, Schema.create(Type.NULL)));
+    AvroJob.setOutputSchema(job, Weather.SCHEMA$);
+    
+    AvroJob.setMapperClass(job, SortMapper.class);        
+    AvroJob.setReducerClass(job, SortReducer.class);
+
+    FileInputFormat.setInputPaths(job, input);
+    FileOutputFormat.setOutputPath(job, output);
+    FileOutputFormat.setCompressOutput(job, true);
+    AvroJob.setOutputCodec(job, SNAPPY_CODEC);
+    
+    JobClient.runJob(job);
+
+    // check output is correct
+    DatumReader<Weather> reader = new SpecificDatumReader<Weather>();
+    DataFileReader<Weather> check = new DataFileReader<Weather>
+      (new File(inDir+"/weather-sorted.avro"), reader);
+    DataFileReader<Weather> sorted = new DataFileReader<Weather>
+      (new File(output.toString()+"/part-00000.avro"), reader);
+
+    for (Weather w : sorted)
+      assertEquals(check.next(), w);
+
+    check.close();
+    sorted.close();
+
+    // check that AvroMapper and AvroReducer get close() and configure() called
+    assertEquals(1, mapCloseCalls.get());
+    assertEquals(1, reducerCloseCalls.get());
+    assertEquals(1, mapConfigureCalls.get());
+    assertEquals(1, reducerConfigureCalls.get());
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java
new file mode 100644
index 0000000..4e729dc
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import java.io.IOException;
+import java.util.StringTokenizer;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.Reporter;
+
+import org.apache.avro.Schema;
+import org.apache.avro.util.Utf8;
+import org.junit.Test;
+
+public class TestWordCount {
+
+  public static class MapImpl extends AvroMapper<Utf8, Pair<Utf8, Long> > {
+    @Override
+    public void map(Utf8 text, AvroCollector<Pair<Utf8,Long>> collector,
+                    Reporter reporter) throws IOException {
+      StringTokenizer tokens = new StringTokenizer(text.toString());
+      while (tokens.hasMoreTokens())
+        collector.collect(new Pair<Utf8,Long>(new Utf8(tokens.nextToken()),1L));
+    }
+  }
+  
+  public static class ReduceImpl
+    extends AvroReducer<Utf8, Long, Pair<Utf8, Long> > {
+    @Override
+    public void reduce(Utf8 word, Iterable<Long> counts,
+                       AvroCollector<Pair<Utf8,Long>> collector,
+                       Reporter reporter) throws IOException {
+      long sum = 0;
+      for (long count : counts)
+        sum += count;
+      collector.collect(new Pair<Utf8,Long>(word, sum));
+    }
+  }    
+
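+  // testProjection reads the output written by testJob, so both run from a
+  // single @Test method to guarantee their order.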
+  @Test public void runTestsInOrder() throws Exception {
+    testJob();
+    testProjection();
+  }
+
+  @SuppressWarnings("deprecation")
+  public void testJob() throws Exception {
+    JobConf job = new JobConf();
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path outputPath = new Path(dir + "/out");
+    
+    outputPath.getFileSystem(job).delete(outputPath);
+    WordCountUtil.writeLinesFile();
+    
+    job.setJobName("wordcount");
+    
+    AvroJob.setInputSchema(job, Schema.create(Schema.Type.STRING));
+    AvroJob.setOutputSchema(job,
+                            new Pair<Utf8,Long>(new Utf8(""), 0L).getSchema());
+    
+    AvroJob.setMapperClass(job, MapImpl.class);        
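+    // Word-count sums are associative and commutative, so the reducer can
+    // safely double as the combiner.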
+    AvroJob.setCombinerClass(job, ReduceImpl.class);
+    AvroJob.setReducerClass(job, ReduceImpl.class);
+    
+    FileInputFormat.setInputPaths(job, new Path(dir + "/in"));
+    FileOutputFormat.setOutputPath(job, outputPath);
+    FileOutputFormat.setCompressOutput(job, true);
+    
+    WordCountUtil.setMeta(job);
+
+    JobClient.runJob(job);
+    
+    WordCountUtil.validateCountsFile();
+  }
+  
+  @SuppressWarnings("deprecation")
+  public void testProjection() throws Exception {
+    JobConf job = new JobConf();
+    
+    Integer defaultRank = Integer.valueOf(-1);
+    
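+    // The reader schema projects the writer's Pair<Utf8,Long>: the "key"
+    // field is dropped, and the added "rank" field is populated from its
+    // default value of -1 for every record.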
+    String jsonSchema = 
+      "{\"type\":\"record\"," +
+      "\"name\":\"org.apache.avro.mapred.Pair\","+
+      "\"fields\": [ " + 
+        "{\"name\":\"rank\", \"type\":\"int\", \"default\": -1}," +
+        "{\"name\":\"value\", \"type\":\"long\"}" + 
+      "]}";
+    
+    Schema readerSchema = Schema.parse(jsonSchema);
+    
+    AvroJob.setInputSchema(job, readerSchema);
+    
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path inputPath = new Path(dir + "/out" + "/part-00000" + AvroOutputFormat.EXT);
+    FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
+    FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
+    
+    AvroRecordReader<Pair<Integer, Long>> recordReader = new AvroRecordReader<Pair<Integer, Long>>(job, fileSplit);
+    
+    AvroWrapper<Pair<Integer, Long>> inputPair = new AvroWrapper<Pair<Integer, Long>>(null);
+    NullWritable ignore = NullWritable.get();
+    
+    long sumOfCounts = 0;
+    long numOfCounts = 0;
+    while (recordReader.next(inputPair, ignore)) {
+      Assert.assertEquals((Integer)inputPair.datum().get(0), defaultRank);
+      sumOfCounts += (Long) inputPair.datum().get(1);
+      numOfCounts++;
+    }
+    
+    Assert.assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
+    
+    long actualSumOfCounts = 0;
+    for(Long count : WordCountUtil.COUNTS.values()) {
+      actualSumOfCounts += count;
+    }
+    
+    Assert.assertEquals(sumOfCounts, actualSumOfCounts);
+  }
+
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/WordCountUtil.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/WordCountUtil.java
new file mode 100644
index 0000000..af60f90
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/WordCountUtil.java
@@ -0,0 +1,176 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred;
+
+import static org.junit.Assert.*;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.File;
+import java.io.InputStream;
+import java.io.FileInputStream;
+import java.io.BufferedInputStream;
+import java.io.PrintStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.StringTokenizer;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.mapred.JobConf;
+
+import org.apache.avro.Schema;
+import org.apache.avro.util.Utf8;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.file.DataFileStream;
+
+public class WordCountUtil {
+
+  private static final File DIR
+    = new File(System.getProperty("test.dir", ".") + "/mapred");
+  public static final File LINES_FILE
+    = new File(new File(DIR, "in"), "lines.avro");
+  private static final File LINES_TEXT_FILE
+    = new File(new File(DIR, "in"), "lines.txt");
+  public static final File COUNTS_FILE
+    = new File(new File(DIR, "out"), "part-00000.avro");
+  private static final File SORTED_FILE
+    = new File(new File(DIR, "out"), "part-00000.avro");
+
+  public static final String[] LINES = new String[] {
+    "the quick brown fox jumps over the lazy dog",
+    "the cow jumps over the moon",
+    "the rain in spain falls mainly on the plains"
+  };
+
+  public static final Map<String,Long> COUNTS =
+    new TreeMap<String,Long>();
+  static {
+    for (String line : LINES) {
+      StringTokenizer tokens = new StringTokenizer(line);
+      while (tokens.hasMoreTokens()) {
+        String word = tokens.nextToken();
+        long count = COUNTS.containsKey(word) ? COUNTS.get(word) : 0L;
+        count++;
+        COUNTS.put(word, count);
+      }
+    }
+  }
+
+  public static void writeLinesFile() throws IOException {
+    FileUtil.fullyDelete(DIR);
+    DatumWriter<Utf8> writer = new GenericDatumWriter<Utf8>();
+    DataFileWriter<Utf8> out = new DataFileWriter<Utf8>(writer);
+    LINES_FILE.getParentFile().mkdirs();
+    out.create(Schema.create(Schema.Type.STRING), LINES_FILE);
+    for (String line : LINES)
+      out.append(new Utf8(line));
+    out.close();
+  }
+
+  public static void writeLinesBytesFile() throws IOException {
+    FileUtil.fullyDelete(DIR);
+    DatumWriter<ByteBuffer> writer = new GenericDatumWriter<ByteBuffer>();
+    DataFileWriter<ByteBuffer> out = new DataFileWriter<ByteBuffer>(writer);
+    LINES_FILE.getParentFile().mkdirs();
+    out.create(Schema.create(Schema.Type.BYTES), LINES_FILE);
+    for (String line : LINES)
+      out.append(ByteBuffer.wrap(line.getBytes("UTF-8")));
+    out.close();
+  }
+  
+  public static void writeLinesTextFile() throws IOException {
+    FileUtil.fullyDelete(DIR);
+    LINES_FILE.getParentFile().mkdirs();
+    PrintStream out = new PrintStream(LINES_TEXT_FILE);
+    for (String line : LINES)
+      out.println(line);
+    out.close();
+  }
+
+  public static void validateCountsFile() throws Exception {
+    DatumReader<Pair<Utf8,Long>> reader
+      = new SpecificDatumReader<Pair<Utf8,Long>>();
+    InputStream in = new BufferedInputStream(new FileInputStream(COUNTS_FILE));
+    DataFileStream<Pair<Utf8,Long>> counts
+      = new DataFileStream<Pair<Utf8,Long>>(in,reader);
+    int numWords = 0;
+    for (Pair<Utf8,Long> wc : counts) {
+      assertEquals(wc.key().toString(),
+                   COUNTS.get(wc.key().toString()), wc.value());
+      numWords++;
+    }
+    checkMeta(counts);
+    in.close();
+    assertEquals(COUNTS.size(), numWords);
+  }
+  
+  public static void validateSortedFile() throws Exception {
+    DatumReader<ByteBuffer> reader = new GenericDatumReader<ByteBuffer>();
+    InputStream in = new BufferedInputStream(
+        new FileInputStream(SORTED_FILE));
+    DataFileStream<ByteBuffer> lines =
+        new DataFileStream<ByteBuffer>(in,reader);
+    List<String> sortedLines = new ArrayList<String>();
+    for (String line : LINES) {
+      sortedLines.add(line);
+    }
+    Collections.sort(sortedLines);
+    for (String expectedLine : sortedLines) {
+      ByteBuffer buf = lines.next();
+      byte[] b = new byte[buf.remaining()];
+      buf.get(b);
+      assertEquals(expectedLine, new String(b, "UTF-8").trim());
+    }
+    assertFalse(lines.hasNext());
+  }
+  
+  // metadata tests
+  private static final String STRING_KEY = "string-key";
+  private static final String LONG_KEY = "long-key";
+  private static final String BYTES_KEY = "bytes-key";
+  
+  private static final String STRING_META_VALUE = "value";
+  private static final long LONG_META_VALUE = 666;
+  private static final byte[] BYTES_META_VALUE
+    = new byte[] {(byte)0x00, (byte)0x80, (byte)0xff};
+
+  public static void setMeta(JobConf job) {
+    AvroJob.setOutputMeta(job, STRING_KEY, STRING_META_VALUE);
+    AvroJob.setOutputMeta(job, LONG_KEY, LONG_META_VALUE);
+    AvroJob.setOutputMeta(job, BYTES_KEY, BYTES_META_VALUE);
+  }
+
+  public static void checkMeta(DataFileStream<?> in) throws Exception {
+    assertEquals(STRING_META_VALUE, in.getMetaString(STRING_KEY));
+    assertEquals(LONG_META_VALUE, in.getMetaLong(LONG_KEY));
+    assertTrue(Arrays.equals(BYTES_META_VALUE, in.getMeta(BYTES_KEY)));
+  }
+
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TestWordCountTether.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TestWordCountTether.java
new file mode 100644
index 0000000..a7f8b2c
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TestWordCountTether.java
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+
+import org.junit.Test;
+
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.mapred.AvroJob;
+import org.apache.avro.mapred.WordCountUtil;
+import org.apache.avro.mapred.Pair;
+import org.apache.avro.Schema;
+import org.apache.avro.util.Utf8;
+import org.apache.avro.specific.SpecificDatumReader;
+
+/**
+ * See also TestTetherTool for an example of how to submit jobs using the tether tool.
+ */
+public class TestWordCountTether {
+
+  /**
+   * Run a job using the given transport protocol.
+   * @param proto the transport protocol to use ("sasl" or "http")
+   */
+  private void _runjob(String proto) throws Exception {
+    System.out.println(System.getProperty("java.class.path"));
+    JobConf job = new JobConf();
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path outputPath = new Path(dir + "/out");
+
+    outputPath.getFileSystem(job).delete(outputPath);
+
+    // create the input file
+    WordCountUtil.writeLinesFile();
+
+    File exec =
+      new File(System.getProperty("java.home")+"/bin/java");
+
+    // input path
+    String in = dir + "/in";
+
+    // create a string of the arguments
+    List<String> execargs = new ArrayList<String>();
+    execargs.add("-classpath");
+    execargs.add(System.getProperty("java.class.path"));
+    execargs.add("org.apache.avro.mapred.tether.WordCountTask");
+
+    FileInputFormat.addInputPaths(job, in);
+    FileOutputFormat.setOutputPath(job, outputPath);
+    TetherJob.setExecutable(job, exec, execargs, false);
+
+    Schema outscheme = new Pair<Utf8,Long>(new Utf8(""), 0L).getSchema();
+    AvroJob.setInputSchema(job, Schema.create(Schema.Type.STRING));
+    job.set(AvroJob.OUTPUT_SCHEMA, outscheme.toString());
+
+    TetherJob.setProtocol(job, proto);
+    TetherJob.runJob(job);
+
+    // validate the output
+    DatumReader<Pair<Utf8,Long>> reader
+      = new SpecificDatumReader<Pair<Utf8,Long>>();
+    InputStream cin = new BufferedInputStream(new FileInputStream(WordCountUtil.COUNTS_FILE));
+    DataFileStream<Pair<Utf8,Long>> counts
+      = new DataFileStream<Pair<Utf8,Long>>(cin,reader);
+    int numWords = 0;
+    for (Pair<Utf8,Long> wc : counts) {
+      assertEquals(wc.key().toString(),
+                   WordCountUtil.COUNTS.get(wc.key().toString()), wc.value());
+      numWords++;
+    }
+
+    cin.close();
+    assertEquals(WordCountUtil.COUNTS.size(), numWords);
+
+  }
+
+  /**
+   * Test the job using the sasl protocol
+   * @throws Exception
+   */
+  @Test
+  @SuppressWarnings("deprecation")
+  public void testJob() throws Exception {
+      _runjob("sasl");
+  }
+
+  /**
+   * Test the job using the http protocol
+   * @throws Exception
+   */
+  @Test
+  @SuppressWarnings("deprecation")
+  public void testHttp() throws Exception {
+    _runjob("http");
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java
new file mode 100644
index 0000000..b551ef9
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java
@@ -0,0 +1,238 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.io.IOException;
+import java.io.ByteArrayOutputStream;
+import java.nio.ByteBuffer;
+import java.net.InetSocketAddress;
+import java.net.URL;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.avro.Schema;
+import org.apache.avro.ipc.HttpTransceiver;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.ipc.SaslSocketTransceiver;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.BinaryDecoder;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.specific.SpecificDatumWriter;
+
+/** Base class for Java tether mapreduce programs.  Useless except for testing,
+ * since it's already possible to write Java MapReduce programs without
+ * tethering.  Also serves as an example of how a framework may be
+ * implemented. */
+public abstract class TetherTask<IN,MID,OUT> {
+  static final Logger LOG = LoggerFactory.getLogger(TetherTask.class);
+
+  private Transceiver clientTransceiver;
+  private OutputProtocol outputClient;
+
+  private TaskType taskType;
+  private int partitions;
+
+  private DecoderFactory decoderFactory = DecoderFactory.get();
+  private BinaryDecoder decoder;
+
+  private SpecificDatumReader<IN> inReader;
+  private SpecificDatumReader<MID> midReader;
+  private IN inRecord;
+  private MID midRecord;
+  private MID midRecordSpare;
+  private Collector<MID> midCollector;
+  private Collector<OUT> outCollector;
+
+  private TetheredProcess.Protocol proto;
+
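+  /** Exposes ByteArrayOutputStream's protected buf/count so the written bytes
+   * can be wrapped as a ByteBuffer without copying. */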
+  private static class Buffer extends ByteArrayOutputStream {
+    public ByteBuffer data() {
+      return ByteBuffer.wrap(buf, 0, count);
+    }
+  }
+
+  /** Collector for map and reduce output values. */
+  public class Collector<T> {
+    private SpecificDatumWriter<T> writer;
+    private Buffer buffer = new Buffer();
+    private BinaryEncoder encoder = new EncoderFactory()
+        .configureBlockSize(512).binaryEncoder(buffer, null);
+    
+    private Collector(Schema schema) {
+      this.writer = new SpecificDatumWriter<T>(schema);
+    }
+
+    /** Collect a map or reduce output value. */
+    public void collect(T record) throws IOException {
+      buffer.reset();
+      writer.write(record, encoder);
+      encoder.flush();
+      outputClient.output(buffer.data());
+    }
+    
+    /** Collect a pre-partitioned map output value. */
+    public void collect(T record, int partition) throws IOException {
+      buffer.reset();
+      writer.write(record, encoder);
+      encoder.flush();
+      outputClient.outputPartitioned(partition, buffer.data());
+    }
+  }
+
+  void open(int inputPort) throws IOException {
+    // open output client, connecting to parent
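+    // The parent process advertises its port via AVRO_TETHER_OUTPUT_PORT; the
+    // task connects back and then reports its own input port with configure().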
+    String clientPortString = System.getenv("AVRO_TETHER_OUTPUT_PORT");
+    String protocol = System.getenv("AVRO_TETHER_PROTOCOL");
+    if (clientPortString == null)
+      throw new RuntimeException("AVRO_TETHER_OUTPUT_PORT env var is null");
+    int clientPort = Integer.parseInt(clientPortString);
+
+    if (protocol == null) {
+      throw new RuntimeException("AVRO_TETHER_PROTOCOL env var is null");
+    }
+
+    protocol = protocol.trim().toLowerCase();
+
+    if (protocol.equals("http")) {
+      proto = TetheredProcess.Protocol.HTTP;
+    } else if (protocol.equals("sasl")) {
+      proto = TetheredProcess.Protocol.SASL;
+    } else {
+      throw new RuntimeException("AVRO_TETHER_PROTOCOL="+protocol+" but this protocol is unsupported");
+    }
+
+    switch (proto) {
+    case SASL:
+      this.clientTransceiver =
+        new SaslSocketTransceiver(new InetSocketAddress(clientPort));
+      this.outputClient = SpecificRequestor.getClient(OutputProtocol.class, clientTransceiver);
+      break;
+
+    case HTTP:
+      this.clientTransceiver = new HttpTransceiver(new URL("http://127.0.0.1:"+clientPort));
+      this.outputClient = SpecificRequestor.getClient(OutputProtocol.class, clientTransceiver);
+      break;
+    }
+
+    // send inputPort to parent
+    outputClient.configure(inputPort);
+  }
+
+  void configure(TaskType taskType, CharSequence inSchemaText, CharSequence outSchemaText) {
+    this.taskType = taskType;
+    try {
+      Schema inSchema = Schema.parse(inSchemaText.toString());
+      Schema outSchema = Schema.parse(outSchemaText.toString());
+      switch (taskType) {
+      case MAP:
+        this.inReader = new SpecificDatumReader<IN>(inSchema);
+        this.midCollector = new Collector<MID>(outSchema);
+        break;
+      case REDUCE:
+        this.midReader = new SpecificDatumReader<MID>(inSchema);
+        this.outCollector = new Collector<OUT>(outSchema);
+        break;
+      }
+    } catch (Throwable e) {
+      fail(e.toString());
+    }
+  }
+
+  void partitions(int partitions) { this.partitions = partitions; }
+
+  /** Return the number of map output partitions of this job. */
+  public int partitions() { return partitions; }
+
+  void input(ByteBuffer data, long count) {
+    try {
+      decoder = decoderFactory.binaryDecoder(data.array(), decoder);
+      for (long i = 0; i < count; i++) {
+        switch (taskType) {
+        case MAP:
+          inRecord = inReader.read(inRecord, decoder);
+          map(inRecord, midCollector);
+          break;
+        case REDUCE:
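+          // Values arrive sorted, so a change in the record marks the end of
+          // a run of equal keys: flush the finished run before reducing the
+          // new record.  The displaced record is kept as a spare to be reused
+          // by the next read, avoiding a per-record allocation.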
+          MID prev = midRecord;
+          midRecord = midReader.read(midRecordSpare, decoder);
+          if (prev != null && !midRecord.equals(prev))
+            reduceFlush(prev, outCollector);
+          reduce(midRecord, outCollector);
+          midRecordSpare = prev;
+          break;
+        }
+      }
+    } catch (Throwable e) {
+      LOG.warn("failing: "+e, e);
+      fail(e.toString());
+    }
+  }
+
+  void complete() {
+    if (taskType == TaskType.REDUCE && midRecord != null)
+      try {
+        reduceFlush(midRecord, outCollector);
+      } catch (Throwable e) {
+        LOG.warn("failing: "+e, e);
+        fail(e.toString());
+      }
+    LOG.info("TetherTask: Sending complete to parent process.");
+    outputClient.complete();
+    LOG.info("TetherTask: Done sending complete to parent process.");
+  }
+
+  /** Called with input values to generate intermediate values. */
+  public abstract void map(IN record, Collector<MID> collector)
+    throws IOException;
+  /** Called with sorted intermediate values. */
+  public abstract void reduce(MID record, Collector<OUT> collector)
+    throws IOException;
+  /** Called with the last intermediate value in each equivalence run. */
+  public abstract void reduceFlush(MID record, Collector<OUT> collector)
+    throws IOException;
+
+  /** Call to update task status. */
+  public void status(String message) {
+    outputClient.status(message);
+  }
+
+  /** Call to increment a counter. */
+  public void count(String group, String name, long amount) {
+    outputClient.count(group, name, amount);
+  }
+
+  /** Call to fail the task. */
+  public void fail(String message) {
+    outputClient.fail(message);
+    close();
+  }
+
+  void close() {
+    LOG.info("Closing the transciever");
+    if (clientTransceiver != null)
+      try {
+        clientTransceiver.close();
+      } catch (IOException e) {}                  // ignore
+  }
+
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTaskRunner.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTaskRunner.java
new file mode 100644
index 0000000..81bf9dd
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTaskRunner.java
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.URL;
+import java.nio.ByteBuffer;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.avro.ipc.HttpServer;
+import org.apache.avro.ipc.HttpTransceiver;
+import org.apache.avro.ipc.SaslSocketServer;
+import org.apache.avro.ipc.SaslSocketTransceiver;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.ipc.specific.SpecificResponder;
+import org.apache.avro.ipc.Server;
+
+/** Java implementation of a tether executable.  Useless except for testing,
+ * since it's already possible to write Java MapReduce programs without
+ * tethering.  Also serves as an example of how a framework may be
+ * implemented. */
+public class TetherTaskRunner implements InputProtocol {
+  static final Logger LOG = LoggerFactory.getLogger(TetherTaskRunner.class);
+
+  private Server inputServer;
+  private TetherTask task;
+
+  private TetheredProcess.Protocol proto;
+
+  public TetherTaskRunner(TetherTask task) throws IOException {
+    this.task = task;
+
+    //determine what protocol we are using
+    String protocol = System.getenv("AVRO_TETHER_PROTOCOL");
+    if (protocol == null) {
+      throw new RuntimeException("AVRO_TETHER_PROTOCOL env var is null");
+    }
+
+    protocol = protocol.trim().toLowerCase();
+
+    if (protocol.equals("http")) {
+      LOG.info("Using HTTP protocol");
+      proto = TetheredProcess.Protocol.HTTP;
+    } else if (protocol.equals("sasl")) {
+      LOG.info("Using SASL protocol");
+      proto = TetheredProcess.Protocol.SASL;
+    } else {
+      throw new RuntimeException("AVRO_TETHER_PROTOCOL="+protocol+" but this protocol is unsupported");
+    }
+
+    InetSocketAddress iaddress = new InetSocketAddress(0);
+
+    switch (proto) {
+    case SASL:
+      // start input server
+      this.inputServer = new SaslSocketServer(
+          new SpecificResponder(InputProtocol.class, this), iaddress);
+      LOG.info("Started SaslSocketServer on port:"+iaddress.getPort());
+      break;
+
+    case HTTP:
+      this.inputServer = new HttpServer(
+          new SpecificResponder(InputProtocol.class, this), iaddress.getPort());
+      LOG.info("Started HttpServer on port:"+iaddress.getPort());
+      break;
+    }
+
+    inputServer.start();
+
+    // open output to parent
+    task.open(inputServer.getPort());
+  }
+
+  @Override public void configure(TaskType taskType,
+                                  String inSchema,
+                                  String outSchema) {
+    LOG.info("got configure");
+    task.configure(taskType, inSchema, outSchema);
+  }
+
+  @Override public synchronized void input(ByteBuffer data, long count) {
+    task.input(data, count);
+  }
+
+  @Override public void partitions(int partitions) {
+    task.partitions(partitions);
+  }
+
+  @Override public void abort() {
+    LOG.info("got abort");
+    close();
+  }
+
+  @Override public synchronized void complete() {
+    LOG.info("got input complete");
+    task.complete();
+  }
+
+  /** Wait for task to complete. */
+  public void join() throws InterruptedException {
+    LOG.info("TetherTaskRunner: Start join.");
+    inputServer.join();
+    LOG.info("TetherTaskRunner: Finish join.");
+  }
+
+  private void close() {
+    LOG.info("Closing the task");
+    task.close();
+    LOG.info("Finished closing the task.");
+    if (inputServer != null)
+      inputServer.close();
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCount.avsc b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCount.avsc
new file mode 100644
index 0000000..73dfbd8
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCount.avsc
@@ -0,0 +1,6 @@
+{"type":"record",
+ "name":"Pair","namespace":"org.apache.avro.mapred","fields":[
+     {"name":"key","type":"string"},
+     {"name":"value","type":"long","order":"ignore"}
+ ]
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCountTask.java b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCountTask.java
new file mode 100644
index 0000000..48cae25
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCountTask.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mapred.tether;
+
+import java.io.IOException;
+import java.util.StringTokenizer;
+
+import org.apache.avro.mapred.Pair;
+import org.apache.avro.util.Utf8;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Example Java tethered mapreduce executable.  Implements map and reduce
+ * functions for word count. */
+public class WordCountTask
+  extends TetherTask<Utf8,Pair<Utf8,Long>,Pair<Utf8,Long>> {
+  
+  static final Logger LOG = LoggerFactory.getLogger(WordCountTask.class);
+  @Override public void map(Utf8 text, Collector<Pair<Utf8,Long>> collector)
+    throws IOException {
+    StringTokenizer tokens = new StringTokenizer(text.toString());
+    while (tokens.hasMoreTokens())
+      collector.collect(new Pair<Utf8,Long>(new Utf8(tokens.nextToken()),1L));
+  }
+  
+  private long sum;
+
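+  /** reduce() is called once per value in a run of equal words and only
+   * accumulates; reduceFlush() is called at the end of each run to emit the
+   * summed pair and reset the accumulator. */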
+  @Override public void reduce(Pair<Utf8,Long> wc,
+                               Collector<Pair<Utf8,Long>> c) {
+    sum += wc.value();
+  }
+    
+  @Override public void reduceFlush(Pair<Utf8,Long> wc, Collector<Pair<Utf8,Long>> c)
+    throws IOException {
+    wc.value(sum);
+    c.collect(wc);
+    sum = 0;
+  }
+
+  public static void main(String[] args) throws Exception {
+    new TetherTaskRunner(new WordCountTask()).join();
+    LOG.info("WordCountTask finished");
+  }
+
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/AvroFiles.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/AvroFiles.java
new file mode 100644
index 0000000..11aa9d6
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/AvroFiles.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DatumWriter;
+
+/**
+ * A utility class for working with Avro container files within tests.
+ */
+public final class AvroFiles {
+  private AvroFiles() {}
+
+  /**
+   * Creates an avro container file.
+   *
+   * @param file The file to create.
+   * @param schema The schema for the records the file should contain.
+   * @param records The records to put in the file.
+   * @param <T> The (java) type of the avro records.
+   * @return The created file.
+   */
+  public static <T> File createFile(File file, Schema schema, T... records)
+      throws IOException {
+    DatumWriter<T> datumWriter = new GenericDatumWriter<T>(schema);
+    DataFileWriter<T> fileWriter = new DataFileWriter<T>(datumWriter);
+    fileWriter.create(schema, file);
+    for (T record : records) {
+      fileWriter.append(record);
+    }
+    fileWriter.close();
+
+    return file;
+  }
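+
+  // Illustrative usage (the output path below is hypothetical):
+  //
+  //   File words = AvroFiles.createFile(new File("/tmp/words.avro"),
+  //       Schema.create(Schema.Type.STRING), "first", "second");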
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java
new file mode 100644
index 0000000..a7b936a
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import static org.junit.Assert.*;
+import static org.easymock.EasyMock.*;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+import org.junit.Test;
+
+public class TestAvroKeyInputFormat {
+  /**
+   * Verifies that a non-null record reader can be created, and the key/value types are
+   * as expected.
+   */
+  @Test
+  public void testCreateRecordReader() throws IOException, InterruptedException {
+    // Set up the job configuration.
+    Job job = new Job();
+    AvroJob.setInputKeySchema(job, Schema.create(Schema.Type.STRING));
+    Configuration conf = job.getConfiguration();
+
+    FileSplit inputSplit = createMock(FileSplit.class);
+    TaskAttemptContext context = createMock(TaskAttemptContext.class);
+    expect(context.getConfiguration()).andReturn(conf).anyTimes();
+
+    replay(inputSplit);
+    replay(context);
+
+    AvroKeyInputFormat inputFormat = new AvroKeyInputFormat();
+    @SuppressWarnings("unchecked")
+    RecordReader<AvroKey<Object>, NullWritable> recordReader = inputFormat.createRecordReader(
+        inputSplit, context);
+    assertNotNull(recordReader);
+    recordReader.close();
+
+    verify(inputSplit);
+    verify(context);
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java
new file mode 100644
index 0000000..9a46d44
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyOutputFormat.java
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileConstants;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroOutputFormat;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.easymock.Capture;
+import org.easymock.EasyMock;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+public class TestAvroKeyOutputFormat {
+  private static final String SYNC_INTERVAL_KEY = org.apache.avro.mapred.AvroOutputFormat.SYNC_INTERVAL_KEY;
+  private static final int TEST_SYNC_INTERVAL = 12345;
+
+  @Rule
+  public TemporaryFolder mTempDir = new TemporaryFolder();
+
+  @Test
+  public void testWithNullCodec() throws IOException {
+    Configuration conf = new Configuration();
+    conf.setInt(SYNC_INTERVAL_KEY, TEST_SYNC_INTERVAL);
+    testGetRecordWriter(conf, CodecFactory.nullCodec(), TEST_SYNC_INTERVAL);
+  }
+
+  @Test
+  public void testWithDeflateCodec() throws IOException {
+    Configuration conf = new Configuration();
+    conf.setBoolean("mapred.output.compress", true);
+    conf.setInt(org.apache.avro.mapred.AvroOutputFormat.DEFLATE_LEVEL_KEY, 3);
+    testGetRecordWriter(conf, CodecFactory.deflateCodec(3), DataFileConstants.DEFAULT_SYNC_INTERVAL);
+  }
+
+  @Test
+  public void testWithSnappyCodec() throws IOException {
+    Configuration conf = new Configuration();
+    conf.setBoolean("mapred.output.compress", true);
+    conf.set(AvroJob.CONF_OUTPUT_CODEC, DataFileConstants.SNAPPY_CODEC);
+    conf.setInt(SYNC_INTERVAL_KEY, TEST_SYNC_INTERVAL);
+    testGetRecordWriter(conf, CodecFactory.snappyCodec(), TEST_SYNC_INTERVAL);
+  }
+
+  @Test
+  public void testWithBZip2Codec() throws IOException {
+    Configuration conf = new Configuration();
+    conf.setBoolean("mapred.output.compress", true);
+    conf.set(AvroJob.CONF_OUTPUT_CODEC, DataFileConstants.BZIP2_CODEC);
+    testGetRecordWriter(conf, CodecFactory.bzip2Codec(), DataFileConstants.DEFAULT_SYNC_INTERVAL);
+  }
+
+  @Test
+  public void testWithDeflateCodecWithHadoopConfig() throws IOException {
+    Configuration conf = new Configuration();
+    conf.setBoolean("mapred.output.compress", true);
+    conf.set("mapred.output.compression.codec","org.apache.hadoop.io.compress.DeflateCodec");
+    conf.setInt(org.apache.avro.mapred.AvroOutputFormat.DEFLATE_LEVEL_KEY, -1);
+    conf.setInt(SYNC_INTERVAL_KEY, TEST_SYNC_INTERVAL);
+    testGetRecordWriter(conf, CodecFactory.deflateCodec(-1), TEST_SYNC_INTERVAL);
+  }
+
+  @Test
+  public void testWithSnappyCodecWithHadoopConfig() throws IOException {
+    Configuration conf = new Configuration();
+    conf.setBoolean("mapred.output.compress", true);
+    conf.set("mapred.output.compression.codec","org.apache.hadoop.io.compress.SnappyCodec");
+    testGetRecordWriter(conf, CodecFactory.snappyCodec(), DataFileConstants.DEFAULT_SYNC_INTERVAL);
+  }
+
+  @Test
+  public void testWithBZip2CodecWithHadoopConfig() throws IOException {
+    Configuration conf = new Configuration();
+    conf.setBoolean("mapred.output.compress", true);
+    conf.set("mapred.output.compression.codec","org.apache.hadoop.io.compress.BZip2Codec");
+    conf.setInt(SYNC_INTERVAL_KEY, TEST_SYNC_INTERVAL);
+    testGetRecordWriter(conf, CodecFactory.bzip2Codec(), TEST_SYNC_INTERVAL);
+  }
+
+  /**
+   * Tests that the record writer is constructed and returned correctly from the output format.
+   */
+  private void testGetRecordWriter(Configuration conf, CodecFactory expectedCodec, int expectedSyncInterval)
+      throws IOException {
+    // Configure a mock task attempt context.
+    Job job = new Job(conf);
+    job.getConfiguration().set("mapred.output.dir", mTempDir.getRoot().getPath());
+    Schema writerSchema = Schema.create(Schema.Type.INT);
+    AvroJob.setOutputKeySchema(job, writerSchema);
+    TaskAttemptContext context = createMock(TaskAttemptContext.class);
+    expect(context.getConfiguration())
+        .andReturn(job.getConfiguration()).anyTimes();
+    expect(context.getTaskAttemptID())
+        .andReturn(TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0"))
+        .anyTimes();
+    expect(context.getNumReduceTasks()).andReturn(1);
+
+    // Create a mock record writer.
+    @SuppressWarnings("unchecked")
+    RecordWriter<AvroKey<Integer>, NullWritable> expectedRecordWriter
+        = createMock(RecordWriter.class);
+    AvroKeyOutputFormat.RecordWriterFactory recordWriterFactory
+        = createMock(AvroKeyOutputFormat.RecordWriterFactory.class);
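+    // The factory is injected through the AvroKeyOutputFormat constructor
+    // (see below), which lets the test intercept the schema, codec, stream,
+    // and sync interval the format actually passes down.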
+
+    // Expect the record writer factory to be called with appropriate parameters.
+    Capture<CodecFactory> capturedCodecFactory = new Capture<CodecFactory>();
+    expect(recordWriterFactory.create(eq(writerSchema),
+        anyObject(GenericData.class),
+        capture(capturedCodecFactory),  // Capture for comparison later.
+        anyObject(OutputStream.class),
+        eq(expectedSyncInterval))).andReturn(expectedRecordWriter);
+
+    replay(context);
+    replay(expectedRecordWriter);
+    replay(recordWriterFactory);
+
+    AvroKeyOutputFormat<Integer> outputFormat
+        = new AvroKeyOutputFormat<Integer>(recordWriterFactory);
+    RecordWriter<AvroKey<Integer>, NullWritable> recordWriter
+        = outputFormat.getRecordWriter(context);
+    // Make sure the expected codec was used.
+    assertTrue(capturedCodecFactory.hasCaptured());
+    assertEquals(expectedCodec.toString(), capturedCodecFactory.getValue().toString());
+
+    verify(context);
+    verify(expectedRecordWriter);
+    verify(recordWriterFactory);
+
+    assertNotNull(recordWriter);
+    assertTrue(expectedRecordWriter == recordWriter);
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordReader.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordReader.java
new file mode 100644
index 0000000..cd33169
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordReader.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.SeekableFileInput;
+import org.apache.avro.file.SeekableInput;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+public class TestAvroKeyRecordReader {
+  /** A temporary directory for test data. */
+  @Rule
+  public TemporaryFolder mTempDir = new TemporaryFolder();
+
+  /**
+   * Verifies that avro records can be read and progress is reported correctly.
+   */
+  @Test
+  public void testReadRecords() throws IOException, InterruptedException {
+    // Create the test avro file input with two records:
+    //   1. "first"
+    //   2. "second"
+    final SeekableInput avroFileInput = new SeekableFileInput(
+        AvroFiles.createFile(new File(mTempDir.getRoot(), "myStringfile.avro"),
+            Schema.create(Schema.Type.STRING), "first", "second"));
+
+    // Create the record reader.
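+    // createSeekableInput() is overridden so the reader consumes the local
+    // test file directly instead of resolving the mock split's path.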
+    Schema readerSchema = Schema.create(Schema.Type.STRING);
+    RecordReader<AvroKey<CharSequence>, NullWritable> recordReader
+        = new AvroKeyRecordReader<CharSequence>(readerSchema) {
+      @Override
+      protected SeekableInput createSeekableInput(Configuration conf, Path path)
+          throws IOException {
+        return avroFileInput;
+      }
+    };
+
+    // Set up the job configuration.
+    Configuration conf = new Configuration();
+
+    // Create a mock input split for this record reader.
+    FileSplit inputSplit = createMock(FileSplit.class);
+    expect(inputSplit.getPath()).andReturn(new Path("/path/to/an/avro/file")).anyTimes();
+    expect(inputSplit.getStart()).andReturn(0L).anyTimes();
+    expect(inputSplit.getLength()).andReturn(avroFileInput.length()).anyTimes();
+
+    // Create a mock task attempt context for this record reader.
+    TaskAttemptContext context = createMock(TaskAttemptContext.class);
+    expect(context.getConfiguration()).andReturn(conf).anyTimes();
+
+    // Initialize the record reader.
+    replay(inputSplit);
+    replay(context);
+    recordReader.initialize(inputSplit, context);
+
+    assertEquals("Progress should be zero before any records are read",
+        0.0f, recordReader.getProgress(), 0.0f);
+
+    // Some variables to hold the records.
+    AvroKey<CharSequence> key;
+    NullWritable value;
+
+    // Read the first record.
+    assertTrue("Expected at least one record", recordReader.nextKeyValue());
+    key = recordReader.getCurrentKey();
+    value = recordReader.getCurrentValue();
+
+    assertNotNull("First record had null key", key);
+    assertNotNull("First record had null value", value);
+
+    CharSequence firstString = key.datum();
+    assertEquals("first", firstString.toString());
+
+    assertTrue("getCurrentKey() returned different keys for the same record",
+        key == recordReader.getCurrentKey());
+    assertTrue("getCurrentValue() returned different values for the same record",
+        value == recordReader.getCurrentValue());
+
+    // Read the second record.
+    assertTrue("Expected to read a second record", recordReader.nextKeyValue());
+    key = recordReader.getCurrentKey();
+    value = recordReader.getCurrentValue();
+
+    assertNotNull("Second record had null key", key);
+    assertNotNull("Second record had null value", value);
+
+    CharSequence secondString = key.datum();
+    assertEquals("second", secondString.toString());
+
+    assertEquals("Progress should be complete (2 out of 2 records processed)",
+        1.0f, recordReader.getProgress(), 0.0f);
+
+    // There should be no more records.
+    assertFalse("Expected only 2 records", recordReader.nextKeyValue());
+
+    // Close the record reader.
+    recordReader.close();
+
+    // Verify the expected calls on the mocks.
+    verify(inputSplit);
+    verify(context);
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
new file mode 100644
index 0000000..a867f02
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordWriter.java
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.FsInput;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.junit.Test;
+
+public class TestAvroKeyRecordWriter {
+  @Test
+  public void testWrite() throws IOException {
+    Schema writerSchema = Schema.create(Schema.Type.INT);
+    GenericData dataModel = new ReflectData();
+    CodecFactory compressionCodec = CodecFactory.nullCodec();
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    TaskAttemptContext context = createMock(TaskAttemptContext.class);
+
+    replay(context);
+
+    // Write an avro container file with two records: 1 and 2.
+    AvroKeyRecordWriter<Integer> recordWriter = new AvroKeyRecordWriter<Integer>(
+        writerSchema, dataModel, compressionCodec, outputStream);
+    recordWriter.write(new AvroKey<Integer>(1), NullWritable.get());
+    recordWriter.write(new AvroKey<Integer>(2), NullWritable.get());
+    recordWriter.close(context);
+
+    verify(context);
+
+    // Verify that the file was written as expected.
+    InputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
+    Schema readerSchema = Schema.create(Schema.Type.INT);
+    DatumReader<Integer> datumReader = new SpecificDatumReader<Integer>(readerSchema);
+    DataFileStream<Integer> dataFileReader = new DataFileStream<Integer>(inputStream, datumReader);
+
+    assertTrue(dataFileReader.hasNext());  // Record 1.
+    assertEquals(1, dataFileReader.next().intValue());
+    assertTrue(dataFileReader.hasNext());  // Record 2.
+    assertEquals(2, dataFileReader.next().intValue());
+    assertFalse(dataFileReader.hasNext());  // No more records.
+
+    dataFileReader.close();
+  }
+  
+  @Test
+  public void testSyncableWrite() throws IOException {
+    Schema writerSchema = Schema.create(Schema.Type.INT);
+    GenericData dataModel = new ReflectData();
+    CodecFactory compressionCodec = CodecFactory.nullCodec();
+    FileOutputStream outputStream = new FileOutputStream(new File("target/temp.avro"));
+    TaskAttemptContext context = createMock(TaskAttemptContext.class);
+
+    replay(context);
+
+    // Write an avro container file with two records: 1 and 2.
+    AvroKeyRecordWriter<Integer> recordWriter = new AvroKeyRecordWriter<Integer>(
+        writerSchema, dataModel, compressionCodec, outputStream);
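+    // sync() forces the end of the current block and returns a position that
+    // can later be passed to DataFileReader.seek() for random access.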
+    long positionOne = recordWriter.sync();
+    recordWriter.write(new AvroKey<Integer>(1), NullWritable.get());
+    long positionTwo = recordWriter.sync();
+    recordWriter.write(new AvroKey<Integer>(2), NullWritable.get());
+    recordWriter.close(context);
+
+    verify(context);
+
+    // Verify that the file was written as expected.
+    Configuration conf = new Configuration();
+    conf.set("fs.default.name", "file:///");
+    Path avroFile = new Path("target/temp.avro");
+    DataFileReader<GenericData.Record> dataFileReader = new DataFileReader<GenericData.Record>(
+        new FsInput(avroFile, conf), new SpecificDatumReader<GenericData.Record>());
+
+    dataFileReader.seek(positionTwo);
+    assertTrue(dataFileReader.hasNext());  // Record 2.
+    assertEquals(2, dataFileReader.next());
+
+    dataFileReader.seek(positionOne);
+    assertTrue(dataFileReader.hasNext());  // Record 1.
+    assertEquals(1, dataFileReader.next());
+    
+    dataFileReader.close();
+  }  
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordReader.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordReader.java
new file mode 100644
index 0000000..051d0e4
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordReader.java
@@ -0,0 +1,149 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.SeekableFileInput;
+import org.apache.avro.file.SeekableInput;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.hadoop.io.AvroKeyValue;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+public class TestAvroKeyValueRecordReader {
+  /** A temporary directory for test data. */
+  @Rule
+  public TemporaryFolder mTempDir = new TemporaryFolder();
+
+  /**
+   * Verifies that avro records can be read and progress is reported correctly.
+   */
+  @Test
+  public void testReadRecords() throws IOException, InterruptedException {
+    // Create the test avro file input with two records:
+    //   1. <"firstkey", 1>
+    //   2. <"second", 2>
+    Schema keyValueSchema = AvroKeyValue.getSchema(
+        Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.INT));
+
+    AvroKeyValue<CharSequence, Integer> firstInputRecord
+        = new AvroKeyValue<CharSequence, Integer>(new GenericData.Record(keyValueSchema));
+    firstInputRecord.setKey("first");
+    firstInputRecord.setValue(1);
+
+    AvroKeyValue<CharSequence, Integer> secondInputRecord
+        = new AvroKeyValue<CharSequence, Integer>(new GenericData.Record(keyValueSchema));
+    secondInputRecord.setKey("second");
+    secondInputRecord.setValue(2);
+
+    final SeekableInput avroFileInput = new SeekableFileInput(
+        AvroFiles.createFile(new File(mTempDir.getRoot(), "myInputFile.avro"), keyValueSchema,
+            firstInputRecord.get(), secondInputRecord.get()));
+
+    // Create the record reader over the avro input file.
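+    // createSeekableInput() is overridden so the reader consumes the local
+    // test file directly instead of resolving the mock split's path.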
+    RecordReader<AvroKey<CharSequence>, AvroValue<Integer>> recordReader
+        = new AvroKeyValueRecordReader<CharSequence, Integer>(
+            Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.INT)) {
+      @Override
+      protected SeekableInput createSeekableInput(Configuration conf, Path path)
+          throws IOException {
+        return avroFileInput;
+      }
+    };
+
+    // Set up the job configuration.
+    Configuration conf = new Configuration();
+
+    // Create a mock input split for this record reader.
+    FileSplit inputSplit = createMock(FileSplit.class);
+    expect(inputSplit.getPath()).andReturn(new Path("/path/to/an/avro/file")).anyTimes();
+    expect(inputSplit.getStart()).andReturn(0L).anyTimes();
+    expect(inputSplit.getLength()).andReturn(avroFileInput.length()).anyTimes();
+
+    // Create a mock task attempt context for this record reader.
+    TaskAttemptContext context = createMock(TaskAttemptContext.class);
+    expect(context.getConfiguration()).andReturn(conf).anyTimes();
+
+    // Initialize the record reader.
+    replay(inputSplit);
+    replay(context);
+    recordReader.initialize(inputSplit, context);
+
+    assertEquals("Progress should be zero before any records are read",
+        0.0f, recordReader.getProgress(), 0.0f);
+
+    // Some variables to hold the records.
+    AvroKey<CharSequence> key;
+    AvroValue<Integer> value;
+
+    // Read the first record.
+    assertTrue("Expected at least one record", recordReader.nextKeyValue());
+    key = recordReader.getCurrentKey();
+    value = recordReader.getCurrentValue();
+
+    assertNotNull("First record had null key", key);
+    assertNotNull("First record had null value", value);
+
+    assertEquals("first", key.datum().toString());
+    assertEquals(1, value.datum().intValue());
+
+    assertTrue("getCurrentKey() returned different keys for the same record",
+        key == recordReader.getCurrentKey());
+    assertTrue("getCurrentValue() returned different values for the same record",
+        value == recordReader.getCurrentValue());
+
+    // Read the second record.
+    assertTrue("Expected to read a second record", recordReader.nextKeyValue());
+    key = recordReader.getCurrentKey();
+    value = recordReader.getCurrentValue();
+
+    assertNotNull("Second record had null key", key);
+    assertNotNull("Second record had null value", value);
+
+    assertEquals("second", key.datum().toString());
+    assertEquals(2, value.datum().intValue());
+
+    assertEquals("Progress should be complete (2 out of 2 records processed)",
+        1.0f, recordReader.getProgress(), 0.0f);
+
+    // There should be no more records.
+    assertFalse("Expected only 2 records", recordReader.nextKeyValue());
+
+    // Close the record reader.
+    recordReader.close();
+
+    // Verify the expected calls on the mocks.
+    verify(inputSplit);
+    verify(context);
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
new file mode 100644
index 0000000..1cd1ded
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyValueRecordWriter.java
@@ -0,0 +1,235 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.hadoop.io.AvroDatumConverter;
+import org.apache.avro.hadoop.io.AvroDatumConverterFactory;
+import org.apache.avro.hadoop.io.AvroKeyValue;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.avro.mapred.FsInput;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.junit.Test;
+
+public class TestAvroKeyValueRecordWriter {
+  @Test
+  public void testWriteRecords() throws IOException {
+    Job job = new Job();
+    AvroJob.setOutputValueSchema(job, TextStats.SCHEMA$);
+    TaskAttemptContext context = createMock(TaskAttemptContext.class);
+
+    replay(context);
+
+    AvroDatumConverterFactory factory = new AvroDatumConverterFactory(job.getConfiguration());
+    AvroDatumConverter<Text, ?> keyConverter = factory.create(Text.class);
+    AvroValue<TextStats> avroValue = new AvroValue<TextStats>(null);
+    @SuppressWarnings("unchecked")
+    AvroDatumConverter<AvroValue<TextStats>, ?> valueConverter
+        = factory.create((Class<AvroValue<TextStats>>) avroValue.getClass());
+    CodecFactory compressionCodec = CodecFactory.nullCodec();
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+
+    // Use a writer to generate an Avro container file in memory.
+    // Write two records: <'apple', TextStats('apple')> and <'banana', TextStats('banana')>.
+    AvroKeyValueRecordWriter<Text, AvroValue<TextStats>> writer
+        = new AvroKeyValueRecordWriter<Text, AvroValue<TextStats>>(keyConverter, valueConverter,
+            new ReflectData(), compressionCodec, outputStream);
+    TextStats appleStats = new TextStats();
+    appleStats.name = "apple";
+    writer.write(new Text("apple"), new AvroValue<TextStats>(appleStats));
+    TextStats bananaStats = new TextStats();
+    bananaStats.name = "banana";
+    writer.write(new Text("banana"), new AvroValue<TextStats>(bananaStats));
+    writer.close(context);
+
+    verify(context);
+
+    ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
+    Schema readerSchema = AvroKeyValue.getSchema(
+        Schema.create(Schema.Type.STRING), TextStats.SCHEMA$);
+    DatumReader<GenericRecord> datumReader
+        = new SpecificDatumReader<GenericRecord>(readerSchema);
+    DataFileStream<GenericRecord> avroFileReader
+        = new DataFileStream<GenericRecord>(inputStream, datumReader);
+
+    // Verify that the first record was written.
+    assertTrue(avroFileReader.hasNext());
+    AvroKeyValue<CharSequence, TextStats> firstRecord
+        = new AvroKeyValue<CharSequence, TextStats>(avroFileReader.next());
+    assertNotNull(firstRecord.get());
+    assertEquals("apple", firstRecord.getKey().toString());
+    assertEquals("apple", firstRecord.getValue().name.toString());
+
+    // Verify that the second record was written.
+    assertTrue(avroFileReader.hasNext());
+    AvroKeyValue<CharSequence, TextStats> secondRecord
+        = new AvroKeyValue<CharSequence, TextStats>(avroFileReader.next());
+    assertNotNull(secondRecord.get());
+    assertEquals("banana", secondRecord.getKey().toString());
+    assertEquals("banana", secondRecord.getValue().name.toString());
+
+    // That's all, folks.
+    assertFalse(avroFileReader.hasNext());
+    avroFileReader.close();
+  }
+
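+  /** A minimal POJO used to exercise reflect-based serialization. */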
+  public static class R1 {
+    String attribute;
+  }
+  @Test
+  public void testUsingReflection() throws Exception {
+    Job job = new Job();
+    Schema schema = ReflectData.get().getSchema(R1.class);
+    AvroJob.setOutputValueSchema(job, schema);
+    TaskAttemptContext context = createMock(TaskAttemptContext.class);
+    replay(context);
+
+    R1 record = new R1();
+    record.attribute = "test";
+    AvroValue<R1> avroValue = new AvroValue<R1>(record);
+
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    AvroDatumConverterFactory factory =
+      new AvroDatumConverterFactory(job.getConfiguration());
+
+    AvroDatumConverter<Text, ?> keyConverter = factory.create(Text.class);
+
+    @SuppressWarnings("unchecked")
+    AvroDatumConverter<AvroValue<R1>, R1> valueConverter =
+      factory.create((Class<AvroValue<R1>>) avroValue.getClass());
+
+    AvroKeyValueRecordWriter<Text, AvroValue<R1>> writer =
+      new AvroKeyValueRecordWriter<Text, AvroValue<R1>>(
+        keyConverter, valueConverter, new ReflectData(),
+        CodecFactory.nullCodec(), outputStream);
+
+    writer.write(new Text("reflectionData"), avroValue);
+    writer.close(context);
+
+    verify(context);
+
+    ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
+    Schema readerSchema = AvroKeyValue.getSchema(
+      Schema.create(Schema.Type.STRING), schema);
+    DatumReader<GenericRecord> datumReader =
+      new ReflectDatumReader<GenericRecord>(readerSchema);
+    DataFileStream<GenericRecord> avroFileReader =
+      new DataFileStream<GenericRecord>(inputStream, datumReader);
+
+    // Verify that the first record was written.
+    assertTrue(avroFileReader.hasNext());
+
+    // Verify that the record holds the same data that we've written.
+    AvroKeyValue<CharSequence, R1> firstRecord =
+      new AvroKeyValue<CharSequence, R1>(avroFileReader.next());
+    assertNotNull(firstRecord.get());
+    assertEquals("reflectionData", firstRecord.getKey().toString());
+    assertEquals(record.attribute, firstRecord.getValue().attribute);
+  }
+
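+  /** Verifies that sync markers returned by sync() can later be used to seek to individual records. */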
+  @Test
+  public void testSyncableWriteRecords() throws IOException {
+    Job job = new Job();
+    AvroJob.setOutputValueSchema(job, TextStats.SCHEMA$);
+    TaskAttemptContext context = createMock(TaskAttemptContext.class);
+
+    replay(context);
+
+    AvroDatumConverterFactory factory = new AvroDatumConverterFactory(job.getConfiguration());
+    AvroDatumConverter<Text, ?> keyConverter = factory.create(Text.class);
+    AvroValue<TextStats> avroValue = new AvroValue<TextStats>(null);
+    @SuppressWarnings("unchecked")
+    AvroDatumConverter<AvroValue<TextStats>, ?> valueConverter
+        = factory.create((Class<AvroValue<TextStats>>) avroValue.getClass());
+    CodecFactory compressionCodec = CodecFactory.nullCodec();
+    FileOutputStream outputStream = new FileOutputStream(new File("target/temp.avro"));
+
+    // Write a marker followed by each record: <'apple', TextStats('apple')> and <'banana', TextStats('banana')>.
+    AvroKeyValueRecordWriter<Text, AvroValue<TextStats>> writer
+        = new AvroKeyValueRecordWriter<Text, AvroValue<TextStats>>(keyConverter, valueConverter,
+            new ReflectData(), compressionCodec, outputStream);
+    TextStats appleStats = new TextStats();
+    appleStats.name = "apple";
+    long pointOne = writer.sync();
+    writer.write(new Text("apple"), new AvroValue<TextStats>(appleStats));
+    TextStats bananaStats = new TextStats();
+    bananaStats.name = "banana";
+    long pointTwo = writer.sync();
+    writer.write(new Text("banana"), new AvroValue<TextStats>(bananaStats));
+    writer.close(context);
+
+    verify(context);
+
+    Configuration conf = new Configuration();
+    conf.set("fs.default.name", "file:///");
+    Path avroFile = new Path("target/temp.avro");
+    DataFileReader<GenericData.Record> avroFileReader = new DataFileReader<GenericData.Record>(
+        new FsInput(avroFile, conf), new SpecificDatumReader<GenericData.Record>());
+
+    avroFileReader.seek(pointTwo);
+    // Verify that the second record was written.
+    assertTrue(avroFileReader.hasNext());
+    AvroKeyValue<CharSequence, TextStats> secondRecord
+        = new AvroKeyValue<CharSequence, TextStats>(avroFileReader.next());
+    assertNotNull(secondRecord.get());
+    assertEquals("banana", secondRecord.getKey().toString());
+    assertEquals("banana", secondRecord.getValue().name.toString());
+
+
+    avroFileReader.seek(pointOne);
+    // Verify that the first record was written.
+    assertTrue(avroFileReader.hasNext());
+    AvroKeyValue<CharSequence, TextStats> firstRecord
+        = new AvroKeyValue<CharSequence, TextStats>(avroFileReader.next());
+    assertNotNull(firstRecord.get());
+    assertEquals("apple", firstRecord.getKey().toString());
+    assertEquals("apple", firstRecord.getValue().name.toString());
+
+    // That's all, folks.
+    avroFileReader.close();
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java
new file mode 100644
index 0000000..83cc8ee
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputs.java
@@ -0,0 +1,418 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.FsInput;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.util.Utf8;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.apache.avro.mapred.Pair;
+
+public class TestAvroMultipleOutputs {
+  @Rule
+  public TemporaryFolder tmpFolder = new TemporaryFolder();
+  public static final Schema STATS_SCHEMA =
+      Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
+          + "\"fields\":[{\"name\":\"count\",\"type\":\"int\"},"
+          + "{\"name\":\"name\",\"type\":\"string\"}]}");
+  public static final Schema STATS_SCHEMA_2 = 
+      Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
+          + "\"fields\":[{\"name\":\"count1\",\"type\":\"int\"},"
+          + "{\"name\":\"name1\",\"type\":\"string\"}]}");  
+
+  private static class LineCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
+    private IntWritable mOne;
+
+    @Override
+    protected void setup(Context context) {
+      mOne = new IntWritable(1);
+    }
+
+    @Override
+    protected void map(LongWritable fileByteOffset, Text line, Context context)
+        throws IOException, InterruptedException {
+      context.write(line, mOne);
+    }
+  }
+
+  private static class StatCountMapper
+      extends Mapper<AvroKey<TextStats>, NullWritable, Text, IntWritable> {
+    private IntWritable mCount;
+    private Text mText;
+
+    @Override
+    protected void setup(Context context) {
+      mCount = new IntWritable(0);
+      mText = new Text("");
+    }
+
+    @Override
+    protected void map(AvroKey<TextStats> record, NullWritable ignore, Context context)
+        throws IOException, InterruptedException {
+      mCount.set(record.datum().count);
+      mText.set(record.datum().name.toString());
+      context.write(mText, mCount);
+    }
+  }
+
+  private static class GenericStatsReducer
+      extends Reducer<Text, IntWritable, AvroKey<GenericData.Record>, NullWritable> {
+    private AvroKey<GenericData.Record> mStats;
+    private AvroMultipleOutputs amos;
+
+    @Override
+    protected void setup(Context context) {
+      mStats = new AvroKey<GenericData.Record>(null);
+      amos = new AvroMultipleOutputs(context);
+    }
+
+    @Override
+    protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
+        throws IOException, InterruptedException {
+      GenericData.Record record = new GenericData.Record(STATS_SCHEMA);
+      GenericData.Record record2 = new GenericData.Record(STATS_SCHEMA_2);
+      int sum = 0;
+      for (IntWritable count : counts) {
+        sum += count.get();
+      }
+      record.put("name", new Utf8(line.toString()));
+      record.put("count", new Integer(sum));
+      mStats.datum(record);
+      context.write(mStats, NullWritable.get()); 
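+      // Exercise the different AvroMultipleOutputs.write overloads: by named output, by explicit schema, and by base output path.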
+      amos.write("myavro",mStats,NullWritable.get());
+      record2.put("name1", new Utf8(line.toString()));
+      record2.put("count1", new Integer(sum));
+      mStats.datum(record2); 
+      amos.write(mStats, NullWritable.get(), STATS_SCHEMA_2, null, "testnewwrite2");
+      amos.write("myavro1",mStats);
+      amos.write(mStats, NullWritable.get(), STATS_SCHEMA, null, "testnewwrite");
+      amos.write(mStats, NullWritable.get(), "testwritenonschema");
+    }
+
+    @Override
+    protected void cleanup(Context context) throws IOException, InterruptedException {
+      amos.close();
+    }
+  }
+
+  private static class SpecificStatsReducer
+      extends Reducer<Text, IntWritable, AvroKey<TextStats>, NullWritable> {
+    private AvroKey<TextStats> mStats;
+    private AvroMultipleOutputs amos;
+    @Override
+    protected void setup(Context context) {
+      mStats = new AvroKey<TextStats>(null);
+      amos = new AvroMultipleOutputs(context);
+    }
+
+    @Override
+    protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
+        throws IOException, InterruptedException {
+      TextStats record = new TextStats();
+      record.count = 0;
+      for (IntWritable count : counts) {
+        record.count += count.get();
+      }
+      record.name = line.toString();
+      mStats.datum(record);
+      context.write(mStats, NullWritable.get());
+      amos.write("myavro3",mStats,NullWritable.get());
+    }
+    @Override
+    protected void cleanup(Context context) throws IOException, InterruptedException {
+      amos.close();
+    }
+  }
+
+  private static class SortMapper
+      extends Mapper<AvroKey<TextStats>, NullWritable, AvroKey<TextStats>, NullWritable> {
+    @Override
+    protected void map(AvroKey<TextStats> key, NullWritable value, Context context)
+        throws IOException, InterruptedException {
+      context.write(key, value);
+    }
+  }
+
+  private static class SortReducer
+      extends Reducer<AvroKey<TextStats>, NullWritable, AvroKey<TextStats>, NullWritable> {
+    @Override
+    protected void reduce(AvroKey<TextStats> key, Iterable<NullWritable> ignore, Context context)
+        throws IOException, InterruptedException {
+      context.write(key, NullWritable.get());
+    }
+  }
+
+  @Test
+  public void testAvroGenericOutput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
+            .toURI().toString()));
+    job.setInputFormatClass(TextInputFormat.class);
+
+    job.setMapperClass(LineCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(GenericStatsReducer.class);
+    AvroJob.setOutputKeySchema(job, STATS_SCHEMA);
+    AvroMultipleOutputs.addNamedOutput(job, "myavro", AvroKeyOutputFormat.class, STATS_SCHEMA, null);
+    AvroMultipleOutputs.addNamedOutput(job, "myavro1", AvroKeyOutputFormat.class, STATS_SCHEMA_2);
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path outputPath = new Path(dir + "/out");
+    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath, true);
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/myavro-r-00000.avro"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<GenericData.Record> reader = new DataFileReader<GenericData.Record>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new GenericDatumReader<GenericData.Record>(STATS_SCHEMA));
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (GenericData.Record record : reader) {
+      counts.put(((Utf8) record.get("name")).toString(), (Integer) record.get("count"));
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+
+    outputFiles = fileSystem.globStatus(outputPath.suffix("/myavro1-r-00000.avro"));
+    Assert.assertEquals(1, outputFiles.length);
+    reader = new DataFileReader<GenericData.Record>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new GenericDatumReader<GenericData.Record>(STATS_SCHEMA_2));
+    counts = new HashMap<String, Integer>();
+    for (GenericData.Record record : reader) {
+      counts.put(((Utf8) record.get("name1")).toString(), (Integer) record.get("count1"));
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  
+    outputFiles = fileSystem.globStatus(outputPath.suffix("/testnewwrite-r-00000.avro"));
+    Assert.assertEquals(1, outputFiles.length);
+    reader = new DataFileReader<GenericData.Record>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+            new GenericDatumReader<GenericData.Record>(STATS_SCHEMA));
+    counts = new HashMap<String, Integer>();
+    for (GenericData.Record record : reader) {
+      counts.put(((Utf8) record.get("name")).toString(), (Integer) record.get("count"));
+    }
+    reader.close();
+    
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+        
+    outputFiles = fileSystem.globStatus(outputPath.suffix("/testnewwrite2-r-00000.avro"));
+    Assert.assertEquals(1, outputFiles.length);
+    reader = new DataFileReader<GenericData.Record>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new GenericDatumReader<GenericData.Record>(STATS_SCHEMA_2));
+    counts = new HashMap<String, Integer>();
+    for (GenericData.Record record : reader) {
+      counts.put(((Utf8) record.get("name1")).toString(), (Integer) record.get("count1"));
+    }
+    reader.close();
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+    
+    outputFiles = fileSystem.globStatus(outputPath.suffix("/testwritenonschema-r-00000.avro"));
+    Assert.assertEquals(1, outputFiles.length);
+    reader = new DataFileReader<GenericData.Record>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new GenericDatumReader<GenericData.Record>(STATS_SCHEMA));
+    counts = new HashMap<String, Integer>();
+    for (GenericData.Record record : reader) {
+      counts.put(((Utf8) record.get("name")).toString(), (Integer) record.get("count"));
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
+  @Test
+  public void testAvroSpecificOutput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
+            .toURI().toString()));
+    job.setInputFormatClass(TextInputFormat.class);
+
+    job.setMapperClass(LineCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+    AvroMultipleOutputs.addNamedOutput(job,"myavro3",AvroKeyOutputFormat.class,TextStats.SCHEMA$,null);
+
+    job.setReducerClass(SpecificStatsReducer.class);
+    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path outputPath = new Path(dir + "/out-specific");
+    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath, true);
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/myavro3-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<TextStats> reader = new DataFileReader<TextStats>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new SpecificDatumReader<TextStats>());
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (TextStats record : reader) {
+      counts.put(record.name.toString(), record.count);
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
+  @Test
+  public void testAvroInput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
+            .toURI().toString()));
+    job.setInputFormatClass(AvroKeyInputFormat.class);
+    AvroJob.setInputKeySchema(job, TextStats.SCHEMA$);
+    AvroMultipleOutputs.addNamedOutput(job,"myavro3",AvroKeyOutputFormat.class,TextStats.SCHEMA$,null);
+
+    job.setMapperClass(StatCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(SpecificStatsReducer.class);
+    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-specific-input");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/myavro3-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<TextStats> reader = new DataFileReader<TextStats>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new SpecificDatumReader<TextStats>());
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (TextStats record : reader) {
+      counts.put(record.name.toString(), record.count);
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
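+  /** Verifies that Avro records can also be used as the intermediate map output type. */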
+  @Test
+  public void testAvroMapOutput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
+            .toURI().toString()));
+    job.setInputFormatClass(AvroKeyInputFormat.class);
+    AvroJob.setInputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setMapperClass(SortMapper.class);
+    AvroJob.setMapOutputKeySchema(job, TextStats.SCHEMA$);
+    job.setMapOutputValueClass(NullWritable.class);
+
+    job.setReducerClass(SortReducer.class);
+    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-specific-input");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<TextStats> reader = new DataFileReader<TextStats>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new SpecificDatumReader<TextStats>());
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (TextStats record : reader) {
+      counts.put(record.name.toString(), record.count);
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java
new file mode 100644
index 0000000..8298984
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroMultipleOutputsSyncable.java
@@ -0,0 +1,418 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.avro.mapreduce;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.FsInput;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.util.Utf8;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
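+/** Variant of TestAvroMultipleOutputs whose reducers also write sync markers via AvroMultipleOutputs.sync(). */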
+public class TestAvroMultipleOutputsSyncable {
+  @Rule
+  public TemporaryFolder tmpFolder = new TemporaryFolder();
+  public static final Schema STATS_SCHEMA =
+      Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
+          + "\"fields\":[{\"name\":\"count\",\"type\":\"int\"},"
+          + "{\"name\":\"name\",\"type\":\"string\"}]}");
+  public static final Schema STATS_SCHEMA_2 = 
+      Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
+          + "\"fields\":[{\"name\":\"count1\",\"type\":\"int\"},"
+          + "{\"name\":\"name1\",\"type\":\"string\"}]}");  
+
+  private static class LineCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
+    private IntWritable mOne;
+
+    @Override
+    protected void setup(Context context) {
+      mOne = new IntWritable(1);
+    }
+
+    @Override
+    protected void map(LongWritable fileByteOffset, Text line, Context context)
+        throws IOException, InterruptedException {
+      context.write(line, mOne);
+    }
+  }
+
+  private static class StatCountMapper
+      extends Mapper<AvroKey<TextStats>, NullWritable, Text, IntWritable> {
+    private IntWritable mCount;
+    private Text mText;
+
+    @Override
+    protected void setup(Context context) {
+      mCount = new IntWritable(0);
+      mText = new Text("");
+    }
+
+    @Override
+    protected void map(AvroKey<TextStats> record, NullWritable ignore, Context context)
+        throws IOException, InterruptedException {
+      mCount.set(record.datum().count);
+      mText.set(record.datum().name.toString());
+      context.write(mText, mCount);
+    }
+  }
+
+  private static class GenericStatsReducer
+      extends Reducer<Text, IntWritable, AvroKey<GenericData.Record>, NullWritable> {
+    private AvroKey<GenericData.Record> mStats;
+    private AvroMultipleOutputs amos;
+
+    @Override
+    protected void setup(Context context) {
+      mStats = new AvroKey<GenericData.Record>(null);
+      amos = new AvroMultipleOutputs(context);
+    }
+
+    @Override
+    protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
+        throws IOException, InterruptedException {
+      GenericData.Record record = new GenericData.Record(STATS_SCHEMA);
+      GenericData.Record record2 = new GenericData.Record(STATS_SCHEMA_2);
+      int sum = 0;
+      for (IntWritable count : counts) {
+        sum += count.get();
+      }
+      record.put("name", new Utf8(line.toString()));
+      record.put("count", new Integer(sum));
+      mStats.datum(record);
+      context.write(mStats, NullWritable.get());
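+      // Write a sync marker into the named output before each record.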
+      amos.sync("myavro","myavro");
+      amos.write("myavro",mStats,NullWritable.get());
+      record2.put("name1", new Utf8(line.toString()));
+      record2.put("count1", new Integer(sum));
+      mStats.datum(record2); 
+      amos.write(mStats, NullWritable.get(), STATS_SCHEMA_2, null, "testnewwrite2");
+      amos.sync("myavro1","myavro1");
+      amos.write("myavro1",mStats);
+      amos.write(mStats, NullWritable.get(), STATS_SCHEMA, null, "testnewwrite");
+      amos.write(mStats, NullWritable.get(), "testwritenonschema");
+    }
+
+    @Override
+    protected void cleanup(Context context) throws IOException, InterruptedException {
+      amos.close();
+    }
+  }
+
+  private static class SpecificStatsReducer
+      extends Reducer<Text, IntWritable, AvroKey<TextStats>, NullWritable> {
+    private AvroKey<TextStats> mStats;
+    private AvroMultipleOutputs amos;
+    @Override
+    protected void setup(Context context) {
+      mStats = new AvroKey<TextStats>(null);
+      amos = new AvroMultipleOutputs(context);
+    }
+
+    @Override
+    protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
+        throws IOException, InterruptedException {
+      TextStats record = new TextStats();
+      record.count = 0;
+      for (IntWritable count : counts) {
+        record.count += count.get();
+      }
+      record.name = line.toString();
+      mStats.datum(record);
+      context.write(mStats, NullWritable.get());
+      amos.sync("myavro3","myavro3");
+      amos.write("myavro3",mStats,NullWritable.get());
+    }
+    @Override
+    protected void cleanup(Context context) throws IOException, InterruptedException {
+      amos.close();
+    }
+  }
+
+  private static class SortMapper
+      extends Mapper<AvroKey<TextStats>, NullWritable, AvroKey<TextStats>, NullWritable> {
+    @Override
+    protected void map(AvroKey<TextStats> key, NullWritable value, Context context)
+        throws IOException, InterruptedException {
+      context.write(key, value);
+    }
+  }
+
+  private static class SortReducer
+      extends Reducer<AvroKey<TextStats>, NullWritable, AvroKey<TextStats>, NullWritable> {
+    @Override
+    protected void reduce(AvroKey<TextStats> key, Iterable<NullWritable> ignore, Context context)
+        throws IOException, InterruptedException {
+      context.write(key, NullWritable.get());
+    }
+  }
+
+  @Test
+  public void testAvroGenericOutput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
+            .toURI().toString()));
+    job.setInputFormatClass(TextInputFormat.class);
+
+    job.setMapperClass(LineCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(GenericStatsReducer.class);
+    AvroJob.setOutputKeySchema(job, STATS_SCHEMA);
+    AvroMultipleOutputs.addNamedOutput(job, "myavro", AvroKeyOutputFormat.class, STATS_SCHEMA, null);
+    AvroMultipleOutputs.addNamedOutput(job, "myavro1", AvroKeyOutputFormat.class, STATS_SCHEMA_2);
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path outputPath = new Path(dir + "/out");
+    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath, true);
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/myavro-r-00000.avro"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<GenericData.Record> reader = new DataFileReader<GenericData.Record>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new GenericDatumReader<GenericData.Record>(STATS_SCHEMA));
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (GenericData.Record record : reader) {
+      counts.put(((Utf8) record.get("name")).toString(), (Integer) record.get("count"));
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+
+    outputFiles = fileSystem.globStatus(outputPath.suffix("/myavro1-r-00000.avro"));
+    Assert.assertEquals(1, outputFiles.length);
+    reader = new DataFileReader<GenericData.Record>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new GenericDatumReader<GenericData.Record>(STATS_SCHEMA_2));
+    counts = new HashMap<String, Integer>();
+    for (GenericData.Record record : reader) {
+      counts.put(((Utf8) record.get("name1")).toString(), (Integer) record.get("count1"));
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  
+    outputFiles = fileSystem.globStatus(outputPath.suffix("/testnewwrite-r-00000.avro"));
+    Assert.assertEquals(1, outputFiles.length);
+    reader = new DataFileReader<GenericData.Record>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+            new GenericDatumReader<GenericData.Record>(STATS_SCHEMA));
+    counts = new HashMap<String, Integer>();
+    for (GenericData.Record record : reader) {
+      counts.put(((Utf8) record.get("name")).toString(), (Integer) record.get("count"));
+    }
+    reader.close();
+    
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+        
+    outputFiles = fileSystem.globStatus(outputPath.suffix("/testnewwrite2-r-00000.avro"));
+    Assert.assertEquals(1, outputFiles.length);
+    reader = new DataFileReader<GenericData.Record>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new GenericDatumReader<GenericData.Record>(STATS_SCHEMA_2));
+    counts = new HashMap<String, Integer>();
+    for (GenericData.Record record : reader) {
+      counts.put(((Utf8) record.get("name1")).toString(), (Integer) record.get("count1"));
+    }
+    reader.close();
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+    
+    outputFiles = fileSystem.globStatus(outputPath.suffix("/testwritenonschema-r-00000.avro"));
+    Assert.assertEquals(1, outputFiles.length);
+    reader = new DataFileReader<GenericData.Record>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new GenericDatumReader<GenericData.Record>(STATS_SCHEMA));
+    counts = new HashMap<String, Integer>();
+    for (GenericData.Record record : reader) {
+      counts.put(((Utf8) record.get("name")).toString(), (Integer) record.get("count"));
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
+  @Test
+  public void testAvroSpecificOutput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
+            .toURI().toString()));
+    job.setInputFormatClass(TextInputFormat.class);
+
+    job.setMapperClass(LineCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+    AvroMultipleOutputs.addNamedOutput(job,"myavro3",AvroKeyOutputFormat.class,TextStats.SCHEMA$,null);
+
+    job.setReducerClass(SpecificStatsReducer.class);
+    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path outputPath = new Path(dir + "/out-specific");
+    outputPath.getFileSystem(job.getConfiguration()).delete(outputPath, true);
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/myavro3-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<TextStats> reader = new DataFileReader<TextStats>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new SpecificDatumReader<TextStats>());
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (TextStats record : reader) {
+      counts.put(record.name.toString(), record.count);
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
+  @Test
+  public void testAvroInput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
+            .toURI().toString()));
+    job.setInputFormatClass(AvroKeyInputFormat.class);
+    AvroJob.setInputKeySchema(job, TextStats.SCHEMA$);
+    AvroMultipleOutputs.addNamedOutput(job,"myavro3",AvroKeyOutputFormat.class,TextStats.SCHEMA$,null);
+
+    job.setMapperClass(StatCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(SpecificStatsReducer.class);
+    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-specific-input");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/myavro3-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<TextStats> reader = new DataFileReader<TextStats>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new SpecificDatumReader<TextStats>());
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (TextStats record : reader) {
+      counts.put(record.name.toString(), record.count);
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
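+  /** Verifies that Avro records can also be used as the intermediate map output type. */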
+  @Test
+  public void testAvroMapOutput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
+            .toURI().toString()));
+    job.setInputFormatClass(AvroKeyInputFormat.class);
+    AvroJob.setInputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setMapperClass(SortMapper.class);
+    AvroJob.setMapOutputKeySchema(job, TextStats.SCHEMA$);
+    job.setMapOutputValueClass(NullWritable.class);
+
+    job.setReducerClass(SortReducer.class);
+    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-specific-input");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<TextStats> reader = new DataFileReader<TextStats>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new SpecificDatumReader<TextStats>());
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (TextStats record : reader) {
+      counts.put(record.name.toString(), record.count);
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestFsInput.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestFsInput.java
new file mode 100644
index 0000000..9b55c8a
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestFsInput.java
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+package org.apache.avro.mapreduce;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
+import java.nio.charset.Charset;
+
+import org.apache.avro.AvroTestUtil;
+import org.apache.avro.mapred.FsInput;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
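+/** Tests FsInput, which adapts files on a Hadoop FileSystem to Avro's SeekableInput interface. */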
+public class TestFsInput {
+  private static File file;
+  private static final String FILE_CONTENTS = "abcdefghijklmnopqrstuvwxyz";
+  private Configuration conf;
+  private FsInput fsInput;
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    File directory = AvroTestUtil.tempDirectory(TestFsInput.class, "file");
+    file = new File(directory, "file.txt");
+    PrintWriter out = new PrintWriter(new OutputStreamWriter(new FileOutputStream(file), Charset.forName("UTF-8")));
+    try {
+      out.print(FILE_CONTENTS);
+    } finally {
+      out.close();
+    }
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    conf = new Configuration();
+    conf.set("fs.default.name", "file:///");
+    fsInput = new FsInput(new Path(file.getPath()), conf);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    if (fsInput != null) {
+      fsInput.close();
+    }
+  }
+
+  @Test
+  public void testConfigurationConstructor() throws Exception {
+    FsInput in = new FsInput(new Path(file.getPath()), conf);
+    try {
+      int expectedByteCount = 1;
+      byte[] readBytes = new byte[expectedByteCount];
+      int actualByteCount = in.read(readBytes, 0, expectedByteCount);
+      assertThat(actualByteCount, is(equalTo(expectedByteCount)));
+    } finally {
+      in.close();
+    }
+  }
+
+  @Test
+  public void testFileSystemConstructor() throws Exception {
+    Path path = new Path(file.getPath());
+    FileSystem fs = path.getFileSystem(conf);
+    FsInput in = new FsInput(path, fs);
+    try {
+      int expectedByteCount = 1;
+      byte[] readBytes = new byte[expectedByteCount];
+      int actualByteCount = in.read(readBytes, 0, expectedByteCount);
+      assertThat(actualByteCount, is(equalTo(expectedByteCount)));
+    } finally {
+      in.close();
+    }
+  }
+
+  @Test
+  public void testLength() throws IOException {
+    assertEquals(FILE_CONTENTS.length(), fsInput.length());
+  }
+
+  @Test
+  public void testRead() throws Exception {
+    byte[] expectedBytes = FILE_CONTENTS.getBytes(Charset.forName("UTF-8"));
+    byte[] actualBytes = new byte[expectedBytes.length];
+    int actualByteCount = fsInput.read(actualBytes, 0, actualBytes.length);
+
+    assertThat(actualBytes, is(equalTo(expectedBytes)));
+    assertThat(actualByteCount, is(equalTo(expectedBytes.length)));
+  }
+
+  @Test
+  public void testSeek() throws Exception {
+    int seekPos = FILE_CONTENTS.length() / 2;
+    byte[] fileContentBytes = FILE_CONTENTS.getBytes(Charset.forName("UTF-8"));
+    byte expectedByte = fileContentBytes[seekPos];
+    fsInput.seek(seekPos);
+    byte[] readBytes = new byte[1];
+    fsInput.read(readBytes, 0, 1);
+    byte actualByte = readBytes[0];
+    assertThat(actualByte, is(equalTo(expectedByte)));
+  }
+
+  @Test
+  public void testTell() throws Exception {
+    long expectedTellPos = FILE_CONTENTS.length() / 2;
+    fsInput.seek(expectedTellPos);
+    long actualTellPos = fsInput.tell();
+    assertThat(actualTellPos, is(equalTo(expectedTellPos)));
+  }
+
+}
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueInput.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueInput.java
new file mode 100644
index 0000000..5de75ed
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueInput.java
@@ -0,0 +1,254 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.hadoop.io.AvroKeyValue;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+/**
+ * Tests that Avro container files of generic records with two fields 'key' and 'value'
+ * can be read by the AvroKeyValueInputFormat.
+ */
+public class TestKeyValueInput {
+  @Rule
+  public TemporaryFolder mTempDir = new TemporaryFolder();
+
+  /**
+   * Creates an Avro file of <docid, text> pairs to use for test input:
+   *
+   * +-----+-----------------------+
+   * | KEY | VALUE                 |
+   * +-----+-----------------------+
+   * | 1   | "apple banana carrot" |
+   * | 2   | "apple banana"        |
+   * | 3   | "apple"               |
+   * +-----+-----------------------+
+   *
+   * @return The avro file.
+   */
+  private File createInputFile() throws IOException {
+    Schema keyValueSchema = AvroKeyValue.getSchema(
+        Schema.create(Schema.Type.INT), Schema.create(Schema.Type.STRING));
+
+    AvroKeyValue<Integer, CharSequence> record1
+        = new AvroKeyValue<Integer, CharSequence>(new GenericData.Record(keyValueSchema));
+    record1.setKey(1);
+    record1.setValue("apple banana carrot");
+
+    AvroKeyValue<Integer, CharSequence> record2
+        = new AvroKeyValue<Integer, CharSequence>(new GenericData.Record(keyValueSchema));
+    record2.setKey(2);
+    record2.setValue("apple banana");
+
+    AvroKeyValue<Integer, CharSequence> record3
+        = new AvroKeyValue<Integer, CharSequence>(new GenericData.Record(keyValueSchema));
+    record3.setKey(3);
+    record3.setValue("apple");
+
+    return AvroFiles.createFile(new File(mTempDir.getRoot(), "inputKeyValues.avro"),
+        keyValueSchema, record1.get(), record2.get(), record3.get());
+  }
+
+  /** A mapper for indexing documents. */
+  public static class IndexMapper
+      extends Mapper<AvroKey<Integer>, AvroValue<CharSequence>, Text, IntWritable> {
+    @Override
+    protected void map(AvroKey<Integer> docid, AvroValue<CharSequence> body, Context context)
+        throws IOException, InterruptedException {
+      for (String token : body.datum().toString().split(" ")) {
+        context.write(new Text(token), new IntWritable(docid.datum()));
+      }
+    }
+  }
+
+  /** A reducer for aggregating token to docid mapping into a hitlist. */
+  public static class IndexReducer
+      extends Reducer<Text, IntWritable, Text, AvroValue<List<Integer>>> {
+    @Override
+    protected void reduce(Text token, Iterable<IntWritable> docids, Context context)
+        throws IOException, InterruptedException {
+      List<Integer> hitlist = new ArrayList<Integer>();
+      for (IntWritable docid : docids) {
+        hitlist.add(docid.get());
+      }
+      context.write(token, new AvroValue<List<Integer>>(hitlist));
+    }
+  }
+
+  @Test
+  public void testKeyValueInput()
+      throws ClassNotFoundException, IOException, InterruptedException {
+    // Create a test input file.
+    File inputFile = createInputFile();
+
+    // Configure the job input.
+    Job job = new Job();
+    FileInputFormat.setInputPaths(job, new Path(inputFile.getAbsolutePath()));
+    job.setInputFormatClass(AvroKeyValueInputFormat.class);
+    AvroJob.setInputKeySchema(job, Schema.create(Schema.Type.INT));
+    AvroJob.setInputValueSchema(job, Schema.create(Schema.Type.STRING));
+
+    // Configure a mapper.
+    job.setMapperClass(IndexMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    // Configure a reducer.
+    job.setReducerClass(IndexReducer.class);
+    job.setOutputKeyClass(Text.class);
+    job.setOutputValueClass(AvroValue.class);
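+    // The AvroValue wrapper carries no schema of its own, so the output
+    // value schema (an array of ints here) must be registered explicitly.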
+    AvroJob.setOutputValueSchema(job, Schema.createArray(Schema.create(Schema.Type.INT)));
+
+    // Configure the output format.
+    job.setOutputFormatClass(AvroKeyValueOutputFormat.class);
+    Path outputPath = new Path(mTempDir.getRoot().getPath(), "out-index");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    // Run the job.
+    assertTrue(job.waitForCompletion(true));
+
+    // Verify that the output Avro container file has the expected data.
+    File avroFile = new File(outputPath.toString(), "part-r-00000.avro");
+    DatumReader<GenericRecord> datumReader = new SpecificDatumReader<GenericRecord>(
+        AvroKeyValue.getSchema(Schema.create(Schema.Type.STRING),
+            Schema.createArray(Schema.create(Schema.Type.INT))));
+    DataFileReader<GenericRecord> avroFileReader
+        = new DataFileReader<GenericRecord>(avroFile, datumReader);
+    assertTrue(avroFileReader.hasNext());
+
+    AvroKeyValue<CharSequence, List<Integer>> appleRecord
+        = new AvroKeyValue<CharSequence, List<Integer>>(avroFileReader.next());
+    assertNotNull(appleRecord.get());
+    assertEquals("apple", appleRecord.getKey().toString());
+    List<Integer> appleDocs = appleRecord.getValue();
+    assertEquals(3, appleDocs.size());
+    assertTrue(appleDocs.contains(1));
+    assertTrue(appleDocs.contains(2));
+    assertTrue(appleDocs.contains(3));
+
+    assertTrue(avroFileReader.hasNext());
+    AvroKeyValue<CharSequence, List<Integer>> bananaRecord
+        = new AvroKeyValue<CharSequence, List<Integer>>(avroFileReader.next());
+    assertNotNull(bananaRecord.get());
+    assertEquals("banana", bananaRecord.getKey().toString());
+    List<Integer> bananaDocs = bananaRecord.getValue();
+    assertEquals(2, bananaDocs.size());
+    assertTrue(bananaDocs.contains(1));
+    assertTrue(bananaDocs.contains(2));
+
+    assertTrue(avroFileReader.hasNext());
+    AvroKeyValue<CharSequence, List<Integer>> carrotRecord
+        = new AvroKeyValue<CharSequence, List<Integer>>(avroFileReader.next());
+    assertEquals("carrot", carrotRecord.getKey().toString());
+    List<Integer> carrotDocs = carrotRecord.getValue();
+    assertEquals(1, carrotDocs.size());
+    assertTrue(carrotDocs.contains(1));
+
+    assertFalse(avroFileReader.hasNext());
+    avroFileReader.close();
+  }
+
+  @Test
+  public void testKeyValueInputMapOnly()
+      throws ClassNotFoundException, IOException, InterruptedException {
+    // Create a test input file.
+    File inputFile = createInputFile();
+
+    // Configure the job input.
+    Job job = new Job();
+    FileInputFormat.setInputPaths(job, new Path(inputFile.getAbsolutePath()));
+    job.setInputFormatClass(AvroKeyValueInputFormat.class);
+    AvroJob.setInputKeySchema(job, Schema.create(Schema.Type.INT));
+    AvroJob.setInputValueSchema(job, Schema.create(Schema.Type.STRING));
+
+    // Configure the identity mapper.
+    AvroJob.setMapOutputKeySchema(job, Schema.create(Schema.Type.INT));
+    AvroJob.setMapOutputValueSchema(job, Schema.create(Schema.Type.STRING));
+
+    // Configure zero reducers.
+    job.setNumReduceTasks(0);
+    job.setOutputKeyClass(AvroKey.class);
+    job.setOutputValueClass(AvroValue.class);
+
+    // Configure the output format.
+    job.setOutputFormatClass(AvroKeyValueOutputFormat.class);
+    Path outputPath = new Path(mTempDir.getRoot().getPath(), "out-index");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    // Run the job.
+    assertTrue(job.waitForCompletion(true));
+
+    // Verify that the output Avro container file has the expected data.
+    File avroFile = new File(outputPath.toString(), "part-m-00000.avro");
+    DatumReader<GenericRecord> datumReader = new SpecificDatumReader<GenericRecord>(
+        AvroKeyValue.getSchema(Schema.create(Schema.Type.INT),
+            Schema.create(Schema.Type.STRING)));
+    DataFileReader<GenericRecord> avroFileReader
+        = new DataFileReader<GenericRecord>(avroFile, datumReader);
+    assertTrue(avroFileReader.hasNext());
+
+    AvroKeyValue<Integer, CharSequence> record1
+        = new AvroKeyValue<Integer, CharSequence>(avroFileReader.next());
+    assertNotNull(record1.get());
+    assertEquals(1, record1.getKey().intValue());
+    assertEquals("apple banana carrot", record1.getValue().toString());
+
+    assertTrue(avroFileReader.hasNext());
+    AvroKeyValue<Integer, CharSequence> record2
+        = new AvroKeyValue<Integer, CharSequence>(avroFileReader.next());
+    assertNotNull(record2.get());
+    assertEquals(2, record2.getKey().intValue());
+    assertEquals("apple banana", record2.getValue().toString());
+
+    assertTrue(avroFileReader.hasNext());
+    AvroKeyValue<Integer, CharSequence> record3
+        = new AvroKeyValue<Integer, CharSequence>(avroFileReader.next());
+    assertNotNull(record3.get());
+    assertEquals(3, record3.getKey().intValue());
+    assertEquals("apple", record3.getValue().toString());
+
+    assertFalse(avroFileReader.hasNext());
+    avroFileReader.close();
+  }
+}
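
The "generic records with two fields" that this test builds come from AvroKeyValue.getSchema, which wraps a key schema and a value schema into a single record. A small sketch that only prints the resulting schema:

    import org.apache.avro.Schema;
    import org.apache.avro.hadoop.io.AvroKeyValue;

    public class KeyValueSchemaSketch {
      public static void main(String[] args) {
        Schema kv = AvroKeyValue.getSchema(
            Schema.create(Schema.Type.INT), Schema.create(Schema.Type.STRING));
        // Prints a record schema with exactly two fields, "key" and "value".
        System.out.println(kv.toString(true));
      }
    }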
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java
new file mode 100644
index 0000000..c684196
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.hadoop.io.AvroKeyValue;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+public class TestKeyValueWordCount {
+  @Rule
+  public TemporaryFolder mTempDir = new TemporaryFolder();
+
+  public static class LineCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
+    private IntWritable mOne;
+
+    @Override
+    protected void setup(Context context) {
+      mOne = new IntWritable(1);
+    }
+
+    @Override
+    protected void map(LongWritable fileByteOffset, Text line, Context context)
+        throws IOException, InterruptedException {
+      context.write(line, mOne);
+    }
+  }
+
+  public static class IntSumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
+    @Override
+    protected void reduce(Text word, Iterable<IntWritable> counts, Context context)
+        throws IOException, InterruptedException {
+      int sum = 0;
+      for (IntWritable count : counts) {
+        sum += count.get();
+      }
+      context.write(word, new IntWritable(sum));
+    }
+  }
+
+  @Test
+  public void testKeyValueMapReduce()
+      throws ClassNotFoundException, IOException, InterruptedException, URISyntaxException {
+    // Configure a word count job over our test input file.
+    Job job = new Job();
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
+            .toURI().toString()));
+    job.setInputFormatClass(TextInputFormat.class);
+
+    job.setMapperClass(LineCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(IntSumReducer.class);
+    job.setOutputKeyClass(Text.class);
+    job.setOutputValueClass(IntWritable.class);
+
+    job.setOutputFormatClass(AvroKeyValueOutputFormat.class);
+    Path outputPath = new Path(mTempDir.getRoot().getPath() + "/out-wordcount");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    // Run the job.
+    assertTrue(job.waitForCompletion(true));
+
+    // Verify that the Avro container file generated had the right KeyValuePair generic records.
+    File avroFile = new File(outputPath.toString(), "part-r-00000.avro");
+    DatumReader<GenericRecord> datumReader = new SpecificDatumReader<GenericRecord>(
+        AvroKeyValue.getSchema(Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.INT)));
+    DataFileReader<GenericRecord> avroFileReader
+        = new DataFileReader<GenericRecord>(avroFile, datumReader);
+
+    assertTrue(avroFileReader.hasNext());
+    AvroKeyValue<CharSequence, Integer> appleRecord
+        = new AvroKeyValue<CharSequence, Integer>(avroFileReader.next());
+    assertNotNull(appleRecord.get());
+    assertEquals("apple", appleRecord.getKey().toString());
+    assertEquals(3, appleRecord.getValue().intValue());
+
+    assertTrue(avroFileReader.hasNext());
+    AvroKeyValue<CharSequence, Integer> bananaRecord
+        = new AvroKeyValue<CharSequence, Integer>(avroFileReader.next());
+    assertNotNull(bananaRecord.get());
+    assertEquals("banana", bananaRecord.getKey().toString());
+    assertEquals(2, bananaRecord.getValue().intValue());
+
+    assertTrue(avroFileReader.hasNext());
+    AvroKeyValue<CharSequence, Integer> carrotRecord
+        = new AvroKeyValue<CharSequence, Integer>(avroFileReader.next());
+    assertEquals("carrot", carrotRecord.getKey().toString());
+    assertEquals(1, carrotRecord.getValue().intValue());
+
+    assertFalse(avroFileReader.hasNext());
+    avroFileReader.close();
+  }
+}
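
Since AvroKeyValue is only a thin wrapper over a generic record with fields "key" and "value", the word-count output above could equally be consumed without it. A sketch, with the file name as a placeholder:

    import java.io.File;
    import java.io.IOException;

    import org.apache.avro.file.DataFileReader;
    import org.apache.avro.generic.GenericDatumReader;
    import org.apache.avro.generic.GenericRecord;

    public class ReadWordCountSketch {
      public static void main(String[] args) throws IOException {
        DataFileReader<GenericRecord> reader = new DataFileReader<GenericRecord>(
            new File("part-r-00000.avro"), new GenericDatumReader<GenericRecord>());
        try {
          for (GenericRecord record : reader) {
            // Each datum is a KeyValuePair record; access the fields directly.
            System.out.println(record.get("key") + "\t" + record.get("value"));
          }
        } finally {
          reader.close();
        }
      }
    }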
diff --git a/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java
new file mode 100644
index 0000000..e0f190f
--- /dev/null
+++ b/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestWordCount.java
@@ -0,0 +1,521 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied.  See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package org.apache.avro.mapreduce;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.avro.mapred.FsInput;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.util.Utf8;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+public class TestWordCount {
+  @Rule
+  public TemporaryFolder tmpFolder = new TemporaryFolder();
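+  // Note: Schema.parse(String) is deprecated; new Schema.Parser().parse(String)
+  // is the preferred API for parsing a schema from JSON.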
+  public static final Schema STATS_SCHEMA =
+      Schema.parse("{\"name\":\"stats\",\"type\":\"record\","
+          + "\"fields\":[{\"name\":\"count\",\"type\":\"int\"},"
+          + "{\"name\":\"name\",\"type\":\"string\"}]}");
+
+  public static class ReflectStats {
+    String name;
+    int count;
+  }
+
+  // permit data written with the specific TextStats schema to be read as ReflectStats
+  private static Schema REFLECT_STATS_SCHEMA
+    = ReflectData.get().getSchema(ReflectStats.class);
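+  // Registering the TextStats alias lets Avro schema resolution treat data
+  // written with the specific TextStats schema as ReflectStats when read,
+  // even though the two record names differ.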
+  static {
+    REFLECT_STATS_SCHEMA.addAlias(TextStats.SCHEMA$.getFullName());
+  }
+
+  private static class LineCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
+    private IntWritable mOne;
+
+    @Override
+    protected void setup(Context context) {
+      mOne = new IntWritable(1);
+    }
+
+    @Override
+    protected void map(LongWritable fileByteOffset, Text line, Context context)
+        throws IOException, InterruptedException {
+      context.write(line, mOne);
+    }
+  }
+
+  private static class StatCountMapper
+      extends Mapper<AvroKey<TextStats>, NullWritable, Text, IntWritable> {
+    private IntWritable mCount;
+    private Text mText;
+
+    @Override
+    protected void setup(Context context) {
+      mCount = new IntWritable(0);
+      mText = new Text("");
+    }
+
+    @Override
+    protected void map(AvroKey<TextStats> record, NullWritable ignore, Context context)
+        throws IOException, InterruptedException {
+      mCount.set(record.datum().count);
+      mText.set(record.datum().name.toString());
+      context.write(mText, mCount);
+    }
+  }
+
+  private static class ReflectCountMapper
+      extends Mapper<AvroKey<ReflectStats>, NullWritable, Text, IntWritable> {
+    private IntWritable mCount;
+    private Text mText;
+
+    @Override
+    protected void setup(Context context) {
+      mCount = new IntWritable(0);
+      mText = new Text("");
+    }
+
+    @Override
+    protected void map(AvroKey<ReflectStats> record, NullWritable ignore, Context context)
+        throws IOException, InterruptedException {
+      mCount.set(record.datum().count);
+      mText.set(record.datum().name);
+      context.write(mText, mCount);
+    }
+  }
+
+  private static class AvroSumReducer
+      extends Reducer<Text, IntWritable, AvroKey<CharSequence>, AvroValue<Integer>> {
+    @Override
+    protected void reduce(Text key, Iterable<IntWritable> counts, Context context)
+        throws IOException, InterruptedException {
+      int sum = 0;
+      for (IntWritable count : counts) {
+        sum += count.get();
+      }
+      context.write(new AvroKey<CharSequence>(key.toString()), new AvroValue<Integer>(sum));
+    }
+  }
+
+  private static class GenericStatsReducer
+      extends Reducer<Text, IntWritable, AvroKey<GenericData.Record>, NullWritable> {
+    private AvroKey<GenericData.Record> mStats;
+
+    @Override
+    protected void setup(Context context) {
+      mStats = new AvroKey<GenericData.Record>(null);
+    }
+
+    @Override
+    protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
+        throws IOException, InterruptedException {
+      GenericData.Record record = new GenericData.Record(STATS_SCHEMA);
+      int sum = 0;
+      for (IntWritable count : counts) {
+        sum += count.get();
+      }
+      record.put("name", new Utf8(line.toString()));
+      record.put("count", new Integer(sum));
+      mStats.datum(record);
+      context.write(mStats, NullWritable.get());
+    }
+  }
+
+  private static class SpecificStatsReducer
+      extends Reducer<Text, IntWritable, AvroKey<TextStats>, NullWritable> {
+    private AvroKey<TextStats> mStats;
+
+    @Override
+    protected void setup(Context context) {
+      mStats = new AvroKey<TextStats>(null);
+    }
+
+    @Override
+    protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
+        throws IOException, InterruptedException {
+      TextStats record = new TextStats();
+      record.count = 0;
+      for (IntWritable count : counts) {
+        record.count += count.get();
+      }
+      record.name = line.toString();
+      mStats.datum(record);
+      context.write(mStats, NullWritable.get());
+    }
+  }
+
+  private static class ReflectStatsReducer
+      extends Reducer<Text, IntWritable, AvroKey<ReflectStats>, NullWritable> {
+    private AvroKey<ReflectStats> mStats;
+
+    @Override
+    protected void setup(Context context) {
+      mStats = new AvroKey<ReflectStats>(null);
+    }
+
+    @Override
+    protected void reduce(Text line, Iterable<IntWritable> counts, Context context)
+        throws IOException, InterruptedException {
+      ReflectStats record = new ReflectStats();
+      record.count = 0;
+      for (IntWritable count : counts) {
+        record.count += count.get();
+      }
+      record.name = line.toString();
+      mStats.datum(record);
+      context.write(mStats, NullWritable.get());
+    }
+  }
+
+  private static class SortMapper
+      extends Mapper<AvroKey<TextStats>, NullWritable, AvroKey<TextStats>, NullWritable> {
+    @Override
+    protected void map(AvroKey<TextStats> key, NullWritable value, Context context)
+        throws IOException, InterruptedException {
+      context.write(key, value);
+    }
+  }
+
+  private static class SortReducer
+      extends Reducer<AvroKey<TextStats>, NullWritable, AvroKey<TextStats>, NullWritable> {
+    @Override
+    protected void reduce(AvroKey<TextStats> key, Iterable<NullWritable> ignore, Context context)
+        throws IOException, InterruptedException {
+      context.write(key, NullWritable.get());
+    }
+  }
+
+  @Test
+  public void testAvroGenericOutput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
+            .toURI().toString()));
+    job.setInputFormatClass(TextInputFormat.class);
+
+    job.setMapperClass(LineCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(GenericStatsReducer.class);
+    AvroJob.setOutputKeySchema(job, STATS_SCHEMA);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-generic");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<GenericData.Record> reader = new DataFileReader<GenericData.Record>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new GenericDatumReader<GenericData.Record>(STATS_SCHEMA));
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (GenericData.Record record : reader) {
+      counts.put(((Utf8) record.get("name")).toString(), (Integer) record.get("count"));
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
+  @Test
+  public void testAvroSpecificOutput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
+            .toURI().toString()));
+    job.setInputFormatClass(TextInputFormat.class);
+
+    job.setMapperClass(LineCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(SpecificStatsReducer.class);
+    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-specific");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<TextStats> reader = new DataFileReader<TextStats>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new SpecificDatumReader<TextStats>());
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (TextStats record : reader) {
+      counts.put(record.name.toString(), record.count);
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
+  @Test
+  public void testAvroReflectOutput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
+            .toURI().toString()));
+    job.setInputFormatClass(TextInputFormat.class);
+
+    job.setMapperClass(LineCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(ReflectStatsReducer.class);
+    AvroJob.setOutputKeySchema(job, REFLECT_STATS_SCHEMA);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-reflect");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<ReflectStats> reader = new DataFileReader<ReflectStats>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new ReflectDatumReader<ReflectStats>());
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (ReflectStats record : reader) {
+      counts.put(record.name.toString(), record.count);
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
+  @Test
+  public void testAvroInput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
+            .toURI().toString()));
+    job.setInputFormatClass(AvroKeyInputFormat.class);
+    AvroJob.setInputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setMapperClass(StatCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(SpecificStatsReducer.class);
+    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-specific-input");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<TextStats> reader = new DataFileReader<TextStats>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new SpecificDatumReader<TextStats>());
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (TextStats record : reader) {
+      counts.put(record.name.toString(), record.count);
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
+  @Test
+  public void testReflectInput() throws Exception {
+    Job job = new Job();
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
+            .toURI().toString()));
+    job.setInputFormatClass(AvroKeyInputFormat.class);
+    AvroJob.setInputKeySchema(job, REFLECT_STATS_SCHEMA);
+
+    job.setMapperClass(ReflectCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(ReflectStatsReducer.class);
+    AvroJob.setOutputKeySchema(job, REFLECT_STATS_SCHEMA);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-reflect-input");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<ReflectStats> reader = new DataFileReader<ReflectStats>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new ReflectDatumReader<ReflectStats>());
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (ReflectStats record : reader) {
+      counts.put(record.name.toString(), record.count);
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
+  @Test
+  public void testAvroMapOutput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.avro")
+            .toURI().toString()));
+    job.setInputFormatClass(AvroKeyInputFormat.class);
+    AvroJob.setInputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setMapperClass(SortMapper.class);
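+    // Avro-typed intermediate (shuffle) data needs its schema registered so
+    // that Hadoop uses Avro serialization between the map and reduce phases.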
+    AvroJob.setMapOutputKeySchema(job, TextStats.SCHEMA$);
+    job.setMapOutputValueClass(NullWritable.class);
+
+    job.setReducerClass(SortReducer.class);
+    AvroJob.setOutputKeySchema(job, TextStats.SCHEMA$);
+
+    job.setOutputFormatClass(AvroKeyOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-map-output");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    DataFileReader<TextStats> reader = new DataFileReader<TextStats>(
+        new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
+        new SpecificDatumReader<TextStats>());
+    Map<String, Integer> counts = new HashMap<String, Integer>();
+    for (TextStats record : reader) {
+      counts.put(record.name.toString(), record.count);
+    }
+    reader.close();
+
+    Assert.assertEquals(3, counts.get("apple").intValue());
+    Assert.assertEquals(2, counts.get("banana").intValue());
+    Assert.assertEquals(1, counts.get("carrot").intValue());
+  }
+
+  /**
+   * Tests the MR output to text files when using AvroKey and AvroValue records.
+   */
+  @Test
+  public void testAvroUsingTextFileOutput() throws Exception {
+    Job job = new Job();
+
+    FileInputFormat.setInputPaths(job, new Path(getClass()
+            .getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt")
+            .toURI().toString()));
+    job.setInputFormatClass(TextInputFormat.class);
+
+    job.setMapperClass(LineCountMapper.class);
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(IntWritable.class);
+
+    job.setReducerClass(AvroSumReducer.class);
+    AvroJob.setOutputKeySchema(job, Schema.create(Schema.Type.STRING));
+    AvroJob.setOutputValueSchema(job, Schema.create(Schema.Type.INT));
+
+    job.setOutputFormatClass(TextOutputFormat.class);
+    Path outputPath = new Path(tmpFolder.getRoot().getPath() + "/out-text");
+    FileOutputFormat.setOutputPath(job, outputPath);
+
+    Assert.assertTrue(job.waitForCompletion(true));
+
+    // Check that the results from the MapReduce were as expected.
+    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
+    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
+    Assert.assertEquals(1, outputFiles.length);
+    Path filePath = outputFiles[0].getPath();
+    InputStream inputStream = filePath.getFileSystem(job.getConfiguration()).open(filePath);
+    Assert.assertNotNull(inputStream);
+    BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
+    try {
+      Assert.assertTrue(reader.ready());
+      Assert.assertEquals("apple\t3", reader.readLine());
+      Assert.assertEquals("banana\t2", reader.readLine());
+      Assert.assertEquals("carrot\t1", reader.readLine());
+      Assert.assertFalse(reader.ready());
+    } finally {
+      reader.close();
+    }
+  }
+}
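
The reflect tests above hinge on reader/writer schema resolution: because REFLECT_STATS_SCHEMA carries an alias for the TextStats record name, a file written with the specific TextStats schema can be opened through the reflect API. A condensed sketch of that cross-model read; the file name is hypothetical and the writer's full record name is assumed for illustration:

    import java.io.File;
    import java.io.IOException;

    import org.apache.avro.Schema;
    import org.apache.avro.file.DataFileReader;
    import org.apache.avro.reflect.ReflectData;
    import org.apache.avro.reflect.ReflectDatumReader;

    public class AliasReadSketch {
      public static class ReflectStats {
        String name;
        int count;
      }

      public static void main(String[] args) throws IOException {
        Schema readerSchema = ReflectData.get().getSchema(ReflectStats.class);
        readerSchema.addAlias("org.apache.avro.mapreduce.TextStats"); // assumed writer record name
        DataFileReader<ReflectStats> reader = new DataFileReader<ReflectStats>(
            new File("stats.avro"), new ReflectDatumReader<ReflectStats>(readerSchema));
        try {
          for (ReflectStats stats : reader) {
            System.out.println(stats.name + "\t" + stats.count);
          }
        } finally {
          reader.close();
        }
      }
    }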
diff --git a/lang/java/mapred/src/test/resources/log4j.properties b/lang/java/mapred/src/test/resources/log4j.properties
new file mode 100644
index 0000000..8abeb8d
--- /dev/null
+++ b/lang/java/mapred/src/test/resources/log4j.properties
@@ -0,0 +1,8 @@
+log4j.rootLogger=DEBUG,console
+
+# Define the console appender.
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c: %m%n
+
diff --git a/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/._SUCCESS.crc b/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/._SUCCESS.crc
new file mode 100644
index 0000000..3b7b044
Binary files /dev/null and b/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/._SUCCESS.crc differ
diff --git a/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/.part-r-00000.avro.crc b/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/.part-r-00000.avro.crc
new file mode 100644
index 0000000..c9e5f10
Binary files /dev/null and b/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/.part-r-00000.avro.crc differ
diff --git a/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/_SUCCESS b/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/_SUCCESS
new file mode 100755
index 0000000..e69de29
diff --git a/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/part-r-00000.avro b/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/part-r-00000.avro
new file mode 100755
index 0000000..e63004b
Binary files /dev/null and b/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.avro/part-r-00000.avro differ
diff --git a/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.txt b/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.txt
new file mode 100644
index 0000000..dfd6696
--- /dev/null
+++ b/lang/java/mapred/src/test/resources/org/apache/avro/mapreduce/mapreduce-test-input.txt
@@ -0,0 +1,6 @@
+apple
+banana
+banana
+carrot
+apple
+apple
diff --git a/lang/java/maven-plugin/pom.xml b/lang/java/maven-plugin/pom.xml
new file mode 100644
index 0000000..971e5ef
--- /dev/null
+++ b/lang/java/maven-plugin/pom.xml
@@ -0,0 +1,105 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <artifactId>avro-parent</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.8.0</version>
+    <relativePath>../</relativePath>
+  </parent>
+
+  <artifactId>avro-maven-plugin</artifactId>
+  <packaging>maven-plugin</packaging>
+
+  <name>Apache Avro Maven Plugin</name>
+  <description>Maven plugin for Avro IDL and Specific API Compilers</description>
+
+  <prerequisites>
+    <maven>${maven.version}</maven>
+  </prerequisites>
+
+  <properties>
+    <pluginTestingVersion>1.3</pluginTestingVersion>
+  </properties>
+  
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-plugin-plugin</artifactId>
+        <version>${plugin-plugin.version}</version>
+        <executions>
+          <execution>
+            <id>generated-helpmojo</id>
+            <goals>
+              <goal>helpmojo</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <!-- maven -->
+    <dependency>
+      <groupId>org.apache.maven</groupId>
+      <artifactId>maven-plugin-api</artifactId>
+      <version>${maven.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.maven</groupId>
+      <artifactId>maven-project</artifactId>
+      <version>${maven.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.maven.shared</groupId>
+      <artifactId>file-management</artifactId>
+      <version>${file-management.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro-compiler</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <!-- test-->
+    <dependency>
+      <groupId>org.apache.maven</groupId>
+      <artifactId>maven-core</artifactId>
+      <version>${maven.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.maven.plugin-testing</groupId>
+      <artifactId>maven-plugin-testing-tools</artifactId>
+      <version>${pluginTestingVersion}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.maven.plugin-testing</groupId>
+      <artifactId>maven-plugin-testing-harness</artifactId>
+      <version>${pluginTestingVersion}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+</project>
diff --git a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/AbstractAvroMojo.java b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/AbstractAvroMojo.java
new file mode 100644
index 0000000..a5d8b31
--- /dev/null
+++ b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/AbstractAvroMojo.java
@@ -0,0 +1,239 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mojo;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.avro.compiler.specific.SpecificCompiler;
+
+import org.apache.maven.plugin.AbstractMojo;
+import org.apache.maven.plugin.MojoExecutionException;
+import org.apache.maven.project.MavenProject;
+import org.apache.maven.shared.model.fileset.FileSet;
+import org.apache.maven.shared.model.fileset.util.FileSetManager;
+
+/**
+ * Base for Avro Compiler Mojos.
+ */
+public abstract class AbstractAvroMojo extends AbstractMojo {
+  /**
+   * The source directory of avro files. This directory is added to the
+   * classpath at schema compiling time. All files can therefore be referenced
+   * as classpath resources following the directory structure under the
+   * source directory.
+   *
+   * @parameter property="sourceDirectory"
+   *            default-value="${basedir}/src/main/avro"
+   */
+  private File sourceDirectory;
+
+  /**
+   * @parameter property="outputDirectory"
+   *            default-value="${project.build.directory}/generated-sources/avro"
+   */
+  private File outputDirectory;
+
+  /**
+   * @parameter property="sourceDirectory"
+   *            default-value="${basedir}/src/test/avro"
+   */
+  private File testSourceDirectory;
+
+  /**
+   * @parameter property="outputDirectory"
+   *            default-value="${project.build.directory}/generated-test-sources/avro"
+   */
+  private File testOutputDirectory;
+
+  /**
+   * The field visibility indicator for the fields of the generated class, as
+   * string values of SpecificCompiler.FieldVisibility.  The text is case
+   * insensitive.
+   *
+   * @parameter default-value="PUBLIC_DEPRECATED"
+   */
+  private String fieldVisibility;
+
+  /**
+   * A list of files or directories that should be compiled first, thus making
+   * them importable by subsequently compiled schemas. Note that imported files
+   * should not reference each other.
+   * @parameter 
+   */
+  protected String[] imports;
+  
+  /**
+   * A set of Ant-like exclusion patterns used to prevent certain files from
+   * being processed. By default, this set is empty such that no files are
+   * excluded.
+   * 
+   * @parameter
+   */
+  protected String[] excludes = new String[0];
+
+  /**
+   * A set of Ant-like exclusion patterns used to prevent certain files from
+   * being processed. By default, this set is empty such that no files are
+   * excluded.
+   * 
+   * @parameter
+   */
+  protected String[] testExcludes = new String[0];
+
+  /**
+   * The Java type to use for Avro strings. May be one of CharSequence,
+   * String or Utf8. CharSequence by default.
+   *
+   * @parameter property="stringType"
+   */
+  protected String stringType = "CharSequence";
+
+  /**
+   * The directory (within the java classpath) that contains the velocity templates
+   * to use for code generation. The default value points to the templates included
+   * with the avro-maven-plugin.
+   *
+   * @parameter property="templateDirectory"
+   */
+  protected String templateDirectory = "/org/apache/avro/compiler/specific/templates/java/classic/";
+
+  /**
+   * Determines whether or not to create setters for the fields of the record.
+   * The default is to create setters.
+   *
+   * @parameter default-value="true"
+   */
+  protected boolean createSetters;
+
+  /**
+   * The current Maven project.
+   * 
+   * @parameter default-value="${project}"
+   * @readonly
+   * @required
+   */
+  protected MavenProject project;
+
+  @Override
+  public void execute() throws MojoExecutionException {
+    boolean hasSourceDir = null != sourceDirectory
+        && sourceDirectory.isDirectory();
+    boolean hasImports = null != imports;
+    boolean hasTestDir = null != testSourceDirectory
+        && testSourceDirectory.isDirectory();
+    if (!hasSourceDir && !hasTestDir) {
+      throw new MojoExecutionException("neither sourceDirectory: "
+          + sourceDirectory + " or testSourceDirectory: " + testSourceDirectory
+          + " are directories");
+    }
+
+    if (hasImports) {
+      for (String importedFile : imports) {
+        File file = new File(importedFile);
+        if (file.isDirectory()) {
+          String[] includedFiles = getIncludedFiles(file.getAbsolutePath(), excludes, getIncludes());
+          getLog().info("Importing Directory: " + file.getAbsolutePath());
+          getLog().debug("Importing Directory Files: " + Arrays.toString(includedFiles));
+          compileFiles(includedFiles, file, outputDirectory);
+        } else if (file.isFile()) {
+          getLog().info("Importing File: " + file.getAbsolutePath());
+          compileFiles(new String[]{file.getName()}, file.getParentFile(), outputDirectory);
+        }
+      }
+    }
+
+    if (hasSourceDir) {
+      String[] includedFiles = getIncludedFiles(
+          sourceDirectory.getAbsolutePath(), excludes, getIncludes());
+      compileFiles(includedFiles, sourceDirectory, outputDirectory);
+    }
+    
+    if (hasImports || hasSourceDir) {
+      project.addCompileSourceRoot(outputDirectory.getAbsolutePath());
+    }
+    
+    if (hasTestDir) {
+      String[] includedFiles = getIncludedFiles(
+          testSourceDirectory.getAbsolutePath(), testExcludes,
+          getTestIncludes());
+      compileFiles(includedFiles, testSourceDirectory, testOutputDirectory);
+      project.addTestCompileSourceRoot(testOutputDirectory.getAbsolutePath());
+    }
+  }
+
+  private String[] getIncludedFiles(String absPath, String[] excludes,
+      String[] includes) {
+    FileSetManager fileSetManager = new FileSetManager();
+    FileSet fs = new FileSet();
+    fs.setDirectory(absPath);
+    fs.setFollowSymlinks(false);
+    
+    // Exclude the imported files/directories since they have already been
+    // compiled. Register the exclude inside each branch so that a stale or
+    // null pattern is never added for a path that does not exist.
+    if (imports != null) {
+      for (String importFile : this.imports) {
+        File file = new File(importFile);
+
+        if (file.isDirectory()) {
+          fs.addExclude(file.getName() + "/**");
+        } else if (file.isFile()) {
+          fs.addExclude("**/" + file.getName());
+        }
+      }
+    }
+    for (String include : includes) {
+      fs.addInclude(include);
+    }
+    for (String exclude : excludes) {
+      fs.addExclude(exclude);
+    }
+    return fileSetManager.getIncludedFiles(fs);
+  }
+
+  private void compileFiles(String[] files, File sourceDir, File outDir) throws MojoExecutionException {
+    for (String filename : files) {
+      try {
+        doCompile(filename, sourceDir, outDir);
+      } catch (IOException e) {
+        throw new MojoExecutionException("Error compiling protocol file "
+            + filename + " to " + outDir, e);
+      }
+    }
+  }
+
+  protected SpecificCompiler.FieldVisibility getFieldVisibility() {
+    try {
+      String upper = String.valueOf(this.fieldVisibility).trim().toUpperCase();
+      return SpecificCompiler.FieldVisibility.valueOf(upper);
+    } catch (IllegalArgumentException e) {
+      return SpecificCompiler.FieldVisibility.PUBLIC_DEPRECATED;
+    }
+  }
+
+  protected abstract void doCompile(String filename, File sourceDirectory, File outputDirectory) throws IOException;
+
+  protected abstract String[] getIncludes();
+
+  protected abstract String[] getTestIncludes();
+
+}
diff --git a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/IDLProtocolMojo.java b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/IDLProtocolMojo.java
new file mode 100644
index 0000000..0e9ef9c
--- /dev/null
+++ b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/IDLProtocolMojo.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mojo;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.avro.Protocol;
+import org.apache.avro.compiler.idl.Idl;
+import org.apache.avro.compiler.idl.ParseException;
+import org.apache.avro.compiler.specific.SpecificCompiler;
+import org.apache.avro.generic.GenericData;
+
+import org.apache.maven.artifact.DependencyResolutionRequiredException;
+
+/**
+ * Generate Java classes and interfaces from AvroIDL files (.avdl)
+ * 
+ * @goal idl-protocol
+ * @requiresDependencyResolution runtime
+ * @phase generate-sources
+ * @threadSafe
+ */
+public class IDLProtocolMojo extends AbstractAvroMojo {
+  /**
+   * A set of Ant-like inclusion patterns used to select files from the source
+   * directory for processing. By default, the pattern
+   * <code>**&#47;*.avdl</code> is used to select IDL files.
+   * 
+   * @parameter
+   */
+  private String[] includes = new String[] { "**/*.avdl" };
+  
+  /**
+   * A set of Ant-like inclusion patterns used to select files from the source
+   * directory for processing. By default, the pattern
+   * <code>**&#47;*.avdl</code> is used to select IDL files.
+   * 
+   * @parameter
+   */
+  private String[] testIncludes = new String[] { "**/*.avdl" };
+
+  @Override
+  protected void doCompile(String filename, File sourceDirectory, File outputDirectory) throws IOException {
+    try {
+      @SuppressWarnings("rawtypes")
+      List runtimeClasspathElements = project.getRuntimeClasspathElements();
+      Idl parser;
+
+      List<URL> runtimeUrls = new ArrayList<URL>();
+
+      // Add the source directory of avro files to the classpath so that
+      // imports can refer to other idl files as classpath resources
+      runtimeUrls.add(sourceDirectory.toURI().toURL());
+
+      // If runtimeClasspathElements is not empty, add its entries to the
+      // Idl classpath as well.
+      if (runtimeClasspathElements != null && !runtimeClasspathElements.isEmpty()) {
+        for (Object runtimeClasspathElement : runtimeClasspathElements) {
+          String element = (String) runtimeClasspathElement;
+          runtimeUrls.add(new File(element).toURI().toURL());
+        }
+      }
+
+      URLClassLoader projPathLoader = new URLClassLoader(
+          runtimeUrls.toArray(new URL[0]), Thread.currentThread().getContextClassLoader());
+      parser = new Idl(new File(sourceDirectory, filename), projPathLoader);
+
+      Protocol p = parser.CompilationUnit();
+      String json = p.toString(true);
+      Protocol protocol = Protocol.parse(json);
+      SpecificCompiler compiler = new SpecificCompiler(protocol);
+      compiler.setStringType(GenericData.StringType.valueOf(stringType));
+      compiler.setTemplateDir(templateDirectory);
+      compiler.setFieldVisibility(getFieldVisibility());
+      compiler.setCreateSetters(createSetters);
+      compiler.compileToDestination(null, outputDirectory);
+    } catch (ParseException e) {
+      throw new IOException(e);
+    } catch (DependencyResolutionRequiredException drre) {
+      throw new IOException(drre);
+    }
+  }
+
+  @Override
+  protected String[] getIncludes() {
+    return includes;
+  }
+
+
+  @Override
+  protected String[] getTestIncludes() {
+    return testIncludes;
+  }
+}
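
doCompile above also juggles the project's runtime classpath; stripped of that, the core pipeline it drives is simply parse, convert, compile. A condensed sketch with hypothetical paths:

    import java.io.File;
    import java.io.IOException;

    import org.apache.avro.Protocol;
    import org.apache.avro.compiler.idl.Idl;
    import org.apache.avro.compiler.idl.ParseException;
    import org.apache.avro.compiler.specific.SpecificCompiler;

    public class IdlPipelineSketch {
      public static void main(String[] args) throws IOException, ParseException {
        // Parse the IDL file into a Protocol, then generate Java sources from it.
        Idl idl = new Idl(new File("src/main/avro/service.avdl"));
        Protocol protocol = idl.CompilationUnit();
        SpecificCompiler compiler = new SpecificCompiler(protocol);
        compiler.compileToDestination(null, new File("target/generated-sources/avro"));
      }
    }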
diff --git a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/ProtocolMojo.java b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/ProtocolMojo.java
new file mode 100644
index 0000000..461559b
--- /dev/null
+++ b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/ProtocolMojo.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mojo;
+
+import org.apache.avro.generic.GenericData.StringType;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.avro.Protocol;
+import org.apache.avro.compiler.specific.SpecificCompiler;
+
+/**
+ * Generate Java classes and interfaces from Avro protocol files (.avpr)
+ * 
+ * @goal protocol
+ * @phase generate-sources
+ * @threadSafe
+ */
+public class ProtocolMojo extends AbstractAvroMojo {
+  /**
+   * A set of Ant-like inclusion patterns used to select files from the source
+   * directory for processing. By default, the pattern
+   * <code>**&#47;*.avpr</code> is used to select protocol files.
+   * 
+   * @parameter
+   */
+  private String[] includes = new String[] { "**/*.avpr" };
+
+  /**
+   * A set of Ant-like inclusion patterns used to select files from the source
+   * directory for processing. By default, the pattern
+   * <code>**&#47;*.avpr</code> is used to select protocol files.
+   * 
+   * @parameter
+   */
+  private String[] testIncludes = new String[] { "**/*.avpr" };
+  
+  @Override
+  protected void doCompile(String filename, File sourceDirectory, File outputDirectory) throws IOException {
+    File src = new File(sourceDirectory, filename);
+    Protocol protocol = Protocol.parse(src);
+    SpecificCompiler compiler = new SpecificCompiler(protocol);
+    compiler.setTemplateDir(templateDirectory);
+    compiler.setStringType(StringType.valueOf(stringType));
+    compiler.setFieldVisibility(getFieldVisibility());
+    compiler.setCreateSetters(createSetters);
+    compiler.compileToDestination(src, outputDirectory);
+  }
+
+  @Override
+  protected String[] getIncludes() {
+    return includes;
+  }
+
+  @Override
+  protected String[] getTestIncludes() {
+    return testIncludes;
+  }
+}
diff --git a/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/SchemaMojo.java b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/SchemaMojo.java
new file mode 100644
index 0000000..7a7eaf9
--- /dev/null
+++ b/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/SchemaMojo.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.mojo;
+
+import org.apache.avro.generic.GenericData.StringType;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.compiler.specific.SpecificCompiler;
+
+/**
+ * Generate Java classes from Avro schema files (.avsc)
+ * 
+ * @goal schema
+ * @phase generate-sources
+ * @threadSafe
+ */
+public class SchemaMojo extends AbstractAvroMojo {
+  /**
+   * A parser shared by all schema files. Using a common parser allows
+   * schemas to reference types imported from other files.
+   */
+  private Schema.Parser schemaParser = new Schema.Parser();
+
+  /**
+   * A set of Ant-like inclusion patterns used to select files from the source
+   * directory for processing. By default, the pattern
+   * <code>**/*.avsc</code> is used to select schema files.
+   * 
+   * @parameter
+   */
+  private String[] includes = new String[] { "**/*.avsc" };
+  
+  /**
+   * A set of Ant-like inclusion patterns used to select files from the test
+   * source directory for processing. By default, the pattern
+   * <code>**/*.avsc</code> is used to select schema files.
+   * 
+   * @parameter
+   */
+  private String[] testIncludes = new String[] { "**/*.avsc" };
+
+  @Override
+  protected void doCompile(String filename, File sourceDirectory, File outputDirectory) throws IOException {
+    File src = new File(sourceDirectory, filename);
+    Schema schema;
+
+    // This is necessary to maintain backward compatibility. If no files are
+    // imported, each schema is parsed in isolation; otherwise all schemas
+    // share a single parser so that types defined in one file can be reused
+    // in another.
+    if (imports == null) {
+      schema = new Schema.Parser().parse(src);
+    } else {
+      schema = schemaParser.parse(src);
+    }
+    
+    SpecificCompiler compiler = new SpecificCompiler(schema);
+    compiler.setTemplateDir(templateDirectory);
+    compiler.setStringType(StringType.valueOf(stringType));
+    compiler.setFieldVisibility(getFieldVisibility());
+    compiler.setCreateSetters(createSetters);
+    compiler.setOutputCharacterEncoding(project.getProperties().getProperty("project.build.sourceEncoding"));
+    compiler.compileToDestination(src, outputDirectory);
+  }
+
+  @Override
+  protected String[] getIncludes() {
+    return includes;
+  }
+
+  @Override
+  protected String[] getTestIncludes() {
+    return testIncludes;
+  }
+}
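The effect of the shared parser is easiest to see outside Maven. A minimal sketch using the test schemas added below (paths relative to the maven-plugin module); the imported enums must be parsed before the schema that references them:

    import java.io.File;
    import org.apache.avro.Schema;

    public class SharedParserExample {
      public static void main(String[] args) throws Exception {
        // One parser accumulates named types, so a later file can refer to
        // types defined in an earlier one.
        Schema.Parser parser = new Schema.Parser();
        parser.parse(new File("src/test/avro/imports/PrivacyImport.avsc"));
        parser.parse(new File("src/test/avro/directImport/PrivacyDirectImport.avsc"));
        Schema user = parser.parse(new File("src/test/avro/User.avsc"));
        System.out.println(user.getField("privacyImported").schema());
      }
    }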
diff --git a/lang/java/maven-plugin/src/test/avro/AvdlClasspathImport.avdl b/lang/java/maven-plugin/src/test/avro/AvdlClasspathImport.avdl
new file mode 100644
index 0000000..2c6ced7
--- /dev/null
+++ b/lang/java/maven-plugin/src/test/avro/AvdlClasspathImport.avdl
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ at namespace("test")
+protocol IdlClasspathImportTest {
+    import idl "avro/User.avdl";
+
+    record IdlUserWrapper {
+      union { null, test.IdlUser } wrapped;
+    }
+
+}
diff --git a/lang/java/maven-plugin/src/test/avro/User.avdl b/lang/java/maven-plugin/src/test/avro/User.avdl
new file mode 100644
index 0000000..4d4fa5a
--- /dev/null
+++ b/lang/java/maven-plugin/src/test/avro/User.avdl
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ at namespace("test")
+protocol IdlTest {
+
+    enum IdlPrivacy {
+      Public, Private
+    }
+
+    record IdlUser {
+      union { null, string } id;
+      union { null, long } createdOn;
+      union { null, IdlPrivacy } privacy;
+    }
+
+}
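An .avdl file like the one above is turned into a Protocol by the IDL compiler. A minimal sketch, assuming the org.apache.avro.compiler.idl.Idl parser API that IDLProtocolMojo uses, with a hypothetical path:

    import java.io.File;
    import org.apache.avro.Protocol;
    import org.apache.avro.compiler.idl.Idl;

    public class CompileIdl {
      public static void main(String[] args) throws Exception {
        // CompiledProtocol() parses the IDL text and resolves its imports.
        Idl idl = new Idl(new File("src/test/avro/User.avdl"));
        Protocol protocol = idl.CompiledProtocol();
        System.out.println(protocol.toString(true));  // pretty-printed JSON form
      }
    }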
diff --git a/lang/java/maven-plugin/src/test/avro/User.avpr b/lang/java/maven-plugin/src/test/avro/User.avpr
new file mode 100644
index 0000000..9d8a109
--- /dev/null
+++ b/lang/java/maven-plugin/src/test/avro/User.avpr
@@ -0,0 +1,34 @@
+{
+  "protocol" : "ProtocolTest",
+  "namespace" : "test",
+  "types" : [ 
+    {
+      "type" : "enum",
+      "name" : "ProtocolPrivacy",
+      "symbols" : [ "Public", "Private"]
+    }, 
+    {
+      "type": "record", 
+      "namespace": "test",
+      "name": "ProtocolUser",
+      "doc": "User Test Bean",
+      "fields": [
+        {
+          "name": "id", 
+          "type": ["null", "string"],
+          "default": null
+        },
+        {
+          "name": "createdOn", 
+          "type": ["null", "long"],
+          "default": null
+        },
+        {
+          "name": "privacy", 
+          "type": ["null", "ProtocolPrivacy"],
+          "default": null
+        }
+      ]
+    } 
+  ]
+}
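Records declared with ["null", ...] unions and null defaults, like ProtocolUser above, can be built generically without setting every field. A minimal sketch using GenericRecordBuilder, which fills unset fields from their declared defaults:

    import java.io.File;
    import org.apache.avro.Protocol;
    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericRecordBuilder;

    public class BuildProtocolUser {
      public static void main(String[] args) throws Exception {
        Protocol protocol = Protocol.parse(new File("src/test/avro/User.avpr"));
        Schema userSchema = protocol.getType("test.ProtocolUser");
        GenericData.Record user = new GenericRecordBuilder(userSchema)
            .set("id", "42")
            .build();
        System.out.println(user);  // createdOn and privacy default to null
      }
    }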
diff --git a/lang/java/maven-plugin/src/test/avro/User.avsc b/lang/java/maven-plugin/src/test/avro/User.avsc
new file mode 100644
index 0000000..0eb5e09
--- /dev/null
+++ b/lang/java/maven-plugin/src/test/avro/User.avsc
@@ -0,0 +1,38 @@
+{
+    "type": "record", 
+    "namespace": "test",
+    "name": "SchemaUser",
+    "doc": "User Test Bean",
+    "fields": [
+        {
+            "name": "id", 
+            "type": ["null", "string"],
+            "default": null
+        },
+        {
+            "name": "createdOn", 
+            "type": ["null", "long"],
+            "default": null
+        },
+        {
+            "name": "privacy", 
+            "type": ["null", { 
+                    "type": "enum",
+                    "name": "SchemaPrivacy",
+                    "namespace": "test",
+                    "symbols" : ["Public","Private"]
+                }],
+            "default": null
+        },
+        {
+            "name": "privacyImported", 
+            "type": ["null", "test.PrivacyImport"],
+            "default": null
+        },
+        {
+            "name": "privacyDirectImport", 
+            "type": ["null", "test.PrivacyDirectImport"],
+            "default": null
+        }
+    ]
+}
diff --git a/lang/java/maven-plugin/src/test/avro/directImport/PrivacyDirectImport.avsc b/lang/java/maven-plugin/src/test/avro/directImport/PrivacyDirectImport.avsc
new file mode 100644
index 0000000..a5b6295
--- /dev/null
+++ b/lang/java/maven-plugin/src/test/avro/directImport/PrivacyDirectImport.avsc
@@ -0,0 +1,7 @@
+{
+    "type": "enum", 
+    "namespace": "test",
+    "name": "PrivacyDirectImport",
+    "doc": "Privacy Test Enum",
+    "symbols" : ["Public","Private"]
+}
diff --git a/lang/java/maven-plugin/src/test/avro/imports/PrivacyImport.avsc b/lang/java/maven-plugin/src/test/avro/imports/PrivacyImport.avsc
new file mode 100644
index 0000000..f454f1d
--- /dev/null
+++ b/lang/java/maven-plugin/src/test/avro/imports/PrivacyImport.avsc
@@ -0,0 +1,7 @@
+{
+    "type": "enum", 
+    "namespace": "test",
+    "name": "PrivacyImport",
+    "doc": "Privacy Test Enum",
+    "symbols" : ["Public","Private"]
+}
diff --git a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/AbstractAvroMojoTest.java b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/AbstractAvroMojoTest.java
new file mode 100644
index 0000000..e6f2091
--- /dev/null
+++ b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/AbstractAvroMojoTest.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.mojo;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.List;
+import org.apache.maven.plugin.testing.AbstractMojoTestCase;
+
+/**
+ * Base class for all Avro mojo test classes.
+ *
+ * @author saden
+ */
+public class AbstractAvroMojoTest extends AbstractMojoTestCase {
+
+  @Override
+  protected void setUp() throws Exception {
+    super.setUp();
+  }
+
+  @Override
+  protected void tearDown() throws Exception {
+    super.tearDown();
+  }
+
+  /**
+   * Assert the existence of the given files in the given directory.
+   * 
+   * @param directory the directory being checked
+   * @param files the files whose existence is being checked.
+   */
+  protected void assertFilesExist(File directory, String... files) {
+    assertNotNull(directory);
+    assertTrue(directory.exists());
+    assertNotNull(files);
+    assertTrue(files.length > 0);
+
+    List<String> dirList = Arrays.asList(directory.list());
+
+    for (String file : files) {
+      assertTrue("File " + file + " does not exist.", dirList.contains(file));
+    }
+  }
+}
diff --git a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestIDLProtocolMojo.java b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestIDLProtocolMojo.java
new file mode 100644
index 0000000..6442bc8
--- /dev/null
+++ b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestIDLProtocolMojo.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.mojo;
+
+import java.io.File;
+
+/**
+ * Test the IDL Protocol Mojo.
+ * 
+ * @author saden
+ */
+public class TestIDLProtocolMojo extends AbstractAvroMojoTest {
+
+  protected File testPom = new File(getBasedir(),
+          "src/test/resources/unit/idl/pom.xml");
+
+  public void testIdlProtocolMojo() throws Exception {
+    IDLProtocolMojo mojo = (IDLProtocolMojo) lookupMojo("idl-protocol", testPom);
+
+    assertNotNull(mojo);
+    mojo.execute();
+
+    File outputDir = new File(getBasedir(), "target/test-harness/idl/test");
+    String[] generatedFiles = new String[]{"IdlPrivacy.java",
+      "IdlTest.java", "IdlUser.java", "IdlUserWrapper.java"};
+
+    assertFilesExist(outputDir, generatedFiles);
+  }
+}
diff --git a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestProtocolMojo.java b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestProtocolMojo.java
new file mode 100644
index 0000000..95ebbc6
--- /dev/null
+++ b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestProtocolMojo.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.mojo;
+
+import java.io.File;
+
+/**
+ * Test the Protocol Mojo.
+ * 
+ * @author saden
+ */
+public class TestProtocolMojo extends AbstractAvroMojoTest {
+
+  protected File testPom = new File(getBasedir(),
+          "src/test/resources/unit/protocol/pom.xml");
+
+  public void testProtocolMojo() throws Exception {
+    ProtocolMojo mojo = (ProtocolMojo) lookupMojo("protocol", testPom);
+
+    assertNotNull(mojo);
+    mojo.execute();
+
+    File outputDir = new File(getBasedir(), "target/test-harness/protocol/test");
+    String[] generatedFiles = new String[]{"ProtocolPrivacy.java",
+      "ProtocolTest.java", "ProtocolUser.java"};
+
+    assertFilesExist(outputDir, generatedFiles);
+  }
+}
diff --git a/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestSchemaMojo.java b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestSchemaMojo.java
new file mode 100644
index 0000000..8a47be0
--- /dev/null
+++ b/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestSchemaMojo.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.mojo;
+
+import java.io.File;
+
+/**
+ * Test the Schema Mojo.
+ * 
+ * @author saden
+ */
+public class TestSchemaMojo extends AbstractAvroMojoTest {
+
+  protected File testPom = new File(getBasedir(),
+          "src/test/resources/unit/schema/pom.xml");
+
+  public void testSchemaMojo() throws Exception {
+    SchemaMojo mojo = (SchemaMojo) lookupMojo("schema", testPom);
+
+    assertNotNull(mojo);
+    mojo.execute();
+
+    File outputDir = new File(getBasedir(), "target/test-harness/schema/test");
+    String[] generatedFiles = new String[]{"PrivacyDirectImport.java",
+      "PrivacyImport.java", "SchemaPrivacy.java", "SchemaUser.java"};
+
+    assertFilesExist(outputDir, generatedFiles);
+  }
+}
diff --git a/lang/java/maven-plugin/src/test/resources/unit/idl/pom.xml b/lang/java/maven-plugin/src/test/resources/unit/idl/pom.xml
new file mode 100644
index 0000000..ec6b9d8
--- /dev/null
+++ b/lang/java/maven-plugin/src/test/resources/unit/idl/pom.xml
@@ -0,0 +1,65 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>avro-parent</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.7.3-SNAPSHOT</version>
+    <relativePath>../../../../../../../../../</relativePath>
+  </parent>
+  <artifactId>avro-maven-plugin-test</artifactId>
+  <packaging>jar</packaging>
+
+  <name>testproject</name>
+ 
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>avro-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>idl</id>
+            <goals>
+              <goal>idl-protocol</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <sourceDirectory>${basedir}/src/test</sourceDirectory>
+          <outputDirectory>${basedir}/target/test-harness/idl</outputDirectory>
+          <project implementation="org.apache.maven.plugin.testing.stubs.MavenProjectStub"/>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.avro</groupId>
+      <artifactId>avro</artifactId>
+      <version>1.7.3-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <version>1.9.10</version>
+    </dependency> 
+  </dependencies>
+    
+</project>
diff --git a/lang/java/maven-plugin/src/test/resources/unit/protocol/pom.xml b/lang/java/maven-plugin/src/test/resources/unit/protocol/pom.xml
new file mode 100644
index 0000000..341bb1a
--- /dev/null
+++ b/lang/java/maven-plugin/src/test/resources/unit/protocol/pom.xml
@@ -0,0 +1,65 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>avro-parent</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.7.3-SNAPSHOT</version>
+    <relativePath>../../../../../../../../../</relativePath>
+  </parent>
+  <artifactId>avro-maven-plugin-test</artifactId>
+  <packaging>jar</packaging>
+
+  <name>testproject</name>
+ 
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>avro-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>protocol</id>
+            <goals>
+              <goal>protocol</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <sourceDirectory>${basedir}/src/test/avro</sourceDirectory>
+          <outputDirectory>${basedir}/target/test-harness/protocol</outputDirectory>
+          <project implementation="org.apache.maven.plugin.testing.stubs.MavenProjectStub"/>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.avro</groupId>
+      <artifactId>avro</artifactId>
+      <version>1.7.3-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <version>1.9.10</version>
+    </dependency> 
+  </dependencies>
+    
+</project>
diff --git a/lang/java/maven-plugin/src/test/resources/unit/schema/pom.xml b/lang/java/maven-plugin/src/test/resources/unit/schema/pom.xml
new file mode 100644
index 0000000..0faef29
--- /dev/null
+++ b/lang/java/maven-plugin/src/test/resources/unit/schema/pom.xml
@@ -0,0 +1,64 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  
+  <artifactId>avro-maven-plugin-test</artifactId>
+  <packaging>jar</packaging>
+
+  <name>testproject</name>
+ 
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>avro-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>schema</id>
+            <goals>
+              <goal>schema</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <sourceDirectory>${basedir}/src/test/avro</sourceDirectory>
+          <outputDirectory>${basedir}/target/test-harness/schema</outputDirectory>
+          <imports>
+            <import>${basedir}/src/test/avro/imports</import>
+            <import>${basedir}/src/test/avro/directImport/PrivacyDirectImport.avsc</import>
+          </imports>
+          <project implementation="org.apache.maven.plugin.testing.stubs.MavenProjectStub"/>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.avro</groupId>
+      <artifactId>avro</artifactId>
+      <version>1.7.3-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <version>1.9.10</version>
+    </dependency> 
+  </dependencies>
+    
+</project>
diff --git a/lang/java/pom.xml b/lang/java/pom.xml
new file mode 100644
index 0000000..003b1c0
--- /dev/null
+++ b/lang/java/pom.xml
@@ -0,0 +1,503 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.avro</groupId>
+    <artifactId>avro-toplevel</artifactId>
+    <version>1.8.0</version>
+    <relativePath>../../</relativePath>
+  </parent>
+
+  <artifactId>avro-parent</artifactId>
+  <packaging>pom</packaging>
+
+  <name>Apache Avro Java</name>
+  <url>http://avro.apache.org</url>
+  <description>Avro parent Java project</description>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+
+    <!-- version properties for dependencies -->
+    
+    <!--
+      To build the avro-mapred module against Hadoop 1 specify
+      -Dhadoop.version=1 or leave unspecified to build against Hadoop 2
+    -->
+    <hadoop1.version>1.2.1</hadoop1.version>
+    <hadoop2.version>2.5.1</hadoop2.version>
+    <jackson.version>1.9.13</jackson.version>
+    <jetty.version>6.1.26</jetty.version>
+    <jetty-servlet-api.version>2.5-20081211</jetty-servlet-api.version>
+    <jopt-simple.version>4.7</jopt-simple.version>
+    <junit.version>4.11</junit.version>
+    <netty.version>3.5.13.Final</netty.version>
+    <paranamer.version>2.7</paranamer.version>
+    <protobuf.version>2.5.0</protobuf.version>
+    <thrift.version>0.9.1</thrift.version>
+    <slf4j.version>1.7.7</slf4j.version>
+    <snappy.version>1.1.1.3</snappy.version>
+    <velocity.version>1.7</velocity.version>
+    <maven.version>2.0.10</maven.version>
+    <ant.version>1.9.0</ant.version>
+    <commons-cli.version>1.2</commons-cli.version>
+    <commons-codec.version>1.9</commons-codec.version>
+    <commons-compress.version>1.8.1</commons-compress.version>
+    <commons-httpclient.version>3.1</commons-httpclient.version>
+    <commons-lang.version>2.6</commons-lang.version>
+    <commons-logging.version>1.1.1</commons-logging.version>
+    <tukaani.version>1.5</tukaani.version>
+    <easymock.version>3.2</easymock.version>
+    <hamcrest.version>1.3</hamcrest.version>
+    <joda.version>2.7</joda.version>
+
+    <!-- version properties for plugins -->
+    <checkstyle-plugin.version>2.12.1</checkstyle-plugin.version>
+    <bundle-plugin-version>2.5.3</bundle-plugin-version>
+    <compiler-plugin.version>3.1</compiler-plugin.version>
+    <exec-plugin.version>1.3.2</exec-plugin.version>
+    <jar-plugin.version>2.5</jar-plugin.version>
+    <javacc-plugin.version>2.6</javacc-plugin.version>
+    <javadoc-plugin.version>2.9.1</javadoc-plugin.version>
+    <plugin-tools-javadoc.version>3.2</plugin-tools-javadoc.version>
+    <maven-site-plugin.version>3.3</maven-site-plugin.version>
+    <plugin-plugin.version>3.3</plugin-plugin.version>
+    <source-plugin.version>2.3</source-plugin.version>
+    <surefire-plugin.version>2.17</surefire-plugin.version>
+    <file-management.version>1.2.1</file-management.version>
+    <shade-plugin.version>1.7.1</shade-plugin.version>
+    <archetype-plugin.version>2.2</archetype-plugin.version>
+  </properties>
+
+  <modules>
+    <module>avro</module>
+    <module>compiler</module>
+    <module>maven-plugin</module>
+    <module>ipc</module>
+    <module>trevni</module>
+    <module>tools</module>
+    <module>mapred</module>
+    <module>protobuf</module>
+    <module>thrift</module>
+    <module>archetypes</module>
+  </modules>
+
+  <build>
+    <!-- defines the default plugin configuration that all child projects inherit from.
+      Like dependencyManagement, this provides configuration, version, and other
+      parameters if the plugins are used by child projects -->
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.eclipse.m2e</groupId>
+          <artifactId>lifecycle-mapping</artifactId>
+          <version>1.0.0</version>
+          <configuration>
+            <lifecycleMappingMetadata>
+              <pluginExecutions>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.codehaus.mojo</groupId>
+                    <artifactId>javacc-maven-plugin</artifactId>
+                    <versionRange>[${javacc-plugin.version},)</versionRange>
+                    <goals>
+                      <goal>javacc</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore />
+                  </action>
+                </pluginExecution>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>com.thoughtworks.paranamer</groupId>
+                    <artifactId>paranamer-maven-plugin</artifactId>
+                    <versionRange>[${paranamer.version},)</versionRange>
+                    <goals>
+                      <goal>generate</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore />
+                  </action>
+                </pluginExecution>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.avro</groupId>
+                    <artifactId>avro-maven-plugin</artifactId>
+                    <versionRange>[${project.version},)</versionRange>
+                    <goals>
+                      <goal>protocol</goal>
+                      <goal>idl-protocol</goal>
+                      <goal>schema</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore />
+                  </action>
+                </pluginExecution>
+              </pluginExecutions>
+            </lifecycleMappingMetadata>
+          </configuration>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-compiler-plugin</artifactId>
+          <version>${compiler-plugin.version}</version>
+          <configuration>
+            <source>1.6</source>
+            <target>1.6</target>
+          </configuration>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-jar-plugin</artifactId>
+          <version>${jar-plugin.version}</version>
+
+          <!--We want to be able to reuse the test-jars for mapred
+              to test avro-tools,
+              see http://maven.apache.org/guides/mini/guide-attached-tests.html
+          -->
+          <executions>
+            <execution>
+              <goals>
+                <goal>test-jar</goal>
+              </goals>
+            </execution>
+          </executions>
+          <configuration>
+            <archive>
+              <manifest>
+                <addDefaultSpecificationEntries>true</addDefaultSpecificationEntries>
+                <addDefaultImplementationEntries>true</addDefaultImplementationEntries>
+              </manifest>
+            </archive>
+          </configuration>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-surefire-plugin</artifactId>
+          <version>${surefire-plugin.version}</version>
+          <configuration>
+            <includes>
+              <!-- Avro naming convention for JUnit tests -->
+              <include>**/Test**</include>
+            </includes>
+            <excludes>
+              <!-- A few inner classes are not to be tested -->
+              <exclude>**/*$*</exclude>
+              <!-- exclude the generated classes under org.apache.avro.test; some of
+                these match **/Test** and are not JUnit tests -->
+              <exclude>**/apache/avro/test/**</exclude>
+            </excludes>
+            <enableAssertions>false</enableAssertions>
+            <!-- some IPC tests hang if not run in a separate JVM -->
+            <forkCount>1</forkCount>
+            <reuseForks>false</reuseForks>
+            <!-- TestSpecificCompiler instantiates a Java compiler to test output results;
+                 this does not work with a manifest-only jar to set the classpath for javac.
+                 This may cause problems on some platforms.
+                 See http://maven.apache.org/plugins/maven-surefire-plugin/examples/class-loading.html
+                 for more information. -->
+            <useManifestOnlyJar>false</useManifestOnlyJar>
+            <!-- configures unit test standard error and standard out to go to a file per test
+                 rather than the console. -->
+            <redirectTestOutputToFile>true</redirectTestOutputToFile>
+            <failIfNoTests>false</failIfNoTests>
+            <argLine>-Xmx1000m -XX:MaxPermSize=200m</argLine>
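+            <!-- Note: MaxPermSize applies only to pre-Java-8 JVMs; Java 8 and later
+                 removed the permanent generation and ignore the flag with a warning -->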
+            <systemPropertyVariables>
+              <test.dir>${project.basedir}/target/</test.dir>
+            </systemPropertyVariables>
+          </configuration>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-checkstyle-plugin</artifactId>
+          <version>${checkstyle-plugin.version}</version>
+          <configuration>
+            <consoleOutput>true</consoleOutput>
+            <configLocation>checkstyle.xml</configLocation>
+          </configuration>
+          <!-- The 'check' goal runs by default in the verify phase (mvn verify or later
+               in the build cycle) and will fail the build if the checks do not pass.
+               "mvn checkstyle:check" will do this alone, while "mvn checkstyle:checkstyle"
+               will report without breaking the build -->
+          <executions>
+            <execution>
+              <id>checkstyle-check</id>
+              <phase>test</phase>
+              <goals>
+                <goal>check</goal>
+              </goals>
+            </execution>
+          </executions>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-javadoc-plugin</artifactId>
+          <version>${javadoc-plugin.version}</version>
+          <configuration>
+            <links>
+              <link>http://jackson.codehaus.org/1.9.4/javadoc/</link>
+              <link>http://java.sun.com/products/servlet/2.3/javadoc/</link>
+              <link>http://hadoop.apache.org/common/docs/current/api/</link>
+            </links>
+            <tagletArtifacts>
+              <tagletArtifact>
+                <groupId>org.apache.maven.plugin-tools</groupId>
+                <artifactId>maven-plugin-tools-javadoc</artifactId>
+                <version>${plugin-tools-javadoc.version}</version>
+              </tagletArtifact>
+            </tagletArtifacts>
+            <excludePackageNames>org.apache.avro.compiler.idl,*.internal</excludePackageNames>
+          </configuration>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-source-plugin</artifactId>
+          <version>${source-plugin.version}</version>
+        </plugin>
+        <plugin>
+          <groupId>org.codehaus.mojo</groupId>
+          <artifactId>javacc-maven-plugin</artifactId>
+          <version>${javacc-plugin.version}</version>
+        </plugin>
+        <plugin>
+          <groupId>org.codehaus.mojo</groupId>
+          <artifactId>exec-maven-plugin</artifactId>
+          <version>${exec-plugin.version}</version>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-plugin-plugin</artifactId>
+          <version>${plugin-plugin.version}</version>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-shade-plugin</artifactId>
+          <version>${shade-plugin.version}</version>
+        </plugin>
+        <plugin>
+          <artifactId>maven-antrun-plugin</artifactId>
+          <version>${antrun-plugin.version}</version>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.felix</groupId>
+        <artifactId>maven-bundle-plugin</artifactId>
+        <version>${bundle-plugin-version}</version>
+        <extensions>true</extensions>
+        <inherited>true</inherited>
+        <configuration>
+          <instructions>
+            <Bundle-Name>${project.name}</Bundle-Name>
+            <Bundle-SymbolicName>${project.artifactId}</Bundle-SymbolicName>
+            <Export-Package>${osgi.export}</Export-Package>
+            <Import-Package>${osgi.import}</Import-Package>
+            <DynamicImport-Package>${osgi.dynamic.import}</DynamicImport-Package>
+            <Private-Package>${osgi.private}</Private-Package>
+            <Require-Bundle>${osgi.bundles}</Require-Bundle>
+            <Bundle-Activator>${osgi.activator}</Bundle-Activator>
+          </instructions>
+          <supportedProjectTypes>
+            <supportedProjectType>bundle</supportedProjectType>
+          </supportedProjectTypes>
+          <unpackBundle>true</unpackBundle>
+        </configuration>
+        <executions>
+          <execution>
+            <id>bundle-manifest</id>
+            <phase>process-classes</phase>
+            <goals>
+              <goal>manifest</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <reporting>
+  </reporting>
+
+  <profiles>
+    <profile>
+      <id>interop-data-test</id>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-surefire-plugin</artifactId>
+            <version>${surefire-plugin.version}</version>
+            <configuration>
+              <includes>
+                <include>**/DataFileInteropTest*</include>
+              </includes>
+              <excludes>
+              </excludes>
+              <enableAssertions>false</enableAssertions>
+              <forkCount>1</forkCount>
+              <reuseForks>true</reuseForks>
+              <redirectTestOutputToFile>false</redirectTestOutputToFile>
+              <systemPropertyVariables>
+                <test.dir>../../../build/interop/data/</test.dir>
+              </systemPropertyVariables>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+    <profile>
+      <id>mac</id>
+      <activation>
+        <os>
+        <family>mac</family>
+        </os>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-surefire-plugin</artifactId> 
+            <configuration>
+              <systemPropertyVariables>
+                <test.dir>${project.basedir}/target/</test.dir>
+                <!-- avro-mapred will fail in tests on mac without this -->
+                <java.security.krb5.realm>OX.AC.UK</java.security.krb5.realm>
+                <java.security.krb5.kdc>kdc0.ox.ac.uk:kdc1.ox.ac.uk</java.security.krb5.kdc>
+              </systemPropertyVariables>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+
+  <!-- dependencyManagement can be used to define dependency versions, scopes, and
+    excludes to be shared by child projects. Child projects will not inherit these dependencies,
+    rather they inherit the properties of the below dependencies (such as version) if
+    the dependency is specified in the child. -->
+  <dependencyManagement>
+    <dependencies>
+      <dependency>
+        <groupId>org.codehaus.jackson</groupId>
+        <artifactId>jackson-core-asl</artifactId>
+        <version>${jackson.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.codehaus.jackson</groupId>
+        <artifactId>jackson-mapper-asl</artifactId>
+        <version>${jackson.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>com.thoughtworks.paranamer</groupId>
+        <artifactId>paranamer</artifactId>
+        <version>${paranamer.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.velocity</groupId>
+        <artifactId>velocity</artifactId>
+        <version>${velocity.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.mortbay.jetty</groupId>
+        <artifactId>jetty</artifactId>
+        <version>${jetty.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>io.netty</groupId>
+        <artifactId>netty</artifactId>
+        <version>${netty.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>net.sf.jopt-simple</groupId>
+        <artifactId>jopt-simple</artifactId>
+        <version>${jopt-simple.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-core</artifactId>
+        <version>${hadoop1.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-client</artifactId>
+        <version>${hadoop2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.easymock</groupId>
+        <artifactId>easymock</artifactId>
+        <version>${easymock.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.hamcrest</groupId>
+        <artifactId>hamcrest-all</artifactId>
+        <version>${hamcrest.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.xerial.snappy</groupId>
+        <artifactId>snappy-java</artifactId>
+        <version>${snappy.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.commons</groupId>
+        <artifactId>commons-compress</artifactId>
+        <version>${commons-compress.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>joda-time</groupId>
+        <artifactId>joda-time</artifactId>
+        <version>${joda.version}</version>
+        <optional>true</optional>
+      </dependency>
+    </dependencies>
+  </dependencyManagement>
+
+  <!-- dependencies defines dependencies that all child projects share. Child projects
+    will inherit these dependencies directly, and can opt out if necessary with <exclusions> -->
+  <dependencies>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${junit.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>${slf4j.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-simple</artifactId>
+      <version>${slf4j.version}</version>
+      <scope>runtime</scope>
+      <optional>true</optional>
+    </dependency>
+  </dependencies>
+
+</project>
diff --git a/lang/java/protobuf/pom.xml b/lang/java/protobuf/pom.xml
new file mode 100644
index 0000000..7ef5da8
--- /dev/null
+++ b/lang/java/protobuf/pom.xml
@@ -0,0 +1,99 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <artifactId>avro-parent</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.8.0</version>
+    <relativePath>../</relativePath>
+  </parent>
+
+  <artifactId>avro-protobuf</artifactId>
+
+  <name>Apache Avro Protobuf Compatibility</name>
+  <description>Permit serialization of Protobuf-generated classes as Avro data.</description>
+  <packaging>bundle</packaging>
+
+  <properties>
+    <osgi.import>
+      !org.apache.avro.protobuf*,
+      org.apache.avro*;version="${project.version}",
+      com.google.protobuf*;version="${protobuf.version}",
+      *
+    </osgi.import>
+    <osgi.export>org.apache.avro.protobuf*;version="${project.version}"</osgi.export>
+  </properties>
+
+  <profiles>
+    <profile>
+      <id>protobuf-generate</id>
+      <build>
+        <plugins>
+          <plugin>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+              <execution>
+                <phase>generate-test-sources</phase>
+                <configuration>
+                  <tasks>
+                    <mkdir dir="target/protobuf-tmp"/>
+                    <exec executable="protoc">
+                      <arg value="--java_out=src/test/java/"/>
+                      <arg value="src/test/protobuf/test.proto"/>
+                    </exec>
+                  </tasks>
+                </configuration>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+
+  <dependencies>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+      <version>${protobuf.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-core-asl</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+  </dependencies>
+
+</project>
diff --git a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java
new file mode 100644
index 0000000..dabe15a
--- /dev/null
+++ b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java
@@ -0,0 +1,348 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.protobuf;
+
+import java.util.List;
+import java.util.Arrays;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.IdentityHashMap;
+import java.util.concurrent.ConcurrentHashMap;
+
+import java.io.IOException;
+import java.io.File;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+
+import com.google.protobuf.ByteString;
+import com.google.protobuf.Message;
+import com.google.protobuf.Message.Builder;
+import com.google.protobuf.MessageOrBuilder;
+import com.google.protobuf.Descriptors.Descriptor;
+import com.google.protobuf.Descriptors.FieldDescriptor;
+import com.google.protobuf.Descriptors.EnumDescriptor;
+import com.google.protobuf.Descriptors.EnumValueDescriptor;
+import com.google.protobuf.Descriptors.FileDescriptor;
+import com.google.protobuf.DescriptorProtos.FileOptions;
+
+import org.apache.avro.util.ClassUtils;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.node.JsonNodeFactory;
+
+/** Utilities for serializing Protobuf data in Avro format. */
+public class ProtobufData extends GenericData {
+  private static final String PROTOBUF_TYPE = "protobuf";
+
+  private static final ProtobufData INSTANCE = new ProtobufData();
+
+  protected ProtobufData() {}
+  
+  /** Return the singleton instance. */
+  public static ProtobufData get() { return INSTANCE; }
+
+  @Override
+  public DatumReader createDatumReader(Schema schema) {
+    return new ProtobufDatumReader(schema, schema, this);
+  }
+
+  @Override
+  public DatumWriter createDatumWriter(Schema schema) {
+    return new ProtobufDatumWriter(schema, this);
+  }
+
+  @Override
+  public void setField(Object r, String n, int pos, Object o) {
+    setField(r, n, pos, o, getRecordState(r, getSchema(r.getClass())));
+  }
+
+  @Override
+  public Object getField(Object r, String name, int pos) {
+    return getField(r, name, pos, getRecordState(r, getSchema(r.getClass())));
+  }
+
+  @Override
+  protected void setField(Object r, String n, int pos, Object o, Object state) {
+    Builder b = (Builder)r;
+    FieldDescriptor f = ((FieldDescriptor[])state)[pos];
+    switch (f.getType()) {
+    case MESSAGE:
+      if (o == null) {
+        b.clearField(f);
+        break;
+      }
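+      // deliberate fall-through: a non-null message value is set like any other type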
+    default:
+      b.setField(f, o);
+    }
+  }
+
+  @Override
+  protected Object getField(Object record, String name, int pos, Object state) {
+    Message m = (Message)record;
+    FieldDescriptor f = ((FieldDescriptor[])state)[pos];
+    switch (f.getType()) {
+    case MESSAGE:
+      if (!f.isRepeated() && !m.hasField(f))
+        return null;
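+      // deliberate fall-through: present or repeated message fields are read like any other type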
+    default:
+      return m.getField(f);
+    }
+  }    
+
+  private final Map<Descriptor,FieldDescriptor[]> fieldCache =
+    new ConcurrentHashMap<Descriptor,FieldDescriptor[]>();
+
+  @Override
+  protected Object getRecordState(Object r, Schema s) {
+    Descriptor d = ((MessageOrBuilder)r).getDescriptorForType();
+    FieldDescriptor[] fields = fieldCache.get(d);
+    if (fields == null) {                         // cache miss
+      fields = new FieldDescriptor[s.getFields().size()];
+      for (Field f : s.getFields())
+        fields[f.pos()] = d.findFieldByName(f.name());
+      fieldCache.put(d, fields);                  // update cache
+    }
+    return fields;
+  }
+
+  @Override
+  protected boolean isRecord(Object datum) {
+    return datum instanceof Message;
+  }
+
+  @Override
+  public Object newRecord(Object old, Schema schema) {
+    try {
+      Class c = ClassUtils.forName(SpecificData.getClassName(schema));
+      if (c == null)
+        return super.newRecord(old, schema);      // punt to generic
+      if (c.isInstance(old))
+        return old;                               // reuse instance
+      return c.getMethod("newBuilder").invoke(null);
+
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  protected boolean isArray(Object datum) {
+    return datum instanceof List;
+  }
+
+  @Override
+  protected boolean isBytes(Object datum) {
+    return datum instanceof ByteString;
+  }
+
+  @Override
+  protected Schema getRecordSchema(Object record) {
+    return getSchema(((Message)record).getDescriptorForType());
+  }
+
+  private final Map<Class,Schema> schemaCache
+    = new ConcurrentHashMap<Class,Schema>();
+
+  /** Return a record schema given a protobuf message class. */
+  public Schema getSchema(Class c) {
+    Schema schema = schemaCache.get(c);
+
+    if (schema == null) {                         // cache miss
+      try {
+        Object descriptor = c.getMethod("getDescriptor").invoke(null);
+        if (c.isEnum())
+          schema = getSchema((EnumDescriptor)descriptor);
+        else
+          schema = getSchema((Descriptor)descriptor);
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+      schemaCache.put(c, schema);                 // update cache
+    }
+    return schema;
+  }
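+
+  // Typical use (sketch): obtain a schema for a generated message class via
+  // getSchema(MyMessage.class) and pair it with createDatumWriter/createDatumReader
+  // above to serialize protobuf messages as Avro data.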
+
+  private static final ThreadLocal<Map<Descriptor,Schema>> SEEN
+    = new ThreadLocal<Map<Descriptor,Schema>>() {
+    protected Map<Descriptor,Schema> initialValue() {
+      return new IdentityHashMap<Descriptor,Schema>();
+    }
+  };
+
+  private Schema getSchema(Descriptor descriptor) {
+    Map<Descriptor,Schema> seen = SEEN.get();
+    if (seen.containsKey(descriptor))             // stop recursion
+      return seen.get(descriptor);
+    boolean first = seen.isEmpty();
+    try {
+      Schema result =
+        Schema.createRecord(descriptor.getName(), null,
+                            getNamespace(descriptor.getFile(),
+                                         descriptor.getContainingType()),
+                            false);
+
+      seen.put(descriptor, result);
+        
+      List<Field> fields = new ArrayList<Field>();
+      for (FieldDescriptor f : descriptor.getFields())
+        fields.add(new Field(f.getName(), getSchema(f), null, getDefault(f)));
+      result.setFields(fields);
+      return result;
+
+    } finally {
+      if (first)
+        seen.clear();
+    }
+  }
+
+  private String getNamespace(FileDescriptor fd, Descriptor containing) {
+    FileOptions o = fd.getOptions();
+    String p = o.hasJavaPackage()
+      ? o.getJavaPackage()
+      : fd.getPackage();
+    String outer;
+    if (o.hasJavaOuterClassname()) {
+      outer = o.getJavaOuterClassname();
+    } else {
+      outer = new File(fd.getName()).getName();
+      outer = outer.substring(0, outer.lastIndexOf('.'));
+      outer = toCamelCase(outer);
+    }
+    String inner = "";
+    while (containing != null) {
+      inner = containing.getName() + "$" + inner;
+      containing = containing.getContainingType();
+    }
+    return p + "." + outer + "$" + inner;
+  }
+
+  private static String toCamelCase(String s){
+    String[] parts = s.split("_");
+    String camelCaseString = "";
+    for (String part : parts) {
+      camelCaseString = camelCaseString + cap(part);
+    }
+    return camelCaseString;
+  }
+
+  private static String cap(String s) {
+    return s.substring(0, 1).toUpperCase() + s.substring(1).toLowerCase();
+  }
+
+  private static final Schema NULL = Schema.create(Schema.Type.NULL);
+
+  private Schema getSchema(FieldDescriptor f) {
+    Schema s = getNonRepeatedSchema(f);
+    if (f.isRepeated())
+      s = Schema.createArray(s);
+    return s;
+  }
+
+  private Schema getNonRepeatedSchema(FieldDescriptor f) {
+    Schema result;
+    switch (f.getType()) {
+    case BOOL:
+      return Schema.create(Schema.Type.BOOLEAN);
+    case FLOAT:
+      return Schema.create(Schema.Type.FLOAT);
+    case DOUBLE:
+      return Schema.create(Schema.Type.DOUBLE);
+    case STRING:
+      Schema s = Schema.create(Schema.Type.STRING);
+      GenericData.setStringType(s, GenericData.StringType.String);
+      return s;
+    case BYTES:
+      return Schema.create(Schema.Type.BYTES);
+    case INT32: case UINT32: case SINT32: case FIXED32: case SFIXED32:
+      return Schema.create(Schema.Type.INT);
+    case INT64: case UINT64: case SINT64: case FIXED64: case SFIXED64:
+      return Schema.create(Schema.Type.LONG);
+    case ENUM:
+      return getSchema(f.getEnumType());
+    case MESSAGE:
+      result = getSchema(f.getMessageType());
+      if (f.isOptional())
+        // wrap optional record fields in a union with null
+        result = Schema.createUnion(Arrays.asList(new Schema[] {NULL, result}));
+      return result;
+    case GROUP:                                   // groups are deprecated
+    default:
+      throw new RuntimeException("Unexpected type: "+f.getType());
+    }
+  }
+
+  private Schema getSchema(EnumDescriptor d) {
+    List<String> symbols = new ArrayList<String>();
+    for (EnumValueDescriptor e : d.getValues()) {
+      symbols.add(e.getName());
+    }
+    return Schema.createEnum(d.getName(), null,
+                             getNamespace(d.getFile(), d.getContainingType()),
+                             symbols);
+  }
+
+  private static final JsonFactory FACTORY = new JsonFactory();
+  private static final ObjectMapper MAPPER = new ObjectMapper(FACTORY);
+  private static final JsonNodeFactory NODES = JsonNodeFactory.instance;
+
+  private JsonNode getDefault(FieldDescriptor f) {
+    if (f.isRequired() || f.isRepeated())         // no default
+      return null;
+
+    if (f.hasDefaultValue()) {                    // parse spec'd default value
+      Object value = f.getDefaultValue();
+      switch (f.getType()) {
+      case ENUM:
+        value = ((EnumValueDescriptor)value).getName();
+        break;
+      }
+      String json = toString(value);
+      try {
+        return MAPPER.readTree(FACTORY.createJsonParser(json));
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
+
+    switch (f.getType()) {                        // generate default for type
+    case BOOL:
+      return NODES.booleanNode(false);
+    case FLOAT: case DOUBLE:
+    case INT32: case UINT32: case SINT32: case FIXED32: case SFIXED32:
+    case INT64: case UINT64: case SINT64: case FIXED64: case SFIXED64:
+      return NODES.numberNode(0);
+    case STRING: case BYTES:
+      return NODES.textNode("");
+    case ENUM:
+      return NODES.textNode(f.getEnumType().getValues().get(0).getName());
+    case MESSAGE:
+      return NODES.nullNode();
+    case GROUP:                                   // groups are deprecated
+    default:
+      throw new RuntimeException("Unexpected type: "+f.getType());
+    }
+    
+  }
+
+}
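
The class above derives an Avro schema reflectively from a protobuf
descriptor and caches it per message class. A minimal usage sketch,
assuming the protoc-generated Test.Foo message defined in the test
source later in this diff (the SchemaFromProto class name is only for
illustration):

    import org.apache.avro.Schema;
    import org.apache.avro.protobuf.ProtobufData;
    import org.apache.avro.protobuf.Test;

    public class SchemaFromProto {
      public static void main(String[] args) {
        // getSchema() reflects on the generated class's getDescriptor()
        // once, then serves later calls from schemaCache.
        Schema s = ProtobufData.get().getSchema(Test.Foo.class);
        System.out.println(s.toString(true));  // pretty-printed schema JSON
      }
    }
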
diff --git a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumReader.java b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumReader.java
new file mode 100644
index 0000000..0b9bf8a
--- /dev/null
+++ b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumReader.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.protobuf;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.ResolvingDecoder;
+import org.apache.avro.util.ClassUtils;
+
+import com.google.protobuf.ByteString;
+import com.google.protobuf.Message;
+import com.google.protobuf.ProtocolMessageEnum;
+
+/** {@link org.apache.avro.io.DatumReader DatumReader} for generated Protobuf
+ * classes. */
+public class ProtobufDatumReader<T> extends GenericDatumReader<T> {
+  public ProtobufDatumReader() {
+    this(null, null, ProtobufData.get());
+  }
+
+  public ProtobufDatumReader(Class<T> c) {
+    this(ProtobufData.get().getSchema(c));
+  }
+
+  /** Construct where the writer's and reader's schemas are the same. */
+  public ProtobufDatumReader(Schema schema) {
+    this(schema, schema, ProtobufData.get());
+  }
+
+  /** Construct given writer's and reader's schema. */
+  public ProtobufDatumReader(Schema writer, Schema reader) {
+    this(writer, reader, ProtobufData.get());
+  }
+
+  protected ProtobufDatumReader(Schema writer, Schema reader,
+                                ProtobufData data) {
+    super(writer, reader, data);
+  }
+
+  @Override
+  protected Object readRecord(Object old, Schema expected, 
+                              ResolvingDecoder in) throws IOException {
+    Message.Builder b = (Message.Builder)super.readRecord(old, expected, in);
+    return b.build();                             // build instance
+  }
+
+  @Override
+  protected Object createEnum(String symbol, Schema schema) {
+    try {
+      Class c = ClassUtils.forName(SpecificData.getClassName(schema));
+      if (c == null) return super.createEnum(symbol, schema); // punt to generic
+      return ((ProtocolMessageEnum)Enum.valueOf(c, symbol)).getValueDescriptor();
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  protected Object readBytes(Object old, Decoder in) throws IOException {
+    return ByteString.copyFrom(((ByteBuffer)super.readBytes(old, in)).array());
+  }    
+
+}
+
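
ProtobufDatumReader reuses the generic record-reading machinery:
ProtobufData.newRecord supplies a Message.Builder, super.readRecord fills
it in field by field, and the readRecord override then calls build() to
produce the immutable message. A decoding sketch under those assumptions
(ReadFoo and the avroBytes argument are hypothetical names; the input is
presumed to be Avro-binary data written with the matching writer below):

    import java.io.IOException;

    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.DecoderFactory;
    import org.apache.avro.protobuf.ProtobufDatumReader;
    import org.apache.avro.protobuf.Test;

    public class ReadFoo {
      static Test.Foo decode(byte[] avroBytes) throws IOException {
        ProtobufDatumReader<Test.Foo> reader =
            new ProtobufDatumReader<Test.Foo>(Test.Foo.class);
        BinaryDecoder in = DecoderFactory.get().binaryDecoder(avroBytes, null);
        return reader.read(null, in);  // readRecord() returns a built Foo
      }
    }
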
diff --git a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumWriter.java b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumWriter.java
new file mode 100644
index 0000000..0af30ad
--- /dev/null
+++ b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumWriter.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.protobuf;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.Encoder;
+
+import com.google.protobuf.ByteString;
+import com.google.protobuf.Descriptors.EnumValueDescriptor;
+
+/** {@link org.apache.avro.io.DatumWriter DatumWriter} for generated protobuf
+ * classes. */
+public class ProtobufDatumWriter<T> extends GenericDatumWriter<T> {
+  public ProtobufDatumWriter() {
+    super(ProtobufData.get());
+  }
+
+  public ProtobufDatumWriter(Class<T> c) {
+    super(ProtobufData.get().getSchema(c), ProtobufData.get());
+  }
+  
+  public ProtobufDatumWriter(Schema schema) {
+    super(schema, ProtobufData.get());
+  }
+  
+  protected ProtobufDatumWriter(Schema root, ProtobufData protobufData) {
+    super(root, protobufData);
+  }
+  
+  protected ProtobufDatumWriter(ProtobufData protobufData) {
+    super(protobufData);
+  }
+  
+  @Override
+  protected void writeEnum(Schema schema, Object datum, Encoder out)
+    throws IOException {
+    if (!(datum instanceof EnumValueDescriptor))
+      super.writeEnum(schema, datum, out);        // punt to generic
+    else
+      out.writeEnum
+        (schema.getEnumOrdinal(((EnumValueDescriptor)datum).getName()));
+  }
+
+  @Override
+  protected void writeBytes(Object datum, Encoder out) throws IOException {
+    ByteString bytes = (ByteString)datum; 
+    out.writeBytes(bytes.toByteArray(), 0, bytes.size());
+  }
+
+}
+
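
ProtobufDatumWriter only intercepts the two protobuf-specific
representations: enum values arrive as EnumValueDescriptors and byte
fields as ByteStrings; everything else flows through the generic writer.
A matching encoding sketch, again using the generated Test.Foo from the
test source below (WriteFoo is an illustrative name):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.EncoderFactory;
    import org.apache.avro.protobuf.ProtobufDatumWriter;
    import org.apache.avro.protobuf.Test;

    public class WriteFoo {
      static byte[] encode(Test.Foo foo) throws IOException {
        ProtobufDatumWriter<Test.Foo> writer =
            new ProtobufDatumWriter<Test.Foo>(Test.Foo.class);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder enc = EncoderFactory.get().binaryEncoder(out, null);
        writer.write(foo, enc);  // writeEnum/writeBytes overrides apply here
        enc.flush();             // binaryEncoder() buffers; flush before use
        return out.toByteArray();
      }
    }
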
diff --git a/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/package.html b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/package.html
new file mode 100644
index 0000000..0fd157d
--- /dev/null
+++ b/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/package.html
@@ -0,0 +1,42 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body><a href="http://code.google.com/p/protobuf/">Protocol Buffer</a>
+  compatibility.
+
+<p>Protocol Buffer primitive types are mapped to Avro types as follows:
+<table>
+<tr><th>protobuf type</th><th>Avro type</th></tr>
+<tr><td>int32, uint32, sint32, fixed32, sfixed32</td><td>int</td></tr>
+<tr><td>int64, uint64, sint64, fixed64, sfixed64</td><td>long</td></tr>
+<tr><td>float</td><td>float</td></tr>
+<tr><td>double</td><td>double</td></tr>
+<tr><td>bool</td><td>boolean</td></tr>
+<tr><td>string</td><td>string</td></tr>
+<tr><td>bytes</td><td>bytes</td></tr>
+<tr><td>enum</td><td>enum</td></tr>
+<tr><td>message</td><td>record</td></tr>
+</table>
+<p>Notes:
+<ul>
+<li>protobuf repeated fields are represented as Avro arrays
+<li>protobuf default values are translated to Avro default values
+</ul>
+</body>
+</html>
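
One detail of the mapping the table above does not show: optional message
fields are wrapped in a union with null (see getNonRepeatedSchema in
ProtobufData.java earlier in this diff), while repeated fields of any type
become Avro arrays. A small sketch inspecting both cases on the generated
Test.Foo schema (MappingDemo is an illustrative name):

    import org.apache.avro.Schema;
    import org.apache.avro.protobuf.ProtobufData;
    import org.apache.avro.protobuf.Test;

    public class MappingDemo {
      public static void main(String[] args) {
        Schema foo = ProtobufData.get().getSchema(Test.Foo.class);
        // optional message field -> union of null and the record schema
        System.out.println(foo.getField("foo").schema());       // ["null", Foo]
        // repeated int32 -> Avro array of int
        System.out.println(foo.getField("intArray").schema());  // array of int
      }
    }
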
diff --git a/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/Test.java b/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/Test.java
new file mode 100644
index 0000000..806b5dc
--- /dev/null
+++ b/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/Test.java
@@ -0,0 +1,3316 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: src/test/protobuf/test.proto
+
+package org.apache.avro.protobuf;
+
+public final class Test {
+  private Test() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  /**
+   * Protobuf enum {@code org.apache.avro.protobuf.A}
+   *
+   * <pre>
+   * an enum
+   * </pre>
+   */
+  public enum A
+      implements com.google.protobuf.ProtocolMessageEnum {
+    /**
+     * <code>X = 1;</code>
+     */
+    X(0, 1),
+    /**
+     * <code>Y = 2;</code>
+     */
+    Y(1, 2),
+    /**
+     * <code>Z = 3;</code>
+     */
+    Z(2, 3),
+    ;
+
+    /**
+     * <code>X = 1;</code>
+     */
+    public static final int X_VALUE = 1;
+    /**
+     * <code>Y = 2;</code>
+     */
+    public static final int Y_VALUE = 2;
+    /**
+     * <code>Z = 3;</code>
+     */
+    public static final int Z_VALUE = 3;
+
+
+    public final int getNumber() { return value; }
+
+    public static A valueOf(int value) {
+      switch (value) {
+        case 1: return X;
+        case 2: return Y;
+        case 3: return Z;
+        default: return null;
+      }
+    }
+
+    public static com.google.protobuf.Internal.EnumLiteMap<A>
+        internalGetValueMap() {
+      return internalValueMap;
+    }
+    private static com.google.protobuf.Internal.EnumLiteMap<A>
+        internalValueMap =
+          new com.google.protobuf.Internal.EnumLiteMap<A>() {
+            public A findValueByNumber(int number) {
+              return A.valueOf(number);
+            }
+          };
+
+    public final com.google.protobuf.Descriptors.EnumValueDescriptor
+        getValueDescriptor() {
+      return getDescriptor().getValues().get(index);
+    }
+    public final com.google.protobuf.Descriptors.EnumDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+    public static final com.google.protobuf.Descriptors.EnumDescriptor
+        getDescriptor() {
+      return org.apache.avro.protobuf.Test.getDescriptor().getEnumTypes().get(0);
+    }
+
+    private static final A[] VALUES = values();
+
+    public static A valueOf(
+        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+      if (desc.getType() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "EnumValueDescriptor is not for this type.");
+      }
+      return VALUES[desc.getIndex()];
+    }
+
+    private final int index;
+    private final int value;
+
+    private A(int index, int value) {
+      this.index = index;
+      this.value = value;
+    }
+
+    // @@protoc_insertion_point(enum_scope:org.apache.avro.protobuf.A)
+  }
+
+  public interface FooOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required int32 int32 = 1;
+    /**
+     * <code>required int32 int32 = 1;</code>
+     *
+     * <pre>
+     * all the primitive types
+     * </pre>
+     */
+    boolean hasInt32();
+    /**
+     * <code>required int32 int32 = 1;</code>
+     *
+     * <pre>
+     * all the primitive types
+     * </pre>
+     */
+    int getInt32();
+
+    // optional int64 int64 = 2;
+    /**
+     * <code>optional int64 int64 = 2;</code>
+     */
+    boolean hasInt64();
+    /**
+     * <code>optional int64 int64 = 2;</code>
+     */
+    long getInt64();
+
+    // optional uint32 uint32 = 3;
+    /**
+     * <code>optional uint32 uint32 = 3;</code>
+     */
+    boolean hasUint32();
+    /**
+     * <code>optional uint32 uint32 = 3;</code>
+     */
+    int getUint32();
+
+    // optional uint64 uint64 = 4;
+    /**
+     * <code>optional uint64 uint64 = 4;</code>
+     */
+    boolean hasUint64();
+    /**
+     * <code>optional uint64 uint64 = 4;</code>
+     */
+    long getUint64();
+
+    // optional sint32 sint32 = 5;
+    /**
+     * <code>optional sint32 sint32 = 5;</code>
+     */
+    boolean hasSint32();
+    /**
+     * <code>optional sint32 sint32 = 5;</code>
+     */
+    int getSint32();
+
+    // optional sint64 sint64 = 6;
+    /**
+     * <code>optional sint64 sint64 = 6;</code>
+     */
+    boolean hasSint64();
+    /**
+     * <code>optional sint64 sint64 = 6;</code>
+     */
+    long getSint64();
+
+    // optional fixed32 fixed32 = 7;
+    /**
+     * <code>optional fixed32 fixed32 = 7;</code>
+     */
+    boolean hasFixed32();
+    /**
+     * <code>optional fixed32 fixed32 = 7;</code>
+     */
+    int getFixed32();
+
+    // optional fixed64 fixed64 = 8;
+    /**
+     * <code>optional fixed64 fixed64 = 8;</code>
+     */
+    boolean hasFixed64();
+    /**
+     * <code>optional fixed64 fixed64 = 8;</code>
+     */
+    long getFixed64();
+
+    // optional sfixed32 sfixed32 = 9;
+    /**
+     * <code>optional sfixed32 sfixed32 = 9;</code>
+     */
+    boolean hasSfixed32();
+    /**
+     * <code>optional sfixed32 sfixed32 = 9;</code>
+     */
+    int getSfixed32();
+
+    // optional sfixed64 sfixed64 = 10;
+    /**
+     * <code>optional sfixed64 sfixed64 = 10;</code>
+     */
+    boolean hasSfixed64();
+    /**
+     * <code>optional sfixed64 sfixed64 = 10;</code>
+     */
+    long getSfixed64();
+
+    // optional float float = 11;
+    /**
+     * <code>optional float float = 11;</code>
+     */
+    boolean hasFloat();
+    /**
+     * <code>optional float float = 11;</code>
+     */
+    float getFloat();
+
+    // optional double double = 12;
+    /**
+     * <code>optional double double = 12;</code>
+     */
+    boolean hasDouble();
+    /**
+     * <code>optional double double = 12;</code>
+     */
+    double getDouble();
+
+    // optional bool bool = 13;
+    /**
+     * <code>optional bool bool = 13;</code>
+     */
+    boolean hasBool();
+    /**
+     * <code>optional bool bool = 13;</code>
+     */
+    boolean getBool();
+
+    // optional string string = 14;
+    /**
+     * <code>optional string string = 14;</code>
+     */
+    boolean hasString();
+    /**
+     * <code>optional string string = 14;</code>
+     */
+    java.lang.String getString();
+    /**
+     * <code>optional string string = 14;</code>
+     */
+    com.google.protobuf.ByteString
+        getStringBytes();
+
+    // optional bytes bytes = 15;
+    /**
+     * <code>optional bytes bytes = 15;</code>
+     */
+    boolean hasBytes();
+    /**
+     * <code>optional bytes bytes = 15;</code>
+     */
+    com.google.protobuf.ByteString getBytes();
+
+    // optional .org.apache.avro.protobuf.A enum = 16 [default = Z];
+    /**
+     * <code>optional .org.apache.avro.protobuf.A enum = 16 [default = Z];</code>
+     */
+    boolean hasEnum();
+    /**
+     * <code>optional .org.apache.avro.protobuf.A enum = 16 [default = Z];</code>
+     */
+    org.apache.avro.protobuf.Test.A getEnum();
+
+    // repeated int32 intArray = 17;
+    /**
+     * <code>repeated int32 intArray = 17;</code>
+     *
+     * <pre>
+     * some repeated types
+     * </pre>
+     */
+    java.util.List<java.lang.Integer> getIntArrayList();
+    /**
+     * <code>repeated int32 intArray = 17;</code>
+     *
+     * <pre>
+     * some repeated types
+     * </pre>
+     */
+    int getIntArrayCount();
+    /**
+     * <code>repeated int32 intArray = 17;</code>
+     *
+     * <pre>
+     * some repeated types
+     * </pre>
+     */
+    int getIntArray(int index);
+
+    // repeated .org.apache.avro.protobuf.Foo fooArray = 20;
+    /**
+     * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+     */
+    java.util.List<org.apache.avro.protobuf.Test.Foo> 
+        getFooArrayList();
+    /**
+     * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+     */
+    org.apache.avro.protobuf.Test.Foo getFooArray(int index);
+    /**
+     * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+     */
+    int getFooArrayCount();
+    /**
+     * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+     */
+    java.util.List<? extends org.apache.avro.protobuf.Test.FooOrBuilder> 
+        getFooArrayOrBuilderList();
+    /**
+     * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+     */
+    org.apache.avro.protobuf.Test.FooOrBuilder getFooArrayOrBuilder(
+        int index);
+
+    // repeated .org.apache.avro.protobuf.A syms = 19;
+    /**
+     * <code>repeated .org.apache.avro.protobuf.A syms = 19;</code>
+     */
+    java.util.List<org.apache.avro.protobuf.Test.A> getSymsList();
+    /**
+     * <code>repeated .org.apache.avro.protobuf.A syms = 19;</code>
+     */
+    int getSymsCount();
+    /**
+     * <code>repeated .org.apache.avro.protobuf.A syms = 19;</code>
+     */
+    org.apache.avro.protobuf.Test.A getSyms(int index);
+
+    // optional .org.apache.avro.protobuf.Foo foo = 18;
+    /**
+     * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+     *
+     * <pre>
+     * a recursive type
+     * </pre>
+     */
+    boolean hasFoo();
+    /**
+     * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+     *
+     * <pre>
+     * a recursive type
+     * </pre>
+     */
+    org.apache.avro.protobuf.Test.Foo getFoo();
+    /**
+     * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+     *
+     * <pre>
+     * a recursive type
+     * </pre>
+     */
+    org.apache.avro.protobuf.Test.FooOrBuilder getFooOrBuilder();
+  }
+  /**
+   * Protobuf type {@code org.apache.avro.protobuf.Foo}
+   */
+  public static final class Foo extends
+      com.google.protobuf.GeneratedMessage
+      implements FooOrBuilder {
+    // Use Foo.newBuilder() to construct.
+    private Foo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private Foo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final Foo defaultInstance;
+    public static Foo getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public Foo getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private Foo(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      int mutable_bitField0_ = 0;
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              int32_ = input.readInt32();
+              break;
+            }
+            case 16: {
+              bitField0_ |= 0x00000002;
+              int64_ = input.readInt64();
+              break;
+            }
+            case 24: {
+              bitField0_ |= 0x00000004;
+              uint32_ = input.readUInt32();
+              break;
+            }
+            case 32: {
+              bitField0_ |= 0x00000008;
+              uint64_ = input.readUInt64();
+              break;
+            }
+            case 40: {
+              bitField0_ |= 0x00000010;
+              sint32_ = input.readSInt32();
+              break;
+            }
+            case 48: {
+              bitField0_ |= 0x00000020;
+              sint64_ = input.readSInt64();
+              break;
+            }
+            case 61: {
+              bitField0_ |= 0x00000040;
+              fixed32_ = input.readFixed32();
+              break;
+            }
+            case 65: {
+              bitField0_ |= 0x00000080;
+              fixed64_ = input.readFixed64();
+              break;
+            }
+            case 77: {
+              bitField0_ |= 0x00000100;
+              sfixed32_ = input.readSFixed32();
+              break;
+            }
+            case 81: {
+              bitField0_ |= 0x00000200;
+              sfixed64_ = input.readSFixed64();
+              break;
+            }
+            case 93: {
+              bitField0_ |= 0x00000400;
+              float_ = input.readFloat();
+              break;
+            }
+            case 97: {
+              bitField0_ |= 0x00000800;
+              double_ = input.readDouble();
+              break;
+            }
+            case 104: {
+              bitField0_ |= 0x00001000;
+              bool_ = input.readBool();
+              break;
+            }
+            case 114: {
+              bitField0_ |= 0x00002000;
+              string_ = input.readBytes();
+              break;
+            }
+            case 122: {
+              bitField0_ |= 0x00004000;
+              bytes_ = input.readBytes();
+              break;
+            }
+            case 128: {
+              int rawValue = input.readEnum();
+              org.apache.avro.protobuf.Test.A value = org.apache.avro.protobuf.Test.A.valueOf(rawValue);
+              if (value == null) {
+                unknownFields.mergeVarintField(16, rawValue);
+              } else {
+                bitField0_ |= 0x00008000;
+                enum_ = value;
+              }
+              break;
+            }
+            case 136: {
+              if (!((mutable_bitField0_ & 0x00010000) == 0x00010000)) {
+                intArray_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00010000;
+              }
+              intArray_.add(input.readInt32());
+              break;
+            }
+            case 138: {
+              int length = input.readRawVarint32();
+              int limit = input.pushLimit(length);
+              if (!((mutable_bitField0_ & 0x00010000) == 0x00010000) && input.getBytesUntilLimit() > 0) {
+                intArray_ = new java.util.ArrayList<java.lang.Integer>();
+                mutable_bitField0_ |= 0x00010000;
+              }
+              while (input.getBytesUntilLimit() > 0) {
+                intArray_.add(input.readInt32());
+              }
+              input.popLimit(limit);
+              break;
+            }
+            case 146: {
+              org.apache.avro.protobuf.Test.Foo.Builder subBuilder = null;
+              if (((bitField0_ & 0x00010000) == 0x00010000)) {
+                subBuilder = foo_.toBuilder();
+              }
+              foo_ = input.readMessage(org.apache.avro.protobuf.Test.Foo.PARSER, extensionRegistry);
+              if (subBuilder != null) {
+                subBuilder.mergeFrom(foo_);
+                foo_ = subBuilder.buildPartial();
+              }
+              bitField0_ |= 0x00010000;
+              break;
+            }
+            case 152: {
+              int rawValue = input.readEnum();
+              org.apache.avro.protobuf.Test.A value = org.apache.avro.protobuf.Test.A.valueOf(rawValue);
+              if (value == null) {
+                unknownFields.mergeVarintField(19, rawValue);
+              } else {
+                if (!((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
+                  syms_ = new java.util.ArrayList<org.apache.avro.protobuf.Test.A>();
+                  mutable_bitField0_ |= 0x00040000;
+                }
+                syms_.add(value);
+              }
+              break;
+            }
+            case 154: {
+              int length = input.readRawVarint32();
+              int oldLimit = input.pushLimit(length);
+              while(input.getBytesUntilLimit() > 0) {
+                int rawValue = input.readEnum();
+                org.apache.avro.protobuf.Test.A value = org.apache.avro.protobuf.Test.A.valueOf(rawValue);
+                if (value == null) {
+                  unknownFields.mergeVarintField(19, rawValue);
+                } else {
+                  if (!((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
+                    syms_ = new java.util.ArrayList<org.apache.avro.protobuf.Test.A>();
+                    mutable_bitField0_ |= 0x00040000;
+                  }
+                  syms_.add(value);
+                }
+              }
+              input.popLimit(oldLimit);
+              break;
+            }
+            case 162: {
+              if (!((mutable_bitField0_ & 0x00020000) == 0x00020000)) {
+                fooArray_ = new java.util.ArrayList<org.apache.avro.protobuf.Test.Foo>();
+                mutable_bitField0_ |= 0x00020000;
+              }
+              fooArray_.add(input.readMessage(org.apache.avro.protobuf.Test.Foo.PARSER, extensionRegistry));
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        if (((mutable_bitField0_ & 0x00010000) == 0x00010000)) {
+          intArray_ = java.util.Collections.unmodifiableList(intArray_);
+        }
+        if (((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
+          syms_ = java.util.Collections.unmodifiableList(syms_);
+        }
+        if (((mutable_bitField0_ & 0x00020000) == 0x00020000)) {
+          fooArray_ = java.util.Collections.unmodifiableList(fooArray_);
+        }
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.avro.protobuf.Test.internal_static_org_apache_avro_protobuf_Foo_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.avro.protobuf.Test.internal_static_org_apache_avro_protobuf_Foo_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.avro.protobuf.Test.Foo.class, org.apache.avro.protobuf.Test.Foo.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<Foo> PARSER =
+        new com.google.protobuf.AbstractParser<Foo>() {
+      public Foo parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new Foo(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<Foo> getParserForType() {
+      return PARSER;
+    }
+
+    private int bitField0_;
+    // required int32 int32 = 1;
+    public static final int INT32_FIELD_NUMBER = 1;
+    private int int32_;
+    /**
+     * <code>required int32 int32 = 1;</code>
+     *
+     * <pre>
+     * all the primitive types
+     * </pre>
+     */
+    public boolean hasInt32() {
+      return ((bitField0_ & 0x00000001) == 0x00000001);
+    }
+    /**
+     * <code>required int32 int32 = 1;</code>
+     *
+     * <pre>
+     * all the primitive types
+     * </pre>
+     */
+    public int getInt32() {
+      return int32_;
+    }
+
+    // optional int64 int64 = 2;
+    public static final int INT64_FIELD_NUMBER = 2;
+    private long int64_;
+    /**
+     * <code>optional int64 int64 = 2;</code>
+     */
+    public boolean hasInt64() {
+      return ((bitField0_ & 0x00000002) == 0x00000002);
+    }
+    /**
+     * <code>optional int64 int64 = 2;</code>
+     */
+    public long getInt64() {
+      return int64_;
+    }
+
+    // optional uint32 uint32 = 3;
+    public static final int UINT32_FIELD_NUMBER = 3;
+    private int uint32_;
+    /**
+     * <code>optional uint32 uint32 = 3;</code>
+     */
+    public boolean hasUint32() {
+      return ((bitField0_ & 0x00000004) == 0x00000004);
+    }
+    /**
+     * <code>optional uint32 uint32 = 3;</code>
+     */
+    public int getUint32() {
+      return uint32_;
+    }
+
+    // optional uint64 uint64 = 4;
+    public static final int UINT64_FIELD_NUMBER = 4;
+    private long uint64_;
+    /**
+     * <code>optional uint64 uint64 = 4;</code>
+     */
+    public boolean hasUint64() {
+      return ((bitField0_ & 0x00000008) == 0x00000008);
+    }
+    /**
+     * <code>optional uint64 uint64 = 4;</code>
+     */
+    public long getUint64() {
+      return uint64_;
+    }
+
+    // optional sint32 sint32 = 5;
+    public static final int SINT32_FIELD_NUMBER = 5;
+    private int sint32_;
+    /**
+     * <code>optional sint32 sint32 = 5;</code>
+     */
+    public boolean hasSint32() {
+      return ((bitField0_ & 0x00000010) == 0x00000010);
+    }
+    /**
+     * <code>optional sint32 sint32 = 5;</code>
+     */
+    public int getSint32() {
+      return sint32_;
+    }
+
+    // optional sint64 sint64 = 6;
+    public static final int SINT64_FIELD_NUMBER = 6;
+    private long sint64_;
+    /**
+     * <code>optional sint64 sint64 = 6;</code>
+     */
+    public boolean hasSint64() {
+      return ((bitField0_ & 0x00000020) == 0x00000020);
+    }
+    /**
+     * <code>optional sint64 sint64 = 6;</code>
+     */
+    public long getSint64() {
+      return sint64_;
+    }
+
+    // optional fixed32 fixed32 = 7;
+    public static final int FIXED32_FIELD_NUMBER = 7;
+    private int fixed32_;
+    /**
+     * <code>optional fixed32 fixed32 = 7;</code>
+     */
+    public boolean hasFixed32() {
+      return ((bitField0_ & 0x00000040) == 0x00000040);
+    }
+    /**
+     * <code>optional fixed32 fixed32 = 7;</code>
+     */
+    public int getFixed32() {
+      return fixed32_;
+    }
+
+    // optional fixed64 fixed64 = 8;
+    public static final int FIXED64_FIELD_NUMBER = 8;
+    private long fixed64_;
+    /**
+     * <code>optional fixed64 fixed64 = 8;</code>
+     */
+    public boolean hasFixed64() {
+      return ((bitField0_ & 0x00000080) == 0x00000080);
+    }
+    /**
+     * <code>optional fixed64 fixed64 = 8;</code>
+     */
+    public long getFixed64() {
+      return fixed64_;
+    }
+
+    // optional sfixed32 sfixed32 = 9;
+    public static final int SFIXED32_FIELD_NUMBER = 9;
+    private int sfixed32_;
+    /**
+     * <code>optional sfixed32 sfixed32 = 9;</code>
+     */
+    public boolean hasSfixed32() {
+      return ((bitField0_ & 0x00000100) == 0x00000100);
+    }
+    /**
+     * <code>optional sfixed32 sfixed32 = 9;</code>
+     */
+    public int getSfixed32() {
+      return sfixed32_;
+    }
+
+    // optional sfixed64 sfixed64 = 10;
+    public static final int SFIXED64_FIELD_NUMBER = 10;
+    private long sfixed64_;
+    /**
+     * <code>optional sfixed64 sfixed64 = 10;</code>
+     */
+    public boolean hasSfixed64() {
+      return ((bitField0_ & 0x00000200) == 0x00000200);
+    }
+    /**
+     * <code>optional sfixed64 sfixed64 = 10;</code>
+     */
+    public long getSfixed64() {
+      return sfixed64_;
+    }
+
+    // optional float float = 11;
+    public static final int FLOAT_FIELD_NUMBER = 11;
+    private float float_;
+    /**
+     * <code>optional float float = 11;</code>
+     */
+    public boolean hasFloat() {
+      return ((bitField0_ & 0x00000400) == 0x00000400);
+    }
+    /**
+     * <code>optional float float = 11;</code>
+     */
+    public float getFloat() {
+      return float_;
+    }
+
+    // optional double double = 12;
+    public static final int DOUBLE_FIELD_NUMBER = 12;
+    private double double_;
+    /**
+     * <code>optional double double = 12;</code>
+     */
+    public boolean hasDouble() {
+      return ((bitField0_ & 0x00000800) == 0x00000800);
+    }
+    /**
+     * <code>optional double double = 12;</code>
+     */
+    public double getDouble() {
+      return double_;
+    }
+
+    // optional bool bool = 13;
+    public static final int BOOL_FIELD_NUMBER = 13;
+    private boolean bool_;
+    /**
+     * <code>optional bool bool = 13;</code>
+     */
+    public boolean hasBool() {
+      return ((bitField0_ & 0x00001000) == 0x00001000);
+    }
+    /**
+     * <code>optional bool bool = 13;</code>
+     */
+    public boolean getBool() {
+      return bool_;
+    }
+
+    // optional string string = 14;
+    public static final int STRING_FIELD_NUMBER = 14;
+    private java.lang.Object string_;
+    /**
+     * <code>optional string string = 14;</code>
+     */
+    public boolean hasString() {
+      return ((bitField0_ & 0x00002000) == 0x00002000);
+    }
+    /**
+     * <code>optional string string = 14;</code>
+     */
+    public java.lang.String getString() {
+      java.lang.Object ref = string_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          string_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string string = 14;</code>
+     */
+    public com.google.protobuf.ByteString
+        getStringBytes() {
+      java.lang.Object ref = string_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        string_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
+    // optional bytes bytes = 15;
+    public static final int BYTES_FIELD_NUMBER = 15;
+    private com.google.protobuf.ByteString bytes_;
+    /**
+     * <code>optional bytes bytes = 15;</code>
+     */
+    public boolean hasBytes() {
+      return ((bitField0_ & 0x00004000) == 0x00004000);
+    }
+    /**
+     * <code>optional bytes bytes = 15;</code>
+     */
+    public com.google.protobuf.ByteString getBytes() {
+      return bytes_;
+    }
+
+    // optional .org.apache.avro.protobuf.A enum = 16 [default = Z];
+    public static final int ENUM_FIELD_NUMBER = 16;
+    private org.apache.avro.protobuf.Test.A enum_;
+    /**
+     * <code>optional .org.apache.avro.protobuf.A enum = 16 [default = Z];</code>
+     */
+    public boolean hasEnum() {
+      return ((bitField0_ & 0x00008000) == 0x00008000);
+    }
+    /**
+     * <code>optional .org.apache.avro.protobuf.A enum = 16 [default = Z];</code>
+     */
+    public org.apache.avro.protobuf.Test.A getEnum() {
+      return enum_;
+    }
+
+    // repeated int32 intArray = 17;
+    public static final int INTARRAY_FIELD_NUMBER = 17;
+    private java.util.List<java.lang.Integer> intArray_;
+    /**
+     * <code>repeated int32 intArray = 17;</code>
+     *
+     * <pre>
+     * some repeated types
+     * </pre>
+     */
+    public java.util.List<java.lang.Integer>
+        getIntArrayList() {
+      return intArray_;
+    }
+    /**
+     * <code>repeated int32 intArray = 17;</code>
+     *
+     * <pre>
+     * some repeated types
+     * </pre>
+     */
+    public int getIntArrayCount() {
+      return intArray_.size();
+    }
+    /**
+     * <code>repeated int32 intArray = 17;</code>
+     *
+     * <pre>
+     * some repeated types
+     * </pre>
+     */
+    public int getIntArray(int index) {
+      return intArray_.get(index);
+    }
+
+    // repeated .org.apache.avro.protobuf.Foo fooArray = 20;
+    public static final int FOOARRAY_FIELD_NUMBER = 20;
+    private java.util.List<org.apache.avro.protobuf.Test.Foo> fooArray_;
+    /**
+     * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+     */
+    public java.util.List<org.apache.avro.protobuf.Test.Foo> getFooArrayList() {
+      return fooArray_;
+    }
+    /**
+     * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+     */
+    public java.util.List<? extends org.apache.avro.protobuf.Test.FooOrBuilder> 
+        getFooArrayOrBuilderList() {
+      return fooArray_;
+    }
+    /**
+     * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+     */
+    public int getFooArrayCount() {
+      return fooArray_.size();
+    }
+    /**
+     * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+     */
+    public org.apache.avro.protobuf.Test.Foo getFooArray(int index) {
+      return fooArray_.get(index);
+    }
+    /**
+     * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+     */
+    public org.apache.avro.protobuf.Test.FooOrBuilder getFooArrayOrBuilder(
+        int index) {
+      return fooArray_.get(index);
+    }
+
+    // repeated .org.apache.avro.protobuf.A syms = 19;
+    public static final int SYMS_FIELD_NUMBER = 19;
+    private java.util.List<org.apache.avro.protobuf.Test.A> syms_;
+    /**
+     * <code>repeated .org.apache.avro.protobuf.A syms = 19;</code>
+     */
+    public java.util.List<org.apache.avro.protobuf.Test.A> getSymsList() {
+      return syms_;
+    }
+    /**
+     * <code>repeated .org.apache.avro.protobuf.A syms = 19;</code>
+     */
+    public int getSymsCount() {
+      return syms_.size();
+    }
+    /**
+     * <code>repeated .org.apache.avro.protobuf.A syms = 19;</code>
+     */
+    public org.apache.avro.protobuf.Test.A getSyms(int index) {
+      return syms_.get(index);
+    }
+
+    // optional .org.apache.avro.protobuf.Foo foo = 18;
+    public static final int FOO_FIELD_NUMBER = 18;
+    private org.apache.avro.protobuf.Test.Foo foo_;
+    /**
+     * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+     *
+     * <pre>
+     * a recursive type
+     * </pre>
+     */
+    public boolean hasFoo() {
+      return ((bitField0_ & 0x00010000) == 0x00010000);
+    }
+    /**
+     * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+     *
+     * <pre>
+     * a recursive type
+     * </pre>
+     */
+    public org.apache.avro.protobuf.Test.Foo getFoo() {
+      return foo_;
+    }
+    /**
+     * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+     *
+     * <pre>
+     * a recursive type
+     * </pre>
+     */
+    public org.apache.avro.protobuf.Test.FooOrBuilder getFooOrBuilder() {
+      return foo_;
+    }
+
+    private void initFields() {
+      int32_ = 0;
+      int64_ = 0L;
+      uint32_ = 0;
+      uint64_ = 0L;
+      sint32_ = 0;
+      sint64_ = 0L;
+      fixed32_ = 0;
+      fixed64_ = 0L;
+      sfixed32_ = 0;
+      sfixed64_ = 0L;
+      float_ = 0F;
+      double_ = 0D;
+      bool_ = false;
+      string_ = "";
+      bytes_ = com.google.protobuf.ByteString.EMPTY;
+      enum_ = org.apache.avro.protobuf.Test.A.Z;
+      intArray_ = java.util.Collections.emptyList();
+      fooArray_ = java.util.Collections.emptyList();
+      syms_ = java.util.Collections.emptyList();
+      foo_ = org.apache.avro.protobuf.Test.Foo.getDefaultInstance();
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      if (!hasInt32()) {
+        memoizedIsInitialized = 0;
+        return false;
+      }
+      for (int i = 0; i < getFooArrayCount(); i++) {
+        if (!getFooArray(i).isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      if (hasFoo()) {
+        if (!getFoo().isInitialized()) {
+          memoizedIsInitialized = 0;
+          return false;
+        }
+      }
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        output.writeInt32(1, int32_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        output.writeInt64(2, int64_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        output.writeUInt32(3, uint32_);
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        output.writeUInt64(4, uint64_);
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        output.writeSInt32(5, sint32_);
+      }
+      if (((bitField0_ & 0x00000020) == 0x00000020)) {
+        output.writeSInt64(6, sint64_);
+      }
+      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+        output.writeFixed32(7, fixed32_);
+      }
+      if (((bitField0_ & 0x00000080) == 0x00000080)) {
+        output.writeFixed64(8, fixed64_);
+      }
+      if (((bitField0_ & 0x00000100) == 0x00000100)) {
+        output.writeSFixed32(9, sfixed32_);
+      }
+      if (((bitField0_ & 0x00000200) == 0x00000200)) {
+        output.writeSFixed64(10, sfixed64_);
+      }
+      if (((bitField0_ & 0x00000400) == 0x00000400)) {
+        output.writeFloat(11, float_);
+      }
+      if (((bitField0_ & 0x00000800) == 0x00000800)) {
+        output.writeDouble(12, double_);
+      }
+      if (((bitField0_ & 0x00001000) == 0x00001000)) {
+        output.writeBool(13, bool_);
+      }
+      if (((bitField0_ & 0x00002000) == 0x00002000)) {
+        output.writeBytes(14, getStringBytes());
+      }
+      if (((bitField0_ & 0x00004000) == 0x00004000)) {
+        output.writeBytes(15, bytes_);
+      }
+      if (((bitField0_ & 0x00008000) == 0x00008000)) {
+        output.writeEnum(16, enum_.getNumber());
+      }
+      for (int i = 0; i < intArray_.size(); i++) {
+        output.writeInt32(17, intArray_.get(i));
+      }
+      if (((bitField0_ & 0x00010000) == 0x00010000)) {
+        output.writeMessage(18, foo_);
+      }
+      for (int i = 0; i < syms_.size(); i++) {
+        output.writeEnum(19, syms_.get(i).getNumber());
+      }
+      for (int i = 0; i < fooArray_.size(); i++) {
+        output.writeMessage(20, fooArray_.get(i));
+      }
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      if (((bitField0_ & 0x00000001) == 0x00000001)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt32Size(1, int32_);
+      }
+      if (((bitField0_ & 0x00000002) == 0x00000002)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeInt64Size(2, int64_);
+      }
+      if (((bitField0_ & 0x00000004) == 0x00000004)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt32Size(3, uint32_);
+      }
+      if (((bitField0_ & 0x00000008) == 0x00000008)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeUInt64Size(4, uint64_);
+      }
+      if (((bitField0_ & 0x00000010) == 0x00000010)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeSInt32Size(5, sint32_);
+      }
+      if (((bitField0_ & 0x00000020) == 0x00000020)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeSInt64Size(6, sint64_);
+      }
+      if (((bitField0_ & 0x00000040) == 0x00000040)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeFixed32Size(7, fixed32_);
+      }
+      if (((bitField0_ & 0x00000080) == 0x00000080)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeFixed64Size(8, fixed64_);
+      }
+      if (((bitField0_ & 0x00000100) == 0x00000100)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeSFixed32Size(9, sfixed32_);
+      }
+      if (((bitField0_ & 0x00000200) == 0x00000200)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeSFixed64Size(10, sfixed64_);
+      }
+      if (((bitField0_ & 0x00000400) == 0x00000400)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeFloatSize(11, float_);
+      }
+      if (((bitField0_ & 0x00000800) == 0x00000800)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeDoubleSize(12, double_);
+      }
+      if (((bitField0_ & 0x00001000) == 0x00001000)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBoolSize(13, bool_);
+      }
+      if (((bitField0_ & 0x00002000) == 0x00002000)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(14, getStringBytes());
+      }
+      if (((bitField0_ & 0x00004000) == 0x00004000)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(15, bytes_);
+      }
+      if (((bitField0_ & 0x00008000) == 0x00008000)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeEnumSize(16, enum_.getNumber());
+      }
+      {
+        int dataSize = 0;
+        for (int i = 0; i < intArray_.size(); i++) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeInt32SizeNoTag(intArray_.get(i));
+        }
+        size += dataSize;
+        size += 2 * getIntArrayList().size();
+      }
+      if (((bitField0_ & 0x00010000) == 0x00010000)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(18, foo_);
+      }
+      {
+        int dataSize = 0;
+        for (int i = 0; i < syms_.size(); i++) {
+          dataSize += com.google.protobuf.CodedOutputStream
+            .computeEnumSizeNoTag(syms_.get(i).getNumber());
+        }
+        size += dataSize;
+        size += 2 * syms_.size();
+      }
+      for (int i = 0; i < fooArray_.size(); i++) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeMessageSize(20, fooArray_.get(i));
+      }
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    public static org.apache.avro.protobuf.Test.Foo parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.avro.protobuf.Test.Foo parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.avro.protobuf.Test.Foo parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.avro.protobuf.Test.Foo parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.avro.protobuf.Test.Foo parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.avro.protobuf.Test.Foo parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.avro.protobuf.Test.Foo parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.avro.protobuf.Test.Foo parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.avro.protobuf.Test.Foo parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.avro.protobuf.Test.Foo parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.avro.protobuf.Test.Foo prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code org.apache.avro.protobuf.Foo}
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.avro.protobuf.Test.FooOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.avro.protobuf.Test.internal_static_org_apache_avro_protobuf_Foo_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.avro.protobuf.Test.internal_static_org_apache_avro_protobuf_Foo_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.avro.protobuf.Test.Foo.class, org.apache.avro.protobuf.Test.Foo.Builder.class);
+      }
+
+      // Construct using org.apache.avro.protobuf.Test.Foo.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+          getFooArrayFieldBuilder();
+          getFooFieldBuilder();
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        int32_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000001);
+        int64_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000002);
+        uint32_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000004);
+        uint64_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000008);
+        sint32_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000010);
+        sint64_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000020);
+        fixed32_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000040);
+        fixed64_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000080);
+        sfixed32_ = 0;
+        bitField0_ = (bitField0_ & ~0x00000100);
+        sfixed64_ = 0L;
+        bitField0_ = (bitField0_ & ~0x00000200);
+        float_ = 0F;
+        bitField0_ = (bitField0_ & ~0x00000400);
+        double_ = 0D;
+        bitField0_ = (bitField0_ & ~0x00000800);
+        bool_ = false;
+        bitField0_ = (bitField0_ & ~0x00001000);
+        string_ = "";
+        bitField0_ = (bitField0_ & ~0x00002000);
+        bytes_ = com.google.protobuf.ByteString.EMPTY;
+        bitField0_ = (bitField0_ & ~0x00004000);
+        enum_ = org.apache.avro.protobuf.Test.A.Z;
+        bitField0_ = (bitField0_ & ~0x00008000);
+        intArray_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00010000);
+        if (fooArrayBuilder_ == null) {
+          fooArray_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00020000);
+        } else {
+          fooArrayBuilder_.clear();
+        }
+        syms_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00040000);
+        if (fooBuilder_ == null) {
+          foo_ = org.apache.avro.protobuf.Test.Foo.getDefaultInstance();
+        } else {
+          fooBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00080000);
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.avro.protobuf.Test.internal_static_org_apache_avro_protobuf_Foo_descriptor;
+      }
+
+      public org.apache.avro.protobuf.Test.Foo getDefaultInstanceForType() {
+        return org.apache.avro.protobuf.Test.Foo.getDefaultInstance();
+      }
+
+      public org.apache.avro.protobuf.Test.Foo build() {
+        org.apache.avro.protobuf.Test.Foo result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.avro.protobuf.Test.Foo buildPartial() {
+        org.apache.avro.protobuf.Test.Foo result = new org.apache.avro.protobuf.Test.Foo(this);
+        int from_bitField0_ = bitField0_;
+        int to_bitField0_ = 0;
+        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+          to_bitField0_ |= 0x00000001;
+        }
+        result.int32_ = int32_;
+        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+          to_bitField0_ |= 0x00000002;
+        }
+        result.int64_ = int64_;
+        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+          to_bitField0_ |= 0x00000004;
+        }
+        result.uint32_ = uint32_;
+        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+          to_bitField0_ |= 0x00000008;
+        }
+        result.uint64_ = uint64_;
+        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
+          to_bitField0_ |= 0x00000010;
+        }
+        result.sint32_ = sint32_;
+        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
+          to_bitField0_ |= 0x00000020;
+        }
+        result.sint64_ = sint64_;
+        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
+          to_bitField0_ |= 0x00000040;
+        }
+        result.fixed32_ = fixed32_;
+        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
+          to_bitField0_ |= 0x00000080;
+        }
+        result.fixed64_ = fixed64_;
+        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
+          to_bitField0_ |= 0x00000100;
+        }
+        result.sfixed32_ = sfixed32_;
+        if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
+          to_bitField0_ |= 0x00000200;
+        }
+        result.sfixed64_ = sfixed64_;
+        if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
+          to_bitField0_ |= 0x00000400;
+        }
+        result.float_ = float_;
+        if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
+          to_bitField0_ |= 0x00000800;
+        }
+        result.double_ = double_;
+        if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
+          to_bitField0_ |= 0x00001000;
+        }
+        result.bool_ = bool_;
+        if (((from_bitField0_ & 0x00002000) == 0x00002000)) {
+          to_bitField0_ |= 0x00002000;
+        }
+        result.string_ = string_;
+        if (((from_bitField0_ & 0x00004000) == 0x00004000)) {
+          to_bitField0_ |= 0x00004000;
+        }
+        result.bytes_ = bytes_;
+        if (((from_bitField0_ & 0x00008000) == 0x00008000)) {
+          to_bitField0_ |= 0x00008000;
+        }
+        result.enum_ = enum_;
+        if (((bitField0_ & 0x00010000) == 0x00010000)) {
+          intArray_ = java.util.Collections.unmodifiableList(intArray_);
+          bitField0_ = (bitField0_ & ~0x00010000);
+        }
+        result.intArray_ = intArray_;
+        if (fooArrayBuilder_ == null) {
+          if (((bitField0_ & 0x00020000) == 0x00020000)) {
+            fooArray_ = java.util.Collections.unmodifiableList(fooArray_);
+            bitField0_ = (bitField0_ & ~0x00020000);
+          }
+          result.fooArray_ = fooArray_;
+        } else {
+          result.fooArray_ = fooArrayBuilder_.build();
+        }
+        if (((bitField0_ & 0x00040000) == 0x00040000)) {
+          syms_ = java.util.Collections.unmodifiableList(syms_);
+          bitField0_ = (bitField0_ & ~0x00040000);
+        }
+        result.syms_ = syms_;
+        if (((from_bitField0_ & 0x00080000) == 0x00080000)) {
+          to_bitField0_ |= 0x00010000;
+        }
+        if (fooBuilder_ == null) {
+          result.foo_ = foo_;
+        } else {
+          result.foo_ = fooBuilder_.build();
+        }
+        result.bitField0_ = to_bitField0_;
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.avro.protobuf.Test.Foo) {
+          return mergeFrom((org.apache.avro.protobuf.Test.Foo)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.avro.protobuf.Test.Foo other) {
+        if (other == org.apache.avro.protobuf.Test.Foo.getDefaultInstance()) return this;
+        if (other.hasInt32()) {
+          setInt32(other.getInt32());
+        }
+        if (other.hasInt64()) {
+          setInt64(other.getInt64());
+        }
+        if (other.hasUint32()) {
+          setUint32(other.getUint32());
+        }
+        if (other.hasUint64()) {
+          setUint64(other.getUint64());
+        }
+        if (other.hasSint32()) {
+          setSint32(other.getSint32());
+        }
+        if (other.hasSint64()) {
+          setSint64(other.getSint64());
+        }
+        if (other.hasFixed32()) {
+          setFixed32(other.getFixed32());
+        }
+        if (other.hasFixed64()) {
+          setFixed64(other.getFixed64());
+        }
+        if (other.hasSfixed32()) {
+          setSfixed32(other.getSfixed32());
+        }
+        if (other.hasSfixed64()) {
+          setSfixed64(other.getSfixed64());
+        }
+        if (other.hasFloat()) {
+          setFloat(other.getFloat());
+        }
+        if (other.hasDouble()) {
+          setDouble(other.getDouble());
+        }
+        if (other.hasBool()) {
+          setBool(other.getBool());
+        }
+        if (other.hasString()) {
+          bitField0_ |= 0x00002000;
+          string_ = other.string_;
+          onChanged();
+        }
+        if (other.hasBytes()) {
+          setBytes(other.getBytes());
+        }
+        if (other.hasEnum()) {
+          setEnum(other.getEnum());
+        }
+        if (!other.intArray_.isEmpty()) {
+          if (intArray_.isEmpty()) {
+            intArray_ = other.intArray_;
+            bitField0_ = (bitField0_ & ~0x00010000);
+          } else {
+            ensureIntArrayIsMutable();
+            intArray_.addAll(other.intArray_);
+          }
+          onChanged();
+        }
+        if (fooArrayBuilder_ == null) {
+          if (!other.fooArray_.isEmpty()) {
+            if (fooArray_.isEmpty()) {
+              fooArray_ = other.fooArray_;
+              bitField0_ = (bitField0_ & ~0x00020000);
+            } else {
+              ensureFooArrayIsMutable();
+              fooArray_.addAll(other.fooArray_);
+            }
+            onChanged();
+          }
+        } else {
+          if (!other.fooArray_.isEmpty()) {
+            if (fooArrayBuilder_.isEmpty()) {
+              fooArrayBuilder_.dispose();
+              fooArrayBuilder_ = null;
+              fooArray_ = other.fooArray_;
+              bitField0_ = (bitField0_ & ~0x00020000);
+              fooArrayBuilder_ = 
+                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                   getFooArrayFieldBuilder() : null;
+            } else {
+              fooArrayBuilder_.addAllMessages(other.fooArray_);
+            }
+          }
+        }
+        if (!other.syms_.isEmpty()) {
+          if (syms_.isEmpty()) {
+            syms_ = other.syms_;
+            bitField0_ = (bitField0_ & ~0x00040000);
+          } else {
+            ensureSymsIsMutable();
+            syms_.addAll(other.syms_);
+          }
+          onChanged();
+        }
+        if (other.hasFoo()) {
+          mergeFoo(other.getFoo());
+        }
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        if (!hasInt32()) {
+          return false;
+        }
+        for (int i = 0; i < getFooArrayCount(); i++) {
+          if (!getFooArray(i).isInitialized()) {
+            return false;
+          }
+        }
+        if (hasFoo()) {
+          if (!getFoo().isInitialized()) {
+            return false;
+          }
+        }
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.avro.protobuf.Test.Foo parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.avro.protobuf.Test.Foo) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
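+
+      // Illustrative note (added commentary, not protoc output): on a parse
+      // failure, the finally block above still merges whatever was decoded,
+      // so the builder retains any fields read before the exception.
+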
+      private int bitField0_;
+
+      // required int32 int32 = 1;
+      private int int32_ ;
+      /**
+       * <code>required int32 int32 = 1;</code>
+       *
+       * <pre>
+       * all the primitive types
+       * </pre>
+       */
+      public boolean hasInt32() {
+        return ((bitField0_ & 0x00000001) == 0x00000001);
+      }
+      /**
+       * <code>required int32 int32 = 1;</code>
+       *
+       * <pre>
+       * all the primitive types
+       * </pre>
+       */
+      public int getInt32() {
+        return int32_;
+      }
+      /**
+       * <code>required int32 int32 = 1;</code>
+       *
+       * <pre>
+       * all the primitive types
+       * </pre>
+       */
+      public Builder setInt32(int value) {
+        bitField0_ |= 0x00000001;
+        int32_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>required int32 int32 = 1;</code>
+       *
+       * <pre>
+       * all the primitive types
+       * </pre>
+       */
+      public Builder clearInt32() {
+        bitField0_ = (bitField0_ & ~0x00000001);
+        int32_ = 0;
+        onChanged();
+        return this;
+      }
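+
+      // Illustrative note (added commentary, not protoc output): presence of
+      // each scalar field is tracked by one bit of bitField0_, for example:
+      //   Foo.Builder b = Foo.newBuilder();
+      //   b.hasInt32();              // false: bit 0x00000001 is clear
+      //   b.setInt32(42).hasInt32(); // true: setInt32 sets the bit
+      //   b.clearInt32().hasInt32(); // false again: the bit is masked off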
+
+      // optional int64 int64 = 2;
+      private long int64_ ;
+      /**
+       * <code>optional int64 int64 = 2;</code>
+       */
+      public boolean hasInt64() {
+        return ((bitField0_ & 0x00000002) == 0x00000002);
+      }
+      /**
+       * <code>optional int64 int64 = 2;</code>
+       */
+      public long getInt64() {
+        return int64_;
+      }
+      /**
+       * <code>optional int64 int64 = 2;</code>
+       */
+      public Builder setInt64(long value) {
+        bitField0_ |= 0x00000002;
+        int64_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional int64 int64 = 2;</code>
+       */
+      public Builder clearInt64() {
+        bitField0_ = (bitField0_ & ~0x00000002);
+        int64_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      // optional uint32 uint32 = 3;
+      private int uint32_ ;
+      /**
+       * <code>optional uint32 uint32 = 3;</code>
+       */
+      public boolean hasUint32() {
+        return ((bitField0_ & 0x00000004) == 0x00000004);
+      }
+      /**
+       * <code>optional uint32 uint32 = 3;</code>
+       */
+      public int getUint32() {
+        return uint32_;
+      }
+      /**
+       * <code>optional uint32 uint32 = 3;</code>
+       */
+      public Builder setUint32(int value) {
+        bitField0_ |= 0x00000004;
+        uint32_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional uint32 uint32 = 3;</code>
+       */
+      public Builder clearUint32() {
+        bitField0_ = (bitField0_ & ~0x00000004);
+        uint32_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // optional uint64 uint64 = 4;
+      private long uint64_ ;
+      /**
+       * <code>optional uint64 uint64 = 4;</code>
+       */
+      public boolean hasUint64() {
+        return ((bitField0_ & 0x00000008) == 0x00000008);
+      }
+      /**
+       * <code>optional uint64 uint64 = 4;</code>
+       */
+      public long getUint64() {
+        return uint64_;
+      }
+      /**
+       * <code>optional uint64 uint64 = 4;</code>
+       */
+      public Builder setUint64(long value) {
+        bitField0_ |= 0x00000008;
+        uint64_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional uint64 uint64 = 4;</code>
+       */
+      public Builder clearUint64() {
+        bitField0_ = (bitField0_ & ~0x00000008);
+        uint64_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      // optional sint32 sint32 = 5;
+      private int sint32_ ;
+      /**
+       * <code>optional sint32 sint32 = 5;</code>
+       */
+      public boolean hasSint32() {
+        return ((bitField0_ & 0x00000010) == 0x00000010);
+      }
+      /**
+       * <code>optional sint32 sint32 = 5;</code>
+       */
+      public int getSint32() {
+        return sint32_;
+      }
+      /**
+       * <code>optional sint32 sint32 = 5;</code>
+       */
+      public Builder setSint32(int value) {
+        bitField0_ |= 0x00000010;
+        sint32_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional sint32 sint32 = 5;</code>
+       */
+      public Builder clearSint32() {
+        bitField0_ = (bitField0_ & ~0x00000010);
+        sint32_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // optional sint64 sint64 = 6;
+      private long sint64_ ;
+      /**
+       * <code>optional sint64 sint64 = 6;</code>
+       */
+      public boolean hasSint64() {
+        return ((bitField0_ & 0x00000020) == 0x00000020);
+      }
+      /**
+       * <code>optional sint64 sint64 = 6;</code>
+       */
+      public long getSint64() {
+        return sint64_;
+      }
+      /**
+       * <code>optional sint64 sint64 = 6;</code>
+       */
+      public Builder setSint64(long value) {
+        bitField0_ |= 0x00000020;
+        sint64_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional sint64 sint64 = 6;</code>
+       */
+      public Builder clearSint64() {
+        bitField0_ = (bitField0_ & ~0x00000020);
+        sint64_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      // optional fixed32 fixed32 = 7;
+      private int fixed32_ ;
+      /**
+       * <code>optional fixed32 fixed32 = 7;</code>
+       */
+      public boolean hasFixed32() {
+        return ((bitField0_ & 0x00000040) == 0x00000040);
+      }
+      /**
+       * <code>optional fixed32 fixed32 = 7;</code>
+       */
+      public int getFixed32() {
+        return fixed32_;
+      }
+      /**
+       * <code>optional fixed32 fixed32 = 7;</code>
+       */
+      public Builder setFixed32(int value) {
+        bitField0_ |= 0x00000040;
+        fixed32_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional fixed32 fixed32 = 7;</code>
+       */
+      public Builder clearFixed32() {
+        bitField0_ = (bitField0_ & ~0x00000040);
+        fixed32_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // optional fixed64 fixed64 = 8;
+      private long fixed64_ ;
+      /**
+       * <code>optional fixed64 fixed64 = 8;</code>
+       */
+      public boolean hasFixed64() {
+        return ((bitField0_ & 0x00000080) == 0x00000080);
+      }
+      /**
+       * <code>optional fixed64 fixed64 = 8;</code>
+       */
+      public long getFixed64() {
+        return fixed64_;
+      }
+      /**
+       * <code>optional fixed64 fixed64 = 8;</code>
+       */
+      public Builder setFixed64(long value) {
+        bitField0_ |= 0x00000080;
+        fixed64_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional fixed64 fixed64 = 8;</code>
+       */
+      public Builder clearFixed64() {
+        bitField0_ = (bitField0_ & ~0x00000080);
+        fixed64_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      // optional sfixed32 sfixed32 = 9;
+      private int sfixed32_ ;
+      /**
+       * <code>optional sfixed32 sfixed32 = 9;</code>
+       */
+      public boolean hasSfixed32() {
+        return ((bitField0_ & 0x00000100) == 0x00000100);
+      }
+      /**
+       * <code>optional sfixed32 sfixed32 = 9;</code>
+       */
+      public int getSfixed32() {
+        return sfixed32_;
+      }
+      /**
+       * <code>optional sfixed32 sfixed32 = 9;</code>
+       */
+      public Builder setSfixed32(int value) {
+        bitField0_ |= 0x00000100;
+        sfixed32_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional sfixed32 sfixed32 = 9;</code>
+       */
+      public Builder clearSfixed32() {
+        bitField0_ = (bitField0_ & ~0x00000100);
+        sfixed32_ = 0;
+        onChanged();
+        return this;
+      }
+
+      // optional sfixed64 sfixed64 = 10;
+      private long sfixed64_ ;
+      /**
+       * <code>optional sfixed64 sfixed64 = 10;</code>
+       */
+      public boolean hasSfixed64() {
+        return ((bitField0_ & 0x00000200) == 0x00000200);
+      }
+      /**
+       * <code>optional sfixed64 sfixed64 = 10;</code>
+       */
+      public long getSfixed64() {
+        return sfixed64_;
+      }
+      /**
+       * <code>optional sfixed64 sfixed64 = 10;</code>
+       */
+      public Builder setSfixed64(long value) {
+        bitField0_ |= 0x00000200;
+        sfixed64_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional sfixed64 sfixed64 = 10;</code>
+       */
+      public Builder clearSfixed64() {
+        bitField0_ = (bitField0_ & ~0x00000200);
+        sfixed64_ = 0L;
+        onChanged();
+        return this;
+      }
+
+      // optional float float = 11;
+      private float float_ ;
+      /**
+       * <code>optional float float = 11;</code>
+       */
+      public boolean hasFloat() {
+        return ((bitField0_ & 0x00000400) == 0x00000400);
+      }
+      /**
+       * <code>optional float float = 11;</code>
+       */
+      public float getFloat() {
+        return float_;
+      }
+      /**
+       * <code>optional float float = 11;</code>
+       */
+      public Builder setFloat(float value) {
+        bitField0_ |= 0x00000400;
+        float_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional float float = 11;</code>
+       */
+      public Builder clearFloat() {
+        bitField0_ = (bitField0_ & ~0x00000400);
+        float_ = 0F;
+        onChanged();
+        return this;
+      }
+
+      // optional double double = 12;
+      private double double_ ;
+      /**
+       * <code>optional double double = 12;</code>
+       */
+      public boolean hasDouble() {
+        return ((bitField0_ & 0x00000800) == 0x00000800);
+      }
+      /**
+       * <code>optional double double = 12;</code>
+       */
+      public double getDouble() {
+        return double_;
+      }
+      /**
+       * <code>optional double double = 12;</code>
+       */
+      public Builder setDouble(double value) {
+        bitField0_ |= 0x00000800;
+        double_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional double double = 12;</code>
+       */
+      public Builder clearDouble() {
+        bitField0_ = (bitField0_ & ~0x00000800);
+        double_ = 0D;
+        onChanged();
+        return this;
+      }
+
+      // optional bool bool = 13;
+      private boolean bool_ ;
+      /**
+       * <code>optional bool bool = 13;</code>
+       */
+      public boolean hasBool() {
+        return ((bitField0_ & 0x00001000) == 0x00001000);
+      }
+      /**
+       * <code>optional bool bool = 13;</code>
+       */
+      public boolean getBool() {
+        return bool_;
+      }
+      /**
+       * <code>optional bool bool = 13;</code>
+       */
+      public Builder setBool(boolean value) {
+        bitField0_ |= 0x00001000;
+        bool_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional bool bool = 13;</code>
+       */
+      public Builder clearBool() {
+        bitField0_ = (bitField0_ & ~0x00001000);
+        bool_ = false;
+        onChanged();
+        return this;
+      }
+
+      // optional string string = 14;
+      private java.lang.Object string_ = "";
+      /**
+       * <code>optional string string = 14;</code>
+       */
+      public boolean hasString() {
+        return ((bitField0_ & 0x00002000) == 0x00002000);
+      }
+      /**
+       * <code>optional string string = 14;</code>
+       */
+      public java.lang.String getString() {
+        java.lang.Object ref = string_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          string_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string string = 14;</code>
+       */
+      public com.google.protobuf.ByteString
+          getStringBytes() {
+        java.lang.Object ref = string_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          string_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string string = 14;</code>
+       */
+      public Builder setString(
+          java.lang.String value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00002000;
+        string_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string string = 14;</code>
+       */
+      public Builder clearString() {
+        bitField0_ = (bitField0_ & ~0x00002000);
+        string_ = getDefaultInstance().getString();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string string = 14;</code>
+       */
+      public Builder setStringBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00002000;
+        string_ = value;
+        onChanged();
+        return this;
+      }
+
+      // optional bytes bytes = 15;
+      private com.google.protobuf.ByteString bytes_ = com.google.protobuf.ByteString.EMPTY;
+      /**
+       * <code>optional bytes bytes = 15;</code>
+       */
+      public boolean hasBytes() {
+        return ((bitField0_ & 0x00004000) == 0x00004000);
+      }
+      /**
+       * <code>optional bytes bytes = 15;</code>
+       */
+      public com.google.protobuf.ByteString getBytes() {
+        return bytes_;
+      }
+      /**
+       * <code>optional bytes bytes = 15;</code>
+       */
+      public Builder setBytes(com.google.protobuf.ByteString value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00004000;
+        bytes_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional bytes bytes = 15;</code>
+       */
+      public Builder clearBytes() {
+        bitField0_ = (bitField0_ & ~0x00004000);
+        bytes_ = getDefaultInstance().getBytes();
+        onChanged();
+        return this;
+      }
+
+      // optional .org.apache.avro.protobuf.A enum = 16 [default = Z];
+      private org.apache.avro.protobuf.Test.A enum_ = org.apache.avro.protobuf.Test.A.Z;
+      /**
+       * <code>optional .org.apache.avro.protobuf.A enum = 16 [default = Z];</code>
+       */
+      public boolean hasEnum() {
+        return ((bitField0_ & 0x00008000) == 0x00008000);
+      }
+      /**
+       * <code>optional .org.apache.avro.protobuf.A enum = 16 [default = Z];</code>
+       */
+      public org.apache.avro.protobuf.Test.A getEnum() {
+        return enum_;
+      }
+      /**
+       * <code>optional .org.apache.avro.protobuf.A enum = 16 [default = Z];</code>
+       */
+      public Builder setEnum(org.apache.avro.protobuf.Test.A value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        bitField0_ |= 0x00008000;
+        enum_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional .org.apache.avro.protobuf.A enum = 16 [default = Z];</code>
+       */
+      public Builder clearEnum() {
+        bitField0_ = (bitField0_ & ~0x00008000);
+        enum_ = org.apache.avro.protobuf.Test.A.Z;
+        onChanged();
+        return this;
+      }
+
+      // repeated int32 intArray = 17;
+      private java.util.List<java.lang.Integer> intArray_ = java.util.Collections.emptyList();
+      private void ensureIntArrayIsMutable() {
+        if (!((bitField0_ & 0x00010000) == 0x00010000)) {
+          intArray_ = new java.util.ArrayList<java.lang.Integer>(intArray_);
+          bitField0_ |= 0x00010000;
+        }
+      }
+      /**
+       * <code>repeated int32 intArray = 17;</code>
+       *
+       * <pre>
+       * some repeated types
+       * </pre>
+       */
+      public java.util.List<java.lang.Integer>
+          getIntArrayList() {
+        return java.util.Collections.unmodifiableList(intArray_);
+      }
+      /**
+       * <code>repeated int32 intArray = 17;</code>
+       *
+       * <pre>
+       * some repeated types
+       * </pre>
+       */
+      public int getIntArrayCount() {
+        return intArray_.size();
+      }
+      /**
+       * <code>repeated int32 intArray = 17;</code>
+       *
+       * <pre>
+       * some repeated types
+       * </pre>
+       */
+      public int getIntArray(int index) {
+        return intArray_.get(index);
+      }
+      /**
+       * <code>repeated int32 intArray = 17;</code>
+       *
+       * <pre>
+       * some repeated types
+       * </pre>
+       */
+      public Builder setIntArray(
+          int index, int value) {
+        ensureIntArrayIsMutable();
+        intArray_.set(index, value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 intArray = 17;</code>
+       *
+       * <pre>
+       * some repeated types
+       * </pre>
+       */
+      public Builder addIntArray(int value) {
+        ensureIntArrayIsMutable();
+        intArray_.add(value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 intArray = 17;</code>
+       *
+       * <pre>
+       * some repeated types
+       * </pre>
+       */
+      public Builder addAllIntArray(
+          java.lang.Iterable<? extends java.lang.Integer> values) {
+        ensureIntArrayIsMutable();
+        super.addAll(values, intArray_);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated int32 intArray = 17;</code>
+       *
+       * <pre>
+       * some repeated types
+       * </pre>
+       */
+      public Builder clearIntArray() {
+        intArray_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00010000);
+        onChanged();
+        return this;
+      }
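+
+      // Illustrative sketch (added commentary, not protoc output): repeated
+      // scalars are copy-on-write; getIntArrayList() returns an unmodifiable
+      // view, so callers mutate only through the builder methods:
+      //   Foo.Builder b = Foo.newBuilder().addIntArray(1).addIntArray(2);
+      //   b.getIntArrayList();  // [1, 2], unmodifiable
+      //   b.setIntArray(0, 9);  // ensureIntArrayIsMutable() copies first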
+
+      // repeated .org.apache.avro.protobuf.Foo fooArray = 20;
+      private java.util.List<org.apache.avro.protobuf.Test.Foo> fooArray_ =
+        java.util.Collections.emptyList();
+      private void ensureFooArrayIsMutable() {
+        if (!((bitField0_ & 0x00020000) == 0x00020000)) {
+          fooArray_ = new java.util.ArrayList<org.apache.avro.protobuf.Test.Foo>(fooArray_);
+          bitField0_ |= 0x00020000;
+        }
+      }
+
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.apache.avro.protobuf.Test.Foo, org.apache.avro.protobuf.Test.Foo.Builder, org.apache.avro.protobuf.Test.FooOrBuilder> fooArrayBuilder_;
+
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public java.util.List<org.apache.avro.protobuf.Test.Foo> getFooArrayList() {
+        if (fooArrayBuilder_ == null) {
+          return java.util.Collections.unmodifiableList(fooArray_);
+        } else {
+          return fooArrayBuilder_.getMessageList();
+        }
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public int getFooArrayCount() {
+        if (fooArrayBuilder_ == null) {
+          return fooArray_.size();
+        } else {
+          return fooArrayBuilder_.getCount();
+        }
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public org.apache.avro.protobuf.Test.Foo getFooArray(int index) {
+        if (fooArrayBuilder_ == null) {
+          return fooArray_.get(index);
+        } else {
+          return fooArrayBuilder_.getMessage(index);
+        }
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public Builder setFooArray(
+          int index, org.apache.avro.protobuf.Test.Foo value) {
+        if (fooArrayBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureFooArrayIsMutable();
+          fooArray_.set(index, value);
+          onChanged();
+        } else {
+          fooArrayBuilder_.setMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public Builder setFooArray(
+          int index, org.apache.avro.protobuf.Test.Foo.Builder builderForValue) {
+        if (fooArrayBuilder_ == null) {
+          ensureFooArrayIsMutable();
+          fooArray_.set(index, builderForValue.build());
+          onChanged();
+        } else {
+          fooArrayBuilder_.setMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public Builder addFooArray(org.apache.avro.protobuf.Test.Foo value) {
+        if (fooArrayBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureFooArrayIsMutable();
+          fooArray_.add(value);
+          onChanged();
+        } else {
+          fooArrayBuilder_.addMessage(value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public Builder addFooArray(
+          int index, org.apache.avro.protobuf.Test.Foo value) {
+        if (fooArrayBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          ensureFooArrayIsMutable();
+          fooArray_.add(index, value);
+          onChanged();
+        } else {
+          fooArrayBuilder_.addMessage(index, value);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public Builder addFooArray(
+          org.apache.avro.protobuf.Test.Foo.Builder builderForValue) {
+        if (fooArrayBuilder_ == null) {
+          ensureFooArrayIsMutable();
+          fooArray_.add(builderForValue.build());
+          onChanged();
+        } else {
+          fooArrayBuilder_.addMessage(builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public Builder addFooArray(
+          int index, org.apache.avro.protobuf.Test.Foo.Builder builderForValue) {
+        if (fooArrayBuilder_ == null) {
+          ensureFooArrayIsMutable();
+          fooArray_.add(index, builderForValue.build());
+          onChanged();
+        } else {
+          fooArrayBuilder_.addMessage(index, builderForValue.build());
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public Builder addAllFooArray(
+          java.lang.Iterable<? extends org.apache.avro.protobuf.Test.Foo> values) {
+        if (fooArrayBuilder_ == null) {
+          ensureFooArrayIsMutable();
+          super.addAll(values, fooArray_);
+          onChanged();
+        } else {
+          fooArrayBuilder_.addAllMessages(values);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public Builder clearFooArray() {
+        if (fooArrayBuilder_ == null) {
+          fooArray_ = java.util.Collections.emptyList();
+          bitField0_ = (bitField0_ & ~0x00020000);
+          onChanged();
+        } else {
+          fooArrayBuilder_.clear();
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public Builder removeFooArray(int index) {
+        if (fooArrayBuilder_ == null) {
+          ensureFooArrayIsMutable();
+          fooArray_.remove(index);
+          onChanged();
+        } else {
+          fooArrayBuilder_.remove(index);
+        }
+        return this;
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public org.apache.avro.protobuf.Test.Foo.Builder getFooArrayBuilder(
+          int index) {
+        return getFooArrayFieldBuilder().getBuilder(index);
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public org.apache.avro.protobuf.Test.FooOrBuilder getFooArrayOrBuilder(
+          int index) {
+        if (fooArrayBuilder_ == null) {
+          return fooArray_.get(index);
+        } else {
+          return fooArrayBuilder_.getMessageOrBuilder(index);
+        }
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public java.util.List<? extends org.apache.avro.protobuf.Test.FooOrBuilder> 
+           getFooArrayOrBuilderList() {
+        if (fooArrayBuilder_ != null) {
+          return fooArrayBuilder_.getMessageOrBuilderList();
+        } else {
+          return java.util.Collections.unmodifiableList(fooArray_);
+        }
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public org.apache.avro.protobuf.Test.Foo.Builder addFooArrayBuilder() {
+        return getFooArrayFieldBuilder().addBuilder(
+            org.apache.avro.protobuf.Test.Foo.getDefaultInstance());
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public org.apache.avro.protobuf.Test.Foo.Builder addFooArrayBuilder(
+          int index) {
+        return getFooArrayFieldBuilder().addBuilder(
+            index, org.apache.avro.protobuf.Test.Foo.getDefaultInstance());
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.Foo fooArray = 20;</code>
+       */
+      public java.util.List<org.apache.avro.protobuf.Test.Foo.Builder> 
+           getFooArrayBuilderList() {
+        return getFooArrayFieldBuilder().getBuilderList();
+      }
+      private com.google.protobuf.RepeatedFieldBuilder<
+          org.apache.avro.protobuf.Test.Foo, org.apache.avro.protobuf.Test.Foo.Builder, org.apache.avro.protobuf.Test.FooOrBuilder> 
+          getFooArrayFieldBuilder() {
+        if (fooArrayBuilder_ == null) {
+          fooArrayBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+              org.apache.avro.protobuf.Test.Foo, org.apache.avro.protobuf.Test.Foo.Builder, org.apache.avro.protobuf.Test.FooOrBuilder>(
+                  fooArray_,
+                  ((bitField0_ & 0x00020000) == 0x00020000),
+                  getParentForChildren(),
+                  isClean());
+          fooArray_ = null;
+        }
+        return fooArrayBuilder_;
+      }
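+
+      // Illustrative sketch (added commentary, not protoc output): repeated
+      // message fields can be edited in place via nested builders; the first
+      // such call switches storage from fooArray_ to fooArrayBuilder_:
+      //   Foo.Builder b = Foo.newBuilder().setInt32(1);
+      //   b.addFooArrayBuilder().setInt32(2);    // append and edit in place
+      //   b.getFooArrayBuilder(0).setInt64(3L);  // revisit the same element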
+
+      // repeated .org.apache.avro.protobuf.A syms = 19;
+      private java.util.List<org.apache.avro.protobuf.Test.A> syms_ =
+        java.util.Collections.emptyList();
+      private void ensureSymsIsMutable() {
+        if (!((bitField0_ & 0x00040000) == 0x00040000)) {
+          syms_ = new java.util.ArrayList<org.apache.avro.protobuf.Test.A>(syms_);
+          bitField0_ |= 0x00040000;
+        }
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.A syms = 19;</code>
+       */
+      public java.util.List<org.apache.avro.protobuf.Test.A> getSymsList() {
+        return java.util.Collections.unmodifiableList(syms_);
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.A syms = 19;</code>
+       */
+      public int getSymsCount() {
+        return syms_.size();
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.A syms = 19;</code>
+       */
+      public org.apache.avro.protobuf.Test.A getSyms(int index) {
+        return syms_.get(index);
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.A syms = 19;</code>
+       */
+      public Builder setSyms(
+          int index, org.apache.avro.protobuf.Test.A value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureSymsIsMutable();
+        syms_.set(index, value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.A syms = 19;</code>
+       */
+      public Builder addSyms(org.apache.avro.protobuf.Test.A value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        ensureSymsIsMutable();
+        syms_.add(value);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.A syms = 19;</code>
+       */
+      public Builder addAllSyms(
+          java.lang.Iterable<? extends org.apache.avro.protobuf.Test.A> values) {
+        ensureSymsIsMutable();
+        super.addAll(values, syms_);
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>repeated .org.apache.avro.protobuf.A syms = 19;</code>
+       */
+      public Builder clearSyms() {
+        syms_ = java.util.Collections.emptyList();
+        bitField0_ = (bitField0_ & ~0x00040000);
+        onChanged();
+        return this;
+      }
+
+      // optional .org.apache.avro.protobuf.Foo foo = 18;
+      private org.apache.avro.protobuf.Test.Foo foo_ = org.apache.avro.protobuf.Test.Foo.getDefaultInstance();
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.avro.protobuf.Test.Foo, org.apache.avro.protobuf.Test.Foo.Builder, org.apache.avro.protobuf.Test.FooOrBuilder> fooBuilder_;
+      /**
+       * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+       *
+       * <pre>
+       * a recursive type
+       * </pre>
+       */
+      public boolean hasFoo() {
+        return ((bitField0_ & 0x00080000) == 0x00080000);
+      }
+      /**
+       * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+       *
+       * <pre>
+       * a recursive type
+       * </pre>
+       */
+      public org.apache.avro.protobuf.Test.Foo getFoo() {
+        if (fooBuilder_ == null) {
+          return foo_;
+        } else {
+          return fooBuilder_.getMessage();
+        }
+      }
+      /**
+       * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+       *
+       * <pre>
+       * a recursive type
+       * </pre>
+       */
+      public Builder setFoo(org.apache.avro.protobuf.Test.Foo value) {
+        if (fooBuilder_ == null) {
+          if (value == null) {
+            throw new NullPointerException();
+          }
+          foo_ = value;
+          onChanged();
+        } else {
+          fooBuilder_.setMessage(value);
+        }
+        bitField0_ |= 0x00080000;
+        return this;
+      }
+      /**
+       * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+       *
+       * <pre>
+       * a recursive type
+       * </pre>
+       */
+      public Builder setFoo(
+          org.apache.avro.protobuf.Test.Foo.Builder builderForValue) {
+        if (fooBuilder_ == null) {
+          foo_ = builderForValue.build();
+          onChanged();
+        } else {
+          fooBuilder_.setMessage(builderForValue.build());
+        }
+        bitField0_ |= 0x00080000;
+        return this;
+      }
+      /**
+       * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+       *
+       * <pre>
+       * a recursive type
+       * </pre>
+       */
+      public Builder mergeFoo(org.apache.avro.protobuf.Test.Foo value) {
+        if (fooBuilder_ == null) {
+          if (((bitField0_ & 0x00080000) == 0x00080000) &&
+              foo_ != org.apache.avro.protobuf.Test.Foo.getDefaultInstance()) {
+            foo_ =
+              org.apache.avro.protobuf.Test.Foo.newBuilder(foo_).mergeFrom(value).buildPartial();
+          } else {
+            foo_ = value;
+          }
+          onChanged();
+        } else {
+          fooBuilder_.mergeFrom(value);
+        }
+        bitField0_ |= 0x00080000;
+        return this;
+      }
+      /**
+       * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+       *
+       * <pre>
+       * a recursive type
+       * </pre>
+       */
+      public Builder clearFoo() {
+        if (fooBuilder_ == null) {
+          foo_ = org.apache.avro.protobuf.Test.Foo.getDefaultInstance();
+          onChanged();
+        } else {
+          fooBuilder_.clear();
+        }
+        bitField0_ = (bitField0_ & ~0x00080000);
+        return this;
+      }
+      /**
+       * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+       *
+       * <pre>
+       * a recursive type
+       * </pre>
+       */
+      public org.apache.avro.protobuf.Test.Foo.Builder getFooBuilder() {
+        bitField0_ |= 0x00080000;
+        onChanged();
+        return getFooFieldBuilder().getBuilder();
+      }
+      /**
+       * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+       *
+       * <pre>
+       * a recursive type
+       * </pre>
+       */
+      public org.apache.avro.protobuf.Test.FooOrBuilder getFooOrBuilder() {
+        if (fooBuilder_ != null) {
+          return fooBuilder_.getMessageOrBuilder();
+        } else {
+          return foo_;
+        }
+      }
+      /**
+       * <code>optional .org.apache.avro.protobuf.Foo foo = 18;</code>
+       *
+       * <pre>
+       * a recursive type
+       * </pre>
+       */
+      private com.google.protobuf.SingleFieldBuilder<
+          org.apache.avro.protobuf.Test.Foo, org.apache.avro.protobuf.Test.Foo.Builder, org.apache.avro.protobuf.Test.FooOrBuilder> 
+          getFooFieldBuilder() {
+        if (fooBuilder_ == null) {
+          fooBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+              org.apache.avro.protobuf.Test.Foo, org.apache.avro.protobuf.Test.Foo.Builder, org.apache.avro.protobuf.Test.FooOrBuilder>(
+                  foo_,
+                  getParentForChildren(),
+                  isClean());
+          foo_ = null;
+        }
+        return fooBuilder_;
+      }
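+
+      // Illustrative sketch (added commentary, not protoc output): mergeFoo()
+      // merges field-by-field into an existing value instead of replacing it:
+      //   b.setFoo(Foo.newBuilder().setInt32(1).build());
+      //   b.mergeFoo(Foo.newBuilder().setInt32(2).setInt64(3L).build());
+      //   b.getFoo().getInt32();  // 2: the later value wins per field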
+
+      // @@protoc_insertion_point(builder_scope:org.apache.avro.protobuf.Foo)
+    }
+
+    static {
+      defaultInstance = new Foo(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:org.apache.avro.protobuf.Foo)
+  }
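+
+  // Illustrative usage sketch (added commentary, not part of the generated
+  // file): int32 is the only required field of Foo, so build() throws
+  // newUninitializedMessageException unless it is set:
+  //   Foo foo = Foo.newBuilder()
+  //       .setInt32(1)
+  //       .setString("hello")
+  //       .addIntArray(7)
+  //       .build();
+  //   Foo copy = Foo.parseFrom(foo.toByteString());  // round trip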
+
+  public interface MOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+  }
+  /**
+   * Protobuf type {@code org.apache.avro.protobuf.M}
+   *
+   * <pre>
+   * a nested enum
+   * </pre>
+   */
+  public static final class M extends
+      com.google.protobuf.GeneratedMessage
+      implements MOrBuilder {
+    // Use M.newBuilder() to construct.
+    private M(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private M(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final M defaultInstance;
+    public static M getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public M getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    private final com.google.protobuf.UnknownFieldSet unknownFields;
+    @java.lang.Override
+    public final com.google.protobuf.UnknownFieldSet
+        getUnknownFields() {
+      return this.unknownFields;
+    }
+    private M(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      initFields();
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder();
+      try {
+        boolean done = false;
+        while (!done) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              done = true;
+              break;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                done = true;
+              }
+              break;
+            }
+          }
+        }
+      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+        throw e.setUnfinishedMessage(this);
+      } catch (java.io.IOException e) {
+        throw new com.google.protobuf.InvalidProtocolBufferException(
+            e.getMessage()).setUnfinishedMessage(this);
+      } finally {
+        this.unknownFields = unknownFields.build();
+        makeExtensionsImmutable();
+      }
+    }
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.avro.protobuf.Test.internal_static_org_apache_avro_protobuf_M_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.avro.protobuf.Test.internal_static_org_apache_avro_protobuf_M_fieldAccessorTable
+          .ensureFieldAccessorsInitialized(
+              org.apache.avro.protobuf.Test.M.class, org.apache.avro.protobuf.Test.M.Builder.class);
+    }
+
+    public static com.google.protobuf.Parser<M> PARSER =
+        new com.google.protobuf.AbstractParser<M>() {
+      public M parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new M(input, extensionRegistry);
+      }
+    };
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<M> getParserForType() {
+      return PARSER;
+    }
+
+    /**
+     * Protobuf enum {@code org.apache.avro.protobuf.M.N}
+     */
+    public enum N
+        implements com.google.protobuf.ProtocolMessageEnum {
+      /**
+       * <code>A = 1;</code>
+       */
+      A(0, 1),
+      ;
+
+      /**
+       * <code>A = 1;</code>
+       */
+      public static final int A_VALUE = 1;
+
+      public final int getNumber() { return value; }
+
+      public static N valueOf(int value) {
+        switch (value) {
+          case 1: return A;
+          default: return null;
+        }
+      }
+
+      public static com.google.protobuf.Internal.EnumLiteMap<N>
+          internalGetValueMap() {
+        return internalValueMap;
+      }
+      private static com.google.protobuf.Internal.EnumLiteMap<N>
+          internalValueMap =
+            new com.google.protobuf.Internal.EnumLiteMap<N>() {
+              public N findValueByNumber(int number) {
+                return N.valueOf(number);
+              }
+            };
+
+      public final com.google.protobuf.Descriptors.EnumValueDescriptor
+          getValueDescriptor() {
+        return getDescriptor().getValues().get(index);
+      }
+      public final com.google.protobuf.Descriptors.EnumDescriptor
+          getDescriptorForType() {
+        return getDescriptor();
+      }
+      public static final com.google.protobuf.Descriptors.EnumDescriptor
+          getDescriptor() {
+        return org.apache.avro.protobuf.Test.M.getDescriptor().getEnumTypes().get(0);
+      }
+
+      private static final N[] VALUES = values();
+
+      public static N valueOf(
+          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+        if (desc.getType() != getDescriptor()) {
+          throw new java.lang.IllegalArgumentException(
+            "EnumValueDescriptor is not for this type.");
+        }
+        return VALUES[desc.getIndex()];
+      }
+
+      private final int index;
+      private final int value;
+
+      private N(int index, int value) {
+        this.index = index;
+        this.value = value;
+      }
+
+      // @@protoc_insertion_point(enum_scope:org.apache.avro.protobuf.M.N)
+    }
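+
+    // Illustrative note (added commentary, not protoc output): N.valueOf(int)
+    // maps wire numbers to constants and returns null for unknown numbers:
+    //   M.N.valueOf(1);  // N.A
+    //   M.N.valueOf(2);  // null, so callers must handle unknown values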
+
+    private void initFields() {
+    }
+    private byte memoizedIsInitialized = -1;
+    public final boolean isInitialized() {
+      byte isInitialized = memoizedIsInitialized;
+      if (isInitialized != -1) return isInitialized == 1;
+
+      memoizedIsInitialized = 1;
+      return true;
+    }
+
+    public void writeTo(com.google.protobuf.CodedOutputStream output)
+                        throws java.io.IOException {
+      getSerializedSize();
+      getUnknownFields().writeTo(output);
+    }
+
+    private int memoizedSerializedSize = -1;
+    public int getSerializedSize() {
+      int size = memoizedSerializedSize;
+      if (size != -1) return size;
+
+      size = 0;
+      size += getUnknownFields().getSerializedSize();
+      memoizedSerializedSize = size;
+      return size;
+    }
+
+    private static final long serialVersionUID = 0L;
+    @java.lang.Override
+    protected java.lang.Object writeReplace()
+        throws java.io.ObjectStreamException {
+      return super.writeReplace();
+    }
+
+    public static org.apache.avro.protobuf.Test.M parseFrom(
+        com.google.protobuf.ByteString data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.avro.protobuf.Test.M parseFrom(
+        com.google.protobuf.ByteString data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.avro.protobuf.Test.M parseFrom(byte[] data)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data);
+    }
+    public static org.apache.avro.protobuf.Test.M parseFrom(
+        byte[] data,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws com.google.protobuf.InvalidProtocolBufferException {
+      return PARSER.parseFrom(data, extensionRegistry);
+    }
+    public static org.apache.avro.protobuf.Test.M parseFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.avro.protobuf.Test.M parseFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+    public static org.apache.avro.protobuf.Test.M parseDelimitedFrom(java.io.InputStream input)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input);
+    }
+    public static org.apache.avro.protobuf.Test.M parseDelimitedFrom(
+        java.io.InputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    }
+    public static org.apache.avro.protobuf.Test.M parseFrom(
+        com.google.protobuf.CodedInputStream input)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input);
+    }
+    public static org.apache.avro.protobuf.Test.M parseFrom(
+        com.google.protobuf.CodedInputStream input,
+        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+        throws java.io.IOException {
+      return PARSER.parseFrom(input, extensionRegistry);
+    }
+
+    public static Builder newBuilder() { return Builder.create(); }
+    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder(org.apache.avro.protobuf.Test.M prototype) {
+      return newBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() { return newBuilder(this); }
+
+    @java.lang.Override
+    protected Builder newBuilderForType(
+        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      Builder builder = new Builder(parent);
+      return builder;
+    }
+    /**
+     * Protobuf type {@code org.apache.avro.protobuf.M}
+     *
+     * <pre>
+     * a nested enum
+     * </pre>
+     */
+    public static final class Builder extends
+        com.google.protobuf.GeneratedMessage.Builder<Builder>
+       implements org.apache.avro.protobuf.Test.MOrBuilder {
+      public static final com.google.protobuf.Descriptors.Descriptor
+          getDescriptor() {
+        return org.apache.avro.protobuf.Test.internal_static_org_apache_avro_protobuf_M_descriptor;
+      }
+
+      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+          internalGetFieldAccessorTable() {
+        return org.apache.avro.protobuf.Test.internal_static_org_apache_avro_protobuf_M_fieldAccessorTable
+            .ensureFieldAccessorsInitialized(
+                org.apache.avro.protobuf.Test.M.class, org.apache.avro.protobuf.Test.M.Builder.class);
+      }
+
+      // Construct using org.apache.avro.protobuf.Test.M.newBuilder()
+      private Builder() {
+        maybeForceBuilderInitialization();
+      }
+
+      private Builder(
+          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        super(parent);
+        maybeForceBuilderInitialization();
+      }
+      private void maybeForceBuilderInitialization() {
+        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        }
+      }
+      private static Builder create() {
+        return new Builder();
+      }
+
+      public Builder clear() {
+        super.clear();
+        return this;
+      }
+
+      public Builder clone() {
+        return create().mergeFrom(buildPartial());
+      }
+
+      public com.google.protobuf.Descriptors.Descriptor
+          getDescriptorForType() {
+        return org.apache.avro.protobuf.Test.internal_static_org_apache_avro_protobuf_M_descriptor;
+      }
+
+      public org.apache.avro.protobuf.Test.M getDefaultInstanceForType() {
+        return org.apache.avro.protobuf.Test.M.getDefaultInstance();
+      }
+
+      public org.apache.avro.protobuf.Test.M build() {
+        org.apache.avro.protobuf.Test.M result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(result);
+        }
+        return result;
+      }
+
+      public org.apache.avro.protobuf.Test.M buildPartial() {
+        org.apache.avro.protobuf.Test.M result = new org.apache.avro.protobuf.Test.M(this);
+        onBuilt();
+        return result;
+      }
+
+      public Builder mergeFrom(com.google.protobuf.Message other) {
+        if (other instanceof org.apache.avro.protobuf.Test.M) {
+          return mergeFrom((org.apache.avro.protobuf.Test.M)other);
+        } else {
+          super.mergeFrom(other);
+          return this;
+        }
+      }
+
+      public Builder mergeFrom(org.apache.avro.protobuf.Test.M other) {
+        if (other == org.apache.avro.protobuf.Test.M.getDefaultInstance()) return this;
+        this.mergeUnknownFields(other.getUnknownFields());
+        return this;
+      }
+
+      public final boolean isInitialized() {
+        return true;
+      }
+
+      public Builder mergeFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws java.io.IOException {
+        org.apache.avro.protobuf.Test.M parsedMessage = null;
+        try {
+          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+          parsedMessage = (org.apache.avro.protobuf.Test.M) e.getUnfinishedMessage();
+          throw e;
+        } finally {
+          if (parsedMessage != null) {
+            mergeFrom(parsedMessage);
+          }
+        }
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:org.apache.avro.protobuf.M)
+    }
+
+    static {
+      defaultInstance = new M(true);
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:org.apache.avro.protobuf.M)
+  }
+
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_org_apache_avro_protobuf_Foo_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_org_apache_avro_protobuf_Foo_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_org_apache_avro_protobuf_M_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_org_apache_avro_protobuf_M_fieldAccessorTable;
+
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\034src/test/protobuf/test.proto\022\030org.apac" +
+      "he.avro.protobuf\"\275\003\n\003Foo\022\r\n\005int32\030\001 \002(\005\022" +
+      "\r\n\005int64\030\002 \001(\003\022\016\n\006uint32\030\003 \001(\r\022\016\n\006uint64" +
+      "\030\004 \001(\004\022\016\n\006sint32\030\005 \001(\021\022\016\n\006sint64\030\006 \001(\022\022\017" +
+      "\n\007fixed32\030\007 \001(\007\022\017\n\007fixed64\030\010 \001(\006\022\020\n\010sfix" +
+      "ed32\030\t \001(\017\022\020\n\010sfixed64\030\n \001(\020\022\r\n\005float\030\013 " +
+      "\001(\002\022\016\n\006double\030\014 \001(\001\022\014\n\004bool\030\r \001(\010\022\016\n\006str" +
+      "ing\030\016 \001(\t\022\r\n\005bytes\030\017 \001(\014\022,\n\004enum\030\020 \001(\0162\033" +
+      ".org.apache.avro.protobuf.A:\001Z\022\020\n\010intArr" +
+      "ay\030\021 \003(\005\022/\n\010fooArray\030\024 \003(\0132\035.org.apache.",
+      "avro.protobuf.Foo\022)\n\004syms\030\023 \003(\0162\033.org.ap" +
+      "ache.avro.protobuf.A\022*\n\003foo\030\022 \001(\0132\035.org." +
+      "apache.avro.protobuf.Foo\"\017\n\001M\"\n\n\001N\022\005\n\001A\020" +
+      "\001*\030\n\001A\022\005\n\001X\020\001\022\005\n\001Y\020\002\022\005\n\001Z\020\003"
+    };
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+        public com.google.protobuf.ExtensionRegistry assignDescriptors(
+            com.google.protobuf.Descriptors.FileDescriptor root) {
+          descriptor = root;
+          internal_static_org_apache_avro_protobuf_Foo_descriptor =
+            getDescriptor().getMessageTypes().get(0);
+          internal_static_org_apache_avro_protobuf_Foo_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_org_apache_avro_protobuf_Foo_descriptor,
+              new java.lang.String[] { "Int32", "Int64", "Uint32", "Uint64", "Sint32", "Sint64", "Fixed32", "Fixed64", "Sfixed32", "Sfixed64", "Float", "Double", "Bool", "String", "Bytes", "Enum", "IntArray", "FooArray", "Syms", "Foo", });
+          internal_static_org_apache_avro_protobuf_M_descriptor =
+            getDescriptor().getMessageTypes().get(1);
+          internal_static_org_apache_avro_protobuf_M_fieldAccessorTable = new
+            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+              internal_static_org_apache_avro_protobuf_M_descriptor,
+              new java.lang.String[] { });
+          return null;
+        }
+      };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+        }, assigner);
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}
diff --git a/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/TestProtobuf.java b/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/TestProtobuf.java
new file mode 100644
index 0000000..8cdfb81
--- /dev/null
+++ b/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/TestProtobuf.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.protobuf;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.specific.SpecificData;
+
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+
+import com.google.protobuf.ByteString;
+
+import org.apache.avro.protobuf.Test.Foo;
+import org.apache.avro.protobuf.Test.A;
+import org.apache.avro.protobuf.Test.M.N;
+
+public class TestProtobuf {
+  @Test public void testMessage() throws Exception {
+
+    System.out.println(ProtobufData.get().getSchema(Foo.class).toString(true));
+    Foo.Builder builder = Foo.newBuilder();
+    builder.setInt32(0);
+    builder.setInt64(2);
+    builder.setUint32(3);
+    builder.setUint64(4);
+    builder.setSint32(5);
+    builder.setSint64(6);
+    builder.setFixed32(7);
+    builder.setFixed64(8);
+    builder.setSfixed32(9);
+    builder.setSfixed64(10);
+    builder.setFloat(1.0F);
+    builder.setDouble(2.0);
+    builder.setBool(true);
+    builder.setString("foo");
+    builder.setBytes(ByteString.copyFromUtf8("bar"));
+    builder.setEnum(A.X);
+    builder.addIntArray(27);
+    builder.addSyms(A.Y);
+    Foo fooInner = builder.build();
+
+    Foo fooInArray = builder.build();
+    builder = Foo.newBuilder(fooInArray);
+    builder.addFooArray(fooInArray);
+
+    builder = Foo.newBuilder(fooInner);
+    builder.setFoo(fooInner);
+    Foo foo = builder.build();
+
+    System.out.println(foo);
+
+    ByteArrayOutputStream bao = new ByteArrayOutputStream();
+    ProtobufDatumWriter<Foo> w = new ProtobufDatumWriter<Foo>(Foo.class);
+    Encoder e = EncoderFactory.get().binaryEncoder(bao, null);
+    w.write(foo, e);
+    e.flush();
+    
+    Object o = new ProtobufDatumReader<Foo>(Foo.class).read
+      (null,
+       DecoderFactory.get().createBinaryDecoder
+       (new ByteArrayInputStream(bao.toByteArray()), null));
+
+    assertEquals(foo, o);
+  }
+
+  @Test public void testNestedEnum() throws Exception {
+    Schema s = ProtobufData.get().getSchema(N.class);
+    assertEquals(N.class.getName(), SpecificData.getClassName(s));
+  }
+
+}
diff --git a/lang/java/protobuf/src/test/protobuf/test.proto b/lang/java/protobuf/src/test/protobuf/test.proto
new file mode 100644
index 0000000..decd650
--- /dev/null
+++ b/lang/java/protobuf/src/test/protobuf/test.proto
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.protobuf;
+
+message Foo {
+  // all the primitive types
+  required    int32 int32    =  1;
+  optional    int64 int64    =  2;
+  optional   uint32 uint32   =  3;
+  optional   uint64 uint64   =  4;
+  optional   sint32 sint32   =  5;
+  optional   sint64 sint64   =  6;
+  optional  fixed32 fixed32  =  7;
+  optional  fixed64 fixed64  =  8;
+  optional sfixed32 sfixed32 =  9;
+  optional sfixed64 sfixed64 = 10;
+  optional    float float    = 11;
+  optional   double double   = 12;
+  optional     bool bool     = 13;
+  optional   string string   = 14;
+  optional    bytes bytes    = 15;
+  optional        A enum     = 16 [default = Z];
+
+  // some repeated types
+  repeated    int32 intArray = 17;
+  repeated    Foo   fooArray = 20;
+  repeated    A     syms = 19;
+
+  // a recursive type
+  optional     Foo  foo      = 18;
+
+}
+
+// an enum
+enum A {
+  X = 1;
+  Y = 2;
+  Z = 3;
+}
+
+// a nested enum
+message M {
+  enum N {
+    A = 1;
+  }
+}
diff --git a/lang/java/thrift/README b/lang/java/thrift/README
new file mode 100644
index 0000000..f05134a
--- /dev/null
+++ b/lang/java/thrift/README
@@ -0,0 +1,3 @@
+The Thrift-generated files are checked in so that developers who run the tests need not have the Thrift compiler installed.
+
+To regenerate the Thrift files, make sure the required version of the Thrift compiler (0.7) is installed, then run `mvn -Pthrift-generate generate-test-sources`.
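+
+The thrift-generate profile is roughly equivalent to invoking the Thrift compiler by hand and copying its output over the checked-in sources (paths below follow the maven-antrun-plugin configuration in pom.xml):
+
+    thrift --gen java:beans -o target/thrift-tmp src/test/thrift/test.thrift
+    cp -r target/thrift-tmp/gen-javabean/* src/test/java/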
diff --git a/lang/java/thrift/pom.xml b/lang/java/thrift/pom.xml
new file mode 100644
index 0000000..0df386c
--- /dev/null
+++ b/lang/java/thrift/pom.xml
@@ -0,0 +1,100 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <artifactId>avro-parent</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.8.0</version>
+    <relativePath>../</relativePath>
+  </parent>
+
+  <artifactId>avro-thrift</artifactId>
+
+  <name>Apache Avro Thrift Compatibility</name>
+  <description>Permit serialization of Thrift-generated classes as Avro data.</description>
+  <packaging>bundle</packaging>
+
+  <properties>
+    <osgi.import>
+      !org.apache.avro.thrift*,
+      org.apache.avro*;version="${project.version}",
+      org.apache.thrift*,
+      *
+    </osgi.import>
+    <osgi.export>org.apache.avro.thrift*;version="${project.version}"</osgi.export>
+  </properties>
+
+  <profiles>
+    <profile>
+      <id>thrift-generate</id>
+      <build>
+        <plugins>
+          <plugin>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+              <execution>
+                <phase>generate-test-sources</phase>
+                <configuration>
+                  <tasks>
+                    <mkdir dir="target/thrift-tmp"/>
+                    <exec executable="thrift">
+                      <arg value="--gen"/>
+                      <arg value="java:beans"/>
+                      <arg value="-o"/>
+                      <arg value="target/thrift-tmp"/>
+                      <arg value="src/test/thrift/test.thrift"/>
+                    </exec>
+                    <copy todir="src/test/java" overwrite="true">
+                      <fileset dir="target/thrift-tmp/gen-javabean"/>
+                    </copy>
+                  </tasks>
+                </configuration>
+                <goals>
+                  <goal>run</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+
+  <dependencies>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.thrift</groupId>
+      <artifactId>libthrift</artifactId>
+      <version>${thrift.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-core-asl</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+  </dependencies>
+
+</project>
diff --git a/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftData.java b/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftData.java
new file mode 100644
index 0000000..c78e25f
--- /dev/null
+++ b/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftData.java
@@ -0,0 +1,270 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.thrift;
+
+import java.util.List;
+import java.util.Arrays;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.nio.ByteBuffer;
+
+import org.apache.avro.Schema;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+
+import org.apache.avro.util.ClassUtils;
+import org.apache.thrift.TBase;
+import org.apache.thrift.TEnum;
+import org.apache.thrift.TFieldIdEnum;
+import org.apache.thrift.TFieldRequirementType;
+import org.apache.thrift.TUnion;
+import org.apache.thrift.protocol.TType;
+import org.apache.thrift.meta_data.FieldMetaData;
+import org.apache.thrift.meta_data.FieldValueMetaData;
+import org.apache.thrift.meta_data.EnumMetaData;
+import org.apache.thrift.meta_data.ListMetaData;
+import org.apache.thrift.meta_data.SetMetaData;
+import org.apache.thrift.meta_data.MapMetaData;
+import org.apache.thrift.meta_data.StructMetaData;
+
+/** Utilities for serializing Thrift data in Avro format. */
+public class ThriftData extends GenericData {
+  static final String THRIFT_TYPE = "thrift";
+  static final String THRIFT_PROP = "thrift";
+
+  private static final ThriftData INSTANCE = new ThriftData();
+
+  protected ThriftData() {}
+  
+  /** Return the singleton instance. */
+  public static ThriftData get() { return INSTANCE; }
+
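+  // Example, mirroring the usage in TestThrift (here Test stands for any
+  // Thrift-generated class):
+  //
+  //   Schema schema = ThriftData.get().getSchema(Test.class);
+  //   DatumWriter<Test> writer = new ThriftDatumWriter<Test>(Test.class);
+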
+  @Override
+  public DatumReader createDatumReader(Schema schema) {
+    return new ThriftDatumReader(schema, schema, this);
+  }
+
+  @Override
+  public DatumWriter createDatumWriter(Schema schema) {
+    return new ThriftDatumWriter(schema, this);
+  }
+
+  @Override
+  public void setField(Object r, String n, int pos, Object o) {
+    setField(r, n, pos, o, getRecordState(r, getSchema(r.getClass())));
+  }
+
+  @Override
+  public Object getField(Object r, String name, int pos) {
+    return getField(r, name, pos, getRecordState(r, getSchema(r.getClass())));
+  }
+
+  @Override
+  protected void setField(Object r, String n, int pos, Object v, Object state) {
+    if (v == null && r instanceof TUnion) return;
+    ((TBase)r).setFieldValue(((TFieldIdEnum[])state)[pos], v);
+  }
+
+  @Override
+  protected Object getField(Object record, String name, int pos, Object state) {
+    TFieldIdEnum f = ((TFieldIdEnum[])state)[pos];
+    TBase struct = (TBase)record;
+    if (struct.isSet(f))
+      return struct.getFieldValue(f);
+    return null;
+  }
+
+  private final Map<Schema,TFieldIdEnum[]> fieldCache =
+    new ConcurrentHashMap<Schema,TFieldIdEnum[]>();
+
+  @Override
+  @SuppressWarnings("unchecked")
+  protected Object getRecordState(Object r, Schema s) {
+    TFieldIdEnum[] fields = fieldCache.get(s);
+    if (fields == null) {                           // cache miss
+      fields = new TFieldIdEnum[s.getFields().size()];
+      Class c = r.getClass();
+      for (TFieldIdEnum f :
+          FieldMetaData.getStructMetaDataMap((Class<? extends TBase>) c).keySet())
+        fields[s.getField(f.getFieldName()).pos()] = f;
+      fieldCache.put(s, fields);                  // update cache
+    }
+    return fields;
+  }
+
+  @Override
+  protected String getSchemaName(Object datum) {
+    // support implicit conversion from thrift's i16
+    // to avro INT for thrift's optional fields
+    if (datum instanceof Short)
+      return Schema.Type.INT.getName();
+    // support implicit conversion from thrift's byte
+    // to avro INT for thrift's optional fields
+    if (datum instanceof Byte)
+      return Schema.Type.INT.getName();
+
+    return super.getSchemaName(datum);
+  }
+
+  @Override
+  protected boolean isRecord(Object datum) {
+    return datum instanceof TBase;
+  }
+
+  @Override
+  protected boolean isEnum(Object datum) {
+    return datum instanceof TEnum;
+  }
+
+  @Override
+  protected Schema getEnumSchema(Object datum) {
+    return getSchema(datum.getClass());
+  }
+
+  @Override
+  // setFieldValue takes ByteBuffer but getFieldValue returns byte[]
+  protected boolean isBytes(Object datum) {
+    if (datum instanceof ByteBuffer) return true;
+    if (datum == null) return false;
+    Class c = datum.getClass();
+    return c.isArray() && c.getComponentType() == Byte.TYPE;
+  }
+
+  @Override
+  public Object newRecord(Object old, Schema schema) {
+    try {
+      Class c = ClassUtils.forName(SpecificData.getClassName(schema));
+      if (c == null)
+        return super.newRecord(old, schema);     // punt to generic
+      if (c.isInstance(old))
+        return old;                               // reuse instance
+      return c.newInstance();                     // create new instance
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  protected Schema getRecordSchema(Object record) {
+    return getSchema(record.getClass());
+  }
+
+  private final Map<Class,Schema> schemaCache
+    = new ConcurrentHashMap<Class,Schema>();
+
+  /** Return a record schema given a Thrift-generated class. */
+  @SuppressWarnings("unchecked")
+  public Schema getSchema(Class c) {
+    Schema schema = schemaCache.get(c);
+
+    if (schema == null) {                         // cache miss
+      try {
+        if (TEnum.class.isAssignableFrom(c)) {    // enum
+          List<String> symbols = new ArrayList<String>();
+          for (Enum e : ((Class<? extends Enum>)c).getEnumConstants())
+            symbols.add(e.name());
+          schema = Schema.createEnum(c.getName(), null, null, symbols);
+        } else if (TBase.class.isAssignableFrom(c)) { // struct
+          schema = Schema.createRecord(c.getName(), null, null,
+                                       Throwable.class.isAssignableFrom(c));
+          List<Field> fields = new ArrayList<Field>();
+          for (FieldMetaData f :
+                 FieldMetaData.getStructMetaDataMap((Class<? extends TBase>) c).values()) {
+            Schema s = getSchema(f.valueMetaData);
+            if (f.requirementType == TFieldRequirementType.OPTIONAL
+                && (s.getType() != Schema.Type.UNION))
+              s = nullable(s);
+            fields.add(new Field(f.fieldName, s, null, null));
+          }
+          schema.setFields(fields);
+        } else {
+          throw new RuntimeException("Not a Thrift-generated class: "+c);
+        }
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+      schemaCache.put(c, schema);                 // update cache
+    }
+    return schema;
+  }
+
+  private static final Schema NULL = Schema.create(Schema.Type.NULL);
+
+  private Schema getSchema(FieldValueMetaData f) {
+    switch (f.type) {
+    case TType.BOOL:
+      return Schema.create(Schema.Type.BOOLEAN);
+    case TType.BYTE:
+      Schema b = Schema.create(Schema.Type.INT);
+      b.addProp(THRIFT_PROP, "byte");
+      return b;
+    case TType.I16:
+      Schema s = Schema.create(Schema.Type.INT);
+      s.addProp(THRIFT_PROP, "short");
+      return s;
+    case TType.I32:
+      return Schema.create(Schema.Type.INT);
+    case TType.I64:
+      return Schema.create(Schema.Type.LONG);
+    case TType.DOUBLE:
+      return Schema.create(Schema.Type.DOUBLE);
+    case TType.ENUM:
+      EnumMetaData enumMeta = (EnumMetaData)f;
+      return nullable(getSchema(enumMeta.enumClass));
+    case TType.LIST:
+      ListMetaData listMeta = (ListMetaData)f;
+      return nullable(Schema.createArray(getSchema(listMeta.elemMetaData)));
+    case TType.MAP:
+      MapMetaData mapMeta = (MapMetaData)f;
+      if (mapMeta.keyMetaData.type != TType.STRING)
+        throw new AvroRuntimeException("Map keys must be strings: "+f);
+      Schema map = Schema.createMap(getSchema(mapMeta.valueMetaData));
+      GenericData.setStringType(map, GenericData.StringType.String);
+      return nullable(map);
+    case TType.SET:
+      SetMetaData setMeta = (SetMetaData)f;
+      Schema set = Schema.createArray(getSchema(setMeta.elemMetaData));
+      set.addProp(THRIFT_PROP, "set");
+      return nullable(set);
+    case TType.STRING:
+      if (f.isBinary())
+        return nullable(Schema.create(Schema.Type.BYTES));
+      Schema string = Schema.create(Schema.Type.STRING);
+      GenericData.setStringType(string, GenericData.StringType.String);
+      return nullable(string);
+    case TType.STRUCT:
+      StructMetaData structMeta = (StructMetaData)f;
+      Schema record = getSchema(structMeta.structClass);
+      return nullable(record);
+    case TType.VOID:
+      return NULL;
+    default:
+      throw new RuntimeException("Unexpected type in field: "+f);
+    }
+  }
+
+  private Schema nullable(Schema schema) {
+    return Schema.createUnion(Arrays.asList(new Schema[] {NULL, schema}));
+  }
+
+}
diff --git a/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftDatumReader.java b/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftDatumReader.java
new file mode 100644
index 0000000..56aa5f4
--- /dev/null
+++ b/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftDatumReader.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.thrift;
+
+import java.io.IOException;
+import java.util.Set;
+import java.util.HashSet;
+
+import org.apache.avro.Schema;
+import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.util.ClassUtils;
+
+/** {@link org.apache.avro.io.DatumReader DatumReader} for generated Thrift
+ * classes. */
+public class ThriftDatumReader<T> extends GenericDatumReader<T> {
+  public ThriftDatumReader() {
+    this(null, null, ThriftData.get());
+  }
+
+  public ThriftDatumReader(Class<T> c) {
+    this(ThriftData.get().getSchema(c));
+  }
+
+  /** Construct where the writer's and reader's schemas are the same. */
+  public ThriftDatumReader(Schema schema) {
+    this(schema, schema, ThriftData.get());
+  }
+
+  /** Construct given writer's and reader's schema. */
+  public ThriftDatumReader(Schema writer, Schema reader) {
+    this(writer, reader, ThriftData.get());
+  }
+
+  protected ThriftDatumReader(Schema writer, Schema reader, ThriftData data) {
+    super(writer, reader, data);
+  }
+
+  @Override
+  protected Object createEnum(String symbol, Schema schema) {
+    try {
+      Class c = ClassUtils.forName(SpecificData.getClassName(schema));
+      if (c == null) return super.createEnum(symbol, schema); // punt to generic
+      return Enum.valueOf(c, symbol);
+    } catch (Exception e) {
+      throw new AvroRuntimeException(e);
+    }
+  }
+
+  @Override
+  protected Object readInt(Object old, Schema s, Decoder in)
+    throws IOException {
+    String type = s.getProp(ThriftData.THRIFT_PROP);
+    int value = in.readInt();
+    if (type != null) {
+      if ("byte".equals(type)) return (byte)value;
+      if ("short".equals(type)) return (short)value;
+    }
+    return value;
+  }
+
+  @Override
+  protected Object newArray(Object old, int size, Schema schema) {
+    if ("set".equals(schema.getProp(ThriftData.THRIFT_PROP))) {
+      if (old instanceof Set) {
+        ((Set) old).clear();
+        return old;
+      }
+      return new HashSet();
+    } else {
+      return super.newArray(old, size, schema);
+    }
+  }
+
+}
diff --git a/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftDatumWriter.java b/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftDatumWriter.java
new file mode 100644
index 0000000..9ef3aed
--- /dev/null
+++ b/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftDatumWriter.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.thrift;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.generic.GenericDatumWriter;
+
+import java.nio.ByteBuffer;
+import java.io.IOException;
+
+/** {@link org.apache.avro.io.DatumWriter DatumWriter} for generated Thrift
+ * classes. */
+public class ThriftDatumWriter<T> extends GenericDatumWriter<T> {
+  public ThriftDatumWriter() {
+    super(ThriftData.get());
+  }
+
+  public ThriftDatumWriter(Class<T> c) {
+    super(ThriftData.get().getSchema(c), ThriftData.get());
+  }
+  
+  public ThriftDatumWriter(Schema schema) {
+    super(schema, ThriftData.get());
+  }
+  
+  protected ThriftDatumWriter(Schema root, ThriftData thriftData) {
+    super(root, thriftData);
+  }
+  
+  protected ThriftDatumWriter(ThriftData thriftData) {
+    super(thriftData);
+  }
+
+  @Override
+  protected void writeBytes(Object datum, Encoder out) throws IOException {
+    // Thrift asymmetry: the setter takes a ByteBuffer but the getter returns byte[]
+    out.writeBytes(ByteBuffer.wrap((byte[])datum));
+  }
+
+}
+
diff --git a/lang/java/thrift/src/main/java/org/apache/avro/thrift/package.html b/lang/java/thrift/src/main/java/org/apache/avro/thrift/package.html
new file mode 100644
index 0000000..c20aa3c
--- /dev/null
+++ b/lang/java/thrift/src/main/java/org/apache/avro/thrift/package.html
@@ -0,0 +1,46 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body><a href="http://thrift.apache.org/">Thrift</a> compatibility.
+
+<p>Thrift primitive types are mapped to Avro schemas as follows:</p>
+<table>
+<tr><th>Thrift type</th><th>Avro schema</th></tr>
+<tr><td>bool</td><td>"boolean"</td></tr>
+<tr><td>byte</td><td>{"type": "int", "thrift": "byte"}</td></tr>
+<tr><td>i16</td><td>{"type": "int", "thrift": "short"}</td></tr>
+<tr><td>i32</td><td>"int"</td></tr>
+<tr><td>i64</td><td>"long"</td></tr>
+<tr><td>double</td><td>"double"</td></tr>
+<tr><td>string</td><td>"string"</td></tr>
+<tr><td>binary</td><td>"bytes"</td></tr>
+</table>
+
+<p>Thrift complex types are mapped to Avro complex types as follows:</p>
+<table>
+<tr><th>Thrift</th><th>Avro</th></tr>
+<tr><td>struct</td><td>record</td></tr>
+<tr><td>enum</td><td>enum</td></tr>
+<tr><td>list</td><td>array</td></tr>
+<tr><td>set</td><td>array <em>(Note: "thrift":"set" is added to schema.)</em></td></tr>
+<tr><td>map</td><td>map <em>(Note: only string keys are permitted.)</em></td></tr>
+</table>
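+
+<p>A minimal serialization sketch, following the round trip exercised by the
+unit tests (it assumes a Thrift-generated class <code>Test</code>, a populated
+instance <code>test</code>, and an <code>OutputStream</code> named
+<code>out</code>):</p>
+
+<pre>
+Schema schema = ThriftData.get().getSchema(Test.class);
+ThriftDatumWriter&lt;Test&gt; writer = new ThriftDatumWriter&lt;Test&gt;(Test.class);
+Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
+writer.write(test, encoder);
+encoder.flush();
+</pre>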
+
+</body>
+</html>
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/TestThrift.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/TestThrift.java
new file mode 100644
index 0000000..e7e618d
--- /dev/null
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/TestThrift.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.thrift;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.nio.ByteBuffer;
+import java.util.Collections;
+
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.avro.thrift.test.Test;
+import org.apache.avro.thrift.test.FooOrBar;
+import org.apache.avro.thrift.test.E;
+import org.apache.avro.thrift.test.Nested;
+
+public class TestThrift {
+
+  @org.junit.Test public void testStruct() throws Exception {
+
+    System.out.println(ThriftData.get().getSchema(Test.class).toString(true));
+
+    Test test = new Test();
+    test.setBoolField(true);
+    test.setByteField((byte)2);
+    test.setI16Field((short)3);
+    test.setI16OptionalField((short)14);
+    test.setI32Field(4);
+    test.setI64Field(5L);
+    test.setDoubleField(2.0);
+    test.setStringField("foo");
+    test.setBinaryField(ByteBuffer.wrap(new byte[] {0,-1}));
+    test.setMapField(Collections.singletonMap("x", 1));
+    test.setListField(Collections.singletonList(7));
+    test.setSetField(Collections.singleton(8));
+    test.setEnumField(E.X);
+    test.setStructField(new Nested(9));
+    test.setFooOrBar(FooOrBar.foo("x"));
+
+    System.out.println(test);
+
+    check(test);
+  }
+
+  @org.junit.Test public void testOptionals() throws Exception {
+
+    Test test = new Test();
+    test.setBoolField(true);
+    test.setByteField((byte)2);
+    test.setByteOptionalField((byte)4);
+    test.setI16Field((short)3);
+    test.setI16OptionalField((short)15);
+    test.setI64Field(5L);
+    test.setDoubleField(2.0);
+
+    System.out.println(test);
+
+    check(test);
+  }
+
+  private void check(Test test) throws Exception {
+
+    ByteArrayOutputStream bao = new ByteArrayOutputStream();
+    ThriftDatumWriter<Test> w = new ThriftDatumWriter<Test>(Test.class);
+    Encoder e = EncoderFactory.get().binaryEncoder(bao, null);
+    w.write(test, e);
+    e.flush();
+
+    Object o = new ThriftDatumReader<Test>(Test.class).read
+      (null,
+       DecoderFactory.get().createBinaryDecoder
+       (new ByteArrayInputStream(bao.toByteArray()), null));
+
+    assertEquals(test, o);
+
+  }
+}
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/E.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/E.java
new file mode 100644
index 0000000..b9dbd78
--- /dev/null
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/E.java
@@ -0,0 +1,48 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.1)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.avro.thrift.test;
+
+
+import java.util.Map;
+import java.util.HashMap;
+import org.apache.thrift.TEnum;
+
+public enum E implements org.apache.thrift.TEnum {
+  X(1),
+  Y(2),
+  Z(3);
+
+  private final int value;
+
+  private E(int value) {
+    this.value = value;
+  }
+
+  /**
+   * Get the integer value of this enum value, as defined in the Thrift IDL.
+   */
+  public int getValue() {
+    return value;
+  }
+
+  /**
+   * Find the enum type by its integer value, as defined in the Thrift IDL.
+   * @return null if the value is not found.
+   */
+  public static E findByValue(int value) { 
+    switch (value) {
+      case 1:
+        return X;
+      case 2:
+        return Y;
+      case 3:
+        return Z;
+      default:
+        return null;
+    }
+  }
+}
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Error.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Error.java
new file mode 100644
index 0000000..4dfd46d
--- /dev/null
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Error.java
@@ -0,0 +1,385 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.1)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.avro.thrift.test;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class Error extends TException implements org.apache.thrift.TBase<Error, Error._Fields>, java.io.Serializable, Cloneable, Comparable<Error> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Error");
+
+  private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new ErrorStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new ErrorTupleSchemeFactory());
+  }
+
+  private String message; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    MESSAGE((short)1, "message");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if it's not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // MESSAGE
+          return MESSAGE;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if it's not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.MESSAGE, new org.apache.thrift.meta_data.FieldMetaData("message", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Error.class, metaDataMap);
+  }
+
+  public Error() {
+  }
+
+  public Error(
+    String message)
+  {
+    this();
+    this.message = message;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public Error(Error other) {
+    if (other.isSetMessage()) {
+      this.message = other.message;
+    }
+  }
+
+  public Error deepCopy() {
+    return new Error(this);
+  }
+
+  @Override
+  public void clear() {
+    this.message = null;
+  }
+
+  public String getMessage() {
+    return this.message;
+  }
+
+  public void setMessage(String message) {
+    this.message = message;
+  }
+
+  public void unsetMessage() {
+    this.message = null;
+  }
+
+  /** Returns true if field message is set (has been assigned a value) and false otherwise */
+  public boolean isSetMessage() {
+    return this.message != null;
+  }
+
+  public void setMessageIsSet(boolean value) {
+    if (!value) {
+      this.message = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case MESSAGE:
+      if (value == null) {
+        unsetMessage();
+      } else {
+        setMessage((String)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case MESSAGE:
+      return getMessage();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case MESSAGE:
+      return isSetMessage();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof Error)
+      return this.equals((Error)that);
+    return false;
+  }
+
+  public boolean equals(Error that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_message = true && this.isSetMessage();
+    boolean that_present_message = true && that.isSetMessage();
+    if (this_present_message || that_present_message) {
+      if (!(this_present_message && that_present_message))
+        return false;
+      if (!this.message.equals(that.message))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    return 0;
+  }
+
+  @Override
+  public int compareTo(Error other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+
+    lastComparison = Boolean.valueOf(isSetMessage()).compareTo(other.isSetMessage());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetMessage()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.message, other.message);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("Error(");
+    boolean first = true;
+
+    sb.append("message:");
+    if (this.message == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.message);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class ErrorStandardSchemeFactory implements SchemeFactory {
+    public ErrorStandardScheme getScheme() {
+      return new ErrorStandardScheme();
+    }
+  }
+
+  private static class ErrorStandardScheme extends StandardScheme<Error> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, Error struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // MESSAGE
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.message = iprot.readString();
+              struct.setMessageIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, Error struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.message != null) {
+        oprot.writeFieldBegin(MESSAGE_FIELD_DESC);
+        oprot.writeString(struct.message);
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class ErrorTupleSchemeFactory implements SchemeFactory {
+    public ErrorTupleScheme getScheme() {
+      return new ErrorTupleScheme();
+    }
+  }
+
+  private static class ErrorTupleScheme extends TupleScheme<Error> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, Error struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      BitSet optionals = new BitSet();
+      if (struct.isSetMessage()) {
+        optionals.set(0);
+      }
+      oprot.writeBitSet(optionals, 1);
+      if (struct.isSetMessage()) {
+        oprot.writeString(struct.message);
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, Error struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      BitSet incoming = iprot.readBitSet(1);
+      if (incoming.get(0)) {
+        struct.message = iprot.readString();
+        struct.setMessageIsSet(true);
+      }
+    }
+  }
+
+}
+
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Foo.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Foo.java
new file mode 100644
index 0000000..f731ccf
--- /dev/null
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Foo.java
@@ -0,0 +1,1996 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.1)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.avro.thrift.test;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class Foo {
+
+  public interface Iface {
+
+    public void ping() throws org.apache.thrift.TException;
+
+    public int add(int num1, int num2) throws org.apache.thrift.TException;
+
+    public void zip() throws org.apache.thrift.TException;
+
+  }
+
+  public interface AsyncIface {
+
+    public void ping(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+    public void add(int num1, int num2, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+    public void zip(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+  }
+
+  public static class Client extends org.apache.thrift.TServiceClient implements Iface {
+    public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {
+      public Factory() {}
+      public Client getClient(org.apache.thrift.protocol.TProtocol prot) {
+        return new Client(prot);
+      }
+      public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
+        return new Client(iprot, oprot);
+      }
+    }
+
+    public Client(org.apache.thrift.protocol.TProtocol prot)
+    {
+      super(prot, prot);
+    }
+
+    public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
+      super(iprot, oprot);
+    }
+
+    public void ping() throws org.apache.thrift.TException
+    {
+      send_ping();
+      recv_ping();
+    }
+
+    public void send_ping() throws org.apache.thrift.TException
+    {
+      ping_args args = new ping_args();
+      sendBase("ping", args);
+    }
+
+    public void recv_ping() throws org.apache.thrift.TException
+    {
+      ping_result result = new ping_result();
+      receiveBase(result, "ping");
+      return;
+    }
+
+    public int add(int num1, int num2) throws org.apache.thrift.TException
+    {
+      send_add(num1, num2);
+      return recv_add();
+    }
+
+    public void send_add(int num1, int num2) throws org.apache.thrift.TException
+    {
+      add_args args = new add_args();
+      args.setNum1(num1);
+      args.setNum2(num2);
+      sendBase("add", args);
+    }
+
+    public int recv_add() throws org.apache.thrift.TException
+    {
+      add_result result = new add_result();
+      receiveBase(result, "add");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "add failed: unknown result");
+    }
+
+    public void zip() throws org.apache.thrift.TException
+    {
+      send_zip();
+    }
+
+    public void send_zip() throws org.apache.thrift.TException
+    {
+      zip_args args = new zip_args();
+      sendBase("zip", args);
+    }
+
+  }
+  public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
+    public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
+      private org.apache.thrift.async.TAsyncClientManager clientManager;
+      private org.apache.thrift.protocol.TProtocolFactory protocolFactory;
+      public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {
+        this.clientManager = clientManager;
+        this.protocolFactory = protocolFactory;
+      }
+      public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {
+        return new AsyncClient(protocolFactory, clientManager, transport);
+      }
+    }
+
+    public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {
+      super(protocolFactory, clientManager, transport);
+    }
+
+    public void ping(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      ping_call method_call = new ping_call(resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class ping_call extends org.apache.thrift.async.TAsyncMethodCall {
+      public ping_call(org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("ping", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        ping_args args = new ping_args();
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public void getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        (new Client(prot)).recv_ping();
+      }
+    }
+
+    public void add(int num1, int num2, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      add_call method_call = new add_call(num1, num2, resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class add_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private int num1;
+      private int num2;
+      public add_call(int num1, int num2, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.num1 = num1;
+        this.num2 = num2;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("add", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        add_args args = new add_args();
+        args.setNum1(num1);
+        args.setNum2(num2);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public int getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+        return (new Client(prot)).recv_add();
+      }
+    }
+
+    public void zip(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+      checkReady();
+      zip_call method_call = new zip_call(resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    public static class zip_call extends org.apache.thrift.async.TAsyncMethodCall {
+      public zip_call(org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
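+        // The trailing 'true' marks this call as oneway: no response frame is expected.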
+        super(client, protocolFactory, transport, resultHandler, true);
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("zip", org.apache.thrift.protocol.TMessageType.CALL, 0));
+        zip_args args = new zip_args();
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public void getResult() throws org.apache.thrift.TException {
+        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
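+        // zip is oneway, so there is no reply payload to decode; the transport and protocol
+        // above are unused boilerplate emitted by the generator.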
+      }
+    }
+
+  }
+
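+  // Synchronous processor: dispatches each incoming message to the matching ProcessFunction
+  // by method name and writes the result struct back to the caller.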
+  public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {
+    private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
+    public Processor(I iface) {
+      super(iface, getProcessMap(new HashMap<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>>()));
+    }
+
+    protected Processor(I iface, Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
+      super(iface, getProcessMap(processMap));
+    }
+
+    private static <I extends Iface> Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> getProcessMap(Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
+      processMap.put("ping", new ping());
+      processMap.put("add", new add());
+      processMap.put("zip", new zip());
+      return processMap;
+    }
+
+    public static class ping<I extends Iface> extends org.apache.thrift.ProcessFunction<I, ping_args> {
+      public ping() {
+        super("ping");
+      }
+
+      public ping_args getEmptyArgsInstance() {
+        return new ping_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public ping_result getResult(I iface, ping_args args) throws org.apache.thrift.TException {
+        ping_result result = new ping_result();
+        iface.ping();
+        return result;
+      }
+    }
+
+    public static class add<I extends Iface> extends org.apache.thrift.ProcessFunction<I, add_args> {
+      public add() {
+        super("add");
+      }
+
+      public add_args getEmptyArgsInstance() {
+        return new add_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public add_result getResult(I iface, add_args args) throws org.apache.thrift.TException {
+        add_result result = new add_result();
+        result.success = iface.add(args.num1, args.num2);
+        result.setSuccessIsSet(true);
+        return result;
+      }
+    }
+
+    public static class zip<I extends Iface> extends org.apache.thrift.ProcessFunction<I, zip_args> {
+      public zip() {
+        super("zip");
+      }
+
+      public zip_args getEmptyArgsInstance() {
+        return new zip_args();
+      }
+
+      protected boolean isOneway() {
+        return true;
+      }
+
+      public org.apache.thrift.TBase getResult(I iface, zip_args args) throws org.apache.thrift.TException {
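+        // Oneway: invoke the handler and return null; no result frame is written.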
+        iface.zip();
+        return null;
+      }
+    }
+
+  }
+
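+  // Asynchronous processor: each AsyncProcessFunction completes through a callback that writes
+  // a REPLY frame on success or an EXCEPTION frame on error.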
+  public static class AsyncProcessor<I extends AsyncIface> extends org.apache.thrift.TBaseAsyncProcessor<I> {
+    private static final Logger LOGGER = LoggerFactory.getLogger(AsyncProcessor.class.getName());
+    public AsyncProcessor(I iface) {
+      super(iface, getProcessMap(new HashMap<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>>()));
+    }
+
+    protected AsyncProcessor(I iface, Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
+      super(iface, getProcessMap(processMap));
+    }
+
+    private static <I extends AsyncIface> Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> getProcessMap(Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
+      processMap.put("ping", new ping());
+      processMap.put("add", new add());
+      processMap.put("zip", new zip());
+      return processMap;
+    }
+
+    public static class ping<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, ping_args, Void> {
+      public ping() {
+        super("ping");
+      }
+
+      public ping_args getEmptyArgsInstance() {
+        return new ping_args();
+      }
+
+      public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+        final org.apache.thrift.AsyncProcessFunction fcall = this;
+        return new AsyncMethodCallback<Void>() { 
+          public void onComplete(Void o) {
+            ping_result result = new ping_result();
+            try {
+              fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY, seqid);
+              return;
+            } catch (Exception e) {
+              LOGGER.error("Exception writing to internal frame buffer", e);
+            }
+            fb.close();
+          }
+          public void onError(Exception e) {
+            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+            org.apache.thrift.TBase msg;
+            ping_result result = new ping_result();
+            {
+              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+              msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
+            }
+            try {
+              fcall.sendResponse(fb, msg, msgType, seqid);
+              return;
+            } catch (Exception ex) {
+              LOGGER.error("Exception writing to internal frame buffer", ex);
+            }
+            fb.close();
+          }
+        };
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public void start(I iface, ping_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
+        iface.ping(resultHandler);
+      }
+    }
+
+    public static class add<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, add_args, Integer> {
+      public add() {
+        super("add");
+      }
+
+      public add_args getEmptyArgsInstance() {
+        return new add_args();
+      }
+
+      public AsyncMethodCallback<Integer> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+        final org.apache.thrift.AsyncProcessFunction fcall = this;
+        return new AsyncMethodCallback<Integer>() { 
+          public void onComplete(Integer o) {
+            add_result result = new add_result();
+            result.success = o;
+            result.setSuccessIsSet(true);
+            try {
+              fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY, seqid);
+              return;
+            } catch (Exception e) {
+              LOGGER.error("Exception writing to internal frame buffer", e);
+            }
+            fb.close();
+          }
+          public void onError(Exception e) {
+            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+            org.apache.thrift.TBase msg;
+            add_result result = new add_result();
+            {
+              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+              msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
+            }
+            try {
+              fcall.sendResponse(fb, msg, msgType, seqid);
+              return;
+            } catch (Exception ex) {
+              LOGGER.error("Exception writing to internal frame buffer", ex);
+            }
+            fb.close();
+          }
+        };
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public void start(I iface, add_args args, org.apache.thrift.async.AsyncMethodCallback<Integer> resultHandler) throws TException {
+        iface.add(args.num1, args.num2, resultHandler);
+      }
+    }
+
+    public static class zip<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, zip_args, Void> {
+      public zip() {
+        super("zip");
+      }
+
+      public zip_args getEmptyArgsInstance() {
+        return new zip_args();
+      }
+
+      public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+        final org.apache.thrift.AsyncProcessFunction fcall = this;
+        return new AsyncMethodCallback<Void>() { 
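+          // Oneway: the callbacks are intentionally empty because no response is ever sent.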
+          public void onComplete(Void o) {
+          }
+          public void onError(Exception e) {
+          }
+        };
+      }
+
+      protected boolean isOneway() {
+        return true;
+      }
+
+      public void start(I iface, zip_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
+        iface.zip(resultHandler);
+      }
+    }
+
+  }
+
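+  // Argument and result structs: plain TBase implementations. ping and zip carry no fields,
+  // so their _Fields enums declare no constants.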
+  public static class ping_args implements org.apache.thrift.TBase<ping_args, ping_args._Fields>, java.io.Serializable, Cloneable, Comparable<ping_args>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ping_args");
+
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new ping_argsStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new ping_argsTupleSchemeFactory());
+    }
+
+
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+      ;
+
+      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if it's not found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if it's not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ping_args.class, metaDataMap);
+    }
+
+    public ping_args() {
+    }
+
+    /**
+     * Performs a deep copy on <i>other</i>.
+     */
+    public ping_args(ping_args other) {
+    }
+
+    public ping_args deepCopy() {
+      return new ping_args(this);
+    }
+
+    @Override
+    public void clear() {
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+      switch (field) {
+      }
+    }
+
+    public Object getFieldValue(_Fields field) {
+      switch (field) {
+      }
+      throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+      if (field == null) {
+        throw new IllegalArgumentException();
+      }
+
+      switch (field) {
+      }
+      throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof ping_args)
+        return this.equals((ping_args)that);
+      return false;
+    }
+
+    public boolean equals(ping_args that) {
+      if (that == null)
+        return false;
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
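+      // Generated placeholder: a constant hash keeps equals/hashCode consistent, at the cost
+      // of hash distribution.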
+      return 0;
+    }
+
+    @Override
+    public int compareTo(ping_args other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+
+      return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+      return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("ping_args(");
+      boolean first = true;
+
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+      try {
+        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private static class ping_argsStandardSchemeFactory implements SchemeFactory {
+      public ping_argsStandardScheme getScheme() {
+        return new ping_argsStandardScheme();
+      }
+    }
+
+    private static class ping_argsStandardScheme extends StandardScheme<ping_args> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, ping_args struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, ping_args struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class ping_argsTupleSchemeFactory implements SchemeFactory {
+      public ping_argsTupleScheme getScheme() {
+        return new ping_argsTupleScheme();
+      }
+    }
+
+    private static class ping_argsTupleScheme extends TupleScheme<ping_args> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, ping_args struct) throws org.apache.thrift.TException {
+        TTupleProtocol oprot = (TTupleProtocol) prot;
+      }
+
+      @Override
+      public void read(org.apache.thrift.protocol.TProtocol prot, ping_args struct) throws org.apache.thrift.TException {
+        TTupleProtocol iprot = (TTupleProtocol) prot;
+      }
+    }
+
+  }
+
+  public static class ping_result implements org.apache.thrift.TBase<ping_result, ping_result._Fields>, java.io.Serializable, Cloneable, Comparable<ping_result>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ping_result");
+
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new ping_resultStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new ping_resultTupleSchemeFactory());
+    }
+
+
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+      ;
+
+      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if it's not found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if it's not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ping_result.class, metaDataMap);
+    }
+
+    public ping_result() {
+    }
+
+    /**
+     * Performs a deep copy on <i>other</i>.
+     */
+    public ping_result(ping_result other) {
+    }
+
+    public ping_result deepCopy() {
+      return new ping_result(this);
+    }
+
+    @Override
+    public void clear() {
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+      switch (field) {
+      }
+    }
+
+    public Object getFieldValue(_Fields field) {
+      switch (field) {
+      }
+      throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+      if (field == null) {
+        throw new IllegalArgumentException();
+      }
+
+      switch (field) {
+      }
+      throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof ping_result)
+        return this.equals((ping_result)that);
+      return false;
+    }
+
+    public boolean equals(ping_result that) {
+      if (that == null)
+        return false;
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      return 0;
+    }
+
+    @Override
+    public int compareTo(ping_result other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+
+      return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+      return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("ping_result(");
+      boolean first = true;
+
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+      try {
+        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private static class ping_resultStandardSchemeFactory implements SchemeFactory {
+      public ping_resultStandardScheme getScheme() {
+        return new ping_resultStandardScheme();
+      }
+    }
+
+    private static class ping_resultStandardScheme extends StandardScheme<ping_result> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, ping_result struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, ping_result struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class ping_resultTupleSchemeFactory implements SchemeFactory {
+      public ping_resultTupleScheme getScheme() {
+        return new ping_resultTupleScheme();
+      }
+    }
+
+    private static class ping_resultTupleScheme extends TupleScheme<ping_result> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, ping_result struct) throws org.apache.thrift.TException {
+        TTupleProtocol oprot = (TTupleProtocol) prot;
+      }
+
+      @Override
+      public void read(org.apache.thrift.protocol.TProtocol prot, ping_result struct) throws org.apache.thrift.TException {
+        TTupleProtocol iprot = (TTupleProtocol) prot;
+      }
+    }
+
+  }
+
+  public static class add_args implements org.apache.thrift.TBase<add_args, add_args._Fields>, java.io.Serializable, Cloneable, Comparable<add_args>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("add_args");
+
+    private static final org.apache.thrift.protocol.TField NUM1_FIELD_DESC = new org.apache.thrift.protocol.TField("num1", org.apache.thrift.protocol.TType.I32, (short)1);
+    private static final org.apache.thrift.protocol.TField NUM2_FIELD_DESC = new org.apache.thrift.protocol.TField("num2", org.apache.thrift.protocol.TType.I32, (short)2);
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new add_argsStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new add_argsTupleSchemeFactory());
+    }
+
+    private int num1; // required
+    private int num2; // required
+
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+      NUM1((short)1, "num1"),
+      NUM2((short)2, "num2");
+
+      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if it's not found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          case 1: // NUM1
+            return NUM1;
+          case 2: // NUM2
+            return NUM2;
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if it's not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+
+    // isset id assignments
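+    // num1 and num2 are primitive ints and cannot be null, so assignment state is tracked
+    // in a bitfield rather than via null checks.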
+    private static final int __NUM1_ISSET_ID = 0;
+    private static final int __NUM2_ISSET_ID = 1;
+    private byte __isset_bitfield = 0;
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      tmpMap.put(_Fields.NUM1, new org.apache.thrift.meta_data.FieldMetaData("num1", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+      tmpMap.put(_Fields.NUM2, new org.apache.thrift.meta_data.FieldMetaData("num2", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(add_args.class, metaDataMap);
+    }
+
+    public add_args() {
+    }
+
+    public add_args(
+      int num1,
+      int num2)
+    {
+      this();
+      this.num1 = num1;
+      setNum1IsSet(true);
+      this.num2 = num2;
+      setNum2IsSet(true);
+    }
+
+    /**
+     * Performs a deep copy on <i>other</i>.
+     */
+    public add_args(add_args other) {
+      __isset_bitfield = other.__isset_bitfield;
+      this.num1 = other.num1;
+      this.num2 = other.num2;
+    }
+
+    public add_args deepCopy() {
+      return new add_args(this);
+    }
+
+    @Override
+    public void clear() {
+      setNum1IsSet(false);
+      this.num1 = 0;
+      setNum2IsSet(false);
+      this.num2 = 0;
+    }
+
+    public int getNum1() {
+      return this.num1;
+    }
+
+    public void setNum1(int num1) {
+      this.num1 = num1;
+      setNum1IsSet(true);
+    }
+
+    public void unsetNum1() {
+      __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __NUM1_ISSET_ID);
+    }
+
+    /** Returns true if field num1 is set (has been assigned a value) and false otherwise */
+    public boolean isSetNum1() {
+      return EncodingUtils.testBit(__isset_bitfield, __NUM1_ISSET_ID);
+    }
+
+    public void setNum1IsSet(boolean value) {
+      __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __NUM1_ISSET_ID, value);
+    }
+
+    public int getNum2() {
+      return this.num2;
+    }
+
+    public void setNum2(int num2) {
+      this.num2 = num2;
+      setNum2IsSet(true);
+    }
+
+    public void unsetNum2() {
+      __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __NUM2_ISSET_ID);
+    }
+
+    /** Returns true if field num2 is set (has been assigned a value) and false otherwise */
+    public boolean isSetNum2() {
+      return EncodingUtils.testBit(__isset_bitfield, __NUM2_ISSET_ID);
+    }
+
+    public void setNum2IsSet(boolean value) {
+      __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __NUM2_ISSET_ID, value);
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+      switch (field) {
+      case NUM1:
+        if (value == null) {
+          unsetNum1();
+        } else {
+          setNum1((Integer)value);
+        }
+        break;
+
+      case NUM2:
+        if (value == null) {
+          unsetNum2();
+        } else {
+          setNum2((Integer)value);
+        }
+        break;
+
+      }
+    }
+
+    public Object getFieldValue(_Fields field) {
+      switch (field) {
+      case NUM1:
+        return Integer.valueOf(getNum1());
+
+      case NUM2:
+        return Integer.valueOf(getNum2());
+
+      }
+      throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+      if (field == null) {
+        throw new IllegalArgumentException();
+      }
+
+      switch (field) {
+      case NUM1:
+        return isSetNum1();
+      case NUM2:
+        return isSetNum2();
+      }
+      throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof add_args)
+        return this.equals((add_args)that);
+      return false;
+    }
+
+    public boolean equals(add_args that) {
+      if (that == null)
+        return false;
+
+      boolean this_present_num1 = true;
+      boolean that_present_num1 = true;
+      if (this_present_num1 || that_present_num1) {
+        if (!(this_present_num1 && that_present_num1))
+          return false;
+        if (this.num1 != that.num1)
+          return false;
+      }
+
+      boolean this_present_num2 = true;
+      boolean that_present_num2 = true;
+      if (this_present_num2 || that_present_num2) {
+        if (!(this_present_num2 && that_present_num2))
+          return false;
+        if (this.num2 != that.num2)
+          return false;
+      }
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      return 0;
+    }
+
+    @Override
+    public int compareTo(add_args other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+
+      lastComparison = Boolean.valueOf(isSetNum1()).compareTo(other.isSetNum1());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (isSetNum1()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.num1, other.num1);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      lastComparison = Boolean.valueOf(isSetNum2()).compareTo(other.isSetNum2());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (isSetNum2()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.num2, other.num2);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+      return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("add_args(");
+      boolean first = true;
+
+      sb.append("num1:");
+      sb.append(this.num1);
+      first = false;
+      if (!first) sb.append(", ");
+      sb.append("num2:");
+      sb.append(this.num2);
+      first = false;
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+      try {
+        // Java serialization bypasses the default constructor, so the isset bitfield must be reset explicitly before reading.
+        __isset_bitfield = 0;
+        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private static class add_argsStandardSchemeFactory implements SchemeFactory {
+      public add_argsStandardScheme getScheme() {
+        return new add_argsStandardScheme();
+      }
+    }
+
+    private static class add_argsStandardScheme extends StandardScheme<add_args> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, add_args struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            case 1: // NUM1
+              if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+                struct.num1 = iprot.readI32();
+                struct.setNum1IsSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            case 2: // NUM2
+              if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+                struct.num2 = iprot.readI32();
+                struct.setNum2IsSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, add_args struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        oprot.writeFieldBegin(NUM1_FIELD_DESC);
+        oprot.writeI32(struct.num1);
+        oprot.writeFieldEnd();
+        oprot.writeFieldBegin(NUM2_FIELD_DESC);
+        oprot.writeI32(struct.num2);
+        oprot.writeFieldEnd();
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class add_argsTupleSchemeFactory implements SchemeFactory {
+      public add_argsTupleScheme getScheme() {
+        return new add_argsTupleScheme();
+      }
+    }
+
+    private static class add_argsTupleScheme extends TupleScheme<add_args> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, add_args struct) throws org.apache.thrift.TException {
+        TTupleProtocol oprot = (TTupleProtocol) prot;
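+        // TupleScheme is the compact encoding: a leading bitset records which fields follow,
+        // then only the set fields are written.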
+        BitSet optionals = new BitSet();
+        if (struct.isSetNum1()) {
+          optionals.set(0);
+        }
+        if (struct.isSetNum2()) {
+          optionals.set(1);
+        }
+        oprot.writeBitSet(optionals, 2);
+        if (struct.isSetNum1()) {
+          oprot.writeI32(struct.num1);
+        }
+        if (struct.isSetNum2()) {
+          oprot.writeI32(struct.num2);
+        }
+      }
+
+      @Override
+      public void read(org.apache.thrift.protocol.TProtocol prot, add_args struct) throws org.apache.thrift.TException {
+        TTupleProtocol iprot = (TTupleProtocol) prot;
+        BitSet incoming = iprot.readBitSet(2);
+        if (incoming.get(0)) {
+          struct.num1 = iprot.readI32();
+          struct.setNum1IsSet(true);
+        }
+        if (incoming.get(1)) {
+          struct.num2 = iprot.readI32();
+          struct.setNum2IsSet(true);
+        }
+      }
+    }
+
+  }
+
+  public static class add_result implements org.apache.thrift.TBase<add_result, add_result._Fields>, java.io.Serializable, Cloneable, Comparable<add_result>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("add_result");
+
+    private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.I32, (short)0);
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new add_resultStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new add_resultTupleSchemeFactory());
+    }
+
+    private int success; // required
+
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+      SUCCESS((short)0, "success");
+
+      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if it's not found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          case 0: // SUCCESS
+            return SUCCESS;
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if it's not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+
+    // isset id assignments
+    private static final int __SUCCESS_ISSET_ID = 0;
+    private byte __isset_bitfield = 0;
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(add_result.class, metaDataMap);
+    }
+
+    public add_result() {
+    }
+
+    public add_result(
+      int success)
+    {
+      this();
+      this.success = success;
+      setSuccessIsSet(true);
+    }
+
+    /**
+     * Performs a deep copy on <i>other</i>.
+     */
+    public add_result(add_result other) {
+      __isset_bitfield = other.__isset_bitfield;
+      this.success = other.success;
+    }
+
+    public add_result deepCopy() {
+      return new add_result(this);
+    }
+
+    @Override
+    public void clear() {
+      setSuccessIsSet(false);
+      this.success = 0;
+    }
+
+    public int getSuccess() {
+      return this.success;
+    }
+
+    public void setSuccess(int success) {
+      this.success = success;
+      setSuccessIsSet(true);
+    }
+
+    public void unsetSuccess() {
+      __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __SUCCESS_ISSET_ID);
+    }
+
+    /** Returns true if field success is set (has been assigned a value) and false otherwise */
+    public boolean isSetSuccess() {
+      return EncodingUtils.testBit(__isset_bitfield, __SUCCESS_ISSET_ID);
+    }
+
+    public void setSuccessIsSet(boolean value) {
+      __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SUCCESS_ISSET_ID, value);
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+      switch (field) {
+      case SUCCESS:
+        if (value == null) {
+          unsetSuccess();
+        } else {
+          setSuccess((Integer)value);
+        }
+        break;
+
+      }
+    }
+
+    public Object getFieldValue(_Fields field) {
+      switch (field) {
+      case SUCCESS:
+        return Integer.valueOf(getSuccess());
+
+      }
+      throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+      if (field == null) {
+        throw new IllegalArgumentException();
+      }
+
+      switch (field) {
+      case SUCCESS:
+        return isSetSuccess();
+      }
+      throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof add_result)
+        return this.equals((add_result)that);
+      return false;
+    }
+
+    public boolean equals(add_result that) {
+      if (that == null)
+        return false;
+
+      boolean this_present_success = true;
+      boolean that_present_success = true;
+      if (this_present_success || that_present_success) {
+        if (!(this_present_success && that_present_success))
+          return false;
+        if (this.success != that.success)
+          return false;
+      }
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      return 0;
+    }
+
+    @Override
+    public int compareTo(add_result other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+
+      lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+      if (isSetSuccess()) {
+        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
+        if (lastComparison != 0) {
+          return lastComparison;
+        }
+      }
+      return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+      return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("add_result(");
+      boolean first = true;
+
+      sb.append("success:");
+      sb.append(this.success);
+      first = false;
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+      try {
+        // Java serialization bypasses the default constructor, so the isset bitfield must be reset explicitly before reading.
+        __isset_bitfield = 0;
+        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private static class add_resultStandardSchemeFactory implements SchemeFactory {
+      public add_resultStandardScheme getScheme() {
+        return new add_resultStandardScheme();
+      }
+    }
+
+    private static class add_resultStandardScheme extends StandardScheme<add_result> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, add_result struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            case 0: // SUCCESS
+              if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+                struct.success = iprot.readI32();
+                struct.setSuccessIsSet(true);
+              } else { 
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+              }
+              break;
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, add_result struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        if (struct.isSetSuccess()) {
+          oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
+          oprot.writeI32(struct.success);
+          oprot.writeFieldEnd();
+        }
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class add_resultTupleSchemeFactory implements SchemeFactory {
+      public add_resultTupleScheme getScheme() {
+        return new add_resultTupleScheme();
+      }
+    }
+
+    private static class add_resultTupleScheme extends TupleScheme<add_result> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, add_result struct) throws org.apache.thrift.TException {
+        TTupleProtocol oprot = (TTupleProtocol) prot;
+        BitSet optionals = new BitSet();
+        if (struct.isSetSuccess()) {
+          optionals.set(0);
+        }
+        oprot.writeBitSet(optionals, 1);
+        if (struct.isSetSuccess()) {
+          oprot.writeI32(struct.success);
+        }
+      }
+
+      @Override
+      public void read(org.apache.thrift.protocol.TProtocol prot, add_result struct) throws org.apache.thrift.TException {
+        TTupleProtocol iprot = (TTupleProtocol) prot;
+        BitSet incoming = iprot.readBitSet(1);
+        if (incoming.get(0)) {
+          struct.success = iprot.readI32();
+          struct.setSuccessIsSet(true);
+        }
+      }
+    }
+
+  }
+
+  public static class zip_args implements org.apache.thrift.TBase<zip_args, zip_args._Fields>, java.io.Serializable, Cloneable, Comparable<zip_args>   {
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("zip_args");
+
+
+    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+    static {
+      schemes.put(StandardScheme.class, new zip_argsStandardSchemeFactory());
+      schemes.put(TupleScheme.class, new zip_argsTupleSchemeFactory());
+    }
+
+
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+      ;
+
+      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+      static {
+        for (_Fields field : EnumSet.allOf(_Fields.class)) {
+          byName.put(field.getFieldName(), field);
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, or null if it's not found.
+       */
+      public static _Fields findByThriftId(int fieldId) {
+        switch(fieldId) {
+          default:
+            return null;
+        }
+      }
+
+      /**
+       * Find the _Fields constant that matches fieldId, throwing an exception
+       * if it is not found.
+       */
+      public static _Fields findByThriftIdOrThrow(int fieldId) {
+        _Fields fields = findByThriftId(fieldId);
+        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+        return fields;
+      }
+
+      /**
+       * Find the _Fields constant that matches name, or null if it's not found.
+       */
+      public static _Fields findByName(String name) {
+        return byName.get(name);
+      }
+
+      private final short _thriftId;
+      private final String _fieldName;
+
+      _Fields(short thriftId, String fieldName) {
+        _thriftId = thriftId;
+        _fieldName = fieldName;
+      }
+
+      public short getThriftFieldId() {
+        return _thriftId;
+      }
+
+      public String getFieldName() {
+        return _fieldName;
+      }
+    }
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+    static {
+      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+      metaDataMap = Collections.unmodifiableMap(tmpMap);
+      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(zip_args.class, metaDataMap);
+    }
+
+    public zip_args() {
+    }
+
+    /**
+     * Performs a deep copy on <i>other</i>.
+     */
+    public zip_args(zip_args other) {
+    }
+
+    public zip_args deepCopy() {
+      return new zip_args(this);
+    }
+
+    @Override
+    public void clear() {
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+      switch (field) {
+      }
+    }
+
+    public Object getFieldValue(_Fields field) {
+      switch (field) {
+      }
+      throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+      if (field == null) {
+        throw new IllegalArgumentException();
+      }
+
+      switch (field) {
+      }
+      throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      if (that instanceof zip_args)
+        return this.equals((zip_args)that);
+      return false;
+    }
+
+    public boolean equals(zip_args that) {
+      if (that == null)
+        return false;
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      return 0;
+    }
+
+    @Override
+    public int compareTo(zip_args other) {
+      if (!getClass().equals(other.getClass())) {
+        return getClass().getName().compareTo(other.getClass().getName());
+      }
+
+      int lastComparison = 0;
+
+      return 0;
+    }
+
+    public _Fields fieldForId(int fieldId) {
+      return _Fields.findByThriftId(fieldId);
+    }
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+    }
+
+    @Override
+    public String toString() {
+      StringBuilder sb = new StringBuilder("zip_args(");
+      boolean first = true;
+
+      sb.append(")");
+      return sb.toString();
+    }
+
+    public void validate() throws org.apache.thrift.TException {
+      // check for required fields
+      // check for sub-struct validity
+    }
+
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+      try {
+        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+      try {
+        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+      } catch (org.apache.thrift.TException te) {
+        throw new java.io.IOException(te);
+      }
+    }
+
+    private static class zip_argsStandardSchemeFactory implements SchemeFactory {
+      public zip_argsStandardScheme getScheme() {
+        return new zip_argsStandardScheme();
+      }
+    }
+
+    private static class zip_argsStandardScheme extends StandardScheme<zip_args> {
+
+      public void read(org.apache.thrift.protocol.TProtocol iprot, zip_args struct) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField schemeField;
+        iprot.readStructBegin();
+        while (true)
+        {
+          schemeField = iprot.readFieldBegin();
+          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+            break;
+          }
+          switch (schemeField.id) {
+            default:
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+          }
+          iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+        struct.validate();
+      }
+
+      public void write(org.apache.thrift.protocol.TProtocol oprot, zip_args struct) throws org.apache.thrift.TException {
+        struct.validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+      }
+
+    }
+
+    private static class zip_argsTupleSchemeFactory implements SchemeFactory {
+      public zip_argsTupleScheme getScheme() {
+        return new zip_argsTupleScheme();
+      }
+    }
+
+    private static class zip_argsTupleScheme extends TupleScheme<zip_args> {
+
+      @Override
+      public void write(org.apache.thrift.protocol.TProtocol prot, zip_args struct) throws org.apache.thrift.TException {
+        TTupleProtocol oprot = (TTupleProtocol) prot;
+      }
+
+      @Override
+      public void read(org.apache.thrift.protocol.TProtocol prot, zip_args struct) throws org.apache.thrift.TException {
+        TTupleProtocol iprot = (TTupleProtocol) prot;
+      }
+    }
+
+  }
+
+}
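
The zip_args struct above has no fields, so its standard scheme writes only a struct header and a field stop, and read() simply skips fields until it sees TType.STOP. A minimal round-trip sketch against libthrift, where "Zip" is a stand-in for the enclosing generated service class (its real name appears earlier in this patch):

    import org.apache.thrift.protocol.TBinaryProtocol;
    import org.apache.thrift.transport.TMemoryBuffer;

    public class ZipArgsRoundTrip {
      public static void main(String[] args) throws Exception {
        // "Zip" is a hypothetical stand-in for the generated service wrapper class.
        Zip.zip_args out = new Zip.zip_args();
        TMemoryBuffer buf = new TMemoryBuffer(32);   // in-memory transport
        out.write(new TBinaryProtocol(buf));         // emits header + field stop only
        Zip.zip_args in = new Zip.zip_args();
        in.read(new TBinaryProtocol(buf));           // reads until TType.STOP
        System.out.println(in.equals(out));          // true: there are no fields to compare
      }
    }

The read/write methods dispatch through the schemes map, so TBinaryProtocol selects zip_argsStandardScheme while TTupleProtocol would select zip_argsTupleScheme.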
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/FooOrBar.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/FooOrBar.java
new file mode 100644
index 0000000..308ee45
--- /dev/null
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/FooOrBar.java
@@ -0,0 +1,356 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.1)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.avro.thrift.test;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class FooOrBar extends org.apache.thrift.TUnion<FooOrBar, FooOrBar._Fields> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("FooOrBar");
+  private static final org.apache.thrift.protocol.TField FOO_FIELD_DESC = new org.apache.thrift.protocol.TField("foo", org.apache.thrift.protocol.TType.STRING, (short)1);
+  private static final org.apache.thrift.protocol.TField BAR_FIELD_DESC = new org.apache.thrift.protocol.TField("bar", org.apache.thrift.protocol.TType.STRING, (short)2);
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    FOO((short)1, "foo"),
+    BAR((short)2, "bar");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if it's not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // FOO
+          return FOO;
+        case 2: // BAR
+          return BAR;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if it's not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.FOO, new org.apache.thrift.meta_data.FieldMetaData("foo", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.BAR, new org.apache.thrift.meta_data.FieldMetaData("bar", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(FooOrBar.class, metaDataMap);
+  }
+
+  public FooOrBar() {
+    super();
+  }
+
+  public FooOrBar(_Fields setField, Object value) {
+    super(setField, value);
+  }
+
+  public FooOrBar(FooOrBar other) {
+    super(other);
+  }
+  public FooOrBar deepCopy() {
+    return new FooOrBar(this);
+  }
+
+  public static FooOrBar foo(String value) {
+    FooOrBar x = new FooOrBar();
+    x.setFoo(value);
+    return x;
+  }
+
+  public static FooOrBar bar(String value) {
+    FooOrBar x = new FooOrBar();
+    x.setBar(value);
+    return x;
+  }
+
+
+  @Override
+  protected void checkType(_Fields setField, Object value) throws ClassCastException {
+    switch (setField) {
+      case FOO:
+        if (value instanceof String) {
+          break;
+        }
+        throw new ClassCastException("Was expecting value of type String for field 'foo', but got " + value.getClass().getSimpleName());
+      case BAR:
+        if (value instanceof String) {
+          break;
+        }
+        throw new ClassCastException("Was expecting value of type String for field 'bar', but got " + value.getClass().getSimpleName());
+      default:
+        throw new IllegalArgumentException("Unknown field id " + setField);
+    }
+  }
+
+  @Override
+  protected Object standardSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TField field) throws org.apache.thrift.TException {
+    _Fields setField = _Fields.findByThriftId(field.id);
+    if (setField != null) {
+      switch (setField) {
+        case FOO:
+          if (field.type == FOO_FIELD_DESC.type) {
+            String foo;
+            foo = iprot.readString();
+            return foo;
+          } else {
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+            return null;
+          }
+        case BAR:
+          if (field.type == BAR_FIELD_DESC.type) {
+            String bar;
+            bar = iprot.readString();
+            return bar;
+          } else {
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+            return null;
+          }
+        default:
+          throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
+      }
+    } else {
+      org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+      return null;
+    }
+  }
+
+  @Override
+  protected void standardSchemeWriteValue(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    switch (setField_) {
+      case FOO:
+        String foo = (String)value_;
+        oprot.writeString(foo);
+        return;
+      case BAR:
+        String bar = (String)value_;
+        oprot.writeString(bar);
+        return;
+      default:
+        throw new IllegalStateException("Cannot write union with unknown field " + setField_);
+    }
+  }
+
+  @Override
+  protected Object tupleSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot, short fieldID) throws org.apache.thrift.TException {
+    _Fields setField = _Fields.findByThriftId(fieldID);
+    if (setField != null) {
+      switch (setField) {
+        case FOO:
+          String foo;
+          foo = iprot.readString();
+          return foo;
+        case BAR:
+          String bar;
+          bar = iprot.readString();
+          return bar;
+        default:
+          throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
+      }
+    } else {
+      throw new TProtocolException("Couldn't find a field with field id " + fieldID);
+    }
+  }
+
+  @Override
+  protected void tupleSchemeWriteValue(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    switch (setField_) {
+      case FOO:
+        String foo = (String)value_;
+        oprot.writeString(foo);
+        return;
+      case BAR:
+        String bar = (String)value_;
+        oprot.writeString(bar);
+        return;
+      default:
+        throw new IllegalStateException("Cannot write union with unknown field " + setField_);
+    }
+  }
+
+  @Override
+  protected org.apache.thrift.protocol.TField getFieldDesc(_Fields setField) {
+    switch (setField) {
+      case FOO:
+        return FOO_FIELD_DESC;
+      case BAR:
+        return BAR_FIELD_DESC;
+      default:
+        throw new IllegalArgumentException("Unknown field id " + setField);
+    }
+  }
+
+  @Override
+  protected org.apache.thrift.protocol.TStruct getStructDesc() {
+    return STRUCT_DESC;
+  }
+
+  @Override
+  protected _Fields enumForId(short id) {
+    return _Fields.findByThriftIdOrThrow(id);
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+
+  public String getFoo() {
+    if (getSetField() == _Fields.FOO) {
+      return (String)getFieldValue();
+    } else {
+      throw new RuntimeException("Cannot get field 'foo' because union is currently set to " + getFieldDesc(getSetField()).name);
+    }
+  }
+
+  public void setFoo(String value) {
+    if (value == null) throw new NullPointerException();
+    setField_ = _Fields.FOO;
+    value_ = value;
+  }
+
+  public String getBar() {
+    if (getSetField() == _Fields.BAR) {
+      return (String)getFieldValue();
+    } else {
+      throw new RuntimeException("Cannot get field 'bar' because union is currently set to " + getFieldDesc(getSetField()).name);
+    }
+  }
+
+  public void setBar(String value) {
+    if (value == null) throw new NullPointerException();
+    setField_ = _Fields.BAR;
+    value_ = value;
+  }
+
+  public boolean isSetFoo() {
+    return setField_ == _Fields.FOO;
+  }
+
+
+  public boolean isSetBar() {
+    return setField_ == _Fields.BAR;
+  }
+
+
+  public boolean equals(Object other) {
+    if (other instanceof FooOrBar) {
+      return equals((FooOrBar)other);
+    } else {
+      return false;
+    }
+  }
+
+  public boolean equals(FooOrBar other) {
+    return other != null && getSetField() == other.getSetField() && getFieldValue().equals(other.getFieldValue());
+  }
+
+  @Override
+  public int compareTo(FooOrBar other) {
+    int lastComparison = org.apache.thrift.TBaseHelper.compareTo(getSetField(), other.getSetField());
+    if (lastComparison == 0) {
+      return org.apache.thrift.TBaseHelper.compareTo(getFieldValue(), other.getFieldValue());
+    }
+    return lastComparison;
+  }
+
+
+  /**
+   * If you'd like this to perform more respectably, use the hashcode generator option.
+   */
+  @Override
+  public int hashCode() {
+    return 0;
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+
+}
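
FooOrBar extends org.apache.thrift.TUnion, so a single setField_/value_ pair backs both branches and exactly one field is set at any time; the static foo(...)/bar(...) factories and the isSet accessors above are the intended entry points. A short usage sketch (FooOrBarDemo is a hypothetical driver, not part of this patch):

    package org.apache.avro.thrift.test;

    public class FooOrBarDemo {
      public static void main(String[] args) {
        FooOrBar u = FooOrBar.foo("hello");   // union now carries the 'foo' branch
        System.out.println(u.isSetFoo());     // true
        System.out.println(u.getFoo());       // hello

        u.setBar("world");                    // switching branches replaces the stored value
        System.out.println(u.isSetFoo());     // false: only one field is ever set
        System.out.println(u.getBar());       // world
      }
    }

Calling getFoo() after the branch has switched throws the RuntimeException seen in the getter above, so callers are expected to check getSetField() or the isSet methods first.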
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Nested.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Nested.java
new file mode 100644
index 0000000..482e4cc
--- /dev/null
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Nested.java
@@ -0,0 +1,383 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.1)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.avro.thrift.test;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class Nested implements org.apache.thrift.TBase<Nested, Nested._Fields>, java.io.Serializable, Cloneable, Comparable<Nested> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Nested");
+
+  private static final org.apache.thrift.protocol.TField X_FIELD_DESC = new org.apache.thrift.protocol.TField("x", org.apache.thrift.protocol.TType.I32, (short)1);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new NestedStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new NestedTupleSchemeFactory());
+  }
+
+  private int x; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    X((short)1, "x");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if it's not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // X
+          return X;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if it's not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  private static final int __X_ISSET_ID = 0;
+  private byte __isset_bitfield = 0;
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.X, new org.apache.thrift.meta_data.FieldMetaData("x", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Nested.class, metaDataMap);
+  }
+
+  public Nested() {
+  }
+
+  public Nested(
+    int x)
+  {
+    this();
+    this.x = x;
+    setXIsSet(true);
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public Nested(Nested other) {
+    __isset_bitfield = other.__isset_bitfield;
+    this.x = other.x;
+  }
+
+  public Nested deepCopy() {
+    return new Nested(this);
+  }
+
+  @Override
+  public void clear() {
+    setXIsSet(false);
+    this.x = 0;
+  }
+
+  public int getX() {
+    return this.x;
+  }
+
+  public void setX(int x) {
+    this.x = x;
+    setXIsSet(true);
+  }
+
+  public void unsetX() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __X_ISSET_ID);
+  }
+
+  /** Returns true if field x is set (has been assigned a value) and false otherwise */
+  public boolean isSetX() {
+    return EncodingUtils.testBit(__isset_bitfield, __X_ISSET_ID);
+  }
+
+  public void setXIsSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __X_ISSET_ID, value);
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case X:
+      if (value == null) {
+        unsetX();
+      } else {
+        setX((Integer)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case X:
+      return Integer.valueOf(getX());
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case X:
+      return isSetX();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof Nested)
+      return this.equals((Nested)that);
+    return false;
+  }
+
+  public boolean equals(Nested that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_x = true;
+    boolean that_present_x = true;
+    if (this_present_x || that_present_x) {
+      if (!(this_present_x && that_present_x))
+        return false;
+      if (this.x != that.x)
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    return 0;
+  }
+
+  @Override
+  public int compareTo(Nested other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+
+    lastComparison = Boolean.valueOf(isSetX()).compareTo(other.isSetX());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetX()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.x, other.x);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("Nested(");
+    boolean first = true;
+
+    sb.append("x:");
+    sb.append(this.x);
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+      __isset_bitfield = 0;
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class NestedStandardSchemeFactory implements SchemeFactory {
+    public NestedStandardScheme getScheme() {
+      return new NestedStandardScheme();
+    }
+  }
+
+  private static class NestedStandardScheme extends StandardScheme<Nested> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, Nested struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // X
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.x = iprot.readI32();
+              struct.setXIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, Nested struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      oprot.writeFieldBegin(X_FIELD_DESC);
+      oprot.writeI32(struct.x);
+      oprot.writeFieldEnd();
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class NestedTupleSchemeFactory implements SchemeFactory {
+    public NestedTupleScheme getScheme() {
+      return new NestedTupleScheme();
+    }
+  }
+
+  private static class NestedTupleScheme extends TupleScheme<Nested> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, Nested struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      BitSet optionals = new BitSet();
+      if (struct.isSetX()) {
+        optionals.set(0);
+      }
+      oprot.writeBitSet(optionals, 1);
+      if (struct.isSetX()) {
+        oprot.writeI32(struct.x);
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, Nested struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      BitSet incoming = iprot.readBitSet(1);
+      if (incoming.get(0)) {
+        struct.x = iprot.readI32();
+        struct.setXIsSet(true);
+      }
+    }
+  }
+
+}
+
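Because Nested.x is a primitive int and cannot be null, the generated code tracks whether it has been assigned in __isset_bitfield, one bit per primitive field (bit __X_ISSET_ID here). A small sketch of those semantics (NestedIssetDemo is a hypothetical driver, not part of this patch):

    package org.apache.avro.thrift.test;

    public class NestedIssetDemo {
      public static void main(String[] args) {
        Nested n = new Nested();
        System.out.println(n.isSetX());   // false: bit __X_ISSET_ID is clear
        System.out.println(n.getX());     // 0, the primitive default

        n.setX(42);                       // setX flips the bit via setXIsSet(true)
        System.out.println(n.isSetX());   // true
        n.unsetX();                       // clears the bit; the stored value is untouched
        System.out.println(n.getX());     // still 42
      }
    }

The tuple scheme above relies on the same bit: writeBitSet(optionals, 1) serializes it, so the reader knows whether an i32 value follows.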
diff --git a/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Test.java b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Test.java
new file mode 100644
index 0000000..65c1b3f
--- /dev/null
+++ b/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Test.java
@@ -0,0 +1,2011 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.1)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.avro.thrift.test;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class Test implements org.apache.thrift.TBase<Test, Test._Fields>, java.io.Serializable, Cloneable, Comparable<Test> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Test");
+
+  private static final org.apache.thrift.protocol.TField BOOL_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("boolField", org.apache.thrift.protocol.TType.BOOL, (short)1);
+  private static final org.apache.thrift.protocol.TField BYTE_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("byteField", org.apache.thrift.protocol.TType.BYTE, (short)2);
+  private static final org.apache.thrift.protocol.TField BYTE_OPTIONAL_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("byteOptionalField", org.apache.thrift.protocol.TType.BYTE, (short)16);
+  private static final org.apache.thrift.protocol.TField I16_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("i16Field", org.apache.thrift.protocol.TType.I16, (short)3);
+  private static final org.apache.thrift.protocol.TField I16_OPTIONAL_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("i16OptionalField", org.apache.thrift.protocol.TType.I16, (short)15);
+  private static final org.apache.thrift.protocol.TField I32_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("i32Field", org.apache.thrift.protocol.TType.I32, (short)4);
+  private static final org.apache.thrift.protocol.TField I64_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("i64Field", org.apache.thrift.protocol.TType.I64, (short)5);
+  private static final org.apache.thrift.protocol.TField DOUBLE_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("doubleField", org.apache.thrift.protocol.TType.DOUBLE, (short)6);
+  private static final org.apache.thrift.protocol.TField STRING_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("stringField", org.apache.thrift.protocol.TType.STRING, (short)7);
+  private static final org.apache.thrift.protocol.TField BINARY_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("binaryField", org.apache.thrift.protocol.TType.STRING, (short)8);
+  private static final org.apache.thrift.protocol.TField MAP_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("mapField", org.apache.thrift.protocol.TType.MAP, (short)9);
+  private static final org.apache.thrift.protocol.TField LIST_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("listField", org.apache.thrift.protocol.TType.LIST, (short)10);
+  private static final org.apache.thrift.protocol.TField SET_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("setField", org.apache.thrift.protocol.TType.SET, (short)11);
+  private static final org.apache.thrift.protocol.TField ENUM_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("enumField", org.apache.thrift.protocol.TType.I32, (short)12);
+  private static final org.apache.thrift.protocol.TField STRUCT_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField("structField", org.apache.thrift.protocol.TType.STRUCT, (short)13);
+  private static final org.apache.thrift.protocol.TField FOO_OR_BAR_FIELD_DESC = new org.apache.thrift.protocol.TField("fooOrBar", org.apache.thrift.protocol.TType.STRUCT, (short)14);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new TestStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new TestTupleSchemeFactory());
+  }
+
+  private boolean boolField; // required
+  private byte byteField; // required
+  private byte byteOptionalField; // optional
+  private short i16Field; // required
+  private short i16OptionalField; // optional
+  private int i32Field; // optional
+  private long i64Field; // required
+  private double doubleField; // required
+  private String stringField; // required
+  private ByteBuffer binaryField; // optional
+  private Map<String,Integer> mapField; // required
+  private List<Integer> listField; // required
+  private Set<Integer> setField; // required
+  private E enumField; // required
+  private Nested structField; // required
+  private FooOrBar fooOrBar; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    BOOL_FIELD((short)1, "boolField"),
+    BYTE_FIELD((short)2, "byteField"),
+    BYTE_OPTIONAL_FIELD((short)16, "byteOptionalField"),
+    I16_FIELD((short)3, "i16Field"),
+    I16_OPTIONAL_FIELD((short)15, "i16OptionalField"),
+    I32_FIELD((short)4, "i32Field"),
+    I64_FIELD((short)5, "i64Field"),
+    DOUBLE_FIELD((short)6, "doubleField"),
+    STRING_FIELD((short)7, "stringField"),
+    BINARY_FIELD((short)8, "binaryField"),
+    MAP_FIELD((short)9, "mapField"),
+    LIST_FIELD((short)10, "listField"),
+    SET_FIELD((short)11, "setField"),
+    /**
+     * 
+     * @see E
+     */
+    ENUM_FIELD((short)12, "enumField"),
+    STRUCT_FIELD((short)13, "structField"),
+    FOO_OR_BAR((short)14, "fooOrBar");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if it's not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // BOOL_FIELD
+          return BOOL_FIELD;
+        case 2: // BYTE_FIELD
+          return BYTE_FIELD;
+        case 16: // BYTE_OPTIONAL_FIELD
+          return BYTE_OPTIONAL_FIELD;
+        case 3: // I16_FIELD
+          return I16_FIELD;
+        case 15: // I16_OPTIONAL_FIELD
+          return I16_OPTIONAL_FIELD;
+        case 4: // I32_FIELD
+          return I32_FIELD;
+        case 5: // I64_FIELD
+          return I64_FIELD;
+        case 6: // DOUBLE_FIELD
+          return DOUBLE_FIELD;
+        case 7: // STRING_FIELD
+          return STRING_FIELD;
+        case 8: // BINARY_FIELD
+          return BINARY_FIELD;
+        case 9: // MAP_FIELD
+          return MAP_FIELD;
+        case 10: // LIST_FIELD
+          return LIST_FIELD;
+        case 11: // SET_FIELD
+          return SET_FIELD;
+        case 12: // ENUM_FIELD
+          return ENUM_FIELD;
+        case 13: // STRUCT_FIELD
+          return STRUCT_FIELD;
+        case 14: // FOO_OR_BAR
+          return FOO_OR_BAR;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if it's not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  private static final int __BOOLFIELD_ISSET_ID = 0;
+  private static final int __BYTEFIELD_ISSET_ID = 1;
+  private static final int __BYTEOPTIONALFIELD_ISSET_ID = 2;
+  private static final int __I16FIELD_ISSET_ID = 3;
+  private static final int __I16OPTIONALFIELD_ISSET_ID = 4;
+  private static final int __I32FIELD_ISSET_ID = 5;
+  private static final int __I64FIELD_ISSET_ID = 6;
+  private static final int __DOUBLEFIELD_ISSET_ID = 7;
+  private byte __isset_bitfield = 0;
+  private _Fields optionals[] = {_Fields.BYTE_OPTIONAL_FIELD,_Fields.I16_OPTIONAL_FIELD,_Fields.I32_FIELD,_Fields.BINARY_FIELD};
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.BOOL_FIELD, new org.apache.thrift.meta_data.FieldMetaData("boolField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
+    tmpMap.put(_Fields.BYTE_FIELD, new org.apache.thrift.meta_data.FieldMetaData("byteField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BYTE)));
+    tmpMap.put(_Fields.BYTE_OPTIONAL_FIELD, new org.apache.thrift.meta_data.FieldMetaData("byteOptionalField", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BYTE)));
+    tmpMap.put(_Fields.I16_FIELD, new org.apache.thrift.meta_data.FieldMetaData("i16Field", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16)));
+    tmpMap.put(_Fields.I16_OPTIONAL_FIELD, new org.apache.thrift.meta_data.FieldMetaData("i16OptionalField", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16)));
+    tmpMap.put(_Fields.I32_FIELD, new org.apache.thrift.meta_data.FieldMetaData("i32Field", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+    tmpMap.put(_Fields.I64_FIELD, new org.apache.thrift.meta_data.FieldMetaData("i64Field", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
+    tmpMap.put(_Fields.DOUBLE_FIELD, new org.apache.thrift.meta_data.FieldMetaData("doubleField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.DOUBLE)));
+    tmpMap.put(_Fields.STRING_FIELD, new org.apache.thrift.meta_data.FieldMetaData("stringField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.BINARY_FIELD, new org.apache.thrift.meta_data.FieldMetaData("binaryField", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING, true)));
+    tmpMap.put(_Fields.MAP_FIELD, new org.apache.thrift.meta_data.FieldMetaData("mapField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, 
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), 
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))));
+    tmpMap.put(_Fields.LIST_FIELD, new org.apache.thrift.meta_data.FieldMetaData("listField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))));
+    tmpMap.put(_Fields.SET_FIELD, new org.apache.thrift.meta_data.FieldMetaData("setField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.SetMetaData(org.apache.thrift.protocol.TType.SET, 
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))));
+    tmpMap.put(_Fields.ENUM_FIELD, new org.apache.thrift.meta_data.FieldMetaData("enumField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, E.class)));
+    tmpMap.put(_Fields.STRUCT_FIELD, new org.apache.thrift.meta_data.FieldMetaData("structField", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, Nested.class)));
+    tmpMap.put(_Fields.FOO_OR_BAR, new org.apache.thrift.meta_data.FieldMetaData("fooOrBar", org.apache.thrift.TFieldRequirementType.DEFAULT, 
+        new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, FooOrBar.class)));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Test.class, metaDataMap);
+  }
+
+  public Test() {
+  }
+
+  public Test(
+    boolean boolField,
+    byte byteField,
+    short i16Field,
+    long i64Field,
+    double doubleField,
+    String stringField,
+    Map<String,Integer> mapField,
+    List<Integer> listField,
+    Set<Integer> setField,
+    E enumField,
+    Nested structField,
+    FooOrBar fooOrBar)
+  {
+    this();
+    this.boolField = boolField;
+    setBoolFieldIsSet(true);
+    this.byteField = byteField;
+    setByteFieldIsSet(true);
+    this.i16Field = i16Field;
+    setI16FieldIsSet(true);
+    this.i64Field = i64Field;
+    setI64FieldIsSet(true);
+    this.doubleField = doubleField;
+    setDoubleFieldIsSet(true);
+    this.stringField = stringField;
+    this.mapField = mapField;
+    this.listField = listField;
+    this.setField = setField;
+    this.enumField = enumField;
+    this.structField = structField;
+    this.fooOrBar = fooOrBar;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public Test(Test other) {
+    __isset_bitfield = other.__isset_bitfield;
+    this.boolField = other.boolField;
+    this.byteField = other.byteField;
+    this.byteOptionalField = other.byteOptionalField;
+    this.i16Field = other.i16Field;
+    this.i16OptionalField = other.i16OptionalField;
+    this.i32Field = other.i32Field;
+    this.i64Field = other.i64Field;
+    this.doubleField = other.doubleField;
+    if (other.isSetStringField()) {
+      this.stringField = other.stringField;
+    }
+    if (other.isSetBinaryField()) {
+      this.binaryField = org.apache.thrift.TBaseHelper.copyBinary(other.binaryField);
+    }
+    if (other.isSetMapField()) {
+      Map<String,Integer> __this__mapField = new HashMap<String,Integer>(other.mapField);
+      this.mapField = __this__mapField;
+    }
+    if (other.isSetListField()) {
+      List<Integer> __this__listField = new ArrayList<Integer>(other.listField);
+      this.listField = __this__listField;
+    }
+    if (other.isSetSetField()) {
+      Set<Integer> __this__setField = new HashSet<Integer>(other.setField);
+      this.setField = __this__setField;
+    }
+    if (other.isSetEnumField()) {
+      this.enumField = other.enumField;
+    }
+    if (other.isSetStructField()) {
+      this.structField = new Nested(other.structField);
+    }
+    if (other.isSetFooOrBar()) {
+      this.fooOrBar = new FooOrBar(other.fooOrBar);
+    }
+  }
+
+  public Test deepCopy() {
+    return new Test(this);
+  }
+
+  @Override
+  public void clear() {
+    setBoolFieldIsSet(false);
+    this.boolField = false;
+    setByteFieldIsSet(false);
+    this.byteField = 0;
+    setByteOptionalFieldIsSet(false);
+    this.byteOptionalField = 0;
+    setI16FieldIsSet(false);
+    this.i16Field = 0;
+    setI16OptionalFieldIsSet(false);
+    this.i16OptionalField = 0;
+    setI32FieldIsSet(false);
+    this.i32Field = 0;
+    setI64FieldIsSet(false);
+    this.i64Field = 0;
+    setDoubleFieldIsSet(false);
+    this.doubleField = 0.0;
+    this.stringField = null;
+    this.binaryField = null;
+    this.mapField = null;
+    this.listField = null;
+    this.setField = null;
+    this.enumField = null;
+    this.structField = null;
+    this.fooOrBar = null;
+  }
+
+  public boolean isBoolField() {
+    return this.boolField;
+  }
+
+  public void setBoolField(boolean boolField) {
+    this.boolField = boolField;
+    setBoolFieldIsSet(true);
+  }
+
+  public void unsetBoolField() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __BOOLFIELD_ISSET_ID);
+  }
+
+  /** Returns true if field boolField is set (has been assigned a value) and false otherwise */
+  public boolean isSetBoolField() {
+    return EncodingUtils.testBit(__isset_bitfield, __BOOLFIELD_ISSET_ID);
+  }
+
+  public void setBoolFieldIsSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __BOOLFIELD_ISSET_ID, value);
+  }
+
+  public byte getByteField() {
+    return this.byteField;
+  }
+
+  public void setByteField(byte byteField) {
+    this.byteField = byteField;
+    setByteFieldIsSet(true);
+  }
+
+  public void unsetByteField() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __BYTEFIELD_ISSET_ID);
+  }
+
+  /** Returns true if field byteField is set (has been assigned a value) and false otherwise */
+  public boolean isSetByteField() {
+    return EncodingUtils.testBit(__isset_bitfield, __BYTEFIELD_ISSET_ID);
+  }
+
+  public void setByteFieldIsSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __BYTEFIELD_ISSET_ID, value);
+  }
+
+  public byte getByteOptionalField() {
+    return this.byteOptionalField;
+  }
+
+  public void setByteOptionalField(byte byteOptionalField) {
+    this.byteOptionalField = byteOptionalField;
+    setByteOptionalFieldIsSet(true);
+  }
+
+  public void unsetByteOptionalField() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __BYTEOPTIONALFIELD_ISSET_ID);
+  }
+
+  /** Returns true if field byteOptionalField is set (has been assigned a value) and false otherwise */
+  public boolean isSetByteOptionalField() {
+    return EncodingUtils.testBit(__isset_bitfield, __BYTEOPTIONALFIELD_ISSET_ID);
+  }
+
+  public void setByteOptionalFieldIsSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __BYTEOPTIONALFIELD_ISSET_ID, value);
+  }
+
+  public short getI16Field() {
+    return this.i16Field;
+  }
+
+  public void setI16Field(short i16Field) {
+    this.i16Field = i16Field;
+    setI16FieldIsSet(true);
+  }
+
+  public void unsetI16Field() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __I16FIELD_ISSET_ID);
+  }
+
+  /** Returns true if field i16Field is set (has been assigned a value) and false otherwise */
+  public boolean isSetI16Field() {
+    return EncodingUtils.testBit(__isset_bitfield, __I16FIELD_ISSET_ID);
+  }
+
+  public void setI16FieldIsSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __I16FIELD_ISSET_ID, value);
+  }
+
+  public short getI16OptionalField() {
+    return this.i16OptionalField;
+  }
+
+  public void setI16OptionalField(short i16OptionalField) {
+    this.i16OptionalField = i16OptionalField;
+    setI16OptionalFieldIsSet(true);
+  }
+
+  public void unsetI16OptionalField() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __I16OPTIONALFIELD_ISSET_ID);
+  }
+
+  /** Returns true if field i16OptionalField is set (has been assigned a value) and false otherwise */
+  public boolean isSetI16OptionalField() {
+    return EncodingUtils.testBit(__isset_bitfield, __I16OPTIONALFIELD_ISSET_ID);
+  }
+
+  public void setI16OptionalFieldIsSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __I16OPTIONALFIELD_ISSET_ID, value);
+  }
+
+  public int getI32Field() {
+    return this.i32Field;
+  }
+
+  public void setI32Field(int i32Field) {
+    this.i32Field = i32Field;
+    setI32FieldIsSet(true);
+  }
+
+  public void unsetI32Field() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __I32FIELD_ISSET_ID);
+  }
+
+  /** Returns true if field i32Field is set (has been assigned a value) and false otherwise */
+  public boolean isSetI32Field() {
+    return EncodingUtils.testBit(__isset_bitfield, __I32FIELD_ISSET_ID);
+  }
+
+  public void setI32FieldIsSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __I32FIELD_ISSET_ID, value);
+  }
+
+  public long getI64Field() {
+    return this.i64Field;
+  }
+
+  public void setI64Field(long i64Field) {
+    this.i64Field = i64Field;
+    setI64FieldIsSet(true);
+  }
+
+  public void unsetI64Field() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __I64FIELD_ISSET_ID);
+  }
+
+  /** Returns true if field i64Field is set (has been assigned a value) and false otherwise */
+  public boolean isSetI64Field() {
+    return EncodingUtils.testBit(__isset_bitfield, __I64FIELD_ISSET_ID);
+  }
+
+  public void setI64FieldIsSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __I64FIELD_ISSET_ID, value);
+  }
+
+  public double getDoubleField() {
+    return this.doubleField;
+  }
+
+  public void setDoubleField(double doubleField) {
+    this.doubleField = doubleField;
+    setDoubleFieldIsSet(true);
+  }
+
+  public void unsetDoubleField() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __DOUBLEFIELD_ISSET_ID);
+  }
+
+  /** Returns true if field doubleField is set (has been assigned a value) and false otherwise */
+  public boolean isSetDoubleField() {
+    return EncodingUtils.testBit(__isset_bitfield, __DOUBLEFIELD_ISSET_ID);
+  }
+
+  public void setDoubleFieldIsSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __DOUBLEFIELD_ISSET_ID, value);
+  }
+
+  public String getStringField() {
+    return this.stringField;
+  }
+
+  public void setStringField(String stringField) {
+    this.stringField = stringField;
+  }
+
+  public void unsetStringField() {
+    this.stringField = null;
+  }
+
+  /** Returns true if field stringField is set (has been assigned a value) and false otherwise */
+  public boolean isSetStringField() {
+    return this.stringField != null;
+  }
+
+  public void setStringFieldIsSet(boolean value) {
+    if (!value) {
+      this.stringField = null;
+    }
+  }
+
+  public byte[] getBinaryField() {
+    setBinaryField(org.apache.thrift.TBaseHelper.rightSize(binaryField));
+    return binaryField == null ? null : binaryField.array();
+  }
+
+  public ByteBuffer bufferForBinaryField() {
+    return binaryField;
+  }
+
+  public void setBinaryField(byte[] binaryField) {
+    setBinaryField(binaryField == null ? (ByteBuffer)null : ByteBuffer.wrap(binaryField));
+  }
+
+  public void setBinaryField(ByteBuffer binaryField) {
+    this.binaryField = binaryField;
+  }
+
+  public void unsetBinaryField() {
+    this.binaryField = null;
+  }
+
+  /** Returns true if field binaryField is set (has been assigned a value) and false otherwise */
+  public boolean isSetBinaryField() {
+    return this.binaryField != null;
+  }
+
+  public void setBinaryFieldIsSet(boolean value) {
+    if (!value) {
+      this.binaryField = null;
+    }
+  }
+
+  public int getMapFieldSize() {
+    return (this.mapField == null) ? 0 : this.mapField.size();
+  }
+
+  public void putToMapField(String key, int val) {
+    if (this.mapField == null) {
+      this.mapField = new HashMap<String,Integer>();
+    }
+    this.mapField.put(key, val);
+  }
+
+  public Map<String,Integer> getMapField() {
+    return this.mapField;
+  }
+
+  public void setMapField(Map<String,Integer> mapField) {
+    this.mapField = mapField;
+  }
+
+  public void unsetMapField() {
+    this.mapField = null;
+  }
+
+  /** Returns true if field mapField is set (has been assigned a value) and false otherwise */
+  public boolean isSetMapField() {
+    return this.mapField != null;
+  }
+
+  public void setMapFieldIsSet(boolean value) {
+    if (!value) {
+      this.mapField = null;
+    }
+  }
+
+  public int getListFieldSize() {
+    return (this.listField == null) ? 0 : this.listField.size();
+  }
+
+  public java.util.Iterator<Integer> getListFieldIterator() {
+    return (this.listField == null) ? null : this.listField.iterator();
+  }
+
+  public void addToListField(int elem) {
+    if (this.listField == null) {
+      this.listField = new ArrayList<Integer>();
+    }
+    this.listField.add(elem);
+  }
+
+  public List<Integer> getListField() {
+    return this.listField;
+  }
+
+  public void setListField(List<Integer> listField) {
+    this.listField = listField;
+  }
+
+  public void unsetListField() {
+    this.listField = null;
+  }
+
+  /** Returns true if field listField is set (has been assigned a value) and false otherwise */
+  public boolean isSetListField() {
+    return this.listField != null;
+  }
+
+  public void setListFieldIsSet(boolean value) {
+    if (!value) {
+      this.listField = null;
+    }
+  }
+
+  public int getSetFieldSize() {
+    return (this.setField == null) ? 0 : this.setField.size();
+  }
+
+  public java.util.Iterator<Integer> getSetFieldIterator() {
+    return (this.setField == null) ? null : this.setField.iterator();
+  }
+
+  public void addToSetField(int elem) {
+    if (this.setField == null) {
+      this.setField = new HashSet<Integer>();
+    }
+    this.setField.add(elem);
+  }
+
+  public Set<Integer> getSetField() {
+    return this.setField;
+  }
+
+  public void setSetField(Set<Integer> setField) {
+    this.setField = setField;
+  }
+
+  public void unsetSetField() {
+    this.setField = null;
+  }
+
+  /** Returns true if field setField is set (has been assigned a value) and false otherwise */
+  public boolean isSetSetField() {
+    return this.setField != null;
+  }
+
+  public void setSetFieldIsSet(boolean value) {
+    if (!value) {
+      this.setField = null;
+    }
+  }
+
+  /**
+   * 
+   * @see E
+   */
+  public E getEnumField() {
+    return this.enumField;
+  }
+
+  /**
+   * 
+   * @see E
+   */
+  public void setEnumField(E enumField) {
+    this.enumField = enumField;
+  }
+
+  public void unsetEnumField() {
+    this.enumField = null;
+  }
+
+  /** Returns true if field enumField is set (has been assigned a value) and false otherwise */
+  public boolean isSetEnumField() {
+    return this.enumField != null;
+  }
+
+  public void setEnumFieldIsSet(boolean value) {
+    if (!value) {
+      this.enumField = null;
+    }
+  }
+
+  public Nested getStructField() {
+    return this.structField;
+  }
+
+  public void setStructField(Nested structField) {
+    this.structField = structField;
+  }
+
+  public void unsetStructField() {
+    this.structField = null;
+  }
+
+  /** Returns true if field structField is set (has been assigned a value) and false otherwise */
+  public boolean isSetStructField() {
+    return this.structField != null;
+  }
+
+  public void setStructFieldIsSet(boolean value) {
+    if (!value) {
+      this.structField = null;
+    }
+  }
+
+  public FooOrBar getFooOrBar() {
+    return this.fooOrBar;
+  }
+
+  public void setFooOrBar(FooOrBar fooOrBar) {
+    this.fooOrBar = fooOrBar;
+  }
+
+  public void unsetFooOrBar() {
+    this.fooOrBar = null;
+  }
+
+  /** Returns true if field fooOrBar is set (has been assigned a value) and false otherwise */
+  public boolean isSetFooOrBar() {
+    return this.fooOrBar != null;
+  }
+
+  public void setFooOrBarIsSet(boolean value) {
+    if (!value) {
+      this.fooOrBar = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case BOOL_FIELD:
+      if (value == null) {
+        unsetBoolField();
+      } else {
+        setBoolField((Boolean)value);
+      }
+      break;
+
+    case BYTE_FIELD:
+      if (value == null) {
+        unsetByteField();
+      } else {
+        setByteField((Byte)value);
+      }
+      break;
+
+    case BYTE_OPTIONAL_FIELD:
+      if (value == null) {
+        unsetByteOptionalField();
+      } else {
+        setByteOptionalField((Byte)value);
+      }
+      break;
+
+    case I16_FIELD:
+      if (value == null) {
+        unsetI16Field();
+      } else {
+        setI16Field((Short)value);
+      }
+      break;
+
+    case I16_OPTIONAL_FIELD:
+      if (value == null) {
+        unsetI16OptionalField();
+      } else {
+        setI16OptionalField((Short)value);
+      }
+      break;
+
+    case I32_FIELD:
+      if (value == null) {
+        unsetI32Field();
+      } else {
+        setI32Field((Integer)value);
+      }
+      break;
+
+    case I64_FIELD:
+      if (value == null) {
+        unsetI64Field();
+      } else {
+        setI64Field((Long)value);
+      }
+      break;
+
+    case DOUBLE_FIELD:
+      if (value == null) {
+        unsetDoubleField();
+      } else {
+        setDoubleField((Double)value);
+      }
+      break;
+
+    case STRING_FIELD:
+      if (value == null) {
+        unsetStringField();
+      } else {
+        setStringField((String)value);
+      }
+      break;
+
+    case BINARY_FIELD:
+      if (value == null) {
+        unsetBinaryField();
+      } else {
+        setBinaryField((ByteBuffer)value);
+      }
+      break;
+
+    case MAP_FIELD:
+      if (value == null) {
+        unsetMapField();
+      } else {
+        setMapField((Map<String,Integer>)value);
+      }
+      break;
+
+    case LIST_FIELD:
+      if (value == null) {
+        unsetListField();
+      } else {
+        setListField((List<Integer>)value);
+      }
+      break;
+
+    case SET_FIELD:
+      if (value == null) {
+        unsetSetField();
+      } else {
+        setSetField((Set<Integer>)value);
+      }
+      break;
+
+    case ENUM_FIELD:
+      if (value == null) {
+        unsetEnumField();
+      } else {
+        setEnumField((E)value);
+      }
+      break;
+
+    case STRUCT_FIELD:
+      if (value == null) {
+        unsetStructField();
+      } else {
+        setStructField((Nested)value);
+      }
+      break;
+
+    case FOO_OR_BAR:
+      if (value == null) {
+        unsetFooOrBar();
+      } else {
+        setFooOrBar((FooOrBar)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case BOOL_FIELD:
+      return Boolean.valueOf(isBoolField());
+
+    case BYTE_FIELD:
+      return Byte.valueOf(getByteField());
+
+    case BYTE_OPTIONAL_FIELD:
+      return Byte.valueOf(getByteOptionalField());
+
+    case I16_FIELD:
+      return Short.valueOf(getI16Field());
+
+    case I16_OPTIONAL_FIELD:
+      return Short.valueOf(getI16OptionalField());
+
+    case I32_FIELD:
+      return Integer.valueOf(getI32Field());
+
+    case I64_FIELD:
+      return Long.valueOf(getI64Field());
+
+    case DOUBLE_FIELD:
+      return Double.valueOf(getDoubleField());
+
+    case STRING_FIELD:
+      return getStringField();
+
+    case BINARY_FIELD:
+      return getBinaryField();
+
+    case MAP_FIELD:
+      return getMapField();
+
+    case LIST_FIELD:
+      return getListField();
+
+    case SET_FIELD:
+      return getSetField();
+
+    case ENUM_FIELD:
+      return getEnumField();
+
+    case STRUCT_FIELD:
+      return getStructField();
+
+    case FOO_OR_BAR:
+      return getFooOrBar();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case BOOL_FIELD:
+      return isSetBoolField();
+    case BYTE_FIELD:
+      return isSetByteField();
+    case BYTE_OPTIONAL_FIELD:
+      return isSetByteOptionalField();
+    case I16_FIELD:
+      return isSetI16Field();
+    case I16_OPTIONAL_FIELD:
+      return isSetI16OptionalField();
+    case I32_FIELD:
+      return isSetI32Field();
+    case I64_FIELD:
+      return isSetI64Field();
+    case DOUBLE_FIELD:
+      return isSetDoubleField();
+    case STRING_FIELD:
+      return isSetStringField();
+    case BINARY_FIELD:
+      return isSetBinaryField();
+    case MAP_FIELD:
+      return isSetMapField();
+    case LIST_FIELD:
+      return isSetListField();
+    case SET_FIELD:
+      return isSetSetField();
+    case ENUM_FIELD:
+      return isSetEnumField();
+    case STRUCT_FIELD:
+      return isSetStructField();
+    case FOO_OR_BAR:
+      return isSetFooOrBar();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof Test)
+      return this.equals((Test)that);
+    return false;
+  }
+
+  public boolean equals(Test that) {
+    if (that == null)
+      return false;
+
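+    // For each field the generated code first compares "presence" (always true
+    // for required fields, isSet for optional ones) and only then the values.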
+    boolean this_present_boolField = true;
+    boolean that_present_boolField = true;
+    if (this_present_boolField || that_present_boolField) {
+      if (!(this_present_boolField && that_present_boolField))
+        return false;
+      if (this.boolField != that.boolField)
+        return false;
+    }
+
+    boolean this_present_byteField = true;
+    boolean that_present_byteField = true;
+    if (this_present_byteField || that_present_byteField) {
+      if (!(this_present_byteField && that_present_byteField))
+        return false;
+      if (this.byteField != that.byteField)
+        return false;
+    }
+
+    boolean this_present_byteOptionalField = true && this.isSetByteOptionalField();
+    boolean that_present_byteOptionalField = true && that.isSetByteOptionalField();
+    if (this_present_byteOptionalField || that_present_byteOptionalField) {
+      if (!(this_present_byteOptionalField && that_present_byteOptionalField))
+        return false;
+      if (this.byteOptionalField != that.byteOptionalField)
+        return false;
+    }
+
+    boolean this_present_i16Field = true;
+    boolean that_present_i16Field = true;
+    if (this_present_i16Field || that_present_i16Field) {
+      if (!(this_present_i16Field && that_present_i16Field))
+        return false;
+      if (this.i16Field != that.i16Field)
+        return false;
+    }
+
+    boolean this_present_i16OptionalField = true && this.isSetI16OptionalField();
+    boolean that_present_i16OptionalField = true && that.isSetI16OptionalField();
+    if (this_present_i16OptionalField || that_present_i16OptionalField) {
+      if (!(this_present_i16OptionalField && that_present_i16OptionalField))
+        return false;
+      if (this.i16OptionalField != that.i16OptionalField)
+        return false;
+    }
+
+    boolean this_present_i32Field = true && this.isSetI32Field();
+    boolean that_present_i32Field = true && that.isSetI32Field();
+    if (this_present_i32Field || that_present_i32Field) {
+      if (!(this_present_i32Field && that_present_i32Field))
+        return false;
+      if (this.i32Field != that.i32Field)
+        return false;
+    }
+
+    boolean this_present_i64Field = true;
+    boolean that_present_i64Field = true;
+    if (this_present_i64Field || that_present_i64Field) {
+      if (!(this_present_i64Field && that_present_i64Field))
+        return false;
+      if (this.i64Field != that.i64Field)
+        return false;
+    }
+
+    boolean this_present_doubleField = true;
+    boolean that_present_doubleField = true;
+    if (this_present_doubleField || that_present_doubleField) {
+      if (!(this_present_doubleField && that_present_doubleField))
+        return false;
+      if (this.doubleField != that.doubleField)
+        return false;
+    }
+
+    boolean this_present_stringField = true && this.isSetStringField();
+    boolean that_present_stringField = true && that.isSetStringField();
+    if (this_present_stringField || that_present_stringField) {
+      if (!(this_present_stringField && that_present_stringField))
+        return false;
+      if (!this.stringField.equals(that.stringField))
+        return false;
+    }
+
+    boolean this_present_binaryField = true && this.isSetBinaryField();
+    boolean that_present_binaryField = true && that.isSetBinaryField();
+    if (this_present_binaryField || that_present_binaryField) {
+      if (!(this_present_binaryField && that_present_binaryField))
+        return false;
+      if (!this.binaryField.equals(that.binaryField))
+        return false;
+    }
+
+    boolean this_present_mapField = true && this.isSetMapField();
+    boolean that_present_mapField = true && that.isSetMapField();
+    if (this_present_mapField || that_present_mapField) {
+      if (!(this_present_mapField && that_present_mapField))
+        return false;
+      if (!this.mapField.equals(that.mapField))
+        return false;
+    }
+
+    boolean this_present_listField = true && this.isSetListField();
+    boolean that_present_listField = true && that.isSetListField();
+    if (this_present_listField || that_present_listField) {
+      if (!(this_present_listField && that_present_listField))
+        return false;
+      if (!this.listField.equals(that.listField))
+        return false;
+    }
+
+    boolean this_present_setField = true && this.isSetSetField();
+    boolean that_present_setField = true && that.isSetSetField();
+    if (this_present_setField || that_present_setField) {
+      if (!(this_present_setField && that_present_setField))
+        return false;
+      if (!this.setField.equals(that.setField))
+        return false;
+    }
+
+    boolean this_present_enumField = true && this.isSetEnumField();
+    boolean that_present_enumField = true && that.isSetEnumField();
+    if (this_present_enumField || that_present_enumField) {
+      if (!(this_present_enumField && that_present_enumField))
+        return false;
+      if (!this.enumField.equals(that.enumField))
+        return false;
+    }
+
+    boolean this_present_structField = true && this.isSetStructField();
+    boolean that_present_structField = true && that.isSetStructField();
+    if (this_present_structField || that_present_structField) {
+      if (!(this_present_structField && that_present_structField))
+        return false;
+      if (!this.structField.equals(that.structField))
+        return false;
+    }
+
+    boolean this_present_fooOrBar = true && this.isSetFooOrBar();
+    boolean that_present_fooOrBar = true && that.isSetFooOrBar();
+    if (this_present_fooOrBar || that_present_fooOrBar) {
+      if (!(this_present_fooOrBar && that_present_fooOrBar))
+        return false;
+      if (!this.fooOrBar.equals(that.fooOrBar))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
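+    // The generator emits a constant here: legal under the equals/hashCode
+    // contract, but it puts every instance into the same hash bucket.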
+    return 0;
+  }
+
+  @Override
+  public int compareTo(Test other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+
+    lastComparison = Boolean.valueOf(isSetBoolField()).compareTo(other.isSetBoolField());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetBoolField()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.boolField, other.boolField);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetByteField()).compareTo(other.isSetByteField());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetByteField()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.byteField, other.byteField);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetByteOptionalField()).compareTo(other.isSetByteOptionalField());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetByteOptionalField()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.byteOptionalField, other.byteOptionalField);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetI16Field()).compareTo(other.isSetI16Field());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetI16Field()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.i16Field, other.i16Field);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetI16OptionalField()).compareTo(other.isSetI16OptionalField());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetI16OptionalField()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.i16OptionalField, other.i16OptionalField);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetI32Field()).compareTo(other.isSetI32Field());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetI32Field()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.i32Field, other.i32Field);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetI64Field()).compareTo(other.isSetI64Field());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetI64Field()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.i64Field, other.i64Field);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetDoubleField()).compareTo(other.isSetDoubleField());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetDoubleField()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.doubleField, other.doubleField);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetStringField()).compareTo(other.isSetStringField());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetStringField()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.stringField, other.stringField);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetBinaryField()).compareTo(other.isSetBinaryField());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetBinaryField()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.binaryField, other.binaryField);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetMapField()).compareTo(other.isSetMapField());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetMapField()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.mapField, other.mapField);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetListField()).compareTo(other.isSetListField());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetListField()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.listField, other.listField);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetSetField()).compareTo(other.isSetSetField());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetSetField()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.setField, other.setField);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetEnumField()).compareTo(other.isSetEnumField());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetEnumField()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.enumField, other.enumField);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetStructField()).compareTo(other.isSetStructField());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetStructField()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.structField, other.structField);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetFooOrBar()).compareTo(other.isSetFooOrBar());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetFooOrBar()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.fooOrBar, other.fooOrBar);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
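+  // (De)serialization is delegated to a scheme keyed by the protocol in use:
+  // StandardScheme for field-tagged protocols, TupleScheme for TTupleProtocol.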
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("Test(");
+    boolean first = true;
+
+    sb.append("boolField:");
+    sb.append(this.boolField);
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("byteField:");
+    sb.append(this.byteField);
+    first = false;
+    if (isSetByteOptionalField()) {
+      if (!first) sb.append(", ");
+      sb.append("byteOptionalField:");
+      sb.append(this.byteOptionalField);
+      first = false;
+    }
+    if (!first) sb.append(", ");
+    sb.append("i16Field:");
+    sb.append(this.i16Field);
+    first = false;
+    if (isSetI16OptionalField()) {
+      if (!first) sb.append(", ");
+      sb.append("i16OptionalField:");
+      sb.append(this.i16OptionalField);
+      first = false;
+    }
+    if (isSetI32Field()) {
+      if (!first) sb.append(", ");
+      sb.append("i32Field:");
+      sb.append(this.i32Field);
+      first = false;
+    }
+    if (!first) sb.append(", ");
+    sb.append("i64Field:");
+    sb.append(this.i64Field);
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("doubleField:");
+    sb.append(this.doubleField);
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("stringField:");
+    if (this.stringField == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.stringField);
+    }
+    first = false;
+    if (isSetBinaryField()) {
+      if (!first) sb.append(", ");
+      sb.append("binaryField:");
+      if (this.binaryField == null) {
+        sb.append("null");
+      } else {
+        org.apache.thrift.TBaseHelper.toString(this.binaryField, sb);
+      }
+      first = false;
+    }
+    if (!first) sb.append(", ");
+    sb.append("mapField:");
+    if (this.mapField == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.mapField);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("listField:");
+    if (this.listField == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.listField);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("setField:");
+    if (this.setField == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.setField);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("enumField:");
+    if (this.enumField == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.enumField);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("structField:");
+    if (this.structField == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.structField);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("fooOrBar:");
+    if (this.fooOrBar == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.fooOrBar);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    // check for sub-struct validity
+    if (structField != null) {
+      structField.validate();
+    }
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+      __isset_bitfield = 0;
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class TestStandardSchemeFactory implements SchemeFactory {
+    public TestStandardScheme getScheme() {
+      return new TestStandardScheme();
+    }
+  }
+
+  private static class TestStandardScheme extends StandardScheme<Test> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, Test struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
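+      // Fields arrive tagged with a (type, id) header, so unknown ids can be
+      // skipped for forward compatibility; a STOP marker ends the struct.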
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // BOOL_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
+              struct.boolField = iprot.readBool();
+              struct.setBoolFieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 2: // BYTE_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.BYTE) {
+              struct.byteField = iprot.readByte();
+              struct.setByteFieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 16: // BYTE_OPTIONAL_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.BYTE) {
+              struct.byteOptionalField = iprot.readByte();
+              struct.setByteOptionalFieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 3: // I16_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.I16) {
+              struct.i16Field = iprot.readI16();
+              struct.setI16FieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 15: // I16_OPTIONAL_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.I16) {
+              struct.i16OptionalField = iprot.readI16();
+              struct.setI16OptionalFieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 4: // I32_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.i32Field = iprot.readI32();
+              struct.setI32FieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 5: // I64_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
+              struct.i64Field = iprot.readI64();
+              struct.setI64FieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 6: // DOUBLE_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.DOUBLE) {
+              struct.doubleField = iprot.readDouble();
+              struct.setDoubleFieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 7: // STRING_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.stringField = iprot.readString();
+              struct.setStringFieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 8: // BINARY_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.binaryField = iprot.readBinary();
+              struct.setBinaryFieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 9: // MAP_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+              {
+                org.apache.thrift.protocol.TMap _map0 = iprot.readMapBegin();
+                struct.mapField = new HashMap<String,Integer>(2*_map0.size);
+                for (int _i1 = 0; _i1 < _map0.size; ++_i1)
+                {
+                  String _key2;
+                  int _val3;
+                  _key2 = iprot.readString();
+                  _val3 = iprot.readI32();
+                  struct.mapField.put(_key2, _val3);
+                }
+                iprot.readMapEnd();
+              }
+              struct.setMapFieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 10: // LIST_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+              {
+                org.apache.thrift.protocol.TList _list4 = iprot.readListBegin();
+                struct.listField = new ArrayList<Integer>(_list4.size);
+                for (int _i5 = 0; _i5 < _list4.size; ++_i5)
+                {
+                  int _elem6;
+                  _elem6 = iprot.readI32();
+                  struct.listField.add(_elem6);
+                }
+                iprot.readListEnd();
+              }
+              struct.setListFieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 11: // SET_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.SET) {
+              {
+                org.apache.thrift.protocol.TSet _set7 = iprot.readSetBegin();
+                struct.setField = new HashSet<Integer>(2*_set7.size);
+                for (int _i8 = 0; _i8 < _set7.size; ++_i8)
+                {
+                  int _elem9;
+                  _elem9 = iprot.readI32();
+                  struct.setField.add(_elem9);
+                }
+                iprot.readSetEnd();
+              }
+              struct.setSetFieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 12: // ENUM_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+              struct.enumField = E.findByValue(iprot.readI32());
+              struct.setEnumFieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 13: // STRUCT_FIELD
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+              struct.structField = new Nested();
+              struct.structField.read(iprot);
+              struct.setStructFieldIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 14: // FOO_OR_BAR
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+              struct.fooOrBar = new FooOrBar();
+              struct.fooOrBar.read(iprot);
+              struct.setFooOrBarIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, Test struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      oprot.writeFieldBegin(BOOL_FIELD_FIELD_DESC);
+      oprot.writeBool(struct.boolField);
+      oprot.writeFieldEnd();
+      oprot.writeFieldBegin(BYTE_FIELD_FIELD_DESC);
+      oprot.writeByte(struct.byteField);
+      oprot.writeFieldEnd();
+      oprot.writeFieldBegin(I16_FIELD_FIELD_DESC);
+      oprot.writeI16(struct.i16Field);
+      oprot.writeFieldEnd();
+      if (struct.isSetI32Field()) {
+        oprot.writeFieldBegin(I32_FIELD_FIELD_DESC);
+        oprot.writeI32(struct.i32Field);
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldBegin(I64_FIELD_FIELD_DESC);
+      oprot.writeI64(struct.i64Field);
+      oprot.writeFieldEnd();
+      oprot.writeFieldBegin(DOUBLE_FIELD_FIELD_DESC);
+      oprot.writeDouble(struct.doubleField);
+      oprot.writeFieldEnd();
+      if (struct.stringField != null) {
+        oprot.writeFieldBegin(STRING_FIELD_FIELD_DESC);
+        oprot.writeString(struct.stringField);
+        oprot.writeFieldEnd();
+      }
+      if (struct.binaryField != null) {
+        if (struct.isSetBinaryField()) {
+          oprot.writeFieldBegin(BINARY_FIELD_FIELD_DESC);
+          oprot.writeBinary(struct.binaryField);
+          oprot.writeFieldEnd();
+        }
+      }
+      if (struct.mapField != null) {
+        oprot.writeFieldBegin(MAP_FIELD_FIELD_DESC);
+        {
+          oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.I32, struct.mapField.size()));
+          for (Map.Entry<String, Integer> _iter10 : struct.mapField.entrySet())
+          {
+            oprot.writeString(_iter10.getKey());
+            oprot.writeI32(_iter10.getValue());
+          }
+          oprot.writeMapEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      if (struct.listField != null) {
+        oprot.writeFieldBegin(LIST_FIELD_FIELD_DESC);
+        {
+          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, struct.listField.size()));
+          for (int _iter11 : struct.listField)
+          {
+            oprot.writeI32(_iter11);
+          }
+          oprot.writeListEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      if (struct.setField != null) {
+        oprot.writeFieldBegin(SET_FIELD_FIELD_DESC);
+        {
+          oprot.writeSetBegin(new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.I32, struct.setField.size()));
+          for (int _iter12 : struct.setField)
+          {
+            oprot.writeI32(_iter12);
+          }
+          oprot.writeSetEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      if (struct.enumField != null) {
+        oprot.writeFieldBegin(ENUM_FIELD_FIELD_DESC);
+        oprot.writeI32(struct.enumField.getValue());
+        oprot.writeFieldEnd();
+      }
+      if (struct.structField != null) {
+        oprot.writeFieldBegin(STRUCT_FIELD_FIELD_DESC);
+        struct.structField.write(oprot);
+        oprot.writeFieldEnd();
+      }
+      if (struct.fooOrBar != null) {
+        oprot.writeFieldBegin(FOO_OR_BAR_FIELD_DESC);
+        struct.fooOrBar.write(oprot);
+        oprot.writeFieldEnd();
+      }
+      if (struct.isSetI16OptionalField()) {
+        oprot.writeFieldBegin(I16_OPTIONAL_FIELD_FIELD_DESC);
+        oprot.writeI16(struct.i16OptionalField);
+        oprot.writeFieldEnd();
+      }
+      if (struct.isSetByteOptionalField()) {
+        oprot.writeFieldBegin(BYTE_OPTIONAL_FIELD_FIELD_DESC);
+        oprot.writeByte(struct.byteOptionalField);
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class TestTupleSchemeFactory implements SchemeFactory {
+    public TestTupleScheme getScheme() {
+      return new TestTupleScheme();
+    }
+  }
+
+  private static class TestTupleScheme extends TupleScheme<Test> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, Test struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
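+      // The tuple scheme omits per-field headers: a leading 16-bit presence
+      // bitset records which fields follow, written positionally after it.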
+      BitSet optionals = new BitSet();
+      if (struct.isSetBoolField()) {
+        optionals.set(0);
+      }
+      if (struct.isSetByteField()) {
+        optionals.set(1);
+      }
+      if (struct.isSetByteOptionalField()) {
+        optionals.set(2);
+      }
+      if (struct.isSetI16Field()) {
+        optionals.set(3);
+      }
+      if (struct.isSetI16OptionalField()) {
+        optionals.set(4);
+      }
+      if (struct.isSetI32Field()) {
+        optionals.set(5);
+      }
+      if (struct.isSetI64Field()) {
+        optionals.set(6);
+      }
+      if (struct.isSetDoubleField()) {
+        optionals.set(7);
+      }
+      if (struct.isSetStringField()) {
+        optionals.set(8);
+      }
+      if (struct.isSetBinaryField()) {
+        optionals.set(9);
+      }
+      if (struct.isSetMapField()) {
+        optionals.set(10);
+      }
+      if (struct.isSetListField()) {
+        optionals.set(11);
+      }
+      if (struct.isSetSetField()) {
+        optionals.set(12);
+      }
+      if (struct.isSetEnumField()) {
+        optionals.set(13);
+      }
+      if (struct.isSetStructField()) {
+        optionals.set(14);
+      }
+      if (struct.isSetFooOrBar()) {
+        optionals.set(15);
+      }
+      oprot.writeBitSet(optionals, 16);
+      if (struct.isSetBoolField()) {
+        oprot.writeBool(struct.boolField);
+      }
+      if (struct.isSetByteField()) {
+        oprot.writeByte(struct.byteField);
+      }
+      if (struct.isSetByteOptionalField()) {
+        oprot.writeByte(struct.byteOptionalField);
+      }
+      if (struct.isSetI16Field()) {
+        oprot.writeI16(struct.i16Field);
+      }
+      if (struct.isSetI16OptionalField()) {
+        oprot.writeI16(struct.i16OptionalField);
+      }
+      if (struct.isSetI32Field()) {
+        oprot.writeI32(struct.i32Field);
+      }
+      if (struct.isSetI64Field()) {
+        oprot.writeI64(struct.i64Field);
+      }
+      if (struct.isSetDoubleField()) {
+        oprot.writeDouble(struct.doubleField);
+      }
+      if (struct.isSetStringField()) {
+        oprot.writeString(struct.stringField);
+      }
+      if (struct.isSetBinaryField()) {
+        oprot.writeBinary(struct.binaryField);
+      }
+      if (struct.isSetMapField()) {
+        {
+          oprot.writeI32(struct.mapField.size());
+          for (Map.Entry<String, Integer> _iter13 : struct.mapField.entrySet())
+          {
+            oprot.writeString(_iter13.getKey());
+            oprot.writeI32(_iter13.getValue());
+          }
+        }
+      }
+      if (struct.isSetListField()) {
+        {
+          oprot.writeI32(struct.listField.size());
+          for (int _iter14 : struct.listField)
+          {
+            oprot.writeI32(_iter14);
+          }
+        }
+      }
+      if (struct.isSetSetField()) {
+        {
+          oprot.writeI32(struct.setField.size());
+          for (int _iter15 : struct.setField)
+          {
+            oprot.writeI32(_iter15);
+          }
+        }
+      }
+      if (struct.isSetEnumField()) {
+        oprot.writeI32(struct.enumField.getValue());
+      }
+      if (struct.isSetStructField()) {
+        struct.structField.write(oprot);
+      }
+      if (struct.isSetFooOrBar()) {
+        struct.fooOrBar.write(oprot);
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, Test struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      BitSet incoming = iprot.readBitSet(16);
+      if (incoming.get(0)) {
+        struct.boolField = iprot.readBool();
+        struct.setBoolFieldIsSet(true);
+      }
+      if (incoming.get(1)) {
+        struct.byteField = iprot.readByte();
+        struct.setByteFieldIsSet(true);
+      }
+      if (incoming.get(2)) {
+        struct.byteOptionalField = iprot.readByte();
+        struct.setByteOptionalFieldIsSet(true);
+      }
+      if (incoming.get(3)) {
+        struct.i16Field = iprot.readI16();
+        struct.setI16FieldIsSet(true);
+      }
+      if (incoming.get(4)) {
+        struct.i16OptionalField = iprot.readI16();
+        struct.setI16OptionalFieldIsSet(true);
+      }
+      if (incoming.get(5)) {
+        struct.i32Field = iprot.readI32();
+        struct.setI32FieldIsSet(true);
+      }
+      if (incoming.get(6)) {
+        struct.i64Field = iprot.readI64();
+        struct.setI64FieldIsSet(true);
+      }
+      if (incoming.get(7)) {
+        struct.doubleField = iprot.readDouble();
+        struct.setDoubleFieldIsSet(true);
+      }
+      if (incoming.get(8)) {
+        struct.stringField = iprot.readString();
+        struct.setStringFieldIsSet(true);
+      }
+      if (incoming.get(9)) {
+        struct.binaryField = iprot.readBinary();
+        struct.setBinaryFieldIsSet(true);
+      }
+      if (incoming.get(10)) {
+        {
+          org.apache.thrift.protocol.TMap _map16 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.I32, iprot.readI32());
+          struct.mapField = new HashMap<String,Integer>(2*_map16.size);
+          for (int _i17 = 0; _i17 < _map16.size; ++_i17)
+          {
+            String _key18;
+            int _val19;
+            _key18 = iprot.readString();
+            _val19 = iprot.readI32();
+            struct.mapField.put(_key18, _val19);
+          }
+        }
+        struct.setMapFieldIsSet(true);
+      }
+      if (incoming.get(11)) {
+        {
+          org.apache.thrift.protocol.TList _list20 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, iprot.readI32());
+          struct.listField = new ArrayList<Integer>(_list20.size);
+          for (int _i21 = 0; _i21 < _list20.size; ++_i21)
+          {
+            int _elem22;
+            _elem22 = iprot.readI32();
+            struct.listField.add(_elem22);
+          }
+        }
+        struct.setListFieldIsSet(true);
+      }
+      if (incoming.get(12)) {
+        {
+          org.apache.thrift.protocol.TSet _set23 = new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.I32, iprot.readI32());
+          struct.setField = new HashSet<Integer>(2*_set23.size);
+          for (int _i24 = 0; _i24 < _set23.size; ++_i24)
+          {
+            int _elem25;
+            _elem25 = iprot.readI32();
+            struct.setField.add(_elem25);
+          }
+        }
+        struct.setSetFieldIsSet(true);
+      }
+      if (incoming.get(13)) {
+        struct.enumField = E.findByValue(iprot.readI32());
+        struct.setEnumFieldIsSet(true);
+      }
+      if (incoming.get(14)) {
+        struct.structField = new Nested();
+        struct.structField.read(iprot);
+        struct.setStructFieldIsSet(true);
+      }
+      if (incoming.get(15)) {
+        struct.fooOrBar = new FooOrBar();
+        struct.fooOrBar.read(iprot);
+        struct.setFooOrBarIsSet(true);
+      }
+    }
+  }
+
+}
+
diff --git a/lang/java/thrift/src/test/thrift/test.thrift b/lang/java/thrift/src/test/thrift/test.thrift
new file mode 100644
index 0000000..34c5ddc
--- /dev/null
+++ b/lang/java/thrift/src/test/thrift/test.thrift
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+namespace java org.apache.avro.thrift.test
+
+enum E {
+  X = 1,
+  Y = 2,
+  Z = 3,
+}
+
+struct Nested {
+  1: i32 x
+}
+
+union FooOrBar {
+  1: string foo;
+  2: string bar;
+}
+
+
+// contains each primitive type
+struct Test {
+  1: bool boolField
+  2: byte byteField
+ 16: optional byte byteOptionalField
+  3: i16 i16Field
+ 15: optional i16 i16OptionalField
+  4: optional i32 i32Field
+  5: i64 i64Field
+  6: double doubleField
+  7: string stringField
+  8: optional binary binaryField
+  9: map<string,i32> mapField
+ 10: list<i32> listField
+ 11: set<i32> setField
+ 12: E enumField
+ 13: Nested structField
+ 14: FooOrBar fooOrBar
+}
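+// Note that field ids 15 and 16 appear out of numeric order above; Thrift only
+// requires ids to be unique within the struct, not sequential.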
+
+exception Error {
+  1: string message,
+}
+
+service Foo {
+
+   void ping(),
+
+   i32 add(1:i32 num1, 2:i32 num2),
+
+   oneway void zip(),
+}
diff --git a/lang/java/tools/pom.xml b/lang/java/tools/pom.xml
new file mode 100644
index 0000000..787d53c
--- /dev/null
+++ b/lang/java/tools/pom.xml
@@ -0,0 +1,185 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <artifactId>avro-parent</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.8.0</version>
+    <relativePath>../</relativePath>
+  </parent>
+
+  <artifactId>avro-tools</artifactId>
+
+  <name>Apache Avro Tools</name>
+  <url>http://avro.apache.org</url>
+  <description>Avro command line tools and utilities</description>
+
+  <build>
+    <resources>
+      <resource>
+        <includes>
+          <include>VERSION.txt</include>
+        </includes>
+        <directory>../../../share/</directory>
+      </resource>
+      <resource>
+        <includes>
+          <include>NOTICE.txt</include>
+        </includes>
+        <directory>../../../</directory>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <!-- primary artifact is shaded -->
+        <executions>
+          <execution>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <createDependencyReducedPom>false</createDependencyReducedPom>
+              <transformers>
+                <transformer
+                  implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                  <mainClass>org.apache.avro.tool.Main</mainClass>
+                </transformer>
+                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
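+                <!-- ServicesResourceTransformer concatenates META-INF/services
+                     entries from all shaded dependencies so ServiceLoader still
+                     finds every provider in the fat jar. -->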
+              </transformers>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <!-- 'nodeps' artifact is standalone -->
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>jar</goal>
+            </goals>
+            <configuration>
+              <classifier>nodeps</classifier>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro</artifactId>
+      <version>${project.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro-compiler</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro-ipc</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro-mapred</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <!-- For testing TetherTool we need the mapred test jar
+         because that contains the word count example. -->
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>avro-mapred</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-cli</groupId>
+      <artifactId>commons-cli</artifactId>
+      <version>${commons-cli.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>${commons-logging.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-httpclient</groupId>
+      <artifactId>commons-httpclient</artifactId>
+      <version>${commons-httpclient.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>trevni-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>trevni-avro</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>trevni-core</artifactId>
+      <classifier>tests</classifier>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>trevni-avro</artifactId>
+      <classifier>tests</classifier>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-core-asl</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>net.sf.jopt-simple</groupId>
+      <artifactId>jopt-simple</artifactId>
+    </dependency>
+  </dependencies>
+
+</project>
+
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/BinaryFragmentToJsonTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/BinaryFragmentToJsonTool.java
new file mode 100644
index 0000000..398fff1
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/BinaryFragmentToJsonTool.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.BinaryDecoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.io.JsonEncoder;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+
+/** Converts an input file from Avro binary into JSON. */
+public class BinaryFragmentToJsonTool implements Tool {
+  @Override
+  public int run(InputStream stdin, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+    OptionParser optionParser = new OptionParser();
+    OptionSpec<Void> noPrettyOption = optionParser
+        .accepts("no-pretty", "Turns off pretty printing.");
+    OptionSpec<String> schemaFileOption = optionParser
+        .accepts("schema-file", "File containing schema, must not occur with inline schema.")
+        .withOptionalArg()
+        .ofType(String.class);
+    
+    OptionSet optionSet = optionParser.parse(args.toArray(new String[0]));
+    boolean noPretty = optionSet.has(noPrettyOption);
+    List<String> nargs = (List<String>)optionSet.nonOptionArguments();
+    String schemaFile = schemaFileOption.value(optionSet);
+    
+    if (nargs.size() != (schemaFile == null ? 2 : 1)) {
+      err.println("fragtojson --no-pretty --schema-file <file> [inline-schema] input-file");
+      err.println("   converts Avro fragments to JSON.");
+      optionParser.printHelpOn(err);
+      err.println("   A dash '-' for input-file means stdin.");
+      return 1;
+    }
+    Schema schema;
+    String inputFile;
+    if (schemaFile == null) {
+      schema = new Schema.Parser().parse(nargs.get(0));
+      inputFile = nargs.get(1);
+    } else {
+      schema = new Schema.Parser().parse(Util.openFromFS(schemaFile));
+      inputFile = nargs.get(0);
+    }
+    InputStream input = Util.fileOrStdin(inputFile, stdin);
+
+    try {
+      DatumReader<Object> reader = new GenericDatumReader<Object>(schema);
+      BinaryDecoder binaryDecoder =
+        DecoderFactory.get().binaryDecoder(input, null);
+      DatumWriter<Object> writer = new GenericDatumWriter<Object>(schema);
+      JsonEncoder jsonEncoder = EncoderFactory.get().jsonEncoder(schema, out, !noPretty);
+      Object datum = null;
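+      // Passing the previous datum back into read() lets the reader reuse the
+      // object graph instead of allocating a new one for every record.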
+      while (!binaryDecoder.isEnd()){
+        datum = reader.read(datum, binaryDecoder);
+        writer.write(datum, jsonEncoder);
+        jsonEncoder.flush();
+      }
+      out.println();
+      out.flush();
+    } finally {
+      Util.close(input);
+    }
+    return 0;
+  }
+
+  @Override
+  public String getName() {
+    return "fragtojson";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Renders a binary-encoded Avro datum as JSON.";
+  }
+}
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/CatTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/CatTool.java
new file mode 100644
index 0000000..1fd1087
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/CatTool.java
@@ -0,0 +1,216 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileConstants;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.Path;
+
+/** Tool to extract samples from one or more Avro data files. */
+public class CatTool implements Tool {
+  
+  private long totalCopied;
+  private double sampleCounter;
+  
+  private GenericRecord reuse;
+  private DataFileStream<GenericRecord> reader;
+  private DataFileWriter<GenericRecord> writer;
+  private Schema schema;
+  private List<Path> inFiles;
+  private int currentInput;
+  
+  @Override
+  public int run(InputStream in, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+    OptionParser optParser = new OptionParser();
+    OptionSpec<Long> offsetOpt = optParser
+      .accepts("offset", "offset for reading input")
+      .withRequiredArg()
+      .ofType(Long.class)
+      .defaultsTo(0L);
+    OptionSpec<Long> limitOpt = optParser
+      .accepts("limit", "maximum number of records in the outputfile")
+      .withRequiredArg()
+      .ofType(Long.class)
+      .defaultsTo(Long.MAX_VALUE); 
+    OptionSpec<Double> fracOpt = optParser
+      .accepts("samplerate", "rate at which records will be collected")
+      .withRequiredArg()
+      .ofType(Double.class)
+      .defaultsTo(1.0); 
+
+    OptionSet opts = optParser.parse(args.toArray(new String[0]));
+    List<String> nargs = (List<String>)opts.nonOptionArguments();
+    if (nargs.size() < 2) {
+      printHelp(out);
+      return 0;
+    }
+    
+    inFiles = Util.getFiles(nargs.subList(0, nargs.size()-1));
+
+    System.out.println("List of input files:");
+    for (Path p : inFiles) {
+      System.out.println(p);
+    }
+    currentInput = -1;
+    nextInput();
+   
+    OutputStream output = out;
+    String lastArg = nargs.get(nargs.size()-1);
+    if (nargs.size() > 1 && !lastArg.equals("-")) {
+      output = Util.createFromFS(lastArg);
+    }
+    writer = new DataFileWriter<GenericRecord>(
+        new GenericDatumWriter<GenericRecord>());
+    
+    String codecName = reader.getMetaString(DataFileConstants.CODEC);
+    CodecFactory codec = (codecName == null)
+        ? CodecFactory.fromString(DataFileConstants.NULL_CODEC)
+        : CodecFactory.fromString(codecName);
+    writer.setCodec(codec);
+    for (String key : reader.getMetaKeys()) {
+      if (!DataFileWriter.isReservedMeta(key)) {
+        writer.setMeta(key, reader.getMeta(key));
+      }
+    }
+    writer.create(schema, output);
+    
+    long offset = opts.valueOf(offsetOpt);
+    long limit = opts.valueOf(limitOpt);
+    double samplerate = opts.valueOf(fracOpt);
+    sampleCounter = 1;
+    totalCopied = 0;
+    reuse = null;
+    
+    if (limit < 0) {
+      System.out.println("limit has to be non-negative");
+      this.printHelp(out);
+      return 1;
+    }
+    if (offset < 0) {
+      System.out.println("offset has to be non-negative");
+      this.printHelp(out);
+      return 1;
+    }
+    if (samplerate < 0 || samplerate > 1) {
+      System.out.println("samplerate has to be a number between 0 and 1");
+      this.printHelp(out);
+      return 1;
+    }
+
+    skip(offset);
+    writeRecords(limit, samplerate);
+    System.out.println(totalCopied + " records written.");
+  
+    writer.flush();
+    writer.close();
+    Util.close(out);
+    return 0;
+  }
+  
+  private void nextInput() throws IOException {
+    currentInput++;
+    Path path = inFiles.get(currentInput);
+    FSDataInputStream input = new FSDataInputStream(Util.openFromFS(path));
+    reader = new DataFileStream<GenericRecord>(input, new GenericDatumReader<GenericRecord>());
+    if (schema == null) {                            // the first file's schema is saved
+      schema = reader.getSchema();
+    } else if (!schema.equals(reader.getSchema())) { // subsequent files must have an equal schema
+      throw new IOException("schemas don't match");
+    }
+  }
+  
+  private boolean hasNextInput() {
+    return inFiles.size() > (currentInput + 1);
+  }
+  
+  /** Skips a number of records from the input. */
+  private long skip(long skip) throws IOException {
+    long skipped = 0;
+    while (0 < skip && reader.hasNext()) {
+      reader.next(reuse);
+      skip--;
+      skipped++;
+    }
+    if ((0 < skip) && hasNextInput()) { // continue skipping in the next file
+      nextInput();
+      skipped = skipped + skip(skip);
+    }
+    return skipped;
+  }
+  
+  /** Writes records at the given sample rate.
+   * The first record after the offset is guaranteed to be taken. */
+  private long writeRecords(long count, double samplerate) throws IOException {
+    long written = 0;
+    while(written < count && reader.hasNext()) {
+      reuse = reader.next(reuse);
+      sampleCounter = sampleCounter + samplerate;
+      if (sampleCounter >= 1) {
+        writer.append(reuse);
+        written++;
+        sampleCounter--;
+      }
+    }
+    totalCopied = totalCopied + written;
+    if (written < count && hasNextInput()) { // goto next file
+      nextInput();
+      written = written + writeRecords(count - written, samplerate);  
+    }
+    return written;
+  }
+  
+  private void printHelp(PrintStream out) {
+    out.println("cat --offset <offset> --limit <limit> --samplerate <samplerate> [input-files...] output-file");
+    out.println();
+    out.println("extracts records from a list of input files into a new file.");
+    out.println("--offset      start of the extract");
+    out.println("--limit       maximum number of records in the output file.");
+    out.println("--samplerate  rate at which records will be collected");
+    out.println("A dash ('-') can be given to direct output to stdout");
+  }
+
+  @Override
+  public String getName() {
+    return "cat";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "extracts samples from files";
+  }
+  
+}
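
For reference, a minimal sketch of driving CatTool programmatically, equivalent to running "avro-tools cat --offset 10 --limit 100 in.avro out.avro"; the file names are hypothetical and the avro-tools classes are assumed to be on the classpath:

    import java.util.Arrays;
    import org.apache.avro.tool.CatTool;

    public class CatExample {
      public static void main(String[] args) throws Exception {
        // Skip 10 records, then copy at most 100 into out.avro.
        int rc = new CatTool().run(System.in, System.out, System.err,
            Arrays.asList("--offset", "10", "--limit", "100",
                          "in.avro", "out.avro"));
        System.exit(rc);
      }
    }
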
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/ConcatTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/ConcatTool.java
new file mode 100644
index 0000000..6026796
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/ConcatTool.java
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileConstants;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+
+/**
+ * Tool to concatenate Avro files with the same schema and non-reserved
+ * metadata.
+ */
+public class ConcatTool implements Tool {
+  /**
+   * @return 0 for success, 1 if the schemas of the input files differ, 2 if
+   *         the non-reserved input metadata differs, 3 if the input files are
+   *         encoded with more than one codec.
+   */
+  @Override
+  public int run(InputStream in, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+
+    if(args.isEmpty()) {
+      printHelp(out);
+      return 0;
+    }
+
+    OutputStream output = out;
+    if (args.size() > 1) {
+      output = Util.fileOrStdout(args.get(args.size() - 1), out);
+      args = args.subList(0, args.size() - 1);
+    }
+
+    DataFileWriter<GenericRecord> writer = new DataFileWriter<GenericRecord>(
+      new GenericDatumWriter<GenericRecord>());
+    Schema schema = null;
+    Map<String, byte[]> metadata = new TreeMap<String, byte[]>();
+    String inputCodec = null;
+
+    for (String inFile : args) {
+      InputStream input = Util.fileOrStdin(inFile, in);
+      DataFileStream<GenericRecord> reader = new DataFileStream<GenericRecord>(
+        input, new GenericDatumReader<GenericRecord>());
+
+      if (schema == null) {
+        // this is the first file - set up the writer, and store the
+        // Schema & metadata we'll use.
+        schema = reader.getSchema();
+        for (String key : reader.getMetaKeys()) {
+          if (!DataFileWriter.isReservedMeta(key)) {
+            byte[] metadatum = reader.getMeta(key);
+            metadata.put(key, metadatum);
+            writer.setMeta(key, metadatum);
+          }
+        }
+        inputCodec = reader.getMetaString(DataFileConstants.CODEC);
+        if(inputCodec == null) {
+          inputCodec = DataFileConstants.NULL_CODEC;
+        }
+        writer.setCodec(CodecFactory.fromString(inputCodec));
+        writer.create(schema, output);
+      } else {
+        // check that we're appending to the same schema & metadata.
+        if (!schema.equals(reader.getSchema())) {
+          err.println("input files have different schemas");
+          reader.close();
+          return 1;
+        }
+        for (String key : reader.getMetaKeys()) {
+          if (!DataFileWriter.isReservedMeta(key)) {
+            byte[] metadatum = reader.getMeta(key);
+            byte[] writersMetadatum = metadata.get(key);
+            if(!Arrays.equals(metadatum, writersMetadatum)) {
+              err.println("input files have different non-reserved metadata");
+              reader.close();
+              return 2;
+            }
+          }
+        }
+        String thisCodec = reader.getMetaString(DataFileConstants.CODEC);
+        if(thisCodec == null) {
+          thisCodec = DataFileConstants.NULL_CODEC;
+        }
+        if (!inputCodec.equals(thisCodec)) {
+          err.println("input files have different codecs");
+          reader.close();
+          return 3;
+        }
+      }
+
+      writer.appendAllFrom(reader, /*recompress*/ false);
+      reader.close();
+    }
+
+    writer.close();
+    return 0;
+  }
+
+  private void printHelp(PrintStream out) {
+    out.println("concat [input-file...] output-file");
+    out.println();
+    out.println("Concatenates one or more input files into a new output file");
+    out.println("by appending the input blocks without decoding them. The input");
+    out.println("files must have the same schema, metadata and codec. If they");
+    out.println("do not the tool will return the following error codes:");
+    out.println("  1 if the schemas don't match");
+    out.println("  2 if the metadata doesn't match");
+    out.println("  3 if the codecs don't match");
+    out.println("If no input files are given stdin will be used. The tool");
+    out.println("0 on success. A dash ('-') can be given as an input file");
+    out.println("to use stdin, and as an output file to use stdout.");
+
+  }
+
+  @Override
+  public String getName() {
+    return "concat";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Concatenates avro files without re-compressing.";
+  }
+}
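
The block-copying at the heart of this tool is DataFileWriter.appendAllFrom, which appends a reader's compressed blocks without decoding them. A minimal sketch of that technique on its own, assuming two existing files with identical schemas and codecs (file names hypothetical):

    import java.io.File;
    import java.io.FileInputStream;
    import org.apache.avro.file.DataFileStream;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericDatumReader;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecord;

    public class ConcatExample {
      public static void main(String[] args) throws Exception {
        DataFileStream<GenericRecord> first = new DataFileStream<GenericRecord>(
            new FileInputStream("part-1.avro"), new GenericDatumReader<GenericRecord>());
        DataFileWriter<GenericRecord> writer = new DataFileWriter<GenericRecord>(
            new GenericDatumWriter<GenericRecord>());
        writer.create(first.getSchema(), new File("merged.avro"));
        writer.appendAllFrom(first, false);   // copy blocks verbatim, no re-encoding
        first.close();
        DataFileStream<GenericRecord> second = new DataFileStream<GenericRecord>(
            new FileInputStream("part-2.avro"), new GenericDatumReader<GenericRecord>());
        writer.appendAllFrom(second, false);  // caller must ensure the schemas match
        second.close();
        writer.close();
      }
    }
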
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/CreateRandomFileTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/CreateRandomFileTool.java
new file mode 100644
index 0000000..0fad307
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/CreateRandomFileTool.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.trevni.avro.RandomData;
+
+/** Creates a file filled with randomly-generated instances of a schema. */
+public class CreateRandomFileTool implements Tool {
+
+  @Override
+  public String getName() {
+    return "random";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Creates a file with randomly generated instances of a schema.";
+  }
+
+  @Override
+  public int run(InputStream stdin, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+
+    OptionParser p = new OptionParser();
+    OptionSpec<Integer> count =
+      p.accepts("count", "Record Count")
+      .withRequiredArg()
+      .ofType(Integer.class);
+    OptionSpec<String> codec = Util.compressionCodecOption(p);
+    OptionSpec<Integer> level = Util.compressionLevelOption(p);
+    OptionSpec<String> file =
+        p.accepts("schema-file", "Schema File")
+        .withOptionalArg()
+        .ofType(String.class);
+    OptionSpec<String> inschema =
+        p.accepts("schema", "Schema")
+        .withOptionalArg()
+        .ofType(String.class);
+    OptionSet opts = p.parse(args.toArray(new String[0]));
+    if (opts.nonOptionArguments().size() != 1) {
+      err.println("Usage: outFile (filename or '-' for stdout)");
+      p.printHelpOn(err);
+      return 1;
+    }
+    args = (List<String>)opts.nonOptionArguments();
+
+    String schemastr = inschema.value(opts);
+    String schemafile = file.value(opts);
+    if (schemastr == null && schemafile == null) {
+        err.println("Need input schema (--schema-file) or (--schema)");
+        p.printHelpOn(err);
+        return 1;
+    }
+    Schema schema = (schemafile != null)
+        ? new Schema.Parser().parse(Util.openFromFS(schemafile))
+        : new Schema.Parser().parse(schemastr);
+
+    Integer numRecords = count.value(opts);
+    if (numRecords == null) {            // avoid an unboxing NPE when --count is absent
+      err.println("Need a record count (--count)");
+      p.printHelpOn(err);
+      return 1;
+    }
+
+    DataFileWriter<Object> writer =
+      new DataFileWriter<Object>(new GenericDatumWriter<Object>());
+    writer.setCodec(Util.codecFactory(opts, codec, level));
+    writer.create(schema, Util.fileOrStdout(args.get(0), out));
+
+    for (Object datum : new RandomData(schema, numRecords))
+      writer.append(datum);
+
+    writer.close();
+
+    return 0;
+  }
+
+}
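
A sketch of invoking the tool with an inline schema; the schema, count, and output name are illustrative only:

    import java.util.Arrays;
    import org.apache.avro.tool.CreateRandomFileTool;

    public class RandomExample {
      public static void main(String[] args) throws Exception {
        // Write 1000 random long values to rand.avro.
        int rc = new CreateRandomFileTool().run(System.in, System.out, System.err,
            Arrays.asList("--count", "1000", "--schema", "\"long\"", "rand.avro"));
        System.exit(rc);
      }
    }
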
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetMetaTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetMetaTool.java
new file mode 100644
index 0000000..bc9c6f3
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetMetaTool.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.mapred.FsInput;
+
+/** Reads a data file to get its metadata. */
+public class DataFileGetMetaTool implements Tool {
+
+  @Override
+  public String getName() {
+    return "getmeta";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Prints out the metadata of an Avro data file.";
+  }
+
+  @Override
+  public int run(InputStream stdin, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+    
+    OptionParser p = new OptionParser();
+    OptionSpec<String> keyOption =
+        p.accepts("key", "Metadata key")
+        .withOptionalArg()
+        .ofType(String.class);
+    OptionSet opts = p.parse(args.toArray(new String[0]));
+    String keyName = keyOption.value(opts);
+    
+    List<String> nargs = (List<String>)opts.nonOptionArguments();
+    if (nargs.size() != 1) {
+      err.println("Expected 1 arg: input_file");
+      p.printHelpOn(err);
+      return 1;
+    }
+    FsInput in = Util.openSeekableFromFS(nargs.get(0));
+    DataFileReader<Void> reader =
+      new DataFileReader<Void>(in, new GenericDatumReader<Void>());
+    if (keyName != null) {
+      byte[] value = reader.getMeta(keyName);
+      if (value != null) {
+        out.write(value, 0, value.length);
+        out.println();
+      }
+    } else {
+      List<String> keys = reader.getMetaKeys();
+      for (String key : keys) {
+        out.print(escapeKey(key));
+        out.print('\t');
+        byte[] value = reader.getMeta(key);
+        out.write(value, 0, value.length);
+        out.println();
+      }
+    }
+    return 0;
+  }
+  
+  // escape TAB, NL and CR in keys, so that output can be reliably parsed
+  static String escapeKey(String key) {
+    key = key.replace("\\","\\\\");               // escape backslashes first
+    key = key.replace("\t","\\t");                // TAB
+    key = key.replace("\n","\\n");                // NL
+    key = key.replace("\r","\\r");                // CR
+    return key;
+  }
+
+}
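
The same metadata is reachable directly through DataFileReader; a minimal sketch (hypothetical file name) that mirrors the tool's key/value listing:

    import java.io.File;
    import org.apache.avro.file.DataFileReader;
    import org.apache.avro.generic.GenericDatumReader;

    public class GetMetaExample {
      public static void main(String[] args) throws Exception {
        DataFileReader<Void> reader = new DataFileReader<Void>(
            new File("data.avro"), new GenericDatumReader<Void>());
        for (String key : reader.getMetaKeys()) {
          // getMetaString assumes UTF-8 values; reserved keys like
          // avro.schema and avro.codec appear in this listing too
          System.out.println(key + "\t" + reader.getMetaString(key));
        }
        reader.close();
      }
    }
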
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetSchemaTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetSchemaTool.java
new file mode 100644
index 0000000..16ae67f
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetSchemaTool.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericDatumReader;
+
+/** Reads a data file to get its schema. */
+public class DataFileGetSchemaTool implements Tool {
+
+  @Override
+  public String getName() {
+    return "getschema";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Prints out schema of an Avro data file.";
+  }
+
+  @Override
+  public int run(InputStream stdin, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+    if (args.size() != 1) {
+      err.println("Expected 1 argument: input_file");
+      return 1;
+    }
+    DataFileReader<Void> reader =
+      new DataFileReader<Void>(Util.openSeekableFromFS(args.get(0)),
+                               new GenericDatumReader<Void>());
+    out.println(reader.getSchema().toString(true));
+    return 0;
+  }
+}
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileReadTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileReadTool.java
new file mode 100644
index 0000000..fcc89ca
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileReadTool.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.BufferedInputStream;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.io.JsonEncoder;
+
+/** Reads an Avro data file and dumps it as JSON. */
+public class DataFileReadTool implements Tool {
+
+  @Override
+  public String getName() {
+    return "tojson";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Dumps an Avro data file as JSON, record per line or pretty.";
+  }
+
+  @Override
+  public int run(InputStream stdin, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+    OptionParser optionParser = new OptionParser();
+    OptionSpec<Void> prettyOption = optionParser
+        .accepts("pretty", "Turns on pretty printing.");
+
+    OptionSet optionSet = optionParser.parse(args.toArray(new String[0]));
+    Boolean pretty = optionSet.has(prettyOption);
+    List<String> nargs = (List<String>)optionSet.nonOptionArguments();
+
+    if (nargs.size() != 1) {
+      printHelp(err);
+      err.println();
+      optionParser.printHelpOn(err);
+      return 1;
+    }
+
+    BufferedInputStream inStream = Util.fileOrStdin(nargs.get(0), stdin);
+
+    GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
+    DataFileStream<Object> streamReader = new DataFileStream<Object>(inStream, reader);
+    try {
+      Schema schema = streamReader.getSchema();
+      DatumWriter<Object> writer = new GenericDatumWriter<Object>(schema);
+      JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, out, pretty);
+      for (Object datum : streamReader)
+        writer.write(datum, encoder);
+      encoder.flush();
+      out.println();
+      out.flush();
+    } finally {
+      streamReader.close();
+    }
+    return 0;
+  }
+
+  private void printHelp(PrintStream ps) {
+    ps.println("tojson --pretty input-file");
+    ps.println();
+    ps.println(getShortDescription());
+    ps.println("A dash ('-') can be given as an input file to use stdin");
+  }
+}
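
The dump loop above is ordinary encoder plumbing; the same output can be produced directly, as in this short sketch (hypothetical input file):

    import java.io.File;
    import org.apache.avro.Schema;
    import org.apache.avro.file.DataFileReader;
    import org.apache.avro.generic.GenericDatumReader;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.io.EncoderFactory;
    import org.apache.avro.io.JsonEncoder;

    public class ToJsonExample {
      public static void main(String[] args) throws Exception {
        DataFileReader<Object> reader = new DataFileReader<Object>(
            new File("data.avro"), new GenericDatumReader<Object>());
        Schema schema = reader.getSchema();
        GenericDatumWriter<Object> writer = new GenericDatumWriter<Object>(schema);
        JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, System.out, true);
        for (Object datum : reader)
          writer.write(datum, encoder);  // one JSON value per record
        encoder.flush();
        reader.close();
      }
    }
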
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileRepairTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileRepairTool.java
new file mode 100644
index 0000000..0fdd8e5
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileRepairTool.java
@@ -0,0 +1,308 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileConstants;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+
+/** Recovers data from a corrupt Avro Data file */
+public class DataFileRepairTool implements Tool {
+
+  @Override
+  public String getName() {
+    return "repair";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Recovers data from a corrupt Avro Data file";
+  }
+  
+  private void printInfo(PrintStream output) {
+    output.println("Insufficient arguments.  Arguments:  [-o option] "
+        + "input_file output_file \n"
+        + "   Where option is one of the following: \n"
+        + "      " + ALL
+        + " (default) recover as many records as possible.\n"
+        + "      " + PRIOR
+        + "         recover only records prior to the first instance"
+        + " of corruption \n"
+        + "      " + AFTER
+        + "         recover only records after the first instance of"
+        + " corruption.\n"
+        + "      " + REPORT
+        + "        print the corruption report only, reporting the\n"
+        + "                    number of valid and corrupted blocks and records\n"
+        + "   input_file is the file to read from.  output_file is the file to\n"
+        + "   create and write recovered data to.  output_file is ignored if\n"
+        + "   using the report option.");
+  }
+
+  private static final Set<String> OPTIONS = new HashSet<String>();
+  private static final String ALL = "all";
+  private static final String PRIOR = "prior";
+  private static final String AFTER = "after";
+  private static final String REPORT = "report";
+  static {
+    OPTIONS.add(ALL);
+    OPTIONS.add(PRIOR);
+    OPTIONS.add(AFTER);
+    OPTIONS.add(REPORT);
+  }
+
+  @Override
+  public int run(InputStream stdin, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+    if (args.size() < 2) {
+      printInfo(err);
+      return 1;
+    }
+    int index = 0;
+    String input = args.get(index);
+    String option = "all";
+    if ("-o".equals(input)) {
+      option = args.get(1);
+      index += 2;
+    }
+    if (!OPTIONS.contains(option) || (args.size() - index < 1)) {
+      printInfo(err);
+      return 1;
+    }
+    input = args.get(index++);
+    if (!REPORT.equals(option)) {
+      if (args.size() - index < 1) {
+        printInfo(err);
+        return 1;
+      } 
+    }
+    if (ALL.equals(option)) {
+      return recoverAll(input, args.get(index), out, err);
+    } else if (PRIOR.equals(option)) {
+      return recoverPrior(input, args.get(index), out, err);
+    } else if (AFTER.equals(option)) {
+      return recoverAfter(input, args.get(index), out, err);
+    } else if (REPORT.equals(option)) {
+      return reportOnly(input, out, err);
+    } else {
+      return 1;
+    }
+  }
+
+  private int recover(String input, String output, PrintStream out,
+      PrintStream err, boolean recoverPrior, boolean recoverAfter)
+      throws IOException {
+    File infile = new File(input);
+    if (!infile.canRead()) {
+      err.println("cannot read file: " + input);
+      return 1;
+    }
+    out.println("Recovering file: " + input);
+    GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
+    DataFileReader<Object> fileReader = new DataFileReader<Object>(infile,
+        reader);
+    try {
+      Schema schema = fileReader.getSchema();
+      String codecStr = fileReader.getMetaString(DataFileConstants.CODEC);
+      // a missing codec (null) becomes the string "null" here, which names the null codec
+      CodecFactory codecFactory = CodecFactory.fromString("" + codecStr);
+      List<String> metas = fileReader.getMetaKeys();
+      if (recoverPrior || recoverAfter) {
+        GenericDatumWriter<Object> writer = new GenericDatumWriter<Object>();
+        DataFileWriter<Object> fileWriter = new DataFileWriter<Object>(writer);
+        try {
+          File outfile = new File(output);
+          for (String key : metas) {
+            if (!key.startsWith("avro.")) {
+              byte[] val = fileReader.getMeta(key);
+              fileWriter.setMeta(key, val);
+            }
+          }
+          fileWriter.setCodec(codecFactory);
+          int result = innerRecover(fileReader, fileWriter, out, err, recoverPrior,
+              recoverAfter, schema, outfile);
+          return result;
+        } catch (Exception e) {
+          e.printStackTrace(err);
+          return 1;
+        } 
+      } else {
+        return innerRecover(fileReader, null, out, err, recoverPrior,
+            recoverAfter, null, null);
+      }
+
+    } finally {
+      fileReader.close();
+    }
+  }
+
+  private int innerRecover(DataFileReader<Object> fileReader,
+      DataFileWriter<Object> fileWriter, PrintStream out, PrintStream err,
+      boolean recoverPrior, boolean recoverAfter, Schema schema, File outfile) {
+    int numBlocks = 0;
+    int numCorruptBlocks = 0;
+    int numRecords = 0;
+    int numCorruptRecords = 0;
+    int recordsWritten = 0;
+    long position = fileReader.previousSync();
+    long blockSize = 0;
+    long blockCount = 0;
+    boolean fileWritten = false;
+    try {
+      while (true) {
+        try {
+          if (!fileReader.hasNext()) {
+            out.println("File Summary: ");
+            out.println("  Number of blocks: " + numBlocks
+                + " Number of corrupt blocks: " + numCorruptBlocks);
+            out.println("  Number of records: " + numRecords
+                + " Number of corrupt records: " + numCorruptRecords);
+            if (recoverAfter || recoverPrior) {
+              out.println("  Number of records written " + recordsWritten);
+            }
+            out.println();
+            return 0;
+          }
+          position = fileReader.previousSync();
+          blockCount = fileReader.getBlockCount();
+          blockSize = fileReader.getBlockSize();
+          numRecords += blockCount;
+          long blockRemaining = blockCount;
+          numBlocks++;
+          boolean lastRecordWasBad = false;
+          long badRecordsInBlock = 0;
+          while (blockRemaining > 0) {
+            try {
+              Object datum = fileReader.next();
+              if ((recoverPrior && numCorruptBlocks == 0)
+                  || (recoverAfter && numCorruptBlocks > 0)) {
+                if (!fileWritten) {
+                  try {
+                    fileWriter.create(schema, outfile);
+                    fileWritten = true;
+                  } catch (Exception e) {
+                    e.printStackTrace(err);
+                    return 1;
+                  }
+                }
+                try {
+                  fileWriter.append(datum);
+                  recordsWritten++;
+                } catch (Exception e) {
+                  e.printStackTrace(err);
+                  throw e;
+                }
+              }
+              blockRemaining--;
+              lastRecordWasBad = false;
+            } catch (Exception e) {
+              long pos = blockCount - blockRemaining;
+              if (badRecordsInBlock == 0) {
+                // first corrupt record
+                numCorruptBlocks++;
+                err.println("Corrupt block: " + numBlocks + " Records in block: "
+                    + blockCount + " uncompressed block size: " + blockSize);
+                err.println("Corrupt record at position: "
+                    + (pos));
+              } else {
+                // second bad record in block, if consecutive skip block.
+                err.println("Corrupt record at position: "
+                    + (pos));
+                if (lastRecordWasBad) {
+                  // consecutive bad record
+                  err.println("Second consecutive bad record in block: " + numBlocks 
+                      + ". Skipping remainder of block. ");
+                  numCorruptRecords += blockRemaining;
+                  badRecordsInBlock += blockRemaining;
+                  try {
+                    fileReader.sync(position);
+                  } catch (Exception e2) {
+                    err.println("failed to sync to sync marker, aborting");
+                    e2.printStackTrace(err);
+                    return 1;
+                  }
+                  break;
+                }
+              }
+              blockRemaining--;
+              lastRecordWasBad = true;
+              numCorruptRecords++;
+              badRecordsInBlock++;
+            }
+          }
+          if (badRecordsInBlock != 0) {
+            err.println("** Number of unrecoverable records in block: "
+                + (badRecordsInBlock));
+          }
+          position = fileReader.previousSync();
+        } catch (Exception e) {
+          err.println("Failed to read block " + numBlocks + ". Unknown record "
+              + "count in block.  Skipping. Reason: " + e.getMessage());
+          numCorruptBlocks++;
+          try {
+            fileReader.sync(position);
+          } catch (Exception e2) {
+            err.println("failed to sync to sync marker, aborting");
+            e2.printStackTrace(err);
+            return 1;
+          }
+        } 
+      }
+    } finally {
+      if (fileWritten) {
+        try {
+          fileWriter.close();
+        } catch (Exception e) {
+          e.printStackTrace(err);
+          return 1;
+        }
+      }
+    }
+  }
+
+  private int reportOnly(String input, PrintStream out, PrintStream err)
+      throws IOException {
+    return recover(input, null, out, err, false, false);
+  }
+
+  private int recoverAfter(String input, String output, PrintStream out,
+      PrintStream err) throws IOException {
+    return recover(input, output, out, err, false, true);
+  }
+
+  private int recoverPrior(String input, String output, PrintStream out,
+      PrintStream err) throws IOException {
+    return recover(input, output, out, err, true, false);
+  }
+
+  private int recoverAll(String input, String output, PrintStream out,
+      PrintStream err) throws IOException {
+    return recover(input, output, out, err, true, true);
+  }
+}
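
A sketch of running the corruption report only, where no output file is needed (input name hypothetical):

    import java.util.Arrays;
    import org.apache.avro.tool.DataFileRepairTool;

    public class RepairExample {
      public static void main(String[] args) throws Exception {
        // Prints counts of valid and corrupt blocks/records; writes nothing.
        int rc = new DataFileRepairTool().run(System.in, System.out, System.err,
            Arrays.asList("-o", "report", "damaged.avro"));
        System.exit(rc);
      }
    }
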
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileWriteTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileWriteTool.java
new file mode 100644
index 0000000..ba17775
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileWriteTool.java
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.DataInputStream;
+import java.io.EOFException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileConstants;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.DecoderFactory;
+
+/** Reads new-line delimited JSON records and writes an Avro data file. */
+public class DataFileWriteTool implements Tool {
+
+  @Override
+  public String getName() {
+    return "fromjson";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Reads JSON records and writes an Avro data file.";
+  }
+
+  @Override
+  public int run(InputStream stdin, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+
+    OptionParser p = new OptionParser();
+    OptionSpec<String> codec = Util.compressionCodecOption(p);
+    OptionSpec<Integer> level = Util.compressionLevelOption(p);
+    OptionSpec<String> file =
+        p.accepts("schema-file", "Schema File")
+        .withOptionalArg()
+        .ofType(String.class);
+    OptionSpec<String> inschema =
+        p.accepts("schema", "Schema")
+        .withOptionalArg()
+        .ofType(String.class);
+    OptionSet opts = p.parse(args.toArray(new String[0]));
+
+    List<String> nargs = (List<String>)opts.nonOptionArguments();
+    if (nargs.size() != 1) {
+      err.println("Expected 1 arg: input_file");
+      p.printHelpOn(err);
+      return 1;
+    }
+    String schemastr = inschema.value(opts);
+    String schemafile = file.value(opts);
+    if (schemastr == null && schemafile == null) {
+        err.println("Need an input schema file (--schema-file) or inline schema (--schema)");
+        p.printHelpOn(err);
+        return 1;
+    }
+    Schema schema = (schemafile != null)
+        ? new Schema.Parser().parse(Util.openFromFS(schemafile))
+        : new Schema.Parser().parse(schemastr);
+    
+    DatumReader<Object> reader = new GenericDatumReader<Object>(schema);
+
+    InputStream input = Util.fileOrStdin(nargs.get(0), stdin);
+    try {
+      DataInputStream din = new DataInputStream(input);
+      DataFileWriter<Object> writer =
+        new DataFileWriter<Object>(new GenericDatumWriter<Object>());
+      writer.setCodec(Util.codecFactory(opts, codec, level, DataFileConstants.NULL_CODEC));
+      writer.create(schema, out);
+      Decoder decoder = DecoderFactory.get().jsonDecoder(schema, din);
+      Object datum;
+      while (true) {
+        try {
+          datum = reader.read(null, decoder);
+        } catch (EOFException e) {
+          break;
+        }
+        writer.append(datum);
+      }
+      writer.close();
+    } finally {
+      Util.close(input);
+    }
+    return 0;
+  }
+
+}
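
The read loop relies on the JSON decoder yielding successive datums until EOF. A self-contained sketch of that behavior, using an in-memory stream of three longs:

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericDatumReader;
    import org.apache.avro.io.Decoder;
    import org.apache.avro.io.DecoderFactory;

    public class JsonDecodeExample {
      public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse("\"long\"");
        InputStream json = new ByteArrayInputStream("1\n2\n3\n".getBytes("UTF-8"));
        Decoder decoder = DecoderFactory.get().jsonDecoder(schema, json);
        GenericDatumReader<Object> reader = new GenericDatumReader<Object>(schema);
        System.out.println(reader.read(null, decoder));  // 1
        System.out.println(reader.read(null, decoder));  // 2
        System.out.println(reader.read(null, decoder));  // 3
      }
    }
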
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/FromTextTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/FromTextTool.java
new file mode 100644
index 0000000..115155e
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/FromTextTool.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumWriter;
+
+/** Reads a text file into an Avro data file.
+ *
+ * Can accept a file name, an HDFS file URI, or stdin. Can write to a file
+ * name, an HDFS URI, or stdout. */
+public class FromTextTool implements Tool {
+  private static final String TEXT_FILE_SCHEMA = 
+    "\"bytes\"";
+  
+  @Override
+  public String getName() {
+    return "fromtext";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Imports a text file into an avro data file.";
+  }
+
+  @Override
+  public int run(InputStream stdin, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+    
+    OptionParser p = new OptionParser();
+    OptionSpec<Integer> level = Util.compressionLevelOption(p);
+    OptionSpec<String> codec = Util.compressionCodecOption(p);
+
+    OptionSet opts = p.parse(args.toArray(new String[0]));
+
+    List<String> nargs = (List<String>)opts.nonOptionArguments();
+    if (nargs.size() != 2) {
+      err.println("Expected 2 args: from_file to_file (local filenames," +
+          " Hadoop URI's, or '-' for stdin/stdout");
+      p.printHelpOn(err);
+      return 1;
+    }
+ 
+    CodecFactory codecFactory = Util.codecFactory(opts, codec, level);
+  
+    BufferedInputStream inStream = Util.fileOrStdin(nargs.get(0), stdin);
+    BufferedOutputStream outStream = Util.fileOrStdout(nargs.get(1), out);
+    
+    DataFileWriter<ByteBuffer> writer =
+        new DataFileWriter<ByteBuffer>(new GenericDatumWriter<ByteBuffer>());
+    writer.setCodec(codecFactory);
+    writer.create(Schema.parse(TEXT_FILE_SCHEMA), outStream);
+
+    ByteBuffer line = ByteBuffer.allocate(128);
+    boolean returnSeen = false;
+    byte[] buf = new byte[8192];
+    for (int end = inStream.read(buf); end != -1; end = inStream.read(buf)) {
+      for (int i = 0; i < end; i++) {
+        int b = buf[i] & 0xFF;
+        if (b == '\n') {                          // newline
+          if (!returnSeen) {
+            System.out.println("Writing line = "+line.position());
+            line.flip();
+            writer.append(line);
+            line.clear();
+          } else {
+            returnSeen = false;
+          }
+        } else if (b == '\r') {                   // return
+          line.flip();
+          writer.append(line);
+          line.clear();
+          returnSeen = true;
+        } else {
+          if (line.position() == line.limit()) {    // reallocate longer line
+            ByteBuffer tempLine = ByteBuffer.allocate(line.limit()*2);
+            line.flip();
+            tempLine.put(line);
+            line = tempLine;
+          }
+          line.put((byte)b);
+          returnSeen = false;
+        }
+      }
+    }
+    writer.close();
+    inStream.close();
+    return 0;
+  }
+
+}
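
Since the file schema is just "bytes", each line of text becomes one bytes datum. A minimal sketch writing two such records (hypothetical output file):

    import java.io.File;
    import java.nio.ByteBuffer;
    import org.apache.avro.Schema;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericDatumWriter;

    public class FromTextExample {
      public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse("\"bytes\"");
        DataFileWriter<ByteBuffer> writer =
            new DataFileWriter<ByteBuffer>(new GenericDatumWriter<ByteBuffer>());
        writer.create(schema, new File("lines.avro"));
        writer.append(ByteBuffer.wrap("hello".getBytes("UTF-8")));  // one line
        writer.append(ByteBuffer.wrap("world".getBytes("UTF-8")));  // another line
        writer.close();
      }
    }
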
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/IdlToSchemataTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/IdlToSchemataTool.java
new file mode 100644
index 0000000..0c321b3
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/IdlToSchemataTool.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.tool;
+
+import org.apache.avro.Schema;
+import org.apache.avro.compiler.idl.Idl;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+/**
+ * Extract the Avro JSON schemata of the types of a protocol defined through an
+ * IDL format file.
+ */
+public class IdlToSchemataTool implements Tool {
+  @Override
+  public int run(InputStream in, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+    if (args.isEmpty() || args.size() > 2 || isRequestingHelp(args)) {
+      err.println("Usage: idl2schemata [idl] [outdir]");
+      err.println("");
+      err.println("If an output directory is not specified, "
+          + "outputs to current directory.");
+      return -1;
+    }
+
+    boolean pretty = true;
+    Idl parser = new Idl(new File(args.get(0)));
+    File outputDirectory = getOutputDirectory(args);
+
+    for (Schema schema : parser.CompilationUnit().getTypes()) {
+      print(schema, outputDirectory, pretty);
+    }
+    parser.close();
+
+    return 0;
+  }
+
+  private boolean isRequestingHelp(List<String> args) {
+    return args.size() == 1
+        && (args.get(0).equals("--help") || args.get(0).equals("-help"));
+  }
+
+  private File getOutputDirectory(List<String> args) {
+    String dirname = (args.size() == 2) ? args.get(1) : "";
+    File outputDirectory = new File(dirname);
+    outputDirectory.mkdirs();
+    return outputDirectory;
+  }
+
+  private void print(Schema schema, File outputDirectory, boolean pretty)
+      throws FileNotFoundException {
+    String dirpath = outputDirectory.getAbsolutePath();
+    String filename = dirpath + "/" + schema.getName() + ".avsc";
+    FileOutputStream fileOutputStream = new FileOutputStream(filename);
+    PrintStream printStream = new PrintStream(fileOutputStream);
+    printStream.println(schema.toString(pretty));
+    printStream.close();
+  }
+
+  @Override
+  public String getName() {
+    return "idl2schemata";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Extract JSON schemata of the types from an Avro IDL file";
+  }
+}
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/IdlTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/IdlTool.java
new file mode 100644
index 0000000..48352e3
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/IdlTool.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.avro.tool;
+
+import org.apache.avro.Protocol;
+import org.apache.avro.compiler.idl.Idl;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+/**
+ * Tool implementation for generating Avro JSON schemata from
+ * IDL format files.
+ */
+public class IdlTool implements Tool {
+  @Override
+  public int run(InputStream in, PrintStream out, PrintStream err,
+                  List<String> args) throws Exception {
+
+    PrintStream parseOut = out;
+
+    if (args.size() > 2 ||
+        (args.size() == 1 && (args.get(0).equals("--help") ||
+                              args.get(0).equals("-help")))) {
+      err.println("Usage: idl [in] [out]");
+      err.println("");
+      err.println("If an output path is not specified, outputs to stdout.");
+      err.println("If no input or output is specified, takes input from");
+      err.println("stdin and outputs to stdin.");
+      err.println("The special path \"-\" may also be specified to refer to");
+      err.println("stdin and stdout.");
+      return -1;
+    }
+
+    Idl parser;
+    if (args.size() >= 1 && ! "-".equals(args.get(0))) {
+      parser = new Idl(new File(args.get(0)));
+    } else {
+      parser = new Idl(in);
+    }
+    
+    if (args.size() == 2 && ! "-".equals(args.get(1))) {
+      parseOut = new PrintStream(new FileOutputStream(args.get(1)));
+    }
+
+    Protocol p = parser.CompilationUnit();
+    parseOut.print(p.toString(true));
+    return 0;
+  }
+
+  @Override
+  public String getName() {
+    return "idl";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Generates a JSON schema from an Avro IDL file";
+  }
+}
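
The parse itself is two calls on the Idl parser; a minimal sketch, with a hypothetical .avdl path and the avro-compiler module assumed on the classpath:

    import java.io.File;
    import org.apache.avro.Protocol;
    import org.apache.avro.compiler.idl.Idl;

    public class IdlExample {
      public static void main(String[] args) throws Exception {
        Idl parser = new Idl(new File("example.avdl"));
        Protocol protocol = parser.CompilationUnit();  // parse the whole file
        System.out.println(protocol.toString(true));   // pretty-printed JSON protocol
        parser.close();
      }
    }
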
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/InduceSchemaTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/InduceSchemaTool.java
new file mode 100644
index 0000000..336ca90
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/InduceSchemaTool.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.File;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.List;
+
+import org.apache.avro.reflect.ReflectData;
+
+/**
+ * Utility to induce a schema from a class or a protocol from an interface.
+ */
+public class InduceSchemaTool implements Tool {
+
+  @Override
+  public int run(InputStream in, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+    if (args.size() == 0 || args.size() > 2) {
+      System.err.println("Usage: [colon-delimited-classpath] classname");
+      return 1;
+    }
+    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
+    String className;
+    if (args.size() == 2) {
+      String classpaths = args.get(0);
+      className = args.get(1);
+      if (!classpaths.isEmpty()) {
+        String[] paths = args.get(0).split(":");
+        URL[] urls = new URL[paths.length];
+        for (int i = 0; i < paths.length; ++i) {
+          urls[i] = new File(paths[i]).toURI().toURL();
+        }
+        classLoader = URLClassLoader.newInstance(urls, classLoader);
+      }
+    } else {
+      className = args.get(0);
+    }
+
+    Class<?> klass = classLoader.loadClass(className);
+    if (klass.isInterface()) {
+      System.out.println(ReflectData.get().getProtocol(klass).toString(true));
+    } else {
+      System.out.println(ReflectData.get().getSchema(klass).toString(true));
+    }
+    return 0;
+  }
+
+  @Override
+  public String getName() {
+    return "induce";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Induce schema/protocol from Java class/interface via reflection.";
+  }
+}
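
The reflection step reduces to a single ReflectData call; a self-contained sketch with a hypothetical POJO:

    import org.apache.avro.reflect.ReflectData;

    public class InduceExample {
      static class Point {  // hypothetical class to induce a schema from
        int x;
        int y;
      }

      public static void main(String[] args) {
        // Prints a record schema with two int fields, x and y.
        System.out.println(ReflectData.get().getSchema(Point.class).toString(true));
      }
    }
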
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/JsonToBinaryFragmentTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/JsonToBinaryFragmentTool.java
new file mode 100644
index 0000000..98cca1a
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/JsonToBinaryFragmentTool.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.EOFException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.JsonDecoder;
+
+/** Tool to convert JSON data into the binary form. */
+public class JsonToBinaryFragmentTool implements Tool {
+  @Override
+  public int run(InputStream stdin, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+    OptionParser optionParser = new OptionParser();
+    OptionSpec<String> schemaFileOption = optionParser
+        .accepts("schema-file", "File containing schema, must not occur with inline schema.")
+        .withOptionalArg()
+        .ofType(String.class);
+    
+    OptionSet optionSet = optionParser.parse(args.toArray(new String[0]));
+    List<String> nargs = (List<String>)optionSet.nonOptionArguments();
+    String schemaFile = schemaFileOption.value(optionSet);
+    
+    if (nargs.size() != (schemaFile == null ? 2 : 1)) {
+      err.println("jsontofrag --schema-file <file> [inline-schema] input-file");
+      err.println("   converts JSON to Avro fragments.");
+      optionParser.printHelpOn(err);
+      err.println("   A dash '-' for input-file means stdin.");
+      return 1;
+    }
+    Schema schema;
+    String inputFile;
+    if (schemaFile == null) {
+      schema = new Schema.Parser().parse(nargs.get(0));
+      inputFile = nargs.get(1);
+    } else {
+      schema = new Schema.Parser().parse(Util.openFromFS(schemaFile));
+      inputFile = nargs.get(0);
+    }
+    InputStream input = Util.fileOrStdin(inputFile, stdin);
+
+    try {
+      GenericDatumReader<Object> reader =
+          new GenericDatumReader<Object>(schema);
+      JsonDecoder jsonDecoder =
+          DecoderFactory.get().jsonDecoder(schema, input);
+      GenericDatumWriter<Object> writer =
+          new GenericDatumWriter<Object>(schema);
+      Encoder e = EncoderFactory.get().binaryEncoder(out, null);
+      Object datum = null;
+      while(true) {
+        try {
+          datum = reader.read(datum, jsonDecoder);
+        } catch (EOFException eofException) {
+          break;
+        }
+        writer.write(datum, e);
+        e.flush();
+      }
+    } finally {
+      Util.close(input);
+    }
+    return 0;
+  }
+
+  @Override
+  public String getName() {
+    return "jsontofrag";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Renders a JSON-encoded Avro datum as binary.";
+  }
+}
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/Main.java b/lang/java/tools/src/main/java/org/apache/avro/tool/Main.java
new file mode 100644
index 0000000..dad70ba
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/Main.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.util.Arrays;
+import java.util.Map;
+import java.util.TreeMap;
+
+import java.io.InputStream;
+
+/** Command-line driver.*/
+public class Main {
+  /**
+   * Available tools, initialized in constructor.
+   */
+  final Map<String, Tool> tools;
+
+  int maxLen = 0;
+
+  Main() {
+    tools = new TreeMap<String, Tool>();
+    for (Tool tool : new Tool[] {
+        new CatTool(),
+        new SpecificCompilerTool(),
+        new InduceSchemaTool(),
+        new JsonToBinaryFragmentTool(),
+        new BinaryFragmentToJsonTool(),
+        new CreateRandomFileTool(),
+        new DataFileReadTool(),
+        new DataFileWriteTool(),
+        new DataFileGetMetaTool(),
+        new DataFileGetSchemaTool(),
+        new DataFileRepairTool(),
+        new IdlTool(),
+        new IdlToSchemataTool(),
+        new RecodecTool(),
+        new ConcatTool(),
+        new RpcReceiveTool(),
+        new RpcSendTool(),
+        new RpcProtocolTool(),
+        new FromTextTool(),
+        new ToTextTool(),
+        new ToTrevniTool(),
+        new TetherTool(),
+        new TrevniCreateRandomTool(),
+        new TrevniMetadataTool(),
+        new TrevniToJsonTool()
+        }) {
+      Tool prev = tools.put(tool.getName(), tool);
+      if (prev != null) {
+        throw new AssertionError(
+            "Two tools with identical names: " + tool + ", " + prev);
+      }
+      maxLen = Math.max(tool.getName().length(), maxLen);
+    }
+  }
+
+  public static void main(String[] args) throws Exception {
+    int rc = new Main().run(args);
+    System.exit(rc);
+  }
+
+  /**
+   * Delegates to tool specified on the command-line.
+   */
+  private int run(String[] args) throws Exception {
+    if (args.length != 0) {
+      Tool tool = tools.get(args[0]);
+      if (tool != null) {
+        return tool.run(
+          System.in, System.out, System.err, Arrays.asList(args).subList(1, args.length));
+      }
+    }
+    System.err.print("Version ");
+    printStream(Main.class.getClassLoader().getResourceAsStream("VERSION.txt"));
+    System.err.print(" of ");
+    printStream(Main.class.getClassLoader().getResourceAsStream("NOTICE.txt"));
+    System.err.println("----------------");
+
+    System.err.println("Available tools:");
+    for (Tool k : tools.values()) {
+      System.err.printf("%" + maxLen + "s  %s\n", k.getName(), k.getShortDescription());
+    }
+
+    return 1;
+  }
+
+  private static void printStream(InputStream in) throws Exception {
+    byte[] buffer = new byte[1024];
+    for (int i = in.read(buffer); i != -1; i = in.read(buffer))
+      System.err.write(buffer, 0, i);
+  }
+
+}
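
Because every Tool receives its streams and arguments as parameters, a tool can also be driven directly from test or embedding code rather than through Main's dispatch. A sketch under that assumption (it presumes the org.apache.avro.tool classes are on the classpath; the file names are illustrative):

    import java.util.Arrays;

    public class DriveToolSketch {
      public static void main(String[] args) throws Exception {
        // Invoke a tool exactly as Main.run() would, but with explicit streams.
        int rc = new RecodecTool().run(
            System.in, System.out, System.err,
            Arrays.asList("input.avro", "output.avro"));
        System.exit(rc);
      }
    }
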
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/RecodecTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/RecodecTool.java
new file mode 100644
index 0000000..13c38a3
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/RecodecTool.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileConstants;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+
+/** Tool to alter the codec of an Avro data file. */
+public class RecodecTool implements Tool {
+  @Override
+  public int run(InputStream in, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+
+    OptionParser optParser = new OptionParser();
+    OptionSpec<String> codecOpt = Util.compressionCodecOption(optParser);
+    OptionSpec<Integer> levelOpt = Util.compressionLevelOption(optParser);
+    OptionSet opts = optParser.parse(args.toArray(new String[0]));
+
+    List<String> nargs = (List<String>)opts.nonOptionArguments();
+    if (nargs.size() > 2) {
+      err.println("Expected at most an input file and output file.");
+      optParser.printHelpOn(err);
+      return 1;
+    }
+    InputStream input = in;
+    boolean inputNeedsClosing = false;
+    if (nargs.size() > 0 && !nargs.get(0).equals("-")) {
+      input = Util.openFromFS(nargs.get(0));
+      inputNeedsClosing = true;
+    }
+    OutputStream output = out;
+    boolean outputNeedsClosing = false;
+    if (nargs.size() > 1 && !nargs.get(1).equals("-")) {
+      output = Util.createFromFS(nargs.get(1));
+      outputNeedsClosing = true;
+    }
+
+    DataFileStream<GenericRecord> reader = new DataFileStream<GenericRecord>(
+        input, new GenericDatumReader<GenericRecord>());
+    Schema schema = reader.getSchema();
+    DataFileWriter<GenericRecord> writer = new DataFileWriter<GenericRecord>(
+        new GenericDatumWriter<GenericRecord>());
+    // unlike the other Avro tools, we default to a null codec, not deflate
+    CodecFactory codec = Util.codecFactory(opts, codecOpt, levelOpt, DataFileConstants.NULL_CODEC);
+    writer.setCodec(codec);
+    for (String key : reader.getMetaKeys()) {
+      if (!DataFileWriter.isReservedMeta(key)) {
+        writer.setMeta(key, reader.getMeta(key));
+      }
+    }
+    writer.create(schema, output);
+
+    writer.appendAllFrom(reader, true);
+    writer.flush();
+
+    if (inputNeedsClosing) {
+      input.close();
+    }
+    if (outputNeedsClosing) {
+      output.close();
+    }
+    return 0;
+  }
+
+  @Override
+  public String getName() {
+    return "recodec";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Alters the codec of a data file.";
+  }
+}
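
The actual compression choice is delegated to CodecFactory, so recodec supports whatever codecs the Java library exposes. For comparison, a sketch of writing a fresh file with an explicit codec (the record schema and file name are illustrative):

    import java.io.File;

    import org.apache.avro.Schema;
    import org.apache.avro.file.CodecFactory;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.generic.GenericRecord;

    public class CodecSketch {
      public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"R\",\"fields\":"
            + "[{\"name\":\"x\",\"type\":\"int\"}]}");
        DataFileWriter<GenericRecord> writer = new DataFileWriter<GenericRecord>(
            new GenericDatumWriter<GenericRecord>());
        writer.setCodec(CodecFactory.deflateCodec(9));  // or nullCodec(), snappyCodec()
        writer.create(schema, new File("out.avro"));    // setCodec must come first
        writer.close();
      }
    }
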
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/RpcProtocolTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/RpcProtocolTool.java
new file mode 100644
index 0000000..29d2a27
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/RpcProtocolTool.java
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import org.apache.avro.Protocol;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.ipc.HandshakeRequest;
+import org.apache.avro.ipc.HandshakeResponse;
+import org.apache.avro.ipc.Ipc;
+import org.apache.avro.ipc.MD5;
+import org.apache.avro.ipc.Transceiver;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.specific.SpecificDatumWriter;
+import org.apache.avro.util.ByteBufferInputStream;
+import org.apache.avro.util.ByteBufferOutputStream;
+
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.net.URI;
+import java.nio.ByteBuffer;
+import java.util.LinkedHashMap;
+import java.util.List;
+
+/**
+ * Tool to grab the protocol from a remote running service.
+ */
+public class RpcProtocolTool implements Tool {
+
+  @Override
+  public String getName() {
+    return "rpcprotocol";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Output the protocol of a RPC service";
+  }
+
+  @Override
+  public int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception {
+
+    if (args.size() != 1) {
+      err.println("Usage: uri");
+      return 1;
+    }
+
+    URI uri = URI.create(args.get(0));
+
+    Transceiver transceiver = null;
+    try {
+      transceiver = Ipc.createTransceiver(uri);
+
+      // write an empty HandshakeRequest
+      HandshakeRequest rq = HandshakeRequest.newBuilder()
+          .setClientHash(new MD5(new byte[16]))
+          .setServerHash(new MD5(new byte[16]))
+          .setClientProtocol(null)
+          .setMeta(new LinkedHashMap<String, ByteBuffer>())
+          .build();
+
+      DatumWriter<HandshakeRequest> handshakeWriter = new SpecificDatumWriter<HandshakeRequest>(HandshakeRequest.class);
+
+      ByteBufferOutputStream byteBufferOutputStream = new ByteBufferOutputStream();
+
+      BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(byteBufferOutputStream, null);
+
+      handshakeWriter.write(rq, encoder);
+      encoder.flush();
+
+      // send it and get the response
+      List<ByteBuffer> response = transceiver.transceive(byteBufferOutputStream.getBufferList());
+
+
+      // parse the response
+      ByteBufferInputStream byteBufferInputStream = new ByteBufferInputStream(response);
+
+      DatumReader<HandshakeResponse> handshakeReader = new SpecificDatumReader<HandshakeResponse>(HandshakeResponse.class);
+
+      HandshakeResponse handshakeResponse = handshakeReader.read(null, DecoderFactory.get().binaryDecoder(byteBufferInputStream, null));
+
+      Protocol p = Protocol.parse(handshakeResponse.getServerProtocol());
+
+      // finally output the protocol
+      out.println(p.toString(true));
+
+    } finally {
+      if( transceiver != null )
+        transceiver.close();
+    }
+    return 0;
+  }
+}
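
What comes back from the handshake is the server's protocol as JSON text, which round-trips through Protocol.parse. A small sketch of that parsing step (the protocol JSON here is illustrative):

    import org.apache.avro.Protocol;

    public class ProtocolParseSketch {
      public static void main(String[] args) {
        Protocol p = Protocol.parse(
            "{\"protocol\":\"Echo\",\"namespace\":\"demo\","
            + "\"types\":[],\"messages\":{}}");
        System.out.println(p.getName());       // Echo
        System.out.println(p.toString(true));  // pretty-printed, as rpcprotocol emits
      }
    }
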
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/RpcReceiveTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/RpcReceiveTool.java
new file mode 100644
index 0000000..29eb738
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/RpcReceiveTool.java
@@ -0,0 +1,174 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.io.File;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.net.URI;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+
+import org.apache.avro.AvroRemoteException;
+import org.apache.avro.Protocol;
+import org.apache.avro.Protocol.Message;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.io.JsonEncoder;
+import org.apache.avro.ipc.Ipc;
+import org.apache.avro.ipc.Server;
+import org.apache.avro.ipc.generic.GenericResponder;
+
+/**
+ * Receives one RPC call and responds.  (The moral equivalent
+ * of "netcat".)
+ */
+public class RpcReceiveTool implements Tool {
+  private PrintStream out;
+  private Object response;
+  /** Used to communicate between server thread (responder) and run() */
+  private CountDownLatch latch;
+  private Message expectedMessage;
+  Server server;
+
+  @Override
+  public String getName() {
+    return "rpcreceive";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Opens an RPC Server and listens for one message.";
+  }
+  
+  private class SinkResponder extends GenericResponder {
+
+    public SinkResponder(Protocol local) {
+      super(local);
+    }
+
+    @Override
+    public Object respond(Message message, Object request)
+    throws AvroRemoteException {
+      if (!message.equals(expectedMessage)) {
+        out.println(String.format("Expected message '%s' but received '%s'.", 
+            expectedMessage.getName(), message.getName()));
+        latch.countDown();
+        throw new IllegalArgumentException("Unexpected message.");
+      }
+      out.print(message.getName());
+      out.print("\t");
+      try {
+        JsonEncoder jsonEncoder = EncoderFactory.get().jsonEncoder(message.getRequest(),
+            out);
+        GenericDatumWriter<Object> writer = new GenericDatumWriter<Object>(
+            message.getRequest());
+        writer.write(request, jsonEncoder);
+        jsonEncoder.flush();
+        out.flush();
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+      out.println();
+      new Thread() {
+        @Override
+        public void run() {
+          try {
+            Thread.sleep(1000);
+          } catch (InterruptedException e) {}
+          latch.countDown();
+        }
+      }.start();
+      return response;
+    }
+  }
+  
+  @Override
+  public int run(InputStream in, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+    // Split up into two functions for easier testing.
+    int r = run1(in, out, err, args);
+    if (r != 0) {
+      return r;
+    }
+    return run2(err);
+  }
+
+  int run1(InputStream in, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+    OptionParser p = new OptionParser();
+    OptionSpec<String> file =
+      p.accepts("file", "Data file containing response datum.")
+      .withRequiredArg()
+      .ofType(String.class);
+    OptionSpec<String> data =
+      p.accepts("data", "JSON-encoded response datum.")
+      .withRequiredArg()
+      .ofType(String.class);
+    OptionSet opts = p.parse(args.toArray(new String[0]));
+    args = (List<String>)opts.nonOptionArguments();
+
+    if (args.size() != 3) {
+      err.println("Usage: uri protocol_file message_name (-data d | -file f)");
+      p.printHelpOn(err);
+      return 1;
+    }
+
+    URI uri = new URI(args.get(0));
+    Protocol protocol = Protocol.parse(new File(args.get(1)));
+    String messageName = args.get(2);
+    expectedMessage = protocol.getMessages().get(messageName);
+    if (expectedMessage == null) {
+      err.println(String.format("No message named '%s' found in protocol '%s'.",
+          messageName, protocol));
+      return 1;
+    }
+    if (data.value(opts) != null) {
+      this.response =
+        Util.jsonToGenericDatum(expectedMessage.getResponse(),
+                                data.value(opts));
+    } else if (file.value(opts) != null) {
+      this.response = Util.datumFromFile(expectedMessage.getResponse(),
+                                         file.value(opts));
+    } else {
+      err.println("One of -data or -file must be specified.");
+      return 1;
+    }
+    
+    this.out = out;
+    
+    latch = new CountDownLatch(1);
+    server = Ipc.createServer(new SinkResponder(protocol), uri);
+    server.start();
+    out.println("Port: " + server.getPort());
+    return 0;
+  }
+  
+  int run2(PrintStream err) throws InterruptedException {
+    latch.await();
+    err.println("Closing server.");
+    server.close();
+    return 0;
+  }
+
+}
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/RpcSendTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/RpcSendTool.java
new file mode 100644
index 0000000..04e729c
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/RpcSendTool.java
@@ -0,0 +1,118 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.io.File;
+import java.net.URI;
+import java.util.List;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+
+import org.apache.avro.Protocol;
+import org.apache.avro.Schema;
+import org.apache.avro.Protocol.Message;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.io.EncoderFactory;
+import org.apache.avro.ipc.Ipc;
+import org.apache.avro.ipc.generic.GenericRequestor;
+
+import org.codehaus.jackson.JsonEncoding;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonGenerator;
+
+/**
+ * Sends a single RPC message.
+ */
+public class RpcSendTool implements Tool {
+  @Override
+  public String getName() {
+    return "rpcsend";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Sends a single RPC message.";
+  }
+
+  @Override
+  public int run(InputStream in, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+    OptionParser p = new OptionParser();
+    OptionSpec<String> file =
+      p.accepts("file", "Data file containing request parameters.")
+      .withRequiredArg()
+      .ofType(String.class);
+    OptionSpec<String> data =
+      p.accepts("data", "JSON-encoded request parameters.")
+      .withRequiredArg()
+      .ofType(String.class);
+    OptionSet opts = p.parse(args.toArray(new String[0]));
+    args = (List<String>)opts.nonOptionArguments();
+
+    if (args.size() != 3) {
+      err.println("Usage: uri protocol_file message_name (-data d | -file f)");
+      p.printHelpOn(err);
+      return 1;
+    }
+
+    URI uri = new URI(args.get(0));
+    Protocol protocol = Protocol.parse(new File(args.get(1)));
+    String messageName = args.get(2);
+    Message message = protocol.getMessages().get(messageName);
+    if (message == null) {
+      err.println(String.format("No message named '%s' found in protocol '%s'.",
+          messageName, protocol));
+      return 1;
+    }
+    
+    Object datum;
+    if (data.value(opts) != null) {
+      datum = Util.jsonToGenericDatum(message.getRequest(), data.value(opts));
+    } else if (file.value(opts) != null) {
+      datum = Util.datumFromFile(message.getRequest(), file.value(opts));
+    } else {
+      err.println("One of -data or -file must be specified.");
+      return 1;
+    }
+
+    GenericRequestor client =
+      new GenericRequestor(protocol, Ipc.createTransceiver(uri));
+    Object response = client.request(message.getName(), datum);
+    dumpJson(out, message.getResponse(), response);
+    return 0;
+  }
+
+  private void dumpJson(PrintStream out, Schema schema, Object datum) 
+  throws IOException {
+    DatumWriter<Object> writer = new GenericDatumWriter<Object>(schema);
+    JsonGenerator g =
+      new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
+    g.useDefaultPrettyPrinter();
+    writer.write(datum, EncoderFactory.get().jsonEncoder(schema, g));
+    g.flush();
+    out.println();
+    out.flush();
+  }
+
+}
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/SpecificCompilerTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/SpecificCompilerTool.java
new file mode 100644
index 0000000..b038b4e
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/SpecificCompilerTool.java
@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Set;
+import java.util.LinkedHashSet;
+import java.util.List;
+
+import org.apache.avro.Protocol;
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData.StringType;
+import org.apache.avro.compiler.specific.SpecificCompiler;
+
+/**
+ * A Tool for compiling Avro protocols or schemas to Java classes using the
+ * Avro SpecificCompiler.
+ */
+public class SpecificCompilerTool implements Tool {
+  @Override
+  public int run(InputStream in, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+    if (args.size() < 3) {
+      System.err
+          .println("Usage: [-encoding <outputencoding>] [-string] (schema|protocol) input... outputdir");
+      System.err
+          .println(" input - input files or directories");
+      System.err
+          .println(" outputdir - directory to write generated Java");
+      System.err
+          .println(" -encoding - character encoding for the generated Java files");
+      System.err.println(" -string - use java.lang.String instead of Utf8");
+      return 1;
+    }
+
+    StringType stringType = StringType.CharSequence;
+
+    int arg = 0;
+
+    String encoding = null;
+    if ("-encoding".equals(args.get(arg))) {
+      arg++;
+      encoding = args.get(arg);
+      arg++;
+    }
+
+    if ("-string".equals(args.get(arg))) {
+      stringType = StringType.String;
+      arg++;
+    }
+
+    String method = args.get(arg);
+    List<File> inputs = new ArrayList<File>();
+    File output = new File(args.get(args.size() - 1));
+
+    for (int i = arg+1; i < args.size() - 1; i++) {
+      inputs.add(new File(args.get(i)));
+    }
+
+    if ("schema".equals(method)) {
+      Schema.Parser parser = new Schema.Parser();
+      for (File src : determineInputs(inputs, SCHEMA_FILTER)) {
+        Schema schema = parser.parse(src);
+        SpecificCompiler compiler = new SpecificCompiler(schema);
+        compiler.setStringType(stringType);
+        if (encoding != null) {
+          compiler.setOutputCharacterEncoding(encoding);
+        }
+        compiler.compileToDestination(src, output);
+      }
+    } else if ("protocol".equals(method)) {
+      for (File src : determineInputs(inputs, PROTOCOL_FILTER)) {
+        Protocol protocol = Protocol.parse(src);
+        SpecificCompiler compiler = new SpecificCompiler(protocol);
+        compiler.setStringType(stringType);
+        if (encoding != null) {
+          compiler.setOutputCharacterEncoding(encoding);
+        }
+        compiler.compileToDestination(src, output);
+      }
+    } else {
+      System.err.println("Expected \"schema\" or \"protocol\".");
+      return 1;
+    }
+    return 0;
+  }
+
+  @Override
+  public String getName() {
+    return "compile";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Generates Java code for the given schema.";
+  }
+
+  /**
+   * For a List of files or directories, returns a File[] containing each file
+   * passed as well as each file with a matching extension found in the directory.
+   *
+   * @param inputs List of File objects that are files or directories
+   * @param filter File extension filter to match on when fetching files from a directory
+   * @return Unique array of files
+   */
+  private static File[] determineInputs(List<File> inputs, FilenameFilter filter) {
+    Set<File> fileSet = new LinkedHashSet<File>(); // preserve order and uniqueness
+
+    for (File file : inputs) {
+      // if directory, look at contents to see what files match extension
+      if (file.isDirectory()) {
+        for (File f : file.listFiles(filter)) {
+          fileSet.add(f);
+        }
+      }
+      // otherwise, just add the file.
+      else {
+        fileSet.add(file);
+      }
+    }
+
+    if (fileSet.size() > 0) {
+      System.err.println("Input files to compile:");
+      for (File file : fileSet) {
+        System.err.println("  " + file);
+      }
+    }
+    else {
+      System.err.println("No input files found.");
+    }
+
+    return fileSet.toArray(new File[fileSet.size()]);
+  }
+
+  private static final FileExtensionFilter SCHEMA_FILTER =
+    new FileExtensionFilter("avsc");
+  private static final FileExtensionFilter PROTOCOL_FILTER =
+    new FileExtensionFilter("avpr");
+
+  private static class FileExtensionFilter implements FilenameFilter {
+    private String extension;
+
+    private FileExtensionFilter(String extension) {
+      this.extension = extension;
+    }
+
+    @Override
+    public boolean accept(File dir, String name) {
+      return name.endsWith(this.extension);
+    }
+  }
+}
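
The tool is a thin command-line wrapper over SpecificCompiler; the equivalent programmatic calls look roughly like the following sketch (the paths are illustrative):

    import java.io.File;

    import org.apache.avro.Schema;
    import org.apache.avro.compiler.specific.SpecificCompiler;
    import org.apache.avro.generic.GenericData.StringType;

    public class CompileSketch {
      public static void main(String[] args) throws Exception {
        File src = new File("user.avsc");
        File dest = new File("generated-src");

        Schema schema = new Schema.Parser().parse(src);
        SpecificCompiler compiler = new SpecificCompiler(schema);
        compiler.setStringType(StringType.String);  // what -string selects
        compiler.compileToDestination(src, dest);
      }
    }
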
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/TetherTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/TetherTool.java
new file mode 100644
index 0000000..875bd47
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/TetherTool.java
@@ -0,0 +1,178 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.File;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.avro.Schema;
+import org.apache.avro.mapred.AvroJob;
+import org.apache.avro.mapred.tether.TetherJob;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobConf;
+
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+
+
+@SuppressWarnings("deprecation")
+public class TetherTool implements Tool {
+  public TetherJob job;
+
+  @Override
+  public String getName() {
+    return "tether";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Run a tethered mapreduce job.";
+  }
+
+  @Override
+  public int run(InputStream ins, PrintStream outs, PrintStream err,
+      List<String> args) throws Exception {
+
+    String[] argarry = args.toArray(new String[0]);
+    Options opts = new Options();
+
+    Option helpopt = OptionBuilder.hasArg(false)
+        .withDescription("print this message")
+        .create("help");
+
+    Option inopt = OptionBuilder.hasArg()
+        .isRequired()
+        .withDescription("comma-separated input paths")
+        .create("in");
+
+    Option outopt = OptionBuilder.hasArg()
+        .isRequired()
+        .withDescription("The output path.")
+        .create("out");
+
+    Option pargs = OptionBuilder.hasArg()
+        .withDescription("A string containing the command line arguments to pass to the tethered process. String should be enclosed in quotes")
+        .create("exec_args");
+
+    Option popt = OptionBuilder.hasArg()
+        .isRequired()
+        .withDescription("executable program, usually in HDFS")
+        .create("program");
+
+    Option outscopt = OptionBuilder.withType(File.class).hasArg()
+        .isRequired()
+        .withDescription("schema file for output of reducer")
+        .create("outschema");
+
+    Option outscmapopt = OptionBuilder.withType(File.class).hasArg()
+        .withDescription("(optional) map output schema file,  if different from outschema")
+        .create("outschemamap");
+
+    Option redopt = OptionBuilder.withType(Integer.class).hasArg()
+        .withDescription("(optional) number of reducers")
+        .create("reduces");
+
+    Option cacheopt = OptionBuilder.withType(Boolean.class).hasArg()
+        .withDescription("(optional) boolean indicating whether or not the exectuable should be distributed via distributed cache")
+        .create("exec_cached");
+
+    Option protoopt = OptionBuilder.hasArg()
+        .withDescription("(optional) specifies the transport protocol 'http' or 'sasl'")
+        .create("protocol");
+
+    opts.addOption(redopt);
+    opts.addOption(outscopt);
+    opts.addOption(popt);
+    opts.addOption(pargs);
+    opts.addOption(inopt);
+    opts.addOption(outopt);
+    opts.addOption(helpopt);
+    opts.addOption(outscmapopt);
+    opts.addOption(cacheopt);
+    opts.addOption(protoopt);
+
+    CommandLineParser parser = new GnuParser();
+
+    String[] genargs = null;
+    CommandLine line = null;
+    HelpFormatter formatter = new HelpFormatter();
+
+    JobConf job = new JobConf();
+
+    try {
+      line = parser.parse(opts, argarry);
+
+      if (line.hasOption("help")) {
+        formatter.printHelp("tether", opts );
+        return 0;
+      }
+
+      genargs = line.getArgs();
+
+      FileInputFormat.addInputPaths(job, line.getOptionValue("in"));
+      FileOutputFormat.setOutputPath(job, new Path(line.getOptionValue("out")));
+
+      List<String> exargs = null;
+      Boolean cached = false;
+
+      if (line.hasOption("exec_args")) {
+        String[] splitargs = line.getOptionValue("exec_args").split(" ");
+        exargs = new ArrayList<String>();
+        for (String item : splitargs) {
+          exargs.add(item);
+        }
+      }
+      if (line.hasOption("exec_cached")) {
+        cached = Boolean.parseBoolean(line.getOptionValue("exec_cached"));
+      }
+      TetherJob.setExecutable(job, new File(line.getOptionValue("program")), exargs, cached);
+
+      File outschema = (File)line.getParsedOptionValue("outschema");
+      job.set(AvroJob.OUTPUT_SCHEMA, Schema.parse(outschema).toString());
+      if (line.hasOption("outschemamap")) {
+        job.set(AvroJob.MAP_OUTPUT_SCHEMA,
+            Schema.parse((File)line.getParsedOptionValue("outschemamap")).toString());
+      }
+      if (line.hasOption("reduces")) {
+        job.setNumReduceTasks((Integer)line.getParsedOptionValue("reduces"));
+      }
+      if (line.hasOption("protocol")) {
+        TetherJob.setProtocol(job, line.getOptionValue("protocol"));
+      }
+    }
+    catch (Exception exp) {
+      System.out.println("Unexpected exception: " + exp.getMessage());
+      formatter.printHelp("tether", opts );
+      return -1;
+    }
+
+    TetherJob.runJob(job);
+    return 0;
+  }
+
+}
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/ToTextTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/ToTextTool.java
new file mode 100644
index 0000000..3eebf3b
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/ToTextTool.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.generic.GenericDatumReader;
+
+/** Reads an Avro data file and writes it as a plain text file. */
+public class ToTextTool implements Tool {
+  private static final String TEXT_FILE_SCHEMA = "\"bytes\"";
+  private static final byte[] LINE_SEPARATOR =
+        System.getProperty("line.separator").getBytes();
+
+  @Override
+  public String getName() {
+    return "totext";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Converts an Avro data file to a text file.";
+  }
+
+  @Override
+  public int run(InputStream stdin, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+      
+    OptionParser p = new OptionParser();
+    OptionSet opts = p.parse(args.toArray(new String[0]));
+    if (opts.nonOptionArguments().size() != 2) {
+      err.println("Expected 2 args: from_file to_file (filenames or '-' for stdin/stdout");
+      p.printHelpOn(err);
+      return 1;
+    }
+
+    BufferedInputStream inStream = Util.fileOrStdin(args.get(0), stdin);
+    BufferedOutputStream outStream = Util.fileOrStdout(args.get(1), out);
+
+    GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
+    DataFileStream<Object> fileReader =
+        new DataFileStream<Object>(inStream, reader);
+
+    if (!fileReader.getSchema().equals(new Schema.Parser().parse(TEXT_FILE_SCHEMA))) {
+      err.println("Avro file is not generic text schema");
+      p.printHelpOn(err);
+      fileReader.close();
+      return 1;
+    }
+    
+    while (fileReader.hasNext()) {
+      ByteBuffer outBuff = (ByteBuffer) fileReader.next();
+      outStream.write(outBuff.array());
+      outStream.write(LINE_SEPARATOR);
+    }
+    fileReader.close();
+    Util.close(inStream);
+    Util.close(outStream);
+    return 0;
+  }
+
+}
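
totext only accepts container files whose schema is exactly "bytes", emitting one datum per output line. A sketch of producing such a file programmatically, i.e. roughly what the fromtext tool does (file name and contents are illustrative):

    import java.io.File;
    import java.nio.ByteBuffer;

    import org.apache.avro.Schema;
    import org.apache.avro.file.DataFileWriter;
    import org.apache.avro.generic.GenericDatumWriter;

    public class TextFileSketch {
      public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse("\"bytes\"");
        DataFileWriter<ByteBuffer> writer = new DataFileWriter<ByteBuffer>(
            new GenericDatumWriter<ByteBuffer>(schema));
        writer.create(schema, new File("lines.avro"));
        writer.append(ByteBuffer.wrap("hello".getBytes("UTF-8")));  // one line each
        writer.append(ByteBuffer.wrap("world".getBytes("UTF-8")));
        writer.close();
      }
    }
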
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/ToTrevniTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/ToTrevniTool.java
new file mode 100644
index 0000000..ebd629c
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/ToTrevniTool.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.generic.GenericDatumReader;
+
+import org.apache.trevni.ColumnFileMetaData;
+import org.apache.trevni.avro.AvroColumnWriter;
+
+import joptsimple.OptionParser;
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+
+
+/** Reads an Avro data file and writes a Trevni file. */
+public class ToTrevniTool implements Tool {
+
+  @Override
+  public String getName() {
+    return "totrevni";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Converts an Avro data file to a Trevni file.";
+  }
+
+  @Override
+  public int run(InputStream stdin, PrintStream out, PrintStream err,
+      List<String> args) throws Exception {
+
+    OptionParser p = new OptionParser();
+    OptionSpec<String> codec =
+      p.accepts("codec", "Compression codec")
+      .withRequiredArg()
+      .defaultsTo("null")
+      .ofType(String.class);
+    OptionSet opts = p.parse(args.toArray(new String[0]));
+    if (opts.nonOptionArguments().size() != 2) {
+      err.println("Usage: inFile outFile (filenames or '-' for stdin/stdout)");
+      p.printHelpOn(err);
+      return 1;
+    }
+    args = (List<String>)opts.nonOptionArguments();
+
+    DataFileStream<Object> reader =
+      new DataFileStream<Object>(Util.fileOrStdin(args.get(0), stdin),
+                                 new GenericDatumReader<Object>());
+    OutputStream outs = Util.fileOrStdout(args.get(1), out);
+    AvroColumnWriter<Object> writer =
+      new AvroColumnWriter<Object>(reader.getSchema(),
+                                   new ColumnFileMetaData()
+                                   .setCodec(codec.value(opts)));
+    for (Object datum : reader)
+      writer.write(datum);
+    writer.writeTo(outs);
+    outs.close();
+    reader.close();
+    return 0;
+  }
+
+}
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/Tool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/Tool.java
new file mode 100644
index 0000000..3500a1f
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/Tool.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+/**
+ * Command-line "avro-tools" utilities should implement this
+ * interface for delegation by {@link Main}.
+ */
+public interface Tool {
+  /**
+   * Runs the tool with supplied arguments.  Input and output streams
+   * are customizable for easier testing.
+   *
+   * @param in Input stream to read data (typically System.in).
+   * @param out Output of tool (typically System.out).
+   * @param err Error stream (typically System.err).
+   * @param args Non-null list of arguments.
+   * @return result code (0 for success)
+   * @throws Exception Just like main(), tools may throw Exception.
+   */
+  int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception;
+
+  /**
+   * Name of tool, to be used in listings.
+   */
+  String getName();
+
+  /**
+   * 1-line description to be used in command listings.
+   */
+  String getShortDescription();
+}
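
To make the contract concrete, a minimal implementation of this interface (a hypothetical tool, not part of this commit; it would also need an entry in Main's constructor to show up in listings):

    import java.io.InputStream;
    import java.io.PrintStream;
    import java.util.List;

    public class EchoArgsTool implements Tool {
      @Override
      public int run(InputStream in, PrintStream out, PrintStream err,
          List<String> args) throws Exception {
        for (String arg : args)
          out.println(arg);
        return 0;  // zero signals success to Main
      }

      @Override
      public String getName() { return "echoargs"; }

      @Override
      public String getShortDescription() {
        return "Prints each argument on its own line.";
      }
    }
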
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniCreateRandomTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniCreateRandomTool.java
new file mode 100644
index 0000000..961701c
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniCreateRandomTool.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.File;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+import org.apache.avro.Schema;
+import org.apache.trevni.ColumnFileMetaData;
+import org.apache.trevni.avro.AvroColumnWriter;
+import org.apache.trevni.avro.RandomData;
+
+/** Tool to create a randomly populated Trevni file based on an Avro schema. */
+public class TrevniCreateRandomTool implements Tool {
+
+  @Override
+  public String getName() {
+    return "trevni_random";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Create a Trevni file filled with random instances of a schema.";
+  }
+
+  @Override
+  public int run(InputStream stdin, PrintStream out, PrintStream err,
+                 List<String> args) throws Exception {
+    if (args.size() != 3) {
+      err.println("Usage: schemaFile count outputFile");
+      return 1;
+    }
+
+    File schemaFile = new File(args.get(0));
+    int count = Integer.parseInt(args.get(1));
+    File outputFile = new File(args.get(2));
+
+    Schema schema = Schema.parse(schemaFile);
+
+    AvroColumnWriter<Object> writer =
+      new AvroColumnWriter<Object>(schema, new ColumnFileMetaData());
+
+    for (Object datum : new RandomData(schema, count))
+      writer.write(datum);
+
+    writer.writeTo(outputFile);
+
+    return 0;
+  }
+}
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java
new file mode 100644
index 0000000..361e6f6
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.trevni.Input;
+import org.apache.trevni.ColumnFileReader;
+import org.apache.trevni.MetaData;
+import org.apache.trevni.ColumnMetaData;
+
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonEncoding;
+import org.codehaus.jackson.util.MinimalPrettyPrinter;
+
+/** Tool to print Trevni file metadata as JSON. */
+public class TrevniMetadataTool implements Tool {
+  static final JsonFactory FACTORY = new JsonFactory();
+
+  private JsonGenerator generator;
+  private ColumnFileReader reader;
+
+  @Override
+  public String getName() {
+    return "trevni_meta";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Dumps a Trevni file's metadata as JSON.";
+  }
+
+  @Override
+  public int run(InputStream stdin, PrintStream out, PrintStream err,
+                 List<String> args) throws Exception {
+    String filename;
+    boolean pretty = false;
+    if (args.size() == 2 && "-pretty".equals(args.get(0))) {
+      pretty = true;
+      filename = args.get(1);
+    } else if (args.size() == 1) {
+      filename = args.get(0);
+    } else {
+      err.println("Usage: [-pretty] input");
+      return 1;
+    }
+    
+    dump(TrevniUtil.input(filename), out, pretty);
+
+    return 0;
+  }
+
+  /** Read a Trevni file and print each row as a JSON object. */
+  public void dump(Input input, PrintStream out, boolean pretty)
+    throws IOException {
+    this.generator = FACTORY.createJsonGenerator(out, JsonEncoding.UTF8);
+    if (pretty) {
+      generator.useDefaultPrettyPrinter();
+    } else {                                      // ensure newline separation
+      MinimalPrettyPrinter pp = new MinimalPrettyPrinter();
+      pp.setRootValueSeparator(System.getProperty("line.separator"));
+      generator.setPrettyPrinter(pp);
+    }
+
+    this.reader = new ColumnFileReader(input);
+
+    generator.writeStartObject();
+    generator.writeNumberField("rowCount", reader.getRowCount());
+    generator.writeNumberField("columnCount", reader.getColumnCount());
+
+    generator.writeFieldName("metadata");
+    dump(reader.getMetaData());
+
+    generator.writeFieldName("columns");
+    generator.writeStartArray();
+    for (ColumnMetaData c : reader.getColumnMetaData())
+      dump(c);
+    generator.writeEndArray();
+
+    generator.writeEndObject();
+
+    generator.flush();
+    out.println();
+    reader.close();
+  }
+
+  private void dump(MetaData<?> meta) throws IOException {
+    generator.writeStartObject();
+    for (Map.Entry<String,byte[]> e : meta.entrySet())
+      generator.writeStringField(e.getKey(),
+                                 new String(e.getValue(), "ISO-8859-1"));
+    generator.writeEndObject();
+  }
+
+}
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniToJsonTool.java b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniToJsonTool.java
new file mode 100644
index 0000000..7ae815a
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniToJsonTool.java
@@ -0,0 +1,181 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.List;
+
+import org.apache.trevni.Input;
+import org.apache.trevni.ColumnFileReader;
+import org.apache.trevni.ColumnMetaData;
+import org.apache.trevni.ColumnValues;
+
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonEncoding;
+import org.codehaus.jackson.util.MinimalPrettyPrinter;
+
+/** Tool to read Trevni files and print them as JSON.
+ * This can read any Trevni file.  Nested structure is reconstructed from the
+ * columns rather than any schema information.
+ */
+public class TrevniToJsonTool implements Tool {
+  static final JsonFactory FACTORY = new JsonFactory();
+
+  private JsonGenerator generator;
+  private ColumnFileReader reader;
+  private ColumnValues[] values;
+  private String[] shortNames;
+
+  @Override
+  public String getName() {
+    return "trevni_tojson";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Dumps a Trevni file as JSON.";
+  }
+
+  @Override
+  public int run(InputStream stdin, PrintStream out, PrintStream err,
+                 List<String> args) throws Exception {
+    String filename;
+    boolean pretty = false;
+    if (args.size() == 2 && "-pretty".equals(args.get(0))) {
+      pretty = true;
+      filename = args.get(1);
+    } else if (args.size() == 1) {
+      filename = args.get(0);
+    } else {
+      err.println("Usage: [-pretty] input");
+      return 1;
+    }
+    
+    toJson(TrevniUtil.input(filename), out, pretty);
+
+    return 0;
+  }
+
+  /** Read a Trevni file and print each row as a JSON object. */
+  public void toJson(Input input, PrintStream out, boolean pretty)
+    throws IOException {
+    this.generator = FACTORY.createJsonGenerator(out, JsonEncoding.UTF8);
+    if (pretty) {
+      generator.useDefaultPrettyPrinter();
+    } else {                                      // ensure newline separation
+      MinimalPrettyPrinter pp = new MinimalPrettyPrinter();
+      pp.setRootValueSeparator(System.getProperty("line.separator"));
+      generator.setPrettyPrinter(pp);
+    }
+
+    this.reader = new ColumnFileReader(input);
+
+    int columnCount = (int)reader.getColumnCount();
+    this.values = new ColumnValues[columnCount];
+    this.shortNames = new String[columnCount];
+    for (int i = 0; i < columnCount; i++) {
+      values[i] = reader.getValues(i);
+      shortNames[i] = shortName(reader.getColumnMetaData(i));
+    }
+
+    List<ColumnMetaData> roots = reader.getRoots();
+    for (long row = 0; row < reader.getRowCount(); row++) {
+      for (ColumnValues v : values)
+        v.startRow();
+      generator.writeStartObject();
+      for (ColumnMetaData root : roots)
+        valueToJson(root);
+      generator.writeEndObject();
+    }
+    generator.flush();
+    out.println();
+    reader.close();
+  }
+  
+  private void valueToJson(ColumnMetaData column) throws IOException {
+    generator.writeFieldName(shortNames[column.getNumber()]);
+    ColumnValues in = values[column.getNumber()];
+    if (!column.isArray()) {
+      primitiveToJson(column, in.nextValue());
+    } else {
+      generator.writeStartArray();
+      int length = in.nextLength();
+      for (int i = 0; i < length; i++) {
+        Object value = in.nextValue();
+        List<ColumnMetaData> children = column.getChildren();
+        if (children.size() == 0) {
+          primitiveToJson(column, value);
+        } else {
+          generator.writeStartObject();
+          if (value != null) {
+            generator.writeFieldName("value$");
+            primitiveToJson(column, value);
+          }
+          for (ColumnMetaData child : children)
+            valueToJson(child);
+          generator.writeEndObject();
+        }
+      }
+      generator.writeEndArray();
+    }
+  }
+
+  private void primitiveToJson(ColumnMetaData column, Object value) 
+    throws IOException {
+    switch (column.getType()) {
+    case NULL:
+      generator.writeNull();                        break;
+    case BOOLEAN:
+      generator.writeBoolean((Boolean)value);       break;
+    case INT:
+      generator.writeNumber((Integer)value);        break;
+    case LONG:
+      generator.writeNumber((Long)value);           break;
+    case FIXED32:
+      generator.writeNumber((Integer)value);        break;
+    case FIXED64:
+      generator.writeNumber((Long)value);           break;
+    case FLOAT:
+      generator.writeNumber((Float)value);          break;
+    case DOUBLE:
+      generator.writeNumber((Double)value);         break;
+    case STRING:
+      generator.writeString((String)value);         break;
+    case BYTES:
+      generator.writeBinary((byte[])value);
+      break;
+    default:
+      throw new RuntimeException("Unknown value type: "+column.getType());
+    }
+  }
+
+  // trim off portion of name shared with parent
+  private String shortName(ColumnMetaData column) {
+    String name = column.getName();
+    ColumnMetaData parent = column.getParent();
+    if (parent != null && name.startsWith(parent.getName()))
+      name = name.substring(parent.getName().length());
+    if (!Character.isLetterOrDigit(name.charAt(0)))
+      name = name.substring(1);
+    return name;
+  }
+
+}
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniUtil.java b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniUtil.java
new file mode 100644
index 0000000..9b49957
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniUtil.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.InputStream;
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.net.URI;
+
+import org.apache.trevni.Input;
+import org.apache.trevni.avro.HadoopInput;
+import org.apache.trevni.InputFile;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+/** Static utility methods for tools. */
+class TrevniUtil {
+
+  static Input input(String filename) throws IOException {
+    if (filename.startsWith("hdfs://")) {
+      return new HadoopInput(new Path(filename), new Configuration());
+    } else {
+      return new InputFile(new File(filename));
+    }
+  }
+  
+  /**
+   * Returns stdin if filename is "-", else opens the local or HDFS file
+   * and returns an InputStream for it.
+   * @throws IOException 
+   */
+  static InputStream input(String filename, InputStream stdin)
+    throws IOException {
+    if (filename.equals("-"))
+      return new BufferedInputStream(stdin);
+    else if (filename.startsWith("hdfs://")) {
+      FileSystem fs = FileSystem.get(URI.create(filename), new Configuration());
+      return new BufferedInputStream(fs.open(new Path(filename)));
+    } else {
+      return new BufferedInputStream(new FileInputStream(new File(filename)));
+    }
+  }
+  
+  /**
+   * Returns stdout if filename is "-", else opens the local or HDFS file
+   * and returns an OutputStream for it.
+   * @throws IOException 
+   */
+  static OutputStream output(String filename, OutputStream stdout) 
+    throws IOException {
+    if (filename.equals("-"))
+      return new BufferedOutputStream(stdout);
+    else if (filename.startsWith("hdfs://")) {
+      FileSystem fs = FileSystem.get(URI.create(filename), new Configuration());
+      return new BufferedOutputStream(fs.create(new Path(filename)));
+    } else {
+      return new BufferedOutputStream(new FileOutputStream(new File(filename)));
+    }
+  }
+
+}
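
These helpers transparently select stdin/stdout, a local file, or an HDFS path.
A minimal sketch of combining them to copy bytes between two such locations;
the filenames are placeholders, and the sketch assumes a caller inside
org.apache.avro.tool since TrevniUtil is package-private:

    package org.apache.avro.tool;

    import java.io.InputStream;
    import java.io.OutputStream;

    public class TrevniCopySketch {
      public static void main(String[] args) throws Exception {
        // Either name may also be "-" (stdin/stdout) or an hdfs:// URI.
        InputStream in = TrevniUtil.input("input.trv", System.in);
        OutputStream out = TrevniUtil.output("output.trv", System.out);
        byte[] buf = new byte[8192];
        int n;
        while ((n = in.read(buf)) != -1)
          out.write(buf, 0, n);
        in.close();
        out.close();
      }
    }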
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java b/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java
new file mode 100644
index 0000000..c4b20b1
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java
@@ -0,0 +1,262 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import static org.apache.avro.file.DataFileConstants.DEFLATE_CODEC;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.zip.Deflater;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileConstants;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.mapred.FsInput;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import joptsimple.OptionSet;
+import joptsimple.OptionParser;
+import joptsimple.OptionSpec;
+
+/** Static utility methods for tools. */
+class Util {
+  /**
+   * Returns stdin if filename is "-", else opens the File in the owning filesystem
+   * and returns an InputStream for it.
+   * Relative paths will be opened in the default filesystem. 
+   * @param filename The filename to be opened
+   * @throws IOException 
+   */
+  static BufferedInputStream fileOrStdin(String filename, InputStream stdin) 
+      throws IOException {
+    return new BufferedInputStream(filename.equals("-")
+        ? stdin
+        : openFromFS(filename));  
+  }
+  
+  /**
+   * Returns stdout if filename is "-", else opens the file from the owning filesystem
+   * and returns an OutputStream for it.
+   * Relative paths will be opened in the default filesystem.  
+   * @param filename The filename to be opened
+   * @throws IOException 
+   */
+  static BufferedOutputStream fileOrStdout(String filename, OutputStream stdout) 
+      throws IOException {
+    return new BufferedOutputStream(filename.equals("-")
+        ? stdout
+        : createFromFS(filename));
+  }
+  
+  /**
+   * Returns an InputStream for the file using the owning filesystem,
+   * or the default if none is given.
+   * @param filename The filename to be opened
+   * @throws IOException 
+   */
+  static InputStream openFromFS(String filename) 
+      throws IOException {
+    Path p = new Path(filename);
+    return p.getFileSystem(new Configuration()).open(p);
+  }
+  
+  /**
+   * Returns an InputStream for the file using the owning filesystem,
+   * or the default if none is given.
+   * @param filename The filename to be opened
+   * @throws IOException 
+   */
+  static InputStream openFromFS(Path filename) 
+      throws IOException {
+    return filename.getFileSystem(new Configuration()).open(filename);
+  }
+  
+  /**
+   * Returns a seekable FsInput using the owning filesystem, 
+   * or the default if none is given.
+   * @param filename The filename to be opened
+   * @throws IOException 
+   */
+  static FsInput openSeekableFromFS(String filename) 
+      throws IOException {       
+    return new FsInput(new Path(filename), new Configuration());
+  }
+  
+  /**
+   * Opens the file for writing in the owning filesystem,
+   * or the default if none is given.
+   * @param filename The filename to be opened.
+   * @return An OutputStream to the specified file.
+   * @throws IOException
+   */
+  static OutputStream createFromFS(String filename) 
+      throws IOException {
+    Path p = new Path(filename);
+    return new BufferedOutputStream(p.getFileSystem(new Configuration()).create(p));
+  }
+  
+  /**
+   * Closes the input stream created from {@link Util#fileOrStdin}
+   * unless it is System.in.
+   * @param in The input stream to be closed.
+   */
+  static void close(InputStream in) {
+    if (!System.in.equals(in)) {
+      try {
+        in.close();
+      } catch (IOException e) {
+        System.err.println("could not close InputStream " + in.toString());
+      }
+    }
+  }
+  
+  /**
+   * Closes the output stream created from {@link Util#fileOrStdout}
+   * unless it is System.out.
+   * @param out The output stream to be closed.
+   */
+  static void close(OutputStream out) {
+    if (!System.out.equals(out)) {
+      try {
+        out.close();
+      } catch (IOException e) {
+        System.err.println("could not close OutputStream " + out.toString());
+      }
+    }
+  }
+  
+  /**
+   * If pathname is a file, returns a list containing a single absolute Path to that file;
+   * if pathname is a directory, returns a list of Paths to all the files directly within
+   * that directory. Subdirectories and files in subdirectories are not included.
+   * The list is sorted alphabetically.
+   * @param fileOrDirName a filename or directory name
+   * @return a list of Paths
+   * @throws IOException
+   */
+  static List<Path> getFiles(String fileOrDirName) 
+    throws IOException {
+    List<Path> pathList = new ArrayList<Path>();  
+    Path path = new Path(fileOrDirName);
+    FileSystem fs = path.getFileSystem(new Configuration());
+    
+    if (fs.isFile(path)) {
+      pathList.add(path);
+    }
+    else if (fs.getFileStatus(path).isDir()) {
+      for (FileStatus status : fs.listStatus(path)) {
+        if(!status.isDir()) {
+          pathList.add(status.getPath());
+        }
+      }
+    }
+    Collections.sort(pathList);
+    return pathList;
+  }
+  
+  /**
+   * This method returns a list containing a path to every file given in the input
+   * and a path to every file directly inside each given directory.
+   * The list is sorted alphabetically; subdirectories and their contents are excluded.
+   * @param fileOrDirNames A list of filenames and directory names
+   * @return A list of Paths, one for each file
+   * @throws IOException
+   */
+  static List<Path> getFiles(List<String> fileOrDirNames) 
+      throws IOException {
+    ArrayList<Path> pathList = new ArrayList<Path>();
+    for(String name : fileOrDirNames) {
+      pathList.addAll(getFiles(name));
+    }
+    Collections.sort(pathList);
+    return pathList;
+  }
+  
+  /** 
+   * Converts a JSON-encoded string into a generic datum.
+   * 
+   * This is inefficient (creates extra objects), so should be used 
+   * sparingly.
+   */
+  static Object jsonToGenericDatum(Schema schema, String jsonData)
+      throws IOException {
+    GenericDatumReader<Object> reader = new GenericDatumReader<Object>(schema);
+    Object datum = reader.read(null,
+        DecoderFactory.get().jsonDecoder(schema, jsonData));
+    return datum;
+  }
+
+  /** Reads and returns the first datum in a data file. */
+  static Object datumFromFile(Schema schema, String file) throws IOException {
+    DataFileReader<Object> in =
+      new DataFileReader<Object>(new File(file),
+                                 new GenericDatumReader<Object>(schema));
+    try {
+      return in.next();
+    } finally {
+      in.close();
+    }
+  }
+
+  static OptionSpec<String> compressionCodecOption(OptionParser optParser) {
+    return optParser
+      .accepts("codec", "Compression codec")
+      .withRequiredArg()
+      .ofType(String.class)
+      .defaultsTo("null");
+  }
+
+  static OptionSpec<Integer> compressionLevelOption(OptionParser optParser) {
+    return optParser
+      .accepts("level", "Compression level (only applies to deflate and xz)")
+      .withRequiredArg()
+      .ofType(Integer.class)
+      .defaultsTo(Deflater.DEFAULT_COMPRESSION);
+  }
+
+  static CodecFactory codecFactory(OptionSet opts, OptionSpec<String> codec, OptionSpec<Integer> level) {
+    return codecFactory(opts, codec, level, DEFLATE_CODEC);
+  }
+
+  static CodecFactory codecFactory(OptionSet opts, OptionSpec<String> codec, OptionSpec<Integer> level, String defaultCodec) {
+    String codecName = opts.hasArgument(codec)
+      ? codec.value(opts)
+      : defaultCodec;
+    if (codecName.equals(DEFLATE_CODEC)) {
+      return CodecFactory.deflateCodec(level.value(opts));
+    } else if (codecName.equals(DataFileConstants.XZ_CODEC)) {
+      return CodecFactory.xzCodec(level.value(opts));
+    } else {
+      // use codecName rather than codec.value(opts) so the defaultCodec is honored
+      return CodecFactory.fromString(codecName);
+    }
+  }
+}
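
As a rough usage sketch of the helpers above (assuming a caller in the same
package, since Util is package-private): decode an inline JSON datum against a
schema and write it to stdout via the "-" convention. The record schema and
JSON literal here are illustrative only:

    package org.apache.avro.tool;

    import java.io.OutputStream;
    import org.apache.avro.Schema;

    public class UtilSketch {
      public static void main(String[] args) throws Exception {
        Schema schema = new Schema.Parser().parse(
            "{\"type\":\"record\",\"name\":\"R\",\"fields\":"
            + "[{\"name\":\"value\",\"type\":\"int\"}]}");
        Object datum = Util.jsonToGenericDatum(schema, "{\"value\": 42}");
        OutputStream out = Util.fileOrStdout("-", System.out); // "-" selects stdout
        out.write(datum.toString().getBytes("UTF-8"));
        out.flush();
        Util.close(out); // the guard only skips the bare System.out object
      }
    }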
diff --git a/lang/java/tools/src/main/java/org/apache/avro/tool/package.html b/lang/java/tools/src/main/java/org/apache/avro/tool/package.html
new file mode 100644
index 0000000..b08ebae
--- /dev/null
+++ b/lang/java/tools/src/main/java/org/apache/avro/tool/package.html
@@ -0,0 +1,23 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+Avro command-line tool.
+</body>
+</html>
diff --git a/lang/java/tools/src/test/compiler/input/player.avsc b/lang/java/tools/src/test/compiler/input/player.avsc
new file mode 100644
index 0000000..386dca3
--- /dev/null
+++ b/lang/java/tools/src/test/compiler/input/player.avsc
@@ -0,0 +1,8 @@
+{"type":"record", "name":"Player", "namespace": "avro.examples.baseball","doc":"選手 is Japanese for player.",
+  "fields": [
+   {"name": "number", "type": "int", "doc": "The number of the player"},
+   {"name": "first_name", "type": "string"},
+   {"name": "last_name", "type": "string"},
+   {"name": "position", "type": {"type": "array", "items": "Position"} }
+  ]
+}
diff --git a/lang/java/tools/src/test/compiler/input/position.avsc b/lang/java/tools/src/test/compiler/input/position.avsc
new file mode 100644
index 0000000..a47065a
--- /dev/null
+++ b/lang/java/tools/src/test/compiler/input/position.avsc
@@ -0,0 +1,3 @@
+{"type":"enum", "name": "Position", "namespace": "avro.examples.baseball",
+    "symbols": ["P", "C", "B1", "B2", "B3", "SS", "LF", "CF", "RF", "DH"]
+}
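
These two schemas are the compiler test inputs. Player refers to Position by
name, so a parser must see position.avsc first; a minimal parsing sketch using
a single shared Schema.Parser (the file paths are assumed to point at the
fixtures above):

    import java.io.File;
    import org.apache.avro.Schema;

    public class ParseSketch {
      public static void main(String[] args) throws Exception {
        Schema.Parser parser = new Schema.Parser(); // one parser, so named types carry over
        Schema position = parser.parse(new File("position.avsc")); // defines the Position enum
        Schema player = parser.parse(new File("player.avsc"));     // can now resolve "Position"
        System.out.println(player.getField("position").schema());  // array of Position
      }
    }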
diff --git a/lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/Player.java b/lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/Player.java
new file mode 100644
index 0000000..d092078
--- /dev/null
+++ b/lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/Player.java
@@ -0,0 +1,404 @@
+/**
+ * Autogenerated by Avro
+ * 
+ * DO NOT EDIT DIRECTLY
+ */
+package avro.examples.baseball;  
+@SuppressWarnings("all")
+/** 選手 is Japanese for player. */
+@org.apache.avro.specific.AvroGenerated
+public class Player extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
+  private static final long serialVersionUID = 3865593031278745715L;
+  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Player\",\"namespace\":\"avro.examples.baseball\",\"doc\":\"選手 is Japanese for player.\",\"fields\":[{\"name\":\"number\",\"type\":\"int\",\"doc\":\"The number of the player\"},{\"name\":\"first_name\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"last_name\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\ [...]
+  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+  /** The number of the player */
+  @Deprecated public int number;
+  @Deprecated public java.lang.String first_name;
+  @Deprecated public java.lang.String last_name;
+  @Deprecated public java.util.List<avro.examples.baseball.Position> position;
+
+  /**
+   * Default constructor.  Note that this does not initialize fields
+   * to their default values from the schema.  If that is desired then
+   * one should use <code>newBuilder()</code>. 
+   */
+  public Player() {}
+
+  /**
+   * All-args constructor.
+   * @param number The number of the player
+   */
+  public Player(java.lang.Integer number, java.lang.String first_name, java.lang.String last_name, java.util.List<avro.examples.baseball.Position> position) {
+    this.number = number;
+    this.first_name = first_name;
+    this.last_name = last_name;
+    this.position = position;
+  }
+
+  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
+  // Used by DatumWriter.  Applications should not call. 
+  public java.lang.Object get(int field$) {
+    switch (field$) {
+    case 0: return number;
+    case 1: return first_name;
+    case 2: return last_name;
+    case 3: return position;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+  // Used by DatumReader.  Applications should not call. 
+  @SuppressWarnings(value="unchecked")
+  public void put(int field$, java.lang.Object value$) {
+    switch (field$) {
+    case 0: number = (java.lang.Integer)value$; break;
+    case 1: first_name = (java.lang.String)value$; break;
+    case 2: last_name = (java.lang.String)value$; break;
+    case 3: position = (java.util.List<avro.examples.baseball.Position>)value$; break;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+
+  /**
+   * Gets the value of the 'number' field.
+   * @return The number of the player
+   */
+  public java.lang.Integer getNumber() {
+    return number;
+  }
+
+  /**
+   * Sets the value of the 'number' field.
+   * The number of the player
+   * @param value the value to set.
+   */
+  public void setNumber(java.lang.Integer value) {
+    this.number = value;
+  }
+
+  /**
+   * Gets the value of the 'first_name' field.
+   */
+  public java.lang.String getFirstName() {
+    return first_name;
+  }
+
+  /**
+   * Sets the value of the 'first_name' field.
+   * @param value the value to set.
+   */
+  public void setFirstName(java.lang.String value) {
+    this.first_name = value;
+  }
+
+  /**
+   * Gets the value of the 'last_name' field.
+   */
+  public java.lang.String getLastName() {
+    return last_name;
+  }
+
+  /**
+   * Sets the value of the 'last_name' field.
+   * @param value the value to set.
+   */
+  public void setLastName(java.lang.String value) {
+    this.last_name = value;
+  }
+
+  /**
+   * Gets the value of the 'position' field.
+   */
+  public java.util.List<avro.examples.baseball.Position> getPosition() {
+    return position;
+  }
+
+  /**
+   * Sets the value of the 'position' field.
+   * @param value the value to set.
+   */
+  public void setPosition(java.util.List<avro.examples.baseball.Position> value) {
+    this.position = value;
+  }
+
+  /**
+   * Creates a new Player RecordBuilder.
+   * @return A new Player RecordBuilder
+   */
+  public static avro.examples.baseball.Player.Builder newBuilder() {
+    return new avro.examples.baseball.Player.Builder();
+  }
+  
+  /**
+   * Creates a new Player RecordBuilder by copying an existing Builder.
+   * @param other The existing builder to copy.
+   * @return A new Player RecordBuilder
+   */
+  public static avro.examples.baseball.Player.Builder newBuilder(avro.examples.baseball.Player.Builder other) {
+    return new avro.examples.baseball.Player.Builder(other);
+  }
+  
+  /**
+   * Creates a new Player RecordBuilder by copying an existing Player instance.
+   * @param other The existing instance to copy.
+   * @return A new Player RecordBuilder
+   */
+  public static avro.examples.baseball.Player.Builder newBuilder(avro.examples.baseball.Player other) {
+    return new avro.examples.baseball.Player.Builder(other);
+  }
+  
+  /**
+   * RecordBuilder for Player instances.
+   */
+  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<Player>
+    implements org.apache.avro.data.RecordBuilder<Player> {
+
+    /** The number of the player */
+    private int number;
+    private java.lang.String first_name;
+    private java.lang.String last_name;
+    private java.util.List<avro.examples.baseball.Position> position;
+
+    /** Creates a new Builder */
+    private Builder() {
+      super(avro.examples.baseball.Player.SCHEMA$);
+    }
+    
+    /**
+     * Creates a Builder by copying an existing Builder.
+     * @param other The existing Builder to copy.
+     */
+    private Builder(avro.examples.baseball.Player.Builder other) {
+      super(other);
+      if (isValidValue(fields()[0], other.number)) {
+        this.number = data().deepCopy(fields()[0].schema(), other.number);
+        fieldSetFlags()[0] = true;
+      }
+      if (isValidValue(fields()[1], other.first_name)) {
+        this.first_name = data().deepCopy(fields()[1].schema(), other.first_name);
+        fieldSetFlags()[1] = true;
+      }
+      if (isValidValue(fields()[2], other.last_name)) {
+        this.last_name = data().deepCopy(fields()[2].schema(), other.last_name);
+        fieldSetFlags()[2] = true;
+      }
+      if (isValidValue(fields()[3], other.position)) {
+        this.position = data().deepCopy(fields()[3].schema(), other.position);
+        fieldSetFlags()[3] = true;
+      }
+    }
+    
+    /**
+     * Creates a Builder by copying an existing Player instance
+     * @param other The existing instance to copy.
+     */
+    private Builder(avro.examples.baseball.Player other) {
+      super(avro.examples.baseball.Player.SCHEMA$);
+      if (isValidValue(fields()[0], other.number)) {
+        this.number = data().deepCopy(fields()[0].schema(), other.number);
+        fieldSetFlags()[0] = true;
+      }
+      if (isValidValue(fields()[1], other.first_name)) {
+        this.first_name = data().deepCopy(fields()[1].schema(), other.first_name);
+        fieldSetFlags()[1] = true;
+      }
+      if (isValidValue(fields()[2], other.last_name)) {
+        this.last_name = data().deepCopy(fields()[2].schema(), other.last_name);
+        fieldSetFlags()[2] = true;
+      }
+      if (isValidValue(fields()[3], other.position)) {
+        this.position = data().deepCopy(fields()[3].schema(), other.position);
+        fieldSetFlags()[3] = true;
+      }
+    }
+
+    /**
+      * Gets the value of the 'number' field.
+      * The number of the player
+      * @return The value.
+      */
+    public java.lang.Integer getNumber() {
+      return number;
+    }
+
+    /**
+      * Sets the value of the 'number' field.
+      * The number of the player
+      * @param value The value of 'number'.
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder setNumber(int value) {
+      validate(fields()[0], value);
+      this.number = value;
+      fieldSetFlags()[0] = true;
+      return this; 
+    }
+
+    /**
+      * Checks whether the 'number' field has been set.
+      * The number of the player
+      * @return True if the 'number' field has been set, false otherwise.
+      */
+    public boolean hasNumber() {
+      return fieldSetFlags()[0];
+    }
+
+
+    /**
+      * Clears the value of the 'number' field.
+      * The number of the player
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder clearNumber() {
+      fieldSetFlags()[0] = false;
+      return this;
+    }
+
+    /**
+      * Gets the value of the 'first_name' field.
+      * @return The value.
+      */
+    public java.lang.String getFirstName() {
+      return first_name;
+    }
+
+    /**
+      * Sets the value of the 'first_name' field.
+      * @param value The value of 'first_name'.
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder setFirstName(java.lang.String value) {
+      validate(fields()[1], value);
+      this.first_name = value;
+      fieldSetFlags()[1] = true;
+      return this; 
+    }
+
+    /**
+      * Checks whether the 'first_name' field has been set.
+      * @return True if the 'first_name' field has been set, false otherwise.
+      */
+    public boolean hasFirstName() {
+      return fieldSetFlags()[1];
+    }
+
+
+    /**
+      * Clears the value of the 'first_name' field.
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder clearFirstName() {
+      first_name = null;
+      fieldSetFlags()[1] = false;
+      return this;
+    }
+
+    /**
+      * Gets the value of the 'last_name' field.
+      * @return The value.
+      */
+    public java.lang.String getLastName() {
+      return last_name;
+    }
+
+    /**
+      * Sets the value of the 'last_name' field.
+      * @param value The value of 'last_name'.
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder setLastName(java.lang.String value) {
+      validate(fields()[2], value);
+      this.last_name = value;
+      fieldSetFlags()[2] = true;
+      return this; 
+    }
+
+    /**
+      * Checks whether the 'last_name' field has been set.
+      * @return True if the 'last_name' field has been set, false otherwise.
+      */
+    public boolean hasLastName() {
+      return fieldSetFlags()[2];
+    }
+
+
+    /**
+      * Clears the value of the 'last_name' field.
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder clearLastName() {
+      last_name = null;
+      fieldSetFlags()[2] = false;
+      return this;
+    }
+
+    /**
+      * Gets the value of the 'position' field.
+      * @return The value.
+      */
+    public java.util.List<avro.examples.baseball.Position> getPosition() {
+      return position;
+    }
+
+    /**
+      * Sets the value of the 'position' field.
+      * @param value The value of 'position'.
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder setPosition(java.util.List<avro.examples.baseball.Position> value) {
+      validate(fields()[3], value);
+      this.position = value;
+      fieldSetFlags()[3] = true;
+      return this; 
+    }
+
+    /**
+      * Checks whether the 'position' field has been set.
+      * @return True if the 'position' field has been set, false otherwise.
+      */
+    public boolean hasPosition() {
+      return fieldSetFlags()[3];
+    }
+
+
+    /**
+      * Clears the value of the 'position' field.
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder clearPosition() {
+      position = null;
+      fieldSetFlags()[3] = false;
+      return this;
+    }
+
+    @Override
+    public Player build() {
+      try {
+        Player record = new Player();
+        record.number = fieldSetFlags()[0] ? this.number : (java.lang.Integer) defaultValue(fields()[0]);
+        record.first_name = fieldSetFlags()[1] ? this.first_name : (java.lang.String) defaultValue(fields()[1]);
+        record.last_name = fieldSetFlags()[2] ? this.last_name : (java.lang.String) defaultValue(fields()[2]);
+        record.position = fieldSetFlags()[3] ? this.position : (java.util.List<avro.examples.baseball.Position>) defaultValue(fields()[3]);
+        return record;
+      } catch (Exception e) {
+        throw new org.apache.avro.AvroRuntimeException(e);
+      }
+    }
+  }
+
+  private static final org.apache.avro.io.DatumWriter
+    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);  
+
+  @Override public void writeExternal(java.io.ObjectOutput out)
+    throws java.io.IOException {
+    WRITER$.write(this, org.apache.avro.specific.SpecificData.getEncoder(out));
+  }
+
+  private static final org.apache.avro.io.DatumReader
+    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);  
+
+  @Override public void readExternal(java.io.ObjectInput in)
+    throws java.io.IOException {
+    READER$.read(this, org.apache.avro.specific.SpecificData.getDecoder(in));
+  }
+
+}
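
The expected compiler output above includes the full builder API; a short
construction sketch using only methods shown in the generated class (the field
values are arbitrary):

    import java.util.Arrays;
    import avro.examples.baseball.Player;
    import avro.examples.baseball.Position;

    public class BuilderSketch {
      public static void main(String[] args) {
        Player p = Player.newBuilder()
            .setNumber(42)
            .setFirstName("Jackie")
            .setLastName("Robinson")
            .setPosition(Arrays.asList(Position.B2)) // second base
            .build();
        System.out.println(p.getFirstName() + " " + p.getLastName());
      }
    }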
diff --git a/lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/Position.java b/lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/Position.java
new file mode 100644
index 0000000..2d17900
--- /dev/null
+++ b/lang/java/tools/src/test/compiler/output-string/avro/examples/baseball/Position.java
@@ -0,0 +1,13 @@
+/**
+ * Autogenerated by Avro
+ * 
+ * DO NOT EDIT DIRECTLY
+ */
+package avro.examples.baseball;  
+@SuppressWarnings("all")
+@org.apache.avro.specific.AvroGenerated
+public enum Position { 
+  P, C, B1, B2, B3, SS, LF, CF, RF, DH  ;
+  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"enum\",\"name\":\"Position\",\"namespace\":\"avro.examples.baseball\",\"symbols\":[\"P\",\"C\",\"B1\",\"B2\",\"B3\",\"SS\",\"LF\",\"CF\",\"RF\",\"DH\"]}");
+  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+}
diff --git a/lang/java/tools/src/test/compiler/output/Player.java b/lang/java/tools/src/test/compiler/output/Player.java
new file mode 100644
index 0000000..2363ed1
--- /dev/null
+++ b/lang/java/tools/src/test/compiler/output/Player.java
@@ -0,0 +1,404 @@
+/**
+ * Autogenerated by Avro
+ * 
+ * DO NOT EDIT DIRECTLY
+ */
+package avro.examples.baseball;  
+@SuppressWarnings("all")
+/** 選手 is Japanese for player. */
+@org.apache.avro.specific.AvroGenerated
+public class Player extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
+  private static final long serialVersionUID = 3865593031278745715L;
+  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Player\",\"namespace\":\"avro.examples.baseball\",\"doc\":\"選手 is Japanese for player.\",\"fields\":[{\"name\":\"number\",\"type\":\"int\",\"doc\":\"The number of the player\"},{\"name\":\"first_name\",\"type\":\"string\"},{\"name\":\"last_name\",\"type\":\"string\"},{\"name\":\"position\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"enum\",\"name\":\"Po [...]
+  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+  /** The number of the player */
+  @Deprecated public int number;
+  @Deprecated public java.lang.CharSequence first_name;
+  @Deprecated public java.lang.CharSequence last_name;
+  @Deprecated public java.util.List<avro.examples.baseball.Position> position;
+
+  /**
+   * Default constructor.  Note that this does not initialize fields
+   * to their default values from the schema.  If that is desired then
+   * one should use <code>newBuilder()</code>. 
+   */
+  public Player() {}
+
+  /**
+   * All-args constructor.
+   * @param number The number of the player
+   */
+  public Player(java.lang.Integer number, java.lang.CharSequence first_name, java.lang.CharSequence last_name, java.util.List<avro.examples.baseball.Position> position) {
+    this.number = number;
+    this.first_name = first_name;
+    this.last_name = last_name;
+    this.position = position;
+  }
+
+  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
+  // Used by DatumWriter.  Applications should not call. 
+  public java.lang.Object get(int field$) {
+    switch (field$) {
+    case 0: return number;
+    case 1: return first_name;
+    case 2: return last_name;
+    case 3: return position;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+  // Used by DatumReader.  Applications should not call. 
+  @SuppressWarnings(value="unchecked")
+  public void put(int field$, java.lang.Object value$) {
+    switch (field$) {
+    case 0: number = (java.lang.Integer)value$; break;
+    case 1: first_name = (java.lang.CharSequence)value$; break;
+    case 2: last_name = (java.lang.CharSequence)value$; break;
+    case 3: position = (java.util.List<avro.examples.baseball.Position>)value$; break;
+    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
+    }
+  }
+
+  /**
+   * Gets the value of the 'number' field.
+   * @return The number of the player
+   */
+  public java.lang.Integer getNumber() {
+    return number;
+  }
+
+  /**
+   * Sets the value of the 'number' field.
+   * The number of the player
+   * @param value the value to set.
+   */
+  public void setNumber(java.lang.Integer value) {
+    this.number = value;
+  }
+
+  /**
+   * Gets the value of the 'first_name' field.
+   */
+  public java.lang.CharSequence getFirstName() {
+    return first_name;
+  }
+
+  /**
+   * Sets the value of the 'first_name' field.
+   * @param value the value to set.
+   */
+  public void setFirstName(java.lang.CharSequence value) {
+    this.first_name = value;
+  }
+
+  /**
+   * Gets the value of the 'last_name' field.
+   */
+  public java.lang.CharSequence getLastName() {
+    return last_name;
+  }
+
+  /**
+   * Sets the value of the 'last_name' field.
+   * @param value the value to set.
+   */
+  public void setLastName(java.lang.CharSequence value) {
+    this.last_name = value;
+  }
+
+  /**
+   * Gets the value of the 'position' field.
+   */
+  public java.util.List<avro.examples.baseball.Position> getPosition() {
+    return position;
+  }
+
+  /**
+   * Sets the value of the 'position' field.
+   * @param value the value to set.
+   */
+  public void setPosition(java.util.List<avro.examples.baseball.Position> value) {
+    this.position = value;
+  }
+
+  /**
+   * Creates a new Player RecordBuilder.
+   * @return A new Player RecordBuilder
+   */
+  public static avro.examples.baseball.Player.Builder newBuilder() {
+    return new avro.examples.baseball.Player.Builder();
+  }
+  
+  /**
+   * Creates a new Player RecordBuilder by copying an existing Builder.
+   * @param other The existing builder to copy.
+   * @return A new Player RecordBuilder
+   */
+  public static avro.examples.baseball.Player.Builder newBuilder(avro.examples.baseball.Player.Builder other) {
+    return new avro.examples.baseball.Player.Builder(other);
+  }
+  
+  /**
+   * Creates a new Player RecordBuilder by copying an existing Player instance.
+   * @param other The existing instance to copy.
+   * @return A new Player RecordBuilder
+   */
+  public static avro.examples.baseball.Player.Builder newBuilder(avro.examples.baseball.Player other) {
+    return new avro.examples.baseball.Player.Builder(other);
+  }
+  
+  /**
+   * RecordBuilder for Player instances.
+   */
+  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<Player>
+    implements org.apache.avro.data.RecordBuilder<Player> {
+
+    /** The number of the player */
+    private int number;
+    private java.lang.CharSequence first_name;
+    private java.lang.CharSequence last_name;
+    private java.util.List<avro.examples.baseball.Position> position;
+
+    /** Creates a new Builder */
+    private Builder() {
+      super(avro.examples.baseball.Player.SCHEMA$);
+    }
+    
+    /**
+     * Creates a Builder by copying an existing Builder.
+     * @param other The existing Builder to copy.
+     */
+    private Builder(avro.examples.baseball.Player.Builder other) {
+      super(other);
+      if (isValidValue(fields()[0], other.number)) {
+        this.number = data().deepCopy(fields()[0].schema(), other.number);
+        fieldSetFlags()[0] = true;
+      }
+      if (isValidValue(fields()[1], other.first_name)) {
+        this.first_name = data().deepCopy(fields()[1].schema(), other.first_name);
+        fieldSetFlags()[1] = true;
+      }
+      if (isValidValue(fields()[2], other.last_name)) {
+        this.last_name = data().deepCopy(fields()[2].schema(), other.last_name);
+        fieldSetFlags()[2] = true;
+      }
+      if (isValidValue(fields()[3], other.position)) {
+        this.position = data().deepCopy(fields()[3].schema(), other.position);
+        fieldSetFlags()[3] = true;
+      }
+    }
+    
+    /**
+     * Creates a Builder by copying an existing Player instance
+     * @param other The existing instance to copy.
+     */
+    private Builder(avro.examples.baseball.Player other) {
+      super(avro.examples.baseball.Player.SCHEMA$);
+      if (isValidValue(fields()[0], other.number)) {
+        this.number = data().deepCopy(fields()[0].schema(), other.number);
+        fieldSetFlags()[0] = true;
+      }
+      if (isValidValue(fields()[1], other.first_name)) {
+        this.first_name = data().deepCopy(fields()[1].schema(), other.first_name);
+        fieldSetFlags()[1] = true;
+      }
+      if (isValidValue(fields()[2], other.last_name)) {
+        this.last_name = data().deepCopy(fields()[2].schema(), other.last_name);
+        fieldSetFlags()[2] = true;
+      }
+      if (isValidValue(fields()[3], other.position)) {
+        this.position = data().deepCopy(fields()[3].schema(), other.position);
+        fieldSetFlags()[3] = true;
+      }
+    }
+
+    /**
+      * Gets the value of the 'number' field.
+      * The number of the player
+      * @return The value.
+      */
+    public java.lang.Integer getNumber() {
+      return number;
+    }
+
+    /**
+      * Sets the value of the 'number' field.
+      * The number of the player
+      * @param value The value of 'number'.
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder setNumber(int value) {
+      validate(fields()[0], value);
+      this.number = value;
+      fieldSetFlags()[0] = true;
+      return this; 
+    }
+
+    /**
+      * Checks whether the 'number' field has been set.
+      * The number of the player
+      * @return True if the 'number' field has been set, false otherwise.
+      */
+    public boolean hasNumber() {
+      return fieldSetFlags()[0];
+    }
+
+
+    /**
+      * Clears the value of the 'number' field.
+      * The number of the player
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder clearNumber() {
+      fieldSetFlags()[0] = false;
+      return this;
+    }
+
+    /**
+      * Gets the value of the 'first_name' field.
+      * @return The value.
+      */
+    public java.lang.CharSequence getFirstName() {
+      return first_name;
+    }
+
+    /**
+      * Sets the value of the 'first_name' field.
+      * @param value The value of 'first_name'.
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder setFirstName(java.lang.CharSequence value) {
+      validate(fields()[1], value);
+      this.first_name = value;
+      fieldSetFlags()[1] = true;
+      return this; 
+    }
+
+    /**
+      * Checks whether the 'first_name' field has been set.
+      * @return True if the 'first_name' field has been set, false otherwise.
+      */
+    public boolean hasFirstName() {
+      return fieldSetFlags()[1];
+    }
+
+
+    /**
+      * Clears the value of the 'first_name' field.
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder clearFirstName() {
+      first_name = null;
+      fieldSetFlags()[1] = false;
+      return this;
+    }
+
+    /**
+      * Gets the value of the 'last_name' field.
+      * @return The value.
+      */
+    public java.lang.CharSequence getLastName() {
+      return last_name;
+    }
+
+    /**
+      * Sets the value of the 'last_name' field.
+      * @param value The value of 'last_name'.
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder setLastName(java.lang.CharSequence value) {
+      validate(fields()[2], value);
+      this.last_name = value;
+      fieldSetFlags()[2] = true;
+      return this; 
+    }
+
+    /**
+      * Checks whether the 'last_name' field has been set.
+      * @return True if the 'last_name' field has been set, false otherwise.
+      */
+    public boolean hasLastName() {
+      return fieldSetFlags()[2];
+    }
+
+
+    /**
+      * Clears the value of the 'last_name' field.
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder clearLastName() {
+      last_name = null;
+      fieldSetFlags()[2] = false;
+      return this;
+    }
+
+    /**
+      * Gets the value of the 'position' field.
+      * @return The value.
+      */
+    public java.util.List<avro.examples.baseball.Position> getPosition() {
+      return position;
+    }
+
+    /**
+      * Sets the value of the 'position' field.
+      * @param value The value of 'position'.
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder setPosition(java.util.List<avro.examples.baseball.Position> value) {
+      validate(fields()[3], value);
+      this.position = value;
+      fieldSetFlags()[3] = true;
+      return this; 
+    }
+
+    /**
+      * Checks whether the 'position' field has been set.
+      * @return True if the 'position' field has been set, false otherwise.
+      */
+    public boolean hasPosition() {
+      return fieldSetFlags()[3];
+    }
+
+
+    /**
+      * Clears the value of the 'position' field.
+      * @return This builder.
+      */
+    public avro.examples.baseball.Player.Builder clearPosition() {
+      position = null;
+      fieldSetFlags()[3] = false;
+      return this;
+    }
+
+    @Override
+    public Player build() {
+      try {
+        Player record = new Player();
+        record.number = fieldSetFlags()[0] ? this.number : (java.lang.Integer) defaultValue(fields()[0]);
+        record.first_name = fieldSetFlags()[1] ? this.first_name : (java.lang.CharSequence) defaultValue(fields()[1]);
+        record.last_name = fieldSetFlags()[2] ? this.last_name : (java.lang.CharSequence) defaultValue(fields()[2]);
+        record.position = fieldSetFlags()[3] ? this.position : (java.util.List<avro.examples.baseball.Position>) defaultValue(fields()[3]);
+        return record;
+      } catch (Exception e) {
+        throw new org.apache.avro.AvroRuntimeException(e);
+      }
+    }
+  }
+
+  private static final org.apache.avro.io.DatumWriter
+    WRITER$ = new org.apache.avro.specific.SpecificDatumWriter(SCHEMA$);  
+
+  @Override public void writeExternal(java.io.ObjectOutput out)
+    throws java.io.IOException {
+    WRITER$.write(this, org.apache.avro.specific.SpecificData.getEncoder(out));
+  }
+
+  private static final org.apache.avro.io.DatumReader
+    READER$ = new org.apache.avro.specific.SpecificDatumReader(SCHEMA$);  
+
+  @Override public void readExternal(java.io.ObjectInput in)
+    throws java.io.IOException {
+    READER$.read(this, org.apache.avro.specific.SpecificData.getDecoder(in));
+  }
+
+}
diff --git a/lang/java/tools/src/test/compiler/output/Position.java b/lang/java/tools/src/test/compiler/output/Position.java
new file mode 100644
index 0000000..2d17900
--- /dev/null
+++ b/lang/java/tools/src/test/compiler/output/Position.java
@@ -0,0 +1,13 @@
+/**
+ * Autogenerated by Avro
+ * 
+ * DO NOT EDIT DIRECTLY
+ */
+package avro.examples.baseball;  
+@SuppressWarnings("all")
+@org.apache.avro.specific.AvroGenerated
+public enum Position { 
+  P, C, B1, B2, B3, SS, LF, CF, RF, DH  ;
+  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"enum\",\"name\":\"Position\",\"namespace\":\"avro.examples.baseball\",\"symbols\":[\"P\",\"C\",\"B1\",\"B2\",\"B3\",\"SS\",\"LF\",\"CF\",\"RF\",\"DH\"]}");
+  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
+}
diff --git a/lang/java/tools/src/test/idl/protocol.avdl b/lang/java/tools/src/test/idl/protocol.avdl
new file mode 100644
index 0000000..82525b2
--- /dev/null
+++ b/lang/java/tools/src/test/idl/protocol.avdl
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * An example protocol in Avro IDL
+ */
+@namespace("org.apache.avro.test")
+protocol Simple {
+
+  @aliases(["org.foo.KindOf"])
+  enum Kind {
+    FOO,
+    BAR, // the bar enum value
+    BAZ
+  }
+
+  fixed MD5(16);
+
+  record TestRecord {
+    @order("ignore")
+    string name;
+
+    @order("descending")
+    Kind kind;
+
+    MD5 hash;
+
+    union { MD5, null } @aliases(["hash"]) nullableHash;
+
+    array<long> arrayOfLongs;
+  }
+
+  error TestError {
+    string message;
+  }
+
+  string hello(string greeting);
+  TestRecord echo(TestRecord `record`);
+  int add(int arg1, int arg2);
+  bytes echoBytes(bytes data);
+  void `error`() throws TestError;
+  void ping() oneway;
+}
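
Note that the backquoted identifiers `record` and `error` above escape Avro IDL
keywords. The IDL is compiled to a JSON protocol; a minimal sketch of driving
that from Java with the Idl parser from the avro-compiler module (an assumption
of this sketch, mirroring how IdlTool uses it), with the fixture path hard-coded:

    import java.io.File;
    import org.apache.avro.Protocol;
    import org.apache.avro.compiler.idl.Idl;

    public class IdlSketch {
      public static void main(String[] args) throws Exception {
        Idl parser = new Idl(new File("protocol.avdl")); // the fixture above
        Protocol p = parser.CompilationUnit();           // parse the whole IDL file
        System.out.println(p.toString(true));            // pretty-printed .avpr JSON
        parser.close();
      }
    }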
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestCatTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestCatTool.java
new file mode 100644
index 0000000..d0c6d65
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestCatTool.java
@@ -0,0 +1,347 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import static java.util.Arrays.asList;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.avro.AvroTestUtil;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+import org.junit.Test;
+
+public class TestCatTool {
+  private static final int ROWS_IN_INPUT_FILES = 100000;
+  private static final int OFFSET = 1000;
+  private static final int LIMIT_WITHIN_INPUT_BOUNDS = 100;
+  private static final int LIMIT_OUT_OF_INPUT_BOUNDS = 100001;
+  private static final double SAMPLERATE = .01;
+  private static final double SAMPLERATE_TOO_SMALL = .00000001;
+  
+  private final Schema INTSCHEMA = new Schema.Parser().parse(
+    "{\"type\":\"record\", " +
+    "\"name\":\"myRecord\", " +
+    "\"fields\":[ " +
+    "{\"name\":\"value\",\"type\":\"int\"} " +
+    "]}");
+  private final Schema STRINGSCHEMA = new Schema.Parser().parse(
+    "{\"type\":\"record\", " +
+    "\"name\":\"myRecord\", " +
+    "\"fields\":[ {\"name\":\"value\",\"type\":\"string\"} " +
+    "]}");
+  private static final CodecFactory DEFLATE = CodecFactory.deflateCodec(9);
+  private static final CodecFactory SNAPPY = CodecFactory.snappyCodec();
+  
+
+  private GenericRecord aDatum(Type ofType, int forRow) {
+    GenericRecord record = null;
+    switch (ofType) {
+      case STRING:
+        record = new GenericData.Record(STRINGSCHEMA);
+        record.put("value", String.valueOf(forRow % 100));
+        return record;
+      case INT:
+        record = new GenericData.Record(INTSCHEMA);
+        record.put("value", forRow);
+        return record;      
+      default:
+       throw new AssertionError("I can't generate data for this type");
+    }
+  }
+
+  private File generateData(String file, Type type, Map<String, String> metadata, CodecFactory codec) throws Exception {
+    File inputFile = AvroTestUtil.tempFile(getClass(), file);
+    inputFile.deleteOnExit();
+
+    Schema schema = null;
+    if(type.equals(Schema.Type.INT)) {
+      schema = INTSCHEMA;
+    }
+    if(type.equals(Schema.Type.STRING)) {
+      schema = STRINGSCHEMA;
+    }
+       
+    DataFileWriter<Object> writer = new DataFileWriter<Object>(
+              new GenericDatumWriter<Object>(schema));
+    for(Entry<String, String> metadatum : metadata.entrySet()) {
+        writer.setMeta(metadatum.getKey(), metadatum.getValue());
+    }
+    writer.setCodec(codec);
+    writer.create(schema, inputFile);
+
+    for (int i = 0; i < ROWS_IN_INPUT_FILES; i++) {
+      writer.append(aDatum(type, i));
+    }
+    writer.close();
+
+    return inputFile;
+  }
+ 
+  
+  private int getFirstIntDatum(File file) throws Exception {
+    DataFileStream<GenericRecord> reader = new DataFileStream<GenericRecord>( new FileInputStream(file) , 
+      new GenericDatumReader<GenericRecord>());
+
+    int result = (Integer) reader.next().get(0);
+    System.out.println(result);
+    reader.close();
+    return result;
+  }
+
+  private int numRowsInFile(File output) throws Exception {
+    DataFileStream<GenericRecord> reader = new DataFileStream<GenericRecord>(
+      new FileInputStream(output),
+      new GenericDatumReader<GenericRecord>());
+    Iterator<GenericRecord> rows = reader.iterator();
+    int rowcount = 0;
+    while(rows.hasNext()) {
+      ++rowcount;
+      rows.next();
+    }
+    reader.close();
+    return rowcount;
+  }
+
+  @Test
+  public void testCat() throws Exception {
+    Map<String, String> metadata = new HashMap<String, String>();
+    metadata.put("myMetaKey", "myMetaValue");
+
+    File input1 = generateData("input1.avro", Type.INT, metadata, DEFLATE);
+    File input2 = generateData("input2.avro", Type.INT, metadata, SNAPPY);
+    File input3 = generateData("input3.avro", Type.INT, metadata, DEFLATE);
+
+    File output = AvroTestUtil.tempFile(getClass(), "out/default-output.avro");
+    output.deleteOnExit();
+
+//    file input
+    List<String> args = asList(
+      input1.getAbsolutePath(),
+      input2.getAbsolutePath(),
+      input3.getAbsolutePath(),
+      "--offset" , String.valueOf(OFFSET),
+      "--limit" , String.valueOf(LIMIT_WITHIN_INPUT_BOUNDS),
+      "--samplerate" , String.valueOf(SAMPLERATE),
+      output.getAbsolutePath());
+    int returnCode = new CatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
+    assertEquals(0, returnCode);
+
+    assertEquals(LIMIT_WITHIN_INPUT_BOUNDS, numRowsInFile(output));
+    
+//    folder input
+    args = asList(
+      input1.getParentFile().getAbsolutePath(),
+      output.getAbsolutePath(),
+      "--offset" , String.valueOf(OFFSET),
+      "--limit" , String.valueOf(LIMIT_WITHIN_INPUT_BOUNDS));
+    returnCode = new CatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
+    assertEquals(0, returnCode);
+    assertEquals(LIMIT_WITHIN_INPUT_BOUNDS, numRowsInFile(output));
+  }
+
+  
+  @Test
+  public void testLimitOutOfBounds() throws Exception {
+    Map<String, String> metadata = new HashMap<String, String>();
+    metadata.put("myMetaKey", "myMetaValue");
+
+    File input1 = generateData("input1.avro", Type.INT, metadata, DEFLATE);
+    File output = AvroTestUtil.tempFile(getClass(), "out/default-output.avro");
+    output.deleteOnExit();
+
+    List<String> args = asList(
+      input1.getAbsolutePath(),
+      "--offset=" + String.valueOf(OFFSET),
+      "--limit=" + String.valueOf(LIMIT_OUT_OF_INPUT_BOUNDS),
+      output.getAbsolutePath());
+    int returnCode = new CatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
+    assertEquals(0, returnCode);
+    assertEquals(ROWS_IN_INPUT_FILES - OFFSET, numRowsInFile(output)); 
+  }
+  
+  @Test
+  public void testSamplerateAccuracy() throws Exception {
+    Map<String, String> metadata = new HashMap<String, String>();
+    metadata.put("myMetaKey", "myMetaValue");
+
+    File input1 = generateData("input1.avro", Type.INT, metadata, DEFLATE);
+    File output = AvroTestUtil.tempFile(getClass(), "out/default-output.avro");
+    output.deleteOnExit();
+
+    List<String> args = asList(
+      input1.getAbsolutePath(),
+      output.getAbsolutePath(),
+      "--offset" , String.valueOf(OFFSET),
+      "--samplerate" , String.valueOf(SAMPLERATE));
+    int returnCode = new CatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
+    assertEquals(0, returnCode);
+    
+    assertTrue("Outputsize is not roughly (Inputsize - Offset) * samplerate",
+      (ROWS_IN_INPUT_FILES - OFFSET)*SAMPLERATE - numRowsInFile(output) < 2);    
+    assertTrue("", (ROWS_IN_INPUT_FILES - OFFSET)*SAMPLERATE - numRowsInFile(output) > -2);
+  }
+
+  @Test
+  public void testOffSetAccuracy() throws Exception {
+    Map<String, String> metadata = new HashMap<String, String>();
+    metadata.put("myMetaKey", "myMetaValue");
+
+    File input1 = generateData("input1.avro", Type.INT, metadata, DEFLATE);
+    File output = AvroTestUtil.tempFile(getClass(), "out/default-output.avro");
+    output.deleteOnExit();
+
+    List<String> args = asList(
+      input1.getAbsolutePath(),
+      "--offset" , String.valueOf(OFFSET),
+      "--limit" , String.valueOf(LIMIT_WITHIN_INPUT_BOUNDS),
+      "--samplerate" , String.valueOf(SAMPLERATE),
+      output.getAbsolutePath());
+    int returnCode = new CatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
+    assertEquals(0, returnCode);
+    assertEquals("output does not start at offset",
+      OFFSET, getFirstIntDatum(output));
+  }
+  
+  @Test
+  public void testOffsetBiggerThanInput() throws Exception {
+    Map<String, String> metadata = new HashMap<String, String>();
+    metadata.put("myMetaKey", "myMetaValue");
+
+    File input1 = generateData("input1.avro", Type.INT, metadata, DEFLATE);
+    File output = AvroTestUtil.tempFile(getClass(), "out/default-output.avro");
+    output.deleteOnExit();
+
+    List<String> args = asList(
+      input1.getAbsolutePath(),
+      "--offset" , String.valueOf(ROWS_IN_INPUT_FILES + 1),
+      output.getAbsolutePath());
+    int returnCode = new CatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
+    assertEquals(0, returnCode);
+    assertEquals("output is not empty",
+      0, numRowsInFile(output));
+  }
+  
+  @Test
+  public void testSamplerateSmallerThanInput() throws Exception {
+    Map<String, String> metadata = new HashMap<String, String>();
+    metadata.put("myMetaKey", "myMetaValue");
+
+    File input1 = generateData("input1.avro", Type.INT, metadata, DEFLATE);
+    File output = AvroTestUtil.tempFile(getClass(), "out/default-output.avro");
+    output.deleteOnExit();
+
+    List<String> args = asList(
+      input1.getAbsolutePath(),
+      output.getAbsolutePath(),
+      "--offset=" +  new Integer(OFFSET).toString(),
+      "--samplerate=" + new Double(SAMPLERATE_TOO_SMALL).toString());
+    int returnCode = new CatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
+    assertEquals(0, returnCode);
+    
+    assertEquals("output should only contain the record at offset",
+      (int) OFFSET, getFirstIntDatum(output));
+  }
+  
+  
+  @Test(expected = IOException.class)
+  public void testDifferentSchemasFail() throws Exception {
+    Map<String, String> metadata = new HashMap<String, String>();
+    metadata.put("myMetaKey", "myMetaValue");
+
+    File input1 = generateData("input1.avro", Type.STRING, metadata, DEFLATE);
+    File input2 = generateData("input2.avro", Type.INT, metadata, DEFLATE);
+
+    File output = AvroTestUtil.tempFile(getClass(), "out/default-output.avro");
+    output.deleteOnExit();
+
+    List<String> args = asList(
+      input1.getAbsolutePath(),
+      input2.getAbsolutePath(),
+      output.getAbsolutePath());
+    new CatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
+  }
+
+  @Test
+  public void testHelpfulMessageWhenNoArgsGiven() throws Exception {
+    ByteArrayOutputStream buffer = new ByteArrayOutputStream(1024);
+    PrintStream out = new PrintStream(buffer);
+    int returnCode = new CatTool().run(
+      System.in,
+      out,
+      System.err,
+      Collections.<String>emptyList());
+    out.close(); // flushes too
+
+    assertEquals(0, returnCode);
+    assertTrue(
+      "should have lots of help",
+      buffer.toString().trim().length() > 200);
+  }
+}
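
The tests drive CatTool through the same entry point the avro-tools launcher
uses; a minimal standalone sketch of an equivalent invocation (the paths are
placeholders, and avro-tools is assumed to be on the classpath):

    import static java.util.Arrays.asList;

    import org.apache.avro.tool.CatTool;

    public class CatSketch {
      public static void main(String[] args) throws Exception {
        // Roughly: copy 100 records, starting 1000 records in, from in.avro to out.avro.
        int rc = new CatTool().run(
            System.in, System.out, System.err,
            asList("--offset=1000", "--limit=100", "in.avro", "out.avro"));
        System.exit(rc);
      }
    }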
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestConcatTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestConcatTool.java
new file mode 100644
index 0000000..af31ccb
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestConcatTool.java
@@ -0,0 +1,225 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import static java.util.Arrays.asList;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.PrintStream;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.avro.AvroTestUtil;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.file.CodecFactory;
+import org.apache.avro.file.DataFileConstants;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+import org.junit.Test;
+
+public class TestConcatTool {
+  private static final int ROWS_IN_INPUT_FILES = 100000;
+  private static final CodecFactory DEFLATE = CodecFactory.deflateCodec(9);
+
+  private Object aDatum(Type ofType, int forRow) {
+    switch (ofType) {
+      case STRING:
+        return String.valueOf(forRow % 100);
+      case INT:
+        return forRow;
+      default:
+       throw new AssertionError("I can't generate data for this type");
+    }
+  }
+
+  private File generateData(String file, Type type, Map<String, String> metadata, CodecFactory codec) throws Exception {
+    File inputFile = AvroTestUtil.tempFile(getClass(), file);
+    inputFile.deleteOnExit();
+
+    Schema schema = Schema.create(type);
+    DataFileWriter<Object> writer = new DataFileWriter<Object>(
+              new GenericDatumWriter<Object>(schema));
+    for (Entry<String, String> metadatum : metadata.entrySet()) {
+      writer.setMeta(metadatum.getKey(), metadatum.getValue());
+    }
+    writer.setCodec(codec);
+    writer.create(schema, inputFile);
+
+    for (int i = 0; i < ROWS_IN_INPUT_FILES; i++) {
+      writer.append(aDatum(type, i));
+    }
+    writer.close();
+
+    return inputFile;
+  }
+
+  private CodecFactory getCodec(File output) throws Exception {
+      DataFileStream<GenericRecord> reader = new DataFileStream<GenericRecord>(
+        new FileInputStream(output),
+        new GenericDatumReader<GenericRecord>());
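+      // The container file stores its codec name in its metadata, under the
+      // key named by DataFileConstants.CODEC ("avro.codec").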
+      String codec = reader.getMetaString(DataFileConstants.CODEC);
+      try {
+        return codec == null ? CodecFactory.nullCodec() : CodecFactory.fromString(codec);
+      } finally {
+        reader.close();
+      }
+  }
+
+  private int numRowsInFile(File output) throws Exception {
+    DataFileStream<GenericRecord> reader = new DataFileStream<GenericRecord>(
+      new FileInputStream(output),
+      new GenericDatumReader<GenericRecord>());
+    Iterator<GenericRecord> rows = reader.iterator();
+    int rowcount = 0;
+    while (rows.hasNext()) {
+      ++rowcount;
+      rows.next();
+    }
+    reader.close();
+    return rowcount;
+  }
+
+  @Test
+  public void testConcat() throws Exception {
+    Map<String, String> metadata = new HashMap<String, String>();
+    metadata.put("myMetaKey", "myMetaValue");
+
+    File input1 = generateData("input1.avro", Type.STRING, metadata, DEFLATE);
+    File input2 = generateData("input2.avro", Type.STRING, metadata, DEFLATE);
+    File input3 = generateData("input3.avro", Type.STRING, metadata, DEFLATE);
+
+    File output = AvroTestUtil.tempFile(getClass(), "default-output.avro");
+    output.deleteOnExit();
+
+    List<String> args = asList(
+      input1.getAbsolutePath(),
+      input2.getAbsolutePath(),
+      input3.getAbsolutePath(),
+      output.getAbsolutePath());
+    int returnCode = new ConcatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
+    assertEquals(0, returnCode);
+
+    assertEquals(ROWS_IN_INPUT_FILES * 3, numRowsInFile(output));
+    assertEquals(getCodec(input1).getClass(), getCodec(output).getClass());
+  }
+
+  @Test
+  public void testDifferentSchemasFail() throws Exception {
+    Map<String, String> metadata = new HashMap<String, String>();
+    metadata.put("myMetaKey", "myMetaValue");
+
+    File input1 = generateData("input1.avro", Type.STRING, metadata, DEFLATE);
+    File input2 = generateData("input2.avro", Type.INT, metadata, DEFLATE);
+
+    File output = AvroTestUtil.tempFile(getClass(), "default-output.avro");
+    output.deleteOnExit();
+
+    List<String> args = asList(
+      input1.getAbsolutePath(),
+      input2.getAbsolutePath(),
+      output.getAbsolutePath());
+    int returnCode = new ConcatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
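+    // A schema mismatch between the inputs is signalled with exit code 1.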
+    assertEquals(1, returnCode);
+  }
+
+  @Test
+  public void testDifferentMetadataFail() throws Exception {
+    Map<String, String> metadata1 = new HashMap<String, String>();
+    metadata1.put("myMetaKey", "myMetaValue");
+    Map<String, String> metadata2 = new HashMap<String, String>();
+    metadata2.put("myOtherMetaKey", "myOtherMetaValue");
+
+    File input1 = generateData("input1.avro", Type.STRING, metadata1, DEFLATE);
+    File input2 = generateData("input2.avro", Type.STRING, metadata2, DEFLATE);
+
+    File output = AvroTestUtil.tempFile(getClass(), "default-output.avro");
+    output.deleteOnExit();
+
+    List<String> args = asList(
+      input1.getAbsolutePath(),
+      input2.getAbsolutePath(),
+      output.getAbsolutePath());
+    int returnCode = new ConcatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
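+    // Mismatched file metadata is signalled with exit code 2.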
+    assertEquals(2, returnCode);
+  }
+
+  @Test
+  public void testDifferentCodecFail() throws Exception {
+    Map<String, String> metadata = new HashMap<String, String>();
+    metadata.put("myMetaKey", "myMetaValue");
+
+    File input1 = generateData("input1.avro", Type.STRING, metadata, DEFLATE);
+    File input2 = generateData("input2.avro", Type.STRING, metadata, CodecFactory.nullCodec());
+
+    File output = AvroTestUtil.tempFile(getClass(), "default-output.avro");
+    output.deleteOnExit();
+
+    List<String> args = asList(
+      input1.getAbsolutePath(),
+      input2.getAbsolutePath(),
+      output.getAbsolutePath());
+    int returnCode = new ConcatTool().run(
+      System.in,
+      System.out,
+      System.err,
+      args);
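+    // Mismatched compression codecs are signalled with exit code 3.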
+    assertEquals(3, returnCode);
+  }
+
+  @Test
+  public void testHelpfulMessageWhenNoArgsGiven() throws Exception {
+    ByteArrayOutputStream buffer = new ByteArrayOutputStream(1024);
+    PrintStream out = new PrintStream(buffer);
+    int returnCode = new ConcatTool().run(
+      System.in,
+      out,
+      System.err,
+      Collections.<String>emptyList());
+    out.close(); // flushes too
+
+    assertEquals(0, returnCode);
+    assertTrue(
+      "should have lots of help",
+      buffer.toString().trim().length() > 200);
+  }
+}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestCreateRandomFileTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestCreateRandomFileTool.java
new file mode 100644
index 0000000..f5c6056
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestCreateRandomFileTool.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Iterator;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.trevni.avro.RandomData;
+import org.apache.trevni.TestUtil;
+
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+
+public class TestCreateRandomFileTool {
+  private static final String COUNT = System.getProperty("test.count", "200");
+  private static final File DIR
+    = new File(System.getProperty("test.dir", "/tmp"));
+  private static final File OUT_FILE = new File(DIR, "random.avro");
+  private static final File SCHEMA_FILE =
+    new File("../../../share/test/schemas/weather.avsc");
+
+  private byte[] run(List<String> args) throws Exception {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PrintStream p = new PrintStream(baos);
+    PrintStream save = System.out;
+    try {
+      System.setOut(p);
+      new CreateRandomFileTool().run(null, p, null, args);
+    } finally {
+      System.setOut(save);
+    }
+    return baos.toByteArray();
+  }
+  
+  public void check(String... extraArgs) throws Exception {
+    ArrayList<String> args = new ArrayList<String>();
+    args.addAll(Arrays.asList(
+        OUT_FILE.toString(),
+        "--count", COUNT,
+        "--schema-file", SCHEMA_FILE.toString()));
+    args.addAll(Arrays.asList(extraArgs));
+    run(args);
+
+    DataFileReader<Object> reader =
+      new DataFileReader<Object>(OUT_FILE, new GenericDatumReader<Object>());
+    
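+    // RandomData generates a deterministic sequence, so iterating a fresh
+    // instance reproduces exactly what the tool wrote to the file.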
+    Iterator<Object> found = reader.iterator();
+    for (Object expected :
+           new RandomData(Schema.parse(SCHEMA_FILE), Integer.parseInt(COUNT)))
+      assertEquals(expected, found.next());
+
+    reader.close();
+  }
+
+  @Test
+  public void testSimple() throws Exception {
+    check();
+  }
+
+  @Test
+  public void testCodec() throws Exception {
+    check("--codec", "snappy");
+  }
+
+  @Test
+  public void testStdOut() throws Exception {
+    TestUtil.resetRandomSeed();
+    byte[] file =
+      run(Arrays.asList(
+        "-", "--count", COUNT, "--schema-file", SCHEMA_FILE.toString()));
+    
+    DataFileStream<Object> reader =
+      new DataFileStream<Object>(new ByteArrayInputStream(file),
+                         new GenericDatumReader<Object>());
+    
+    Iterator<Object> found = reader.iterator();
+    for (Object expected :
+           new RandomData(Schema.parse(SCHEMA_FILE), Integer.parseInt(COUNT)))
+      assertEquals(expected, found.next());
+
+    reader.close();
+  }
+}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileRepairTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileRepairTool.java
new file mode 100644
index 0000000..70c3667
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileRepairTool.java
@@ -0,0 +1,207 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.util.Arrays;
+import org.apache.avro.AvroTestUtil;
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileConstants;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.BinaryData;
+import org.apache.avro.util.Utf8;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class TestDataFileRepairTool {
+
+  private static final Schema SCHEMA = Schema.create(Schema.Type.STRING);
+  private static File corruptBlockFile;
+  private static File corruptRecordFile;
+
+  private File repairedFile;
+
+  @BeforeClass
+  public static void writeCorruptFile() throws IOException {
+    // Write a data file
+    DataFileWriter<Utf8> w = new DataFileWriter<Utf8>(new GenericDatumWriter<Utf8>(SCHEMA));
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    w.create(SCHEMA, baos);
+    w.append(new Utf8("apple"));
+    w.append(new Utf8("banana"));
+    w.append(new Utf8("celery"));
+    w.sync();
+    w.append(new Utf8("date"));
+    w.append(new Utf8("endive"));
+    w.append(new Utf8("fig"));
+    long pos = w.sync();
+    w.append(new Utf8("guava"));
+    w.append(new Utf8("hazelnut"));
+    w.close();
+
+    byte[] original = baos.toByteArray();
+
+    // Corrupt the second block by inserting some zero bytes before the sync marker
+    int corruptPosition = (int) pos - DataFileConstants.SYNC_SIZE;
+    int corruptedBytes = 3;
+    byte[] corrupted = new byte[original.length + corruptedBytes];
+    System.arraycopy(original, 0, corrupted, 0, corruptPosition);
+    System.arraycopy(original, corruptPosition,
+        corrupted, corruptPosition + corruptedBytes, original.length - corruptPosition);
+
+    corruptBlockFile = AvroTestUtil.tempFile(TestDataFileRepairTool.class,
+        "corruptBlock.avro");
+    corruptBlockFile.deleteOnExit();
+    FileOutputStream out = new FileOutputStream(corruptBlockFile);
+    out.write(corrupted);
+    out.close();
+
+    // Corrupt the "endive" record by changing the length of the string to be negative
+    corruptPosition = (int) pos - DataFileConstants.SYNC_SIZE -
+        (1 + "fig".length() + 1 + "endive".length());
+    corrupted = new byte[original.length];
+    System.arraycopy(original, 0, corrupted, 0, original.length);
+    BinaryData.encodeLong(-1, corrupted, corruptPosition);
+
+    corruptRecordFile = AvroTestUtil.tempFile(TestDataFileRepairTool.class,
+        "corruptRecord.avro");
+    corruptRecordFile.deleteOnExit();
+    out = new FileOutputStream(corruptRecordFile);
+    out.write(corrupted);
+    out.close();
+  }
+
+  @Before
+  public void setUp() {
+    repairedFile = AvroTestUtil.tempFile(TestDataFileRepairTool.class, "repaired.avro");
+  }
+
+  @After
+  public void tearDown() {
+    repairedFile.delete();
+  }
+
+  private String run(Tool tool, String... args) throws Exception {
+    return run(tool, null, args);
+  }
+
+  private String run(Tool tool, InputStream stdin, String... args) throws Exception {
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    PrintStream stdout = new PrintStream(out);
+    tool.run(
+        stdin,
+        stdout,
+        System.err,
+        Arrays.asList(args));
+    return out.toString("UTF-8").replace("\r", "");
+  }
+
+  @Test
+  public void testReportCorruptBlock() throws Exception {
+    String output = run(new DataFileRepairTool(), "-o", "report", corruptBlockFile.getPath());
+    assertTrue(output, output.contains("Number of blocks: 2 Number of corrupt blocks: 1"));
+    assertTrue(output, output.contains("Number of records: 5 Number of corrupt records: 0"));
+  }
+
+  @Test
+  public void testReportCorruptRecord() throws Exception {
+    String output = run(new DataFileRepairTool(), "-o", "report", corruptRecordFile.getPath());
+    assertTrue(output, output.contains("Number of blocks: 3 Number of corrupt blocks: 1"));
+    assertTrue(output, output.contains("Number of records: 8 Number of corrupt records: 2"));
+  }
+
+  @Test
+  public void testRepairAllCorruptBlock() throws Exception {
+    String output = run(new DataFileRepairTool(), "-o", "all",
+        corruptBlockFile.getPath(), repairedFile.getPath());
+    assertTrue(output, output.contains("Number of blocks: 2 Number of corrupt blocks: 1"));
+    assertTrue(output, output.contains("Number of records: 5 Number of corrupt records: 0"));
+    checkFileContains(repairedFile, "apple", "banana", "celery", "guava", "hazelnut");
+  }
+
+  @Test
+  public void testRepairAllCorruptRecord() throws Exception {
+    String output = run(new DataFileRepairTool(), "-o", "all",
+        corruptRecordFile.getPath(), repairedFile.getPath());
+    assertTrue(output, output.contains("Number of blocks: 3 Number of corrupt blocks: 1"));
+    assertTrue(output, output.contains("Number of records: 8 Number of corrupt records: 2"));
+    checkFileContains(repairedFile, "apple", "banana", "celery", "date", "guava",
+        "hazelnut");
+  }
+
+  @Test
+  public void testRepairPriorCorruptBlock() throws Exception {
+    String output = run(new DataFileRepairTool(), "-o", "prior",
+        corruptBlockFile.getPath(), repairedFile.getPath());
+    assertTrue(output, output.contains("Number of blocks: 2 Number of corrupt blocks: 1"));
+    assertTrue(output, output.contains("Number of records: 5 Number of corrupt records: 0"));
+    checkFileContains(repairedFile, "apple", "banana", "celery");
+  }
+
+  @Test
+  public void testRepairPriorCorruptRecord() throws Exception {
+    String output = run(new DataFileRepairTool(), "-o", "prior",
+        corruptRecordFile.getPath(), repairedFile.getPath());
+    assertTrue(output, output.contains("Number of blocks: 3 Number of corrupt blocks: 1"));
+    assertTrue(output, output.contains("Number of records: 8 Number of corrupt records: 2"));
+    checkFileContains(repairedFile, "apple", "banana", "celery", "date");
+  }
+
+  @Test
+  public void testRepairAfterCorruptBlock() throws Exception {
+    String output = run(new DataFileRepairTool(), "-o", "after",
+        corruptBlockFile.getPath(), repairedFile.getPath());
+    assertTrue(output, output.contains("Number of blocks: 2 Number of corrupt blocks: 1"));
+    assertTrue(output, output.contains("Number of records: 5 Number of corrupt records: 0"));
+    checkFileContains(repairedFile, "guava", "hazelnut");
+  }
+
+  @Test
+  public void testRepairAfterCorruptRecord() throws Exception {
+    String output = run(new DataFileRepairTool(), "-o", "after",
+        corruptRecordFile.getPath(), repairedFile.getPath());
+    assertTrue(output, output.contains("Number of blocks: 3 Number of corrupt blocks: 1"));
+    assertTrue(output, output.contains("Number of records: 8 Number of corrupt records: 2"));
+    checkFileContains(repairedFile, "guava", "hazelnut");
+  }
+
+  private void checkFileContains(File repairedFile, String... lines) throws IOException {
+    DataFileReader<Utf8> r = new DataFileReader<Utf8>(repairedFile,
+        new GenericDatumReader<Utf8>(SCHEMA));
+    for (String line : lines) {
+      assertEquals(line, r.next().toString());
+    }
+    assertFalse(r.hasNext());
+    r.close();
+  }
+
+}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileTools.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileTools.java
new file mode 100644
index 0000000..9661e4e
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestDataFileTools.java
@@ -0,0 +1,253 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.io.StringBufferInputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.avro.AvroTestUtil;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+ at SuppressWarnings("deprecation")
+public class TestDataFileTools {
+  static final int COUNT = 10;
+  static File sampleFile;
+  static String jsonData;
+  static Schema schema;
+  static File schemaFile;
+  
+  private static final String KEY_NEEDING_ESCAPES = "trn\\\r\t\n";
+  private static final String ESCAPED_KEY = "trn\\\\\\r\\t\\n";
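+  // DataFileGetMetaTool is expected to print the key with backslash escapes
+  // applied (exercised by testGetMeta below).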
+
+  @BeforeClass
+  public static void writeSampleFile() throws IOException {
+    sampleFile = AvroTestUtil.tempFile(TestDataFileTools.class,
+      TestDataFileTools.class.getName() + ".avro");
+    schema = Schema.create(Type.INT);
+    schemaFile = AvroTestUtil.tempFile(TestDataFileTools.class, "schema-temp.schema");
+    FileWriter fw = new FileWriter(schemaFile);
+    fw.append(schema.toString());
+    fw.close();
+    
+    DataFileWriter<Object> writer
+      = new DataFileWriter<Object>(new GenericDatumWriter<Object>(schema))
+      .setMeta(KEY_NEEDING_ESCAPES, "")
+      .create(schema, sampleFile);
+    StringBuilder builder = new StringBuilder();
+
+    for (int i = 0; i < COUNT; ++i) {
+      builder.append(Integer.toString(i));
+      builder.append("\n");
+      writer.append(i);
+    }
+
+    writer.flush();
+    writer.close();
+    
+    jsonData = builder.toString();
+  }
+  
+  private String run(Tool tool, String... args) throws Exception {
+    return run(tool, null, args);
+  }
+
+  private String run(Tool tool, InputStream stdin, String... args) throws Exception {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PrintStream p = new PrintStream(baos);
+    tool.run(
+        stdin,
+        p, // stdout
+        null, // stderr
+        Arrays.asList(args));
+    return baos.toString("UTF-8").replace("\r", "");
+  }
+
+  @Test
+  public void testRead() throws Exception {
+    assertEquals(jsonData,
+        run(new DataFileReadTool(), sampleFile.getPath()));
+  }
+
+  @Test
+  public void testReadStdin() throws Exception {
+    FileInputStream stdin = new FileInputStream(sampleFile);
+    assertEquals(jsonData, run(new DataFileReadTool(), stdin, "-"));
+  }
+  
+  @Test
+  public void testReadToJsonPretty() throws Exception {
+    assertEquals(jsonData,
+        run(new DataFileReadTool(), "--pretty", sampleFile.getPath()));
+  }
+  
+  @Test
+  public void testGetMeta() throws Exception {
+    String output = run(new DataFileGetMetaTool(), sampleFile.getPath());
+    assertTrue(output, output.contains("avro.schema\t"+schema.toString()+"\n"));
+    assertTrue(output, output.contains(ESCAPED_KEY+"\t\n"));
+  }
+  
+  @Test
+  public void testGetMetaForSingleKey() throws Exception {
+    assertEquals(schema.toString() + "\n",
+        run(new DataFileGetMetaTool(), sampleFile.getPath(), "--key",
+            "avro.schema"));
+  }
+  
+  @Test
+  public void testGetSchema() throws Exception {
+    assertEquals(schema.toString() + "\n",
+        run(new DataFileGetSchemaTool(), sampleFile.getPath()));
+  }
+  
+  @Test
+  public void testWriteWithDeflate() throws Exception {
+    testWrite("deflate", Arrays.asList("--codec", "deflate"), "deflate");
+  }
+  
+  @Test
+  public void testWrite() throws Exception {
+    testWrite("plain", Collections.<String>emptyList(), "null");
+  }
+  
+  public void testWrite(String name, List<String> extra, String expectedCodec)
+      throws Exception {
+    testWrite(name, extra, expectedCodec, "-schema", schema.toString());
+    testWrite(name, extra, expectedCodec, "-schema-file", schemaFile.toString());
+  }
+
+  public void testWrite(String name, List<String> extra, String expectedCodec, String... extraArgs)
+      throws Exception {
+    File outFile = AvroTestUtil.tempFile(getClass(),
+        TestDataFileTools.class + ".testWrite." + name + ".avro");
+    FileOutputStream fout = new FileOutputStream(outFile);
+    PrintStream out = new PrintStream(fout);
+    List<String> args = new ArrayList<String>();
+    args.addAll(Arrays.asList(extraArgs));
+    args.add("-");
+    args.addAll(extra);
+    new DataFileWriteTool().run(
+        new StringBufferInputStream(jsonData),
+        out, // stdout
+        null, // stderr
+        args);
+    out.close();
+    fout.close();
+    
+    // Read it back, and make sure it's valid.
+    GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
+    DataFileReader<Object> fileReader = new DataFileReader<Object>(outFile,reader);
+    int i = 0;
+    for (Object datum : fileReader) {
+      assertEquals(i, datum);
+      i++;
+    }
+    assertEquals(COUNT, i);
+    assertEquals(schema, fileReader.getSchema());
+    String codecStr = fileReader.getMetaString("avro.codec");
+    if (null == codecStr) {
+      codecStr = "null";
+    }
+    assertEquals(expectedCodec, codecStr);
+    fileReader.close();
+  }
+  
+  @Test
+  public void testFailureOnWritingPartialJSONValues() throws Exception {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PrintStream out = new PrintStream(baos);
+    try {
+      new DataFileWriteTool().run(
+          new StringBufferInputStream("{"),
+          out, // stdout
+          null, // stderr
+          Arrays.asList("-schema", "{ \"type\":\"record\", \"fields\":" +
+                        "[{\"name\":\"foo\", \"type\":\"string\"}], " +
+                        "\"name\":\"boring\" }", "-"));
+      fail("Expected exception.");
+    } catch (IOException expected) {
+      // expected
+    }
+  }
+  
+  @Test
+  public void testWritingZeroJsonValues() throws Exception {
+    File outFile = writeToAvroFile("zerojsonvalues",
+        schema.toString(),
+        "");
+    assertEquals(0, countRecords(outFile));
+  }
+  
+  private int countRecords(File outFile) throws IOException {
+    GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
+    DataFileReader<Object> fileReader = 
+      new DataFileReader<Object>(outFile,reader);
+    int i = 0;
+    for (@SuppressWarnings("unused") Object datum : fileReader) {
+      i++;
+    }
+    fileReader.close();
+    return i;
+  }
+
+  @Test
+  public void testDifferentSeparatorsBetweenJsonRecords() throws Exception {
+    File outFile = writeToAvroFile(
+        "seperators", 
+        "{ \"type\":\"array\", \"items\":\"int\" }", 
+        "[]    [] []\n[][3]     ");
+    assertEquals(5, countRecords(outFile));
+  }
+  
+  public File writeToAvroFile(String testName, String schema, String json) throws Exception {
+    File outFile = AvroTestUtil.tempFile(getClass(),
+        TestDataFileTools.class + "." + testName + ".avro");
+    FileOutputStream fout = new FileOutputStream(outFile);
+    PrintStream out = new PrintStream(fout);
+    new DataFileWriteTool().run(
+        new StringBufferInputStream(json),
+        out, // stdout
+        null, // stderr
+        Arrays.asList("-schema", schema, "-"));
+    out.close();
+    fout.close();
+    return outFile;
+  }
+  
+}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestIdlToSchemataTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestIdlToSchemataTool.java
new file mode 100644
index 0000000..afe2e6d
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestIdlToSchemataTool.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.Test;
+
+public class TestIdlToSchemataTool {
+
+  @Test
+  public void testSplitIdlIntoSchemata() throws Exception {
+    String idl = "src/test/idl/protocol.avdl";
+    String outdir = "target/test-split";
+
+    List<String> arglist = Arrays.asList(idl, outdir);
+    new IdlToSchemataTool().run(null, null, null, arglist);
+
+    String[] files = new File(outdir).list();
+    assertEquals(4, files.length);
+  }
+}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestJsonToFromBinaryFragmentTools.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestJsonToFromBinaryFragmentTools.java
new file mode 100644
index 0000000..557cac9
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestJsonToFromBinaryFragmentTools.java
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.avro.AvroTestUtil;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Type;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Tests both {@link JsonToBinaryFragmentTool} 
+ * and {@link BinaryFragmentToJsonTool}.
+ */
+public class TestJsonToFromBinaryFragmentTools {
+  private static final String STRING_SCHEMA = Schema.create(Type.STRING).toString();
+  private static final String UTF8 = "utf-8";
+  private static final String AVRO = 
+    "ZLong string implies readable length encoding.";
+  private static final String JSON = 
+    "\"Long string implies readable length encoding.\"\n";
+
+  @Test
+  public void testBinaryToJson() throws Exception {
+    binaryToJson(AVRO, JSON, STRING_SCHEMA);
+  }
+  
+  @Test
+  public void testJsonToBinary() throws Exception {
+    jsonToBinary(JSON, AVRO, STRING_SCHEMA);
+  }
+
+  @Test
+  public void testMultiBinaryToJson() throws Exception {
+    binaryToJson(AVRO + AVRO + AVRO, JSON + JSON + JSON, STRING_SCHEMA);
+  }
+
+  @Test
+  public void testMultiJsonToBinary() throws Exception {
+    jsonToBinary(JSON + JSON + JSON, AVRO + AVRO + AVRO, STRING_SCHEMA);
+  }
+
+  @Test
+  public void testBinaryToNoPrettyJson() throws Exception {
+    binaryToJson(AVRO, JSON, "--no-pretty", STRING_SCHEMA);
+  }
+
+  @Test
+  public void testMultiBinaryToNoPrettyJson() throws Exception {
+    binaryToJson(AVRO + AVRO + AVRO, JSON + JSON + JSON, "--no-pretty", STRING_SCHEMA);
+  }
+
+  @Test
+  public void testBinaryToJsonSchemaFile() throws Exception {
+    binaryToJson(AVRO, JSON, "--schema-file", schemaFile());
+  }
+  
+  @Test
+  public void testJsonToBinarySchemaFile() throws Exception {
+    jsonToBinary(JSON, AVRO, "--schema-file", schemaFile());
+  }
+
+  private void binaryToJson(String avro, String json, String... options) throws Exception {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PrintStream p = new PrintStream(new BufferedOutputStream(baos));
+    
+    List<String> args = new ArrayList<String>();
+    args.addAll(Arrays.asList(options));
+    args.add("-");
+    new BinaryFragmentToJsonTool().run(
+        new ByteArrayInputStream(avro.getBytes(UTF8)), // stdin
+        p, // stdout
+        null, // stderr
+        args);
+    assertEquals(json, baos.toString(UTF8).replace("\r", ""));
+  }
+
+  private void jsonToBinary(String json, String avro, String... options) throws Exception {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PrintStream p = new PrintStream(new BufferedOutputStream(baos));
+
+    List<String> args = new ArrayList<String>();
+    args.addAll(Arrays.asList(options));
+    args.add("-");
+    new JsonToBinaryFragmentTool().run(
+        new ByteArrayInputStream(json.getBytes(UTF8)), // stdin
+        p, // stdout
+        null, // stderr
+        args);
+    assertEquals(avro, baos.toString(UTF8));
+  }
+
+  private static String schemaFile() throws IOException {
+    File schemaFile = AvroTestUtil.tempFile(TestJsonToFromBinaryFragmentTools.class, "String.avsc");
+    FileWriter fw = new FileWriter(schemaFile);
+    fw.append(STRING_SCHEMA);
+    fw.close();
+    return schemaFile.toString();
+  }
+}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestMain.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestMain.java
new file mode 100644
index 0000000..c990a42
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestMain.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import static org.junit.Assert.fail;
+
+import org.junit.Test;
+
+public class TestMain {
+  /** Make sure that tool descriptions fit in 80 characters. */
+  @Test
+  public void testToolDescriptionLength() {
+    Main m = new Main();
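+    // Assumption: maxLen is the width of the longest tool name; each usage
+    // line is name + two separator spaces + description, capped at 80 columns.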
+    for (Tool t : m.tools.values()) {
+      if (m.maxLen + 2 + t.getShortDescription().length() > 80) {
+        fail("Tool description too long: " + t.getName());
+      }
+    }
+  }
+}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestRecodecTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestRecodecTool.java
new file mode 100644
index 0000000..e885115
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestRecodecTool.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import static java.util.Arrays.asList;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.PrintStream;
+import java.util.ArrayList;
+
+import org.apache.avro.AvroTestUtil;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestRecodecTool {
+  @Test
+  public void testRecodec() throws Exception {
+    String metaKey = "myMetaKey";
+    String metaValue = "myMetaValue";
+    
+    File inputFile = AvroTestUtil.tempFile(getClass(), "input.avro");
+    
+    Schema schema = Schema.create(Type.STRING);
+    DataFileWriter<String> writer = new DataFileWriter<String>(
+        new GenericDatumWriter<String>(schema))
+        .setMeta(metaKey, metaValue)
+        .create(schema, inputFile);
+    // We write some garbage which should be quite compressible by deflate,
+    // but is complicated enough that deflate-9 will work better than deflate-1.
+    // These values were plucked from thin air and worked on the first try, so
+    // don't read too much into them.
+    for (int i = 0; i < 100000; i++) {
+      writer.append("" + i % 100);
+    }
+    writer.close();
+
+    File defaultOutputFile = AvroTestUtil.tempFile(getClass(), "default-output.avro");
+    File nullOutputFile = AvroTestUtil.tempFile(getClass(), "null-output.avro");
+    File deflateDefaultOutputFile = AvroTestUtil.tempFile(getClass(), "deflate-default-output.avro");
+    File deflate1OutputFile = AvroTestUtil.tempFile(getClass(), "deflate-1-output.avro");
+    File deflate9OutputFile = AvroTestUtil.tempFile(getClass(), "deflate-9-output.avro");
+    
+    new RecodecTool().run(new FileInputStream(inputFile), new PrintStream(defaultOutputFile), null, new ArrayList<String>());
+    new RecodecTool().run(new FileInputStream(inputFile), new PrintStream(nullOutputFile), null, asList("--codec=null"));
+    new RecodecTool().run(new FileInputStream(inputFile), new PrintStream(deflateDefaultOutputFile), null, asList("--codec=deflate"));
+    new RecodecTool().run(new FileInputStream(inputFile), new PrintStream(deflate1OutputFile), null, asList("--codec=deflate", "--level=1"));
+    new RecodecTool().run(new FileInputStream(inputFile), new PrintStream(deflate9OutputFile), null, asList("--codec=deflate", "--level=9"));
+    
+    // We assume that metadata copying is orthogonal to codec selection, and
+    // so only test it for a single file.
+    Assert.assertEquals(
+      metaValue,
+      new DataFileReader<Void>(defaultOutputFile, new GenericDatumReader<Void>())
+        .getMetaString(metaKey));
+    
+    // The "default" codec should be the same as null.
+    Assert.assertEquals(defaultOutputFile.length(), nullOutputFile.length());
+    
+    // All of the deflated files should be smaller than the null file.
+    assertLessThan(deflateDefaultOutputFile.length(), nullOutputFile.length());
+    assertLessThan(deflate1OutputFile.length(), nullOutputFile.length());
+    assertLessThan(deflate9OutputFile.length(), nullOutputFile.length());
+    
+    // The "level 9" file should be smaller than the "level 1" file.
+    assertLessThan(deflate9OutputFile.length(), deflate1OutputFile.length());
+
+    inputFile.delete();
+    defaultOutputFile.delete();
+    nullOutputFile.delete();
+    deflateDefaultOutputFile.delete();
+    deflate1OutputFile.delete();
+    deflate9OutputFile.delete();
+  }
+  
+  private static void assertLessThan(long less, long more) {
+    if (less >= more) {
+      Assert.fail("Expected " + less + " to be less than " + more);
+    }
+  }
+}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestRpcProtocolTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestRpcProtocolTool.java
new file mode 100644
index 0000000..f7af3f6
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestRpcProtocolTool.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import org.apache.avro.Protocol;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.PrintStream;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Tests {@link RpcProtocolTool} over both the "http" and "avro" URI schemes.
+ */
+ at RunWith(Parameterized.class)
+public class TestRpcProtocolTool {
+
+  @Parameterized.Parameters(/*name = "{0}"*/)
+  public static List<Object[]> data() {
+    return Arrays.asList( new Object[]{"http"},
+                          new Object[]{"avro"});
+  }
+
+  private RpcReceiveTool receive;
+  private Protocol simpleProtocol;
+
+  private String uriScheme;
+
+  public TestRpcProtocolTool(String uriScheme) {
+    this.uriScheme = uriScheme;
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    String protocolFile =
+      System.getProperty("share.dir", "../../../share") + "/test/schemas/simple.avpr";
+
+    simpleProtocol = Protocol.parse(new File(protocolFile));
+
+    // start a simple server
+    ByteArrayOutputStream baos1 = new ByteArrayOutputStream();
+    PrintStream p1 = new PrintStream(baos1);
+    receive = new RpcReceiveTool();
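+    // Bind to port 0 so the server picks a free port; the test fetches the
+    // actual port later via receive.server.getPort().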
+    receive.run1(null, p1, System.err,
+        Arrays.asList(uriScheme + "://0.0.0.0:0/",
+            protocolFile, "hello",
+            "-data", "\"Hello!\""));
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    if (receive != null)
+      receive.server.close(); // force the server to finish
+  }
+
+  @Test
+  public void testRpcProtocol() throws Exception {
+
+    // run the actual test
+    ByteArrayOutputStream baos2 = new ByteArrayOutputStream();
+    PrintStream p2 = new PrintStream(baos2, true, "UTF-8");
+    RpcProtocolTool testObject = new RpcProtocolTool();
+
+    testObject.run(null, p2, System.err,
+        Arrays.asList(uriScheme + "://127.0.0.1:" + receive.server.getPort() + "/"));
+
+    p2.flush();
+
+    assertEquals("Expected the simple.avpr protocol to be echoed to standout",
+        simpleProtocol, Protocol.parse(baos2.toString("UTF-8")));
+
+  }
+}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestRpcReceiveAndSendTools.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestRpcReceiveAndSendTools.java
new file mode 100644
index 0000000..97e527d
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestRpcReceiveAndSendTools.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+import java.util.Arrays;
+
+import org.junit.Test;
+
+public class TestRpcReceiveAndSendTools {
+  
+  /**
+   * Starts a server (using the tool) and sends a single message to it.
+   */
+  @Test
+  public void testServeAndSend() throws Exception {
+    String protocolFile =
+      System.getProperty("share.dir", "../../../share") + "/test/schemas/simple.avpr";
+    ByteArrayOutputStream baos1 = new ByteArrayOutputStream();
+    PrintStream p1 = new PrintStream(baos1);
+    RpcReceiveTool receive = new RpcReceiveTool();
+    receive.run1(null, p1, System.err, 
+                 Arrays.asList("http://0.0.0.0:0/",
+                               protocolFile, "hello",
+                               "-data", "\"Hello!\""));
+    ByteArrayOutputStream baos2 = new ByteArrayOutputStream();
+    PrintStream p2 = new PrintStream(baos2);
+    RpcSendTool send = new RpcSendTool();
+    send.run(null, p2, System.err,
+             Arrays.asList("http://127.0.0.1:"+receive.server.getPort()+"/",
+                           protocolFile, "hello",  
+                           "-data", "{ \"greeting\": \"Hi!\" }"));
+    receive.run2(System.err);
+    
+    assertTrue(baos1.toString("UTF-8").replace("\r", "")
+               .endsWith("hello\t{\"greeting\":\"Hi!\"}\n"));
+    assertEquals("\"Hello!\"\n", baos2.toString("UTF-8").replace("\r", ""));
+  }
+}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestSpecificCompilerTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestSpecificCompilerTool.java
new file mode 100644
index 0000000..6beb138
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestSpecificCompilerTool.java
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.Arrays;
+
+/**
+ * Verifies that the SpecificCompilerTool generates Java source properly
+ */
+public class TestSpecificCompilerTool {
+
+  // where test input/expected output comes from
+  private static final File TEST_DIR =
+    new File(System.getProperty("test.compile.schema.dir", "src/test/compiler"));
+
+  // where test input comes from
+  private static final File TEST_INPUT_DIR =
+    new File(TEST_DIR, "input");
+
+  // where test expected output comes from
+  private static final File TEST_EXPECTED_OUTPUT_DIR =
+    new File(TEST_DIR, "output");
+  private static final File TEST_EXPECTED_POSITION =
+    new File(TEST_EXPECTED_OUTPUT_DIR, "Position.java");
+  private static final File TEST_EXPECTED_PLAYER =
+    new File(TEST_EXPECTED_OUTPUT_DIR, "Player.java");
+
+  private static final File TEST_EXPECTED_STRING_OUTPUT_DIR =
+    new File(TEST_DIR, "output-string");
+  private static final File TEST_EXPECTED_STRING_POSITION =
+    new File(TEST_EXPECTED_STRING_OUTPUT_DIR,
+             "avro/examples/baseball/Position.java");
+  private static final File TEST_EXPECTED_STRING_PLAYER =
+    new File(TEST_EXPECTED_STRING_OUTPUT_DIR,
+             "avro/examples/baseball/Player.java");
+
+  // where test output goes
+  private static final File TEST_OUTPUT_DIR =
+    new File("target/compiler/output");
+  private static final File TEST_OUTPUT_PLAYER =
+    new File(TEST_OUTPUT_DIR, "avro/examples/baseball/Player.java");
+  private static final File TEST_OUTPUT_POSITION =
+    new File(TEST_OUTPUT_DIR, "avro/examples/baseball/Position.java");
+
+  private static final File TEST_OUTPUT_STRING_DIR =
+    new File("target/compiler/output-string");
+  private static final File TEST_OUTPUT_STRING_PLAYER =
+    new File(TEST_OUTPUT_STRING_DIR, "avro/examples/baseball/Player.java");
+  private static final File TEST_OUTPUT_STRING_POSITION =
+    new File(TEST_OUTPUT_STRING_DIR, "avro/examples/baseball/Position.java");
+
+  @Before
+  public void setUp() {
+    TEST_OUTPUT_DIR.delete();
+  }
+
+  @Test
+  public void testCompileSchemaSingleFile() throws Exception {
+
+    doCompile(new String[]{"-encoding", "UTF-8", "schema",
+      TEST_INPUT_DIR.toString() + "/position.avsc",
+      TEST_OUTPUT_DIR.getPath()});
+    assertFileMatch(TEST_EXPECTED_POSITION, TEST_OUTPUT_POSITION);
+  }
+
+  @Test
+  public void testCompileSchemaTwoFiles() throws Exception {
+
+    doCompile(new String[]{"-encoding", "UTF-8", "schema",
+      TEST_INPUT_DIR.toString() + "/position.avsc",
+      TEST_INPUT_DIR.toString() + "/player.avsc",
+      TEST_OUTPUT_DIR.getPath()});
+    assertFileMatch(TEST_EXPECTED_POSITION, TEST_OUTPUT_POSITION);
+    assertFileMatch(TEST_EXPECTED_PLAYER,   TEST_OUTPUT_PLAYER);
+  }
+
+  @Test
+  public void testCompileSchemaFileAndDirectory() throws Exception {
+
+    doCompile(new String[]{"-encoding", "UTF-8", "schema",
+      TEST_INPUT_DIR.toString() + "/position.avsc",
+      TEST_INPUT_DIR.toString(),
+      TEST_OUTPUT_DIR.getPath()});
+    assertFileMatch(TEST_EXPECTED_POSITION, TEST_OUTPUT_POSITION);
+    assertFileMatch(TEST_EXPECTED_PLAYER,   TEST_OUTPUT_PLAYER);
+  }
+
+  @Test
+  public void testCompileSchemasUsingString() throws Exception {
+
+    doCompile(new String[]{"-encoding", "UTF-8",
+      "-string", "schema",
+      TEST_INPUT_DIR.toString() + "/position.avsc",
+      TEST_INPUT_DIR.toString() + "/player.avsc",
+      TEST_OUTPUT_STRING_DIR.getPath()});
+    assertFileMatch(TEST_EXPECTED_STRING_POSITION, TEST_OUTPUT_STRING_POSITION);
+    assertFileMatch(TEST_EXPECTED_STRING_PLAYER,   TEST_OUTPUT_STRING_PLAYER);
+  }
+
+  // Runs the actual compiler tool with the given input args
+  private void doCompile(String[] args) throws Exception {
+    SpecificCompilerTool tool = new SpecificCompilerTool();
+    tool.run(null, null, null, Arrays.asList(args));
+  }
+
+  /**
+   * Verify that the generated Java files match the expected ones. This approach
+   * has room for improvement: we currently just verify that the text matches,
+   * which is brittle if, for example, the code-generation formatting or method
+   * ordering changes. A better approach would be to compile the sources and do
+   * a deeper comparison.
+   *
+   * See http://download.oracle.com/javase/6/docs/api/javax/tools/JavaCompiler.html
+   */
+  private static void assertFileMatch(File expected, File found) throws IOException {
+    Assert.assertEquals("Found file: " + found +
+      " does not match expected file: " + expected,
+      readFile(expected), readFile(found));
+  }
+
+  /**
+   * Not the best implementation, but does the job. Building full strings of the
+   * file content and comparing provides nice diffs via JUnit when failures occur.
+   */
+  private static String readFile(File file) throws IOException {
+    BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF-8"));
+    StringBuilder sb = new StringBuilder();
+    String line = null;
+    boolean first = true;
+    while ((line = reader.readLine()) != null) {
+      if (!first) {
+        sb.append("\n");
+      }
+      first = false;
+      sb.append(line);
+    }
+    reader.close();
+    return sb.toString();
+  }
+}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestTetherTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestTetherTool.java
new file mode 100644
index 0000000..865c245
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestTetherTool.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import static org.junit.Assert.assertEquals;
+
+import static java.util.Arrays.asList;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.io.FileWriter;
+
+
+import org.apache.avro.AvroTestUtil;
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileStream;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.mapred.AvroJob;
+import org.apache.avro.mapred.Pair;
+import org.apache.avro.mapred.WordCountUtil;
+import org.apache.avro.mapred.tether.TetherJob;
+import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.util.Utf8;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobConf;
+import org.junit.Test;
+
+public class TestTetherTool {
+
+  /**
+   * Test that the tether tool works with the mapreduce example
+   *
+   * TODO: How can we ensure that when we run, the WordCountTether example has
+   * been properly compiled?
+   */
+  @Test
+  public void test() throws Exception {
+
+    // Create the schema files.
+    Schema outscheme = new Pair<Utf8,Long>(new Utf8(""), 0L).getSchema();
+
+    // Write the output schema to a file.
+    File midscfile = AvroTestUtil.tempFile(getClass(), "midschema.avpr");
+
+    FileWriter hf = null;
+    try {
+      hf = new FileWriter(midscfile);
+      hf.write(outscheme.toString());
+    }
+    finally {
+      if (hf != null) {
+        hf.close();
+      }
+    }
+
+    // Get the classpath to use as an argument.
+    String cp = System.getProperty("java.class.path");
+
+    JobConf job = new JobConf();
+    String dir = System.getProperty("test.dir", ".") + "/mapred";
+    Path outputPath = new Path(dir + "/out");
+
+    outputPath.getFileSystem(job).delete(outputPath);
+
+    // create the input file
+    WordCountUtil.writeLinesFile();
+
+    // The tether executable is "java"; the task class and the classpath are
+    // passed to it via --exec_args below.
+
+    // Input path.
+    String in = dir + "/in";
+
+    // Build the exec arguments as a single string.
+    String execargs = "-classpath " + cp;
+    execargs += " org.apache.avro.mapred.tether.WordCountTask";
+
+    // Create the list of arguments to pass to the tool's run method.
+    java.util.List<String> runargs = new java.util.ArrayList<String>();
+
+    runargs.addAll(asList("--program", "java"));
+    runargs.addAll(asList("--exec_args", '"'+execargs+'"'));
+    runargs.addAll(asList("--exec_cached", "false"));
+    runargs.addAll(asList("--in", in));
+    runargs.addAll(asList("--out", outputPath.toString()));
+    runargs.addAll(asList("--outschema", midscfile.toString()));
+
+    TetherTool tool = new TetherTool();
+
+    tool.run(null, null, System.err, runargs);
+
+    // Validate the output.
+    DatumReader<Pair<Utf8,Long>> reader = new SpecificDatumReader<Pair<Utf8,Long>>();
+    InputStream cin = new BufferedInputStream(new FileInputStream(WordCountUtil.COUNTS_FILE));
+    DataFileStream<Pair<Utf8,Long>> counts = new DataFileStream<Pair<Utf8,Long>>(cin,reader);
+    int numWords = 0;
+    for (Pair<Utf8,Long> wc : counts) {
+      assertEquals(wc.key().toString(),
+                   WordCountUtil.COUNTS.get(wc.key().toString()), wc.value());
+      numWords++;
+    }
+    cin.close();
+    assertEquals(WordCountUtil.COUNTS.size(), numWords);
+  }
+}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java
new file mode 100644
index 0000000..eee4027
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestTextFileTools.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.OutputStream;
+import java.io.InputStream;
+import java.io.FileOutputStream;
+import java.io.FileInputStream;
+import java.io.BufferedOutputStream;
+import java.io.BufferedInputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Random;
+
+import org.apache.avro.AvroTestUtil;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumReader;
+import org.junit.BeforeClass;
+import org.junit.AfterClass;
+import org.junit.Test;
+
+@SuppressWarnings("deprecation")
+public class TestTextFileTools {
+  private static final int COUNT =
+    Integer.parseInt(System.getProperty("test.count", "10"));
+
+  private static final byte[] LINE_SEP = System.getProperty("line.separator").getBytes();
+  static File linesFile;
+  static ByteBuffer[] lines;
+  static Schema schema;
+  static File schemaFile;
+  
+  @BeforeClass
+  public static void writeRandomFile() throws IOException {
+    schema = Schema.create(Type.BYTES);
+    lines = new ByteBuffer[COUNT];
+    linesFile = AvroTestUtil.tempFile(TestTextFileTools.class, "random.lines");
+
+    OutputStream out =
+      new BufferedOutputStream(new FileOutputStream(linesFile));
+    Random rand = new Random();
+    for (int j = 0; j < COUNT; j++) {
+      byte[] line = new byte[rand.nextInt(512)];
+      System.out.println("Creating line = "+line.length);
+      for (int i = 0; i < line.length; i++) {
+        int b = rand.nextInt(256);
+        while (b == '\n' || b == '\r')
+          b = rand.nextInt(256);
+        line[i] = (byte)b;
+      }
+      out.write(line);
+      out.write(LINE_SEP);
+      lines[j] = ByteBuffer.wrap(line);
+    }
+    out.close();
+  }
+  
+  private void fromText(String name, String... args) throws Exception {
+    File avroFile = AvroTestUtil.tempFile(getClass(), name + ".avro");
+
+    ArrayList<String> arglist = new ArrayList<String>();
+    arglist.addAll(Arrays.asList(args));
+    arglist.add(linesFile.toString());
+    arglist.add(avroFile.toString());
+
+    new FromTextTool().run(null, null, null, arglist);
+    
+    // Read it back, and make sure it's valid.
+    DataFileReader<ByteBuffer> file = new DataFileReader<ByteBuffer>
+      (avroFile, new GenericDatumReader<ByteBuffer>());
+    int i = 0;
+    for (ByteBuffer line : file) {
+      System.out.println("Reading line = "+line.remaining());
+      assertEquals(line, lines[i]);
+      i++;
+    }
+    assertEquals(COUNT, i);
+  }
+  
+  @Test
+  public void testFromText() throws Exception {
+    fromText("null", "--codec", "null");
+    fromText("deflate", "--codec", "deflate");
+    fromText("snappy", "--codec", "snappy");
+  }
+
+  @AfterClass
+  public static void testToText() throws Exception {
+    // Runs after testFromText(), so the Avro files written there exist.
+    toText("null");
+    toText("deflate");
+    toText("snappy");
+  }
+
+  private static void toText(String name) throws Exception {
+    File avroFile = AvroTestUtil.tempFile(TestTextFileTools.class, name + ".avro");
+    File outFile = AvroTestUtil.tempFile(TestTextFileTools.class, name + ".lines");
+
+    ArrayList<String> arglist = new ArrayList<String>();
+    arglist.add(avroFile.toString());
+    arglist.add(outFile.toString());
+
+    new ToTextTool().run(null, null, null, arglist);
+    
+    // Read it back, and make sure it's valid.
+    InputStream orig = new BufferedInputStream(new FileInputStream(linesFile));
+    InputStream after = new BufferedInputStream(new FileInputStream(outFile));
+
+    int b;
+    while ((b = orig.read()) != -1)
+      assertEquals(b, after.read());
+    assertEquals(-1, after.read());
+
+    orig.close();
+    after.close();
+  }
+
+}
diff --git a/lang/java/tools/src/test/java/org/apache/avro/tool/TestToTrevniTool.java b/lang/java/tools/src/test/java/org/apache/avro/tool/TestToTrevniTool.java
new file mode 100644
index 0000000..31f1ab6
--- /dev/null
+++ b/lang/java/tools/src/test/java/org/apache/avro/tool/TestToTrevniTool.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.tool;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Iterator;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.trevni.avro.AvroColumnReader;
+import org.apache.trevni.avro.RandomData;
+
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+
+public class TestToTrevniTool {
+  private static final int COUNT =
+    Integer.parseInt(System.getProperty("test.count", "200"));
+  private static final File DIR
+    = new File(System.getProperty("test.dir", "/tmp"));
+  private static final File AVRO_FILE = new File(DIR, "random.avro");
+  private static final File TREVNI_FILE = new File(DIR, "random.trv");
+  private static final File SCHEMA_FILE =
+    new File("../../../share/test/schemas/weather.avsc");
+
+  private String run(String... args) throws Exception {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PrintStream p = new PrintStream(baos);
+    new ToTrevniTool().run(null, p, null, Arrays.asList(args));
+    return baos.toString("UTF-8").replace("\r", "");
+  }
+  
+  @Test
+  public void test() throws Exception {
+    Schema schema = Schema.parse(SCHEMA_FILE);
+
+    DataFileWriter<Object> writer =
+      new DataFileWriter<Object>(new GenericDatumWriter<Object>());
+    writer.create(schema, Util.createFromFS(AVRO_FILE.toString()));
+    for (Object datum : new RandomData(schema, COUNT))
+      writer.append(datum);
+    writer.close();
+
+    run(AVRO_FILE.toString(), TREVNI_FILE.toString());
+
+    AvroColumnReader<Object> reader =
+      new AvroColumnReader<Object>(new AvroColumnReader.Params(TREVNI_FILE));
+    Iterator<Object> found = reader.iterator();
+    for (Object expected : new RandomData(schema, COUNT))
+      assertEquals(expected, found.next());
+    reader.close();
+  }
+
+}
diff --git a/lang/java/trevni/avro/pom.xml b/lang/java/trevni/avro/pom.xml
new file mode 100644
index 0000000..aa67ce5
--- /dev/null
+++ b/lang/java/trevni/avro/pom.xml
@@ -0,0 +1,144 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>trevni-java</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.8.0</version>
+    <relativePath>../</relativePath>
+  </parent>
+
+  <artifactId>trevni-avro</artifactId>
+  <name>Trevni Java Avro</name>
+  <url>http://avro.apache.org/</url>
+  <description>Trevni Java Avro</description>
+
+  <dependencies>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>trevni-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>trevni-core</artifactId>
+      <classifier>tests</classifier>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>main</id>
+            <goals><goal>jar</goal></goals>
+            <phase>package</phase>
+          </execution>
+          <execution>
+            <id>with-classifier</id>
+            <goals><goal>jar</goal></goals>
+            <phase>package</phase>
+            <configuration>
+              <classifier>${envClassifier}</classifier>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <profiles>
+     <profile>
+      <id>hadoop1</id>
+       <activation>
+         <property>
+           <name>hadoop.version</name>
+           <value>1</value>
+         </property>
+       </activation>
+      <properties>
+        <envClassifier>hadoop1</envClassifier>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+          <version>${hadoop1.version}</version>
+          <!-- hadoop's execution environment provides its own jars, usurping any others.
+            So we should not include it here -->
+          <scope>provided</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.avro</groupId>
+          <artifactId>avro-mapred</artifactId>
+          <version>${project.version}</version>
+          <classifier>hadoop1</classifier>
+          <scope>compile</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>hadoop2</id>
+      <activation>
+        <property>
+          <name>!hadoop.version</name> <!-- if no hadoop.version is set -->
+        </property>
+      </activation>
+      <properties>
+        <envClassifier>hadoop2</envClassifier>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client</artifactId>
+          <version>${hadoop2.version}</version>
+          <!-- hadoop's execution environment provides its own jars, usurping any others.
+            So we should not include it here -->
+          <scope>provided</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.avro</groupId>
+          <artifactId>avro-mapred</artifactId>
+          <version>${project.version}</version>
+          <classifier>hadoop2</classifier>
+          <scope>compile</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.avro</groupId>
+          <artifactId>avro</artifactId>
+          <version>${project.version}</version>
+          <scope>compile</scope>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
+</project>
+
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnReader.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnReader.java
new file mode 100644
index 0000000..f7514db
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnReader.java
@@ -0,0 +1,276 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro;
+
+import java.io.IOException;
+import java.io.Closeable;
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.trevni.ColumnMetaData;
+import org.apache.trevni.ColumnFileReader;
+import org.apache.trevni.ColumnValues;
+import org.apache.trevni.Input;
+import org.apache.trevni.InputFile;
+import org.apache.trevni.TrevniRuntimeException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.generic.GenericData;
+
+import static org.apache.trevni.avro.AvroColumnator.isSimple;
+
+/** Read files written with {@link AvroColumnWriter}.  A subset of the schema
+ * used for writing may be specified when reading.  In this case only columns
+ * of the subset schema are read. */
+public class AvroColumnReader<D>
+  implements Iterator<D>, Iterable<D>, Closeable {
+
+  private ColumnFileReader reader;
+  private GenericData model;
+  private Schema fileSchema;
+  private Schema readSchema;
+  
+  private ColumnValues[] values;
+  private int[] arrayWidths;
+  private int column;                          // current index in values
+
+  private Map<String,Map<String,Object>> defaults =
+    new HashMap<String,Map<String,Object>>();
+
+  /** Parameters for reading an Avro column file. */
+  public static class Params {
+    Input input;
+    Schema schema;
+    GenericData model = GenericData.get();
+
+    /** Construct reading from a file. */
+    public Params(File file) throws IOException {
+      this(new InputFile(file));
+    }
+
+    /** Construct reading from input. */
+    public Params(Input input) { this.input = input; }
+
+    /** Set subset schema to project data down to. */
+    public Params setSchema(Schema schema) {
+      this.schema = schema;
+      return this;
+    }
+
+    /** Set data representation. */
+    public Params setModel(GenericData model) {
+      this.model = model;
+      return this;
+    }
+  }
+
+  /** Construct a reader for a file. */
+  public AvroColumnReader(Params params)
+    throws IOException {
+    this.reader = new ColumnFileReader(params.input);
+    this.model = params.model;
+    this.fileSchema =
+      Schema.parse(reader.getMetaData().getString(AvroColumnWriter.SCHEMA_KEY));
+    this.readSchema = params.schema == null ? fileSchema : params.schema;
+    initialize();
+  }
+
+  /** Return the schema for data in this file. */
+  public Schema getFileSchema() { return fileSchema; }
+
+  void initialize() throws IOException {
+    // compute a mapping from column name to number for file
+    Map<String,Integer> fileColumnNumbers = new HashMap<String,Integer>();
+    int i = 0;
+    for (ColumnMetaData c : new AvroColumnator(fileSchema).getColumns())
+      fileColumnNumbers.put(c.getName(), i++);
+
+    // create iterator for each column in readSchema
+    AvroColumnator readColumnator = new AvroColumnator(readSchema);
+    this.arrayWidths = readColumnator.getArrayWidths();
+    ColumnMetaData[] readColumns = readColumnator.getColumns();
+    this.values = new ColumnValues[readColumns.length];
+    int j = 0;
+    for (ColumnMetaData c : readColumns) {
+      Integer n = fileColumnNumbers.get(c.getName());
+      if (n != null)
+        values[j++] = reader.getValues(n);
+    }
+    findDefaults(readSchema, fileSchema);
+  }
+
+  // get defaults for fields in read that are not in write
+  private void findDefaults(Schema read, Schema write) {
+    switch (read.getType()) {
+    case NULL: case BOOLEAN:
+    case INT: case LONG:
+    case FLOAT: case DOUBLE: 
+    case BYTES: case STRING: 
+    case ENUM: case FIXED:
+      if (read.getType() != write.getType())
+        throw new TrevniRuntimeException("Type mismatch: "+read+" & "+write);
+      break;
+    case MAP: 
+      findDefaults(read.getValueType(), write.getValueType());
+      break;
+    case ARRAY: 
+      findDefaults(read.getElementType(), write.getElementType());
+      break;
+    case UNION:
+      for (Schema s : read.getTypes()) {
+        Integer index = write.getIndexNamed(s.getFullName());
+        if (index == null)
+          throw new TrevniRuntimeException("No matching branch: "+s);
+        findDefaults(s, write.getTypes().get(index));
+      }
+      break;
+    case RECORD: 
+      for (Field f : read.getFields()) {
+        Field g = write.getField(f.name());
+        if (g == null)
+          setDefault(read, f);
+        else
+          findDefaults(f.schema(), g.schema());
+      }
+      break;
+    default:
+      throw new TrevniRuntimeException("Unknown schema: "+read);
+    }
+  }
+
+  private void setDefault(Schema record, Field f) {
+    String recordName = record.getFullName();
+    Map<String,Object> recordDefaults = defaults.get(recordName);
+    if (recordDefaults == null) {
+      recordDefaults = new HashMap<String,Object>();
+      defaults.put(recordName, recordDefaults);
+    }
+    recordDefaults.put(f.name(), model.getDefaultValue(f));
+  }
+
+  @Override
+  public Iterator<D> iterator() { return this; }
+
+  @Override
+  public boolean hasNext() {
+    return values[0].hasNext();
+  }
+
+  /** Return the number of rows in this file. */
+  public long getRowCount() { return reader.getRowCount(); }
+
+  @Override
+  public D next() {
+    try {
+      for (int i = 0; i < values.length; i++)
+        if (values[i] != null)
+          values[i].startRow();
+      this.column = 0;
+      return (D)read(readSchema);
+    } catch (IOException e) {
+      throw new TrevniRuntimeException(e);
+    }
+  }
+
+  private Object read(Schema s) throws IOException {
+    if (isSimple(s))
+      return nextValue(s, column++);
+
+    final int startColumn = column;
+
+    switch (s.getType()) {
+    case MAP: 
+      int size = values[column].nextLength();
+      Map map = new HashMap(size);
+      for (int i = 0; i < size; i++) {
+        this.column = startColumn;
+        values[column++].nextValue();                      // null in parent
+        String key = (String)values[column++].nextValue(); // key
+        map.put(key, read(s.getValueType()));              // value
+      }
+      column = startColumn + arrayWidths[startColumn];
+      return map;
+    case RECORD: 
+      Object record = model.newRecord(null, s);
+      Map<String,Object> rDefaults = defaults.get(s.getFullName());
+      for (Field f : s.getFields()) {
+        Object value = ((rDefaults != null) && rDefaults.containsKey(f.name()))
+          ? model.deepCopy(f.schema(), rDefaults.get(f.name()))
+          : read(f.schema());
+        model.setField(record, f.name(), f.pos(), value);
+      }
+      return record;
+    case ARRAY: 
+      int length = values[column].nextLength();
+      List elements = new GenericData.Array(length, s);
+      for (int i = 0; i < length; i++) {
+        this.column = startColumn;
+        Object value = nextValue(s, column++);
+        if (!isSimple(s.getElementType()))
+          value = read(s.getElementType());
+        elements.add(value);
+      }
+      column = startColumn + arrayWidths[startColumn];
+      return elements;
+    case UNION:
+      Object value = null;
+      for (Schema branch : s.getTypes()) {
+        if (branch.getType() == Schema.Type.NULL) continue;
+        if (values[column].nextLength() == 1) {
+          value = nextValue(branch, column);
+          column++;
+          if (!isSimple(branch))
+            value = read(branch);
+        } else {
+          column += arrayWidths[column];
+        }
+      }
+      return value;
+    default:
+      throw new TrevniRuntimeException("Unknown schema: "+s);
+    }
+  }
+
+  private Object nextValue(Schema s, int column) throws IOException {
+    Object v = values[column].nextValue();
+    
+    switch (s.getType()) {
+    case ENUM:
+      return model.createEnum(s.getEnumSymbols().get((Integer)v), s);
+    case FIXED:
+      return model.createFixed(null, ((ByteBuffer)v).array(), s);
+    }
+
+    return v;
+  }
+
+  @Override
+  public void remove() { throw new UnsupportedOperationException(); }
+
+  @Override
+  public void close() throws IOException {
+    reader.close();
+  }
+
+}
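
For reference, a minimal sketch of reading a Trevni file with AvroColumnReader, projecting down to a subset schema. This is not part of the patch; the file name and schema JSON are illustrative.

    import java.io.File;

    import org.apache.avro.Schema;
    import org.apache.trevni.avro.AvroColumnReader;

    public class ReadTrevniExample {
      public static void main(String[] args) throws Exception {
        // Subset of the writer's schema: only the "id" column is read.
        Schema subset = Schema.parse(
            "{\"type\":\"record\",\"name\":\"R\",\"fields\":"
            + "[{\"name\":\"id\",\"type\":\"long\"}]}");

        AvroColumnReader.Params params =
            new AvroColumnReader.Params(new File("data.trv")).setSchema(subset);
        AvroColumnReader<Object> reader = new AvroColumnReader<Object>(params);
        try {
          for (Object record : reader)   // iterates rows of the projection
            System.out.println(record);
        } finally {
          reader.close();
        }
      }
    }
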
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnWriter.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnWriter.java
new file mode 100644
index 0000000..abb5682
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnWriter.java
@@ -0,0 +1,173 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni.avro;
+
+import java.io.IOException;
+import java.io.File;
+import java.io.OutputStream;
+import java.util.Collection;
+import java.util.Map;
+
+import org.apache.trevni.ColumnFileMetaData;
+import org.apache.trevni.ColumnFileWriter;
+import org.apache.trevni.TrevniRuntimeException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericFixed;
+import org.apache.avro.util.Utf8;
+
+import static org.apache.trevni.avro.AvroColumnator.isSimple;
+
+/** Write Avro records to a Trevni column file.
+ *
+ * <p>Each primitive type is written to a separate column.
+ *
+ * <p>Output is buffered until {@link #writeTo(OutputStream)} is called.  The
+ * {@link #sizeEstimate()} indicates both the amount of data buffered and the
+ * size of the file that will be written.
+ */
+public class AvroColumnWriter<D> {
+  private Schema schema;
+  private GenericData model;
+  private ColumnFileWriter writer;
+  private int[] arrayWidths;
+
+  public static final String SCHEMA_KEY = "avro.schema";
+
+  public AvroColumnWriter(Schema s, ColumnFileMetaData meta)
+    throws IOException {
+    this(s, meta, GenericData.get());
+  }
+
+  public AvroColumnWriter(Schema s, ColumnFileMetaData meta, GenericData model)
+    throws IOException {
+    this.schema = s;
+    AvroColumnator columnator = new AvroColumnator(s);
+    meta.set(SCHEMA_KEY, s.toString());           // save schema in file
+    this.writer = new ColumnFileWriter(meta, columnator.getColumns());
+    this.arrayWidths = columnator.getArrayWidths();
+    this.model = model;
+  }
+
+  /** Return the approximate size of the file that will be written.  Tries to
+   * slightly over-estimate.  Indicates both the size in memory of the buffered
+   * data as well as the size of the file that will be written by {@link
+   * #writeTo(OutputStream)}. */
+  public long sizeEstimate() { return writer.sizeEstimate(); }
+
+  /** Write all rows added to the named output stream. */
+  public void writeTo(OutputStream out) throws IOException {
+    writer.writeTo(out);
+  }
+
+  /** Write all rows added to the named file. */
+  public void writeTo(File file) throws IOException {
+    writer.writeTo(file);
+  }
+
+  /** Add a row to the file. */
+  public void write(D value) throws IOException {
+    writer.startRow();
+    int count = write(value, schema, 0);
+    assert(count == writer.getColumnCount());
+    writer.endRow();
+  }
+  
+  private int write(Object o, Schema s, int column) throws IOException {
+    if (isSimple(s)) {
+      writeValue(o, s, column);
+      return column+1;
+    }
+    switch (s.getType()) {
+    case MAP: 
+      Map<?,?> map = (Map)o;
+      writer.writeLength(map.size(), column);
+      for (Map.Entry e : map.entrySet()) {
+        writer.writeValue(null, column);
+        writer.writeValue(e.getKey(), column+1);
+        int c = write(e.getValue(), s.getValueType(), column+2);
+        assert(c == column+arrayWidths[column]);
+      }
+      return column+arrayWidths[column];
+    case RECORD: 
+      for (Field f : s.getFields())
+        column = write(model.getField(o,f.name(),f.pos()), f.schema(), column);
+      return column;
+    case ARRAY: 
+      Collection elements = (Collection)o;
+      writer.writeLength(elements.size(), column);
+      if (isSimple(s.getElementType())) {         // optimize simple arrays
+        for (Object element : elements)
+          writeValue(element, s.getElementType(), column);
+        return column+1;
+      }
+      for (Object element : elements) {
+        writer.writeValue(null, column);
+        int c = write(element, s.getElementType(), column+1);
+        assert(c == column+arrayWidths[column]);
+      }
+      return column+arrayWidths[column];
+    case UNION:
+      int b = model.resolveUnion(s, o);
+      int i = 0;
+      for (Schema branch : s.getTypes()) {
+        boolean selected = i++ == b;
+        if (branch.getType() == Schema.Type.NULL) continue;
+        if (!selected) {
+          writer.writeLength(0, column);
+          column+=arrayWidths[column];
+        } else {
+          writer.writeLength(1, column);
+          if (isSimple(branch)) {
+            writeValue(o, branch, column++);
+          } else {
+            writer.writeValue(null, column);
+            column = write(o, branch, column+1);
+          }
+        }
+      }
+      return column;
+    default:
+      throw new TrevniRuntimeException("Unknown schema: "+s);
+    }
+  }
+
+  private void writeValue(Object value, Schema s, int column)
+    throws IOException {
+    
+    switch (s.getType()) {
+    case STRING:
+      if (value instanceof Utf8)                    // convert Utf8 to String
+        value = value.toString();
+      break;
+    case ENUM:
+      if (value instanceof Enum)
+        value = ((Enum)value).ordinal();
+      else 
+        value = s.getEnumOrdinal(value.toString());
+      break;
+    case FIXED:
+      value = ((GenericFixed)value).bytes();
+      break;
+    }
+    writer.writeValue(value, column);
+  }
+
+}
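
For reference, a minimal sketch of the write side, pairing with the reader example above. Not part of the patch; the schema and file name are illustrative.

    import java.io.File;

    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;
    import org.apache.trevni.ColumnFileMetaData;
    import org.apache.trevni.avro.AvroColumnWriter;

    public class WriteTrevniExample {
      public static void main(String[] args) throws Exception {
        Schema s = Schema.parse(
            "{\"type\":\"record\",\"name\":\"R\",\"fields\":"
            + "[{\"name\":\"id\",\"type\":\"long\"}]}");

        AvroColumnWriter<GenericData.Record> writer =
            new AvroColumnWriter<GenericData.Record>(s, new ColumnFileMetaData());
        GenericData.Record record = new GenericData.Record(s);
        record.put("id", 1L);
        writer.write(record);                 // rows are buffered in memory...
        writer.writeTo(new File("data.trv")); // ...until written out here
      }
    }
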
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnator.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnator.java
new file mode 100644
index 0000000..2f9a3ef
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnator.java
@@ -0,0 +1,168 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni.avro;
+
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.IdentityHashMap;
+
+import org.apache.trevni.ColumnMetaData;
+import org.apache.trevni.ValueType;
+import org.apache.trevni.TrevniRuntimeException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Field;
+
+/** Utility that computes the column layout of a schema. */
+class AvroColumnator {
+
+  private Schema schema;
+
+  private List<ColumnMetaData> columns = new ArrayList<ColumnMetaData>();
+  private List<Integer> arrayWidths = new ArrayList<Integer>();
+
+  public AvroColumnator(Schema schema) {
+    this.schema = schema;
+    columnize(null, schema, null, false);
+  }
+
+  /** Return columns for the schema. */
+  public ColumnMetaData[] getColumns() {
+    return columns.toArray(new ColumnMetaData[columns.size()]);
+  }
+
+  /** Return an array giving the number of columns immediately following each
+   * column that are descendants of that column. */
+  public int[] getArrayWidths() {
+    int[] result = new int[arrayWidths.size()];
+    int i = 0;
+    for (Integer width : arrayWidths)
+      result[i++] = width;
+    return result;
+  }
+
+  private Map<Schema,Schema> seen = new IdentityHashMap<Schema,Schema>();
+
+  private void columnize(String path, Schema s,
+                         ColumnMetaData parent, boolean isArray) {
+
+    if (isSimple(s)) {
+      if (path == null) path = s.getFullName();
+      addColumn(path, simpleValueType(s), parent, isArray);
+      return;
+    }
+
+    if (seen.containsKey(s))                      // catch recursion
+      throw new TrevniRuntimeException("Cannot shred recursive schemas: "+s);
+    seen.put(s, s);
+    
+    switch (s.getType()) {
+    case MAP: 
+      path = path == null ? ">" : path+">";
+      int start = columns.size();
+      ColumnMetaData p = addColumn(path, ValueType.NULL, parent, true);
+      addColumn(p(path,"key", ""), ValueType.STRING, p, false);
+      columnize(p(path,"value", ""), s.getValueType(), p, false);
+      arrayWidths.set(start, columns.size()-start); // fixup with actual width
+      break;
+    case RECORD:
+      for (Field field : s.getFields())           // flatten fields to columns
+        columnize(p(path, field.name(), "#"), field.schema(), parent, isArray);
+      break;
+    case ARRAY: 
+      path = path == null ? "[]" : path+"[]";
+      addArrayColumn(path, s.getElementType(), parent);
+      break;
+    case UNION:
+      for (Schema branch : s.getTypes())          // array per non-null branch
+        if (branch.getType() != Schema.Type.NULL)
+          addArrayColumn(p(path, branch, "/"), branch, parent);
+      break;
+    default:
+      throw new TrevniRuntimeException("Unknown schema: "+s);
+    }
+    seen.remove(s);
+  }
+
+  private String p(String parent, Schema child, String sep) {
+    if (child.getType() == Schema.Type.UNION)
+      return parent;
+    return p(parent, child.getFullName(), sep);
+  }
+
+  private String p(String parent, String child, String sep) {
+    return parent == null ? child : parent + sep + child;
+  }
+
+  private ColumnMetaData addColumn(String path, ValueType type,
+                                   ColumnMetaData parent, boolean isArray) {
+    ColumnMetaData column = new ColumnMetaData(path, type);
+    if (parent != null)
+      column.setParent(parent);
+    column.isArray(isArray);
+    columns.add(column);
+    arrayWidths.add(1);                           // placeholder
+    return column;
+  }
+
+  private void addArrayColumn(String path, Schema element,
+                              ColumnMetaData parent) {
+    if (path == null) path = element.getFullName();
+    if (isSimple(element)) {                      // optimize simple arrays
+      addColumn(path, simpleValueType(element), parent, true);
+      return;
+    }
+    // complex array: insert a parent column with lengths
+    int start = columns.size();
+    ColumnMetaData array = addColumn(path, ValueType.NULL, parent, true);
+    columnize(path, element, array, false); 
+    arrayWidths.set(start, columns.size()-start); // fixup with actual width
+  }
+
+  static boolean isSimple(Schema s) {
+    switch (s.getType()) {
+    case NULL: case BOOLEAN:
+    case INT: case LONG:
+    case FLOAT: case DOUBLE: 
+    case BYTES: case STRING: 
+    case ENUM: case FIXED:
+      return true;
+    default:
+      return false;
+    }
+  }
+
+  private ValueType simpleValueType(Schema s) {
+    switch (s.getType()) {
+    case NULL:   return ValueType.NULL;
+    case BOOLEAN:return ValueType.BOOLEAN;
+    case INT:    return ValueType.INT;
+    case LONG:   return ValueType.LONG;
+    case FLOAT:  return ValueType.FLOAT;
+    case DOUBLE: return ValueType.DOUBLE;
+    case BYTES:  return ValueType.BYTES;
+    case STRING: return ValueType.STRING;
+    case ENUM:   return ValueType.INT;
+    case FIXED:  return ValueType.BYTES;
+    default:
+      throw new TrevniRuntimeException("Unknown schema: "+s);
+    }
+  }
+
+}    
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniInputFormat.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniInputFormat.java
new file mode 100644
index 0000000..47bec01
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniInputFormat.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.RecordReader;
+
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.mapred.AvroJob;
+import org.apache.avro.mapred.AvroWrapper;
+
+/** An {@link org.apache.hadoop.mapred.InputFormat} for Trevni files.
+ *
+ * <p>A subset schema to be read may be specified with {@link
+ * AvroJob#setInputSchema(JobConf,Schema)}.
+ */
+public class AvroTrevniInputFormat<T>
+  extends FileInputFormat<AvroWrapper<T>, NullWritable> {
+
+  @Override
+  protected boolean isSplitable(FileSystem fs, Path filename) {
+    return false;
+  }
+
+  @Override
+  protected FileStatus[] listStatus(JobConf job) throws IOException {
+    List<FileStatus> result = new ArrayList<FileStatus>();
+    job.setBoolean("mapred.input.dir.recursive", true);
+    for (FileStatus file : super.listStatus(job))
+      if (file.getPath().getName().endsWith(AvroTrevniOutputFormat.EXT))
+        result.add(file);
+    return result.toArray(new FileStatus[0]);
+  }
+
+  @Override
+  public RecordReader<AvroWrapper<T>, NullWritable>
+    getRecordReader(InputSplit split, final JobConf job,
+                    Reporter reporter) throws IOException {
+    final FileSplit file = (FileSplit)split;
+    reporter.setStatus(file.toString());
+
+    final AvroColumnReader.Params params =
+      new AvroColumnReader.Params(new HadoopInput(file.getPath(), job));
+    params.setModel(ReflectData.get());
+    if (job.get(AvroJob.INPUT_SCHEMA) != null)
+      params.setSchema(AvroJob.getInputSchema(job));
+
+    return new RecordReader<AvroWrapper<T>, NullWritable>() {
+      private AvroColumnReader<T> reader = new AvroColumnReader<T>(params);
+      private float rows = reader.getRowCount();
+      private long row;
+
+      public AvroWrapper<T> createKey() { return new AvroWrapper<T>(null); }
+  
+      public NullWritable createValue() { return NullWritable.get(); }
+    
+      public boolean next(AvroWrapper<T> wrapper, NullWritable ignore)
+        throws IOException {
+        if (!reader.hasNext())
+          return false;
+        wrapper.datum(reader.next());
+        row++;
+        return true;
+      }
+  
+      public float getProgress() throws IOException { return row / rows; }
+  
+      public long getPos() throws IOException { return row; }
+
+      public void close() throws IOException { reader.close(); }
+  
+    };
+
+  }
+
+}
+
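A sketch of wiring this input format into an old-API (mapred) job; the input path is illustrative and the rest of the job configuration is assumed to be done elsewhere.

    import org.apache.avro.Schema;
    import org.apache.avro.mapred.AvroJob;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.FileInputFormat;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.trevni.avro.AvroTrevniInputFormat;

    public class TrevniInputSetup {
      static void configure(JobConf job, Schema subset) {
        AvroJob.setInputSchema(job, subset);  // optional projection schema
        // Use the Trevni input format rather than the default Avro one.
        job.setInputFormat(AvroTrevniInputFormat.class);
        FileInputFormat.setInputPaths(job, new Path("/data/trevni"));
      }
    }
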
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java
new file mode 100644
index 0000000..60b432b
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Map;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.RecordWriter;
+import org.apache.hadoop.util.Progressable;
+
+import org.apache.avro.Schema;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.mapred.AvroJob;
+import org.apache.avro.mapred.AvroWrapper;
+
+import org.apache.trevni.MetaData;
+import org.apache.trevni.ColumnFileMetaData;
+
+/** An {@link org.apache.hadoop.mapred.OutputFormat} that writes Avro data to
+ * Trevni files.
+ *
+ * <p>Writes a directory of files per task, each comprising a single filesystem
+ * block.  To reduce the number of files, increase the default filesystem block
+ * size for the job.  Each task also requires enough memory to buffer a
+ * filesystem block.
+ */
+public class AvroTrevniOutputFormat<T>
+  extends FileOutputFormat<AvroWrapper<T>, NullWritable> {
+
+  /** The file name extension for trevni files. */
+  public final static String EXT = ".trv";
+  
+  public static final String META_PREFIX = "trevni.meta.";
+
+  /** Add metadata to job output files.*/
+  public static void setMeta(JobConf job, String key, String value) {
+    job.set(META_PREFIX+key, value);
+  }
+
+  @Override
+  public RecordWriter<AvroWrapper<T>, NullWritable>
+    getRecordWriter(FileSystem ignore, final JobConf job,
+                    final String name, Progressable prog)
+    throws IOException {
+
+    boolean isMapOnly = job.getNumReduceTasks() == 0;
+    final Schema schema = isMapOnly
+      ? AvroJob.getMapOutputSchema(job)
+      : AvroJob.getOutputSchema(job);
+
+    final ColumnFileMetaData meta = filterMetadata(job);
+
+    final Path dir = FileOutputFormat.getTaskOutputPath(job, name);
+    final FileSystem fs = dir.getFileSystem(job);
+    if (!fs.mkdirs(dir))
+      throw new IOException("Failed to create directory: " + dir);
+    final long blockSize = fs.getDefaultBlockSize();
+
+    return new RecordWriter<AvroWrapper<T>, NullWritable>() {
+      private int part = 0;
+
+      private AvroColumnWriter<T> writer =
+        new AvroColumnWriter<T>(schema, meta, ReflectData.get());
+    
+      private void flush() throws IOException {
+        OutputStream out = fs.create(new Path(dir, "part-"+(part++)+EXT));
+        try {
+          writer.writeTo(out);
+        } finally {
+          out.close();
+        }
+        writer = new AvroColumnWriter<T>(schema, meta, ReflectData.get());
+      }
+
+      public void write(AvroWrapper<T> wrapper, NullWritable ignore)
+        throws IOException {
+        writer.write(wrapper.datum());
+        if (writer.sizeEstimate() >= blockSize)              // block full
+          flush();
+      }
+      public void close(Reporter reporter) throws IOException {
+        flush();
+      }
+    };
+  }
+
+  static ColumnFileMetaData filterMetadata(final JobConf job) {
+    final ColumnFileMetaData meta = new ColumnFileMetaData();
+    for (Map.Entry<String,String> e : job)
+      if (e.getKey().startsWith(META_PREFIX))
+        meta.put(e.getKey().substring(META_PREFIX.length()),
+                 e.getValue().getBytes(MetaData.UTF8));
+    return meta;
+  }
+
+}
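A corresponding output-side sketch for the old API, including the setMeta() hook above; the metadata key/value and output path are illustrative.

    import org.apache.avro.Schema;
    import org.apache.avro.mapred.AvroJob;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.FileOutputFormat;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.trevni.avro.AvroTrevniOutputFormat;

    public class TrevniOutputSetup {
      static void configure(JobConf job, Schema schema) {
        AvroJob.setOutputSchema(job, schema);
        // Stored in each file's metadata under "createdBy" (META_PREFIX is stripped).
        AvroTrevniOutputFormat.setMeta(job, "createdBy", "example");
        job.setOutputFormat(AvroTrevniOutputFormat.class);
        FileOutputFormat.setOutputPath(job, new Path("/data/trevni-out"));
      }
    }
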
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/HadoopInput.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/HadoopInput.java
new file mode 100644
index 0000000..410a577
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/HadoopInput.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FSDataInputStream;
+
+import org.apache.trevni.Input;
+
+/** Adapt a Hadoop {@link FSDataInputStream} to Trevni's {@link Input}. */
+public class HadoopInput implements Input {
+  private final FSDataInputStream stream;
+  private final long len;
+
+  /** Construct given a path and a configuration. */
+  public HadoopInput(Path path, Configuration conf) throws IOException {
+    this.stream = path.getFileSystem(conf).open(path);
+    this.len = path.getFileSystem(conf).getFileStatus(path).getLen();
+  }
+
+  @Override public long length() {
+    return len;
+  }
+
+  @Override public int read(long p, byte[] b, int s, int l) throws IOException {
+    return stream.read(p, b, s, l);
+  }
+
+  @Override public void close() throws IOException {
+    stream.close();
+  }
+}
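A sketch of reading a Trevni file directly from a Hadoop filesystem through this adapter; the HDFS path is illustrative.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.trevni.avro.AvroColumnReader;
    import org.apache.trevni.avro.HadoopInput;

    public class HdfsReadExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Wrap the HDFS file as a Trevni Input and hand it to the reader.
        AvroColumnReader.Params params = new AvroColumnReader.Params(
            new HadoopInput(new Path("hdfs:///data/part-0.trv"), conf));
        AvroColumnReader<Object> reader = new AvroColumnReader<Object>(params);
        try {
          for (Object record : reader)
            System.out.println(record);
        } finally {
          reader.close();
        }
      }
    }
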
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyInputFormat.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyInputFormat.java
new file mode 100644
index 0000000..89287e6
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyInputFormat.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.mapred.AvroKey;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+
+/**
+ * An {@link org.apache.hadoop.mapreduce.InputFormat} for Trevni container
+ * files.
+ *
+ * <p>This implementation was modeled on
+ * {@link org.apache.avro.mapreduce.AvroKeyInputFormat} to allow for an easy
+ * transition.</p>
+ *
+ * <p>Keys are AvroKey wrapper objects that contain the Trevni data.  Since
+ * Trevni container files store only records (not key/value pairs), the value
+ * from this InputFormat is a NullWritable.</p>
+ *
+ * <p>A subset schema to be read may be specified with
+ * {@link org.apache.avro.mapreduce.AvroJob#setInputKeySchema}.
+ */
+public class AvroTrevniKeyInputFormat<T> extends FileInputFormat<AvroKey<T>, NullWritable> {
+  
+  @Override
+  public RecordReader<AvroKey<T>, NullWritable> createRecordReader(
+      InputSplit split, TaskAttemptContext context) throws IOException,
+      InterruptedException {
+    
+    return new AvroTrevniKeyRecordReader<T>();
+  }
+
+}
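A sketch of configuring this format in a new-API (mapreduce) job; assumes a Hadoop 2 Job object created elsewhere.

    import org.apache.avro.Schema;
    import org.apache.avro.mapreduce.AvroJob;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.trevni.avro.mapreduce.AvroTrevniKeyInputFormat;

    public class KeyInputSetup {
      static void configure(Job job, Schema schema) {
        job.setInputFormatClass(AvroTrevniKeyInputFormat.class);
        AvroJob.setInputKeySchema(job, schema);  // optional subset schema
      }
    }
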
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyOutputFormat.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyOutputFormat.java
new file mode 100644
index 0000000..34354f7
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyOutputFormat.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.mapred.AvroKey;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+
+/** An {@link org.apache.hadoop.mapreduce.OutputFormat} that writes Avro data
+ * to Trevni container files.
+ *
+ * <p>This implementation was modeled on
+ * {@link org.apache.avro.mapreduce.AvroKeyOutputFormat} to allow for an easy
+ * transition.</p>
+ *
+ * <p>Since Trevni container files only contain records (not key/value pairs),
+ * this output format ignores the value.</p>
+ *
+ * <p>Writes a directory of files per task, each comprising a single filesystem
+ * block.  To reduce the number of files, increase the default filesystem block
+ * size for the job.  Each task also requires enough memory to buffer a
+ * filesystem block.</p>
+ *
+ * @param <T> The (Java) type of the Trevni data to write.
+ */
+public class AvroTrevniKeyOutputFormat<T> extends FileOutputFormat<AvroKey<T>, NullWritable> {
+
+  @Override
+  public RecordWriter<AvroKey<T>, NullWritable> getRecordWriter(TaskAttemptContext context)
+      throws IOException, InterruptedException {
+    return new AvroTrevniKeyRecordWriter<T>(context);
+  }
+}
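And the matching output-side configuration for the new API, under the same assumptions as the input sketch above.

    import org.apache.avro.Schema;
    import org.apache.avro.mapreduce.AvroJob;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.trevni.avro.mapreduce.AvroTrevniKeyOutputFormat;

    public class KeyOutputSetup {
      static void configure(Job job, Schema schema) {
        AvroJob.setOutputKeySchema(job, schema);
        job.setOutputFormatClass(AvroTrevniKeyOutputFormat.class);
        job.setOutputValueClass(NullWritable.class);  // values are ignored
      }
    }
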
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyRecordReader.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyRecordReader.java
new file mode 100644
index 0000000..88f2410
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyRecordReader.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.mapred.AvroKey;
+import org.apache.hadoop.io.NullWritable;
+
+/**
+ * Reads records from an input split representing a chunk of a Trevni container
+ * file.
+ *
+ * @param <T> The (Java) type of data in the Trevni container file.
+ */
+public class AvroTrevniKeyRecordReader<T> extends AvroTrevniRecordReaderBase<AvroKey<T>, NullWritable, T> {
+  
+  /** A reusable object to hold records read from the Trevni container file. */
+  private final AvroKey<T> mCurrentKey = new AvroKey<T>();
+  
+  /** {@inheritDoc} */
+  @Override
+  public AvroKey<T> getCurrentKey() throws IOException,
+      InterruptedException {
+    return mCurrentKey;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public NullWritable getCurrentValue() throws IOException,
+      InterruptedException {
+    return NullWritable.get();
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public boolean nextKeyValue() throws IOException, InterruptedException {
+    boolean hasNext = super.nextKeyValue();
+    mCurrentKey.datum(getCurrentRecord());
+    return hasNext;
+  }
+
+}
+
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyRecordWriter.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyRecordWriter.java
new file mode 100644
index 0000000..cfc4c2f
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyRecordWriter.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapreduce.AvroJob;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+/**
+ * Writes Trevni records to a Trevni container file output stream.
+ *
+ * @param <T> The Java type of the Trevni data to write.
+ */
+public class AvroTrevniKeyRecordWriter<T> extends AvroTrevniRecordWriterBase<AvroKey<T>, NullWritable, T> {
+
+  /**
+   * Constructor.
+   * @param context The TaskAttemptContext that supplies the writer with information from the job configuration
+   */
+  public AvroTrevniKeyRecordWriter(TaskAttemptContext context)
+      throws IOException {
+    super(context);
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void write(AvroKey<T> key, NullWritable value) throws IOException,
+      InterruptedException {
+    writer.write(key.datum());
+    if (writer.sizeEstimate() >= blockSize) // block full
+      flush();
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  protected Schema initSchema(TaskAttemptContext context) {
+    boolean isMapOnly = context.getNumReduceTasks() == 0;
+    return isMapOnly ? AvroJob.getMapOutputKeySchema(context
+        .getConfiguration()) : AvroJob.getOutputKeySchema(context
+        .getConfiguration());
+  }
+}
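The initSchema logic above means the job driver must register the key schema under the matching configuration key: the map output key schema for a map-only job, the final output key schema otherwise. A hedged sketch of the corresponding org.apache.avro.mapreduce.AvroJob calls (the helper class is illustrative):

    import org.apache.avro.Schema;
    import org.apache.avro.mapreduce.AvroJob;
    import org.apache.hadoop.mapreduce.Job;

    class TrevniSchemaWiring {
      /** Registers the key schema where AvroTrevniKeyRecordWriter.initSchema will find it. */
      static void setKeySchema(Job job, Schema schema, boolean mapOnly) {
        if (mapOnly) {
          AvroJob.setMapOutputKeySchema(job, schema); // read back via getMapOutputKeySchema
        } else {
          AvroJob.setOutputKeySchema(job, schema);    // read back via getOutputKeySchema
        }
      }
    }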
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueInputFormat.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueInputFormat.java
new file mode 100644
index 0000000..c16e381
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueInputFormat.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+
+/**
+ * An {@link org.apache.hadoop.mapreduce.InputFormat} for Trevni container files of
+ * key/value generic records.
+ *
+ * <p>This implementation was modeled after
+ * {@link org.apache.avro.mapreduce.AvroKeyValueInputFormat} to allow for an easy
+ * transition.</p>
+ *
+ * <p>Trevni container files that contain generic records with the two fields 'key' and
+ * 'value' are expected.  The contents of the 'key' field will be used as the job input
+ * key, and the contents of the 'value' field will be used as the job input value.</p>
+ *
+ * <p>A subset schema to be read may be specified with
+ * {@link org.apache.avro.mapreduce.AvroJob#setInputKeySchema} and
+ * {@link org.apache.avro.mapreduce.AvroJob#setInputValueSchema}.</p>
+ *
+ * @param <K> The type of the Trevni key to read.
+ * @param <V> The type of the Trevni value to read.
+ */
+public class AvroTrevniKeyValueInputFormat<K, V> extends FileInputFormat<AvroKey<K>, AvroValue<V>> {
+
+  /** {@inheritDoc} */
+  @Override
+  public RecordReader<AvroKey<K>, AvroValue<V>> createRecordReader(
+      InputSplit split, TaskAttemptContext context) throws IOException,
+      InterruptedException {
+    
+    return new AvroTrevniKeyValueRecordReader<K, V>();
+  }
+
+}
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueOutputFormat.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueOutputFormat.java
new file mode 100644
index 0000000..c508df3
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueOutputFormat.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+
+/**
+ * An {@link org.apache.hadoop.mapreduce.OutputFormat} that writes Avro data to
+ * Trevni container files of key/value pairs.
+ *
+ * <p>This implementation was modeled after
+ * {@link org.apache.avro.mapreduce.AvroKeyValueOutputFormat} to allow for an easy
+ * transition.</p>
+ *
+ * <p>Since Trevni container files can only contain records (not key/value pairs), this
+ * output format puts the key and value into an Avro generic record with two fields, named
+ * 'key' and 'value'.</p>
+ *
+ * <p>The keys and values given to this output format may be Avro objects wrapped in
+ * <code>AvroKey</code> or <code>AvroValue</code> objects.  The basic Writable types are
+ * also supported (e.g., IntWritable, Text); they will be converted to their corresponding
+ * Avro types.</p>
+ *
+ * <p>Writes a directory of files per task, each comprising a single filesystem
+ * block.  To reduce the number of files, increase the default filesystem block
+ * size for the job.  Each task also requires enough memory to buffer a
+ * filesystem block.</p>
+ *
+ * @param <K> The type of key. If an Avro type, it must be wrapped in an <code>AvroKey</code>.
+ * @param <V> The type of value. If an Avro type, it must be wrapped in an <code>AvroValue</code>.
+ */
+public class AvroTrevniKeyValueOutputFormat<K, V> extends FileOutputFormat<AvroKey<K>, AvroValue<V>> {
+
+  /** {@inheritDoc} */
+  @Override
+  public RecordWriter<AvroKey<K>, AvroValue<V>> getRecordWriter(TaskAttemptContext context)
+      throws IOException, InterruptedException {
+    
+    return new AvroTrevniKeyValueRecordWriter<K, V>(context);
+  }
+}
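A corresponding driver sketch for the key/value format, again assuming the Hadoop 2 Job API; the class name, schemas, and path are illustrative. For AvroKey/AvroValue outputs, the writer's converter factory resolves the schemas from the job configuration, so both the output classes and the Avro schemas are registered:

    import org.apache.avro.Schema;
    import org.apache.avro.mapred.AvroKey;
    import org.apache.avro.mapred.AvroValue;
    import org.apache.avro.mapreduce.AvroJob;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.trevni.avro.mapreduce.AvroTrevniKeyValueOutputFormat;

    public class TrevniKeyValueJobSketch {
      public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "trevni-kv-sketch");
        // The writer's converter factory inspects the output key/value classes ...
        job.setOutputKeyClass(AvroKey.class);
        job.setOutputValueClass(AvroValue.class);
        // ... and, for Avro wrappers, looks their schemas up in the configuration.
        AvroJob.setOutputKeySchema(job, Schema.create(Schema.Type.STRING));
        AvroJob.setOutputValueSchema(job, Schema.create(Schema.Type.LONG));
        job.setOutputFormatClass(AvroTrevniKeyValueOutputFormat.class);
        FileOutputFormat.setOutputPath(job, new Path(args[0]));
        // Input path, mapper, etc. omitted; this only shows the output-format wiring.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
      }
    }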
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordReader.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordReader.java
new file mode 100644
index 0000000..3aa956f
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordReader.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.hadoop.io.AvroKeyValue;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+
+/**
+ * Reads Trevni generic records from a Trevni container file, where the records contain two
+ * fields: 'key' and 'value'.
+ *
+ * <p>The contents of the 'key' field will be parsed into an AvroKey object. The contents
+ * of the 'value' field will be parsed into an AvroValue object.</p>
+ *
+ * @param <K> The type of the Avro key to read.
+ * @param <V> The type of the Avro value to read.
+ */
+public class AvroTrevniKeyValueRecordReader<K, V> extends AvroTrevniRecordReaderBase<AvroKey<K>, AvroValue<V>, GenericRecord> {
+
+  /** The current key the reader is on. */
+  private final AvroKey<K> mCurrentKey = new AvroKey<K>();
+  /** The current value the reader is on. */
+  private final AvroValue<V> mCurrentValue = new AvroValue<V>();
+  
+  /** {@inheritDoc} */
+  @Override
+  public AvroKey<K> getCurrentKey() throws IOException,
+      InterruptedException {
+    return mCurrentKey;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public AvroValue<V> getCurrentValue() throws IOException,
+      InterruptedException {
+    return mCurrentValue;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public boolean nextKeyValue() throws IOException, InterruptedException {
+    boolean hasNext = super.nextKeyValue();
+    AvroKeyValue<K, V> avroKeyValue = new AvroKeyValue<K, V>(getCurrentRecord());
+    mCurrentKey.datum(avroKeyValue.getKey());
+    mCurrentValue.datum(avroKeyValue.getValue());
+    return hasNext;
+  }
+}
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordWriter.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordWriter.java
new file mode 100644
index 0000000..136ef06
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordWriter.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.hadoop.io.AvroDatumConverter;
+import org.apache.avro.hadoop.io.AvroDatumConverterFactory;
+import org.apache.avro.hadoop.io.AvroKeyValue;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+
+/**
+ * Writes key/value pairs to a Trevni container file.
+ *
+ * <p>Each entry in the Trevni container file will be a generic record with two fields,
+ * named 'key' and 'value'.  The input types may be basic Writable objects like Text or
+ * IntWritable, or they may be AvroWrapper subclasses (AvroKey or AvroValue).  Writable
+ * objects will be converted to their corresponding Avro types when written to the generic
+ * record key/value pair.</p>
+ *
+ * @param <K> The type of key to write.
+ * @param <V> The type of value to write.
+ */
+public class AvroTrevniKeyValueRecordWriter<K, V> extends AvroTrevniRecordWriterBase<AvroKey<K>, AvroValue<V>, GenericRecord> {
+
+  /** The writer schema for the generic record entries of the Trevni container file. */
+  Schema mKeyValuePairSchema;
+  
+  /** A reusable Avro generic record for writing key/value pairs to the file. */
+  AvroKeyValue<Object, Object> keyValueRecord;
+  
+  /** A helper object that converts the input key to an Avro datum. */
+  AvroDatumConverter<K, ?> keyConverter;
+  
+  /** A helper object that converts the input value to an Avro datum. */
+  AvroDatumConverter<V, ?> valueConverter;
+    
+  /**
+   * Constructor.
+   * @param context The TaskAttemptContext that supplies the writer with information from the job configuration
+   */
+  public AvroTrevniKeyValueRecordWriter(TaskAttemptContext context)
+      throws IOException {
+    super(context);
+    
+    mKeyValuePairSchema = initSchema(context);
+    keyValueRecord = new AvroKeyValue<Object, Object>(new GenericData.Record(mKeyValuePairSchema));
+  }
+  
+  /** {@inheritDoc} */
+  @Override
+  public void write(AvroKey<K> key, AvroValue<V> value) throws IOException,
+      InterruptedException {
+    
+    keyValueRecord.setKey(key.datum());
+    keyValueRecord.setValue(value.datum());
+    writer.write(keyValueRecord.get());
+    if (writer.sizeEstimate() >= blockSize) // block full
+      flush();
+  }
+  
+  /** {@inheritDoc} */
+  @SuppressWarnings("unchecked")
+  @Override
+  protected Schema initSchema(TaskAttemptContext context) {
+    AvroDatumConverterFactory converterFactory = new AvroDatumConverterFactory(
+        context.getConfiguration());
+    
+    keyConverter = converterFactory.create((Class<K>) context
+        .getOutputKeyClass());
+    valueConverter = converterFactory.create((Class<V>) context
+        .getOutputValueClass());
+
+    // Create the generic record schema for the key/value pair.
+    return AvroKeyValue.getSchema(
+        keyConverter.getWriterSchema(), valueConverter.getWriterSchema());
+    
+  }
+  
+}
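For reference, a tiny sketch showing the shape of the pair schema that initSchema above builds via AvroKeyValue.getSchema; the string key and long value are chosen purely for illustration:

    import org.apache.avro.Schema;
    import org.apache.avro.hadoop.io.AvroKeyValue;

    public class PairSchemaSketch {
      public static void main(String[] args) {
        Schema pair = AvroKeyValue.getSchema(
            Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.LONG));
        // Prints a generic record schema with exactly two fields, 'key' and 'value'.
        System.out.println(pair.toString(true));
      }
    }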
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordReaderBase.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordReaderBase.java
new file mode 100644
index 0000000..b68669f
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordReaderBase.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro.mapreduce;
+
+import java.io.IOException;
+
+import org.apache.avro.mapreduce.AvroJob;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+import org.apache.trevni.avro.AvroColumnReader;
+import org.apache.trevni.avro.HadoopInput;
+
+/**
+ * Abstract base class for <code>RecordReader</code>s that read Trevni container files.
+ *
+ * @param <K> The type of key the record reader should generate.
+ * @param <V> The type of value the record reader should generate.
+ * @param <T> The type of the entries within the Trevni container file being read.
+ */
+public abstract class AvroTrevniRecordReaderBase<K, V, T> extends RecordReader<K, V> {
+  
+  /** The Trevni file reader */
+  private AvroColumnReader<T> reader;
+  
+  /** Number of rows in the Trevni file (a float so getProgress() divides in floating point) */
+  private float rows;
+  
+  /** The current row number being read in */
+  private long row;
+  
+  /** A reusable object to hold records of the Trevni container file. */
+  private T mCurrentRecord;
+
+  /** {@inheritDoc} */
+  @Override
+  public void initialize(InputSplit inputSplit, TaskAttemptContext context)
+      throws IOException, InterruptedException {
+    final FileSplit file = (FileSplit)inputSplit;
+    context.setStatus(file.toString());
+
+    final AvroColumnReader.Params params =
+      new AvroColumnReader.Params(new HadoopInput(file.getPath(), context.getConfiguration()));
+    params.setModel(ReflectData.get());
+    
+    if (AvroJob.getInputKeySchema(context.getConfiguration()) != null) {
+      params.setSchema(AvroJob.getInputKeySchema(context.getConfiguration()));
+    }
+    
+    reader = new AvroColumnReader<T>(params);
+    rows = reader.getRowCount();
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public boolean nextKeyValue() throws IOException, InterruptedException {
+    if (!reader.hasNext())
+      return false;
+    mCurrentRecord = reader.next();
+    row++;
+    return true;
+  }
+  
+  /**
+   * Gets the current record read from the Trevni container file.
+   *
+   * <p>Calling <code>nextKeyValue()</code> moves this to the next record.</p>
+   *
+   * @return The current Trevni record (may be null if no record has been read).
+   */
+  protected T getCurrentRecord() {
+    return mCurrentRecord;
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public void close() throws IOException {
+    reader.close(); 
+  }
+
+  /** {@inheritDoc} */
+  @Override
+  public float getProgress() throws IOException, InterruptedException {
+    return row / rows;
+  }
+}
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java
new file mode 100644
index 0000000..94a332d
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro.mapreduce;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Iterator;
+import java.util.Map.Entry;
+
+import org.apache.avro.Schema;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.trevni.ColumnFileMetaData;
+import org.apache.trevni.MetaData;
+import org.apache.trevni.avro.AvroColumnWriter;
+
+/**
+ * Abstract base class for <code>RecordWriter</code>s that write Trevni container files.
+ *
+ * @param <K> The type of key the record writer should generate.
+ * @param <V> The type of value the record writer should generate.
+ * @param <T> The type of the entries within the Trevni container file being written.
+ */
+public abstract class AvroTrevniRecordWriterBase<K, V, T> extends RecordWriter<K, V> {
+  
+  /** trevni file extension */
+  public final static String EXT = ".trv";
+  
+  /** prefix of job configs that we care about */
+  public static final String META_PREFIX = "trevni.meta.";
+  
+  /** Counter that increments as new Trevni files are created because the current file
+   * has exceeded the block size.
+   */
+  protected int part = 0;
+
+  /** Trevni file writer */
+  protected AvroColumnWriter<T> writer;
+
+  /** This will be a unique directory linked to the task */
+  final Path dirPath;
+  
+  /** Filesystem that the output files are written to */
+  final FileSystem fs;
+
+  /** Current configured blocksize */
+  final long blockSize;
+  
+  /** Provided avro schema from the context */
+  protected Schema schema;
+  
+  /** Metadata to be stored in the output file. */
+  protected ColumnFileMetaData meta;
+  
+  /**
+   * Constructor.
+   * @param context The TaskAttemptContext that supplies the writer with information from the job configuration
+   */
+  public AvroTrevniRecordWriterBase(TaskAttemptContext context) throws IOException {
+    
+    schema = initSchema(context);
+    meta = filterMetadata(context.getConfiguration());
+    writer = new AvroColumnWriter<T>(schema, meta, ReflectData.get());
+
+    Path outputPath = FileOutputFormat.getOutputPath(context);
+    
+    String dir = FileOutputFormat.getUniqueFile(context, "part", "");
+    dirPath = new Path(outputPath.toString() + "/" + dir);
+    fs = dirPath.getFileSystem(context.getConfiguration());
+    fs.mkdirs(dirPath);
+
+    blockSize = fs.getDefaultBlockSize();
+  }
+
+  /**
+   * Uses the task context to construct a schema for writing.
+   */
+  protected abstract Schema initSchema(TaskAttemptContext context);
+  
+  /**
+   * A Trevni flush closes the current file and prepares a new writer.
+   * @throws IOException if the current file cannot be written
+   */
+  public void flush() throws IOException {
+    OutputStream out = fs.create(new Path(dirPath, "part-" + (part++) + EXT));
+    try {
+      writer.writeTo(out);
+    } finally {
+      out.close();
+    }
+    writer = new AvroColumnWriter<T>(schema, meta, ReflectData.get());
+  }
+  
+  /** {@inheritDoc} */
+  @Override
+  public void close(TaskAttemptContext arg0) throws IOException,
+      InterruptedException {
+    flush();
+  }
+  
+  static ColumnFileMetaData filterMetadata(final Configuration configuration) {
+    final ColumnFileMetaData meta = new ColumnFileMetaData();
+    Iterator<Entry<String, String>> keyIterator = configuration.iterator();
+
+    while (keyIterator.hasNext()) {
+      Entry<String, String> confEntry = keyIterator.next();
+      if (confEntry.getKey().startsWith(META_PREFIX))
+        meta.put(confEntry.getKey().substring(META_PREFIX.length()), confEntry
+            .getValue().getBytes(MetaData.UTF8));
+    }
+
+    return meta;
+  }
+}
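The META_PREFIX filtering above can be exercised directly. A minimal sketch, placed in the same package since filterMetadata is package-private; the class name and configuration keys are illustrative:

    package org.apache.trevni.avro.mapreduce;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.trevni.ColumnFileMetaData;

    class FilterMetadataSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("trevni.meta.createdBy", "example"); // kept, stored under the key "createdBy"
        conf.set("some.other.setting", "ignored");    // dropped: no trevni.meta. prefix
        ColumnFileMetaData meta = AvroTrevniRecordWriterBase.filterMetadata(conf);
        System.out.println(new String(meta.get("createdBy"))); // prints "example"
      }
    }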
diff --git a/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/package.html b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/package.html
new file mode 100644
index 0000000..7fcab27
--- /dev/null
+++ b/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/package.html
@@ -0,0 +1,38 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>Read and write <a href="http://avro.apache.org/">Avro</a> data
+in Trevni column files.
+
+<h2>Limitations</h2>
+
+The current implementation does not correctly handle all Avro data.
+In particular:
+
+<ul>
+  <li>Recursive types are not supported.</li>
+  <li>With ReflectData, fields of Java type <b>byte</b>, <b>short</b>
+    and <b>char</b> are not supported.  Instead use int. </li>
+  <li>With ReflectData, Java arrays are not supported.  Instead use
+  List. </li>
+  <li>An <b>enum</b> is always read as a GenericData.EnumSymbol, even
+    when SpecificData or ReflectData are used. </li>
+</ul>
+
+</body>
+</html>
diff --git a/lang/java/trevni/avro/src/test/cases/dremel/input.avsc b/lang/java/trevni/avro/src/test/cases/dremel/input.avsc
new file mode 100644
index 0000000..81946d2
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/cases/dremel/input.avsc
@@ -0,0 +1,78 @@
+{
+    "type": "record",
+    "name": "Document",
+    "fields": [
+        {
+            "name": "DocId",
+            "type": "long"
+        },
+        {
+            "name": "Links",
+            "type": [
+                "null",
+                {
+                    "name": "Links",
+                    "type": "record",
+                    "fields": [
+                        {
+                            "name": "Backward",
+                            "type": {
+                                "type": "array",
+                                "items": "long"
+                            }
+                        },
+                        {
+                            "name": "Forward",
+                            "type": {
+                                "type": "array",
+                                "items": "long"
+                            }
+                        }
+                    ]
+                }
+            ]
+        },
+        {
+            "name": "Name",
+            "type": {
+                "type": "array",
+                "items": {
+                    "name": "Name",
+                    "type": "record",
+                    "fields": [
+                        {
+                            "name": "Language",
+                            "type": {
+                                "type": "array",
+                                "items": {
+                                    "name": "Language",
+                                    "type": "record",
+                                    "fields": [
+                                        {
+                                            "name": "Code",
+                                            "type": "string"
+                                        },
+                                        {
+                                            "name": "Country",
+                                            "type": [
+                                                "null",
+                                                "string"
+                                            ]
+                                        }
+                                    ]
+                                }
+                            }
+                        },
+                        {
+                            "name": "Url",
+                            "type": [
+                                "null",
+                                "string"
+                            ]
+                        }
+                    ]
+                }
+            }
+        }
+    ]
+}
diff --git a/lang/java/trevni/avro/src/test/cases/dremel/input.json b/lang/java/trevni/avro/src/test/cases/dremel/input.json
new file mode 100644
index 0000000..ebc002e
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/cases/dremel/input.json
@@ -0,0 +1,73 @@
+{
+    "DocId": 10,
+    "Links": {
+        "Links": {
+            "Backward": [],
+            "Forward": [
+                20,
+                40,
+                60
+            ]
+        }
+    },
+    "Name": [
+        {
+            "Language": [
+                {
+                    "Code": "en-us",
+                    "Country": {
+                        "string": "us"
+                    }
+                },
+                {
+                    "Code": "en",
+                    "Country": null
+                }
+            ],
+            "Url": {
+                "string": "http://A"
+            }
+        },
+        {
+            "Language": [],
+            "Url": {
+                "string": "http://B"
+            }
+        },
+        {
+            "Language": [
+                {
+                    "Code": "en-gb",
+                    "Country": {
+                        "string": "gb"
+                    }
+                }
+            ],
+            "Url": null
+        }
+    ]
+}
+{
+    "DocId": 20,
+    "Links": {
+        "Links": {
+            "Backward": [
+                10,
+                30
+            ],
+            "Forward": [
+                80
+            ]
+        }
+    },
+    "Name": [
+        {
+            "Language": [
+                
+            ],
+            "Url": {
+                "string": "http://C"
+            }
+        }
+    ]
+}
diff --git a/lang/java/trevni/avro/src/test/cases/dremel/sub1/sub.avsc b/lang/java/trevni/avro/src/test/cases/dremel/sub1/sub.avsc
new file mode 100644
index 0000000..5cf2f10
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/cases/dremel/sub1/sub.avsc
@@ -0,0 +1,41 @@
+{
+    "type": "record",
+    "name": "Document",
+    "fields": [
+        {
+            "name": "DocId",
+            "type": "long"
+        },
+        {
+            "name": "Name",
+            "type": {
+                "type": "array",
+                "items": {
+                    "name": "Name",
+                    "type": "record",
+                    "fields": [
+                        {
+                            "name": "Language",
+                            "type": {
+                                "type": "array",
+                                "items": {
+                                    "name": "Language",
+                                    "type": "record",
+                                    "fields": [
+                                        {
+                                            "name": "Country",
+                                            "type": [
+                                                "null",
+                                                "string"
+                                            ]
+                                        }
+                                    ]
+                                }
+                            }
+                        }
+                    ]
+                }
+            }
+        }
+    ]
+}
diff --git a/lang/java/trevni/avro/src/test/cases/dremel/sub1/sub.json b/lang/java/trevni/avro/src/test/cases/dremel/sub1/sub.json
new file mode 100644
index 0000000..aa58d10
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/cases/dremel/sub1/sub.json
@@ -0,0 +1,37 @@
+{
+    "DocId": 10,
+    "Name": [
+        {
+            "Language": [
+                {
+                    "Country": {
+                        "string": "us"
+                    }
+                },
+                {
+                    "Country": null
+                }
+            ]
+        },
+        {
+            "Language": []
+        },
+        {
+            "Language": [
+                {
+                    "Country": {
+                        "string": "gb"
+                    }
+                }
+            ]
+        }
+    ]
+}
+{
+    "DocId": 20,
+    "Name": [
+        {
+            "Language": []
+        }
+    ]
+}
diff --git a/lang/java/trevni/avro/src/test/cases/enum/input.avsc b/lang/java/trevni/avro/src/test/cases/enum/input.avsc
new file mode 100644
index 0000000..7505a57
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/cases/enum/input.avsc
@@ -0,0 +1,17 @@
+{
+    "type": "record",
+    "name": "Test",
+    "fields": [
+        {
+            "name": "kind",
+            "type": [
+                "null",
+                {
+                    "name": "Kind",
+                    "type": "enum",
+                    "symbols": ["X", "Y", "Z"]
+                }
+            ]
+        }
+    ]
+}
diff --git a/lang/java/trevni/avro/src/test/cases/enum/input.json b/lang/java/trevni/avro/src/test/cases/enum/input.json
new file mode 100644
index 0000000..c7d335a
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/cases/enum/input.json
@@ -0,0 +1,4 @@
+{"kind":null}
+{"kind":{"Kind":"X"}}
+{"kind":{"Kind":"Y"}}
+{"kind":{"Kind":"Z"}}
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/RandomData.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/RandomData.java
new file mode 100644
index 0000000..8f5976a
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/RandomData.java
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni.avro;
+
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericArray;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+
+import org.apache.trevni.TestUtil;
+
+/** Generates schema data as Java objects with random values. */
+public class RandomData implements Iterable<Object> {
+  public static final String USE_DEFAULT = "use-default";
+
+  private final Schema root;
+  private final int count;
+
+  public RandomData(Schema schema, int count) {
+    this.root = schema;
+    this.count = count;
+  }
+  
+  public Iterator<Object> iterator() {
+    return new Iterator<Object>() {
+      private int n;
+      private Random random = TestUtil.createRandom();
+      public boolean hasNext() { return n < count; }
+      public Object next() {
+        n++;
+        return generate(root, random, 0);
+      }
+      public void remove() { throw new UnsupportedOperationException(); }
+    };
+  }
+  
+  @SuppressWarnings(value="unchecked")
+  private static Object generate(Schema schema, Random random, int d) {
+    switch (schema.getType()) {
+    case RECORD:
+      GenericRecord record = new GenericData.Record(schema);
+      for (Schema.Field field : schema.getFields()) {
+        Object value = (field.getJsonProp(USE_DEFAULT) == null) 
+          ? generate(field.schema(), random, d+1)
+          : GenericData.get().getDefaultValue(field);
+        record.put(field.name(), value);
+      }
+      return record;
+    case ENUM:
+      List<String> symbols = schema.getEnumSymbols();
+      return new GenericData.EnumSymbol
+        (schema, symbols.get(random.nextInt(symbols.size())));
+    case ARRAY:
+      int length = (random.nextInt(5)+2)-d;
+      GenericArray<Object> array =
+        new GenericData.Array(length<=0?0:length, schema);
+      for (int i = 0; i < length; i++)
+        array.add(generate(schema.getElementType(), random, d+1));
+      return array;
+    case MAP:
+      length = (random.nextInt(5)+2)-d;
+      Map<Object,Object> map = new HashMap<Object,Object>(length<=0?0:length);
+      for (int i = 0; i < length; i++) {
+        map.put(TestUtil.randomString(random),
+                generate(schema.getValueType(), random, d+1));
+      }
+      return map;
+    case UNION:
+      List<Schema> types = schema.getTypes();
+      return generate(types.get(random.nextInt(types.size())), random, d);
+    case FIXED:
+      byte[] bytes = new byte[schema.getFixedSize()];
+      random.nextBytes(bytes);
+      return new GenericData.Fixed(schema, bytes);
+    case STRING:  return TestUtil.randomString(random);
+    case BYTES:   return TestUtil.randomBytes(random);
+    case INT:     return random.nextInt();
+    case LONG:    return random.nextLong();
+    case FLOAT:   return random.nextFloat();
+    case DOUBLE:  return random.nextDouble();
+    case BOOLEAN: return random.nextBoolean();
+    case NULL:    return null;
+    default: throw new RuntimeException("Unknown type: "+schema);
+    }
+  }
+
+  public static void main(String[] args) throws Exception {
+    if (args.length != 3) {
+      System.out.println("Usage: RandomData <schemafile> <outputfile> <count>");
+      System.exit(-1);
+    }
+    Schema sch = Schema.parse(new File(args[0]));
+    DataFileWriter<Object> writer =
+      new DataFileWriter<Object>(new GenericDatumWriter<Object>())
+      .create(sch, new File(args[1]));
+    try {
+      for (Object datum : new RandomData(sch, Integer.parseInt(args[2]))) {
+        writer.append(datum);
+      }
+    } finally {
+      writer.close();
+    }
+  }
+}
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestCases.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestCases.java
new file mode 100644
index 0000000..5d659cb
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestCases.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni.avro;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.EOFException;
+import java.io.InputStream;
+import java.io.FileInputStream;
+import java.util.List;
+import java.util.ArrayList;
+
+import org.apache.trevni.ValueType;
+import org.apache.trevni.ColumnMetaData;
+import org.apache.trevni.ColumnFileMetaData;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.Decoder;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.DatumReader;
+import org.apache.avro.generic.GenericDatumReader;
+
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+public class TestCases {
+
+  private static final File DIR = new File("src/test/cases/");
+  private static final File FILE = new File("target", "case.trv");
+
+  @Test public void testCases() throws Exception {
+    for (File f : DIR.listFiles())
+      if (f.isDirectory() && !f.getName().startsWith("."))
+        runCase(f);
+  }
+
+  private void runCase(File dir) throws Exception {
+    Schema schema = Schema.parse(new File(dir, "input.avsc"));
+    List<Object> data = fromJson(schema, new File(dir, "input.json"));
+
+    // write full data
+    AvroColumnWriter<Object> writer =
+      new AvroColumnWriter<Object>(schema, new ColumnFileMetaData());
+    for (Object datum : data)
+      writer.write(datum);
+    writer.writeTo(FILE);
+
+    // test that the full schema reads correctly
+    checkRead(schema, data);
+
+    // test that sub-schemas read correctly
+    for (File f : dir.listFiles())
+      if (f.isDirectory() && !f.getName().startsWith(".")) {
+        Schema s = Schema.parse(new File(f, "sub.avsc"));
+        checkRead(s, fromJson(s, new File(f, "sub.json")));
+      }
+  }
+
+  private void checkRead(Schema s, List<Object> data) throws Exception {
+    AvroColumnReader<Object> reader =
+      new AvroColumnReader<Object>(new AvroColumnReader.Params(FILE)
+                                   .setSchema(s));
+    try {
+      for (Object datum : data)
+        assertEquals(datum, reader.next());
+    } finally {
+      reader.close();
+    }
+  }
+
+  private List<Object> fromJson(Schema schema, File file) throws Exception {
+    InputStream in = new FileInputStream(file);
+    List<Object> data = new ArrayList<Object>();
+    try {
+      DatumReader<Object> reader = new GenericDatumReader<Object>(schema);
+      Decoder decoder = DecoderFactory.get().jsonDecoder(schema, in);
+      while (true)
+        data.add(reader.read(null, decoder));
+    } catch (EOFException e) {
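+      // expected: an EOFException from the decoder signals the end of the JSON input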
+    } finally {
+      in.close();
+    }
+    return data;
+  }
+
+}
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestEvolvedSchema.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestEvolvedSchema.java
new file mode 100644
index 0000000..2b0fc4a
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestEvolvedSchema.java
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni.avro;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+
+import junit.framework.TestCase;
+
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.io.DatumWriter;
+import org.apache.trevni.ColumnFileMetaData;
+import org.apache.trevni.avro.AvroColumnReader.Params;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestEvolvedSchema extends TestCase {
+  private static String writerSchema = "{"
+    + "    \"namespace\": \"org.apache.avro\","
+    + "    \"name\": \"test_evolution\"," + "    \"type\": \"record\","
+    + "    \"fields\": ["
+    + "        { \"name\": \"a\", \"type\":\"string\" },"
+    + "        { \"name\": \"b\", \"type\":\"int\" }"
+    + "     ]"
+    + "}";
+  private static String innerSchema = "{\"name\":\"c1\","
+    + "          \"type\":\"record\","
+    + "          \"fields\":[{\"name\":\"c11\", \"type\":\"int\", \"default\": 2},"
+    + "                      {\"name\":\"c12\", \"type\":\"string\", \"default\":\"goodbye\"}]}";
+  private static String evolvedSchema2 = "{"
+    + "    \"namespace\": \"org.apache.avro\","
+    + "    \"name\": \"test_evolution\"," + "    \"type\": \"record\","
+    + "    \"fields\": ["
+    + "        { \"name\": \"a\", \"type\":\"string\" },"
+    + "        { \"name\": \"b\", \"type\":\"int\" },"
+    + "        { \"name\": \"c\", \"type\":" + innerSchema + ","
+    + "          \"default\":{\"c11\": 1, \"c12\": \"hello\"}"
+    + "        }"
+    + "     ]"
+    + "}";
+
+  GenericData.Record writtenRecord;
+  GenericData.Record evolvedRecord;
+  GenericData.Record innerRecord;
+
+  private static final Schema writer = Schema.parse(writerSchema);
+  private static final Schema evolved = Schema.parse(evolvedSchema2);
+  private static final Schema inner = Schema.parse(innerSchema);
+
+  @Before
+  public void setUp() {
+    writtenRecord = new GenericData.Record(writer);
+    writtenRecord.put("a", "record");
+    writtenRecord.put("b", 21);
+
+    innerRecord = new GenericData.Record(inner);
+    innerRecord.put("c11", 1);
+    innerRecord.put("c12", "hello");
+
+    evolvedRecord = new GenericData.Record(evolved);
+    evolvedRecord.put("a", "record");
+    evolvedRecord.put("b", 21);
+    evolvedRecord.put("c", innerRecord);
+  }
+
+  @Test
+  public void testTrevniEvolvedRead() throws IOException {
+    AvroColumnWriter<GenericRecord> acw =
+      new AvroColumnWriter<GenericRecord>(writer, new ColumnFileMetaData());
+    acw.write(writtenRecord);
+    File serializedTrevni = File.createTempFile("trevni", null);
+    acw.writeTo(serializedTrevni);
+
+    AvroColumnReader.Params params = new Params(serializedTrevni);
+    params.setSchema(evolved);
+    AvroColumnReader<GenericRecord> acr =
+      new AvroColumnReader<GenericRecord>(params);
+    GenericRecord readRecord = acr.next();
+    assertEquals(evolvedRecord, readRecord);
+    assertFalse(acr.hasNext());
+  }
+
+  @Test
+  public void testAvroEvolvedRead() throws IOException {
+    File serializedAvro = File.createTempFile("avro", null);
+    DatumWriter<GenericRecord> dw =
+      new GenericDatumWriter<GenericRecord>(writer);
+    DataFileWriter<GenericRecord> dfw =
+      new DataFileWriter<GenericRecord>(dw);
+    dfw.create(writer, serializedAvro);
+    dfw.append(writtenRecord);
+    dfw.flush();
+    dfw.close();
+
+    GenericDatumReader<GenericRecord> reader =
+      new GenericDatumReader<GenericRecord>(writer);
+    reader.setExpected(evolved);
+    DataFileReader<GenericRecord> dfr =
+      new DataFileReader<GenericRecord>(serializedAvro, reader);
+    GenericRecord readRecord = dfr.next();
+    assertEquals(evolvedRecord, readRecord);
+    assertFalse(dfr.hasNext());
+  }
+
+}
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestMetadataFiltering.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestMetadataFiltering.java
new file mode 100644
index 0000000..920eb89
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestMetadataFiltering.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni.avro;
+
+import org.apache.avro.mapred.AvroJob;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.trevni.ColumnFileMetaData;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+public class TestMetadataFiltering {
+
+  @Test public void testMetadataFiltering() throws Exception {
+    JobConf job = new JobConf();
+    
+    job.set(AvroTrevniOutputFormat.META_PREFIX + "test1", "1");
+    job.set(AvroTrevniOutputFormat.META_PREFIX + "test2", "2");
+    job.set("test3", "3");
+    job.set(AvroJob.TEXT_PREFIX + "test4", "4");
+    job.set(AvroTrevniOutputFormat.META_PREFIX + "test5", "5");
+    
+    ColumnFileMetaData metadata = AvroTrevniOutputFormat.filterMetadata(job);
+    
+    assertTrue(metadata.get("test1") != null);
+    assertTrue(new String(metadata.get("test1")).equals("1"));
+    assertTrue(metadata.get("test2") != null);
+    assertTrue(new String(metadata.get("test2")).equals("2"));
+    assertTrue(metadata.get("test5") != null);
+    assertTrue(new String(metadata.get("test5")).equals("5"));
+    assertTrue(metadata.get("test3") == null);
+    assertTrue(metadata.get("test4") == null);
+    
+  }
+  
+}
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestShredder.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestShredder.java
new file mode 100644
index 0000000..06fdd09
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestShredder.java
@@ -0,0 +1,276 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni.avro;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.trevni.ValueType;
+import org.apache.trevni.ColumnMetaData;
+import org.apache.trevni.ColumnFileMetaData;
+
+import org.apache.avro.Schema;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+public class TestShredder {
+
+  private static final int COUNT = 100;
+  private static final File FILE = new File("target", "test.trv");
+
+  @Test public void testPrimitives() throws Exception {
+    check(Schema.create(Schema.Type.NULL),
+          new ColumnMetaData("null", ValueType.NULL));
+    check(Schema.create(Schema.Type.BOOLEAN),
+          new ColumnMetaData("boolean", ValueType.BOOLEAN));
+
+    check(Schema.create(Schema.Type.INT),
+          new ColumnMetaData("int", ValueType.INT));
+    check(Schema.create(Schema.Type.LONG),
+          new ColumnMetaData("long", ValueType.LONG));
+
+    check(Schema.create(Schema.Type.FLOAT),
+          new ColumnMetaData("float", ValueType.FLOAT));
+    check(Schema.create(Schema.Type.DOUBLE),
+          new ColumnMetaData("double", ValueType.DOUBLE));
+
+    check(Schema.create(Schema.Type.BYTES),
+          new ColumnMetaData("bytes", ValueType.BYTES));
+    check(Schema.create(Schema.Type.STRING),
+          new ColumnMetaData("string", ValueType.STRING));
+
+    check(Schema.createEnum("E", null, null, Arrays.asList("X","Y","Z")),
+          new ColumnMetaData("E", ValueType.INT));
+    check(Schema.createFixed("F", null, null, 5),
+          new ColumnMetaData("F", ValueType.BYTES));
+  }
+
+  private static final String SIMPLE_FIELDS =
+    "{\"name\":\"x\",\"type\":\"int\"},"+
+    "{\"name\":\"y\",\"type\":\"string\"}";
+
+  private static final String SIMPLE_RECORD =
+    "{\"type\":\"record\",\"name\":\"R\",\"fields\":["
+    +SIMPLE_FIELDS
+    +"]}";
+
+  @Test public void testSimpleRecord() throws Exception {
+    check(Schema.parse(SIMPLE_RECORD),
+          new ColumnMetaData("x", ValueType.INT),
+          new ColumnMetaData("y", ValueType.STRING));
+  }
+
+  @Test public void testDefaultValue() throws Exception {
+    String s = 
+      "{\"type\":\"record\",\"name\":\"R\",\"fields\":["
+      +SIMPLE_FIELDS+","
+      +"{\"name\":\"z\",\"type\":\"int\","
+      +"\"default\":1,\""+RandomData.USE_DEFAULT+"\":true}"
+      +"]}";
+    checkWrite(Schema.parse(SIMPLE_RECORD));
+    checkRead(Schema.parse(s));
+  }
+
+  @Test public void testNestedRecord() throws Exception {
+    String s = 
+      "{\"type\":\"record\",\"name\":\"S\",\"fields\":["
+      +"{\"name\":\"x\",\"type\":\"int\"},"
+      +"{\"name\":\"R\",\"type\":"+SIMPLE_RECORD+"},"
+      +"{\"name\":\"y\",\"type\":\"string\"}"
+      +"]}";
+    check(Schema.parse(s),
+          new ColumnMetaData("x", ValueType.INT),
+          new ColumnMetaData("R#x", ValueType.INT),
+          new ColumnMetaData("R#y", ValueType.STRING),
+          new ColumnMetaData("y", ValueType.STRING));
+  }
+
+  @Test public void testNamedRecord() throws Exception {
+    String s =
+      "{\"type\":\"record\",\"name\":\"S\",\"fields\":["
+      +"{\"name\":\"R1\",\"type\":"+SIMPLE_RECORD+"},"
+      +"{\"name\":\"R2\",\"type\":\"R\"}"
+      +"]}";
+    check(Schema.parse(s),
+          new ColumnMetaData("R1#x", ValueType.INT),
+          new ColumnMetaData("R1#y", ValueType.STRING),
+          new ColumnMetaData("R2#x", ValueType.INT),
+          new ColumnMetaData("R2#y", ValueType.STRING));
+  }
+  
+  @Test public void testSimpleArray() throws Exception {
+    String s = "{\"type\":\"array\",\"items\":\"long\"}";
+    check(Schema.parse(s),
+          new ColumnMetaData("[]", ValueType.LONG).isArray(true));
+  }
+
+  private static final String RECORD_ARRAY = 
+    "{\"type\":\"array\",\"items\":"+SIMPLE_RECORD+"}";
+
+  @Test public void testArray() throws Exception {
+    ColumnMetaData p = new ColumnMetaData("[]", ValueType.NULL).isArray(true);
+    check(Schema.parse(RECORD_ARRAY),
+          p,
+          new ColumnMetaData("[]#x", ValueType.INT).setParent(p),
+          new ColumnMetaData("[]#y", ValueType.STRING).setParent(p));
+  }
+
+  @Test public void testSimpleUnion() throws Exception {
+    String s = "[\"int\",\"string\"]";
+    check(Schema.parse(s),
+          new ColumnMetaData("int", ValueType.INT).isArray(true),
+          new ColumnMetaData("string", ValueType.STRING).isArray(true));
+  }
+
+  @Test public void testSimpleOptional() throws Exception {
+    String s = "[\"null\",\"string\"]";
+    check(Schema.parse(s),
+          new ColumnMetaData("string", ValueType.STRING).isArray(true));
+  }
+
+  private static final String UNION = "[\"null\",\"int\","+SIMPLE_RECORD+"]";
+
+  @Test public void testUnion() throws Exception {
+    ColumnMetaData p = new ColumnMetaData("R", ValueType.NULL).isArray(true);
+    check(Schema.parse(UNION),
+          new ColumnMetaData("int", ValueType.INT).isArray(true),
+          p,
+          new ColumnMetaData("R#x", ValueType.INT).setParent(p),
+          new ColumnMetaData("R#y", ValueType.STRING).setParent(p));
+  }
+
+  @Test public void testNestedArray() throws Exception {
+    String s = 
+      "{\"type\":\"record\",\"name\":\"S\",\"fields\":["
+      +"{\"name\":\"x\",\"type\":\"int\"},"
+      +"{\"name\":\"A\",\"type\":"+RECORD_ARRAY+"},"
+      +"{\"name\":\"y\",\"type\":\"string\"}"
+      +"]}";
+    ColumnMetaData p = new ColumnMetaData("A[]", ValueType.NULL).isArray(true);
+    check(Schema.parse(s),
+          new ColumnMetaData("x", ValueType.INT),
+          p,
+          new ColumnMetaData("A[]#x", ValueType.INT).setParent(p),
+          new ColumnMetaData("A[]#y", ValueType.STRING).setParent(p),
+          new ColumnMetaData("y", ValueType.STRING));
+  }
+
+  @Test public void testNestedUnion() throws Exception {
+    String s = 
+      "{\"type\":\"record\",\"name\":\"S\",\"fields\":["
+      +"{\"name\":\"x\",\"type\":\"int\"},"
+      +"{\"name\":\"u\",\"type\":"+UNION+"},"
+      +"{\"name\":\"y\",\"type\":\"string\"}"
+      +"]}";
+    ColumnMetaData p = new ColumnMetaData("u/R", ValueType.NULL).isArray(true);
+    check(Schema.parse(s),
+          new ColumnMetaData("x", ValueType.INT),
+          new ColumnMetaData("u/int", ValueType.INT).isArray(true),
+          p,
+          new ColumnMetaData("u/R#x", ValueType.INT).setParent(p),
+          new ColumnMetaData("u/R#y", ValueType.STRING).setParent(p),
+          new ColumnMetaData("y", ValueType.STRING));
+  }
+
+  @Test public void testUnionInArray() throws Exception {
+    String s = 
+      "{\"type\":\"record\",\"name\":\"S\",\"fields\":["
+      +"{\"name\":\"a\",\"type\":{\"type\":\"array\",\"items\":"+UNION+"}}"
+      +"]}";
+    ColumnMetaData p = new ColumnMetaData("a[]",ValueType.NULL).isArray(true);
+    ColumnMetaData r = new ColumnMetaData("a[]/R", ValueType.NULL)
+      .setParent(p)
+      .isArray(true);
+      check(Schema.parse(s),
+          p,
+          new ColumnMetaData("a[]/int", ValueType.INT)
+            .setParent(p)
+            .isArray(true),
+          r,
+          new ColumnMetaData("a[]/R#x", ValueType.INT).setParent(r),
+          new ColumnMetaData("a[]/R#y", ValueType.STRING).setParent(r));
+  }
+
+  @Test public void testArrayInUnion() throws Exception {
+    String s = 
+      "{\"type\":\"record\",\"name\":\"S\",\"fields\":["
+      +"{\"name\":\"a\",\"type\":[\"int\","+RECORD_ARRAY+"]}]}";
+    ColumnMetaData q = new ColumnMetaData("a/array",ValueType.NULL)
+      .isArray(true);
+    ColumnMetaData r = new ColumnMetaData("a/array[]", ValueType.NULL)
+      .setParent(q)
+      .isArray(true);
+    check(Schema.parse(s),
+          new ColumnMetaData("a/int", ValueType.INT).isArray(true),
+          q,
+          r,
+          new ColumnMetaData("a/array[]#x", ValueType.INT).setParent(r),
+          new ColumnMetaData("a/array[]#y", ValueType.STRING).setParent(r));
+  }
+
+  @Test public void testSimpleMap() throws Exception {
+    String s = "{\"type\":\"map\",\"values\":\"long\"}";
+    ColumnMetaData p = new ColumnMetaData(">", ValueType.NULL).isArray(true);
+    check(Schema.parse(s),
+          p,
+          new ColumnMetaData(">key", ValueType.STRING).setParent(p),
+          new ColumnMetaData(">value", ValueType.LONG).setParent(p));
+  }
+
+  @Test public void testMap() throws Exception {
+    String s = "{\"type\":\"map\",\"values\":"+SIMPLE_RECORD+"}";
+    ColumnMetaData p = new ColumnMetaData(">", ValueType.NULL).isArray(true);
+    check(Schema.parse(s),
+          p,
+          new ColumnMetaData(">key", ValueType.STRING).setParent(p),
+          new ColumnMetaData(">value#x", ValueType.INT).setParent(p),
+          new ColumnMetaData(">value#y", ValueType.STRING).setParent(p));
+  }
+
+  private void check(Schema s, ColumnMetaData... expected) throws Exception {
+    ColumnMetaData[] shredded = new AvroColumnator(s).getColumns();
+    assertEquals(expected.length, shredded.length);
+    for (int i = 0; i < expected.length; i++)
+      assertEquals(expected[i].toString(), shredded[i].toString());
+    checkWrite(s);
+    checkRead(s);
+  }
+
+  private void checkWrite(Schema schema) throws IOException {
+    AvroColumnWriter<Object> writer =
+      new AvroColumnWriter<Object>(schema, new ColumnFileMetaData());
+    for (Object datum : new RandomData(schema, COUNT))
+      writer.write(datum);
+    writer.writeTo(FILE);
+  }
+
+  private void checkRead(Schema schema) throws IOException {
+    AvroColumnReader<Object> reader =
+      new AvroColumnReader<Object>(new AvroColumnReader.Params(FILE)
+                                   .setSchema(schema));
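+    // Assumes RandomData is deterministic for a given schema and count, so
+    // this iteration yields the same sequence that checkWrite wrote.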
+    for (Object expected : new RandomData(schema, COUNT))
+      assertEquals(expected, reader.next());
+    reader.close();
+  }
+
+}
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestWordCount.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestWordCount.java
new file mode 100644
index 0000000..d928a9f
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestWordCount.java
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro;
+
+import java.io.IOException;
+import java.util.StringTokenizer;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.lib.NullOutputFormat;
+import org.apache.hadoop.mapred.Reporter;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.mapred.AvroJob;
+import org.apache.avro.mapred.Pair;
+import org.apache.avro.mapred.AvroMapper;
+import org.apache.avro.mapred.AvroReducer;
+import org.apache.avro.mapred.AvroCollector;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+
+public class TestWordCount {
+
+  public static class MapImpl extends AvroMapper<String, Pair<String, Long>> {
+    @Override
+    public void map(String text, AvroCollector<Pair<String,Long>> collector,
+                    Reporter reporter) throws IOException {
+      StringTokenizer tokens = new StringTokenizer(text);
+      while (tokens.hasMoreTokens())
+        collector.collect(new Pair<String,Long>(tokens.nextToken(), 1L));
+    }
+  }
+
+  public static class ReduceImpl
+    extends AvroReducer<String, Long, Pair<String, Long>> {
+    @Override
+    public void reduce(String word, Iterable<Long> counts,
+                       AvroCollector<Pair<String,Long>> collector,
+                       Reporter reporter) throws IOException {
+      long sum = 0;
+      for (long count : counts)
+        sum += count;
+      collector.collect(new Pair<String,Long>(word, sum));
+    }
+  }
+
+  @Test public void runTestsInOrder() throws Exception {
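+    // testInputFormat reads the files written by testOutputFormat, so the
+    // two must run in this order (hence a single ordered @Test).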
+    testOutputFormat();
+    testInputFormat();
+  }
+
+  static final Schema STRING = Schema.create(Schema.Type.STRING);
+  static { GenericData.setStringType(STRING, GenericData.StringType.String); }
+  static final Schema LONG = Schema.create(Schema.Type.LONG);
+
+  public void testOutputFormat() throws Exception {
+    JobConf job = new JobConf();
+    
+    WordCountUtil wordCountUtil = new WordCountUtil("trevniMapredTest");
+    
+    wordCountUtil.writeLinesFile();
+    
+    AvroJob.setInputSchema(job, STRING);
+    AvroJob.setOutputSchema(job, Pair.getPairSchema(STRING,LONG));
+    
+    AvroJob.setMapperClass(job, MapImpl.class);        
+    AvroJob.setCombinerClass(job, ReduceImpl.class);
+    AvroJob.setReducerClass(job, ReduceImpl.class);
+    
+    FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/in"));
+    FileOutputFormat.setOutputPath(job, new Path(wordCountUtil.getDir().toString() + "/out"));
+    FileOutputFormat.setCompressOutput(job, true);
+    
+    job.setOutputFormat(AvroTrevniOutputFormat.class);
+
+    JobClient.runJob(job);
+    
+    wordCountUtil.validateCountsFile();
+  }
+
+  private static long total;
+
+  public static class Counter extends AvroMapper<GenericRecord,Void> {
+    @Override public void map(GenericRecord r, AvroCollector<Void> collector,
+                              Reporter reporter) throws IOException {
+      total += (Long)r.get("value");
+    }
+  }
+  
+  public void testInputFormat() throws Exception {
+    JobConf job = new JobConf();
+
+    WordCountUtil wordCountUtil = new WordCountUtil("trevniMapredTest");
+
+    Schema subSchema = Schema.parse("{\"type\":\"record\"," +
+                                    "\"name\":\"PairValue\","+
+                                    "\"fields\": [ " + 
+                                    "{\"name\":\"value\", \"type\":\"long\"}" + 
+                                    "]}");
+    AvroJob.setInputSchema(job, subSchema);
+    AvroJob.setMapperClass(job, Counter.class);        
+    FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/out/*"));
+    job.setInputFormat(AvroTrevniInputFormat.class);
+
+    job.setNumReduceTasks(0);                     // map-only
+    job.setOutputFormat(NullOutputFormat.class);  // ignore output
+
+    total = 0;
+    JobClient.runJob(job);
+    assertEquals(WordCountUtil.TOTAL, total);
+  }
+
+
+}
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/WordCountUtil.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/WordCountUtil.java
new file mode 100644
index 0000000..68af7a3
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/WordCountUtil.java
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+import java.util.StringTokenizer;
+import java.util.TreeMap;
+
+import org.apache.hadoop.fs.FileUtil;
+
+import org.apache.avro.Schema;
+import org.apache.avro.io.DatumWriter;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.mapred.Pair;
+
+public class WordCountUtil {
+
+  public File dir;
+  public File linesFiles;
+  public File countFiles;
+
+  public WordCountUtil(String testName) {
+    this(testName, "part-00000");
+  }
+  
+  public WordCountUtil(String testName, String partDirName) {
+    dir = new File("target/wc", testName);
+    linesFiles = new File(new File(dir, "in"), "lines.avro");
+    countFiles = new File(new File(dir, "out"), partDirName + "/part-0.trv");
+  }
+  
+  public static final String[] LINES = new String[] {
+    "the quick brown fox jumps over the lazy dog",
+    "the cow jumps over the moon",
+    "the rain in spain falls mainly on the plains"
+  };
+
+  public static final Map<String,Long> COUNTS = new TreeMap<String,Long>();
+  public static final long TOTAL;
+  static {
+    long total = 0;
+    for (String line : LINES) {
+      StringTokenizer tokens = new StringTokenizer(line);
+      while (tokens.hasMoreTokens()) {
+        String word = tokens.nextToken();
+        long count = COUNTS.containsKey(word) ? COUNTS.get(word) : 0L;
+        count++;
+        total++;
+        COUNTS.put(word, count);
+      }
+    }
+    TOTAL = total;
+  }
+
+  public File getDir() {
+    return dir;
+  }
+  
+  public void writeLinesFile() throws IOException {
+    FileUtil.fullyDelete(dir);
+    DatumWriter<String> writer = new GenericDatumWriter<String>();
+    DataFileWriter<String> out = new DataFileWriter<String>(writer);
+    linesFiles.getParentFile().mkdirs();
+    out.create(Schema.create(Schema.Type.STRING), linesFiles);
+    for (String line : LINES)
+      out.append(line);
+    out.close();
+  }
+
+  public void validateCountsFile() throws Exception {
+    AvroColumnReader<Pair<String,Long>> reader =
+      new AvroColumnReader<Pair<String,Long>>
+      (new AvroColumnReader.Params(countFiles).setModel(SpecificData.get()));
+    int numWords = 0;
+    for (Pair<String,Long> wc : reader) {
+      assertEquals(wc.key(), COUNTS.get(wc.key()), wc.value());
+      numWords++;
+    }
+    reader.close();
+    assertEquals(COUNTS.size(), numWords);
+  }
+  
+  public void validateCountsFileGenericRecord() throws Exception {
+    AvroColumnReader<GenericRecord> reader =
+      new AvroColumnReader<GenericRecord>
+      (new AvroColumnReader.Params(countFiles).setModel(SpecificData.get()));
+    int numWords = 0;
+    for (GenericRecord wc : reader) {
+      assertEquals((String)wc.get("key"), COUNTS.get(wc.get("key")),
+                   (Long)wc.get("value"));
+      numWords++;
+    }
+    reader.close();
+    assertEquals(COUNTS.size(), numWords);
+  }
+
+}
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyValueWordCount.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyValueWordCount.java
new file mode 100644
index 0000000..deea1ca
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyValueWordCount.java
@@ -0,0 +1,159 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro.mapreduce;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+import java.util.StringTokenizer;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.AvroValue;
+import org.apache.avro.mapreduce.AvroJob;
+import org.apache.avro.mapreduce.AvroKeyInputFormat;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
+import org.apache.trevni.avro.WordCountUtil;
+import org.junit.Test;
+
+public class TestKeyValueWordCount {
+  
+  private static long total = 0;
+
+  static final Schema STRING = Schema.create(Schema.Type.STRING);
+  static { GenericData.setStringType(STRING, GenericData.StringType.String); }
+  static final Schema LONG = Schema.create(Schema.Type.LONG);
+  
+  private static class WordCountMapper extends
+      Mapper<AvroKey<String>, NullWritable, Text, LongWritable> {
+    private LongWritable mCount = new LongWritable();
+    private Text mText = new Text();
+
+    @Override
+    protected void setup(Context context) {
+      mCount.set(1);
+    }
+
+    @Override
+    protected void map(AvroKey<String> key, NullWritable value, Context context)
+        throws IOException, InterruptedException {
+
+      try {
+        StringTokenizer tokens = new StringTokenizer(key.datum());
+        while (tokens.hasMoreTokens()) {
+          mText.set(tokens.nextToken());
+          context.write(mText, mCount);
+        }
+      } catch (Exception e) {
+        throw new RuntimeException(key + " " + key.datum(), e);
+      }
+
+    }
+  }
+  
+  private static class WordCountReducer
+      extends Reducer<Text, LongWritable, AvroKey<String>, AvroValue<Long>> {
+    
+    AvroKey<String> resultKey = new AvroKey<String>();
+    AvroValue<Long> resultValue = new AvroValue<Long>();
+    
+    @Override
+    protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
+      long sum = 0;
+      for (LongWritable value: values) {
+        sum += value.get();
+      }
+      resultKey.datum(key.toString());
+      resultValue.datum(sum);
+      
+      context.write(resultKey, resultValue);
+    }
+  }
+   
+  public static class Counter extends
+  Mapper<AvroKey<String>, AvroValue<Long>, NullWritable, NullWritable> {
+    @Override
+    protected void map(AvroKey<String> key, AvroValue<Long> value, Context context)
+        throws IOException, InterruptedException {
+      total += value.datum();
+    }
+  }  
+  
+  @Test public void testIOFormat() throws Exception {
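+    // checkInputFormat reads the files written by checkOutputFormat, so
+    // the two checks must run in this order.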
+    checkOutputFormat();
+    checkInputFormat();
+  }
+
+  public void checkOutputFormat() throws Exception {
+    Job job = new Job();
+    
+    WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyValueTest", "part-r-00000");
+     
+    wordCountUtil.writeLinesFile();
+    
+    AvroJob.setInputKeySchema(job, STRING);
+    AvroJob.setOutputKeySchema(job, STRING);
+    AvroJob.setOutputValueSchema(job, LONG);
+    
+    job.setMapperClass(WordCountMapper.class);
+    job.setReducerClass(WordCountReducer.class);
+    
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(LongWritable.class);
+    
+    FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/in"));
+    FileOutputFormat.setOutputPath(job, new Path(wordCountUtil.getDir().toString() + "/out"));
+    FileOutputFormat.setCompressOutput(job, true);
+    
+    job.setInputFormatClass(AvroKeyInputFormat.class);
+    job.setOutputFormatClass(AvroTrevniKeyValueOutputFormat.class);
+
+    job.waitForCompletion(true);
+    
+    wordCountUtil.validateCountsFileGenericRecord();
+  }
+  
+  public void checkInputFormat() throws Exception {
+    Job job = new Job();
+    
+    WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyValueTest");
+    
+    job.setMapperClass(Counter.class);
+    
+    FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/out/*"));
+    job.setInputFormatClass(AvroTrevniKeyValueInputFormat.class);
+    
+    job.setNumReduceTasks(0);
+    job.setOutputFormatClass(NullOutputFormat.class);
+    
+    total = 0;
+    job.waitForCompletion(true);
+    assertEquals(WordCountUtil.TOTAL, total);
+    
+  }
+}
diff --git a/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyWordCount.java b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyWordCount.java
new file mode 100644
index 0000000..8623fb4
--- /dev/null
+++ b/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyWordCount.java
@@ -0,0 +1,178 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni.avro.mapreduce;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+import java.util.StringTokenizer;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericData.Record;
+import org.apache.avro.mapreduce.AvroJob;
+import org.apache.avro.mapreduce.AvroKeyInputFormat;
+import org.apache.avro.mapred.AvroKey;
+import org.apache.avro.mapred.Pair;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
+import org.apache.trevni.avro.WordCountUtil;
+import org.junit.Test;
+
+public class TestKeyWordCount {
+  
+  private static long total = 0;
+
+  static final Schema STRING = Schema.create(Schema.Type.STRING);
+  static { GenericData.setStringType(STRING, GenericData.StringType.String); }
+  static final Schema LONG = Schema.create(Schema.Type.LONG);
+  
+  
+  private static class WordCountMapper extends
+      Mapper<AvroKey<String>, NullWritable, Text, LongWritable> {
+    private LongWritable mCount = new LongWritable();
+    private Text mText = new Text();
+
+    @Override
+    protected void setup(Context context) {
+      mCount.set(1);
+    }
+
+    @Override
+    protected void map(AvroKey<String> key, NullWritable value, Context context)
+        throws IOException, InterruptedException {
+
+      try {
+        StringTokenizer tokens = new StringTokenizer(key.datum());
+        while (tokens.hasMoreTokens()) {
+          mText.set(tokens.nextToken());
+          context.write(mText, mCount);
+        }
+      } catch (Exception e) {
+        throw new RuntimeException(key + " " + key.datum(), e);
+      }
+
+    }
+  }
+  
+  private static class WordCountReducer
+      extends Reducer<Text, LongWritable, AvroKey<GenericData.Record>, NullWritable> {
+    
+    private AvroKey<GenericData.Record> result;
+    
+    @Override
+    protected void setup(Context context) {
+      result = new AvroKey<GenericData.Record>();
+      result.datum(new Record(Pair.getPairSchema(STRING,LONG)));
+    }
+    
+    @Override
+    protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
+      long count = 0;
+      for (LongWritable value: values) {
+        count += value.get();
+      }
+      
+      result.datum().put("key", key.toString());
+      result.datum().put("value", count);
+      
+      context.write(result, NullWritable.get());
+    }
+  }
+
+  public static class Counter extends
+  Mapper<AvroKey<GenericData.Record>, NullWritable, NullWritable, NullWritable> {
+    @Override
+    protected void map(AvroKey<GenericData.Record> key, NullWritable value, Context context)
+        throws IOException, InterruptedException {
+      total += (Long)key.datum().get("value");
+    }
+  }
+
+  @Test public void testIOFormat() throws Exception {
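+    // checkInputFormat reads the files written by checkOutputFormat, so
+    // the two checks must run in this order.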
+    checkOutputFormat();
+    checkInputFormat();
+  }
+
+  public void checkOutputFormat() throws Exception {
+    Job job = new Job();
+    
+    WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyTest", "part-r-00000");
+    
+    wordCountUtil.writeLinesFile();
+    
+    AvroJob.setInputKeySchema(job, STRING);
+    AvroJob.setOutputKeySchema(job, Pair.getPairSchema(STRING,LONG));
+    
+    job.setMapperClass(WordCountMapper.class);
+    job.setReducerClass(WordCountReducer.class);
+    
+    job.setMapOutputKeyClass(Text.class);
+    job.setMapOutputValueClass(LongWritable.class);
+    
+    FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/in"));
+    FileOutputFormat.setOutputPath(job, new Path(wordCountUtil.getDir().toString() + "/out"));
+    FileOutputFormat.setCompressOutput(job, true);
+    
+    job.setInputFormatClass(AvroKeyInputFormat.class);
+    job.setOutputFormatClass(AvroTrevniKeyOutputFormat.class);
+
+    job.waitForCompletion(true);
+    
+    wordCountUtil.validateCountsFile();
+  }
+  
+  public void checkInputFormat() throws Exception {
+    Job job = new Job();
+    
+    WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyTest");
+    
+    job.setMapperClass(Counter.class);
+
+    Schema subSchema = Schema.parse("{\"type\":\"record\"," +
+                                    "\"name\":\"PairValue\","+
+                                    "\"fields\": [ " + 
+                                    "{\"name\":\"value\", \"type\":\"long\"}" + 
+                                    "]}");
+    AvroJob.setInputKeySchema(job, subSchema);
+    
+    FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/out/*"));
+    job.setInputFormatClass(AvroTrevniKeyInputFormat.class);
+    
+    job.setNumReduceTasks(0);
+    job.setOutputFormatClass(NullOutputFormat.class);
+    
+    total = 0;
+    job.waitForCompletion(true);
+    assertEquals(WordCountUtil.TOTAL, total);
+    
+  }
+  
+}
diff --git a/lang/java/trevni/checkstyle.xml b/lang/java/trevni/checkstyle.xml
new file mode 100644
index 0000000..a51089a
--- /dev/null
+++ b/lang/java/trevni/checkstyle.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<!DOCTYPE module PUBLIC
+    "-//Puppy Crawl//DTD Check Configuration 1.2//EN"
+    "http://www.puppycrawl.com/dtds/configuration_1_2.dtd">
+
+<module name="Checker">
+    <module name="FileTabCharacter"/>
+    <module name="NewlineAtEndOfFile">
+        <property name="lineSeparator" value="lf"/>
+    </module>
+
+    <module name="TreeWalker">
+        <module name="ConstantName"/>
+        <module name="LocalFinalVariableName"/>
+        <module name="LocalVariableName"/>
+        <module name="MemberName"/>
+        <module name="MethodName"/>
+        <module name="PackageName"/>
+        <module name="ParameterName"/>
+        <module name="StaticVariableName"/>
+        <module name="TypeName"/>
+
+        <module name="AvoidStarImport"/>
+        <module name="RedundantImport"/>
+        <module name="UnusedImports"/>
+
+        <module name="RedundantModifier"/>
+
+        <module name="EmptyStatement"/>
+        <module name="IllegalInstantiation"/>
+        <module name="RedundantThrows"/>
+        <module name="SimplifyBooleanExpression"/>
+        <module name="SimplifyBooleanReturn"/>
+
+        <module name="InterfaceIsType"/>
+
+        <module name="ArrayTypeStyle"/>
+        <module name="UpperEll"/>
+
+    </module>
+</module>
diff --git a/lang/java/trevni/core/pom.xml b/lang/java/trevni/core/pom.xml
new file mode 100644
index 0000000..c500444
--- /dev/null
+++ b/lang/java/trevni/core/pom.xml
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>trevni-java</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.8.0</version>
+    <relativePath>../</relativePath>
+  </parent>
+
+  <artifactId>trevni-core</artifactId>
+  <name>Trevni Java Core</name>
+  <url>http://avro.apache.org/</url>
+  <description>Trevni Java Core</description>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.xerial.snappy</groupId>
+      <artifactId>snappy-java</artifactId>
+      <version>${snappy.version}</version>
+      <scope>compile</scope>
+    </dependency>    
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-compress</artifactId>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
+
+  <profiles>
+  </profiles>
+
+</project>
+
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ArrayColumnOutputBuffer.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ArrayColumnOutputBuffer.java
new file mode 100644
index 0000000..d471aef
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ArrayColumnOutputBuffer.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+
+/** A column output buffer for array columns. */
+class ArrayColumnOutputBuffer extends ColumnOutputBuffer {
+  private int length;                             // remaining in current array
+
+  private static final int NONE = -1;
+
+  private int runLength;                          // length of current run
+  private int runValue = NONE;                    // what kind of run
+
+  public ArrayColumnOutputBuffer(ColumnFileWriter writer, ColumnMetaData meta)
+    throws IOException {
+    super(writer, meta);
+    assert getMeta().isArray() || getMeta().getParent() != null;
+    assert !getMeta().hasIndexValues();
+  }
+
+  @Override public void writeLength(int l) throws IOException {
+    assert this.length == 0;
+    assert l >= 0;
+    this.length = l;
+    if (l == runValue) {
+      runLength++;                                // continue a run
+      return;
+    }
+    flushRun();                                   // end a run
+    if (l == 1 || l == 0) {
+      runLength = 1;                              // start a run
+      runValue = l;
+    } else {
+      getBuffer().writeLength(l);                 // not a run
+    }
+  }
+
+  @Override public void writeValue(Object value) throws IOException {
+    assert length > 0;
+    if (getMeta().getType() != ValueType.NULL) {
+      flushRun();
+      getBuffer().writeValue(value, getMeta().getType());
+    }
+    length -= 1;
+  }
+
+  @Override void flushBuffer() throws IOException {
+    flushRun();
+    super.flushBuffer();
+  }
+
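+  // Runs of the common array lengths 0 and 1 are run-length encoded: a
+  // single occurrence is written as the plain length, while a run of
+  // runLength > 1 is written as the single negative value
+  // (3-runValue)-(runLength<<1).  Plain lengths are never negative, and
+  // the value's parity recovers runValue; e.g. five consecutive empty
+  // arrays encode as 3 - 10 = -7.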
+  private void flushRun() throws IOException {
+    if (runLength == 0)                           // not in run
+      return;
+    else if (runLength == 1)                      // single value
+      getBuffer().writeLength(runValue);
+    else                                          // a run
+      getBuffer().writeLength((3-runValue)-(runLength<<1));
+
+    runLength = 0;                                // reset
+    runValue = NONE;
+  }
+
+}
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/BZip2Codec.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/BZip2Codec.java
new file mode 100644
index 0000000..1c0d64b
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/BZip2Codec.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
+import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
+
+public class BZip2Codec extends Codec {
+
+  private ByteArrayOutputStream outputBuffer;
+  public static final int DEFAULT_BUFFER_SIZE = 64 * 1024;
+  
+  @Override
+  ByteBuffer compress(ByteBuffer uncompressedData) throws IOException {
+    ByteArrayOutputStream baos = getOutputBuffer(uncompressedData.remaining());
+    BZip2CompressorOutputStream outputStream = new BZip2CompressorOutputStream(baos);
+
+    try {
+      // Respect the buffer's position and limit; the backing array may
+      // contain more than the data to be compressed.
+      outputStream.write(uncompressedData.array(), uncompressedData.position(),
+                         uncompressedData.remaining());
+    } finally {
+      outputStream.close();
+    }
+
+    return ByteBuffer.wrap(baos.toByteArray());
+  }
+
+  @Override
+  ByteBuffer decompress(ByteBuffer compressedData) throws IOException {
+    ByteArrayInputStream bais = new ByteArrayInputStream(compressedData.array(),
+        compressedData.position(), compressedData.remaining());
+    BZip2CompressorInputStream inputStream = new BZip2CompressorInputStream(bais);
+    try {
+      ByteArrayOutputStream baos = new ByteArrayOutputStream();
+      byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
+
+      // Always read into the start of the scratch buffer, never past its end.
+      int readCount;
+      while ((readCount = inputStream.read(buffer, 0, buffer.length)) > 0)
+        baos.write(buffer, 0, readCount);
+
+      return ByteBuffer.wrap(baos.toByteArray());
+    } finally {
+      inputStream.close();
+    }
+  }
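+
+  // Round-trip sketch: decompress(compress(buf)) yields the original bytes
+  // for any heap ByteBuffer buf.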
+  
+  private ByteArrayOutputStream getOutputBuffer(int suggestedLength) {
+    if (null == outputBuffer)
+      outputBuffer = new ByteArrayOutputStream(suggestedLength);
+    outputBuffer.reset();
+    return outputBuffer;
+  }
+  
+}
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/BlockDescriptor.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/BlockDescriptor.java
new file mode 100644
index 0000000..5f2c0f3
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/BlockDescriptor.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+
+class BlockDescriptor {
+  int rowCount;
+  int uncompressedSize;
+  int compressedSize;
+
+  BlockDescriptor() {}
+  
+  BlockDescriptor(int rowCount, int uncompressedSize, int compressedSize) {
+    this.rowCount = rowCount;
+    this.uncompressedSize = uncompressedSize;
+    this.compressedSize = compressedSize;
+  }
+  
+  public void writeTo(OutputBuffer out) throws IOException {
+    out.writeFixed32(rowCount);
+    out.writeFixed32(uncompressedSize);
+    out.writeFixed32(compressedSize);
+  }
+
+  public static BlockDescriptor read(InputBuffer in) throws IOException {
+    BlockDescriptor result = new BlockDescriptor();
+    result.rowCount = in.readFixed32();
+    result.uncompressedSize = in.readFixed32();
+    result.compressedSize = in.readFixed32();
+    return result;
+  }
+
+}
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/Checksum.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/Checksum.java
new file mode 100644
index 0000000..70f656c
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/Checksum.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.nio.ByteBuffer;
+
+/** Base class for checksum algorithms. */
+abstract class Checksum {
+
+  public static Checksum get(MetaData meta) {
+    String name = meta.getChecksum();
+    if (name == null || "null".equals(name))
+      return new NullChecksum();
+    else if ("crc32".equals(name))
+      return new Crc32Checksum();
+    else
+      throw new TrevniRuntimeException("Unknown checksum: "+name);
+  }
+
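+  // E.g. with "crc32" every block is followed by a 4-byte CRC-32; readers
+  // use size() when computing block offsets (see ColumnDescriptor).
+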
+  /** The number of bytes per checksum. */
+  public abstract int size();
+
+  /** Compute a checksum. */
+  public abstract ByteBuffer compute(ByteBuffer data);
+
+}
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/Codec.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/Codec.java
new file mode 100644
index 0000000..098ed73
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/Codec.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/** Base class for compression codecs. */
+abstract class Codec {
+
+  public static Codec get(MetaData meta) {
+    String name = meta.getCodec();
+    if (name == null || "null".equals(name))
+      return new NullCodec();
+    else if ("deflate".equals(name))
+      return new DeflateCodec();
+    else if ("snappy".equals(name))
+      return new SnappyCodec();
+    else if ("bzip2".equals(name))
+      return new BZip2Codec();
+    else
+      throw new TrevniRuntimeException("Unknown codec: "+name);
+  }
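+
+  // For example, a file whose metadata names its codec "bzip2" has blocks
+  // decompressed by BZip2Codec; a missing name or "null" selects the
+  // pass-through NullCodec.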
+
+  /** Compress data. */
+  abstract ByteBuffer compress(ByteBuffer uncompressedData) throws IOException;
+
+  /** Decompress data. */
+  abstract ByteBuffer decompress(ByteBuffer compressedData) throws IOException;
+
+}
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnDescriptor.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnDescriptor.java
new file mode 100644
index 0000000..6a052d1
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnDescriptor.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+
+import java.util.Arrays;
+
+class ColumnDescriptor<T extends Comparable> {
+  final Input file;
+  final ColumnMetaData metaData;
+
+  long start;
+  long dataStart;
+
+  BlockDescriptor[] blocks;
+
+  long[] blockStarts;                             // for random access
+  long[] firstRows;                               // for binary searches
+  T[] firstValues;                                // for binary searches
+
+  public ColumnDescriptor(Input file, ColumnMetaData metaData) {
+    this.file = file;
+    this.metaData = metaData;
+  }
+
+  public int findBlock(long row) {
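+    // Arrays.binarySearch returns -(insertionPoint)-1 when the key is
+    // absent; -block-2 converts that to the index of the last block whose
+    // first row precedes the sought row.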
+    int block = Arrays.binarySearch(firstRows, row);
+    if (block < 0)
+      block = -block - 2;
+    return block;
+  }
+
+  public int findBlock(T value) {
+    int block = Arrays.binarySearch(firstValues, value);
+    if (block < 0)
+      block = -block - 2;
+    return block;
+  }
+
+  public int blockCount() { return blocks.length; }
+
+  public long lastRow(int block) {
+    if (blocks.length == 0 || block < 0) return 0;
+    return firstRows[block] + blocks[block].rowCount;
+  }
+
+  public void ensureBlocksRead() throws IOException {
+    if (blocks != null) return;
+
+    // read block descriptors
+    InputBuffer in = new InputBuffer(file, start);
+    int blockCount = in.readFixed32();
+    BlockDescriptor[] blocks = new BlockDescriptor[blockCount];
+    if (metaData.hasIndexValues())
+      firstValues = (T[])new Comparable[blockCount];
+
+    for (int i = 0; i < blockCount; i++) {
+      blocks[i] = BlockDescriptor.read(in);
+      if (metaData.hasIndexValues())
+        firstValues[i] = in.<T>readValue(metaData.getType());
+    }
+    dataStart = in.tell();
+    
+    // compute blockStarts and firstRows
+    Checksum checksum = Checksum.get(metaData);
+    blockStarts = new long[blocks.length];
+    firstRows = new long[blocks.length];
+    long startPosition = dataStart;
+    long row = 0;
+    for (int i = 0; i < blockCount; i++) {
+      BlockDescriptor b = blocks[i];
+      blockStarts[i] = startPosition;
+      firstRows[i] = row;
+      startPosition += b.compressedSize + checksum.size();
+      row += b.rowCount;
+    }
+    this.blocks = blocks;
+  }
+
+}
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileMetaData.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileMetaData.java
new file mode 100644
index 0000000..1674671
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileMetaData.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+
+/** File-level metadata. */
+public class ColumnFileMetaData extends MetaData<ColumnFileMetaData> {
+
+  static ColumnFileMetaData read(InputBuffer in) throws IOException {
+    ColumnFileMetaData result = new ColumnFileMetaData();
+    MetaData.read(in, result);
+    return result;
+  }
+
+}
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileReader.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileReader.java
new file mode 100644
index 0000000..ec080b8
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileReader.java
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+import java.io.Closeable;
+import java.io.File;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.HashMap;
+
+/** Reads data from a column file. */
+public class ColumnFileReader implements Closeable {
+  private Input file;
+
+  private long rowCount;
+  private int columnCount;
+  private ColumnFileMetaData metaData;
+  private ColumnDescriptor[] columns;
+  private Map<String,ColumnDescriptor> columnsByName;
+
+  /** Construct reading from the named file. */
+  public ColumnFileReader(File file) throws IOException {
+    this(new InputFile(file));
+  }
+
+  /** Construct reading from the provided input. */
+  public ColumnFileReader(Input file) throws IOException {
+    this.file = file;
+    readHeader();
+  }
+
+  /** Return the number of rows in this file. */
+  public long getRowCount() { return rowCount; }
+
+  /** Return the number of columns in this file. */
+  public long getColumnCount() { return columnCount; }
+
+  /** Return this file's metadata. */
+  public ColumnFileMetaData getMetaData() { return metaData; }
+
+  /** Return all columns' metadata. */
+  public ColumnMetaData[] getColumnMetaData() {
+    ColumnMetaData[] result = new ColumnMetaData[columnCount];
+    for (int i = 0; i < columnCount; i++)
+      result[i] = columns[i].metaData;
+    return result;
+  }
+
+  /** Return root columns' metadata.  Roots are columns that have no parent. */
+  public List<ColumnMetaData> getRoots() {
+    List<ColumnMetaData> result = new ArrayList<ColumnMetaData>();
+    for (int i = 0; i < columnCount; i++)
+      if (columns[i].metaData.getParent() == null)
+        result.add(columns[i].metaData);
+    return result;
+  }
+
+  /** Return a column's metadata. */
+  public ColumnMetaData getColumnMetaData(int number) {
+    return columns[number].metaData;
+  }
+
+  /** Return a column's metadata. */
+  public ColumnMetaData getColumnMetaData(String name) {
+    return getColumn(name).metaData;
+  }
+
+  private <T extends Comparable> ColumnDescriptor<T> getColumn(String name) {
+    ColumnDescriptor column = columnsByName.get(name);
+    if (column == null)
+      throw new TrevniRuntimeException("No column named: "+name);
+    return (ColumnDescriptor<T>)column;
+  }
+
+  private void readHeader() throws IOException {
+    InputBuffer in = new InputBuffer(file, 0);
+    readMagic(in);
+    this.rowCount = in.readFixed64();
+    this.columnCount = in.readFixed32();
+    this.metaData = ColumnFileMetaData.read(in);
+    this.columnsByName = new HashMap<String,ColumnDescriptor>(columnCount);
+
+    columns = new ColumnDescriptor[columnCount];
+    readColumnMetaData(in);
+    readColumnStarts(in);
+  }
+
+  private void readMagic(InputBuffer in) throws IOException {
+    byte[] magic = new byte[ColumnFileWriter.MAGIC.length];
+    try {
+      in.readFully(magic);
+    } catch (IOException e) {
+      throw new IOException("Not a data file.");
+    }
+    if (!(Arrays.equals(ColumnFileWriter.MAGIC, magic)
+          || Arrays.equals(ColumnFileWriter.MAGIC_1, magic)
+          || Arrays.equals(ColumnFileWriter.MAGIC_0, magic)))
+      throw new IOException("Not a data file.");
+  }
+
+  private void readColumnMetaData(InputBuffer in) throws IOException {
+    for (int i = 0; i < columnCount; i++) {
+      ColumnMetaData meta = ColumnMetaData.read(in, this);
+      meta.setDefaults(this.metaData);
+      ColumnDescriptor column = new ColumnDescriptor(file, meta);
+      columns[i] = column;
+      meta.setNumber(i);
+      columnsByName.put(meta.getName(), column);
+    }
+  }
+
+  private void readColumnStarts(InputBuffer in) throws IOException {
+    for (int i = 0; i < columnCount; i++)
+      columns[i].start = in.readFixed64();
+  }
+ 
+  /** Return an iterator over values in the named column. */
+  public <T extends Comparable> ColumnValues<T> getValues(String columnName)
+    throws IOException {
+    return new ColumnValues<T>(getColumn(columnName));
+  }
+
+  /** Return an iterator over values in a column. */
+  public <T extends Comparable> ColumnValues<T> getValues(int column)
+    throws IOException {
+    return new ColumnValues<T>(columns[column]);
+  }
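+
+  // A minimal read sketch (the file name and process() are placeholders),
+  // assuming ColumnValues iterates with hasNext()/next():
+  //
+  //   ColumnFileReader reader = new ColumnFileReader(new File("data.trv"));
+  //   ColumnValues<Long> values = reader.getValues("count");
+  //   while (values.hasNext())
+  //     process(values.next());
+  //   reader.close();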
+
+  @Override public void close() throws IOException {
+    file.close();
+  }
+
+}
+
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileWriter.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileWriter.java
new file mode 100644
index 0000000..0f4a21e
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileWriter.java
@@ -0,0 +1,173 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.OutputStream;
+import java.util.Set;
+import java.util.HashSet;
+
+/** Writes data to a column file.
+ * All data is buffered until {@link #writeTo(File)} is called.
+ */
+public class ColumnFileWriter {
+
+  static final byte[] MAGIC_0 = new byte[] {'T', 'r', 'v', 0};
+  static final byte[] MAGIC_1 = new byte[] {'T', 'r', 'v', 1};
+  static final byte[] MAGIC = new byte[] {'T', 'r', 'v', 2};
+
+  private ColumnFileMetaData metaData;
+  private ColumnOutputBuffer[] columns;
+
+  private long rowCount;
+  private int columnCount;
+  private long size;
+
+  /** Construct given metadata for each column in the file. */
+  public ColumnFileWriter(ColumnFileMetaData fileMeta,
+                          ColumnMetaData... columnMeta) throws IOException {
+    checkColumns(columnMeta);
+    this.metaData = fileMeta;
+    this.columnCount = columnMeta.length;
+    this.columns = new ColumnOutputBuffer[columnCount];
+    for (int i = 0; i < columnCount; i++) {
+      ColumnMetaData c = columnMeta[i];
+      c.setDefaults(metaData);
+      columns[i] = c.isArray()
+        ? new ArrayColumnOutputBuffer(this, c)
+        : new ColumnOutputBuffer(this, c);
+      size += OutputBuffer.BLOCK_SIZE;            // over-estimate
+    }
+  }
+
+  private void checkColumns(ColumnMetaData[] columnMeta) {
+    Set<String> seen = new HashSet<String>();
+    for (int i = 0; i < columnMeta.length; i++) {
+      ColumnMetaData c = columnMeta[i];
+      String name = c.getName();
+      if (seen.contains(name))
+        throw new TrevniRuntimeException("Duplicate column name: "+name);
+      ColumnMetaData parent = c.getParent();
+      if (parent != null && !seen.contains(parent.getName()))
+        throw new TrevniRuntimeException("Parent must precede child: "+name);
+      seen.add(name);
+    }          
+  }
+
+  void incrementSize(int n) { size += n; }
+
+  /** Return the approximate size of the file that will be written.  Tries to
+   * slightly over-estimate.  This reflects both the in-memory size of the
+   * buffered data and the size of the file that will be written by {@link
+   * #writeTo(OutputStream)}. */
+  public long sizeEstimate() { return size; }
+
+  /** Return this file's metadata. */
+  public ColumnFileMetaData getMetaData() { return metaData; }
+
+  /** Return the number of columns in the file. */
+  public int getColumnCount() { return columnCount; }
+
+  /** Add a row to the file. */
+  public void writeRow(Object... row) throws IOException {
+    startRow();
+    for (int column = 0; column < columnCount; column++)
+      writeValue(row[column], column);
+    endRow();
+  }
+
+  /** Expert: Called before any values are written to a row. */
+  public void startRow() throws IOException {
+    for (int column = 0; column < columnCount; column++)
+      columns[column].startRow();
+  }
+
+  /** Expert: Declare a count of items to be written to an array column or a
+   * column whose parent is an array. */
+  public void writeLength(int length, int column) throws IOException {
+    columns[column].writeLength(length);
+  }
+
+  /** Expert: Add a value to a row.  For values in array columns, or in
+   * columns whose parent is an array, each group of values must be preceded
+   * by a call to {@link #writeLength(int, int)}, and this must then be called
+   * exactly that many times.  For normal columns this is called once per
+   * row. */
+  public void writeValue(Object value, int column) throws IOException {
+    columns[column].writeValue(value);
+  }
+
+  /** Expert: Called after all values are written to a row. */
+  public void endRow() throws IOException {
+    for (int column = 0; column < columnCount; column++)
+      columns[column].endRow();
+    rowCount++;
+  }
+
+  /** Write all buffered rows to the named file. */
+  public void writeTo(File file) throws IOException {
+    OutputStream out = new FileOutputStream(file);
+    try {
+      writeTo(out);
+    } finally {
+      out.close();
+    }
+  }
+
+  /** Write all buffered rows to the given output stream. */
+  public void writeTo(OutputStream out) throws IOException {
+    writeHeader(out);
+    
+    for (int column = 0; column < columnCount; column++)
+      columns[column].writeTo(out);
+  }
+
+  private void writeHeader(OutputStream out) throws IOException {
+    OutputBuffer header = new OutputBuffer();
+
+    header.write(MAGIC);                          // magic
+
+    header.writeFixed64(rowCount);                // row count
+
+    header.writeFixed32(columnCount);             // column count
+
+    metaData.write(header);                       // file metadata
+
+    for (ColumnOutputBuffer column : columns)
+      column.getMeta().write(header);             // column metadata
+
+    for (long start : computeStarts(header.size()))
+      header.writeFixed64(start);                 // column starts
+
+    header.writeTo(out);
+
+  }
+
+  private long[] computeStarts(long start) throws IOException {
+    long[] result = new long[columnCount];
+    start += columnCount * 8;                     // room for starts
+    for (int column = 0; column < columnCount; column++) {
+      result[column] = start;
+      start += columns[column].size();
+    }
+    return result;
+  }
+
+}
+
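
A write-side sketch to match: two scalar columns, a couple of rows, then
writeTo. It assumes ColumnFileMetaData has a no-argument constructor, and
"crc32" is an assumed checksum name accepted by Checksum.get; everything
else uses the API shown above.

    import java.io.File;
    import java.io.IOException;
    import org.apache.trevni.ColumnFileMetaData;
    import org.apache.trevni.ColumnFileWriter;
    import org.apache.trevni.ColumnMetaData;
    import org.apache.trevni.ValueType;

    public class WriteExample {
      public static void main(String[] args) throws IOException {
        ColumnFileWriter writer = new ColumnFileWriter(
            new ColumnFileMetaData().setChecksum("crc32"), // assumed name
            new ColumnMetaData("id", ValueType.LONG),
            new ColumnMetaData("name", ValueType.STRING));
        writer.writeRow(1L, "one");            // one value per column
        writer.writeRow(2L, "two");
        writer.writeTo(new File("data.trv"));  // flush buffered rows to disk
      }
    }
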
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnMetaData.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnMetaData.java
new file mode 100644
index 0000000..e079c80
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnMetaData.java
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.ArrayList;
+
+/** Metadata for a column. */
+public class ColumnMetaData extends MetaData<ColumnMetaData> {
+
+  static final String NAME_KEY = RESERVED_KEY_PREFIX + "name";
+  static final String TYPE_KEY = RESERVED_KEY_PREFIX + "type";
+  static final String VALUES_KEY = RESERVED_KEY_PREFIX + "values";
+  static final String PARENT_KEY = RESERVED_KEY_PREFIX + "parent";
+  static final String ARRAY_KEY = RESERVED_KEY_PREFIX + "array";
+
+  // cache these values for better performance
+  private String name;
+  private ValueType type;
+  private boolean values;
+  private ColumnMetaData parent;
+  private boolean isArray;
+
+  private transient List<ColumnMetaData> children =
+    new ArrayList<ColumnMetaData>(0);
+  private transient int number = -1;
+
+  private ColumnMetaData() {}                     // non-public ctor
+
+  /** Construct given a name and type. */
+  public ColumnMetaData(String name, ValueType type) {
+    this.name = name;
+    setReserved(NAME_KEY, name);
+    this.type = type;
+    setReserved(TYPE_KEY, type.getName());
+  }
+
+  /** Return this column's name. */
+  public String getName() { return name; }
+
+  /** Return this column's type. */
+  public ValueType getType() { return type; }
+
+  /** Return this column's parent or null. */
+  public ColumnMetaData getParent() { return parent; }
+
+  /** Return this column's children, an empty list if there are none. */
+  public List<ColumnMetaData> getChildren() { return children; }
+
+  /** Return true if this column is an array. */
+  public boolean isArray() { return isArray; }
+
+  /** Return this column's number in a file. */
+  public int getNumber() { return number; }
+
+  void setNumber(int number) { this.number = number; }
+
+  /** Set whether this column has an index of blocks by value.  This only makes
+   * sense for sorted columns and permits one to seek into a column by value.
+   */
+  public ColumnMetaData hasIndexValues(boolean values) {
+    if (isArray)
+      throw new TrevniRuntimeException("Array column cannot have index: "+this);
+    this.values = values;
+    return setReservedBoolean(VALUES_KEY, values);
+  }
+
+  /** Set this column's parent.  A parent must be a preceding array column. */
+  public ColumnMetaData setParent(ColumnMetaData parent) {
+    if (!parent.isArray())
+      throw new TrevniRuntimeException("Parent is not an array: "+parent);
+    if (values)
+      throw new TrevniRuntimeException("Array column cannot have index: "+this);
+    this.parent = parent;
+    parent.children.add(this);
+    return setReserved(PARENT_KEY, parent.getName());
+  }
+
+  /** Set whether this column is an array. */
+  public ColumnMetaData isArray(boolean isArray) {
+    if (values)
+      throw new TrevniRuntimeException("Array column cannot have index: "+this);
+    this.isArray = isArray;
+    return setReservedBoolean(ARRAY_KEY, isArray);
+  }
+
+  /** Get whether this column has an index of blocks by value. */
+  public boolean hasIndexValues() { return getBoolean(VALUES_KEY); }
+
+  static ColumnMetaData read(InputBuffer in, ColumnFileReader file)
+    throws IOException {
+    ColumnMetaData result = new ColumnMetaData();
+    MetaData.read(in, result);
+    result.name = result.getString(NAME_KEY);
+    result.type = ValueType.forName(result.getString(TYPE_KEY));
+    result.values = result.getBoolean(VALUES_KEY);
+    result.isArray = result.getBoolean(ARRAY_KEY);
+
+    String parentName = result.getString(PARENT_KEY);
+    if (parentName != null)
+      result.setParent(file.getColumnMetaData(parentName));
+
+    return result;
+  }
+
+}
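
The ordering rule enforced by ColumnFileWriter.checkColumns (a parent must
precede its children) and the array/index exclusivity above can be seen in a
small sketch; the column names are made up.

    import org.apache.trevni.ColumnMetaData;
    import org.apache.trevni.ValueType;

    public class ArrayColumnExample {
      public static void main(String[] args) {
        // An array column and a child column; when handed to
        // ColumnFileWriter, "scores" must come before "weight".
        ColumnMetaData scores =
            new ColumnMetaData("scores", ValueType.INT).isArray(true);
        ColumnMetaData weight =
            new ColumnMetaData("weight", ValueType.FLOAT).setParent(scores);
        System.out.println(weight.getParent().getName()); // prints: scores
      }
    }
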
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnOutputBuffer.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnOutputBuffer.java
new file mode 100644
index 0000000..b689915
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnOutputBuffer.java
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+class ColumnOutputBuffer {
+  private ColumnFileWriter writer;
+  private ColumnMetaData meta;
+  private Codec codec;
+  private Checksum checksum;
+  private OutputBuffer buffer;
+  private List<BlockDescriptor> blockDescriptors;
+  private List<byte[]> blockData;
+  private List<byte[]> firstValues;
+  private int rowCount;
+  private long size = 4;                          // room for block count
+
+  public ColumnOutputBuffer(ColumnFileWriter writer, ColumnMetaData meta)
+    throws IOException {
+    this.writer = writer;
+    this.meta = meta;
+    this.codec = Codec.get(meta);
+    this.checksum = Checksum.get(meta);
+    this.buffer = new OutputBuffer();
+    this.blockDescriptors = new ArrayList<BlockDescriptor>();
+    this.blockData = new ArrayList<byte[]>();
+    if (meta.hasIndexValues())
+      this.firstValues = new ArrayList<byte[]>();
+  }
+
+  public ColumnMetaData getMeta() { return meta; }
+  public OutputBuffer getBuffer() { return buffer; }
+
+  public void startRow() throws IOException {
+    if (buffer.isFull())
+      flushBuffer();
+  }
+
+  public void writeLength(int length) throws IOException {
+    throw new TrevniRuntimeException("Not an array column: "+meta);
+  }
+
+  public void writeValue(Object value) throws IOException {
+    buffer.writeValue(value, meta.getType());
+    if (meta.hasIndexValues() && rowCount == 0)
+      firstValues.add(buffer.toByteArray());
+  }
+
+  public void endRow() throws IOException {
+    rowCount++;
+  }
+
+  void flushBuffer() throws IOException {
+    if (rowCount == 0) return;
+    ByteBuffer raw = buffer.asByteBuffer();
+    ByteBuffer c = codec.compress(raw);
+
+    blockDescriptors.add(new BlockDescriptor(rowCount,
+                                             raw.remaining(),
+                                             c.remaining()));
+
+    ByteBuffer data = ByteBuffer.allocate(c.remaining() + checksum.size());
+    data.put(c);
+    data.put(checksum.compute(raw));
+    blockData.add(data.array());
+
+    int sizeIncrement =
+      (4*3)                                       // descriptor
+      + (firstValues != null                      // firstValue
+         ? firstValues.get(firstValues.size()-1).length
+         : 0)
+      + data.position();                         // data
+    
+    writer.incrementSize(sizeIncrement);
+    size += sizeIncrement;                         
+
+    buffer = new OutputBuffer();
+    rowCount = 0;
+  }
+
+  public long size() throws IOException {
+    flushBuffer();
+    return size;
+  }
+
+  public void writeTo(OutputStream out) throws IOException {
+    OutputBuffer header = new OutputBuffer();
+    header.writeFixed32(blockDescriptors.size());
+    for (int i = 0; i < blockDescriptors.size(); i++) {
+      blockDescriptors.get(i).writeTo(header);
+      if (meta.hasIndexValues())
+        header.write(firstValues.get(i));
+    }
+    header.writeTo(out);
+
+    for (byte[] data : blockData)
+      out.write(data);
+  }
+
+}
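
Reading writeTo and flushBuffer together, each column's region of the file
appears to be laid out as follows (a summary of the code above, not
normative specification text):

    fixed32  block count
    for each block:
      descriptor: 3 x fixed32 (row count, uncompressed size, compressed size)
      first value of the block (only if the column has an index of values)
    for each block:
      compressed block data, immediately followed by its checksum
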
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnValues.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnValues.java
new file mode 100644
index 0000000..a47fc85
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnValues.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Iterator;
+
+/** An iterator over column values. */
+public class ColumnValues<T extends Comparable>
+  implements Iterator<T>, Iterable<T> {
+
+  private final ColumnDescriptor column;
+  private final ValueType type;
+  private final Codec codec;
+  private final Checksum checksum;
+  private final InputBuffer in;
+
+  private InputBuffer values;
+  private int block = -1;
+  private long row = 0;
+  private T previous; 
+
+  private int arrayLength;
+
+  ColumnValues(ColumnDescriptor column) throws IOException {
+    this.column = column;
+    this.type = column.metaData.getType();
+    this.codec = Codec.get(column.metaData);
+    this.checksum = Checksum.get(column.metaData);
+    this.in = new InputBuffer(column.file);
+
+    column.ensureBlocksRead();
+  }
+
+  /** Return the current row number within this file. */
+  public long getRow() { return row; }
+
+  /** Seek to the given row. */
+  public void seek(long r) throws IOException {
+    if (r < row || r >= column.lastRow(block))    // not in current block
+      startBlock(column.findBlock(r));            // seek to block start
+    while (r > row && hasNext()) {                // skip within block
+      values.skipValue(type);
+      row++;
+    }
+    previous = null;
+  }
+
+  /** Seek to the given value. */
+  public void seek(T v) throws IOException {
+    if (!column.metaData.hasIndexValues())
+      throw new TrevniRuntimeException
+        ("Column does not have value index: " +column.metaData.getName());
+
+    if (previous == null                          // not in current block?
+        || previous.compareTo(v) > 0
+        || (block != column.blockCount()-1
+            && column.firstValues[block+1].compareTo(v) <= 0))
+      startBlock(column.findBlock(v));            // seek to block start
+
+    while (hasNext()) {                           // scan block
+      long savedPosition = values.tell();
+      T savedPrevious = previous;
+      if (next().compareTo(v) >= 0) {
+        values.seek(savedPosition);
+        previous = savedPrevious;
+        row--;
+        return;
+      }
+    }
+  }
+
+  private void startBlock(int block) throws IOException {
+    this.block = block;
+    this.row = column.firstRows[block];
+
+    in.seek(column.blockStarts[block]);
+    int end = column.blocks[block].compressedSize;
+    byte[] raw = new byte[end+checksum.size()];
+    in.readFully(raw);
+    ByteBuffer data = codec.decompress(ByteBuffer.wrap(raw, 0, end));
+    if (!checksum.compute(data).equals
+        (ByteBuffer.wrap(raw, end, checksum.size())))
+      throw new IOException("Checksums mismatch.");
+    values = new InputBuffer(new InputBytes(data));
+  }
+
+  @Override public Iterator<T> iterator() { return this; }
+
+  @Override public boolean hasNext() {
+    return block < column.blockCount()-1 || row < column.lastRow(block);
+  }
+
+  @Override public T next() {
+    if (column.metaData.isArray() || column.metaData.getParent() != null)
+      throw new TrevniRuntimeException
+        ("Column is array: " +column.metaData.getName());
+    try {
+      startRow();
+      return nextValue();
+    } catch (IOException e) {
+      throw new TrevniRuntimeException(e);
+    }
+  }
+
+  /** Expert: Must be called before any calls to {@link #nextLength()} or
+   * {@link #nextValue()}. */
+  public void startRow() throws IOException {
+    if (row >= column.lastRow(block)) {
+      if (block >= column.blockCount())
+        throw new TrevniRuntimeException("Read past end of column.");
+      startBlock(block+1);
+    }
+    row++;
+  }
+
+  /** Expert: Returns the next length in an array column. */
+  public int nextLength() throws IOException {
+    if (!column.metaData.isArray())
+      throw new TrevniRuntimeException
+        ("Column is not array: " +column.metaData.getName());
+    assert arrayLength == 0;
+    return arrayLength = values.readLength();
+  }
+
+  /** Expert: Returns the next value in a column. */
+  public T nextValue() throws IOException {
+    arrayLength--;
+    return previous = values.<T>readValue(type);
+  }
+
+  @Override public void remove() { throw new UnsupportedOperationException(); }
+
+}
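
Since next() refuses array columns, those go through the expert methods. A
sketch of the intended calling pattern, again with hypothetical file and
column names:

    import java.io.File;
    import java.io.IOException;
    import org.apache.trevni.ColumnFileReader;
    import org.apache.trevni.ColumnValues;

    public class ArrayReadExample {
      public static void main(String[] args) throws IOException {
        ColumnFileReader reader = new ColumnFileReader(new File("data.trv"));
        try {
          ColumnValues<Integer> scores = reader.getValues("scores");
          while (scores.hasNext()) {
            scores.startRow();                 // must precede nextLength()
            int n = scores.nextLength();       // entries in this row's array
            StringBuilder row = new StringBuilder();
            for (int i = 0; i < n; i++)
              row.append(scores.nextValue()).append(' ');
            System.out.println(row);
          }
        } finally {
          reader.close();
        }
      }
    }
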
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/Crc32Checksum.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/Crc32Checksum.java
new file mode 100644
index 0000000..0be8b18
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/Crc32Checksum.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.nio.ByteBuffer;
+import java.util.zip.CRC32;
+
+/** Implements CRC32 checksum. */
+final class Crc32Checksum extends Checksum {
+  private CRC32 crc32 = new CRC32();
+
+  @Override public int size() { return 4; }
+
+  @Override public ByteBuffer compute(ByteBuffer data) {
+    crc32.reset();
+    crc32.update(data.array(), data.position(), data.remaining());
+
+    ByteBuffer result = ByteBuffer.allocate(size());
+    result.putInt((int)crc32.getValue());
+    result.flip();
+    return result;
+ }
+
+}
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/DeflateCodec.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/DeflateCodec.java
new file mode 100644
index 0000000..7e9a0be
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/DeflateCodec.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.zip.Deflater;
+import java.util.zip.DeflaterOutputStream;
+import java.util.zip.Inflater;
+import java.util.zip.InflaterOutputStream;
+
+/** Implements DEFLATE (RFC1951) compression and decompression. */
+class DeflateCodec extends Codec {
+  private ByteArrayOutputStream outputBuffer;
+  private Deflater deflater;
+  private Inflater inflater;
+
+  @Override
+  ByteBuffer compress(ByteBuffer data) throws IOException {
+    ByteArrayOutputStream baos = getOutputBuffer(data.remaining());
+    writeAndClose(data, new DeflaterOutputStream(baos, getDeflater()));
+    return ByteBuffer.wrap(baos.toByteArray());
+  }
+
+  @Override
+  ByteBuffer decompress(ByteBuffer data) throws IOException {
+    ByteArrayOutputStream baos = getOutputBuffer(data.remaining());
+    writeAndClose(data, new InflaterOutputStream(baos, getInflater()));
+    return ByteBuffer.wrap(baos.toByteArray());
+  }
+  
+  private void writeAndClose(ByteBuffer data, OutputStream out)
+    throws IOException {
+    out.write(data.array(), data.position(), data.remaining());
+    out.close();
+  }
+  
+  private Inflater getInflater() {
+    if (null == inflater)
+      inflater = new Inflater(true);
+    inflater.reset();
+    return inflater;
+  }
+
+  private Deflater getDeflater() {
+    if (null == deflater)
+      deflater = new Deflater(Deflater.DEFAULT_COMPRESSION, true);
+    deflater.reset();
+    return deflater;
+  }
+  
+  private ByteArrayOutputStream getOutputBuffer(int suggestedLength) {
+    if (null == outputBuffer)
+      outputBuffer = new ByteArrayOutputStream(suggestedLength);
+    outputBuffer.reset();
+    return outputBuffer;
+  }
+
+}
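
Because the Deflater and Inflater are created with nowrap=true, the streams
are raw RFC 1951 DEFLATE, with no zlib header or trailing checksum (block
checksums are handled separately by Checksum). A same-package round-trip
sketch, since Codec is package-private; not part of the upstream sources:

    package org.apache.trevni;                 // Codec is package-private

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.charset.Charset;

    public class DeflateRoundTrip {
      public static void main(String[] args) throws IOException {
        Codec codec = new DeflateCodec();
        byte[] input = "the quick brown fox".getBytes(Charset.forName("UTF-8"));
        ByteBuffer compressed = codec.compress(ByteBuffer.wrap(input));
        ByteBuffer restored = codec.decompress(compressed);
        System.out.println(new String(restored.array(), 0,
                                      restored.remaining(),
                                      Charset.forName("UTF-8")));
      }
    }
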
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/Input.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/Input.java
new file mode 100644
index 0000000..595eb73
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/Input.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+import java.io.Closeable;
+
+/** A byte source that supports positioned read and length. */
+public interface Input extends Closeable {
+  /** Return the total length of the input. */
+  long length() throws IOException;
+
+  /** Positioned read. */
+  int read(long position, byte[] b, int start, int len) throws IOException;
+}
+
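
Input is the only abstraction the reader needs, so other byte sources are
easy to adapt. A hedged sketch over RandomAccessFile (InputFile below does
the same job with a FileChannel):

    import java.io.File;
    import java.io.IOException;
    import java.io.RandomAccessFile;
    import org.apache.trevni.Input;

    public class RandomAccessInput implements Input {
      private final RandomAccessFile raf;

      public RandomAccessInput(File file) throws IOException {
        this.raf = new RandomAccessFile(file, "r");
      }

      @Override public long length() throws IOException { return raf.length(); }

      @Override public synchronized int read(long position, byte[] b,
                                             int start, int len)
        throws IOException {
        raf.seek(position);      // seek+read must be atomic, hence synchronized
        return raf.read(b, start, len);
      }

      @Override public void close() throws IOException { raf.close(); }
    }
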
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/InputBuffer.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/InputBuffer.java
new file mode 100644
index 0000000..e3bd415
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/InputBuffer.java
@@ -0,0 +1,379 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+import java.nio.charset.CharsetDecoder;
+
+/** Used to read values. */
+class InputBuffer {
+  private Input in;
+
+  private long inLength;
+  private long offset;                            // pos of next read from in
+
+  private byte[] buf;                             // data from input
+  private int pos;                                // position within buffer
+  private int limit;                              // end of valid buffer data
+
+  private CharsetDecoder utf8 = Charset.forName("UTF-8").newDecoder();
+  
+  private int bitCount;                           // position in booleans
+
+  private int runLength;                          // length of run
+  private int runValue;                           // value of run
+
+  public InputBuffer(Input in) throws IOException { this(in, 0); }
+
+  public InputBuffer(Input in, long position) throws IOException {
+    this.in = in;
+    this.inLength = in.length();
+    this.offset = position;
+
+    if (in instanceof InputBytes) {               // use buffer directly
+      this.buf = ((InputBytes)in).getBuffer();
+      this.limit = (int)in.length();
+      this.offset = limit;
+      this.pos = (int)position;
+    } else {                                      // create new buffer
+      this.buf = new byte[8192];                  // big enough for primitives
+    }
+  }
+
+  public void seek(long position) throws IOException {
+    runLength = 0;
+    if (position >= (offset-limit) && position <= offset) {
+      pos = (int)(limit - (offset - position));   // seek in buffer;
+      return;
+    }
+    pos = 0;
+    limit = 0;
+    offset = position;
+  }
+
+  public long tell() { return (offset-limit)+pos; }
+
+  public long length() { return inLength; }
+
+  public <T extends Comparable> T readValue(ValueType type) throws IOException {
+    switch (type) {
+    case NULL:
+      return (T)null;
+    case BOOLEAN:
+      return (T)Boolean.valueOf(readBoolean());
+    case INT:
+      return (T)Integer.valueOf(readInt());
+    case LONG:
+      return (T)Long.valueOf(readLong());
+    case FIXED32:
+      return (T)Integer.valueOf(readFixed32());
+    case FIXED64:
+      return (T)Long.valueOf(readFixed64());
+    case FLOAT:
+      return (T)Float.valueOf(readFloat());
+    case DOUBLE:
+      return (T)Double.valueOf(readDouble());
+    case STRING:
+      return (T)readString();
+    case BYTES:
+      return (T)readBytes(null);
+    default:
+      throw new TrevniRuntimeException("Unknown value type: "+type);
+    }
+  }
+
+  public void skipValue(ValueType type) throws IOException {
+    switch (type) {
+    case NULL:
+                    break;
+    case BOOLEAN:
+      readBoolean(); break;
+    case INT:
+      readInt();    break;
+    case LONG:
+      readLong();   break;
+    case FIXED32:
+    case FLOAT:
+      skip(4);      break;
+    case FIXED64:
+    case DOUBLE:
+      skip(8);      break;
+    case STRING:
+    case BYTES:
+      skipBytes();  break;
+    default:
+      throw new TrevniRuntimeException("Unknown value type: "+type);
+    }
+  }
+
+  public boolean readBoolean() throws IOException {
+    if (bitCount == 0)
+      read();
+    int bits = buf[pos-1] & 0xff;
+    int bit = (bits >> bitCount) & 1;
+    bitCount++;
+    if (bitCount == 8)
+      bitCount = 0;
+    return bit == 0 ? false : true;
+  }
+
+  public int readLength() throws IOException {
+    bitCount = 0;
+    if (runLength > 0) {
+      runLength--;                                // in run
+      return runValue;
+    }
+
+    int length = readInt();
+    if (length >= 0)                              // not a run
+      return length;
+
+    runLength = (1-length)>>>1;                   // start of run
+    runValue = (length+1) & 1;
+    return runValue;
+  }
+
+  public int readInt() throws IOException {
+    if ((limit - pos) < 5) {                      // maybe not in buffer
+      int b = read();
+      int n = b & 0x7f;
+      for (int shift = 7; b > 0x7f; shift += 7) {
+        b = read();
+        n ^= (b & 0x7f) << shift;
+      }
+      return (n >>> 1) ^ -(n & 1);                  // back to two's-complement
+    }
+    int len = 1;
+    int b = buf[pos] & 0xff;
+    int n = b & 0x7f;
+    if (b > 0x7f) {
+      b = buf[pos + len++] & 0xff;
+      n ^= (b & 0x7f) << 7;
+      if (b > 0x7f) {
+        b = buf[pos + len++] & 0xff;
+        n ^= (b & 0x7f) << 14;
+        if (b > 0x7f) {
+          b = buf[pos + len++] & 0xff;
+          n ^= (b & 0x7f) << 21;
+          if (b > 0x7f) {
+            b = buf[pos + len++] & 0xff;
+            n ^= (b & 0x7f) << 28;
+            if (b > 0x7f) {
+              throw new IOException("Invalid int encoding");
+            }
+          }
+        }
+      }
+    }
+    pos += len;
+    if (pos > limit)
+      throw new EOFException();
+    return (n >>> 1) ^ -(n & 1);                  // back to two's-complement
+  }
+
+  public long readLong() throws IOException {
+    if ((limit - pos) < 10) {                     // maybe not in buffer
+      int b = read();
+      long n = b & 0x7f;
+      for (int shift = 7; b > 0x7f; shift += 7) {
+        b = read();
+        n ^= (b & 0x7fL) << shift;
+      }
+      return (n >>> 1) ^ -(n & 1);                // back to two's-complement
+    }
+
+    int b = buf[pos++] & 0xff;
+    int n = b & 0x7f;
+    long l;
+    if (b > 0x7f) {
+      b = buf[pos++] & 0xff;
+      n ^= (b & 0x7f) << 7;
+      if (b > 0x7f) {
+        b = buf[pos++] & 0xff;
+        n ^= (b & 0x7f) << 14;
+        if (b > 0x7f) {
+          b = buf[pos++] & 0xff;
+          n ^= (b & 0x7f) << 21;
+          if (b > 0x7f) {
+            // only the low 28 bits can be set, so this won't carry
+            // the sign bit to the long
+            l = innerLongDecode((long)n);
+          } else {
+            l = n;
+          }
+        } else {
+          l = n;
+        }
+      } else {
+        l = n;
+      }
+    } else {
+      l = n;
+    }
+    if (pos > limit) {
+      throw new EOFException();
+    }
+    return (l >>> 1) ^ -(l & 1); // back to two's-complement
+  }
+  
+  // splitting readLong up makes it faster because the JVM does more
+  // optimization on small methods
+  private long innerLongDecode(long l) throws IOException {
+    int len = 1;
+    int b = buf[pos] & 0xff;
+    l ^= (b & 0x7fL) << 28;
+    if (b > 0x7f) {
+      b = buf[pos + len++] & 0xff;
+      l ^= (b & 0x7fL) << 35;
+      if (b > 0x7f) {
+        b = buf[pos + len++] & 0xff;
+        l ^= (b & 0x7fL) << 42;
+        if (b > 0x7f) {
+          b = buf[pos + len++] & 0xff;
+          l ^= (b & 0x7fL) << 49;
+          if (b > 0x7f) {
+            b = buf[pos + len++] & 0xff;
+            l ^= (b & 0x7fL) << 56;
+            if (b > 0x7f) {
+              b = buf[pos + len++] & 0xff;
+              l ^= (b & 0x7fL) << 63;
+              if (b > 0x7f) {
+                throw new IOException("Invalid long encoding");
+              }
+            }
+          }
+        }
+      }
+    }
+    pos += len;
+    return l;
+  }
+
+  public float readFloat() throws IOException {
+    return Float.intBitsToFloat(readFixed32());
+  }
+
+  public int readFixed32() throws IOException {
+    if ((limit - pos) < 4)                        // maybe not in buffer
+      return read() | (read() << 8) | (read() << 16) | (read() << 24);
+
+    int len = 1;
+    int n = (buf[pos] & 0xff) | ((buf[pos + len++] & 0xff) << 8)
+        | ((buf[pos + len++] & 0xff) << 16) | ((buf[pos + len++] & 0xff) << 24);
+    if ((pos + 4) > limit)
+      throw new EOFException();
+    pos += 4;
+    return n;
+  }
+
+  public double readDouble() throws IOException {
+    return Double.longBitsToDouble(readFixed64());
+  }
+
+  public long readFixed64() throws IOException {
+    return (readFixed32() & 0xFFFFFFFFL) | (((long)readFixed32()) << 32);
+  }
+
+  public String readString() throws IOException {
+    int length = readInt();
+    if (length <= (limit - pos)) {                        // in buffer
+      String result = utf8.decode(ByteBuffer.wrap(buf, pos, length)).toString();
+      pos += length;
+      return result;
+    }
+    byte[] bytes = new byte[length];
+    readFully(bytes, 0, length);
+    return utf8.decode(ByteBuffer.wrap(bytes, 0, length)).toString();
+  }  
+
+  public byte[] readBytes() throws IOException {
+    byte[] result = new byte[readInt()];
+    readFully(result);
+    return result;
+  }
+
+  public ByteBuffer readBytes(ByteBuffer old) throws IOException {
+    int length = readInt();
+    ByteBuffer result;
+    if (old != null && length <= old.capacity()) {
+      result = old;
+      result.clear();
+    } else {
+      result = ByteBuffer.allocate(length);
+    }
+    readFully(result.array(), result.position(), length);
+    result.limit(length);
+    return result;
+  }
+
+  public void skipBytes() throws IOException {
+    skip(readInt());
+  }
+
+  private void skip(long length) throws IOException {
+    seek(tell()+length);
+  }
+
+  public int read() throws IOException {
+    if (pos >= limit) {
+      limit = readInput(buf, 0, buf.length);
+      pos = 0;
+    }
+    return buf[pos++] & 0xFF;
+  }
+
+  public void readFully(byte[] bytes) throws IOException {
+    readFully(bytes, 0, bytes.length);
+  }
+
+  public void readFully(byte[] bytes, int start, int len) throws IOException {
+    int buffered = limit - pos;
+    if (len > buffered) {                        // buffer is insufficient
+
+      System.arraycopy(buf, pos, bytes, start, buffered); // consume buffer
+      start += buffered;
+      len -= buffered;
+      pos += buffered;
+      if (len > buf.length) {                     // bigger than buffer
+        do {
+          int read = readInput(bytes, start, len); // read directly into result
+          len -= read;
+          start += read;
+        } while (len > 0);
+        return;
+      }
+
+      limit = readInput(buf, 0, buf.length);        // refill buffer
+      pos = 0;
+    }
+
+    System.arraycopy(buf, pos, bytes, start, len); // copy from buffer
+    pos += len;
+  }
+
+  private int readInput(byte[] b, int start, int len) throws IOException {
+    int read = in.read(offset, b, start, len);
+    if (read < 0) throw new EOFException();
+    offset += read;
+    return read;
+ }
+
+}
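
The `(n >>> 1) ^ -(n & 1)` steps above undo zigzag encoding, which the
writer applies so that small magnitudes of either sign take few bytes. A
standalone illustration of the two transforms:

    public class ZigZagDemo {
      static int encode(int n) { return (n << 1) ^ (n >> 31); }  // writer side
      static int decode(int z) { return (z >>> 1) ^ -(z & 1); }  // reader side

      public static void main(String[] args) {
        // 0->0, -1->1, 1->2, -2->3, 2->4: signs alternate, codes stay small
        for (int n : new int[] {0, -1, 1, -2, 2, Integer.MIN_VALUE}) {
          int z = encode(n);
          if (decode(z) != n) throw new AssertionError();
          System.out.println(n + " zigzags to " + (z & 0xFFFFFFFFL));
        }
      }
    }
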
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/InputBytes.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/InputBytes.java
new file mode 100644
index 0000000..e8727a8
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/InputBytes.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/** An {@link Input} backed with data in a byte array. */
+public class InputBytes extends ByteArrayInputStream implements Input {
+
+  /** Construct for the given bytes. */
+  public InputBytes(byte[] data) { super(data); }
+
+  /** Construct for the given bytes. */
+  public InputBytes(ByteBuffer data) {
+    super(data.array(), data.position(), data.remaining()); // length, not limit
+  }
+
+  @Override
+  public long length() throws IOException { return this.count; }
+
+  @Override
+  public synchronized int read(long pos, byte[] b, int start, int len)
+    throws IOException {
+    this.pos = (int)pos;
+    return read(b, start, len);
+  }
+
+  byte[] getBuffer() { return buf; }
+}
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/InputFile.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/InputFile.java
new file mode 100644
index 0000000..51de4c0
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/InputFile.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.nio.channels.FileChannel;
+import java.nio.ByteBuffer;
+import java.io.IOException;
+
+/** An {@link Input} for files. */
+public class InputFile implements Input {
+
+  private FileChannel channel;
+
+  /** Construct for the given file. */
+  public InputFile(File file) throws IOException {
+    this.channel = new FileInputStream(file).getChannel();
+  }
+
+  @Override
+  public long length() throws IOException { return channel.size(); }
+
+  @Override
+  public int read(long position, byte[] b, int start, int len)
+    throws IOException {
+    return channel.read(ByteBuffer.wrap(b, start, len), position);
+  }
+
+  @Override
+  public void close() throws IOException { channel.close(); }
+
+}
+
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/MetaData.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/MetaData.java
new file mode 100644
index 0000000..648d8a1
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/MetaData.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.Map;
+import java.util.LinkedHashMap;
+
+/** Base class for metadata. */
+public class MetaData<T extends MetaData> extends LinkedHashMap<String,byte[]> {
+
+  static final String RESERVED_KEY_PREFIX = "trevni.";
+
+  static final String CODEC_KEY = RESERVED_KEY_PREFIX + "codec";
+  static final String CHECKSUM_KEY = RESERVED_KEY_PREFIX + "checksum";
+
+  public static final Charset UTF8 = Charset.forName("UTF-8");
+
+  private MetaData<?> defaults;
+
+  void setDefaults(MetaData defaults) { this.defaults = defaults; }
+
+  /** Return the compression codec name. */
+  public String getCodec() { return getString(CODEC_KEY); }
+
+  /** Set the compression codec name. */
+  public T setCodec(String codec) {
+    setReserved(CODEC_KEY, codec);
+    return (T)this;
+  }
+   
+  /** Return the checksum algorithm name. */
+  public String getChecksum() { return getString(CHECKSUM_KEY); }
+
+  /** Set the checksum algorithm name. */
+  public T setChecksum(String checksum) {
+    setReserved(CHECKSUM_KEY, checksum);
+    return (T)this;
+  }
+
+  /** Return the value of a metadata property as a String. */
+  public String getString(String key) {
+    byte[] value = get(key);
+    if (value == null && defaults != null)
+      value = defaults.get(key);
+    if (value == null)
+      return null;
+    return new String(value, UTF8);
+  }
+
+  /** Return the value of a metadata property as a long. */
+  public long getLong(String key) {
+    return Long.parseLong(getString(key));
+  }
+
+  /** Return true iff a key has any value, false if it is not present. */
+  public boolean getBoolean(String key) {
+    return get(key) != null;
+  }
+
+  /** Set a metadata property to a binary value. */
+  public T set(String key, byte[] value) {
+    if (isReserved(key)) {
+      throw new TrevniRuntimeException("Cannot set reserved key: " + key);
+    }
+    put(key, value);
+    return (T)this;
+  }
+
+  /** Test if a metadata key is reserved. */
+  public static boolean isReserved(String key) {
+    return key.startsWith(RESERVED_KEY_PREFIX);
+  }
+
+  /** Set a metadata property to a String value. */
+  public T set(String key, String value) {
+    return set(key, value.getBytes(UTF8));
+  }
+
+  T setReserved(String key, String value) {
+    put(key, value.getBytes(UTF8));
+    return (T)this;
+  }
+
+  T setReservedBoolean(String key, boolean value) {
+    if (value)
+      setReserved(key, "");
+    else
+      remove(key);
+    return (T)this;
+  }
+
+  /** Set a metadata property to a long value. */
+  public T set(String key, long value) {
+    return set(key, Long.toString(value));
+  }
+
+  void write(OutputBuffer out) throws IOException {
+    out.writeInt(size());
+    for (Map.Entry<String,byte[]> e : entrySet()) {
+      out.writeString(e.getKey());
+      out.writeBytes(e.getValue());
+    }
+  }
+
+  static void read(InputBuffer in, MetaData<?> metaData) throws IOException {
+    int size = in.readInt();
+    for (int i = 0; i < size; i++)
+      metaData.put(in.readString(), in.readBytes());
+  }
+
+  @Override public String toString() {
+    StringBuilder buffer = new StringBuilder();
+    buffer.append("{ ");
+    for (Map.Entry<String,byte[]> e : entrySet()) {
+      buffer.append(e.getKey());
+      buffer.append("=");
+      buffer.append(new String(e.getValue(), Charset.forName("ISO-8859-1")));
+      buffer.append(" ");
+    }
+    buffer.append("}");
+    return buffer.toString();
+  }
+
+}
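
Usage is map-like, with the reserved trevni.* keys routed through the typed
setters. A sketch using ColumnFileMetaData (a MetaData subclass elsewhere in
this package, assumed to have a no-argument constructor); "deflate" is an
assumed codec name:

    import org.apache.trevni.ColumnFileMetaData;

    public class MetaDataExample {
      public static void main(String[] args) {
        ColumnFileMetaData meta = new ColumnFileMetaData()
            .setCodec("deflate")              // writes trevni.codec
            .set("created.by", "example");    // ordinary application key
        System.out.println(meta.getCodec()); // prints: deflate
        // meta.set("trevni.codec", "x") would throw: reserved prefix
      }
    }
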
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/NullChecksum.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/NullChecksum.java
new file mode 100644
index 0000000..7948fc1
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/NullChecksum.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.nio.ByteBuffer;
+
+/** Implements "null" (empty) checksum. */
+final class NullChecksum extends Checksum {
+
+  @Override public int size() { return 0; }
+
+  @Override public ByteBuffer compute(ByteBuffer data) {
+    return ByteBuffer.allocate(0);
+  }
+
+}
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/NullCodec.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/NullCodec.java
new file mode 100644
index 0000000..282d1a9
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/NullCodec.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+/** Implements "null" (pass through) codec. */
+final class NullCodec extends Codec {
+
+  @Override ByteBuffer compress(ByteBuffer buffer) throws IOException {
+    return buffer;
+  }
+
+  @Override ByteBuffer decompress(ByteBuffer data) throws IOException {
+    return data;
+  }
+
+}
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java
new file mode 100644
index 0000000..41174fb
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java
@@ -0,0 +1,317 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+import java.io.ByteArrayOutputStream;
+import java.nio.charset.Charset;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+
+/** Used to write values. */
+class OutputBuffer extends ByteArrayOutputStream {
+  static final int BLOCK_SIZE = 64 * 1024;
+
+  private int bitCount;                           // position in booleans
+
+  public OutputBuffer() { super(BLOCK_SIZE + (BLOCK_SIZE >> 2)); } // 1.25x block
+
+  public boolean isFull() { return size() >= BLOCK_SIZE; }
+
+  public ByteBuffer asByteBuffer() { return ByteBuffer.wrap(buf, 0, count); }
+
+  public void writeValue(Object value, ValueType type)
+    throws IOException {
+    switch (type) {
+    case NULL:
+                                              break;
+    case BOOLEAN:
+      writeBoolean((Boolean)value);           break;
+    case INT:
+      writeInt((Integer)value);               break;
+    case LONG:
+      writeLong((Long)value);                 break;
+    case FIXED32:
+      writeFixed32((Integer)value);           break;
+    case FIXED64:
+      writeFixed64((Long)value);              break;
+    case FLOAT:
+      writeFloat((Float)value);               break;
+    case DOUBLE:
+      writeDouble((Double)value);             break;
+    case STRING:
+      writeString((String)value);             break;
+    case BYTES:
+      if (value instanceof ByteBuffer)
+        writeBytes((ByteBuffer)value);
+      else
+        writeBytes((byte[])value);
+      break;
+    default:
+      throw new TrevniRuntimeException("Unknown value type: "+type);
+    }
+  }
+
+  public void writeBoolean(boolean value) {
+    if (bitCount == 0) {                           // first bool in byte
+      ensure(1);
+      count++;
+    }
+    if (value)
+      buf[count-1] |= (byte)(1 << bitCount);
+    bitCount++;
+    if (bitCount == 8)
+      bitCount = 0;
+  }
+
+  public void writeLength(int length) throws IOException {
+    bitCount = 0;
+    writeInt(length);
+  }
+
+  private static final Charset UTF8 = Charset.forName("UTF-8");
+
+  public void writeString(String string) throws IOException {
+    byte[] bytes = string.getBytes(UTF8);
+    writeInt(bytes.length);
+    write(bytes, 0, bytes.length);
+  }
+
+  public void writeBytes(ByteBuffer bytes) throws IOException {
+    int pos = bytes.position();
+    int start = bytes.arrayOffset() + pos;
+    int len = bytes.limit() - pos;
+    writeBytes(bytes.array(), start, len);
+  }
+  
+  public void writeBytes(byte[] bytes) throws IOException {
+    writeBytes(bytes, 0, bytes.length);
+  }
+
+  public void writeBytes(byte[] bytes, int start, int len) throws IOException {
+    writeInt(len);
+    write(bytes, start, len);
+  }
+
+  public void writeFloat(float f) throws IOException {
+    writeFixed32(Float.floatToRawIntBits(f));
+  }
+
+  public void writeDouble(double d) throws IOException {
+    writeFixed64(Double.doubleToRawLongBits(d));
+  }
+
+  public void writeFixed32(int i) throws IOException {
+    ensure(4);
+    buf[count  ] = (byte)((i       ) & 0xFF);
+    buf[count+1] = (byte)((i >>>  8) & 0xFF);
+    buf[count+2] = (byte)((i >>> 16) & 0xFF);
+    buf[count+3] = (byte)((i >>> 24) & 0xFF);
+    count += 4;
+  }
+
+  public void writeFixed64(long l) throws IOException {
+    ensure(8);
+    int first = (int)l;                           // low 32 bits
+    int second = (int)(l >>> 32);                 // high 32 bits
+    buf[count  ] = (byte)((first         ) & 0xFF);
+    buf[count+1] = (byte)((first  >>>   8) & 0xFF);
+    buf[count+2] = (byte)((first  >>>  16) & 0xFF);
+    buf[count+3] = (byte)((first  >>>  24) & 0xFF);
+    buf[count+4] = (byte)((second        ) & 0xFF);
+    buf[count+5] = (byte)((second >>>   8) & 0xFF);
+    buf[count+6] = (byte)((second >>>  16) & 0xFF);
+    buf[count+7] = (byte)((second >>>  24) & 0xFF);
+    count += 8;
+  }
+
+  public void writeInt(int n) throws IOException {
+    ensure(5);
+    n = (n << 1) ^ (n >> 31);                     // move sign to low-order bit
+    if ((n & ~0x7F) != 0) {
+      buf[count++] = (byte)((n | 0x80) & 0xFF);
+      n >>>= 7;
+      if (n > 0x7F) {
+        buf[count++] = (byte)((n | 0x80) & 0xFF);
+        n >>>= 7;
+        if (n > 0x7F) {
+          buf[count++] = (byte)((n | 0x80) & 0xFF);
+          n >>>= 7;
+          if (n > 0x7F) {
+            buf[count++] = (byte)((n | 0x80) & 0xFF);
+            n >>>= 7;
+          }
+        }
+      }
+    } 
+    buf[count++] = (byte) n;
+  }
+
+  public void writeLong(long n) throws IOException {
+    ensure(10);
+    n = (n << 1) ^ (n >> 63);                     // move sign to low-order bit
+    if ((n & ~0x7FL) != 0) {
+      buf[count++] = (byte)((n | 0x80) & 0xFF);
+      n >>>= 7;
+      if (n > 0x7F) {
+        buf[count++] = (byte)((n | 0x80) & 0xFF);
+        n >>>= 7;
+        if (n > 0x7F) {
+          buf[count++] = (byte)((n | 0x80) & 0xFF);
+          n >>>= 7;
+          if (n > 0x7F) {
+            buf[count++] = (byte)((n | 0x80) & 0xFF);
+            n >>>= 7;
+            if (n > 0x7F) {
+              buf[count++] = (byte)((n | 0x80) & 0xFF);
+              n >>>= 7;
+              if (n > 0x7F) {
+                buf[count++] = (byte)((n | 0x80) & 0xFF);
+                n >>>= 7;
+                if (n > 0x7F) {
+                  buf[count++] = (byte)((n | 0x80) & 0xFF);
+                  n >>>= 7;
+                  if (n > 0x7F) {
+                    buf[count++] = (byte)((n | 0x80) & 0xFF);
+                    n >>>= 7;
+                    if (n > 0x7F) {
+                      buf[count++] = (byte)((n | 0x80) & 0xFF);
+                      n >>>= 7;
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+    buf[count++] = (byte) n;
+  }
+  
+  private void ensure(int n) {
+    if (count + n > buf.length)
+      buf = Arrays.copyOf(buf, Math.max(buf.length << 1, count + n));
+  }
+
+  public static int size(Object value, ValueType type) {
+    switch (type) {
+    case NULL:
+      return 0;
+    case INT:
+      return size((Integer)value);
+    case LONG:
+      return size((Long)value);
+    case FIXED32:
+    case FLOAT:
+      return 4;
+    case FIXED64:
+    case DOUBLE:
+      return 8;
+    case STRING:
+      return size((String)value);
+    case BYTES:
+      if (value instanceof ByteBuffer)
+        return size((ByteBuffer)value);
+      return size((byte[])value);
+    default:
+      throw new TrevniRuntimeException("Unknown value type: "+type);
+    }
+  }
+
+  public static int size(int n) {
+    n = (n << 1) ^ (n >> 31);                     // move sign to low-order bit
+    // compare as unsigned: a zigzagged value with the top bit set is
+    // negative as an int and would otherwise pass the small-size tests
+    if ((n >>> 7*1) == 0)
+      return 1;
+    if ((n >>> 7*2) == 0)
+      return 2;
+    if ((n >>> 7*3) == 0)
+      return 3;
+    if ((n >>> 7*4) == 0)
+      return 4;
+    return 5;
+  }
+
+  public static int size(long n) {
+    n = (n << 1) ^ (n >> 63);                     // move sign to low-order bit
+    // shift the long itself: int-literal shifts like 1<<35 wrap around
+    // (35 & 31 == 3) and would give wrong sizes for mid-range values
+    if ((n >>> 7*1) == 0)
+      return 1;
+    if ((n >>> 7*2) == 0)
+      return 2;
+    if ((n >>> 7*3) == 0)
+      return 3;
+    if ((n >>> 7*4) == 0)
+      return 4;
+    if ((n >>> 7*5) == 0)
+      return 5;
+    if ((n >>> 7*6) == 0)
+      return 6;
+    if ((n >>> 7*7) == 0)
+      return 7;
+    if ((n >>> 7*8) == 0)
+      return 8;
+    if ((n >>> 7*9) == 0)
+      return 9;
+    return 10;
+  }
+
+  public static int size(ByteBuffer bytes) {
+    int length = bytes.remaining();
+    return size(length) + length;
+  }
+
+  public static int size(byte[] bytes) {
+    int length = bytes.length;
+    return size(length) + length;
+  }
+
+  public static int size(String string) {
+    int length = utf8Length(string);
+    return size(length) + length;
+  }
+
+  private static int utf8Length(String string) {
+    int stringLength = string.length();
+    int utf8Length = 0;
+    for (int i = 0; i < stringLength; i++) {
+      char c = string.charAt(i);
+      int p = c;                                  // code point
+      if (Character.isHighSurrogate(c)            // surrogate pair
+          && i != stringLength-1
+          && Character.isLowSurrogate(string.charAt(i+1))) {
+        p = string.codePointAt(i);
+        i++;
+      }
+      if (p <= 0x007F) {
+        utf8Length += 1;
+      } else if (p <= 0x07FF) {
+        utf8Length += 2;
+      } else if (p <= 0xFFFF) {
+        utf8Length += 3;
+      } else {                                    // p <= 0x10FFFF for Java strings
+        utf8Length += 4;
+      }
+    }
+    return utf8Length;
+  }
+
+}
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/SnappyCodec.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/SnappyCodec.java
new file mode 100644
index 0000000..872c664
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/SnappyCodec.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import org.xerial.snappy.Snappy;
+
+/** Implements <a href="http://code.google.com/p/snappy/">Snappy</a> codec. */
+final class SnappyCodec extends Codec {
+
+  @Override ByteBuffer compress(ByteBuffer in) throws IOException {
+    ByteBuffer out =
+      ByteBuffer.allocate(Snappy.maxCompressedLength(in.remaining()));
+    int size = Snappy.compress(in.array(), in.position(), in.remaining(),
+                               out.array(), 0);
+    out.limit(size);
+    return out;
+  }
+
+  @Override ByteBuffer decompress(ByteBuffer in) throws IOException {
+    ByteBuffer out = ByteBuffer.allocate
+      (Snappy.uncompressedLength(in.array(),in.position(),in.remaining()));
+    int size = Snappy.uncompress(in.array(),in.position(),in.remaining(),
+                                 out.array(), 0);
+    out.limit(size);
+    return out;
+  }
+
+}
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/TrevniRuntimeException.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/TrevniRuntimeException.java
new file mode 100644
index 0000000..aa87c6d
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/TrevniRuntimeException.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.trevni;
+
+/** Base runtime exception thrown by Trevni. */
+public class TrevniRuntimeException extends RuntimeException {
+  public TrevniRuntimeException(Throwable cause) { super(cause); }
+  public TrevniRuntimeException(String message) { super(message); }
+  public TrevniRuntimeException(String message, Throwable cause) {
+    super(message, cause);
+  }
+}
+
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/ValueType.java b/lang/java/trevni/core/src/main/java/org/apache/trevni/ValueType.java
new file mode 100644
index 0000000..712a7d9
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/ValueType.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+/** The datatypes that may be stored in a column. */
+public enum ValueType {
+  NULL, BOOLEAN, INT, LONG, FIXED32, FIXED64, FLOAT, DOUBLE, STRING, BYTES;
+  private String name;
+  private ValueType() { this.name = this.name().toLowerCase(); }
+
+  /** Return the name of this type. */
+  public String getName() { return name; }
+
+  /** Return a type given its name. */
+  public static ValueType forName(String name) {
+    return valueOf(name.toUpperCase());
+  }
+ 
+}
diff --git a/lang/java/trevni/core/src/main/java/org/apache/trevni/package.html b/lang/java/trevni/core/src/main/java/org/apache/trevni/package.html
new file mode 100644
index 0000000..e6ec818
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/org/apache/trevni/package.html
@@ -0,0 +1,23 @@
+<html>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<body>
+A column file format.
+</body>
+</html>
diff --git a/lang/java/trevni/core/src/main/java/overview.html b/lang/java/trevni/core/src/main/java/overview.html
new file mode 100644
index 0000000..d64872e
--- /dev/null
+++ b/lang/java/trevni/core/src/main/java/overview.html
@@ -0,0 +1,88 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<html>
+<head>
+   <title>Avro</title>
+</head>
+<body>Avro is a data serialization system.
+
+  <h2>Overview</h2>
+
+  <p>Avro provides:
+    <ul>
+      <li>Rich data structures.
+      <li>A compact, fast, binary data format.
+      <li>A container file, to store persistent data.
+      <li>Remote procedure call (RPC).
+      <li>Simple integration with dynamic languages.  Code generation
+      is not required to read or write data files nor to use or
+      implement RPC protocols.  Code generation is an optional
+      optimization, only worth implementing for statically typed
+      languages.
+    </ul>  
+
+  <h2>Schemas</h2>
+
+  <p>Avro relies on <i>{@link org.apache.avro.Schema schemas}</i>.
+  When Avro data is read, the schema used when writing it is always
+  present.  This permits each datum to be written with no per-value
+  overheads, making serialization both fast and small.  This also
+  facilitates use with dynamic, scripting languages, since data,
+  together with its schema, is fully self-describing.
+
+  <p>When Avro data is stored in a {@link
+  org.apache.avro.file.DataFileWriter file}, its schema is stored with
+  it, so that files may be processed later by any program.  If the
+  program reading the data expects a different schema this can be
+  easily resolved, since both schemas are present.
+
+  <p>When Avro is used in {@link org.apache.avro.ipc RPC}, the client
+    and server exchange schemas in the connection handshake.  (This
+    can be optimized so that, for most calls, no schemas are actually
+    transmitted.)  Since client and server both have the other's
+    full schema, correspondence between same-named fields, missing
+    fields, extra fields, etc. can all be easily resolved.
+
+  <p>Avro schemas are defined with
+  <a href="http://www.json.org/">JSON</a>.  This facilitates
+  implementation in languages that already have JSON libraries.
+
+  <h2>Comparison with other systems</h2>
+
+  Avro provides functionality similar to systems such
+  as <a href="http://incubator.apache.org/thrift/">Thrift</a>,
+  <a href="http://code.google.com/protobuf/">Protocol Buffers</a>,
+  etc.  Avro differs from these systems in the following fundamental
+  aspects.
+  <ul>
+    <li><i>Dynamic typing</i>: Avro does not require that code be
+    generated.  Data is always accompanied by a schema that permits
+    full processing of that data without code generation, static
+    datatypes, etc.  This facilitates construction of generic
+    data-processing systems and languages.
+    <li><i>Untagged data</i>: Since the schema is present when data is
+    read, considerably less type information need be encoded with
+    data, resulting in smaller serialization size.</li>
+    <li><i>No manually-assigned field IDs</i>: When a schema changes,
+    both the old and new schema are always present when processing
+    data, so differences may be resolved symbolically, using field
+    names.
+  </ul>  
+
+</body>
+</html>
diff --git a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestBZip2Codec.java b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestBZip2Codec.java
new file mode 100644
index 0000000..c4a39de
--- /dev/null
+++ b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestBZip2Codec.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.junit.Test;
+
+public class TestBZip2Codec {
+  
+  @Test
+  public void testBZip2CompressionAndDecompression() throws IOException {
+    
+    MetaData meta = new MetaData();
+    meta.setCodec("bzip2");
+    Codec codec = Codec.get(meta);
+    
+    //Confirm that the right codec came back
+    assertTrue(codec instanceof BZip2Codec);
+    
+    //Use three times the BZip2 codec's decompression buffer size, plus some extra
+    final int inputByteSize = BZip2Codec.DEFAULT_BUFFER_SIZE * 3 + 42;
+    
+    byte[] inputByteArray = new byte[inputByteSize];
+    
+    //Generate something that will compress well
+    for (int i = 0; i < inputByteSize; i++) {
+      inputByteArray[i] = (byte)(65 + i % 10);
+    }
+    
+    ByteBuffer inputByteBuffer = ByteBuffer.wrap(inputByteArray);
+    
+    ByteBuffer compressedBuffer = codec.compress(inputByteBuffer);
+    
+    //Make sure something returned
+    assertTrue(compressedBuffer.array().length > 0);
+    //Make sure the compressed output is smaller than the original
+    assertTrue(compressedBuffer.array().length < inputByteArray.length);
+    
+    ByteBuffer decompressedBuffer = codec.decompress(compressedBuffer);
+    
+    //The original array should be the same length as the decompressed array
+    assertTrue(decompressedBuffer.array().length == inputByteArray.length);
+    
+    //Every byte in the output array should equal the corresponding input byte
+    byte[] outputByteArray = decompressedBuffer.array();
+    for (int i = 0; i < inputByteSize; i++) {
+      assertTrue(inputByteArray[i] == outputByteArray[i]);
+    }
+  }
+}
diff --git a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestColumnFile.java b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestColumnFile.java
new file mode 100644
index 0000000..68b7b61
--- /dev/null
+++ b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestColumnFile.java
@@ -0,0 +1,266 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+import java.util.Collection;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.HashMap;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+@RunWith(value = Parameterized.class)
+public class TestColumnFile {
+
+  private static final File FILE = new File("target", "test.trv");
+  private static final int COUNT = 1024*64;
+
+  private String codec;
+  private String checksum;
+
+  public TestColumnFile(String codec, String checksum) {
+    this.codec = codec;
+    this.checksum = checksum;
+  }
+
+  @Parameters public static Collection<Object[]> codecs() {
+    Object[][] data = new Object[][] {{"null", "null"},
+                                      {"snappy", "crc32"},
+                                      {"deflate", "crc32"}};
+    return Arrays.asList(data);
+  }
+
+  private ColumnFileMetaData createFileMeta() {
+    return new ColumnFileMetaData()
+      .setCodec(codec)
+      .setChecksum(checksum);
+  }
+
+  @Test public void testEmptyFile() throws Exception {
+    FILE.delete();
+    ColumnFileWriter out = new ColumnFileWriter(createFileMeta());
+    out.writeTo(FILE);
+    ColumnFileReader in = new ColumnFileReader(FILE);
+    Assert.assertEquals(0, in.getRowCount());
+    Assert.assertEquals(0, in.getColumnCount());
+    in.close();
+  }
+
+  @Test public void testEmptyColumn() throws Exception {
+    FILE.delete();
+    ColumnFileWriter out =
+      new ColumnFileWriter(createFileMeta(),
+                           new ColumnMetaData("test", ValueType.INT));
+    out.writeTo(FILE);
+    ColumnFileReader in = new ColumnFileReader(FILE);
+    Assert.assertEquals(0, in.getRowCount());
+    Assert.assertEquals(1, in.getColumnCount());
+    ColumnValues<Integer> values = in.getValues("test");
+    for (int i : values)
+      throw new Exception("no value should be found");
+    in.close();
+  }
+
+  @Test public void testInts() throws Exception {
+    FILE.delete();
+
+    ColumnFileWriter out =
+      new ColumnFileWriter(createFileMeta(),
+                           new ColumnMetaData("test", ValueType.INT));
+    Random random = TestUtil.createRandom();
+    for (int i = 0; i < COUNT; i++)
+      out.writeRow(TestUtil.randomLength(random));
+    out.writeTo(FILE);
+
+    random = TestUtil.createRandom();
+    ColumnFileReader in = new ColumnFileReader(FILE);
+    Assert.assertEquals(COUNT, in.getRowCount());
+    Assert.assertEquals(1, in.getColumnCount());
+    Iterator<Integer> i = in.getValues("test");
+    int count = 0;
+    while (i.hasNext()) {
+      Assert.assertEquals(TestUtil.randomLength(random), (int)i.next());
+      count++;
+    }
+    Assert.assertEquals(COUNT, count);
+  }
+
+  @Test public void testLongs() throws Exception {
+    FILE.delete();
+
+    ColumnFileWriter out =
+      new ColumnFileWriter(createFileMeta(),
+                           new ColumnMetaData("test", ValueType.LONG));
+    Random random = TestUtil.createRandom();
+    for (int i = 0; i < COUNT; i++)
+      out.writeRow(random.nextLong());
+    out.writeTo(FILE);
+
+    random = TestUtil.createRandom();
+    ColumnFileReader in = new ColumnFileReader(FILE);
+    Assert.assertEquals(COUNT, in.getRowCount());
+    Assert.assertEquals(1, in.getColumnCount());
+    Iterator<Long> i = in.getValues("test");
+    int count = 0;
+    while (i.hasNext()) {
+      Assert.assertEquals(random.nextLong(), (long)i.next());
+      count++;
+    }
+    Assert.assertEquals(COUNT, count);
+  }
+
+  @Test public void testStrings() throws Exception {
+    FILE.delete();
+
+    ColumnFileWriter out =
+      new ColumnFileWriter(createFileMeta(),
+                           new ColumnMetaData("test", ValueType.STRING));
+    Random random = TestUtil.createRandom();
+    for (int i = 0; i < COUNT; i++)
+      out.writeRow(TestUtil.randomString(random));
+    out.writeTo(FILE);
+
+    random = TestUtil.createRandom();
+    ColumnFileReader in = new ColumnFileReader(FILE);
+    Assert.assertEquals(COUNT, in.getRowCount());
+    Assert.assertEquals(1, in.getColumnCount());
+    Iterator<String> i = in.getValues("test");
+    int count = 0;
+    while (i.hasNext()) {
+      Assert.assertEquals(TestUtil.randomString(random), i.next());
+      count++;
+    }
+    Assert.assertEquals(COUNT, count);
+  }
+
+  @Test public void testTwoColumn() throws Exception {
+    FILE.delete();
+    ColumnFileWriter out =
+      new ColumnFileWriter(createFileMeta(),
+                           new ColumnMetaData("a", ValueType.FIXED32),
+                           new ColumnMetaData("b", ValueType.STRING));
+    Random random = TestUtil.createRandom();
+    for (int i = 0; i < COUNT; i++)
+      out.writeRow(random.nextInt(), TestUtil.randomString(random));
+    out.writeTo(FILE);
+
+    random = TestUtil.createRandom();
+    ColumnFileReader in = new ColumnFileReader(FILE);
+    Assert.assertEquals(COUNT, in.getRowCount());
+    Assert.assertEquals(2, in.getColumnCount());
+    Iterator<String> i = in.getValues("a");
+    Iterator<String> j = in.getValues("b");
+    int count = 0;
+    while (i.hasNext() && j.hasNext()) {
+      Assert.assertEquals(random.nextInt(), i.next());
+      Assert.assertEquals(TestUtil.randomString(random), j.next());
+      count++;
+    }
+    Assert.assertEquals(COUNT, count);
+  }
+
+  @Test public void testSeekLongs() throws Exception {
+    FILE.delete();
+
+    ColumnFileWriter out =
+      new ColumnFileWriter(createFileMeta(),
+                           new ColumnMetaData("test", ValueType.LONG));
+    Random random = TestUtil.createRandom();
+
+    int seekCount = COUNT/1024;
+    int[] seekRows = new int[seekCount];
+    Map<Integer,Integer> seekRowMap = new HashMap<Integer,Integer>(seekCount);
+    while (seekRowMap.size() < seekCount) {
+      int row = random.nextInt(COUNT);
+      if (!seekRowMap.containsKey(row)) {
+        seekRows[seekRowMap.size()] = row;
+        seekRowMap.put(row, seekRowMap.size());
+      }
+    }
+
+    Long[] seekValues = new Long[seekCount];
+    for (int i = 0; i < COUNT; i++) {
+      long l = random.nextLong();
+      out.writeRow(l);
+      if (seekRowMap.containsKey(i))
+        seekValues[seekRowMap.get(i)] = l;
+    }
+    out.writeTo(FILE);
+
+    ColumnFileReader in = new ColumnFileReader(FILE);
+    ColumnValues<Long> v = in.getValues("test");
+
+    for (int i = 0; i < seekCount; i++) {
+      v.seek(seekRows[i]);
+      Assert.assertEquals(seekValues[i], v.next());
+    }
+
+  }
+
+  @Test public void testSeekStrings() throws Exception {
+    FILE.delete();
+
+    ColumnFileWriter out =
+      new ColumnFileWriter(createFileMeta(),
+                           new ColumnMetaData("test", ValueType.STRING)
+                           .hasIndexValues(true));
+
+    Random random = TestUtil.createRandom();
+
+    int seekCount = COUNT/1024;
+    Map<Integer,Integer> seekRowMap = new HashMap<Integer,Integer>(seekCount);
+    while (seekRowMap.size() < seekCount) {
+      int row = random.nextInt(COUNT);
+      if (!seekRowMap.containsKey(row))
+        seekRowMap.put(row, seekRowMap.size());
+    }
+
+    String[] values = new String[COUNT];
+    for (int i = 0; i < COUNT; i++)
+      values[i] = TestUtil.randomString(random);
+    Arrays.sort(values);
+
+    String[] seekValues = new String[seekCount];
+    for (int i = 0; i < COUNT; i++) {
+      out.writeRow(values[i]);
+      if (seekRowMap.containsKey(i))
+        seekValues[seekRowMap.get(i)] = values[i];
+    }
+    out.writeTo(FILE);
+
+    ColumnFileReader in = new ColumnFileReader(FILE);
+    ColumnValues<String> v = in.getValues("test");
+
+    for (int i = 0; i < seekCount; i++) {
+      v.seek(seekValues[i]);
+      Assert.assertEquals(seekValues[i], v.next());
+    }
+
+  }
+
+}
diff --git a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java
new file mode 100644
index 0000000..707848e
--- /dev/null
+++ b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestIOBuffers.java
@@ -0,0 +1,267 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.util.Random;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestIOBuffers {
+
+  private static final int COUNT = 1001;
+
+  @Test public void testEmpty() throws Exception {
+    OutputBuffer out = new OutputBuffer();
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    Assert.assertEquals(0, in.tell());
+    Assert.assertEquals(0, in.length());
+  }
+
+  @Test public void testZero() throws Exception {
+    OutputBuffer out = new OutputBuffer();
+    out.writeInt(0);
+    byte[] bytes = out.toByteArray();
+    Assert.assertEquals(1, bytes.length);
+    Assert.assertEquals(0, bytes[0]);
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    Assert.assertEquals(0, in.readInt());
+  }
+
+  @Test public void testBoolean() throws Exception {
+    Random random = TestUtil.createRandom();
+    OutputBuffer out = new OutputBuffer();
+    for (int i = 0; i < COUNT; i++)
+      out.writeValue(random.nextBoolean(), ValueType.BOOLEAN);
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    random = TestUtil.createRandom();
+    for (int i = 0; i < COUNT; i++)
+      Assert.assertEquals(random.nextBoolean(),
+                          in.readValue(ValueType.BOOLEAN));
+  }
+
+  @Test public void testInt() throws Exception {
+    Random random = TestUtil.createRandom();
+    OutputBuffer out = new OutputBuffer();
+    for (int i = 0; i < COUNT; i++)
+      out.writeInt(random.nextInt());
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    random = TestUtil.createRandom();
+    for (int i = 0; i < COUNT; i++)
+      Assert.assertEquals(random.nextInt(), in.readInt());
+  }
+
+  @Test public void testLong() throws Exception {
+    Random random = TestUtil.createRandom();
+    OutputBuffer out = new OutputBuffer();
+    for (int i = 0; i < COUNT; i++)
+      out.writeLong(random.nextLong());
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    random = TestUtil.createRandom();
+    for (int i = 0; i < COUNT; i++)
+      Assert.assertEquals(random.nextLong(), in.readLong());
+  }
+
+  @Test public void testFixed32() throws Exception {
+    Random random = TestUtil.createRandom();
+    OutputBuffer out = new OutputBuffer();
+    for (int i = 0; i < COUNT; i++)
+      out.writeFixed32(random.nextInt());
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    random = TestUtil.createRandom();
+    for (int i = 0; i < COUNT; i++)
+      Assert.assertEquals(random.nextInt(), in.readFixed32());
+  }
+
+  @Test public void testFixed64() throws Exception {
+    Random random = TestUtil.createRandom();
+    OutputBuffer out = new OutputBuffer();
+    for (int i = 0; i < COUNT; i++)
+      out.writeFixed64(random.nextLong());
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    random = TestUtil.createRandom();
+    for (int i = 0; i < COUNT; i++)
+      Assert.assertEquals(random.nextLong(), in.readFixed64());
+  }
+  
+  @Test public void testFloat() throws Exception {
+    Random random = TestUtil.createRandom();
+    OutputBuffer out = new OutputBuffer();
+    for (int i = 0; i < COUNT; i++)
+      out.writeFloat(random.nextFloat());
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    random = TestUtil.createRandom();
+    for (int i = 0; i < COUNT; i++)
+      Assert.assertEquals(random.nextFloat(), in.readFloat(), 0);
+  }
+  
+  @Test public void testDouble() throws Exception {
+    OutputBuffer out = new OutputBuffer();
+    for (int i = 0; i < COUNT; i++)
+      out.writeDouble(Double.MIN_VALUE);
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    for (int i = 0; i < COUNT; i++)
+      Assert.assertEquals(Double.MIN_VALUE, in.readDouble(), 0);
+  }
+  
+  @Test public void testBytes() throws Exception {
+    Random random = TestUtil.createRandom();
+    OutputBuffer out = new OutputBuffer();
+    for (int i = 0; i < COUNT; i++)
+      out.writeBytes(TestUtil.randomBytes(random));
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    random = TestUtil.createRandom();
+    for (int i = 0; i < COUNT; i++)
+      Assert.assertEquals(TestUtil.randomBytes(random), in.readBytes(null));
+  }
+
+  @Test public void testString() throws Exception {
+    Random random = TestUtil.createRandom();
+    OutputBuffer out = new OutputBuffer();
+    for (int i = 0; i < COUNT; i++)
+      out.writeString(TestUtil.randomString(random));
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    random = TestUtil.createRandom();
+    for (int i = 0; i < COUNT; i++)
+      Assert.assertEquals(TestUtil.randomString(random), in.readString());
+  }
+  @Test public void testSkipNull() throws Exception {
+    long sentinel = Long.MAX_VALUE;
+    OutputBuffer out = new OutputBuffer();
+    out.writeValue(null, ValueType.NULL);
+    out.writeLong(sentinel);
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    in.skipValue(ValueType.NULL);
+    Assert.assertEquals(sentinel, in.readLong());
+  }
+  @Test public void testSkipBoolean() throws Exception {
+    long sentinel = Long.MAX_VALUE;
+    OutputBuffer out = new OutputBuffer();
+    out.writeValue(false, ValueType.BOOLEAN);
+    out.writeLong(sentinel);
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    in.skipValue(ValueType.BOOLEAN);
+    Assert.assertEquals(sentinel, in.readLong());
+  }
+  @Test public void testSkipInt() throws Exception {
+    long sentinel = Long.MAX_VALUE;
+    OutputBuffer out = new OutputBuffer();
+    out.writeValue(Integer.MAX_VALUE, ValueType.INT);
+    out.writeLong(sentinel);
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    in.skipValue(ValueType.INT);
+    Assert.assertEquals(sentinel, in.readLong());
+  }
+  @Test public void testSkipLong() throws Exception {
+    long sentinel = Long.MAX_VALUE;
+    OutputBuffer out = new OutputBuffer();
+    out.writeValue(Long.MAX_VALUE, ValueType.LONG);
+    out.writeLong(sentinel);
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    in.skipValue(ValueType.LONG);
+    Assert.assertEquals(sentinel, in.readLong());
+  }
+  @Test public void testSkipFixed32() throws Exception {
+    long sentinel = Long.MAX_VALUE;
+    OutputBuffer out = new OutputBuffer();
+    out.writeValue(Integer.MAX_VALUE, ValueType.FIXED32);
+    out.writeLong(sentinel);
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    in.skipValue(ValueType.FIXED32);
+    Assert.assertEquals(sentinel, in.readLong());
+  }
+  @Test public void testSkipFixed64() throws Exception {
+    long sentinel = Long.MAX_VALUE;
+    OutputBuffer out = new OutputBuffer();
+    out.writeValue(Long.MAX_VALUE, ValueType.FIXED64);
+    out.writeLong(sentinel);
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    in.skipValue(ValueType.FIXED64);
+    Assert.assertEquals(sentinel, in.readLong());
+  }
+  @Test public void testSkipFloat() throws Exception {
+    long sentinel = Long.MAX_VALUE;
+    OutputBuffer out = new OutputBuffer();
+    out.writeValue(Float.MAX_VALUE, ValueType.FLOAT);
+    out.writeLong(sentinel);
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    in.skipValue(ValueType.FLOAT);
+    Assert.assertEquals(sentinel, in.readLong());
+  }
+  @Test public void testSkipDouble() throws Exception {
+    long sentinel = Long.MAX_VALUE;
+    OutputBuffer out = new OutputBuffer();
+    out.writeValue(Double.MAX_VALUE, ValueType.DOUBLE);
+    out.writeLong(sentinel);
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    in.skipValue(ValueType.DOUBLE);
+    Assert.assertEquals(sentinel, in.readLong());
+  }
+  @Test public void testSkipString() throws Exception {
+    long sentinel = Long.MAX_VALUE;
+    OutputBuffer out = new OutputBuffer();
+    out.writeValue("trevni", ValueType.STRING);
+    out.writeLong(sentinel);
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    in.skipValue(ValueType.STRING);
+    Assert.assertEquals(sentinel, in.readLong());
+  }
+  @Test public void testSkipBytes() throws Exception {
+    long sentinel = Long.MAX_VALUE;
+    OutputBuffer out = new OutputBuffer();
+    out.writeValue("trevni".getBytes(), ValueType.BYTES);
+    out.writeLong(sentinel);
+    
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    in.skipValue(ValueType.BYTES);
+    Assert.assertEquals(sentinel, in.readLong());
+  }
+  @Test public void testInitPos() throws Exception {
+    long sentinel = Long.MAX_VALUE;
+    OutputBuffer out = new OutputBuffer();
+    out.writeValue(Integer.MAX_VALUE, ValueType.INT);
+    out.writeLong(sentinel);
+    InputBuffer in = new InputBuffer(new InputBytes(out.toByteArray()));
+    in.readInt();
+    long pos = in.tell();
+    in = new InputBuffer(new InputBytes(out.toByteArray()), pos);
+    Assert.assertEquals(sentinel, in.readLong());
+  }
+}
diff --git a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestInputBytes.java b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestInputBytes.java
new file mode 100644
index 0000000..c4cb2aa
--- /dev/null
+++ b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestInputBytes.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+import java.util.Arrays;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestInputBytes {
+
+  private static final int SIZE = 1000;
+  private static final int COUNT = 100;
+
+  @Test public void testRandomReads() throws Exception {
+    Random random = new Random();
+    int length = random.nextInt(SIZE);
+    byte[] data = new byte[length];
+    random.nextBytes(data);
+
+    Input in = new InputBytes(data);
+      
+    for (int i = 0; i < COUNT; i++) {
+      int p = random.nextInt(length);
+      int l = Math.min(random.nextInt(SIZE/10), length-p);
+      byte[] buffer = new byte[l];
+      in.read(p, buffer, 0, l);
+      Assert.assertArrayEquals(Arrays.copyOfRange(data, p, p+l), buffer);
+    }
+  }
+}
diff --git a/lang/java/trevni/core/src/test/java/org/apache/trevni/TestUtil.java b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestUtil.java
new file mode 100644
index 0000000..ab4796d
--- /dev/null
+++ b/lang/java/trevni/core/src/test/java/org/apache/trevni/TestUtil.java
@@ -0,0 +1,110 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.trevni;
+
+import java.util.Random;
+
+import java.io.ByteArrayOutputStream;
+import java.nio.ByteBuffer;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestUtil {
+
+  private static long seed;
+  private static boolean seedSet;
+
+  /** Returns the random seed for this test run.  By default uses the current
+   * time, but a test run can be replicated by specifying the "test.seed"
+   * system property.  The seed is printed the first time it's accessed so that
+   * failures can be replicated if needed. */
+  public static long getRandomSeed() {
+    if (!seedSet) {
+      String configured = System.getProperty("test.seed");
+      if (configured != null)
+        seed = Long.valueOf(configured);
+      else 
+        seed = System.currentTimeMillis();
+      System.err.println("test.seed="+seed);
+      seedSet = true;
+    }
+    return seed;
+  }
+
+  public static void resetRandomSeed() {
+    seedSet = false;
+  }
+
+  public static Random createRandom() {
+    return new Random(getRandomSeed());
+  }
+
+  public static ByteBuffer randomBytes(Random random) {
+    byte[] bytes = new byte[randomLength(random)];
+    random.nextBytes(bytes);
+    return ByteBuffer.wrap(bytes);
+  }
+
+  public static String randomString(Random random) {
+    int length = randomLength(random);
+    char[] chars = new char[length];
+    for (int i = 0; i < length; i++)
+      chars[i] = (char)('a'+random.nextInt('z'-'a'));
+    return new String(chars);
+  }
+
+  /** Returns [0-15] 15/16 times.
+   * Returns [0-255] 255/256 times.
+   * Returns [0-4095] 4095/4096 times.
+   * Returns [0-65535] every time. */
+  public static int randomLength(Random random) {
+    int n = random.nextInt();
+    if (n < 0) n = -n;
+    return n &
+      ((n & 0xF0000) != 0
+       ? 0xF
+       : ((n & 0xFF0000) != 0
+          ? 0xFF
+          : ((n & 0xFFF0000) != 0
+             ? 0xFFF
+             : 0xFFFF)));
+  }
+
+  @Test public void testRandomLength() {
+    long total = 0;
+    int count = 1024 * 1024;
+    int min = Short.MAX_VALUE;
+    int max = 0;
+    Random r = createRandom();
+    for (int i = 0; i < count; i++) {
+      int length = randomLength(r);
+      if (min > length) min = length;
+      if (max < length) max = length;
+      total += length;
+    }
+    Assert.assertEquals(0, min);
+    Assert.assertTrue(max > 1024 * 32);
+
+    float average = total / (float)count;
+    Assert.assertTrue(average > 16.0f);
+    Assert.assertTrue(average < 64.0f);
+
+  }
+
+}
diff --git a/lang/java/trevni/doc/apt/spec.apt b/lang/java/trevni/doc/apt/spec.apt
new file mode 100644
index 0000000..dbd1fcf
--- /dev/null
+++ b/lang/java/trevni/doc/apt/spec.apt
@@ -0,0 +1,471 @@
+~~ Licensed to the Apache Software Foundation (ASF) under one or more
+~~ contributor license agreements.  See the NOTICE file distributed with
+~~ this work for additional information regarding copyright ownership.
+~~ The ASF licenses this file to You under the Apache License, Version 2.0
+~~ (the "License"); you may not use this file except in compliance with
+~~ the License.  You may obtain a copy of the License at
+~~
+~~     http://www.apache.org/licenses/LICENSE-2.0
+~~
+~~ Unless required by applicable law or agreed to in writing, software
+~~ distributed under the License is distributed on an "AS IS" BASIS,
+~~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+~~ See the License for the specific language governing permissions and
+~~ limitations under the License.
+  ---
+  Trevni: A Column File Format
+  ---
+
+Trevni: A Column File Format
+
+  Version 0.1
+
+  DRAFT
+
+  This document is the authoritative specification of a file format.
+  Its intent is to permit compatible, independent implementations that
+  read and/or write files in this format.
+
+Introduction
+
+  Data sets are often described as a <table> composed of <rows> and
+  <columns>.  Each record in the dataset is considered a row, with
+  each field of the record occupying a different column.  Writing
+  records to a file one-by-one as they are created results in a
+  <row-major> format, like Hadoop’s SequenceFile or Avro data files.
+
+  In many cases higher query performance may be achieved if the data
+  is instead organized in a <column-major> format, where multiple
+  values of a given column are stored adjacently.  This document
+  defines such a column-major file format for datasets.
+
+  To permit scalable, distributed query evaluation, datasets are
+  partitioned into row groups, containing distinct collections of
+  rows.  Each row group is organized in column-major order, while row
+  groups form a row-major partitioning of the entire dataset.
+
+Rationale
+
+* Goals
+
+  The format is meant to satisfy the following goals:
+
+  [[1]] Maximize the size of row groups.  Disc drives are used most
+  efficiently when sequentially accessing data.  Consider a drive that
+  takes 10ms to seek and transfers at 100MB/second.  If a 10-column
+  dataset whose values are all the same size is split into 10MB row
+  groups, then accessing a single column will require a sequence of
+  seek+1MB reads, for a cost of 20ms/MB processed.  If the same
+  dataset is split into 100MB row groups then this drops to 11ms/MB
+  processed.  This effect is exaggerated for datasets with larger
+  numbers of columns and with columns whose values are smaller than
+  average.  So we’d prefer row groups that are 100MB or greater.
+
+  [[1]] Permit random access within a row group.  Some queries will
+  first examine one column, and, only when certain relatively rare
+  criteria are met, examine other columns.  Rather than iterating
+  through selected columns of the row-group in parallel, one might
+  iterate through one column and randomly access another.  This is
+  called support for WHERE clauses, after the SQL operator of that
+  name.
+
+  [[1]] Minimize the number of files per dataset.  HDFS is a primary
+  intended deployment platform for these files.  The HDFS Namenode
+  requires memory for each file in the filesystem, thus for a format
+  to be HDFS-friendly it should strive to require the minimum number
+  of distinct files.
+
+  [[1]] Support co-location of columns within row-groups.  Row groups
+  are the unit of parallel operation on a column dataset.  For
+  efficient file i/o, the entirety of a row-group should ideally
+  reside on the host that is evaluating the query in order to avoid
+  network latencies and bottlenecks.
+
+  [[1]] Data integrity.  The format should permit applications to
+  detect data corruption.  Many file systems may prevent corruption,
+  but files may be moved between filesystems and be subject to
+  corruption at points in that process.  It is best if the data in a
+  file can be validated independently.
+
+  [[1]] Extensibility.  The format should permit applications to store
+  additional annotations about a dataset in the files, such as type
+  information, origin, etc.  Some environments may have metadata
+  stores for such information, but not all do, and files might be
+  moved among systems with different metadata systems.  The ability to
+  keep such information within the file simplifies the coordination of
+  such information.
+
+  [[1]] Minimal overhead.  The column format should not make datasets
+  appreciably larger.  Storage is a primary cost and a choice to use
+  this format should not require additional storage.
+
+  [[1]] Primary format.  The column format should be usable as a
+  primary format for datasets, not as an auxiliary, accelerated
+  format. Applications that process a dataset in row-major order
+  should be able to easily consume column files and applications that
+  produce datasets in row-major order should be able to easily
+  generate column files.
+
+* Design
+
+  To meet these goals we propose the following design.
+
+  [[1]] Each row group is a separate file.  All values of a column in
+  a file are written contiguously.  This maximizes the row group size,
+  optimizing performance when querying few and small columns.
+
+  [[1]] Each file occupies a single HDFS block.  A larger than normal
+  block size may be specified, e.g., ~1GB instead of the typical
+  ~100MB.  This guarantees co-location and eliminates network use when
+  query processing can be co-located with the file.  This also
+  moderates the memory impact on the HDFS Namenode since no small
+  files are written.
+
+  [[1]] Each column in a file is written as a sequence of ~64kB
+  compressed blocks.  The sequence is prefixed by a table describing
+  all of the blocks in the column to permit random access within the
+  column.
+
+  [[1]] Application-specific metadata may be added at the file,
+  column, and block levels.
+
+  [[1]] Checksums are included with each block, providing data integrity.
+
+* Discussion
+
+  The use of a single block per file achieves the same effect as the
+  custom block placement policy described in the {{CIF}} paper,
+  but while still permitting HDFS rebalancing and not increasing the
+  number of files in the namespace.
+
+Format Specification
+
+  This section formally describes the proposed column file format.
+
+* Data Model
+
+  We assume a simple data model, where a record is a set of named
+  fields, and the value of each field is a sequence of untyped bytes.
+  A type system may be layered on top of this, as specified in the
+  Type Mapping section below.
+
+* Primitive Values
+
+  We define the following primitive value types:
+
+  * Signed 64-bit <<long>> values are written using a variable-length
+zig-zag coding, where the high-order bit in each byte determines
+whether subsequent bytes are present.  For example:
+
+*--------------*------*
+ decimal value | hex bytes
+*--------------*------*
+0              | 00
+*--------------*------*
+-1             | 01
+*--------------*------*
+1              | 02
+*--------------*------*
+...             
+*--------------*------*
+-64            | 7f
+*--------------*------*
+64             | 80 01
+*--------------*------*
+...             
+*--------------*------*
+
+  * <<bytes>> are encoded as a <long> followed by that many bytes of data.
+
+  * a <<string>> is encoded as a <long> followed by that many bytes of
+    UTF-8 encoded character data.
+
+  For example, the three-character string "foo" would be encoded as
+  the <long> value 3 (encoded as hex 06) followed by the UTF-8
+  encoding of 'f', 'o', and 'o' (the hex bytes 66 6f 6f): 06 66 6f 6f
+
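+  A decoder for this coding is compact.  The following is a minimal
+  Java sketch (the method name and byte-array input are illustrative,
+  not part of this specification):
+
+---
+// Decodes one zig-zag varint starting at buf[pos].
+static long readLong(byte[] buf, int pos) {
+  long n = 0;
+  int shift = 0;
+  byte b;
+  do {                                  // low seven bits of each byte, LSB first
+    b = buf[pos++];
+    n |= (long)(b & 0x7F) << shift;
+    shift += 7;
+  } while ((b & 0x80) != 0);            // high bit set: more bytes follow
+  return (n >>> 1) ^ -(n & 1);          // undo zig-zag: restore the sign
+}
+---
+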
+* Type Names
+
+  The following type names are used to describe column values:
+
+  * <<null>>, requires zero bytes.  Sometimes used in array columns.
+
+  * <<boolean>>, one bit, packed into bytes, little-endian.
+
+  * <<int>>, like <long>, but restricted to 32-bit signed values.
+
+  * <<long>>, 64-bit signed values, represented as above.
+
+  * <<fixed32>>, 32-bit values stored as four bytes, little-endian.
+
+  * <<fixed64>>, 64-bit values stored as eight bytes, little-endian.
+
+  * <<float>>, 32-bit IEEE floating point value, little-endian.
+
+  * <<double>>, 64-bit IEEE floating point value, little-endian.
+
+  * <<string>>, as above.
+
+  * <<bytes>>, as above; may be used to encapsulate more complex objects.
+
+  []
+
+  Type names are represented as <strings> (UTF-8 encoded, length-prefixed).
+
+* Metadata
+
+  <<Metadata>> consists of:
+
+  * A <long> indicating the number of metadata key/value pairs.
+
+  * For each pair, a <string> key and <bytes> value.
+
+  []
+
+  All metadata properties that start with "trevni." are reserved.
+
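+  As an illustration, a metadata section could be read with the
+  following Java sketch.  The <InputBuffer> type and its <readLong>,
+  <readString> and <readBytes> helpers are assumed to follow the
+  primitive encodings above; all names are illustrative:
+
+---
+// Reads a metadata section: a pair count, then key/value pairs.
+// (Uses java.util.Map and java.util.LinkedHashMap.)
+Map<String,byte[]> readMetaData(InputBuffer in) throws IOException {
+  long count = in.readLong();           // number of key/value pairs
+  Map<String,byte[]> meta = new LinkedHashMap<String,byte[]>();
+  for (long i = 0; i < count; i++) {
+    String key = in.readString();       // <string>: length-prefixed UTF-8
+    byte[] value = in.readBytes();      // <bytes>: length-prefixed raw bytes
+    meta.put(key, value);
+  }
+  return meta;
+}
+---
+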
+** File Metadata
+
+  The following file metadata properties are defined:
+
+  * <<trevni.codec>> the name of the default compression codec used to
+    compress blocks, as a <string>. Implementations are required to
+    support the "null" codec.  Optional.  If absent, it is assumed to
+    be "null".  Codecs are described in more detail below.
+
+  * <<trevni.checksum>> the name of the checksum algorithm used in this
+    file, as a <string>.  Implementations are required to support the
+    "crc-32” checksum.  Optional.  If absent, it is assumed to be
+    "null".  Checksums are described in more detail below.
+
+  []
+
+** Column Metadata
+
+  The following column metadata properties are defined:
+
+  * <<trevni.codec>> the name of the compression codec used to compress
+    the blocks of this column, as a <string>. Implementations are
+    required to support the "null" codec.  Optional.  If absent, it is
+    assumed to be "null".  Codecs are described in more detail below.
+
+  * <<trevni.name>> the name of the column, as a <string>.  Required.
+
+  * <<trevni.type>> the type of data in the column.  One of the type names
+    above.  Required.
+
+  * <<trevni.values>> if present, indicates that the initial value of each
+    block in this column will be stored in the block’s descriptor.
+    Not permitted for array columns or columns that specify a parent.
+
+  * <<trevni.array>> if present, indicates that each row in this column
+    contains a sequence of values of the named type rather than just a
+    single value.  An integer length precedes each sequence of values
+    indicating the count of values in the sequence.  If the length is
+    negative then it indicates a sequence of zero or one lengths, where -1
+    indicates two zeros, -2 two ones, -3 three zeros, -4 three ones, etc.
+
+  * <<trevni.parent>> if present, the name of an <array> column whose
+    lengths are also used by this column.  Thus values of this column
+    are sequences but no lengths are stored in this column.
+
+  []
+
+  For example, consider the following row, as JSON, where all values
+  are primitive types, but one has multiple values.
+
+---
+{"id"=566, "date"=23423234234
+ "from"="foo at bar.com",
+ "to"=["bar at baz.com", "bang at foo.com"],
+ "content"="Hi!"}
+---
+
+  The columns for this might be specified as:
+
+---
+name=id       type=int
+name=date     type=long
+name=from     type=string
+name=to       type=string  array=true
+name=content  type=string 
+---
+
+  If a row contains an array of records, e.g. "received" in the following:
+
+---
+{"id"=566, "date"=23423234234
+ "from"="foo at bar.com",
+ "to"=["bar at baz.com", "bang at foo.com"],
+ "content"="Hi!"
+ "received"=[{"date"=234234234234, "host"="192.168.0.0.1"},
+             {"date"=234234545645, "host"="192.168.0.0.2"}]
+}
+---
+
+  Then one can define a parent column followed by a column for each
+  field in the record, adding the following columns:
+
+---
+name=received  type=null    array=true
+name=date      type=long    parent=received
+name=host      type=string  parent=received
+---
+
+  If an array value itself contains an array, e.g. the "sigs" below:
+
+---
+{"id"=566, "date"=23423234234
+ "from"="foo at bar.com",
+ "to"=["bar at baz.com", "bang at foo.com"],
+ "content"="Hi!"
+ "received"=[{"date"=234234234234, "host"="192.168.0.0.1",
+              "sigs"=[{"algo"="weak", "value"="0af345de"}]},
+             {"date"=234234545645, "host"="192.168.0.0.2",
+              "sigs"=[]}]
+}
+---
+
+  Then a parent column may be defined that itself has a parent column.
+
+---
+name=sigs   type=null    array=true  parent=received
+name=algo   type=string              parent=sigs
+name=value  type=string              parent=sigs
+---
+
+** Block Metadata
+
+  No block metadata properties are currently defined.
+
+* File Format
+
+  A <<file>> consists of:
+
+  * A <file header>, followed by
+
+  * one or more <columns>.
+
+  []
+
+  A <<file header>> consists of:
+
+  * Four bytes, ASCII 'T', 'r', 'v', followed by 0x02.
+
+  * a <fixed64> indicating the number of rows in the file
+
+  * a <fixed32> indicating the number of columns in the file
+
+  * file <metadata>.
+
+  * for each column, its <column metadata>
+
+  * for each column, its starting position in the file as a <fixed64>.
+
+  []
+
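+  As an illustration, the fixed part of the header could be read with
+  a sketch like this, assuming little-endian <readFixed32> and
+  <readFixed64> helpers plus the <readMetaData> sketch above (all
+  names illustrative):
+
+---
+// Verifies the magic number, then reads the fixed-size header fields.
+void readHeader(InputBuffer in) throws IOException {
+  byte[] magic = new byte[4];
+  in.readFully(magic);
+  if (!Arrays.equals(magic, new byte[] {'T', 'r', 'v', 0x02}))
+    throw new IOException("Not a Trevni file");
+  long rowCount = in.readFixed64();     // row count, little-endian
+  int columnCount = in.readFixed32();   // column count, little-endian
+  Map<String,byte[]> fileMeta = readMetaData(in);
+  // column metadata for each column follows, then each column's
+  // starting position in the file as a fixed64
+}
+---
+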
+  A <<column>> consists of:
+
+  * A <fixed32> indicating the number of blocks in this column.
+
+  * For each block, a <block descriptor>
+
+  * One or more <blocks>.
+
+  []
+
+  A <<block descriptor>> consists of:
+
+  * A <fixed32> indicating the number of rows in the block
+
+  * A <fixed32> indicating the size in bytes of the block before the
+    codec is applied (excluding checksum).
+
+  * A <fixed32> indicating the size in bytes of the block after the
+    codec is applied (excluding checksum).
+
+  * If this column’s metadata declares it to include values, the first
+    value in the block, serialized according to this column's type.
+
+  []
+
+  A <<block>> consists of:
+
+  * The serialized column values.  If a column is an array column then
+    value sequences are preceded by their length, as an <int>.  If a
+    codec is specified, the values and lengths are compressed by that
+    codec.
+
+  * The checksum, as determined by the file metadata.
+
+  []
+
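+  Putting the pieces together, one block could be read with a sketch
+  like the following.  The <decompress> helper stands in for whatever
+  codec the metadata names, and <crc32> is sketched under Checksum
+  algorithms below; the sketch assumes the "crc-32" checksum is in
+  effect:
+
+---
+// Reads one block using the sizes from its block descriptor, then
+// verifies the checksum of the uncompressed data.
+byte[] readBlock(InputBuffer in, int compressedSize,
+                 int uncompressedSize) throws IOException {
+  byte[] compressed = new byte[compressedSize];
+  in.readFully(compressed);
+  byte[] raw = decompress(compressed, uncompressedSize);
+  byte[] stored = new byte[4];          // the four crc-32 checksum bytes
+  in.readFully(stored);
+  if (!Arrays.equals(stored, crc32(raw)))
+    throw new IOException("Block checksum mismatch");
+  return raw;
+}
+---
+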
+* Codecs
+
+  [null] The "null" codec simply passes data through uncompressed.
+
+  [deflate] The "deflate" codec writes the data block using the
+  deflate algorithm as specified in RFC 1951.
+
+  [snappy] The "snappy" codec uses Google's Snappy compression library.
+
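+  RFC 1951's raw (headerless) format corresponds in Java to
+  <java.util.zip.Deflater> constructed with <nowrap> set to true.  A
+  minimal sketch of compressing one block this way follows; the
+  buffering strategy and compression level are implementation
+  choices:
+
+---
+// Compresses a block with raw deflate (RFC 1951, no zlib wrapper).
+// (Uses java.util.zip.Deflater/DeflaterOutputStream and
+// java.io.ByteArrayOutputStream.)
+byte[] deflate(byte[] block) throws IOException {
+  Deflater deflater = new Deflater(Deflater.DEFAULT_COMPRESSION, true);
+  ByteArrayOutputStream baos = new ByteArrayOutputStream();
+  DeflaterOutputStream out = new DeflaterOutputStream(baos, deflater);
+  out.write(block);
+  out.finish();                         // flush all pending compressed data
+  deflater.end();                       // release the native deflate state
+  return baos.toByteArray();
+}
+---
+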
+* Checksum algorithms
+
+  [null] The "null" checksum contains zero bytes.
+
+  [crc-32] Each "crc-32" checksum contains the four bytes of an ISO
+  3309 CRC-32 checksum of the uncompressed block data as a fixed32.
+
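+  In Java, <java.util.zip.CRC32> implements this polynomial, so the
+  four stored bytes could be produced with a sketch like:
+
+---
+// Computes the crc-32 checksum of a block as a little-endian fixed32.
+byte[] crc32(byte[] block) {
+  CRC32 crc = new CRC32();
+  crc.update(block, 0, block.length);
+  int v = (int)crc.getValue();          // low 32 bits of the checksum
+  return new byte[] {                   // little-endian, like other fixed32s
+    (byte)v, (byte)(v >> 8), (byte)(v >> 16), (byte)(v >> 24)};
+}
+---
+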
+* Type Mappings
+
+  We define a standard mapping for how types defined in various
+  serialization systems are represented in a column file.  Records
+  from these systems are <shredded> into columns.  When records are
+  nested, a depth-first recursive walk can assign a separate column
+  for each primitive value.
+
+** Avro
+
+** Protocol Buffers
+
+** Thrift
+
+Implementation Notes
+
+  Some possible techniques for writing column files include:
+
+  [[1]] Use a standard ~100MB block, buffer in memory up to the block
+  size, then flush the file directly to HDFS.  A single reduce task
+  might create multiple output files.  The namenode requires memory
+  proportional to the number of names and blocks*replication.  This
+  would increase the number of names but not blocks, so this should
+  still be much better than a file per column.
+
+  [[1]] Spill each column to a separate local, temporary file then,
+  when the file is closed, append these files, writing a single file
+  to HDFS whose block size is set to be that of the entire file.  This
+  would be a bit slower than the first approach and may have trouble
+  when the local disk is full, but it would make better use of the
+  HDFS namespace and further reduce seeks when processing columns
+  whose values are small.
+
+  [[1]] Use a separate mapreduce job to convert row-major files to
+  column-major.  The map would output (row#, column#, value) tuples,
+  partitioned by row# but sorted by column# then row#.
+  The reducer could directly write the column file.  But the column
+  file format would need to be changed to write counts, descriptors,
+  etc. at the end of files rather than at the front.
+
+  []
+
+  (1) is the simplest to implement and most implementations should
+  start with it.
+
+* References
+
+  {CIF} {{{http://arxiv.org/pdf/1105.4252.pdf}<Column-Oriented Storage
+  Techniques for MapReduce>}}, Floratou, Patel, Shekita, & Tata, VLDB
+  2011.
+
+  {DREMEL} {{{http://research.google.com/pubs/archive/36632.pdf}<Dremel:
+  Interactive Analysis of Web-Scale Datasets>}}, Melnik, Gubarev, Long,
+  Romer, Shivakumar, & Tolton, VLDB 2010.
diff --git a/lang/java/trevni/doc/pom.xml b/lang/java/trevni/doc/pom.xml
new file mode 100644
index 0000000..b0001ec
--- /dev/null
+++ b/lang/java/trevni/doc/pom.xml
@@ -0,0 +1,54 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <artifactId>trevni-java</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.8.0</version>
+    <relativePath>..</relativePath>
+  </parent>
+
+  <groupId>org.apache.avro</groupId>
+  <artifactId>trevni-doc</artifactId>
+  <version>1.8.0</version>
+  <packaging>pom</packaging>
+
+  <name>Trevni Specification</name>
+  <url>http://avro.apache.org/</url>
+
+  <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-site-plugin</artifactId>
+          <version>${maven-site-plugin.version}</version>
+          <configuration>
+            <generateReports>false</generateReports>
+            <siteDirectory>.</siteDirectory>
+          </configuration>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+
+  </build>
+
+</project>
diff --git a/lang/java/trevni/doc/resources/css/site.css b/lang/java/trevni/doc/resources/css/site.css
new file mode 100644
index 0000000..2513999
--- /dev/null
+++ b/lang/java/trevni/doc/resources/css/site.css
@@ -0,0 +1,31 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+#banner {
+  height: 93px;
+  background: none;
+}
+
+#bannerLeft img {
+  height: 90px;
+  margin-left: 30px;
+  margin-top: 4px;
+}
+
+#bannerRight img {
+  margin: 17px;
+}
+
diff --git a/lang/java/trevni/doc/site.xml b/lang/java/trevni/doc/site.xml
new file mode 100644
index 0000000..fe9134b
--- /dev/null
+++ b/lang/java/trevni/doc/site.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project>
+  <skin>
+    <groupId>org.apache.maven.skins</groupId>
+    <artifactId>maven-stylus-skin</artifactId>
+    <version>1.2</version>
+  </skin>
+  <body>
+    <menu name="Trevni">
+      <item name="Spec" href="spec.html" />
+    </menu>
+  </body>
+</project>
diff --git a/lang/java/trevni/pom.xml b/lang/java/trevni/pom.xml
new file mode 100644
index 0000000..2c7a1ef
--- /dev/null
+++ b/lang/java/trevni/pom.xml
@@ -0,0 +1,101 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <artifactId>avro-parent</artifactId>
+    <groupId>org.apache.avro</groupId>
+    <version>1.8.0</version>
+    <relativePath>../</relativePath>
+  </parent>
+
+  <artifactId>trevni-java</artifactId>
+  <name>Trevni Java</name>
+  <groupId>org.apache.avro</groupId>
+  <description>Trevni Java</description>
+  <url>http://avro.apache.org/</url>
+  <packaging>pom</packaging>
+
+  <modules>
+    <module>core</module>
+    <module>avro</module>
+    <module>doc</module>
+  </modules>
+
+  <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-surefire-plugin</artifactId>
+          <version>${surefire-plugin.version}</version>
+          <configuration>
+            <failIfNoTests>false</failIfNoTests>
+          </configuration>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-compiler-plugin</artifactId>
+          <version>${compiler-plugin.version}</version>
+          <configuration>
+            <source>1.6</source>
+            <target>1.6</target>
+          </configuration>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-checkstyle-plugin</artifactId>
+          <version>${checkstyle-plugin.version}</version>
+          <configuration>
+            <consoleOutput>true</consoleOutput>
+            <configLocation>checkstyle.xml</configLocation>
+          </configuration>
+          <executions>
+            <execution>
+              <id>checkstyle-check</id>
+              <phase>test</phase>
+              <goals>
+                <goal>check</goal>
+              </goals>
+            </execution>
+          </executions>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-jar-plugin</artifactId>
+          <version>${jar-plugin.version}</version>
+          <executions>
+            <execution>
+              <goals>
+                <goal>test-jar</goal>
+              </goals>
+            </execution>
+          </executions>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+  </build>
+
+  <profiles>
+  </profiles>
+
+</project>
+
diff --git a/lang/js/Gruntfile.js b/lang/js/Gruntfile.js
new file mode 100644
index 0000000..863fb9d
--- /dev/null
+++ b/lang/js/Gruntfile.js
@@ -0,0 +1,57 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+module.exports = function(grunt) {
+
+  // Project configuration.
+  grunt.initConfig({
+    pkg: grunt.file.readJSON('package.json'),
+    nodeunit: {
+      files: ['test/**/*.js']
+    },
+    watch: {
+      files: ['<%= jshint.files %>'],
+      tasks: ['jshint', 'nodeunit']
+    },
+    jshint: {
+      files: ['Gruntfile.js', 'lib/**/*.js', 'test/**/*.js'],
+      options: {
+        curly: true,
+        eqeqeq: true,
+        immed: true,
+        latedef: true,
+        newcap: true,
+        noarg: true,
+        sub: true,
+        undef: true,
+        boss: true,
+        eqnull: true,
+        node: true
+      },
+      globals: {
+        exports: true
+      }
+    }
+  });
+
+  grunt.loadNpmTasks('grunt-contrib-jshint');
+  grunt.loadNpmTasks('grunt-contrib-nodeunit');
+  grunt.loadNpmTasks('grunt-contrib-watch');
+
+  grunt.registerTask('default', ['jshint', 'nodeunit']);
+  grunt.registerTask('test', ['nodeunit']);
+  grunt.registerTask('lint', ['jshint']);
+
+};
diff --git a/lang/js/README b/lang/js/README
new file mode 100644
index 0000000..85d12b7
--- /dev/null
+++ b/lang/js/README
@@ -0,0 +1,20 @@
+Avro JavaScript
+===============================================================================
+
+Usage
+-------------------------------------------------------------------------------
+
+* *With node.js*: Install this avro-js npm package and then
+  use `require('avro-js')` in your program, as in the sketch below.
+
+* *Outside of node.js (e.g., browser)*: Include the validator.js file and the
+  [underscore.js library](http://underscorejs.org/).
+
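+For example, a minimal sketch, assuming the installed package resolves to
+lib/validator.js (which exports `Validator`):
+
+```js
+var Validator = require('avro-js').Validator;
+
+var schema = {type: 'record', name: 'User', fields: [
+  {name: 'name', type: 'string'},
+  {name: 'age',  type: 'int'}
+]};
+
+Validator.validate(schema, {name: 'Ann', age: 36});   // true
+Validator.validate(schema, {name: 'Ann', age: 'x'});  // throws ValidationError
+```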
+
+Running tests
+-------------------------------------------------------------------------------
+
+To run the included test suite using node.js:
+
+1. Install the npm dependencies by running `npm install` in the "js" dir.
+2. Run `node_modules/grunt/bin/grunt test`.
diff --git a/lang/js/build.sh b/lang/js/build.sh
new file mode 100755
index 0000000..9e7073c
--- /dev/null
+++ b/lang/js/build.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+cd `dirname "$0"`
+
+case "$1" in
+     test)
+        npm install
+        grunt test
+       ;;
+
+     dist)
+       ;;
+
+     clean)
+       ;;
+
+     *)
+       echo "Usage: $0 {test|dist|clean}"
+       exit 1
+
+esac
+
+exit 0
diff --git a/lang/js/lib/validator.js b/lang/js/lib/validator.js
new file mode 100644
index 0000000..bda44a7
--- /dev/null
+++ b/lang/js/lib/validator.js
@@ -0,0 +1,448 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+if (typeof require !== 'undefined') {
+  var _ = require("underscore");
+}
+
+var AvroSpec = {
+  PrimitiveTypes: ['null', 'boolean', 'int', 'long', 'float', 'double', 'bytes', 'string'],
+  ComplexTypes: ['record', 'enum', 'array', 'map', 'union', 'fixed']
+};
+AvroSpec.Types = AvroSpec.PrimitiveTypes.concat(AvroSpec.ComplexTypes);
+
+var InvalidSchemaError = function(msg) { return new Error('InvalidSchemaError: ' + msg); };
+var InvalidProtocolError = function(msg) { return new Error('InvalidProtocolError: ' + msg); };
+var ValidationError = function(msg) { return new Error('ValidationError: ' + msg); };
+var ProtocolValidationError = function(msg) { return new Error('ProtocolValidationError: ' + msg); };
+
+
+function Record(name, namespace, fields) {
+  function validateArgs(name, namespace, fields) {
+    if (!_.isString(name)) {
+      throw new InvalidSchemaError('Record name must be string');
+    }
+
+    if (!_.isNull(namespace) && !_.isUndefined(namespace) && !_.isString(namespace)) {
+      throw new InvalidSchemaError('Record namespace must be string or null');
+    }
+
+    if (!_.isArray(fields)) {
+      throw new InvalidSchemaError('Record fields must be an array');
+    }
+  }
+
+  validateArgs(name, namespace, fields);
+
+  this.name = name;
+  this.namespace = namespace;
+  this.fields = fields;
+}
+
+function makeFullyQualifiedTypeName(schema, namespace) {
+  var typeName = null;
+  if (_.isString(schema)) {
+    typeName = schema;
+  } else if (_.isObject(schema)) {
+    if (_.isString(schema.namespace)) {
+      namespace = schema.namespace;
+    }
+    if (_.isString(schema.name)) {
+      typeName = schema.name;
+    } else if (_.isString(schema.type)) {
+      typeName = schema.type;
+    }
+  } else {
+    throw new InvalidSchemaError('unable to determine fully qualified type name from schema ' + JSON.stringify(schema) + ' in namespace ' + namespace);
+  }
+
+  if (!_.isString(typeName)) {
+    throw new InvalidSchemaError('unable to determine type name from schema ' + JSON.stringify(schema) + ' in namespace ' + namespace);
+  }
+
+  if (typeName.indexOf('.') !== -1) {
+    return typeName;
+  } else if (_.contains(AvroSpec.PrimitiveTypes, typeName)) {
+    return typeName;
+  } else if (_.isString(namespace)) {
+    return namespace + '.' + typeName;
+  } else {
+    return typeName;
+  }
+}
+
+function Union(typeSchemas, namespace) {
+  this.branchNames = function() {
+    return _.map(typeSchemas, function(typeSchema) { return makeFullyQualifiedTypeName(typeSchema, namespace); });
+  };
+
+  function validateArgs(typeSchemas) {
+    if (!_.isArray(typeSchemas) || _.isEmpty(typeSchemas)) {
+      throw new InvalidSchemaError('Union must have at least 1 branch');
+    }
+  }
+
+  validateArgs(typeSchemas);
+
+  this.typeSchemas = typeSchemas;
+  this.namespace = namespace;
+}
+
+function Enum(symbols) {
+
+  function validateArgs(symbols) {
+    if (!_.isArray(symbols)) {
+      throw new InvalidSchemaError('Enum must have array of symbols, got ' + JSON.stringify(symbols));
+    }
+    if (!_.all(symbols, function(symbol) { return _.isString(symbol); })) {
+      throw new InvalidSchemaError('Enum symbols must be strings, got ' + JSON.stringify(symbols));
+    }
+  }
+
+  validateArgs(symbols);
+
+  this.symbols = symbols;
+}
+
+function AvroArray(itemSchema) {
+
+  function validateArgs(itemSchema) {
+    if (_.isNull(itemSchema) || _.isUndefined(itemSchema)) {
+      throw new InvalidSchemaError('Array "items" schema should not be null or undefined');
+    }
+  }
+
+  validateArgs(itemSchema);
+
+  this.itemSchema = itemSchema;
+}
+
+function Map(valueSchema) {
+
+  function validateArgs(valueSchema) {
+    if (_.isNull(valueSchema) || _.isUndefined(valueSchema)) {
+      throw new InvalidSchemaError('Map "values" schema should not be null or undefined');
+    }
+  }
+
+  validateArgs(valueSchema);
+
+  this.valueSchema = valueSchema;
+}
+
+function Field(name, schema) {
+  function validateArgs(name, schema) {
+    if (!_.isString(name)) {
+      throw new InvalidSchemaError('Field name must be string');
+    }
+  }
+
+  validateArgs(name, schema);
+
+  this.name = name;
+  this.schema = schema;
+}
+
+function Primitive(type) {
+  function validateArgs(type) {
+    if (!_.isString(type)) {
+      throw new InvalidSchemaError('Primitive type name must be a string');
+    }
+
+    if (!_.contains(AvroSpec.PrimitiveTypes, type)) {
+      throw new InvalidSchemaError('Primitive type must be one of: ' + JSON.stringify(AvroSpec.PrimitiveTypes) + '; got ' + type);
+    }
+  }
+
+  validateArgs(type);
+
+  this.type = type;
+}
+
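+// Validator compiles a schema (a JSON schema object or a type-name
+// string) and checks plain JavaScript values against it: validate()
+// returns true on success and throws ValidationError on mismatch.
+// `namespace` and `namedTypes` support references to named types
+// defined elsewhere (e.g. within a protocol).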
+function Validator(schema, namespace, namedTypes) {
+  this.validate = function(obj) {
+    return _validate(this.schema, obj);
+  };
+
+  var _validate = function(schema, obj) {
+    if (schema instanceof Record) {
+      return _validateRecord(schema, obj);
+    } else if (schema instanceof Union) {
+      return _validateUnion(schema, obj);
+    } else if (schema instanceof Enum) {
+      return _validateEnum(schema, obj);
+    } else if (schema instanceof AvroArray) {
+      return _validateArray(schema, obj);
+    } else if (schema instanceof Map) {
+      return _validateMap(schema, obj);
+    } else if (schema instanceof Primitive) {
+      return _validatePrimitive(schema, obj);
+    } else {
+      throw new InvalidSchemaError('validation not yet implemented: ' + JSON.stringify(schema));
+    }
+  };
+
+  var _validateRecord = function(schema, obj) {
+    if (!_.isObject(obj) || _.isArray(obj)) {
+      throw new ValidationError('Expected record Javascript type to be non-array object, got ' + JSON.stringify(obj));
+    }
+
+    var schemaFieldNames = _.pluck(schema.fields, 'name').sort();
+    var objFieldNames = _.keys(obj).sort();
+    if (!_.isEqual(schemaFieldNames, objFieldNames)) {
+      throw new ValidationError('Expected record fields ' + JSON.stringify(schemaFieldNames) + '; got ' + JSON.stringify(objFieldNames));
+    }
+
+    return _.all(schema.fields, function(field) {
+      return _validate(field.schema, obj[field.name]);
+    });
+  };
+
+  var _validateUnion = function(schema, obj) {
+    if (_.isObject(obj)) {
+      if (_.isArray(obj)) {
+        throw new ValidationError('Expected union Javascript type to be non-array object (or null), got ' + JSON.stringify(obj));
+      } else if (_.size(obj) !== 1) {
+        throw new ValidationError('Expected union Javascript object to be object with exactly 1 key (or null), got ' + JSON.stringify(obj));
+      } else {
+        var unionBranch = _.keys(obj)[0];
+        if (unionBranch === "") {
+          throw new ValidationError('Expected union Javascript object to contain non-empty string branch, got ' + JSON.stringify(obj));
+        }
+        if (_.contains(schema.branchNames(), unionBranch)) {
+          return true;
+        } else {
+          throw new ValidationError('Expected union branch to be one of ' + JSON.stringify(schema.branchNames()) + '; got ' + JSON.stringify(unionBranch));
+        }
+      }
+    } else if (_.isNull(obj)) {
+      if (_.contains(schema.branchNames(), 'null')) {
+        return true;
+      } else {
+        throw new ValidationError('Expected union branch to be one of ' + JSON.stringify(schema.branchNames()) + '; got ' + JSON.stringify(obj));
+      }
+    } else {
+      throw new ValidationError('Expected union Javascript object to be non-array object of size 1 or null, got ' + JSON.stringify(obj));
+    }
+  };
+
+  var _validateEnum = function(schema, obj) {
+    if (_.isString(obj)) {
+      if (_.contains(schema.symbols, obj)) {
+        return true;
+      } else {
+        throw new ValidationError('Expected enum value to be one of ' + JSON.stringify(schema.symbols) + '; got ' + JSON.stringify(obj));
+      }
+    } else {
+      throw new ValidationError('Expected enum Javascript object to be string, got ' + JSON.stringify(obj));
+    }
+  };
+
+  var _validateArray = function(schema, obj) {
+    if (_.isArray(obj)) {
+      return _.all(obj, function(member) { return _validate(schema.itemSchema, member); });
+    } else {
+      throw new ValidationError('Expected array Javascript object to be array, got ' + JSON.stringify(obj));
+    }
+  };
+
+  var _validateMap = function(schema, obj) {
+    if (_.isObject(obj) && !_.isArray(obj)) {
+      return _.all(obj, function(value) { return _validate(schema.valueSchema, value); });
+    } else if (_.isArray(obj)) {
+      throw new ValidationError('Expected map Javascript object to be non-array object, got array ' + JSON.stringify(obj));
+    } else {
+      throw new ValidationError('Expected map Javascript object to be non-array object, got ' + JSON.stringify(obj));
+    }
+  };
+
+  var _validatePrimitive = function(schema, obj) {
+    switch (schema.type) {
+      case 'null':
+        if (_.isNull(obj) || _.isUndefined(obj)) {
+          return true;
+        } else {
+          throw new ValidationError('Expected Javascript null or undefined for Avro null, got ' + JSON.stringify(obj));
+        }
+        break;
+      case 'boolean':
+        if (_.isBoolean(obj)) {
+          return true;
+        } else {
+          throw new ValidationError('Expected Javascript boolean for Avro boolean, got ' + JSON.stringify(obj));
+        }
+        break;
+      case 'int':
+        if (_.isNumber(obj) && Math.floor(obj) === obj && obj >= -Math.pow(2, 31) && obj <= Math.pow(2, 31) - 1) {
+          return true;
+        } else {
+          throw new ValidationError('Expected Javascript int32 number for Avro int, got ' + JSON.stringify(obj));
+        }
+        break;
+      case 'long':
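+        // Note: 2^63 - 1 has no exact double representation, so this
+        // range check is necessarily approximate in JavaScript.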
+        if (_.isNumber(obj) && Math.floor(obj) === obj && Math.abs(obj) <= Math.pow(2, 63)) {
+          return true;
+        } else {
+          throw new ValidationError('Expected Javascript int64 number for Avro long, got ' + JSON.stringify(obj));
+        }
+        break;
+      case 'float':
+        if (_.isNumber(obj)) { // TODO: handle NaN?
+          return true;
+        } else {
+          throw new ValidationError('Expected Javascript float number for Avro float, got ' + JSON.stringify(obj));
+        }
+        break;
+      case 'double':
+        if (_.isNumber(obj)) { // TODO: handle NaN?
+          return true;
+        } else {
+          throw new ValidationError('Expected Javascript double number for Avro double, got ' + JSON.stringify(obj));
+        }
+        break;
+      case 'bytes':
+        throw new InvalidSchemaError('not yet implemented: ' + schema.type);
+      case 'string':
+        if (_.isString(obj)) {
+          return true;
+        } else {
+          throw new ValidationError('Expected Javascript string for Avro string, got ' + JSON.stringify(obj));
+        }
+        break;
+      default:
+        throw new InvalidSchemaError('unrecognized primitive type: ' + schema.type);
+    }
+  };
+
+  // TODO: namespace handling is rudimentary. multiple namespaces within a certain nested schema definition
+  // are probably buggy.
+  var _namedTypes = namedTypes || {};
+  var _saveNamedType = function(fullyQualifiedTypeName, schema) {
+    if (_.has(_namedTypes, fullyQualifiedTypeName)) {
+      if (!_.isEqual(_namedTypes[fullyQualifiedTypeName], schema)) {
+        throw new InvalidSchemaError('conflicting definitions for type ' + fullyQualifiedTypeName + ': ' + JSON.stringify(_namedTypes[fullyQualifiedTypeName]) + ' and ' + JSON.stringify(schema));
+      }
+    } else {
+      _namedTypes[fullyQualifiedTypeName] = schema;
+    }
+  };
+
+  var _lookupTypeByFullyQualifiedName = function(fullyQualifiedTypeName) {
+    if (_.has(_namedTypes, fullyQualifiedTypeName)) {
+      return _namedTypes[fullyQualifiedTypeName];
+    } else {
+      return null;
+    }
+  };
+
+  var _parseNamedType = function(schema, namespace) {
+    if (_.contains(AvroSpec.PrimitiveTypes, schema)) {
+      return new Primitive(schema);
+    } else if (!_.isNull(_lookupTypeByFullyQualifiedName(makeFullyQualifiedTypeName(schema, namespace)))) {
+      return _lookupTypeByFullyQualifiedName(makeFullyQualifiedTypeName(schema, namespace));
+    } else {
+      throw new InvalidSchemaError('unknown type name: ' + JSON.stringify(schema) + '; known type names are ' + JSON.stringify(_.keys(_namedTypes)));
+    }
+  };
+
+  var _parseSchema = function(schema, parentSchema, namespace) {
+    if (_.isNull(schema) || _.isUndefined(schema)) {
+      throw new InvalidSchemaError('schema is null, in parentSchema: ' + JSON.stringify(parentSchema));
+    } else if (_.isString(schema)) {
+      return _parseNamedType(schema, namespace);
+    } else if (_.isObject(schema) && !_.isArray(schema)) {
+      if (schema.type === 'record') {
+        var newRecord = new Record(schema.name, schema.namespace, _.map(schema.fields, function(field) {
+          return new Field(field.name, _parseSchema(field.type, schema, schema.namespace || namespace));
+        }));
+        _saveNamedType(makeFullyQualifiedTypeName(schema, namespace), newRecord);
+        return newRecord;
+      } else if (schema.type === 'enum') {
+        if (_.has(schema, 'symbols')) {
+          var newEnum = new Enum(schema.symbols);
+          _saveNamedType(makeFullyQualifiedTypeName(schema, namespace), newEnum);
+          return newEnum;
+        } else {
+          throw new InvalidSchemaError('enum must specify symbols, got ' + JSON.stringify(schema));
+        }
+      } else if (schema.type === 'array') {
+        if (_.has(schema, 'items')) {
+          return new AvroArray(_parseSchema(schema.items, schema, namespace));
+        } else {
+          throw new InvalidSchemaError('array must specify "items" schema, got ' + JSON.stringify(schema));
+        }
+      } else if (schema.type === 'map') {
+        if (_.has(schema, 'values')) {
+          return new Map(_parseSchema(schema.values, schema, namespace));
+        } else {
+          throw new InvalidSchemaError('map must specify "values" schema, got ' + JSON.stringify(schema));
+        }
+      } else if (_.has(schema, 'type') && _.contains(AvroSpec.PrimitiveTypes, schema.type)) {
+        return _parseNamedType(schema.type, namespace);
+      } else {
+        throw new InvalidSchemaError('not yet implemented: ' + schema.type);
+      }
+    } else if (_.isArray(schema)) {
+      if (_.isEmpty(schema)) {
+        throw new InvalidSchemaError('unions must have at least 1 branch');
+      }
+      var branchTypes = _.map(schema, function(branchType) { return _parseSchema(branchType, schema, namespace); });
+      return new Union(branchTypes, namespace);
+    } else {
+      throw new InvalidSchemaError('unexpected Javascript type for schema: ' + (typeof schema));
+    }
+  };
+
+  this.rawSchema = schema;
+  this.schema = _parseSchema(schema, null, namespace);
+}
+
+Validator.validate = function(schema, obj) {
+  return (new Validator(schema)).validate(obj);
+};
+
+function ProtocolValidator(protocol) {
+  this.validate = function(typeName, obj) {
+    var fullyQualifiedTypeName = makeFullyQualifiedTypeName(typeName, protocol.namespace);
+    if (!_.has(_typeSchemaValidators, fullyQualifiedTypeName)) {
+      throw new ProtocolValidationError('Protocol does not contain definition for type ' + JSON.stringify(fullyQualifiedTypeName) + ' (fully qualified from input "' + typeName + '"); known types are ' + JSON.stringify(_.keys(_typeSchemaValidators)));
+    }
+    return _typeSchemaValidators[fullyQualifiedTypeName].validate(obj);
+  };
+
+  var _typeSchemaValidators = {};
+  var _initSchemaValidators = function(protocol) {
+    var namedTypes = {};
+    if (!_.has(protocol, 'protocol') || !_.isString(protocol.protocol)) {
+      throw new InvalidProtocolError('Protocol must contain a "protocol" attribute with a string value');
+    }
+    if (_.isArray(protocol.types)) {
+      _.each(protocol.types, function(typeSchema) {
+        var schemaValidator = new Validator(typeSchema, protocol.namespace, namedTypes);
+        var fullyQualifiedTypeName = makeFullyQualifiedTypeName(typeSchema, protocol.namespace);
+        _typeSchemaValidators[fullyQualifiedTypeName] = schemaValidator;
+      });
+    }
+  };
+
+  _initSchemaValidators(protocol);
+}
+
+ProtocolValidator.validate = function(protocol, typeName, obj) {
+  return (new ProtocolValidator(protocol)).validate(typeName, obj);
+};
+
+if (typeof exports !== 'undefined') {
+  exports['Validator'] = Validator;
+  exports['ProtocolValidator'] = ProtocolValidator;
+}
diff --git a/lang/js/package.json b/lang/js/package.json
new file mode 100644
index 0000000..39430b1
--- /dev/null
+++ b/lang/js/package.json
@@ -0,0 +1,37 @@
+{
+  "name": "avro-js",
+  "version": "0.0.1",
+  "author": "Avro Developers <dev@avro.apache.org>",
+  "description": "Avro validator for Javascript",
+  "contributors": [
+    {
+      "name": "Quinn Slack",
+      "email": "sqs@cs.stanford.edu"
+    }
+  ],
+  "scripts": {
+    "test": "grunt test"
+  },
+  "repository": {
+    "type": "svn",
+    "url": "http://svn.apache.org/repos/asf/avro/trunk/lang/js/"
+  },
+  "keywords": [
+    "avro",
+    "json"
+  ],
+  "dependencies" : {
+    "underscore"   :  "*"
+  },
+  "devDependencies" : {
+    "grunt"        :  "*",
+    "grunt-contrib-jshint" : "*",
+    "grunt-contrib-nodeunit" : "*",
+    "grunt-contrib-watch" : "*"
+  },
+  "noAnalyze": true,
+  "license": "Apache",
+  "engine": {
+    "node": ">=0.4"
+  }
+}
diff --git a/lang/js/test/validator.js b/lang/js/test/validator.js
new file mode 100644
index 0000000..b05e93d
--- /dev/null
+++ b/lang/js/test/validator.js
@@ -0,0 +1,525 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+var validator = require('../lib/validator.js');
+var Validator = validator.Validator;
+var ProtocolValidator = validator.ProtocolValidator;
+
+exports['test'] = {
+  setUp: function(done) {
+    done();
+  },
+  'nonexistent/null/undefined': function(test) {
+    test.throws(function() { return new Validator(); });
+    test.throws(function() { return new Validator(null); });
+    test.throws(function() { return new Validator(undefined); });
+    test.done();
+  },
+  'unrecognized primitive type name': function(test) {
+    test.throws(function() { return new Validator('badtype'); });
+    test.done();
+  },
+  'invalid schema javascript type': function(test) {
+    test.throws(function() { return new Validator(123); });
+    test.throws(function() { return new Validator(function() { }); });
+    test.done();
+  },
+
+  // Primitive types
+  'null': function(test) {
+    test.ok(Validator.validate('null', null));
+    test.ok(Validator.validate('null', undefined));
+    test.throws(function() { Validator.validate('null', 1); });
+    test.throws(function() { Validator.validate('null', 'a'); });
+    test.done();
+  },
+  'boolean': function(test) {
+    test.ok(Validator.validate('boolean', true));
+    test.ok(Validator.validate('boolean', false));
+    test.throws(function() { Validator.validate('boolean', null); });
+    test.throws(function() { Validator.validate('boolean', 1); });
+    test.throws(function() { Validator.validate('boolean', 'a'); });
+    test.done();
+  },
+  'int': function(test) {
+    test.ok(Validator.validate('int', 1));
+    test.ok(Validator.validate('int', Math.pow(2, 31) - 1));
+    test.throws(function() { Validator.validate('int', 1.5); });
+    test.throws(function() { Validator.validate('int', Math.pow(2, 40)); });
+    test.throws(function() { Validator.validate('int', null); });
+    test.throws(function() { Validator.validate('int', 'a'); });
+    test.done();
+  },
+  'long': function(test) {
+    test.ok(Validator.validate('long', 1));
+    test.ok(Validator.validate('long', Math.pow(2, 63) - 1));
+    test.throws(function() { Validator.validate('long', 1.5); });
+    test.throws(function() { Validator.validate('long', Math.pow(2, 70)); });
+    test.throws(function() { Validator.validate('long', null); });
+    test.throws(function() { Validator.validate('long', 'a'); });
+    test.done();
+  },
+  'float': function(test) {
+    test.ok(Validator.validate('float', 1));
+    test.ok(Validator.validate('float', 1.5));
+    test.throws(function() { Validator.validate('float', 'a'); });
+    test.throws(function() { Validator.validate('float', null); });
+    test.done();
+  },
+  'double': function(test) {
+    test.ok(Validator.validate('double', 1));
+    test.ok(Validator.validate('double', 1.5));
+    test.throws(function() { Validator.validate('double', 'a'); });
+    test.throws(function() { Validator.validate('double', null); });
+    test.done();
+  },
+  'bytes': function(test) {
+    // not implemented yet
+    test.throws(function() { Validator.validate('bytes', 1); });
+    test.done();
+  },
+  'string': function(test) {
+    test.ok(Validator.validate('string', 'a'));
+    test.throws(function() { Validator.validate('string', 1); });
+    test.throws(function() { Validator.validate('string', null); });
+    test.done();
+  },
+
+  // Records
+  'empty-record': function(test) {
+    var schema = {type: 'record', name: 'EmptyRecord', fields: []};
+    test.ok(Validator.validate(schema, {}));
+    test.throws(function() { Validator.validate(schema, 1); });
+    test.throws(function() { Validator.validate(schema, null); });
+    test.throws(function() { Validator.validate(schema, 'a'); });
+    test.done();
+  },
+  'record-with-string': function(test) {
+    var schema = {type: 'record', name: 'EmptyRecord', fields: [{name: 'stringField', type: 'string'}]};
+    test.ok(Validator.validate(schema, {stringField: 'a'}));
+    test.throws(function() { Validator.validate(schema, {}); });
+    test.throws(function() { Validator.validate(schema, {stringField: 1}); });
+    test.throws(function() { Validator.validate(schema, {stringField: []}); });
+    test.throws(function() { Validator.validate(schema, {stringField: {}}); });
+    test.throws(function() { Validator.validate(schema, {stringField: null}); });
+    test.throws(function() { Validator.validate(schema, {stringField: 'a', unexpectedField: 'a'}); });
+    test.done();
+  },
+  'record-with-string-and-number': function(test) {
+    var schema = {type: 'record', name: 'EmptyRecord', fields: [{name: 'stringField', type: 'string'}, {name: 'intField', type: 'int'}]};
+    test.ok(Validator.validate(schema, {stringField: 'a', intField: 1}));
+    test.throws(function() { Validator.validate(schema, {}); });
+    test.throws(function() { Validator.validate(schema, {stringField: 'a'}); });
+    test.throws(function() { Validator.validate(schema, {intField: 1}); });
+    test.throws(function() { Validator.validate(schema, {stringField: 'a', intField: 1, unexpectedField: 'a'}); });
+    test.done();
+  },
+  'nested-record-with-namespace-relative': function(test) {
+    var schema = {type: 'record', namespace: 'x.y.z', name: 'RecordA', fields: [{name: 'recordBField1', type: ['null', {type: 'record', name: 'RecordB', fields: []}]}, {name: 'recordBField2', type: 'RecordB'}]};
+    test.ok(Validator.validate(schema, {recordBField1: null, recordBField2: {}}));
+    test.ok(Validator.validate(schema, {recordBField1: {'x.y.z.RecordB': {}}, recordBField2: {}}));
+    test.throws(function() { Validator.validate(schema, {}); });
+    test.throws(function() { Validator.validate(schema, {recordBField1: null}); });
+    test.throws(function() { Validator.validate(schema, {recordBField2: {}}); });
+    test.throws(function() { Validator.validate(schema, {recordBField1: {'RecordB': {}}, recordBField2: {}}); });
+    test.done();
+  },
+  'nested-record-with-namespace-absolute': function(test) {
+    var schema = {type: 'record', namespace: 'x.y.z', name: 'RecordA', fields: [{name: 'recordBField1', type: ['null', {type: 'record', name: 'RecordB', fields: []}]}, {name: 'recordBField2', type: 'x.y.z.RecordB'}]};
+    test.ok(Validator.validate(schema, {recordBField1: null, recordBField2: {}}));
+    test.ok(Validator.validate(schema, {recordBField1: {'x.y.z.RecordB': {}}, recordBField2: {}}));
+    test.throws(function() { Validator.validate(schema, {}); });
+    test.throws(function() { Validator.validate(schema, {recordBField1: null}); });
+    test.throws(function() { Validator.validate(schema, {recordBField2: {}}); });
+    test.throws(function() { Validator.validate(schema, {recordBField1: {'RecordB': {}}, recordBField2: {}}); });
+    test.done();
+  },
+
+
+  // Enums
+  'enum': function(test) {
+    var schema = {type: 'enum', name: 'Colors', symbols: ['Red', 'Blue']};
+    test.ok(Validator.validate(schema, 'Red'));
+    test.ok(Validator.validate(schema, 'Blue'));
+    test.throws(function() { Validator.validate(schema, null); });
+    test.throws(function() { Validator.validate(schema, undefined); });
+    test.throws(function() { Validator.validate(schema, 'NotAColor'); });
+    test.throws(function() { Validator.validate(schema, ''); });
+    test.throws(function() { Validator.validate(schema, {}); });
+    test.throws(function() { Validator.validate(schema, []); });
+    test.throws(function() { Validator.validate(schema, 1); });
+    test.done();
+  },
+
+  // Unions
+  'union': function(test) {
+    var schema = ['string', 'int'];
+    test.ok(Validator.validate(schema, {string: 'a'}));
+    test.ok(Validator.validate(schema, {int: 1}));
+    test.throws(function() { Validator.validate(schema, null); });
+    test.throws(function() { Validator.validate(schema, undefined); });
+    test.throws(function() { Validator.validate(schema, 'a'); });
+    test.throws(function() { Validator.validate(schema, 1); });
+    test.throws(function() { Validator.validate(schema, {string: 'a', int: 1}); });
+    test.throws(function() { Validator.validate(schema, []); });
+    test.done();
+  },
+
+  'union with null': function(test) {
+    var schema = ['string', 'null'];
+    test.ok(Validator.validate(schema, {string: 'a'}));
+    test.ok(Validator.validate(schema, null));
+    test.throws(function() { Validator.validate(schema, undefined); });
+    test.done();
+  },
+
+  'nested union': function(test) {
+    var schema = ['string', {type: 'int'}];
+    test.ok(Validator.validate(schema, {string: 'a'}));
+    test.ok(Validator.validate(schema, {int: 1}));
+    test.throws(function() { Validator.validate(schema, null); });
+    test.throws(function() { Validator.validate(schema, undefined); });
+    test.throws(function() { Validator.validate(schema, 'a'); });
+    test.throws(function() { Validator.validate(schema, 1); });
+    test.throws(function() { Validator.validate(schema, {string: 'a', int: 1}); });
+    test.throws(function() { Validator.validate(schema, []); });
+    test.done();
+  },
+
+  // Arrays
+  'array': function(test) {
+    var schema = {type: "array", items: "string"};
+    test.ok(Validator.validate(schema, []));
+    test.ok(Validator.validate(schema, ["a"]));
+    test.ok(Validator.validate(schema, ["a", "b", "a"]));
+    test.throws(function() { Validator.validate(schema, null); });
+    test.throws(function() { Validator.validate(schema, undefined); });
+    test.throws(function() { Validator.validate(schema, 'a'); });
+    test.throws(function() { Validator.validate(schema, 1); });
+    test.throws(function() { Validator.validate(schema, {}); });
+    test.throws(function() { Validator.validate(schema, {"1": "a"}); });
+    test.throws(function() { Validator.validate(schema, {1: "a"}); });
+    test.throws(function() { Validator.validate(schema, {1: "a", "b": undefined}); });
+    test.throws(function() { var a = {}; a[0] = "a"; Validator.validate(schema, a); });
+    test.throws(function() { Validator.validate(schema, [1]); });
+    test.throws(function() { Validator.validate(schema, [1, "a"]); });
+    test.throws(function() { Validator.validate(schema, ["a", 1]); });
+    test.throws(function() { Validator.validate(schema, [null, 1]); });
+    test.done();
+  },
+
+  // Maps
+  'map': function(test) {
+    var schema = {type: "map", values: "string"};
+    test.ok(Validator.validate(schema, {}));
+    test.ok(Validator.validate(schema, {"a": "b"}));
+    test.ok(Validator.validate(schema, {"a": "b", "c": "d"}));
+    test.throws(function() { Validator.validate(schema, null); });
+    test.throws(function() { Validator.validate(schema, undefined); });
+    test.throws(function() { Validator.validate(schema, 'a'); });
+    test.throws(function() { Validator.validate(schema, 1); });
+    test.throws(function() { Validator.validate(schema, [1]); });
+    test.throws(function() { Validator.validate(schema, {"a": 1}); });
+    test.throws(function() { Validator.validate(schema, {"a": "b", "c": 1}); });
+    test.done();
+  },
+
+  // Protocols
+  'protocol': function(test) {
+    var protocol = {protocol: "Protocol1", namespace: "x.y.z", types: [
+      {type: "record", name: "RecordA", fields: []},
+      {type: "record", name: "RecordB", fields: [{name: "recordAField", type: "RecordA"}]}
+    ]};
+    test.ok(ProtocolValidator.validate(protocol, 'RecordA', {}));
+    test.ok(ProtocolValidator.validate(protocol, 'x.y.z.RecordA', {}));
+    test.ok(ProtocolValidator.validate(protocol, 'RecordB', {recordAField: {}}));
+    test.ok(ProtocolValidator.validate(protocol, 'x.y.z.RecordB', {recordAField: {}}));
+    test.throws(function() { ProtocolValidator.validate(protocol, 'RecordDoesNotExist', {}); });
+    test.throws(function() { ProtocolValidator.validate(protocol, 'RecordDoesNotExist', null); });
+    test.throws(function() { ProtocolValidator.validate(protocol, 'RecordB', {}); });
+    test.throws(function() { ProtocolValidator.validate(protocol, null, {}); });
+    test.throws(function() { ProtocolValidator.validate(protocol, '', {}); });
+    test.throws(function() { ProtocolValidator.validate(protocol, {}, {}); });
+    test.done();
+  },
+
+  // Samples
+  'link': function(test) {
+    var schema = {
+      "type" : "record",
+      "name" : "Bundle",
+      "namespace" : "aa.bb.cc",
+      "fields" : [ {
+        "name" : "id",
+        "type" : "string"
+      }, {
+        "name" : "type",
+        "type" : "string"
+      }, {
+        "name" : "data_",
+        "type" : [ "null", {
+          "type" : "record",
+          "name" : "LinkData",
+          "fields" : [ {
+            "name" : "address",
+            "type" : "string"
+          }, {
+            "name" : "title",
+            "type" : [ "null", "string" ],
+            "default" : null
+          }, {
+            "name" : "excerpt",
+            "type" : [ "null", "string" ],
+            "default" : null
+          }, {
+            "name" : "image",
+            "type" : [ "null", {
+              "type" : "record",
+              "name" : "Image",
+              "fields" : [ {
+                "name" : "url",
+                "type" : "string"
+              }, {
+                "name" : "width",
+                "type" : "int"
+              }, {
+                "name" : "height",
+                "type" : "int"
+              } ]
+            } ],
+            "default" : null
+          }, {
+            "name" : "meta",
+            "type" : {
+              "type" : "map",
+              "values" : "string"
+            },
+            "default" : {
+            }
+          } ]
+        } ],
+        "default" : null
+      }, {
+        "name" : "atoms_",
+        "type" : {
+          "type" : "map",
+          "values" : {
+            "type" : "map",
+            "values" : {
+              "type" : "record",
+              "name" : "Atom",
+              "fields" : [ {
+                "name" : "index_",
+                "type" : {
+                  "type" : "record",
+                  "name" : "AtomIndex",
+                  "fields" : [ {
+                    "name" : "type_",
+                    "type" : "string"
+                  }, {
+                    "name" : "id",
+                    "type" : "string"
+                  } ]
+                }
+              }, {
+                "name" : "data_",
+                "type" : [ "LinkData" ]
+              } ]
+            }
+          }
+        },
+        "default" : {
+        }
+      }, {
+        "name" : "meta_",
+        "type" : {
+          "type" : "record",
+          "name" : "BundleMetadata",
+          "fields" : [ {
+            "name" : "date",
+            "type" : "long",
+            "default" : 0
+          }, {
+            "name" : "members",
+            "type" : {
+              "type" : "map",
+              "values" : "string"
+            },
+            "default" : {
+            }
+          }, {
+            "name" : "tags",
+            "type" : {
+              "type" : "map",
+              "values" : "string"
+            },
+            "default" : {
+            }
+          }, {
+            "name" : "meta",
+            "type" : {
+              "type" : "map",
+              "values" : "string"
+            },
+            "default" : {
+            }
+          }, {
+            "name" : "votes",
+            "type" : {
+              "type" : "map",
+              "values" : {
+                "type" : "record",
+                "name" : "VoteData",
+                "fields" : [ {
+                  "name" : "date",
+                  "type" : "long"
+                }, {
+                  "name" : "userName",
+                  "type" : [ "null", "string" ],
+                  "default" : null
+                }, {
+                  "name" : "direction",
+                  "type" : {
+                    "type" : "enum",
+                    "name" : "VoteDirection",
+                    "symbols" : [ "Up", "Down", "None" ]
+                  }
+                } ]
+              }
+            },
+            "default" : {
+            }
+          }, {
+            "name" : "views",
+            "type" : {
+              "type" : "map",
+              "values" : {
+                "type" : "record",
+                "name" : "ViewData",
+                "fields" : [ {
+                  "name" : "userName",
+                  "type" : "string"
+                }, {
+                  "name" : "count",
+                  "type" : "int"
+                } ]
+              }
+            },
+            "default" : {
+            }
+          }, {
+            "name" : "relevance",
+            "type" : {
+              "type" : "map",
+              "values" : "string"
+            },
+            "default" : {
+            }
+          }, {
+            "name" : "clicks",
+            "type" : {
+              "type" : "map",
+              "values" : "string"
+            },
+            "default" : {
+            }
+          } ]
+        }
+      } ]
+    };
+    var okObj = {
+      "id": "https://github.com/sqs/akka-kryo-serialization/subscription",
+      "type": "link",
+      "data_": {
+        "aa.bb.cc.LinkData": {
+          "address": "https://github.com/sqs/akka-kryo-serialization/subscription",
+          "title": {
+            "string": "Sign in · GitHub"
+          },
+          "excerpt": {
+            "string": "Signup and Pricing Explore GitHub Features Blog Sign in Sign in (Pricing and Signup) Username or Email Password (forgot password) GitHub Links GitHub About Blog Feat"
+          },
+          "image": {
+            "aa.bb.cc.Image": {
+              "url": "https://a248.e.akamai.net/assets.github.com/images/modules/header/logov7@4x.png?1340659561",
+              "width": 280,
+              "height": 120
+            }
+          },
+          "meta": {}
+        }
+      },
+      "atoms_": {
+        "link": {
+          "https://github.com/sqs/akka-kryo-serialization/subscription": {
+            "index_": {
+              "type_": "link",
+              "id": "https://github.com/sqs/akka-kryo-serialization/subscription"
+            },
+            "data_": {
+              "aa.bb.cc.LinkData": {
+                "address": "https://github.com/sqs/akka-kryo-serialization/subscription",
+                "title": {
+                  "string": "Sign in · GitHub"
+                },
+                "excerpt": {
+                  "string": "Signup and Pricing Explore GitHub Features Blog Sign in Sign in (Pricing and Signup) Username or Email Password (forgot password) GitHub Links GitHub About Blog Feat"
+                },
+                "image": {
+                  "aa.bb.cc.Image": {
+                    "url": "https://a248.e.akamai.net/assets.github.com/images/modules/header/logov7@4x.png?1340659561",
+                    "width": 280,
+                    "height": 120
+                  }
+                },
+                "meta": {}
+              }
+            }
+          }
+        }
+      },
+      "meta_": {
+        "date": 1345537530000,
+        "members": {
+          "a@a.com": "1"
+        },
+        "tags": {
+          "blue": "1"
+        },
+        "meta": {},
+        "votes": {},
+        "views": {
+          "a@a.com": {
+            "userName": "John Smith",
+            "count": 100
+          }
+        },
+        "relevance": {
+          "a@a.com": "1",
+          "b@b.com": "2"
+        },
+        "clicks": {}
+      }
+    };
+
+    test.ok(Validator.validate(schema, okObj));
+
+    var badObj = okObj; // no deep copy since we won't reuse okObj
+    badObj.meta_.clicks['a@a.com'] = 123;
+    test.throws(function() { Validator.validate(schema, badObj); });
+
+    test.done();
+  }
+};
diff --git a/lang/perl/.gitignore b/lang/perl/.gitignore
new file mode 100644
index 0000000..ca296d1
--- /dev/null
+++ b/lang/perl/.gitignore
@@ -0,0 +1,10 @@
+MANIFEST.bak
+META.yml
+MYMETA.json
+MYMETA.yml
+Makefile
+Makefile.old
+/inc
+pm_to_blib
+*~
+/blib
diff --git a/lang/perl/.shipit b/lang/perl/.shipit
new file mode 100644
index 0000000..d2778c7
--- /dev/null
+++ b/lang/perl/.shipit
@@ -0,0 +1,2 @@
+steps = FindVersion, ChangeVersion, CheckChangeLog, DistTest, Commit, Tag, MakeDist, UploadCPAN
+git.push_to = origin
diff --git a/lang/perl/Changes b/lang/perl/Changes
new file mode 100644
index 0000000..5620745
--- /dev/null
+++ b/lang/perl/Changes
@@ -0,0 +1,7 @@
+Revision history for Perl extension Avro
+
+1.00  Fri Jan 17 15:00:00 2014
+        - Relicense under apache license 2.0
+
+0.01  Thu May 27 20:56:19 2010
+        - original version
diff --git a/lang/perl/MANIFEST b/lang/perl/MANIFEST
new file mode 100644
index 0000000..21a4f2e
--- /dev/null
+++ b/lang/perl/MANIFEST
@@ -0,0 +1,32 @@
+.gitignore
+bin/avro-to-json
+Changes
+inc/Module/Install.pm
+inc/Module/Install/Base.pm
+inc/Module/Install/Makefile.pm
+inc/Module/Install/MakeMaker.pm
+inc/Module/Install/Metadata.pm
+inc/Module/Install/ReadmeFromPod.pm
+inc/Module/Install/Repository.pm
+lib/Avro.pm
+lib/Avro/BinaryDecoder.pm
+lib/Avro/BinaryEncoder.pm
+lib/Avro/DataFile.pm
+lib/Avro/DataFileReader.pm
+lib/Avro/DataFileWriter.pm
+lib/Avro/Protocol.pm
+lib/Avro/Protocol/Message.pm
+lib/Avro/Schema.pm
+Makefile.PL
+MANIFEST			This list of files
+META.yml
+NOTICE.txt
+README
+t/00_compile.t
+t/01_names.t
+t/01_schema.t
+t/02_bin_encode.t
+t/03_bin_decode.t
+t/04_datafile.t
+t/05_protocol.t
+xt/pod.t
diff --git a/lang/perl/MANIFEST.SKIP b/lang/perl/MANIFEST.SKIP
new file mode 100644
index 0000000..cb0a9c5
--- /dev/null
+++ b/lang/perl/MANIFEST.SKIP
@@ -0,0 +1,16 @@
+\bRCS\b
+\bCVS\b
+\.svn/
+\.git/
+^MANIFEST\.
+^Makefile$
+~$
+\.old$
+^blib/
+^pm_to_blib
+^MakeMaker-\d
+\.gz$
+\.cvsignore
+\.shipit
+^MYMETA.yml$
+^MYMETA.json$
diff --git a/lang/perl/Makefile.PL b/lang/perl/Makefile.PL
new file mode 100644
index 0000000..33c61b3
--- /dev/null
+++ b/lang/perl/Makefile.PL
@@ -0,0 +1,43 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+use Config;
+use inc::Module::Install;
+
+my $version = `cat ../../share/VERSION.txt`;
+chomp $version;
+
+license 'apache';
+version $version;
+readme_from 'lib/Avro.pm';
+all_from 'lib/Avro.pm';
+build_requires 'Test::More', 0.88;
+test_requires 'Math::BigInt';
+test_requires 'Test::Exception';
+requires 'JSON::XS';
+requires 'Try::Tiny';
+requires 'parent';
+requires 'Regexp::Common';
+requires 'Encode';
+requires 'IO::String';
+requires 'Object::Tiny';
+requires 'Compress::Zlib';
+unless ($Config{use64bitint}) {
+    requires 'Math::BigInt';
+}
+auto_set_repository();
+
+WriteMakefile(PM_FILTER => "sed -e 's/\+\+MODULE_VERSION\+\+/$version/'");
diff --git a/lang/perl/NOTICE.txt b/lang/perl/NOTICE.txt
new file mode 100644
index 0000000..0da03f5
--- /dev/null
+++ b/lang/perl/NOTICE.txt
@@ -0,0 +1 @@
+Copyright (C) 2010 Yann Kerherve. All rights reserved.
diff --git a/lang/perl/README b/lang/perl/README
new file mode 100644
index 0000000..85e3c04
--- /dev/null
+++ b/lang/perl/README
@@ -0,0 +1,24 @@
+NAME
+    Avro - official Perl API for the Avro serialization and RPC framework
+
+SYNOPSIS
+      use Avro;
+
+DESCRIPTION
+AUTHOR
+    Apache Avro <avro-dev@hadoop.apache.org>
+
+HISTORY
+    Before contribution to the Apache Avro project, this module was
+    developed by Yann Kerhervé <yannk@cpank.org> with contributions from
+    Andy Grundman <andy@hybridized.org>, David Bushong
+    <dbushong@mashlogic.com>, and Ilya Martynov <ilya@iponweb.net>.
+
+COPYRIGHT
+    Copyright 2014 Apache Software Foundation.
+
+LICENSE
+    The Apache License, Version 2.0
+    <http://www.apache.org/licenses/LICENSE-2.0>
+
+SEE ALSO
diff --git a/lang/perl/bin/avro-to-json b/lang/perl/bin/avro-to-json
new file mode 100644
index 0000000..17ff9ec
--- /dev/null
+++ b/lang/perl/bin/avro-to-json
@@ -0,0 +1,37 @@
+#!/usr/bin/env perl
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+use strict;
+use warnings;
+
+use Avro::DataFileReader;
+use Carp;
+use IO::File;
+use JSON::XS;
+
+my $j = JSON::XS->new->allow_nonref;
+
+my $fh = IO::File->new(shift || croak "specify a file");
+my $reader = Avro::DataFileReader->new(
+    fh => $fh,
+);
+for ($reader->all) {
+    print $j->encode($_);
+    print "\n";
+}
diff --git a/lang/perl/lib/Avro.pm b/lang/perl/lib/Avro.pm
new file mode 100644
index 0000000..5f04ffd
--- /dev/null
+++ b/lang/perl/lib/Avro.pm
@@ -0,0 +1,61 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+package Avro;
+
+use strict;
+use 5.008_001;
+our $VERSION = '++MODULE_VERSION++';
+
+1;
+__END__
+
+=encoding utf-8
+
+=head1 NAME
+
+Avro - official Perl API for the Avro serialization and RPC framework
+
+=head1 SYNOPSIS
+
+  use Avro;
+
+=head1 DESCRIPTION
+
+=head1 AUTHOR
+
+Apache Avro <avro-dev at hadoop.apache.org>
+
+=head1 HISTORY
+
+Before contribution to the Apache Avro project, this module was 
+developed by Yann KerhervE<eacute> <yannk at cpank.org> with contributions 
+from Andy Grundman <andy at hybridized.org>, David Bushong 
+<dbushong at mashlogic.com>, and Ilya Martynov <ilya at iponweb.net>.
+
+=head1 COPYRIGHT
+
+Copyright 2014 Apache Software Foundation.
+
+=head1 LICENSE
+
+The Apache License, Version 2.0
+L<http://www.apache.org/licenses/LICENSE-2.0>
+
+=head1 SEE ALSO
+
+=cut
diff --git a/lang/perl/lib/Avro/BinaryDecoder.pm b/lang/perl/lib/Avro/BinaryDecoder.pm
new file mode 100644
index 0000000..42fa1e7
--- /dev/null
+++ b/lang/perl/lib/Avro/BinaryDecoder.pm
@@ -0,0 +1,391 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+package Avro::BinaryDecoder;
+use strict;
+use warnings;
+
+use Config;
+use Encode();
+use Error::Simple;
+use Avro::Schema;
+
+our $complement = ~0x7F;
+unless ($Config{use64bitint}) {
+    require Math::BigInt;
+    $complement = Math::BigInt->new("0b" . ("1" x 57) . ("0" x 7));
+}
+
+=head2 decode(%param)
+
+Resolves the given writer_schema and reader_schema, and decodes the data
+provided by the reader.
+
+=over 4
+
+=item * writer_schema
+
+The schema that was used to encode the data provided by the C<reader>.
+
+=item * reader_schema
+
+The schema we want to use to decode the data.
+
+=item * reader
+
+An object implementing a straightforward interface: C<read($buf, $nbytes)> and
+C<seek($nbytes, $whence)> are expected. Typically an IO::String or IO::File
+object. These calls are expected to block the decoder if not enough data is
+available to read.
+
+=back
+
+=cut
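+## A minimal usage sketch (illustrative only; the schema and the encoded
+## bytes below are assumptions, not part of this module):
+##
+##   use IO::String;
+##   my $schema = Avro::Schema->parse('"int"');
+##   my $reader = IO::String->new($encoded_bytes);
+##   my $value  = Avro::BinaryDecoder->decode(
+##       writer_schema => $schema,
+##       reader_schema => $schema,
+##       reader        => $reader,
+##   );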
+sub decode {
+    my $class = shift;
+    my %param = @_;
+
+    my ($writer_schema, $reader_schema, $reader)
+        = @param{qw/writer_schema reader_schema reader/};
+
+    my $type = Avro::Schema->match(
+        writer => $writer_schema,
+        reader => $reader_schema,
+    ) or throw Avro::Schema::Error::Mismatch;
+
+    my $meth = "decode_$type";
+    return $class->$meth($writer_schema, $reader_schema, $reader);
+}
+
+sub skip {
+    my $class = shift;
+    my ($schema, $reader) = @_;
+    my $type = ref $schema ? $schema->type : $schema;
+    my $meth = "skip_$type";
+    return $class->$meth($schema, $reader);
+}
+
+sub decode_null { undef }
+
+sub skip_boolean { &decode_boolean }
+sub decode_boolean {
+    my $class = shift;
+    my $reader = pop;
+    $reader->read(my $bool, 1);
+    return unpack 'C', $bool;
+}
+
+sub skip_int { &decode_int }
+sub decode_int {
+    my $class = shift;
+    my $reader = pop;
+    return zigzag(unsigned_varint($reader));
+}
+
+sub skip_long { &decode_long };
+sub decode_long {
+    my $class = shift;
+    return decode_int($class, @_);
+}
+
+sub skip_float { &decode_float }
+sub decode_float {
+    my $class = shift;
+    my $reader = pop;
+    $reader->read(my $buf, 4);
+    return unpack "f<", $buf;
+}
+
+sub skip_double { &decode_double }
+sub decode_double {
+    my $class = shift;
+    my $reader = pop;
+    $reader->read(my $buf, 8);
+    return unpack "d<", $buf,
+}
+
+sub skip_bytes {
+    my $class = shift;
+    my $reader = pop;
+    my $size = decode_long($class, undef, undef, $reader);
+    $reader->seek($size, 1); ## SEEK_CUR: skip past the bytes from here
+    return;
+}
+
+sub decode_bytes {
+    my $class = shift;
+    my $reader = pop;
+    my $size = decode_long($class, undef, undef, $reader);
+    $reader->read(my $buf, $size);
+    return $buf;
+}
+
+sub skip_string { &skip_bytes }
+sub decode_string {
+    my $class = shift;
+    my $reader = pop;
+    my $bytes = decode_bytes($class, undef, undef, $reader);
+    return Encode::decode_utf8($bytes);
+}
+
+sub skip_record {
+    my $class = shift;
+    my ($schema, $reader) = @_;
+    for my $field (@{ $schema->fields }){
+        skip($class, $field->{type}, $reader);
+    }
+}
+
+## 1.3.2 A record is encoded by encoding the values of its fields in the order
+## that they are declared. In other words, a record is encoded as just the
+## concatenation of the encodings of its fields. Field values are encoded per
+## their schema.
+sub decode_record {
+    my $class = shift;
+    my ($writer_schema, $reader_schema, $reader) = @_;
+    my $record;
+
+    my %extra_fields = %{ $reader_schema->fields_as_hash };
+    for my $field (@{ $writer_schema->fields }) {
+        my $name = $field->{name};
+        my $w_field_schema = $field->{type};
+        my $r_field_schema = delete $extra_fields{$name};
+
+        ## 1.3.2 if the writer's record contains a field with a name not
+        ## present in the reader's record, the writer's value for that field
+        ## is ignored.
+        if (! $r_field_schema) {
+            $class->skip($w_field_schema, $reader);
+            next;
+        }
+        my $data = $class->decode(
+            writer_schema => $w_field_schema,
+            reader_schema => $r_field_schema->{type},
+            reader        => $reader,
+        );
+        $record->{ $name } = $data;
+    }
+
+    for my $name (keys %extra_fields) {
+        ## 1.3.2. if the reader's record schema has a field with no default
+        ## value, and writer's schema does not have a field with the same
+        ## name, an error is signalled.
+        unless (exists $extra_fields{$name}->{default}) {
+            throw Avro::Schema::Error::Mismatch(
+                "cannot resolve without default"
+            );
+        }
+        ## 1.3.2 ... else the default value is used
+        $record->{ $name } = $extra_fields{$name}->{default};
+    }
+    return $record;
+}
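+## For example (illustrative): if the writer's record lacks a field "c" that
+## the reader's schema declares as {"name": "c", "type": "int", "default": 0},
+## the decoded record comes back with c => 0.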
+
+sub skip_enum { &skip_int }
+
+## 1.3.2 An enum is encoded by an int, representing the zero-based position of
+## the symbol in the schema.
+sub decode_enum {
+    my $class = shift;
+    my ($writer_schema, $reader_schema, $reader) = @_;
+    my $index = decode_int($class, @_);
+
+    my $w_data = $writer_schema->symbols->[$index];
+    ## 1.3.2 if the writer's symbol is not present in the reader's enum,
+    ## then an error is signalled.
+    throw Avro::Schema::Error::Mismatch("enum unknown")
+        unless $reader_schema->is_data_valid($w_data);
+    return $w_data;
+}
+
+## called as a plain function: skip_block($reader, $coderef)
+sub skip_block {
+    my ($reader, $block_content) = @_;
+    my $class = __PACKAGE__;
+    my $block_count = decode_long($class, undef, undef, $reader);
+    while ($block_count) {
+        if ($block_count < 0) {
+            ## a negative count is followed by a long byte size; skip it
+            my $block_size = decode_long($class, undef, undef, $reader);
+            $reader->seek($block_size, 1);
+        }
+        else {
+            for (1..$block_count) {
+                $block_content->();
+            }
+        }
+        $block_count = decode_long($class, undef, undef, $reader);
+    }
+}
+
+sub skip_array {
+    my $class = shift;
+    my ($schema, $reader) = @_;
+    skip_block($reader, sub { $class->skip($schema->items, $reader) });
+}
+
+## 1.3.2 Arrays are encoded as a series of blocks. Each block consists of a
+## long count value, followed by that many array items. A block with count zero
+## indicates the end of the array. Each item is encoded per the array's item
+## schema.
+## If a block's count is negative, its absolute value is used, and the count is
+## followed immediately by a long block size indicating the number of bytes in
+## the block.
+sub decode_array {
+    my $class = shift;
+    my ($writer_schema, $reader_schema, $reader) = @_;
+    my $block_count = decode_long($class, @_);
+    my @array;
+    my $writer_items = $writer_schema->items;
+    my $reader_items = $reader_schema->items;
+    while ($block_count) {
+        my $block_size;
+        if ($block_count < 0) {
+            $block_count = -$block_count;
+            $block_size = decode_long($class, @_);
+            ## XXX we can skip with $reader_schema?
+        }
+        for (1..$block_count) {
+            push @array, $class->decode(
+                writer_schema => $writer_items,
+                reader_schema => $reader_items,
+                reader        => $reader,
+            );
+        }
+        $block_count = decode_long($class, @_);
+    }
+    return \@array;
+}
+
+sub skip_map {
+    my $class = shift;
+    my ($schema, $reader) = @_;
+    skip_block($reader, sub {
+        skip_string($class, $reader);
+        $class->skip($schema->values, $reader);
+    });
+}
+
+## 1.3.2 Maps are encoded as a series of blocks. Each block consists of a long
+## count value, followed by that many key/value pairs. A block with count zero
+## indicates the end of the map. Each item is encoded per the map's value
+## schema.
+##
+## If a block's count is negative, its absolute value is used, and the count is
+## followed immediately by a long block size indicating the number of bytes in
+## the block. This block size permits fast skipping through data, e.g., when
+## projecting a record to a subset of its fields.
+sub decode_map {
+    my $class = shift;
+    my ($writer_schema, $reader_schema, $reader) = @_;
+    my %hash;
+
+    my $block_count = decode_long($class, @_);
+    my $writer_values = $writer_schema->values;
+    my $reader_values = $reader_schema->values;
+    while ($block_count) {
+        my $block_size;
+        if ($block_count < 0) {
+            $block_count = -$block_count;
+            $block_size = decode_long($class, @_);
+            ## XXX we can skip with $reader_schema?
+        }
+        for (1..$block_count) {
+            my $key = decode_string($class, @_);
+            unless (defined $key && length $key) {
+                throw Avro::Schema::Error::Parse("key of map is invalid");
+            }
+            $hash{$key} = $class->decode(
+                writer_schema => $writer_values,
+                reader_schema => $reader_values,
+                reader        => $reader,
+            );
+        }
+        $block_count = decode_long($class, @_);
+    }
+    return \%hash;
+}
+
+sub skip_union {
+    my $class = shift;
+    my ($schema, $reader) = @_;
+    my $idx = decode_long($class, undef, undef, $reader);
+    my $union_schema = $schema->schemas->[$idx]
+        or throw Avro::Schema::Error::Parse("invalid union member index");
+    $class->skip($union_schema, $reader);
+}
+
+## 1.3.2 A union is encoded by first writing a long value indicating the
+## zero-based position within the union of the schema of its value. The value
+## is then encoded per the indicated schema within the union.
+sub decode_union {
+    my $class = shift;
+    my ($writer_schema, $reader_schema, $reader) = @_;
+    my $idx = decode_long($class, @_);
+    my $union_schema = $writer_schema->schemas->[$idx];
+    ## XXX TODO: schema resolution
+    # The first schema in the reader's union that matches the selected writer's
+    # union schema is recursively resolved against it. if none match, an error
+    # is signalled.
+    return $class->decode(
+        reader_schema => $union_schema,
+        writer_schema => $union_schema,
+        reader => $reader,
+    );
+}
+
+sub skip_fixed {
+    my $class = shift;
+    my ($schema, $reader) = @_;
+    $reader->seek($schema->size, 1); ## SEEK_CUR
+}
+
+## 1.3.2 Fixed instances are encoded using the number of bytes declared in the
+## schema.
+sub decode_fixed {
+    my $class = shift;
+    my ($writer_schema, $reader_schema, $reader) = @_;
+    $reader->read(my $buf, $writer_schema->size);
+    return $buf;
+}
+
+sub zigzag {
+    my $int = shift;
+    if (1 & $int) {
+        ## odd values are encoded negative ints
+        return -( 1 + ($int >> 1) );
+    }
+    ## even values are positive natural left shifted one bit
+    else {
+        return $int >> 1;
+    }
+}
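+## Worked examples of the zigzag decoding above:
+##   0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, 4294967294 -> 2147483647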
+
+sub unsigned_varint {
+    my $reader = shift;
+    my $int = 0;
+    my $more;
+    my $shift = 0;
+    do {
+        $reader->read(my $buf, 1);
+        my $byte = ord $buf;
+        my $value = $byte & 0x7F;
+        $int |= $value << $shift;
+        $shift += 7;
+        $more = $byte & 0x80;
+    } until (! $more);
+    return $int;
+}
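+## Worked example: the byte sequence 0xAC 0x02 decodes to 300
+## (0xAC & 0x7F = 44 with the continuation bit set; 2 << 7 = 256; 256 | 44 = 300)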
+
+1;
diff --git a/lang/perl/lib/Avro/BinaryEncoder.pm b/lang/perl/lib/Avro/BinaryEncoder.pm
new file mode 100644
index 0000000..efea1f6
--- /dev/null
+++ b/lang/perl/lib/Avro/BinaryEncoder.pm
@@ -0,0 +1,295 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+package Avro::BinaryEncoder;
+use strict;
+use warnings;
+
+use Config;
+use Encode();
+use bytes (); ## bytes::length is called below; load the module explicitly
+use Error::Simple;
+use Regexp::Common qw(number);
+
+our $max64;
+our $complement = ~0x7F;
+if ($Config{use64bitint}) {
+    $max64 = 9223372036854775807;
+}
+else {
+    require Math::BigInt;
+    $complement = Math::BigInt->new("0b" . ("1" x 57) . ("0" x 7));
+    $max64      = Math::BigInt->new("0b0" . ("1" x 63));
+}
+
+
+=head2 encode(%param)
+
+Encodes the given C<data> according to the given C<schema>, and passes the
+result to C<emit_cb>.
+
+Params are:
+
+=over 4
+
+=item * data
+
+The data to encode (can be any Perl data structure, but it should match the
+schema)
+
+=item * schema
+
+The schema to use to encode C<data>
+
+=item * emit_cb($byte_ref)
+
+The callback that will be invoked with a reference to the encoded data as its
+parameter.
+
+=back
+
+=cut
+
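+## A minimal usage sketch (illustrative; accumulates the encoding into a
+## scalar via emit_cb):
+##
+##   my $buf = '';
+##   Avro::BinaryEncoder->encode(
+##       schema  => Avro::Schema->parse('"string"'),
+##       data    => "hello",
+##       emit_cb => sub { $buf .= ${ $_[0] } },
+##   );
+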
+sub encode {
+    my $class = shift;
+    my %param = @_;
+    my ($schema, $data, $cb) = @param{qw/schema data emit_cb/};
+
+    ## a schema can also be just a string
+    my $type = ref $schema ? $schema->type : $schema;
+
+    ## might want to profile and optimize this
+    my $meth = "encode_$type";
+    $class->$meth($schema, $data, $cb);
+    return;
+}
+
+sub encode_null {
+    $_[3]->(\'');
+}
+
+sub encode_boolean {
+    my $class = shift;
+    my ($schema, $data, $cb) = @_;
+    $cb->( $data ? \"\x1" : \"\x0" );
+}
+
+sub encode_int {
+    my $class = shift;
+    my ($schema, $data, $cb) = @_;
+    if ($data !~ /^$RE{num}{int}$/) {
+        throw Avro::BinaryEncoder::Error("cannot convert '$data' to integer");
+    }
+    if (abs($data) > 0x7fffffff) {
+        throw Avro::BinaryEncoder::Error("int ($data) should be <= 32bits");
+    }
+
+    my $enc = unsigned_varint(zigzag($data));
+    $cb->(\$enc);
+}
+
+sub encode_long {
+    my $class = shift;
+    my ($schema, $data, $cb) = @_;
+    if ($data !~ /^$RE{num}{int}$/) {
+        throw Avro::BinaryEncoder::Error("cannot convert '$data' to long integer");
+    }
+    if (abs($data) > $max64) {
+        throw Avro::BinaryEncoder::Error("int ($data) should be <= 64bits");
+    }
+    my $enc = unsigned_varint(zigzag($data));
+    $cb->(\$enc);
+}
+
+sub encode_float {
+    my $class = shift;
+    my ($schema, $data, $cb) = @_;
+    my $enc = pack "f<", $data;
+    $cb->(\$enc);
+}
+
+sub encode_double {
+    my $class = shift;
+    my ($schema, $data, $cb) = @_;
+    my $enc = pack "d<", $data;
+    $cb->(\$enc);
+}
+
+sub encode_bytes {
+    my $class = shift;
+    my ($schema, $data, $cb) = @_;
+    encode_long($class, undef, bytes::length($data), $cb);
+    $cb->(\$data);
+}
+
+sub encode_string {
+    my $class = shift;
+    my ($schema, $data, $cb) = @_;
+    my $bytes = Encode::encode_utf8($data);
+    encode_long($class, undef, bytes::length($bytes), $cb);
+    $cb->(\$bytes);
+}
+
+## 1.3.2 A record is encoded by encoding the values of its fields in the order
+## that they are declared. In other words, a record is encoded as just the
+## concatenation of the encodings of its fields. Field values are encoded per
+## their schema.
+sub encode_record {
+    my $class = shift;
+    my ($schema, $data, $cb) = @_;
+    for my $field (@{ $schema->fields }) {
+        $class->encode(
+            schema  => $field->{type},
+            data    => $data->{ $field->{name} },
+            emit_cb => $cb,
+        );
+    }
+}
+
+## 1.3.2 An enum is encoded by an int, representing the zero-based position of
+## the symbol in the schema.
+sub encode_enum {
+    my $class = shift;
+    my ($schema, $data, $cb) = @_;
+    my $symbols = $schema->symbols_as_hash;
+    my $pos = $symbols->{ $data };
+    throw Avro::BinaryEncoder::Error("Cannot find enum $data")
+        unless defined $pos;
+    $class->encode_int(undef, $pos, $cb);
+}
+
+## 1.3.2 Arrays are encoded as a series of blocks. Each block consists of a
+## long count value, followed by that many array items. A block with count zero
+## indicates the end of the array. Each item is encoded per the array's item
+## schema.
+## If a block's count is negative, its absolute value is used, and the count is
+## followed immediately by a long block size indicating the number of bytes in
+## the block.
+
+## maybe here it would be worth configuring what a typical block size should be
+sub encode_array {
+    my $class = shift;
+    my ($schema, $data, $cb) = @_;
+
+    ## FIXME: multiple blocks
+    if (@$data) {
+        $class->encode_long(undef, scalar @$data, $cb);
+        for (@$data) {
+            $class->encode(
+                schema => $schema->items,
+                data => $_,
+                emit_cb => $cb,
+            );
+        }
+    }
+    ## end of the only block
+    $class->encode_long(undef, 0, $cb);
+}
+
+
+## 1.3.2 Maps are encoded as a series of blocks. Each block consists of a long
+## count value, followed by that many key/value pairs. A block with count zero
+## indicates the end of the map. Each item is encoded per the map's value
+## schema.
+##
+## (TODO)
+## If a block's count is negative, its absolute value is used, and the count is
+## followed immediately by a long block size indicating the number of bytes in
+## the block. This block size permits fast skipping through data, e.g., when
+## projecting a record to a subset of its fields.
+sub encode_map {
+    my $class = shift;
+    my ($schema, $data, $cb) = @_;
+
+    my @keys = keys %$data;
+    if (@keys) {
+        $class->encode_long(undef, scalar @keys, $cb);
+        for (@keys) {
+            ## the key
+            $class->encode_string(undef, $_, $cb);
+
+            ## the value
+            $class->encode(
+                schema => $schema->values,
+                data => $data->{$_},
+                emit_cb => $cb,
+            );
+        }
+    }
+    ## end of the only block
+    $class->encode_long(undef, 0, $cb);
+}
+
+## 1.3.2 A union is encoded by first writing a long value indicating the
+## zero-based position within the union of the schema of its value. The value
+## is then encoded per the indicated schema within the union.
+sub encode_union {
+    my $class = shift;
+    my ($schema, $data, $cb) = @_;
+    my $idx = 0;
+    my $elected_schema;
+    for my $inner_schema (@{$schema->schemas}) {
+        if ($inner_schema->is_data_valid($data)) {
+            $elected_schema = $inner_schema;
+            last;
+        }
+        $idx++;
+    }
+    unless ($elected_schema) {
+        throw Avro::BinaryEncoder::Error("union cannot validate the data");
+    }
+    $class->encode_long(undef, $idx, $cb);
+    $class->encode(
+        schema => $elected_schema,
+        data => $data,
+        emit_cb => $cb,
+    );
+}
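+## For example (illustrative): encoding undef against the union
+## ["null", "string"] emits the long 0 (the branch index) followed by the
+## null encoding, which is zero bytes.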
+
+## 1.3.2 Fixed instances are encoded using the number of bytes declared in the
+## schema.
+sub encode_fixed {
+    my $class = shift;
+    my ($schema, $data, $cb) = @_;
+    if (bytes::length $data != $schema->size) {
+        my $s1 = bytes::length $data;
+        my $s2 = $schema->size;
+        throw Avro::BinaryEncoder::Error("Fixed size doesn't match $s1!=$s2");
+    }
+    $cb->(\$data);
+}
+
+sub zigzag {
+    use warnings FATAL => 'numeric';
+    if ( $_[0] >= 0 ) {
+        return $_[0] << 1;
+    }
+    return (($_[0] << 1) ^ -1) | 0x1;
+}
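+## Worked examples of the zigzag encoding above:
+##   0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4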
+
+sub unsigned_varint {
+    my @bytes;
+    while ($_[0] & $complement) {           # mask with continuation bit
+        push @bytes, ($_[0] & 0x7F) | 0x80; # out and set continuation bit
+        $_[0] >>= 7;                        # next please
+    }
+    push @bytes, $_[0]; # last byte
+    return pack "C*", @bytes;
+}
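+## Worked example: 300 encodes to the bytes 0xAC 0x02
+## (300 & 0x7F = 44, continuation bit set -> 0xAC; 300 >> 7 = 2 -> 0x02)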
+
+package Avro::BinaryEncoder::Error;
+use parent 'Error::Simple';
+
+1;
diff --git a/lang/perl/lib/Avro/DataFile.pm b/lang/perl/lib/Avro/DataFile.pm
new file mode 100644
index 0000000..7536432
--- /dev/null
+++ b/lang/perl/lib/Avro/DataFile.pm
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+package Avro::DataFile;
+use strict;
+use warnings;
+
+use constant AVRO_MAGIC => "Obj\x01";
+
+use Avro::Schema;
+
+our $HEADER_SCHEMA = Avro::Schema->parse(<<EOH);
+{"type": "record", "name": "org.apache.avro.file.Header",
+  "fields" : [
+    {"name": "magic", "type": {"type": "fixed", "name": "Magic", "size": 4}},
+    {"name": "meta", "type": {"type": "map", "values": "bytes"}},
+    {"name": "sync", "type": {"type": "fixed", "name": "Sync", "size": 16}}
+  ]
+}
+EOH
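+
+## After this header, a container file consists of data blocks; as read back
+## by Avro::DataFileReader, each block is:
+##   <long: object count> <long: size in bytes>
+##   <serialized objects, possibly compressed> <16-byte sync marker>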
+
+our %ValidCodec = (
+    null    => 1,
+    deflate => 1,
+);
+
+sub is_codec_valid {
+    my $datafile = shift;
+    my $codec = shift || '';
+    return $ValidCodec{$codec};
+}
+
+1;
diff --git a/lang/perl/lib/Avro/DataFileReader.pm b/lang/perl/lib/Avro/DataFileReader.pm
new file mode 100644
index 0000000..aba2529
--- /dev/null
+++ b/lang/perl/lib/Avro/DataFileReader.pm
@@ -0,0 +1,294 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+package Avro::DataFileReader;
+use strict;
+use warnings;
+
+use Object::Tiny qw{
+    fh
+    reader_schema
+    sync_marker
+    block_max_size
+};
+
+use constant MARKER_SIZE => 16;
+
+# TODO: refuse to read a block more than block_max_size, instead
+# do partial reads
+
+use Avro::DataFile;
+use Avro::BinaryDecoder;
+use Avro::Schema;
+use Carp;
+use IO::String;
+use IO::Uncompress::RawInflate;
+use Fcntl();
+
+sub new {
+    my $class = shift;
+    my $datafile = $class->SUPER::new(@_);
+
+    my $schema = $datafile->{reader_schema};
+    croak "schema is invalid"
+        if $schema && ! eval { $schema->isa("Avro::Schema") };
+
+    return $datafile;
+}
+
+sub codec {
+    my $datafile = shift;
+    return $datafile->metadata->{'avro.codec'};
+}
+
+sub writer_schema {
+    my $datafile = shift;
+    unless (exists $datafile->{_writer_schema}) {
+        my $json_schema = $datafile->metadata->{'avro.schema'};
+        $datafile->{_writer_schema} = Avro::Schema->parse($json_schema);
+    }
+    return $datafile->{_writer_schema};
+}
+
+sub metadata {
+    my $datafile = shift;
+    unless (exists $datafile->{_metadata}) {
+        my $header = $datafile->header;
+        $datafile->{_metadata} = $header->{meta} || {};
+    }
+    return $datafile->{_metadata};
+}
+
+sub header {
+    my $datafile = shift;
+    unless (exists $datafile->{_header}) {
+        $datafile->{_header} = $datafile->read_file_header;
+    }
+
+    return $datafile->{_header};
+}
+
+sub read_file_header {
+    my $datafile = shift;
+
+    my $data = Avro::BinaryDecoder->decode(
+        reader_schema => $Avro::DataFile::HEADER_SCHEMA,
+        writer_schema => $Avro::DataFile::HEADER_SCHEMA,
+        reader        => $datafile->{fh},
+    );
+    croak "Magic '$data->{magic}' doesn't match"
+        unless $data->{magic} eq Avro::DataFile->AVRO_MAGIC;
+
+    $datafile->{sync_marker} = $data->{sync}
+        or croak "sync marker appears invalid";
+
+    my $codec = $data->{meta}{'avro.codec'} || "";
+
+    throw Avro::DataFile::Error::UnsupportedCodec($codec)
+        unless Avro::DataFile->is_codec_valid($codec);
+
+    return $data;
+}
+
+sub all {
+    my $datafile = shift;
+
+    my @objs;
+    my @block_objs;
+    do {
+        if ($datafile->eof) {
+            @block_objs = ();
+        }
+        else {
+            $datafile->read_block_header if $datafile->eob;
+            @block_objs = $datafile->read_to_block_end;
+            push @objs, @block_objs;
+        }
+
+    } until !@block_objs;
+
+    return @objs;
+}
+
+sub next {
+    my $datafile = shift;
+    my $count    = shift;
+
+    my @objs;
+
+    $datafile->read_block_header if $datafile->eob;
+    return ()                    if $datafile->eof;
+
+    my $block_count = $datafile->{object_count};
+
+    if ($block_count <= $count) {
+        push @objs, $datafile->read_to_block_end;
+        croak "Didn't read as many objects than expected"
+            unless scalar @objs == $block_count;
+
+        push @objs, $datafile->next($count - $block_count);
+    }
+    else {
+        push @objs, $datafile->read_within_block($count);
+    }
+    return @objs;
+}
+
+sub read_within_block {
+    my $datafile = shift;
+    my $count    = shift;
+
+    my $reader        = $datafile->reader;
+    my $writer_schema = $datafile->writer_schema;
+    my $reader_schema = $datafile->reader_schema || $writer_schema;
+    my @objs;
+    while ($count-- > 0 && $datafile->{object_count} > 0) {
+        push @objs, Avro::BinaryDecoder->decode(
+            writer_schema => $writer_schema,
+            reader_schema => $reader_schema,
+            reader        => $reader,
+        );
+        $datafile->{object_count}--;
+    }
+    return @objs;
+}
+
+sub skip {
+    my $datafile = shift;
+    my $count    = shift;
+
+    my $block_count = $datafile->{object_count};
+    if ($block_count <= $count) {
+        $datafile->skip_to_block_end
+            or croak "Cannot skip to end of block!";
+        $datafile->skip($count - $block_count);
+    }
+    else {
+        my $writer_schema = $datafile->writer_schema;
+        ## could probably be optimized
+        while ($count--) {
+            Avro::BinaryDecoder->skip($writer_schema, $datafile->reader);
+            $datafile->{object_count}--;
+        }
+    }
+}
+
+sub read_block_header {
+    my $datafile = shift;
+    my $fh = $datafile->{fh};
+
+    $datafile->header unless $datafile->{_header};
+
+    $datafile->{object_count} = Avro::BinaryDecoder->decode_long(
+        undef, undef, $fh,
+    );
+    $datafile->{block_size} = Avro::BinaryDecoder->decode_long(
+        undef, undef, $fh,
+    );
+    $datafile->{block_start} = tell $fh;
+
+    return unless $datafile->codec eq 'deflate';
+    ## we need to read the entire block into memory, to inflate it
+    my $nread = read $fh, my $block, $datafile->{block_size} + MARKER_SIZE
+        or croak "Error reading from file: $!";
+
+    ## remove the marker
+    my $marker = substr $block, -(MARKER_SIZE), MARKER_SIZE, '';
+    $datafile->{block_marker} = $marker;
+
+    ## this is our new reader
+    $datafile->{reader} = IO::Uncompress::RawInflate->new(\$block);
+
+    return;
+}
+
+sub verify_marker {
+    my $datafile = shift;
+
+    my $marker = $datafile->{block_marker};
+    unless (defined $marker) {
+        ## we are in the fh case
+        read $datafile->{fh}, $marker, MARKER_SIZE;
+    }
+
+    unless (($marker || "") eq $datafile->sync_marker) {
+        croak "Oops synchronization issue (marker mismatch)";
+    }
+    return;
+}
+
+sub skip_to_block_end {
+    my $datafile = shift;
+
+    if (my $reader = $datafile->{reader}) {
+        seek $reader, 0, Fcntl->SEEK_END;
+        return;
+    }
+
+    my $remaining_size = $datafile->{block_size}
+                       + $datafile->{block_start}
+                       - tell $datafile->{fh};
+
+    ## seek relative to the current position (SEEK_CUR)
+    seek $datafile->{fh}, $remaining_size, 1;
+    $datafile->verify_marker; ## will do a read
+    return 1;
+}
+
+sub read_to_block_end {
+    my $datafile = shift;
+
+    my @objs = $datafile->read_within_block( $datafile->{object_count} );
+    $datafile->verify_marker;
+    return @objs;
+}
+
+sub reader {
+    my $datafile = shift;
+    return $datafile->{reader} || $datafile->{fh};
+}
+
+## end of block
+sub eob {
+    my $datafile = shift;
+
+    return 1 if $datafile->eof;
+
+    if ($datafile->{reader}) {
+        return 1 if $datafile->{reader}->eof;
+    }
+    else {
+        my $pos = tell $datafile->{fh};
+        return 1 unless $datafile->{block_start};
+        return 1 if $pos >= $datafile->{block_start} + $datafile->{block_size};
+    }
+    return 0;
+}
+
+sub eof {
+    my $datafile = shift;
+    if ($datafile->{reader}) {
+        return 0 unless $datafile->{reader}->eof;
+    }
+    return 1 if $datafile->{fh}->eof;
+    return 0;
+}
+
+package Avro::DataFile::Error::UnsupportedCodec;
+use parent 'Error::Simple';
+
+1;
diff --git a/lang/perl/lib/Avro/DataFileWriter.pm b/lang/perl/lib/Avro/DataFileWriter.pm
new file mode 100644
index 0000000..03b2f80
--- /dev/null
+++ b/lang/perl/lib/Avro/DataFileWriter.pm
@@ -0,0 +1,210 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+package Avro::DataFileWriter;
+use strict;
+use warnings;
+
+use constant DEFAULT_BLOCK_MAX_SIZE => 1024 * 64;
+
+use Object::Tiny qw{
+    fh
+    writer_schema
+    codec
+    metadata
+    block_max_size
+    sync_marker
+};
+
+use Avro::BinaryEncoder;
+use Avro::BinaryDecoder;
+use Avro::DataFile;
+use Avro::Schema;
+use Carp;
+use bytes (); ## bytes::length is called below; load the module explicitly
+use Error::Simple;
+use IO::Compress::RawDeflate qw(rawdeflate $RawDeflateError);
+
+sub new {
+    my $class = shift;
+    my $datafile = $class->SUPER::new(@_);
+
+    ## default values
+    $datafile->{block_max_size} ||= DEFAULT_BLOCK_MAX_SIZE;
+    $datafile->{sync_marker}    ||= $class->random_sync_marker;
+    $datafile->{metadata}       ||= {};
+    $datafile->{codec}          ||= 'null';
+
+    $datafile->{_current_size}       = 0;
+    $datafile->{_serialized_objects} = [];
+    $datafile->{_compressed_block}   = '';
+
+    croak "Please specify a writer schema" unless $datafile->{writer_schema};
+    croak "writer_schema is invalid"
+        unless eval { $datafile->{writer_schema}->isa("Avro::Schema") };
+
+    throw Avro::DataFile::Error::InvalidCodec($datafile->{codec})
+        unless Avro::DataFile->is_codec_valid($datafile->{codec});
+
+    return $datafile;
+}
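+## A minimal usage sketch (illustrative; the file name, schema, and records
+## are assumptions):
+##
+##   my $writer = Avro::DataFileWriter->new(
+##       fh            => IO::File->new('data.avro', 'w'),
+##       writer_schema => $schema,
+##       codec         => 'deflate',
+##   );
+##   $writer->print($_) for @records;
+##   $writer->close;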
+
+## it's not really good random, but it should be good enough
+sub random_sync_marker {
+    my $class = shift;
+    my @r;
+    for (1..16) {
+        push @r, int rand(1<<8);
+    }
+    my $marker = pack "C16", @r;
+    return $marker;
+}
+
+sub print {
+    my $datafile = shift;
+    my $data = shift;
+    my $writer_schema = $datafile->{writer_schema};
+
+    my $enc_ref = '';
+    Avro::BinaryEncoder->encode(
+        schema => $writer_schema,
+        data => $data,
+        emit_cb => sub {
+            $enc_ref .= ${ $_[0] };
+        },
+    );
+    $datafile->buffer_or_print(\$enc_ref);
+}
+
+sub buffer_or_print {
+    my $datafile = shift;
+    my $string_ref = shift;
+
+    my $ser_objects = $datafile->{_serialized_objects};
+    push @$ser_objects, $string_ref;
+
+    if ($datafile->codec eq 'deflate') {
+        my $uncompressed = join('', map { $$_ } @$ser_objects);
+        rawdeflate \$uncompressed => \$datafile->{_compressed_block}
+            or croak "rawdeflate failed: $RawDeflateError";
+        $datafile->{_current_size} =
+            bytes::length($datafile->{_compressed_block});
+    }
+    else {
+        $datafile->{_current_size} += bytes::length($$string_ref);
+    }
+    if ($datafile->{_current_size} > $datafile->{block_max_size}) {
+        ## ok, time to flush!
+        $datafile->_print_block;
+    }
+    return;
+}
+
+sub header {
+    my $datafile = shift;
+
+    my $metadata = $datafile->metadata;
+    my $schema   = $datafile->writer_schema;
+    my $codec    = $datafile->codec;
+
+    for (keys %$metadata) {
+        warn "metadata '$_' is reserved" if /^avro\./;
+    }
+
+    my $encoded_header = '';
+    Avro::BinaryEncoder->encode(
+        schema => $Avro::DataFile::HEADER_SCHEMA,
+        data => {
+            magic => Avro::DataFile->AVRO_MAGIC,
+            meta => {
+                %$metadata,
+                'avro.schema' => $schema->to_string,
+                'avro.codec' => $codec,
+            },
+            sync => $datafile->{sync_marker},
+        },
+        emit_cb => sub { $encoded_header .= ${ $_[0] } },
+    );
+    return $encoded_header;
+}
+
+sub _print_header {
+    my $datafile = shift;
+    $datafile->{_header_printed} = 1;
+    my $fh = $datafile->{fh};
+    print $fh $datafile->header;
+
+    return 1;
+}
+
+sub _print_block {
+    my $datafile = shift;
+    unless ($datafile->{_header_printed}) {
+        $datafile->_print_header;
+    }
+    my $ser_objects = $datafile->{_serialized_objects};
+    my $object_count = scalar @$ser_objects;
+    my $length = $datafile->{_current_size};
+    my $prefix = '';
+
+    for ($object_count, $length) {
+        Avro::BinaryEncoder->encode_long(
+            undef, $_, sub { $prefix .= ${ $_[0] } },
+        );
+    }
+
+    my $sync_marker = $datafile->{sync_marker};
+    my $fh = $datafile->{fh};
+
+    ## alternatively we could do n calls to print, but writing the whole
+    ## block at once is here to avoid memory issues we could have when
+    ## dereferencing the ser_objects
+    if ($datafile->codec eq 'deflate') {
+        print $fh $prefix, $datafile->{_compressed_block}, $sync_marker;
+    }
+    else {
+        print $fh $prefix, (map { $$_ } @$ser_objects), $sync_marker;
+    }
+
+    ## now reset our internal buffer
+    $datafile->{_serialized_objects} = [];
+    $datafile->{_current_size} = 0;
+    $datafile->{_compressed_block} = '';
+    return 1;
+}
+
+sub flush {
+    my $datafile = shift;
+    $datafile->_print_block if $datafile->{_current_size};
+}
+
+sub close {
+    my $datafile = shift;
+    $datafile->flush;
+    my $fh = $datafile->{fh} or return;
+    close $fh;
+}
+
+sub DESTROY {
+    my $datafile = shift;
+    $datafile->flush;
+    return 1;
+}
+
+package Avro::DataFile::Error::InvalidCodec;
+use parent 'Error::Simple';
+
+1;
diff --git a/lang/perl/lib/Avro/Protocol.pm b/lang/perl/lib/Avro/Protocol.pm
new file mode 100644
index 0000000..f3d7389
--- /dev/null
+++ b/lang/perl/lib/Avro/Protocol.pm
@@ -0,0 +1,114 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+package Avro::Protocol;
+use strict;
+use warnings;
+
+use Carp;
+use JSON::XS();
+use Try::Tiny;
+use Avro::Protocol::Message;
+use Avro::Schema;
+use Error;
+use Object::Tiny qw{
+    name
+    namespace
+    doc
+    types
+    messages
+};
+
+my $json = JSON::XS->new->allow_nonref;
+
+sub parse {
+    my $class     = shift;
+    my $enc_proto = shift
+        or throw Avro::Protocol::Error::Parse("protocol cannot be empty");
+
+    my $struct = try {
+        $json->decode($enc_proto);
+    }
+    catch {
+        throw Avro::Protocol::Error::Parse(
+            "Cannot parse json string: $_"
+        );
+    };
+    return $class->from_struct($struct);
+}
+
+sub from_struct {
+    my $class = shift;
+    my $struct = shift || {};
+    my $name = $struct->{protocol};
+    unless (defined $name && length $name) {
+        throw Avro::Protocol::Error::Parse("protocol name is required");
+    }
+
+    my $types = $class->parse_types($struct->{types});
+
+    my $messages;
+    $messages = $class->parse_messages($struct->{messages}, $types)
+        if $struct->{messages};
+
+    my $protocol = $class->SUPER::new(
+        name      => $name,
+        namespace => $struct->{namespace},
+        doc       => $struct->{doc},
+        types     => $types,
+        messages  => $messages,
+    );
+    return $protocol;
+}
+
+sub parse_types {
+    my $class = shift;
+    my $types = shift || [];
+
+    my %types;
+    my $names = {};
+    for (@$types) {
+        try {
+            my $schema = Avro::Schema->parse_struct($_, $names);
+            $types{ $schema->fullname } = $schema;
+        }
+        catch {
+            throw Avro::Protocol::Error::Parse("errors in parsing types: $_");
+        };
+    }
+    return \%types;
+}
+
+sub parse_messages {
+    my $class = shift;
+    my $messages = shift || {};
+    my $types = shift;
+    my $m = {};
+    for my $name (keys %$messages) {
+        $m->{$name} = Avro::Protocol::Message->new($messages->{$name}, $types);
+    }
+    return $m;
+}
+
+sub fullname {
+    my $protocol = shift;
+    return join ".", grep { $_ } map { $protocol->$_ } qw{ namespace name };
+}
+
+package Avro::Protocol::Error::Parse;
+use parent 'Error::Simple';
+
+1;
diff --git a/lang/perl/lib/Avro/Protocol/Message.pm b/lang/perl/lib/Avro/Protocol/Message.pm
new file mode 100644
index 0000000..b08cfd6
--- /dev/null
+++ b/lang/perl/lib/Avro/Protocol/Message.pm
@@ -0,0 +1,64 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+package Avro::Protocol::Message;
+
+use strict;
+use warnings;
+
+use Avro::Schema;
+use Avro::Protocol;
+use Error;
+
+use Object::Tiny qw{
+    doc
+    request
+    response
+    errors
+};
+
+sub new {
+    my $class = shift;
+    my $struct = shift;
+    my $types = shift;
+
+    my $resp_struct = $struct->{response}
+        or throw Avro::Protocol::Error::Parse("response is missing");
+
+    my $req_struct = $struct->{request}
+        or throw Avro::Protocol::Error::Parse("request is missing");
+
+    my $request = [
+        map { Avro::Schema::Field->new($_, $types) } @$req_struct
+    ];
+
+    my $err_struct = $struct->{errors};
+
+    my $response = Avro::Schema->parse_struct($resp_struct, $types);
+    my $errors;
+    $errors = Avro::Schema->parse_struct($err_struct, $types)
+        if $err_struct;
+
+    return $class->SUPER::new(
+        doc      => $struct->{doc},
+        request  => $request,
+        response => $response,
+        errors   => $errors,
+    );
+
+}
+
+1;
diff --git a/lang/perl/lib/Avro/Schema.pm b/lang/perl/lib/Avro/Schema.pm
new file mode 100644
index 0000000..a27a822
--- /dev/null
+++ b/lang/perl/lib/Avro/Schema.pm
@@ -0,0 +1,838 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+package Avro::Schema;
+use strict;
+use warnings;
+
+use Carp;
+use JSON::XS();
+use Try::Tiny;
+
+my $json = JSON::XS->new->allow_nonref;
+
+sub parse {
+    my $schema      = shift;
+    my $json_string = shift;
+    my $names       = shift || {};
+    my $namespace   = shift || "";
+
+    my $struct = try {
+        $json->decode($json_string);
+    }
+    catch {
+        throw Avro::Schema::Error::Parse(
+            "Cannot parse json string: $_"
+        );
+    };
+    return $schema->parse_struct($struct, $names, $namespace);
+}
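+## Example (illustrative) of parsing a record schema:
+##
+##   my $schema = Avro::Schema->parse(
+##       '{"type": "record", "name": "Point", "fields": [' .
+##       '{"name": "x", "type": "int"}, {"name": "y", "type": "int"}]}'
+##   );
+##   print $schema->fullname; # Point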
+
+sub to_string {
+    my $class = shift;
+    my $struct = shift;
+    return $json->encode($struct);
+}
+
+sub parse_struct {
+    my $schema = shift;
+    my $struct = shift;
+    my $names = shift || {};
+    my $namespace = shift || "";
+
+    ## 1.3.2 A JSON object
+    if (ref $struct eq 'HASH') {
+        my $type = $struct->{type}
+            or throw Avro::Schema::Error::Parse("type is missing");
+        if ( Avro::Schema::Primitive->is_type_valid($type) ) {
+            return Avro::Schema::Primitive->new(type => $type);
+        }
+        ## XXX technically we shouldn't allow error type other than in
+        ## a Protocol definition
+        if ($type eq 'record' or $type eq 'error') {
+            return Avro::Schema::Record->new(
+                struct => $struct,
+                names => $names,
+                namespace => $namespace,
+            );
+        }
+        elsif ($type eq 'enum') {
+            return Avro::Schema::Enum->new(
+                struct => $struct,
+                names => $names,
+                namespace => $namespace,
+            );
+        }
+        elsif ($type eq 'array') {
+            return Avro::Schema::Array->new(
+                struct => $struct,
+                names => $names,
+                namespace => $namespace,
+            );
+        }
+        elsif ($type eq 'map') {
+            return Avro::Schema::Map->new(
+                struct => $struct,
+                names => $names,
+                namespace => $namespace,
+            );
+        }
+        elsif ($type eq 'fixed') {
+            return Avro::Schema::Fixed->new(
+                struct => $struct,
+                names => $names,
+                namespace => $namespace,
+            );
+        }
+        else {
+            throw Avro::Schema::Error::Parse("unknown type: $type");
+        }
+    }
+    ## 1.3.2 A JSON array, representing a union of embedded types.
+    elsif (ref $struct eq 'ARRAY') {
+        return Avro::Schema::Union->new(
+            struct => $struct,
+            names => $names,
+            namespace => $namespace,
+        );
+    }
+    ## 1.3.2 A JSON string, naming a defined type.
+    else {
+        my $type = $struct;
+        ## It's one of our custom defined type
+        
+        ## Short name provided, prepend the namespace
+        if ( $type !~ /\./ ) {
+            my $fulltype = $namespace . '.' . $type;
+            if (exists $names->{$fulltype}) {
+                return $names->{$fulltype};
+            }
+        }
+        
+        ## Fully-qualified name
+        if (exists $names->{$type}) {
+            return $names->{$type};
+        }
+        
+        ## It's a primitive type
+        return Avro::Schema::Primitive->new(type => $type);
+    }
+}
+
+sub match {
+    my $class = shift;
+    my %param = @_;
+
+    my $reader = $param{reader}
+        or croak "missing reader schema";
+    my $writer = $param{writer}
+        or croak "missing writer schema";
+
+    my $wtype = ref $writer ? $writer->type : $writer;
+    my $rtype = ref $reader ? $reader->type : $reader;
+    ## 1.3.2 either schema is a union
+    return $wtype if $wtype eq 'union' or $rtype eq 'union';
+
+    ## 1.3.2 both schemas have same primitive type
+    return $wtype if $wtype eq $rtype
+             && Avro::Schema::Primitive->is_type_valid($wtype);
+
+    ## 1.3.2
+    ## int is promotable to long, float, or double
+    if ($wtype eq 'int' && (
+        $rtype eq 'float' or $rtype eq 'long' or $rtype eq 'double'
+    )) {
+        return $rtype;
+    }
+    ## long is promotable to float or double
+    if ($wtype eq 'long' && (
+        $rtype eq 'float' or $rtype eq 'double'
+    )) {
+        return $rtype;
+    }
+    ## float is promotable to double
+    if ($wtype eq 'float' && $rtype eq 'double') {
+        return $rtype;
+    }
+    return 0 unless $rtype eq $wtype;
+
+    ## 1.3.2 {subtype and/or names} match
+    if ($rtype eq 'array') {
+        return $wtype if $class->match(
+            reader => $reader->items,
+            writer => $writer->items,
+        );
+    }
+    elsif ($rtype eq 'record') {
+        return $wtype if $reader->fullname eq $writer->fullname;
+    }
+    elsif ($rtype eq 'map') {
+        return $wtype if $class->match(
+            reader => $reader->values,
+            writer => $writer->values,
+        );
+    }
+    elsif ($rtype eq 'fixed') {
+        return $wtype if $reader->size     eq $writer->size
+                      && $reader->fullname eq $writer->fullname;
+    }
+    elsif ($rtype eq 'enum') {
+        return $wtype if $reader->fullname eq $writer->fullname;
+    }
+    return 0;
+}
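+## Examples (illustrative) of the resolution rules above:
+##   Avro::Schema->match(writer => 'int',   reader => 'long') returns 'long'
+##   Avro::Schema->match(writer => 'float', reader => 'int')  returns 0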
+
+
+package Avro::Schema::Base;
+our @ISA = qw/Avro::Schema/;
+use Carp;
+
+sub new {
+    my $class = shift;
+    my %param = @_;
+
+    my $type = $param{type};
+    if (!$type) {
+        my ($t) = $class =~ /::([^:]+)$/;
+        $type = lc ($t);
+    }
+    my $schema = bless {
+        type => $type,
+    }, $class;
+    return $schema;
+}
+
+sub type {
+    my $schema = shift;
+    return $schema->{type};
+}
+
+sub to_string {
+    my $schema = shift;
+    my $known_names = shift || {};
+    return Avro::Schema->to_string($schema->to_struct($known_names));
+}
+
+package Avro::Schema::Primitive;
+our @ISA = qw/Avro::Schema::Base/;
+use Carp;
+use Config;
+use Regexp::Common qw/number/;
+
+my %PrimitiveType = map { $_ => 1 } qw/
+    null
+    boolean
+    int
+    long
+    float
+    double
+    bytes
+    string
+/;
+
+my %Singleton = ( );
+
+## FIXME: useless lazy generation
+sub new {
+    my $class = shift;
+    my %param = @_;
+
+    my $type = $param{type}
+        or croak "Schema must have a type";
+
+    throw Avro::Schema::Error::Parse("Not a primitive type $type")
+        unless $class->is_type_valid($type);
+
+    if (! exists $Singleton{ $type } ) {
+        my $schema = $class->SUPER::new( type => $type );
+        $Singleton{ $type } = $schema;
+    }
+    return $Singleton{ $type };
+}
+
+sub is_type_valid {
+    return $PrimitiveType{ $_[1] || "" };
+}
+
+## Returns true or false depending on whether the given data is valid
+## for this schema
+sub is_data_valid {
+    my $schema = shift;
+    my $data = shift;
+    my $type = $schema->{type};
+    if ($type eq 'int') {
+        no warnings;
+        my $packed_int = pack "l", $data;
+        my $unpacked_int = unpack "l", $packed_int;
+        return $unpacked_int eq $data ? 1 : 0;
+    }
+    if ($type eq 'long') {
+        if ($Config{use64bitint}) {
+            my $packed_int = pack "q", $data;
+            my $unpacked_int = unpack "q", $packed_int;
+            return $unpacked_int eq $data ? 1 : 0;
+
+        }
+        else {
+            require Math::BigInt;
+            my $int = eval { Math::BigInt->new($data) };
+            if ($@) {
+                warn "probably a unblessed ref: $@";
+                return 0;
+            }
+            return 0 if $int->is_nan;
+            my $max = Math::BigInt->new( "0x7FFF_FFFF_FFFF_FFFF" );
+            return $int->bcmp($max) <= 0 ? 1 : 0;
+        }
+    }
+    if ($type eq 'float' or $type eq 'double') {
+        return $data =~ /^$RE{num}{real}$/ ? 1 : 0;
+    }
+    if ($type eq "bytes" or $type eq "string") {
+        return 1 unless !defined $data or ref $data;
+    }
+    if ($type eq 'null') {
+        return defined $data ? 0 : 1;
+    }
+    if ($type eq 'boolean') {
+        return 0 if ref $data; # sometimes risky
+        return 1 if $data =~ m{yes|no|y|n|t|f|true|false}i;
+        return 0;
+    }
+    return 0;
+}
+
+sub to_struct {
+    my $schema = shift;
+    return $schema->type;
+}
+
+package Avro::Schema::Named;
+our @ISA = qw/Avro::Schema::Base/;
+use Scalar::Util;
+
+my %NamedType = map { $_ => 1 } qw/
+    record
+    enum
+    fixed
+/;
+
+sub new {
+    my $class = shift;
+    my %param = @_;
+
+    my $schema = $class->SUPER::new(%param);
+
+    my $names     = $param{names}  || {};
+    my $struct    = $param{struct} || {};
+    my $name      = $struct->{name};
+    unless (defined $name && length $name) {
+        throw Avro::Schema::Error::Parse( "Missing name for $class" );
+    }
+    my $namespace = $struct->{namespace};
+    unless (defined $namespace && length $namespace) {
+        $namespace = $param{namespace};
+    }
+
+    $schema->set_names($namespace, $name);
+    $schema->add_name($names);
+
+    return $schema;
+}
+
+sub is_type_valid {
+    return $NamedType{ $_[1] || "" };
+}
+
+sub set_names {
+    my $schema = shift;
+    my ($namespace, $name) = @_;
+
+    my @parts = split /\./, ($name || ""), -1;
+    if (@parts > 1) {
+        $name = pop @parts;
+        $namespace = join ".", @parts;
+        if (grep { ! length $_ } @parts) {
+            throw Avro::Schema::Error::Name(
+                "name '$name' is not a valid name"
+            );
+        }
+    }
+
+    ## 1.3.2 The name portion of a fullname, and record field names must:
+    ## * start with [A-Za-z_]
+    ## * subsequently contain only [A-Za-z0-9_]
+    my $type = $schema->{type};
+    unless (length $name && $name =~ m/^[A-Za-z_][A-Za-z0-9_]*$/) {
+        throw Avro::Schema::Error::Name(
+            "name '$name' is not valid for $type"
+        );
+    }
+    if (defined $namespace && length $namespace) {
+        for (split /\./, $namespace, -1) {
+            unless ($_ && /^[A-Za-z_][A-Za-z0-9_]*$/) {
+                throw Avro::Schema::Error::Name(
+                    "namespace '$namespace' is not valid for $type"
+                );
+            }
+        }
+    }
+    $schema->{name} = $name;
+    $schema->{namespace} = $namespace;
+}
+
+sub add_name {
+    my $schema = shift;
+    my ($names) = @_;
+
+    my $name = $schema->fullname;
+    if ( exists $names->{ $name } ) {
+        throw Avro::Schema::Error::Parse( "Name $name is already defined" );
+    }
+    $names->{$name} = $schema;
+    Scalar::Util::weaken( $names->{$name} );
+    return;
+}
+
+sub fullname {
+    my $schema = shift;
+    return join ".",
+        grep { defined $_ && length $_ }
+        map { $schema->{$_ } }
+        qw/namespace name/;
+}
+
+sub namespace {
+    my $schema = shift;
+    return $schema->{namespace};
+}
+
+package Avro::Schema::Record;
+our @ISA = qw/Avro::Schema::Named/;
+use Scalar::Util;
+
+my %ValidOrder = map { $_ => 1 } qw/ascending descending ignore/;
+
+sub new {
+    my $class = shift;
+    my %param = @_;
+
+    my $names  = $param{names} ||= {};
+    my $schema = $class->SUPER::new(%param);
+
+    my $fields = $param{struct}{fields}
+        or throw Avro::Schema::Error::Parse("Record must have Fields");
+
+    throw Avro::Schema::Error::Parse("Record.Fields must me an array")
+        unless ref $fields eq 'ARRAY';
+
+    my $namespace = $schema->namespace;
+
+    my @fields;
+    for my $field (@$fields) {
+        my $f = Avro::Schema::Field->new($field, $names, $namespace);
+        push @fields, $f;
+    }
+    $schema->{fields} = \@fields;
+    return $schema;
+}
+
+sub to_struct {
+    my $schema = shift;
+    my $known_names = shift || {};
+    ## mark this record type as known (later references serialize as the bare fullname)
+    my $fullname = $schema->fullname;
+    if ($known_names->{ $fullname }++) {
+        return $fullname;
+    }
+    return {
+        type => $schema->{type},
+        name => $fullname,
+        fields => [
+            map { $_->to_struct($known_names) } @{ $schema->{fields} }
+        ],
+    };
+}
+
+sub fields {
+    my $schema = shift;
+    return $schema->{fields};
+}
+
+sub fields_as_hash {
+    my $schema = shift;
+    unless (exists $schema->{_fields_as_hash}) {
+        $schema->{_fields_as_hash} = {
+            map { $_->{name} => $_ } @{ $schema->{fields} }
+        };
+    }
+    return $schema->{_fields_as_hash};
+}
+
+sub is_data_valid {
+    my $schema = shift;
+    my $data = shift;
+    for my $field (@{ $schema->{fields} }) {
+        my $key = $field->{name};
+        return 0 unless $field->is_data_valid($data->{$key});
+    }
+    return 1;
+}
+
+package Avro::Schema::Field;
+
+sub to_struct {
+    my $field = shift;
+    my $known_names = shift || {};
+    my $type = $field->{type}->to_struct($known_names);
+    return { name => $field->{name}, type => $type };
+}
+
+sub new {
+    my $class = shift;
+    my ($struct, $names, $namespace) = @_;
+
+    my $name = $struct->{name};
+    throw Avro::Schema::Error::Parse("Record.Field.name is required")
+        unless defined $name && length $name;
+
+    my $type = $struct->{type};
+    throw Avro::Schema::Error::Parse("Record.Field.name is required")
+        unless defined $type && length $type;
+
+    $type = Avro::Schema->parse_struct($type, $names, $namespace);
+    my $field = { name => $name, type => $type };
+    #TODO: find where to weaken precisely
+    #Scalar::Util::weaken($struct->{type});
+
+    if (exists $struct->{default}) {
+        my $is_valid = $type->is_data_valid($struct->{default});
+        my $t = $type->type;
+        throw Avro::Schema::Error::Parse(
+            "default value doesn't validate $t: '$struct->{default}'"
+        ) unless $is_valid;
+
+        ## small Perlish special case
+        if ($t eq 'boolean') { # compare the type name, not the schema object
+            $field->{default} = $struct->{default} ? 1 : 0;
+        }
+        else {
+            $field->{default} = $struct->{default};
+        }
+    }
+    if (my $order = $struct->{order}) {
+        throw Avro::Schema::Error::Parse(
+            "Order '$order' is not valid'"
+        ) unless $ValidOrder{$order};
+        $field->{order} = $order;
+    }
+    return bless $field, $class;
+}
+
+sub is_data_valid {
+    my $field = shift;
+    my $data = shift;
+    return 1 if $field->{type}->is_data_valid($data);
+    return 0;
+}
+
+package Avro::Schema::Enum;
+our @ISA = qw/Avro::Schema::Named/;
+
+sub new {
+    my $class = shift;
+    my %param = @_;
+    my $schema = $class->SUPER::new(%param);
+    my $struct = $param{struct}
+        or throw Avro::Schema::Error::Parse("Enum instantiation");
+    my $symbols = $struct->{symbols} || [];
+
+    unless (@$symbols) {
+        throw Avro::Schema::Error::Parse("Enum needs at least one symbol");
+    }
+    my %symbols;
+    my $pos = 0;
+    for (@$symbols) {
+        if (ref $_) {
+            throw Avro::Schema::Error::Parse(
+                "Enum.symbol should be a string"
+            );
+        }
+        throw Avro::Schema::Error::Parse("Duplicate symbol in Enum")
+            if exists $symbols{$_};
+
+        $symbols{$_} = $pos++;
+    }
+    $schema->{hash_symbols} = \%symbols;
+    return $schema;
+}
+
+sub is_data_valid {
+    my $schema = shift;
+    my $data = shift;
+    return 1 if defined $data && exists $schema->{hash_symbols}{$data};
+    return 0;
+}
+
+sub symbols {
+    my $schema = shift;
+    unless (exists $schema->{symbols}) {
+        my $sym = $schema->{hash_symbols};
+        $schema->{symbols} = [ sort { $sym->{$a} <=> $sym->{$b} } keys %$sym ];
+    }
+    return $schema->{symbols};
+}
+
+sub symbols_as_hash {
+    my $schema = shift;
+    return $schema->{hash_symbols} || {};
+}
+
+sub to_struct {
+    my $schema = shift;
+    my $known_names = shift || {};
+
+    my $fullname = $schema->fullname;
+    if ($known_names->{ $fullname }++) {
+        return $fullname;
+    }
+    return {
+        type => 'enum',
+        name => $fullname,
+        symbols => [ @{ $schema->symbols } ],
+    };
+}
+
+package Avro::Schema::Array;
+our @ISA = qw/Avro::Schema::Base/;
+
+sub new {
+    my $class = shift;
+    my %param = @_;
+    my $schema = $class->SUPER::new(%param);
+
+    my $struct = $param{struct}
+        or throw Avro::Schema::Error::Parse("Enum instantiation");
+
+    my $items = $struct->{items}
+        or throw Avro::Schema::Error::Parse("Array must declare 'items'");
+
+    $items = Avro::Schema->parse_struct($items, $param{names});
+    $schema->{items} = $items;
+    return $schema;
+}
+
+sub is_data_valid {
+    my $schema = shift;
+    my $default = shift;
+    return 1 if $default && ref $default eq 'ARRAY';
+    return 0;
+}
+
+sub items {
+    my $schema = shift;
+    return $schema->{items};
+}
+
+sub to_struct {
+    my $schema = shift;
+    my $known_names = shift || {};
+
+    return {
+        type => 'array',
+        items => $schema->{items}->to_struct($known_names),
+    };
+}
+
+package Avro::Schema::Map;
+our @ISA = qw/Avro::Schema::Base/;
+
+sub new {
+    my $class = shift;
+    my %param = @_;
+    my $schema = $class->SUPER::new(%param);
+
+    my $struct = $param{struct}
+        or throw Avro::Schema::Error::Parse("Map instantiation");
+
+    my $values = $struct->{values};
+    unless (defined $values && length $values) {
+        throw Avro::Schema::Error::Parse("Map must declare 'values'");
+    }
+    $values = Avro::Schema->parse_struct($values, $param{names});
+    $schema->{values} = $values;
+
+    return $schema;
+}
+
+sub is_data_valid {
+    my $schema = shift;
+    my $default = shift;
+    return 1 if $default && ref $default eq 'HASH';
+    return 0;
+}
+
+sub values {
+    my $schema = shift;
+    return $schema->{values};
+}
+
+sub to_struct {
+    my $schema = shift;
+    my $known_names = shift || {};
+
+    return {
+        type => 'map',
+        values => $schema->{values}->to_struct($known_names),
+    };
+}
+
+package Avro::Schema::Union;
+our @ISA = qw/Avro::Schema::Base/;
+
+sub new {
+    my $class = shift;
+    my %param = @_;
+    my $schema = $class->SUPER::new(%param);
+    my $union = $param{struct}
+        or throw Avro::Schema::Error::Parse("Union.new needs a struct");
+
+    my $names = $param{names} ||= {};
+
+    my @schemas;
+    my %seen_types;
+    for my $struct (@$union) {
+        my $sch = Avro::Schema->parse_struct($struct, $names);
+        my $type = $sch->type;
+
+        ## 1.3.2 Unions may not contain more than one schema with the same
+        ## type, except for the named types record, fixed and enum. For
+        ## example, unions containing two array types or two map types are not
+        ## permitted, but two types with different names are permitted.
+        if (Avro::Schema::Named->is_type_valid($type)) {
+            $type = $sch->fullname; # resolve Named types to their name
+        }
+        ## XXX: I could define &type_name doing the correct resolution for all classes
+        if ($seen_types{ $type }++) {
+            throw Avro::Schema::Error::Parse(
+                "$type is present more than once in the union"
+            )
+        }
+        ## 1.3.2 Unions may not immediately contain other unions.
+        if ($type eq 'union') {
+            throw Avro::Schema::Error::Parse(
+                "Cannot embed unions in union"
+            );
+        }
+        push @schemas, $sch;
+    }
+    $schema->{schemas} = \@schemas;
+
+    return $schema;
+}
+
+sub schemas {
+    my $schema = shift;
+    return $schema->{schemas};
+}
+
+sub is_data_valid {
+    my $schema = shift;
+    my $data = shift;
+    for my $type ( @{ $schema->{schemas} } ) {
+        if ( $type->is_data_valid($data) ) {
+            return 1;
+        }
+    }
+    return 0;
+}
+
+sub to_struct {
+    my $schema = shift;
+    my $known_names = shift || {};
+    return [ map { $_->to_struct($known_names) } @{$schema->{schemas}} ];
+}
+
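+## Illustrative only (the same cases are exercised in t/01_schema.t):
+## the two union rules enforced in new() above.
+##
+##     # two schemas of the same unnamed type: rejected
+##     eval { Avro::Schema->parse(
+##         '[{"type": "array", "items": "int"},
+##           {"type": "array", "items": "string"}]'
+##     ) };   # $@ is an Avro::Schema::Error::Parse
+##
+##     # a union directly inside a union: also rejected
+##     eval { Avro::Schema->parse('["long", ["string", "float"]]') };
+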
+package Avro::Schema::Fixed;
+our @ISA = qw/Avro::Schema::Named/;
+
+sub new {
+    my $class = shift;
+    my %param = @_;
+    my $schema = $class->SUPER::new(%param);
+
+    my $struct = $param{struct}
+        or throw Avro::Schema::Error::Parse("Fixed instantiation");
+
+    my $size = $struct->{size};
+    unless (defined $size && length $size) {
+        throw Avro::Schema::Error::Parse("Fixed must declare 'size'");
+    }
+    if (ref $size) {
+        throw Avro::Schema::Error::Parse(
+            "Fixed.size should be a scalar"
+        );
+    }
+    unless ($size =~ m{^\d+$} && $size > 0) {
+        throw Avro::Schema::Error::Parse(
+            "Fixed.size should be a positive integer"
+        );
+    }
+    $schema->{size} = $size;
+
+    return $schema;
+}
+
+sub is_data_valid {
+    my $schema = shift;
+    my $default = shift;
+    my $size = $schema->{size};
+    return 1 if $default && bytes::length $default == $size;
+    return 0;
+}
+
+sub size {
+    my $schema = shift;
+    return $schema->{size};
+}
+
+sub to_struct {
+    my $schema = shift;
+    my $known_names = shift || {};
+
+    my $fullname = $schema->fullname;
+    if ($known_names->{ $fullname }++) {
+        return $fullname;
+    }
+
+    return {
+        type => 'fixed',
+        name => $fullname,
+        size => $schema->{size},
+    };
+}
+
+package Avro::Schema::Error::Parse;
+use parent 'Error::Simple';
+
+package Avro::Schema::Error::Name;
+use parent 'Error::Simple';
+
+package Avro::Schema::Error::Mismatch;
+use parent 'Error::Simple';
+
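+## A minimal round-trip sketch of the to_struct() machinery (assumes this
+## module is loaded; the LongList schema is the spec example also used in
+## t/01_schema.t). to_struct() threads a $known_names hash, so a second
+## reference to a named type collapses to its bare fullname:
+##
+##     my $schema = Avro::Schema->parse(q({
+##         "type": "record", "name": "LongList",
+##         "fields": [
+##             {"name": "value", "type": "long"},
+##             {"name": "next",  "type": ["LongList", "null"]}
+##         ]
+##     }));
+##     my $struct = $schema->to_struct;
+##     # $struct->{fields}[1]{type} is ["LongList", "null"]: the recursive
+##     # reference comes back as the plain fullname, not a nested struct.
+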
+1;
diff --git a/lang/perl/t/00_compile.t b/lang/perl/t/00_compile.t
new file mode 100644
index 0000000..eee5268
--- /dev/null
+++ b/lang/perl/t/00_compile.t
@@ -0,0 +1,23 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+use strict;
+use Test::More;
+
+BEGIN { use_ok 'Avro' }
+
+done_testing;
diff --git a/lang/perl/t/01_names.t b/lang/perl/t/01_names.t
new file mode 100644
index 0000000..6538956
--- /dev/null
+++ b/lang/perl/t/01_names.t
@@ -0,0 +1,167 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+use strict;
+use warnings;
+
+use Test::More;
+use Test::Exception;
+use_ok 'Avro::Schema';
+
+## name validation
+{
+    no warnings 'qw';
+    my @bad_names = qw/0 01 0a $ % $s . - -1 (x) #s # π
+                       @ !q ^f [ ( { } ) ] ~ ` ?a :a ;a 
+                       a- a^ a% a[ .. ... .a .a. a./;
+
+    my @bad_namespaces = @bad_names;
+    for my $name (@bad_names) {
+        throws_ok { Avro::Schema::Record->new(
+            struct => {
+                name => $name,
+                fields => [ { name => 'a', type => 'long' } ],
+            },
+        ) } "Avro::Schema::Error::Name", "bad name: $name";
+    }
+
+    for my $ns (@bad_namespaces) {
+        throws_ok { Avro::Schema::Record->new(
+            struct => {
+                name => 'name',
+                namespace => $ns,
+                fields => [ { name => 'a', type => 'long' } ],
+            },
+        ) } "Avro::Schema::Error::Name", "bad ns: $ns";
+    }
+}
+
+## name + namespace (bullet 1 of spec)
+{
+    my $r = Avro::Schema::Record->new(
+        struct => {
+            name => 'saucisson',
+            namespace => 'dry',
+            fields => [ { name => 'a', type => 'long' } ],
+        },
+    );
+    is $r->fullname, 'dry.saucisson', "correct fullname";
+    is $r->namespace, 'dry', "ns is dry";
+}
+
+## fullname (bullet 2 of spec)
+{
+    my $r = Avro::Schema::Record->new(
+        struct => {
+            name => 'dry.saucisson',
+            fields => [ { name => 'a', type => 'long' } ],
+        },
+    );
+    is $r->fullname, 'dry.saucisson', "correct fullname";
+    is $r->namespace, 'dry', "ns is dry";
+
+    $r = Avro::Schema::Record->new(
+        struct => {
+            name => 'dry.saucisson',
+            namespace => 'archiduchesse.chaussette', ## ignored
+            fields => [ { name => 'a', type => 'long' } ],
+        },
+    );
+    is $r->fullname, 'dry.saucisson', "correct fullname";
+    is $r->namespace, 'dry', "ns is dry";
+}
+
+## name only (bullet 3 of spec)
+{
+    my $r = Avro::Schema::Record->new(
+        struct => {
+            name => 'container',
+            namespace => 'dry',
+            fields => [ {
+                name => 'a', type => {
+                    type => 'record', name => 'saucisson', fields => [
+                        { name => 'aa', type => 'long' },
+                    ],
+                }
+            } ],
+        },
+    );
+    is $r->fullname, 'dry.container', "correct fullname";
+    is $r->namespace, 'dry', "ns is dry";
+    my $subr = $r->fields->[0]{type};
+    is $subr->fullname, 'dry.saucisson', 'dry.saucisson';
+    is $subr->namespace, 'dry', "sub ns is dry";
+
+    $r = Avro::Schema::Record->new(
+        struct => {
+            name => 'dry.container',
+            fields => [ {
+                name => 'a', type => {
+                    type => 'record', name => 'saucisson', fields => [
+                        { name => 'aa', type => 'long' },
+                    ],
+                }
+            } ],
+        },
+    );
+    is $r->fullname, 'dry.container', "correct fullname";
+    is $r->namespace, 'dry', "ns is dry";
+    $subr = $r->fields->[0]{type};
+    is $subr->fullname, 'dry.saucisson', 'dry.saucisson';
+    is $subr->namespace, 'dry', "sub ns is dry";
+
+    $r = Avro::Schema::Record->new(
+        struct => {
+            name => 'dry.container',
+            fields => [ {
+                name => 'a', type => {
+                    type => 'record', name => 'duchesse.saucisson', fields => [
+                        { name => 'aa', type => 'long' },
+                    ],
+                }
+            } ],
+        },
+    );
+    is $r->fullname, 'dry.container', "correct fullname";
+    is $r->namespace, 'dry', "ns is dry";
+    $subr = $r->fields->[0]{type};
+    is $subr->fullname, 'duchesse.saucisson', 'duchesse.saucisson';
+    is $subr->namespace, 'duchesse', "sub ns is duchesse";
+
+    $r = Avro::Schema::Record->new(
+        struct => {
+            name => 'dry.container',
+            fields => [ {
+                name => 'a', type => {
+                    type => 'record',
+                    namespace => 'duc',
+                    name => 'saucisson',
+                    fields => [
+                        { name => 'aa', type => 'long' },
+                    ],
+                }
+            } ],
+        },
+    );
+    is $r->fullname, 'dry.container', "correct fullname";
+    is $r->namespace, 'dry', "ns is dry";
+    $subr = $r->fields->[0]{type};
+    is $subr->fullname, 'duc.saucisson', 'duc.saucisson';
+    is $subr->namespace, 'duc', "sub ns is duc";
+}
+
+done_testing;
diff --git a/lang/perl/t/01_schema.t b/lang/perl/t/01_schema.t
new file mode 100644
index 0000000..4bc9091
--- /dev/null
+++ b/lang/perl/t/01_schema.t
@@ -0,0 +1,472 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+use strict;
+use warnings;
+
+use Test::More;
+plan tests => 130;
+use Test::Exception;
+use_ok 'Avro::Schema';
+
+dies_ok { Avro::Schema->new } "Should use parse() or instantiate the subclass";
+
+throws_ok { Avro::Schema->parse(q()) } "Avro::Schema::Error::Parse";
+throws_ok { Avro::Schema->parse(q(test)) } "Avro::Schema::Error::Parse";
+throws_ok { Avro::Schema->parse(q({"type": t})) }
+            "Avro::Schema::Error::Parse";
+throws_ok { Avro::Schema->parse(q({"type": t})) }
+            "Avro::Schema::Error::Parse";
+
+my $s = Avro::Schema->parse(q("string"));
+isa_ok $s, 'Avro::Schema::Base';
+isa_ok $s, 'Avro::Schema::Primitive';
+is $s->type, "string", "type is string";
+
+my $s2 = Avro::Schema->parse(q({"type": "string"}));
+isa_ok $s2, 'Avro::Schema::Primitive';
+is $s2->type, "string", "type is string";
+is $s, $s2, "string schemas are singletons";
+
+## Records
+{
+    my $s3 = Avro::Schema::Record->new(
+        struct => {
+            name => 'saucisson',
+            fields => [
+                { name => 'a', type => 'long'   },
+                { name => 'b', type => 'string' },
+            ],
+        },
+    );
+
+    isa_ok $s3, 'Avro::Schema::Record';
+    is $s3->type, 'record', "this is a record type";
+    is $s3->fullname, 'saucisson', "correct name";
+    is $s3->fields->[0]{name}, 'a', 'a';
+    is $s3->fields->[0]{type}, Avro::Schema::Primitive->new(type => 'long'), 'long';
+    is $s3->fields->[1]{name}, 'b', 'b';
+    is $s3->fields->[1]{type}, Avro::Schema::Primitive->new(type => 'string'), 'str';
+
+    ## self-reference
+    $s3 = Avro::Schema::Record->new(
+        struct => {
+            name => 'saucisson',
+            fields => [
+                { name => 'a', type => 'long'      },
+                { name => 'b', type => 'saucisson' },
+            ],
+        },
+    );
+    isa_ok $s3, 'Avro::Schema::Record';
+    is $s3->fullname, 'saucisson', "correct name";
+    is $s3->fields->[0]{name}, 'a', 'a';
+    is $s3->fields->[0]{type}, Avro::Schema::Primitive->new(type => 'long'), 'long';
+    is $s3->fields->[1]{name}, 'b', 'b';
+    is $s3->fields->[1]{type}, $s3, 'self!';
+
+    ## serialize
+    my $string = $s3->to_string;
+    like $string, qr/saucisson/, "generated string has 'saucisson'";
+    my $s3bis = Avro::Schema->parse($string);
+    is_deeply $s3bis->to_struct, $s3->to_struct,
+        'regenerated structure matches original';
+
+    ## record fields can have defaults
+    my @good_ints = (2, -1, -(2**31 - 1), 2_147_483_647, "2147483647"  );
+    my @bad_ints = ("", "string", 9.22337204, 9.22337204E10, \"2");
+    my @good_longs = (1, 2, -3);
+    my @bad_longs = (9.22337204, 9.22337204E10 + 0.1, \"2");
+
+    use Config;
+    if ($Config{use64bitint}) {
+        push @bad_ints, (2**32 - 1, 4_294_967_296, 9_223_372_036_854_775_807);
+        push @good_longs, (9_223_372_036_854_775_807, 3e10);
+        push @bad_longs, 9_223_372_036_854_775_808;
+    }
+    else {
+        require Math::BigInt;
+        push @bad_ints, map { Math::BigInt->new($_) }
+            ("0xFFFF_FFFF", "0x1_0000_0000", "0x7FFF_FFFF_FFFF_FFFF");
+        push @good_longs, map { Math::BigInt->new($_) }
+            ("9_223_372_036_854_775_807", "3e10");
+        push @bad_longs, Math::BigInt->new("9_223_372_036_854_775_808");
+    }
+
+    for (@good_ints) {
+        my $s4 = Avro::Schema::Record->new(
+            struct => { name => 'saucisson',
+                fields => [
+                    { name => 'a', type => 'int', default => $_ },
+                ],
+            },
+        );
+        is $s4->fields->[0]{default}, $_, "default $_";
+    }
+    for (@good_longs) {
+        my $s4 = Avro::Schema::Record->new(
+            struct => { name => 'saucisson',
+                fields => [
+                    { name => 'a', type => 'long', default => $_ },
+                ],
+            },
+        );
+        is $s4->fields->[0]{default}, $_, "default $_";
+    }
+    for (@bad_ints) {
+        throws_ok  { Avro::Schema::Record->new(
+            struct => { name => 'saucisson',
+                fields => [
+                    { name => 'a', type => 'int', default => $_ },
+                ],
+            },
+        ) } "Avro::Schema::Error::Parse", "invalid default: $_";
+    }
+    for (@bad_longs) {
+        throws_ok  { Avro::Schema::Record->new(
+            struct => { name => 'saucisson',
+                fields => [
+                    { name => 'a', type => 'long', default => $_ },
+                ],
+            },
+        ) } "Avro::Schema::Error::Parse", "invalid default: $_";
+    }
+
+    ## default of more complex types
+    throws_ok {
+        Avro::Schema::Record->new(
+            struct => { name => 'saucisson',
+                fields => [
+                    { name => 'a', type => 'union', default => 1 },
+                ],
+            },
+        )
+    } "Avro::Schema::Error::Parse", "union don't have default: $@";
+
+    my $s4 = Avro::Schema->parse_struct(
+        {
+            type => 'record',
+            name => 'saucisson',
+            fields => [
+                { name => 'string', type => 'string', default => "something" },
+                { name => 'map', type => { type => 'map', values => 'long' }, default => {a => 2} },
+                { name => 'array', type => { type => 'array', items => 'long' }, default => [1, 2] },
+                { name => 'bytes', type => 'bytes', default => "something" },
+                { name => 'null', type => 'null', default => undef },
+            ],
+        },
+    );
+    is $s4->fields->[0]{default}, "something", "string default";
+    is_deeply $s4->fields->[1]{default}, { a => 2 }, "map default";
+    is_deeply $s4->fields->[2]{default}, [1, 2], "array default";
+    is $s4->fields->[3]{default}, "something", "bytes default";
+    is $s4->fields->[4]{default}, undef, "null default";
+    ## TODO: technically we should verify that default map/array match values
+    ## and items types defined
+
+    ## ordering
+    for (qw(ascending descending ignore)) {
+        my $s4 = Avro::Schema::Record->new(
+            struct => {
+                name => 'saucisson',
+                fields => [
+                    { name => 'a', type => 'int', order => $_ },
+                ],
+            },
+        );
+        is $s4->fields->[0]{order}, $_, "order set to $_";
+    }
+    for (qw(DESCEND ascend DESCENDING ASCENDING)) {
+        throws_ok  { Avro::Schema::Record->new(
+            struct => { name => 'saucisson',
+                fields => [
+                    { name => 'a', type => 'long', order => $_ },
+                ],
+            },
+        ) } "Avro::Schema::Error::Parse", "invalid order: $_";
+    }
+}
+
+## Unions
+{
+    my $spec_example = <<EOJ;
+{
+  "type": "record",
+  "name": "LongList",
+  "fields" : [
+    {"name": "value", "type": "long"},
+    {"name": "next", "type": ["LongList", "null"]}
+  ]
+}
+EOJ
+    my $schema = Avro::Schema->parse($spec_example);
+    is $schema->type, 'record', "type record";
+    is $schema->fullname, 'LongList', "name is LongList";
+
+    ## Union checks
+    # can only contain one type
+
+    $s = <<EOJ;
+["null", "null"]
+EOJ
+    throws_ok { Avro::Schema->parse($s) }
+              'Avro::Schema::Error::Parse';
+
+    $s = <<EOJ;
+["long", "string", "float", "string"]
+EOJ
+    throws_ok { Avro::Schema->parse($s) }
+              'Avro::Schema::Error::Parse';
+
+    $s = <<EOJ;
+{
+  "type": "record",
+  "name": "embed",
+  "fields": [
+    {"name": "value", "type":
+        { "type": "record", "name": "rec1",  "fields": [
+            { "name": "str1", "type": "string"}
+        ] }
+    },
+    {"name": "next", "type": ["embed", "rec1", "embed"] }
+  ]
+}
+EOJ
+    throws_ok { Avro::Schema->parse($s) }
+          'Avro::Schema::Error::Parse',
+          'two records with same name in the union';
+
+    $s = <<EOJ;
+{
+  "type": "record",
+  "name": "embed",
+  "fields": [
+    {"name": "value", "type":
+        { "type": "record", "name": "rec1",  "fields": [
+            { "name": "str1", "type": "string"}
+        ] }
+    },
+    {"name": "next", "type": ["embed", "rec1"] }
+  ]
+}
+EOJ
+    lives_ok { Avro::Schema->parse($s) }
+             'two records of different names in the union';
+
+    # cannot directly embed another union
+    $s = <<EOJ;
+["long", ["string", "float"], "string"]
+EOJ
+    throws_ok { Avro::Schema->parse($s) }
+             'Avro::Schema::Error::Parse', "cannot embed union in union";
+}
+
+## Enums!
+{
+    my $s = <<EOJ;
+{ "type": "enum", "name": "theenum", "symbols": [ "A", "B" ]}
+EOJ
+    my $schema = Avro::Schema->parse($s);
+    is $schema->type, 'enum', "enum";
+    is $schema->fullname, 'theenum', "fullname";
+    is $schema->symbols->[0], "A", "symbol A";
+    is $schema->symbols->[1], "B", "symbol B";
+    my $string = $schema->to_string;
+    my $s2 = Avro::Schema->parse($string)->to_struct;
+    is_deeply $s2, $schema->to_struct, "reserialized identically";
+}
+
+## Arrays
+{
+    my $s = <<EOJ;
+{ "type": "array", "items": "string" }
+EOJ
+    my $schema = Avro::Schema->parse($s);
+    is $schema->type, 'array', "array";
+    isa_ok $schema->items, 'Avro::Schema::Primitive';
+    is $schema->items->type, 'string', "type of items is string";
+    my $string = $schema->to_string;
+    my $s2 = Avro::Schema->parse($string);
+    is_deeply $s2, $schema, "reserialized identically";
+}
+
+## Maps
+{
+    my $s = <<EOJ;
+{ "type": "map", "values": "string" }
+EOJ
+    my $schema = Avro::Schema->parse($s);
+    is $schema->type, 'map', "map";
+    isa_ok $schema->values, 'Avro::Schema::Primitive';
+    is $schema->values->type, 'string', "type of values is string";
+    my $string = $schema->to_string;
+    my $s2 = Avro::Schema->parse($string);
+    is_deeply $s2, $schema, "reserialized identically";
+}
+
+## Fixed
+{
+    my $s = <<EOJ;
+{ "type": "fixed", "name": "somefixed", "size": "something" }
+EOJ
+    throws_ok { Avro::Schema->parse($s) } "Avro::Schema::Error::Parse",
+        "size must be an int";
+
+    $s = <<EOJ;
+{ "type": "fixed", "name": "somefixed", "size": -100 }
+EOJ
+    throws_ok { Avro::Schema->parse($s) } "Avro::Schema::Error::Parse",
+        "size must be a POSITIVE int";
+
+    $s = <<EOJ;
+{ "type": "fixed", "name": "somefixed", "size": 0 }
+EOJ
+    throws_ok { Avro::Schema->parse($s) } "Avro::Schema::Error::Parse",
+        "size must be a POSITIVE int > 0";
+
+    $s = <<EOJ;
+{ "type": "fixed", "name": "somefixed", "size": 0.2 }
+EOJ
+    throws_ok { Avro::Schema->parse($s) } "Avro::Schema::Error::Parse",
+        "size must be an int";
+
+    $s = <<EOJ;
+{ "type": "fixed", "name": "somefixed", "size": 5e2 }
+EOJ
+    my $schema = Avro::Schema->parse($s);
+
+    is $schema->type, 'fixed', "fixed";
+    is $schema->fullname, 'somefixed', "name";
+    is $schema->size, 500, "size of fixed";
+    my $string = $schema->to_string;
+    my $s2 = Avro::Schema->parse($string);
+    is_deeply $s2, $schema, "reserialized identically";
+}
+
+# fixed type referenced using short name without namespace
+{
+    my $s = <<EOJ;
+{
+  "type": "record",
+  "name": "HandshakeRequest", "namespace":"org.apache.avro.ipc",
+  "fields": [
+    {"name": "clientHash",
+     "type": {"type": "fixed", "name": "MD5", "size": 16}},
+    {"name": "clientProtocol", "type": ["null", "string"]},
+    {"name": "serverHash", "type": "MD5"},
+    {"name": "meta", "type": ["null", {"type": "map", "values": "bytes"}]}
+  ]
+}
+EOJ
+    my $schema = Avro::Schema->parse($s);
+
+    is $schema->type, 'record', 'HandshakeRequest type ok';
+    is $schema->namespace, 'org.apache.avro.ipc', 'HandshakeRequest namespace ok';
+    is $schema->fields->[0]->{type}->{name}, 'MD5', 'HandshakeRequest clientHash type ok';
+    is $schema->fields->[2]->{type}->{name}, 'MD5', 'HandshakeRequest serverHash type ok';
+}
+
+## Schema resolution
+{
+    my @s = split /\n/, <<EOJ;
+{ "type": "int" }
+{ "type": "long" }
+{ "type": "float" }
+{ "type": "double" }
+{ "type": "boolean" }
+{ "type": "null" }
+{ "type": "string" }
+{ "type": "bytes" }
+{ "type": "array", "items": "string" }
+{ "type": "fixed", "size": 1, "name": "fixed" }
+{ "type": "enum", "name": "enum", "symbols": [ "s" ] }
+{ "type": "map", "values": "long" }
+{ "type": "record", "name": "r", "fields": [ { "name": "a", "type": "long" }] }
+EOJ
+    my %s;
+    for (@s) {
+        my $schema = Avro::Schema->parse($_);
+        $s{ $schema->type } = $schema;
+        ok ( Avro::Schema->match(
+                reader => $schema,
+                writer => $schema,
+        ), "identical match!");
+    }
+
+    ## schema promotion
+    match_ok($s{int},    $s{long});
+    match_ok($s{int},    $s{float});
+    match_ok($s{int},    $s{double});
+    match_ok($s{long},   $s{float});
+    match_ok($s{long},   $s{double});
+    match_ok($s{float},  $s{double});
+
+    ## some non promotion
+    match_nok($s{long},    $s{int});
+    match_nok($s{float},   $s{int});
+    match_nok($s{string},  $s{bytes});
+    match_nok($s{bytes},   $s{string});
+    match_nok($s{double},  $s{float});
+    match_nok($s{null},    $s{boolean});
+    match_nok($s{boolean}, $s{int});
+    match_nok($s{boolean}, $s{string});
+    match_nok($s{boolean}, $s{fixed});
+
+    ## complex type details
+    my @alt = split /\n/, <<EOJ;
+{ "type": "array", "items": "int" }
+{ "type": "fixed", "size": 2, "name": "fixed" }
+{ "type": "enum", "name": "enum2", "symbols": [ "b" ] }
+{ "type": "map", "values": "null" }
+{ "type": "record", "name": "r2", "fields": [ { "name": "b", "type": "long" }] }
+EOJ
+    my %alt;
+    for (@alt) {
+        my $schema = Avro::Schema->parse($_);
+        $alt{ $schema->type } = $schema;
+        match_nok($s{$schema->type}, $schema, "not same subtypes/names");
+    }
+}
+
+## union in a record.field
+{
+    my $s = Avro::Schema::Record->new(
+        struct => {
+            name => 'saucisson',
+            fields => [
+                { name => 'a', type => [ 'long', 'null' ] },
+            ],
+        },
+    );
+    isa_ok $s, 'Avro::Schema::Record';
+    is $s->fields->[0]{name}, 'a', 'a';
+    isa_ok $s->fields->[0]{type}, 'Avro::Schema::Union';
+}
+
+sub match_ok {
+    my ($w, $r, $msg) = @_;
+    $msg ||= "match_ok";
+    ok(Avro::Schema->match(reader => $r, writer => $w), $msg);
+}
+
+sub match_nok {
+    my ($w, $r, $msg) = @_;
+    $msg ||= "non matching";
+    ok !Avro::Schema->match(reader => $r, writer => $w), $msg;
+}
+
+done_testing;
diff --git a/lang/perl/t/02_bin_encode.t b/lang/perl/t/02_bin_encode.t
new file mode 100644
index 0000000..3125855
--- /dev/null
+++ b/lang/perl/t/02_bin_encode.t
@@ -0,0 +1,156 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+#!/usr/bin/env perl
+
+use strict;
+use warnings;
+use Avro::Schema;
+use Config;
+use Test::More;
+use Test::Exception;
+use Math::BigInt;
+
+use_ok 'Avro::BinaryEncoder';
+
+sub primitive_ok {
+    my ($primitive_type, $primitive_val, $expected_enc) = @_;
+
+    my $data;
+    my $meth = "encode_$primitive_type";
+    Avro::BinaryEncoder->$meth(
+        undef, $primitive_val, sub { $data = ${$_[0]} }
+    );
+    is $data, $expected_enc, "primitive $primitive_type encoded correctly";
+    return $data;
+}
+
+## some primitive testing
+{
+    primitive_ok null    =>    undef, '';
+    primitive_ok null    => 'whatev', '';
+
+    primitive_ok boolean => 0, "\x0";
+    primitive_ok boolean => 1, "\x1";
+
+    ## - the high bit of each byte is set, except on the last byte
+    ## - the remaining bits hold the value:
+    ##   - little endian
+    ##   - zigzag coded
+    primitive_ok long    =>        0, pack("C*", 0);
+    primitive_ok long    =>        1, pack("C*", 0x2);
+    primitive_ok long    =>       -1, pack("C*", 0x1);
+    primitive_ok int     =>       -1, pack("C*", 0x1);
+    primitive_ok int     =>      -20, pack("C*", 0b0010_0111);
+    primitive_ok int     =>       20, pack("C*", 0b0010_1000);
+    primitive_ok int     =>       63, pack("C*", 0b0111_1110);
+    primitive_ok int     =>       64, pack("C*", 0b1000_0000, 0b0000_0001);
+    my $p =
+    primitive_ok int     =>      -65, pack("C*", 0b1000_0001, 0b0000_0001);
+    primitive_ok int     =>       65, pack("C*", 0b1000_0010, 0b0000_0001);
+    primitive_ok int     =>       99, "\xc6\x01";
+
+    ## BigInt values still work
+    primitive_ok int     => Math::BigInt->new(-65), $p;
+
+    throws_ok {
+        my $toobig;
+        if ($Config{use64bitint}) {
+            $toobig = 1<<32;
+        }
+        else {
+            require Math::BigInt;
+            $toobig = Math::BigInt->new(1)->blsft(32);
+        }
+        primitive_ok int => $toobig, undef;
+    } "Avro::BinaryEncoder::Error", "33 bits";
+
+    throws_ok {
+        primitive_ok int => Math::BigInt->new(1)->blsft(63), undef;
+    } "Avro::BinaryEncoder::Error", "65 bits";
+
+    for (qw(long int)) {
+        throws_ok {
+            primitive_ok $_ =>  "x", undef;
+        } 'Avro::BinaryEncoder::Error', 'numeric values only';
+    };
+    # Unicode has decimal digits outside 0-9 (\N{U+0661} is ARABIC-INDIC DIGIT ONE).
+    # Make sure we reject non-ASCII digits cleanly.
+    for (qw(long int)) {
+        throws_ok {
+            primitive_ok $_ =>  "\N{U+0661}", undef;
+        } 'Avro::BinaryEncoder::Error', 'ascii decimals only';
+    };
+}
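+
+## The zigzag mapping asserted above, written out as plain arithmetic (an
+## illustrative sketch, not part of the module; it sidesteps bit-shifting
+## negative IVs):
+##
+##     sub zigzag { my $n = shift; return $n >= 0 ? 2 * $n : -2 * $n - 1 }
+##     # zigzag(-1)  == 1, zigzag(1) == 2
+##     # zigzag(-20) == 39 == 0b0010_0111, the single byte checked above
+##     # values of 64 and up spill into a second byte via the continuation bit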
+
+## spec examples
+{
+    my $enc = '';
+    my $schema = Avro::Schema->parse(q({ "type": "string" }));
+    Avro::BinaryEncoder->encode(
+        schema => $schema,
+        data => "foo",
+        emit_cb => sub { $enc .= ${ $_[0] } },
+    );
+    is $enc, "\x06\x66\x6f\x6f", "Binary_Encodings.Primitive_Types";
+
+    $schema = Avro::Schema->parse(<<EOJ);
+          {
+          "type": "record",
+          "name": "test",
+          "fields" : [
+          {"name": "a", "type": "long"},
+          {"name": "b", "type": "string"}
+          ]
+          }
+EOJ
+    $enc = '';
+    Avro::BinaryEncoder->encode(
+        schema => $schema,
+        data => { a => 27, b => 'foo' },
+        emit_cb => sub { $enc .= ${ $_[0] } },
+    );
+    is $enc, "\x36\x06\x66\x6f\x6f", "Binary_Encodings.Complex_Types.Records";
+
+    $enc = '';
+    $schema = Avro::Schema->parse(q({"type": "array", "items": "long"}));
+    Avro::BinaryEncoder->encode(
+        schema => $schema,
+        data => [3, 27],
+        emit_cb => sub { $enc .= ${ $_[0] } },
+    );
+    is $enc, "\x04\x06\x36\x00", "Binary_Encodings.Complex_Types.Arrays";
+
+    $enc = '';
+    $schema = Avro::Schema->parse(q(["string","null"]));
+    Avro::BinaryEncoder->encode(
+        schema => $schema,
+        data => undef,
+        emit_cb => sub { $enc .= ${ $_[0] } },
+    );
+    is $enc, "\x02", "Binary_Encodings.Complex_Types.Unions-null";
+
+    $enc = '';
+    Avro::BinaryEncoder->encode(
+        schema => $schema,
+        data => "a",
+        emit_cb => sub { $enc .= ${ $_[0] } },
+    );
+    is $enc, "\x00\x02\x61", "Binary_Encodings.Complex_Types.Unions-a";
+}
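+
+## A worked decomposition of the record bytes asserted above (nothing new,
+## just the arithmetic): field a = 27 zigzag-encodes to 54 = 0x36; field
+## b = "foo" is its length (3, zigzag 6 = 0x06) followed by the raw bytes.
+##
+##     my $buf = '';
+##     Avro::BinaryEncoder->encode_long(undef, 27, sub { $buf .= ${ $_[0] } });
+##     printf "%v02X\n", $buf;   # 36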
+
+done_testing;
diff --git a/lang/perl/t/03_bin_decode.t b/lang/perl/t/03_bin_decode.t
new file mode 100644
index 0000000..e2f340b
--- /dev/null
+++ b/lang/perl/t/03_bin_decode.t
@@ -0,0 +1,251 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+#!/usr/bin/env perl
+
+use strict;
+use warnings;
+use Avro::Schema;
+use Avro::BinaryEncoder;
+use Test::More;
+use Test::Exception;
+use IO::String;
+
+use_ok 'Avro::BinaryDecoder';
+
+## spec examples
+{
+    my $enc = "\x06\x66\x6f\x6f";
+    my $schema = Avro::Schema->parse(q({ "type": "string" }));
+    my $reader = IO::String->new($enc);
+    my $dec = Avro::BinaryDecoder->decode(
+        writer_schema => $schema,
+        reader_schema => $schema,
+        reader        => $reader,
+    );
+    is $dec, "foo", "Binary_Encodings.Primitive_Types";
+
+    $schema = Avro::Schema->parse(<<EOJ);
+          {
+          "type": "record",
+          "name": "test",
+          "fields" : [
+          {"name": "a", "type": "long"},
+          {"name": "b", "type": "string"}
+          ]
+          }
+EOJ
+    $reader = IO::String->new("\x36\x06\x66\x6f\x6f");
+    $dec = Avro::BinaryDecoder->decode(
+        writer_schema => $schema,
+        reader_schema => $schema,
+        reader        => $reader,
+    );
+    is_deeply $dec, { a => 27, b => 'foo' },
+                    "Binary_Encodings.Complex_Types.Records";
+
+    $reader = IO::String->new("\x04\x06\x36\x00");
+    $schema = Avro::Schema->parse(q({"type": "array", "items": "long"}));
+    $dec = Avro::BinaryDecoder->decode(
+        writer_schema => $schema,
+        reader_schema => $schema,
+        reader        => $reader,
+    );
+    is_deeply $dec, [3, 27], "Binary_Encodings.Complex_Types.Arrays";
+
+    $reader = IO::String->new("\x02");
+    $schema = Avro::Schema->parse(q(["string","null"]));
+    $dec = Avro::BinaryDecoder->decode(
+        writer_schema => $schema,
+        reader_schema => $schema,
+        reader        => $reader,
+    );
+    is $dec, undef, "Binary_Encodings.Complex_Types.Unions-null";
+
+    $reader = IO::String->new("\x00\x02\x61");
+    $dec = Avro::BinaryDecoder->decode(
+        writer_schema => $schema,
+        reader_schema => $schema,
+        reader        => $reader,
+    );
+    is $dec, "a", "Binary_Encodings.Complex_Types.Unions-a";
+}
+
+## enum schema resolution
+{
+
+    my $w_enum = Avro::Schema->parse(<<EOP);
+{ "type": "enum", "name": "enum", "symbols": [ "a", "b", "c", "\$", "z" ] }
+EOP
+    my $r_enum = Avro::Schema->parse(<<EOP);
+{ "type": "enum", "name": "enum", "symbols": [ "\$", "b", "c", "d" ] }
+EOP
+    ok !!Avro::Schema->match( reader => $r_enum, writer => $w_enum ), "match";
+    my $enc;
+    for my $data (qw/b c $/) {
+        Avro::BinaryEncoder->encode(
+            schema  => $w_enum,
+            data    => $data,
+            emit_cb => sub { $enc = ${ $_[0] } },
+        );
+        my $dec = Avro::BinaryDecoder->decode(
+            writer_schema => $w_enum,
+            reader_schema => $r_enum,
+            reader => IO::String->new($enc),
+        );
+        is $dec, $data, "decoded!";
+    }
+
+    for my $data (qw/a z/) {
+        Avro::BinaryEncoder->encode(
+            schema  => $w_enum,
+            data    => $data,
+            emit_cb => sub { $enc = ${ $_[0] } },
+        );
+        throws_ok { Avro::BinaryDecoder->decode(
+            writer_schema => $w_enum,
+            reader_schema => $r_enum,
+            reader => IO::String->new($enc),
+        )} "Avro::Schema::Error::Mismatch", "schema problem";
+    }
+}
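+
+## The resolution rule exercised above, spelled out (an illustrative
+## sketch: decoding recovers the writer's symbol by position, then
+## requires that symbol to exist in the reader's enum):
+##
+##     my @writer_symbols = qw(a b c $ z);
+##     my %reader_symbols = map { $_ => 1 } qw($ b c d);
+##     my $symbol = $writer_symbols[3];   # encoded position 3 -> '$'
+##     die "Mismatch" unless $reader_symbols{$symbol};  # 'a', 'z' would die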
+
+## record resolution
+{
+    my $w_schema = Avro::Schema->parse(<<EOJ);
+          { "type": "record", "name": "test",
+            "fields" : [
+                {"name": "a", "type": "long"},
+                {"name": "bonus", "type": "string"} ]}
+EOJ
+
+    my $r_schema = Avro::Schema->parse(<<EOJ);
+          { "type": "record", "name": "test",
+            "fields" : [
+                {"name": "t", "type": "float", "default": 37.5 },
+                {"name": "a", "type": "long"} ]}
+EOJ
+
+    my $data = { a => 1, bonus => "i" };
+    my $enc = '';
+    Avro::BinaryEncoder->encode(
+        schema  => $w_schema,
+        data    => $data,
+        emit_cb => sub { $enc .= ${ $_[0] } },
+    );
+    my $dec = Avro::BinaryDecoder->decode(
+        writer_schema => $w_schema,
+        reader_schema => $r_schema,
+        reader => IO::String->new($enc),
+    );
+    is $dec->{a}, 1, "easy";
+    ok ! exists $dec->{bonus}, "bonus extra field ignored";
+    is $dec->{t}, 37.5, "default t from reader used";
+
+    ## delete the default for t
+    delete $r_schema->fields->[0]{default};
+    throws_ok {
+        Avro::BinaryDecoder->decode(
+            writer_schema => $w_schema,
+            reader_schema => $r_schema,
+            reader => IO::String->new($enc),
+        );
+    } "Avro::Schema::Error::Mismatch", "no default value!";
+}
+
+## union resolution
+{
+    my $w_schema = Avro::Schema->parse(<<EOP);
+[ "string", "null", { "type": "array", "items": "long" }]
+EOP
+    my $r_schema = Avro::Schema->parse(<<EOP);
+[ "boolean", "null", { "type": "array", "items": "double" }]
+EOP
+    my $enc = '';
+    my $data = [ 1, 2, 3, 4, 5, 6 ];
+    Avro::BinaryEncoder->encode(
+        schema  => $w_schema,
+        data    => $data,
+        emit_cb => sub { $enc .= ${ $_[0] } },
+    );
+    my $dec = Avro::BinaryDecoder->decode(
+        writer_schema => $w_schema,
+        reader_schema => $r_schema,
+        reader => IO::String->new($enc),
+    );
+
+    is_deeply $dec, $data, "decoded!";
+}
+
+## map resolution
+{
+    my $w_schema = Avro::Schema->parse(<<EOP);
+{ "type": "map", "values": { "type": "array", "items": "string" } }
+EOP
+    my $r_schema = Avro::Schema->parse(<<EOP);
+{ "type": "map", "values": { "type": "array", "items": "int" } }
+EOP
+    my $enc = '';
+    my $data = { "one" => [ "un", "one" ], two => [ "deux", "two" ] };
+
+    Avro::BinaryEncoder->encode(
+        schema  => $w_schema,
+        data    => $data,
+        emit_cb => sub { $enc .= ${ $_[0] } },
+    );
+    throws_ok {
+        Avro::BinaryDecoder->decode(
+            writer_schema => $w_schema,
+            reader_schema => $r_schema,
+            reader => IO::String->new($enc),
+        )
+    } "Avro::Schema::Error::Mismatch", "recursively... fails";
+
+    my $dec = Avro::BinaryDecoder->decode(
+        writer_schema => $w_schema,
+        reader_schema => $w_schema,
+        reader => IO::String->new($enc),
+    );
+    is_deeply $dec, $data, "decoded succeeded!";
+}
+
+## schema upgrade
+{
+    my $w_schema = Avro::Schema->parse(<<EOP);
+{ "type": "map", "values": { "type": "array", "items": "int" } }
+EOP
+    my $r_schema = Avro::Schema->parse(<<EOP);
+{ "type": "map", "values": { "type": "array", "items": "float" } }
+EOP
+    my $enc = '';
+    my $data = { "one" => [ 1, 2 ], two => [ 1, 30 ] };
+
+    Avro::BinaryEncoder->encode(
+        schema  => $w_schema,
+        data    => $data,
+        emit_cb => sub { $enc .= ${ $_[0] } },
+    );
+    my $dec = Avro::BinaryDecoder->decode(
+        writer_schema => $w_schema,
+        reader_schema => $w_schema,
+        reader => IO::String->new($enc),
+    );
+    is_deeply $dec, $data, "decoded succeeded! +upgrade";
+    is $dec->{one}[0], 1.0, "kind of dumb test";
+}
+
+done_testing;
diff --git a/lang/perl/t/04_datafile.t b/lang/perl/t/04_datafile.t
new file mode 100644
index 0000000..b7aaba8
--- /dev/null
+++ b/lang/perl/t/04_datafile.t
@@ -0,0 +1,122 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+#!/usr/bin/env perl
+
+use strict;
+use warnings;
+use Avro::DataFile;
+use Avro::BinaryEncoder;
+use Avro::BinaryDecoder;
+use Avro::Schema;
+use File::Temp;
+use Test::Exception;
+use Test::More;
+
+use_ok 'Avro::DataFileReader';
+use_ok 'Avro::DataFileWriter';
+
+my $tmpfh = File::Temp->new(UNLINK => 1);
+
+my $schema = Avro::Schema->parse(<<EOP);
+{ "type": "map", "values": { "type": "array", "items": "string" } }
+EOP
+
+my $write_file = Avro::DataFileWriter->new(
+    fh            => $tmpfh,
+    writer_schema => $schema,
+    metadata      => {
+        some => 'metadata',
+    },
+);
+
+my $data = {
+    a => [ "2.2", "4.4" ],
+    b => [ "2.4", "2", "-4", "4", "5" ],
+    c => [ "0" ],
+};
+
+$write_file->print($data);
+$write_file->flush;
+
+## rewind
+seek $tmpfh, 0, 0;
+my $uncompressed_size = -s $tmpfh;
+
+my $read_file = Avro::DataFileReader->new(
+    fh            => $tmpfh,
+    reader_schema => $schema,
+);
+is $read_file->metadata->{'avro.codec'}, 'null', 'avro.codec';
+is $read_file->metadata->{'some'}, 'metadata', 'custom meta';
+
+my @all = $read_file->all;
+is scalar @all, 1, "one object back";
+is_deeply $all[0], $data, "Our data is intact!";
+
+
+## codec tests
+{
+    throws_ok {
+        Avro::DataFileWriter->new(
+            fh            => File::Temp->new,
+            writer_schema => $schema,
+            codec         => 'unknown',
+        );
+    } "Avro::DataFile::Error::InvalidCodec", "invalid codec";
+
+    ## rewind
+    seek $tmpfh, 0, 0;
+    local $Avro::DataFile::ValidCodec{null} = 0;
+    $read_file = Avro::DataFileReader->new(
+        fh            => $tmpfh,
+        reader_schema => $schema,
+    );
+
+    throws_ok {
+        $read_file->all;
+    } "Avro::DataFile::Error::UnsupportedCodec", "I've removed 'null' :)";
+
+    ## deflate!
+    my $zfh = File::Temp->new(UNLINK => 0);
+    my $write_file = Avro::DataFileWriter->new(
+        fh            => $zfh,
+        writer_schema => $schema,
+        codec         => 'deflate',
+        metadata      => {
+            some => 'metadata',
+        },
+    );
+    $write_file->print($data);
+    $write_file->flush;
+
+    ## rewind
+    seek $zfh, 0, 0;
+
+    my $read_file = Avro::DataFileReader->new(
+        fh            => $zfh,
+        reader_schema => $schema,
+    );
+    is $read_file->metadata->{'avro.codec'}, 'deflate', 'avro.codec';
+    is $read_file->metadata->{'some'}, 'metadata', 'custom meta';
+
+    my @all = $read_file->all;
+    is scalar @all, 1, "one object back";
+    is_deeply $all[0], $data, "Our data is intact!";
+}
+
+done_testing;
diff --git a/lang/perl/t/05_protocol.t b/lang/perl/t/05_protocol.t
new file mode 100644
index 0000000..60a66ee
--- /dev/null
+++ b/lang/perl/t/05_protocol.t
@@ -0,0 +1,76 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+#!/usr/bin/env perl
+
+use strict;
+use warnings;
+use Test::Exception;
+use Test::More;
+
+use_ok 'Avro::Protocol';
+
+{
+    my $spec_proto = <<EOJ;
+{
+"namespace": "com.acme",
+"protocol": "HelloWorld",
+"doc": "Protocol Greetings",
+
+"types": [
+    {"name": "Greeting", "type": "record", "fields": [
+        {"name": "message", "type": "string"}]},
+    {"name": "Curse", "type": "error", "fields": [
+        {"name": "message", "type": "string"}]}
+],
+
+"messages": {
+    "hello": {
+    "doc": "Say hello.",
+    "request": [{"name": "greeting", "type": "Greeting" }],
+    "response": "Greeting",
+    "errors": ["Curse"]
+    }
+}
+}
+EOJ
+    my $p = Avro::Protocol->parse($spec_proto);
+    ok $p, "proto returned";
+    isa_ok $p, 'Avro::Protocol';
+    is $p->fullname, "com.acme.HelloWorld", "fullname";
+    is $p->name, "HelloWorld", "name";
+    is $p->namespace, "com.acme", "namespace";
+
+    is $p->doc, "Protocol Greetings", "doc";
+
+    isa_ok $p->types, 'HASH';
+    isa_ok $p->types->{Greeting}, 'Avro::Schema::Record';
+    isa_ok $p->types->{Greeting}->fields_as_hash
+           ->{message}{type}, 'Avro::Schema::Primitive';
+
+    isa_ok $p->messages->{hello}, "Avro::Protocol::Message";
+    is $p->messages->{hello}->doc, "Say hello.";
+    isa_ok $p->messages->{hello}->errors, "Avro::Schema::Union";
+    isa_ok $p->messages->{hello}->response, "Avro::Schema::Record";
+    my $req_params = $p->messages->{hello}->request;
+    isa_ok $req_params, "ARRAY";
+    is scalar @$req_params, 1, "one parameter to hello message";
+    is $req_params->[0]->{name}, "greeting", "greeting field";
+    is $req_params->[0]->{type}, $p->types->{Greeting}, "same Schema type";
+}
+
+done_testing;
diff --git a/lang/perl/xt/pod.t b/lang/perl/xt/pod.t
new file mode 100644
index 0000000..759be81
--- /dev/null
+++ b/lang/perl/xt/pod.t
@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+use Test::More;
+eval "use Test::Pod 1.00";
+plan skip_all => "Test::Pod 1.00 required for testing POD" if $@;
+all_pod_files_ok();
diff --git a/lang/php/README.txt b/lang/php/README.txt
new file mode 100644
index 0000000..4106107
--- /dev/null
+++ b/lang/php/README.txt
@@ -0,0 +1,33 @@
+What the Avro PHP library is
+============================
+
+A library for using [Avro](http://avro.apache.org/) with PHP.
+
+Requirements
+============
+ * PHP 5
+ * On 32-bit platforms, the [GMP PHP extension](http://php.net/gmp)
+ * For testing, [PHPUnit](http://www.phpunit.de/)
+
+Both GMP and PHPUnit are often available via package management
+systems as `php5-gmp` and `phpunit`, respectively.
+
+Getting started
+===============
+
+Download the avro-php distribution, untar it, and put it in your include path:
+
+    tar xjf avro-php.tar.bz2 # the tarball name may carry a version, e.g. avro-php-1.4.0.tar.bz2
+    cp -r avro-php /path/to/where/you/want/it
+
+Require the avro.php file in your source, and you should be good to go:
+
+    <?php
+    require_once('avro-php/avro.php');
+
+If you're pulling from source, put `lib/` in your include path and require `lib/avro.php`:
+
+    <?php
+    require_once('lib/avro.php');
+
+Take a look in `examples/` for usage.
diff --git a/lang/php/build.sh b/lang/php/build.sh
new file mode 100755
index 0000000..fe60b5c
--- /dev/null
+++ b/lang/php/build.sh
@@ -0,0 +1,73 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+cd `dirname "$0"`
+
+dist_dir="../../dist/php"
+build_dir="pkg"
+version=$(cat ../../share/VERSION.txt)
+libname="avro-php-$version"
+lib_dir="$build_dir/$libname"
+tarball="$libname.tar.bz2"
+
+test_tmp_dir="test/tmp"
+
+function clean {
+    rm -rf "$test_tmp_dir"
+    rm -rf "$build_dir"
+}
+
+function dist {
+    mkdir -p "$build_dir/$libname" "$lib_dir/examples"
+    cp -pr lib "$lib_dir"
+    cp -pr examples/*.php "$lib_dir/examples"
+    cp README.txt ../../LICENSE.txt ../../NOTICE.txt "$lib_dir"
+    cd "$build_dir"
+    tar -cjf "$tarball" "$libname"
+    mkdir -p "../$dist_dir"
+    cp "$tarball" "../$dist_dir"
+}
+
+case "$1" in
+     interop-data-generate)
+       php test/generate_interop_data.php
+       ;;
+
+     test-interop)
+       phpunit test/InterOpTest.php
+       ;;
+
+     test)
+       phpunit test/AllTests.php
+       ;;
+
+     dist)
+        dist
+       ;;
+
+     clean)
+       clean
+       ;;
+
+     *)
+       echo "Usage: $0 {interop-data-generate|test-interop|test|dist|clean}"
+esac
+
+
+exit 0
diff --git a/lang/php/examples/write_read.php b/lang/php/examples/write_read.php
new file mode 100644
index 0000000..ba38968
--- /dev/null
+++ b/lang/php/examples/write_read.php
@@ -0,0 +1,94 @@
+#!/usr/bin/env php
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+require_once('../lib/avro.php');
+
+// Write and read a data file
+
+$writers_schema_json = <<<_JSON
+{"name":"member",
+ "type":"record",
+ "fields":[{"name":"member_id", "type":"int"},
+           {"name":"member_name", "type":"string"}]}
+_JSON;
+
+$jose = array('member_id' => 1392, 'member_name' => 'Jose');
+$maria = array('member_id' => 1642, 'member_name' => 'Maria');
+$data = array($jose, $maria);
+
+$file_name = 'data.avr';
+// Open $file_name for writing, using the given writer's schema
+$data_writer = AvroDataIO::open_file($file_name, 'w', $writers_schema_json);
+
+// Write each datum to the file
+foreach ($data as $datum)
+  $data_writer->append($datum);
+// Tidy up
+$data_writer->close();
+
+// Open $file_name (by default for reading) using the writer's schema
+// included in the file
+$data_reader = AvroDataIO::open_file($file_name);
+echo "from file:\n";
+// Read each datum
+foreach ($data_reader->data() as $datum)
+  echo var_export($datum, true) . "\n";
+$data_reader->close();
+
+// Create a data string
+// Create a string io object.
+$io = new AvroStringIO();
+// Create a datum writer object
+$writers_schema = AvroSchema::parse($writers_schema_json);
+$writer = new AvroIODatumWriter($writers_schema);
+$data_writer = new AvroDataIOWriter($io, $writer, $writers_schema);
+foreach ($data as $datum)
+  $data_writer->append($datum);
+$data_writer->close();
+
+$binary_string = $io->string();
+
+// Load the string data string
+$read_io = new AvroStringIO($binary_string);
+$data_reader = new AvroDataIOReader($read_io, new AvroIODatumReader());
+echo "from binary string:\n";
+foreach ($data_reader->data() as $datum)
+  echo var_export($datum, true) . "\n";
+
+/** Output
+from file:
+array (
+  'member_id' => 1392,
+  'member_name' => 'Jose',
+)
+array (
+  'member_id' => 1642,
+  'member_name' => 'Maria',
+)
+from binary string:
+array (
+  'member_id' => 1392,
+  'member_name' => 'Jose',
+)
+array (
+  'member_id' => 1642,
+  'member_name' => 'Maria',
+)
+*/
diff --git a/lang/php/lib/avro.php b/lang/php/lib/avro.php
new file mode 100644
index 0000000..4805fb7
--- /dev/null
+++ b/lang/php/lib/avro.php
@@ -0,0 +1,195 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Avro library top-level file.
+ *
+ * This file in turn includes all files supporting the
+ * Avro implementation.
+ *
+ * @package Avro
+ */
+
+/**
+ * General Avro exceptions.
+ * @package Avro
+ */
+class AvroException extends Exception {}
+
+/**
+ * Library-level class for PHP Avro port.
+ *
+ * Contains library details such as version number and platform checks.
+ *
+ * This port is an implementation of the
+ * {@link http://avro.apache.org/docs/1.3.3/spec.html Avro 1.3.3 Specification}
+ *
+ * @package Avro
+ *
+ */
+class Avro
+{
+  /**
+   * @var string version number of Avro specification to which
+   *             this implementation complies
+   */
+  const SPEC_VERSION  = '1.3.3';
+
+  /**#@+
+   * Constant to enumerate endianness.
+   * @access private
+   * @var int
+   */
+  const BIG_ENDIAN    = 0x00;
+  const LITTLE_ENDIAN = 0x01;
+  /**#@-*/
+
+  /**
+   * Memoized result of self::set_endianness()
+   * @var int self::BIG_ENDIAN or self::LITTLE_ENDIAN
+   * @see self::set_endianness()
+   */
+  private static $endianness;
+
+  /**#@+
+   * Constant to enumerate biginteger handling mode.
+   * GMP is used, if available, on 32-bit platforms.
+   */
+  const PHP_BIGINTEGER_MODE = 0x00;
+  const GMP_BIGINTEGER_MODE = 0x01;
+  /**#@-*/
+
+  /**
+   * @var int mode used to handle bigintegers. After Avro::check_64_bit()
+   *          has been called (usually via a call to Avro::check_platform()),
+   *          this is set to self::GMP_BIGINTEGER_MODE on 32-bit platforms
+   *          that have GMP available, and to self::PHP_BIGINTEGER_MODE
+   *          otherwise.
+   */
+  private static $biginteger_mode;
+
+  /**
+   * Wrapper method to call each required check.
+   *
+   */
+  public static function check_platform()
+  {
+    self::check_64_bit();
+    self::check_little_endian();
+  }
+
+  /**
+   * Determines if the host platform can encode and decode long integer data.
+   *
+   * @throws AvroException if the platform cannot handle long integers.
+   */
+  private static function check_64_bit()
+  {
+    if (8 != PHP_INT_SIZE)
+    {
+      if (extension_loaded('gmp'))
+        self::$biginteger_mode = self::GMP_BIGINTEGER_MODE;
+      else
+        throw new AvroException('This platform cannot handle 64-bit operations. '
+                                . 'Please install the GMP PHP extension.');
+    }
+    else
+      self::$biginteger_mode = self::PHP_BIGINTEGER_MODE;
+
+  }
+
+  /**
+   * @returns boolean true if the PHP GMP extension is used and false otherwise.
+   * @internal Requires Avro::check_64_bit() (exposed via Avro::check_platform())
+   *           to have been called to set Avro::$biginteger_mode.
+   */
+  static function uses_gmp()
+  {
+    return (self::GMP_BIGINTEGER_MODE == self::$biginteger_mode);
+  }
+
+  /**
+   * Determines if the host platform is little endian,
+   * required for processing double and float data.
+   *
+   * @throws AvroException if the platform is not little endian.
+   */
+  private static function check_little_endian()
+  {
+    if (!self::is_little_endian_platform())
+      throw new AvroException('This is not a little-endian platform');
+  }
+
+  /**
+   * Determines the endianness of the host platform and memoizes
+   * the result to Avro::$endianness.
+   *
+   * Based on a similar check performed in http://pear.php.net/package/Math_BinaryUtils
+   *
+   * @throws AvroException if the endianness cannot be determined.
+   */
+  private static function set_endianness()
+  {
+    $packed = pack('d', 1);
+    switch ($packed)
+    {
+      case "\77\360\0\0\0\0\0\0":
+        self::$endianness = self::BIG_ENDIAN;
+        break;
+      case "\0\0\0\0\0\0\360\77":
+        self::$endianness = self::LITTLE_ENDIAN;
+        break;
+      default:
+        throw new AvroException(
+          sprintf('Error determining platform endianness: %s',
+                  AvroDebug::hex_string($packed)));
+    }
+  }
+
+  /**
+   * @returns boolean true if the host platform is big endian
+   *                  and false otherwise.
+   * @uses self::set_endianness()
+   */
+  private static function is_big_endian_platform()
+  {
+    if (is_null(self::$endianness))
+      self::set_endianness();
+
+    return (self::BIG_ENDIAN == self::$endianness);
+  }
+
+  /**
+   * @returns boolean true if the host platform is little endian,
+   *                  and false otherwise.
+   * @uses self::is_big_endian_platform()
+   */
+  private static function is_little_endian_platform()
+  {
+    return !self::is_big_endian_platform();
+  }
+
+}
+
+require_once('avro/util.php');
+require_once('avro/debug.php');
+require_once('avro/schema.php');
+require_once('avro/io.php');
+require_once('avro/gmp.php');
+require_once('avro/datum.php');
+require_once('avro/data_file.php');
+require_once('avro/protocol.php');
diff --git a/lang/php/lib/avro/data_file.php b/lang/php/lib/avro/data_file.php
new file mode 100644
index 0000000..e8e089f
--- /dev/null
+++ b/lang/php/lib/avro/data_file.php
@@ -0,0 +1,535 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Classes handling reading and writing from and to AvroIO objects
+ * @package Avro
+ */
+
+/**
+ * Raised when something unkind happens with respect to AvroDataIO.
+ * @package Avro
+ */
+class AvroDataIOException extends AvroException {}
+
+/**
+ * @package Avro
+ */
+class AvroDataIO
+{
+  /**
+   * @var int used in file header
+   */
+  const VERSION = 1;
+
+  /**
+   * @var int count of bytes in synchronization marker
+   */
+  const SYNC_SIZE = 16;
+
+  /**
+   * @var int buffer size threshold, in bytes, at which a block is
+   *          written out; arbitrarily set to 4000 * SYNC_SIZE
+   * @todo make this value configurable
+   */
+  const SYNC_INTERVAL = 64000;
+
+  /**
+   * @var string map key for datafile metadata codec value
+   */
+  const METADATA_CODEC_ATTR = 'avro.codec';
+
+  /**
+   * @var string map key for datafile metadata schema value
+   */
+  const METADATA_SCHEMA_ATTR = 'avro.schema';
+  /**
+   * @var string JSON for datafile metadata schema
+   */
+  const METADATA_SCHEMA_JSON = '{"type":"map","values":"bytes"}';
+
+  /**
+   * @var string codec value for NULL codec
+   */
+  const NULL_CODEC = 'null';
+
+  /**
+   * @var string codec value for deflate codec
+   */
+  const DEFLATE_CODEC = 'deflate';
+
+  /**
+   * @var array array of valid codec names
+   * @todo Avro implementations are required to implement deflate codec as well,
+   *       so implement it already!
+   */
+  private static $valid_codecs = array(self::NULL_CODEC);
+
+  /**
+   * @var AvroSchema cached version of metadata schema object
+   */
+  private static $metadata_schema;
+
+  /**
+   * @returns string the initial "magic" segment of an Avro container file header.
+   */
+  public static function magic() { return ('Obj' . pack('c', self::VERSION)); }
+
+  /**
+   * @returns int count of bytes in the initial "magic" segment of the
+   *              Avro container file header
+   */
+  public static function magic_size() { return strlen(self::magic()); }
+
+
+  /**
+   * @returns AvroSchema object of Avro container file metadata.
+   */
+  public static function metadata_schema()
+  {
+    if (is_null(self::$metadata_schema))
+      self::$metadata_schema = AvroSchema::parse(self::METADATA_SCHEMA_JSON);
+    return self::$metadata_schema;
+  }
+
+  /**
+   * @param string $file_path path of the file to open
+   * @param string $mode one of AvroFile::READ_MODE or AvroFile::WRITE_MODE
+   * @param string $schema_json JSON of writer's schema
+   * @returns AvroDataIOWriter instance of AvroDataIOWriter
+   *
+   * @throws AvroDataIOException if $schema_json is not provided when opening
+   *         for writing, or if an invalid $mode is given.
+   */
+  public static function open_file($file_path, $mode=AvroFile::READ_MODE,
+                                   $schema_json=null)
+  {
+    $schema = !is_null($schema_json)
+      ? AvroSchema::parse($schema_json) : null;
+
+    $io = false;
+    switch ($mode)
+    {
+      case AvroFile::WRITE_MODE:
+        if (is_null($schema))
+          throw new AvroDataIOException('Writing an Avro file requires a schema.');
+        $file = new AvroFile($file_path, AvroFile::WRITE_MODE);
+        $io = self::open_writer($file, $schema);
+        break;
+      case AvroFile::READ_MODE:
+        $file = new AvroFile($file_path, AvroFile::READ_MODE);
+        $io = self::open_reader($file, $schema);
+        break;
+      default:
+        throw new AvroDataIOException(
+          sprintf("Only modes '%s' and '%s' allowed. You gave '%s'.",
+                  AvroFile::READ_MODE, AvroFile::WRITE_MODE, $mode));
+    }
+    return $io;
+  }
+
+  /**
+   * @returns array array of valid codecs
+   */
+  private static function valid_codecs()
+  {
+    return self::$valid_codecs;
+  }
+
+  /**
+   * @param string $codec
+   * @returns boolean true if $codec is a valid codec value and false otherwise
+   */
+  public static function is_valid_codec($codec)
+  {
+    return in_array($codec, self::valid_codecs());
+  }
+
+  /**
+   * @param AvroIO $io
+   * @param AvroSchema $schema
+   * @returns AvroDataIOWriter
+   */
+  protected static function open_writer($io, $schema)
+  {
+    $writer = new AvroIODatumWriter($schema);
+    return new AvroDataIOWriter($io, $writer, $schema);
+  }
+
+  /**
+   * @param AvroIO $io
+   * @param AvroSchema $schema
+   * @returns AvroDataIOReader
+   */
+  protected static function open_reader($io, $schema)
+  {
+    $reader = new AvroIODatumReader(null, $schema);
+    return new AvroDataIOReader($io, $reader);
+  }
+
+}
+
+/**
+ *
+ * Reads Avro data from an AvroIO source using an AvroSchema.
+ * @package Avro
+ */
+class AvroDataIOReader
+{
+  /**
+   * @var AvroIO
+   */
+  private $io;
+
+  /**
+   * @var AvroIOBinaryDecoder
+   */
+  private $decoder;
+
+  /**
+   * @var AvroIODatumReader
+   */
+  private $datum_reader;
+
+  /**
+   * @var string sync marker read from the container header; public so that
+   *             AvroDataIOWriter can read it when appending to an existing file
+   */
+  public $sync_marker;
+
+  /**
+   * @var array object container metadata; public so that AvroDataIOWriter
+   *            can read it when appending to an existing file
+   */
+  public $metadata;
+
+  /**
+   * @var int count of items in block
+   */
+  private $block_count;
+
+  /**
+   * @param AvroIO $io source from which to read
+   * @param AvroIODatumReader $datum_reader reader that understands
+   *                                        the data schema
+   * @throws AvroDataIOException if $io is not an instance of AvroIO
+   * @uses read_header()
+   */
+  public function __construct($io, $datum_reader)
+  {
+
+    if (!($io instanceof AvroIO))
+      throw new AvroDataIOException('io must be instance of AvroIO');
+
+    $this->io = $io;
+    $this->decoder = new AvroIOBinaryDecoder($this->io);
+    $this->datum_reader = $datum_reader;
+    $this->read_header();
+
+    $codec = AvroUtil::array_value($this->metadata, 
+                                   AvroDataIO::METADATA_CODEC_ATTR);
+    if ($codec && !AvroDataIO::is_valid_codec($codec))
+      throw new AvroDataIOException(sprintf('Unknown codec: %s', $codec));
+
+    $this->block_count = 0;
+    // FIXME: Seems unsanitary to set writers_schema here.
+    // Can't constructor take it as an argument?
+    $this->datum_reader->set_writers_schema(
+      AvroSchema::parse($this->metadata[AvroDataIO::METADATA_SCHEMA_ATTR]));
+  }
+
+  /**
+   * Reads header of object container
+   * @throws AvroDataIOException if the file is not an Avro data file.
+   */
+  private function read_header()
+  {
+    $this->seek(0, AvroIO::SEEK_SET);
+
+    $magic = $this->read(AvroDataIO::magic_size());
+
+    if (strlen($magic) < AvroDataIO::magic_size())
+      throw new AvroDataIOException(
+        'Not an Avro data file: shorter than the Avro magic block');
+
+    if (AvroDataIO::magic() != $magic)
+      throw new AvroDataIOException(
+        sprintf('Not an Avro data file: %s does not match %s',
+                $magic, AvroDataIO::magic()));
+
+    $this->metadata = $this->datum_reader->read_data(AvroDataIO::metadata_schema(),
+                                                     AvroDataIO::metadata_schema(),
+                                                     $this->decoder);
+    $this->sync_marker = $this->read(AvroDataIO::SYNC_SIZE);
+  }
+
+  /**
+   * @internal Would be nice to implement data() as an iterator, I think
+   * @returns array array of data from the object container.
+   */
+  public function data()
+  {
+    $data = array();
+    while (true)
+    {
+      if (0 == $this->block_count)
+      {
+        if ($this->is_eof())
+          break;
+
+        if ($this->skip_sync())
+          if ($this->is_eof())
+            break;
+
+        $this->read_block_header();
+      }
+      $data []= $this->datum_reader->read($this->decoder);
+      $this->block_count -= 1;
+    }
+    return $data;
+  }
+
+  /**
+   * Closes this reader (and its AvroIO object).
+   * @uses AvroIO::close()
+   */
+  public function close() { return $this->io->close(); }
+
+  /**
+   * @uses AvroIO::seek()
+   */
+  private function seek($offset, $whence)
+  {
+    return $this->io->seek($offset, $whence);
+  }
+
+  /**
+   * @uses AvroIO::read()
+   */
+  private function read($len) { return $this->io->read($len); }
+
+  /**
+   * @uses AvroIO::is_eof()
+   */
+  private function is_eof() { return $this->io->is_eof(); }
+
+  private function skip_sync()
+  {
+    $proposed_sync_marker = $this->read(AvroDataIO::SYNC_SIZE);
+    if ($proposed_sync_marker != $this->sync_marker)
+    {
+      $this->seek(-AvroDataIO::SYNC_SIZE, AvroIO::SEEK_CUR);
+      return false;
+    }
+    return true;
+  }
+
+  /**
+   * Reads the block header (which includes the count of items in the block
+   * and the length in bytes of the block)
+   * @returns int length in bytes of the block.
+   */
+  private function read_block_header()
+  {
+    $this->block_count = $this->decoder->read_long();
+    return $this->decoder->read_long();
+  }
+
+}
+
+/**
+ * Writes Avro data to an AvroIO source using an AvroSchema
+ * @package Avro
+ */
+class AvroDataIOWriter
+{
+  /**
+   * @returns string a new, unique sync marker.
+   */
+  private static function generate_sync_marker()
+  {
+    // From http://php.net/manual/en/function.mt-rand.php comments
+    return pack('S8',
+                mt_rand(0, 0xffff), mt_rand(0, 0xffff),
+                mt_rand(0, 0xffff),
+                mt_rand(0, 0xffff) | 0x4000,
+                mt_rand(0, 0xffff) | 0x8000,
+                mt_rand(0, 0xffff), mt_rand(0, 0xffff), mt_rand(0, 0xffff));
+  }
+
+  /**
+   * @var AvroIO object container where data is written
+   */
+  private $io;
+
+  /**
+   * @var AvroIOBinaryEncoder encoder for object container
+   */
+  private $encoder;
+
+  /**
+   * @var AvroDatumWriter
+   */
+  private $datum_writer;
+
+  /**
+   * @var AvroStringIO buffer for writing
+   */
+  private $buffer;
+
+  /**
+   * @var AvroIOBinaryEncoder encoder for buffer
+   */
+  private $buffer_encoder;
+
+  /**
+   * @var int count of items written to block
+   */
+  private $block_count;
+
+  /**
+   * @var array map of object container metadata
+   */
+  private $metadata;
+
+  /**
+   * @var string sync marker for the object container
+   */
+  private $sync_marker;
+
+  /**
+   * @param AvroIO $io
+   * @param AvroIODatumWriter $datum_writer
+   * @param AvroSchema $writers_schema
+   */
+  public function __construct($io, $datum_writer, $writers_schema=null)
+  {
+    if (!($io instanceof AvroIO))
+      throw new AvroDataIOException('io must be instance of AvroIO');
+
+    $this->io = $io;
+    $this->encoder = new AvroIOBinaryEncoder($this->io);
+    $this->datum_writer = $datum_writer;
+    $this->buffer = new AvroStringIO();
+    $this->buffer_encoder = new AvroIOBinaryEncoder($this->buffer);
+    $this->block_count = 0;
+    $this->metadata = array();
+
+    if ($writers_schema)
+    {
+      $this->sync_marker = self::generate_sync_marker();
+      $this->metadata[AvroDataIO::METADATA_CODEC_ATTR] = AvroDataIO::NULL_CODEC;
+      $this->metadata[AvroDataIO::METADATA_SCHEMA_ATTR] = strval($writers_schema);
+      $this->write_header();
+    }
+    else
+    {
+      $dfr = new AvroDataIOReader($this->io, new AvroIODatumReader());
+      $this->sync_marker = $dfr->sync_marker;
+      $this->metadata[AvroDataIO::METADATA_CODEC_ATTR] = $dfr->metadata[AvroDataIO::METADATA_CODEC_ATTR];
+
+      $schema_from_file = $dfr->metadata[AvroDataIO::METADATA_SCHEMA_ATTR];
+      $this->metadata[AvroDataIO::METADATA_SCHEMA_ATTR] = $schema_from_file;
+      $this->datum_writer->writers_schema = AvroSchema::parse($schema_from_file);
+      $this->seek(0, SEEK_END);
+    }
+  }
+
+  /**
+   * @param mixed $datum
+   */
+  public function append($datum)
+  {
+    $this->datum_writer->write($datum, $this->buffer_encoder);
+    $this->block_count++;
+
+    if ($this->buffer->length() >= AvroDataIO::SYNC_INTERVAL)
+      $this->write_block();
+  }
+
+  /**
+   * Flushes buffer to AvroIO object container and closes it.
+   * @return mixed value of $io->close()
+   * @see AvroIO::close()
+   */
+  public function close()
+  {
+    $this->flush();
+    return $this->io->close();
+  }
+
+  /**
+   * Flushes buffer to AvroIO object container.
+   * @returns mixed value of $io->flush()
+   * @see AvroIO::flush()
+   */
+  private function flush()
+  {
+    $this->write_block();
+    return $this->io->flush();
+  }
+
+  /**
+   * Writes a block of data to the AvroIO object container.
+   * @throws AvroDataIOException if the codec provided by the encoder
+   *         is not supported
+   * @internal Should the codec check happen in the constructor?
+   *           Why wait until we're writing data?
+   */
+  private function write_block()
+  {
+    if ($this->block_count > 0)
+    {
+      $this->encoder->write_long($this->block_count);
+      $to_write = strval($this->buffer);
+      $this->encoder->write_long(strlen($to_write));
+
+      if (AvroDataIO::is_valid_codec(
+            $this->metadata[AvroDataIO::METADATA_CODEC_ATTR]))
+        $this->write($to_write);
+      else
+        throw new AvroDataIOException(
+          sprintf('codec %s is not supported',
+                  $this->metadata[AvroDataIO::METADATA_CODEC_ATTR]));
+
+      $this->write($this->sync_marker);
+      $this->buffer->truncate();
+      $this->block_count = 0;
+    }
+  }
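+
+  /*
+   * Each block written above is laid out as:
+   *   item count (long), byte length (long), serialized data, sync marker
+   */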
+
+  /**
+   * Writes the header of the AvroIO object container
+   */
+  private function write_header()
+  {
+    $this->write(AvroDataIO::magic());
+    $this->datum_writer->write_data(AvroDataIO::metadata_schema(),
+                                    $this->metadata, $this->encoder);
+    $this->write($this->sync_marker);
+  }
+
+  /**
+   * @param string $bytes
+   * @uses AvroIO::write()
+   */
+  private function write($bytes) { return $this->io->write($bytes); }
+
+  /**
+   * @param int $offset
+   * @param int $whence
+   * @uses AvroIO::seek()
+   */
+  private function seek($offset, $whence)
+  {
+    return $this->io->seek($offset, $whence);
+  }
+}
diff --git a/lang/php/lib/avro/datum.php b/lang/php/lib/avro/datum.php
new file mode 100644
index 0000000..ea275fa
--- /dev/null
+++ b/lang/php/lib/avro/datum.php
@@ -0,0 +1,984 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Classes for reading and writing Avro data to AvroIO objects.
+ *
+ * @package Avro
+ *
+ * @todo Implement JSON encoding, as is required by the Avro spec.
+ */
+
+/**
+ * Exceptions arising from writing or reading Avro data.
+ *
+ * @package Avro
+ */
+class AvroIOTypeException extends AvroException
+{
+  /**
+   * @param AvroSchema $expected_schema
+   * @param mixed $datum
+   */
+  public function __construct($expected_schema, $datum)
+  {
+    parent::__construct(sprintf('The datum %s is not an example of schema %s',
+                                var_export($datum, true), $expected_schema));
+  }
+}
+
+/**
+ * Exceptions arising from incompatibility between
+ * reader and writer schemas.
+ *
+ * @package Avro
+ */
+class AvroIOSchemaMatchException extends AvroException
+{
+  /**
+   * @param AvroSchema $writers_schema
+   * @param AvroSchema $readers_schema
+   */
+  function __construct($writers_schema, $readers_schema)
+  {
+    parent::__construct(
+      sprintf("Writer's schema %s and Reader's schema %s do not match.",
+              $writers_schema, $readers_schema));
+  }
+}
+
+/**
+ * Handles schema-specific writing of data to the encoder.
+ *
+ * Ensures that each datum written is consistent with the writer's schema.
+ *
+ * @package Avro
+ */
+class AvroIODatumWriter
+{
+  /**
+   * Schema used by this instance to write Avro data. Public because
+   * AvroDataIOWriter assigns it directly when appending to an existing file.
+   * @var AvroSchema
+   */
+  public $writers_schema;
+
+  /**
+   * @param AvroSchema $writers_schema
+   */
+  function __construct($writers_schema=null)
+  {
+    $this->writers_schema = $writers_schema;
+  }
+
+  /**
+   * @param AvroSchema $writers_schema
+   * @param $datum
+   * @param AvroIOBinaryEncoder $encoder
+   * @returns mixed
+   *
+   * @throws AvroIOTypeException if $datum is invalid for $writers_schema
+   */
+  function write_data($writers_schema, $datum, $encoder)
+  {
+    if (!AvroSchema::is_valid_datum($writers_schema, $datum))
+      throw new AvroIOTypeException($writers_schema, $datum);
+
+    switch ($writers_schema->type())
+    {
+      case AvroSchema::NULL_TYPE:
+        return $encoder->write_null($datum);
+      case AvroSchema::BOOLEAN_TYPE:
+        return $encoder->write_boolean($datum);
+      case AvroSchema::INT_TYPE:
+        return $encoder->write_int($datum);
+      case AvroSchema::LONG_TYPE:
+        return $encoder->write_long($datum);
+      case AvroSchema::FLOAT_TYPE:
+        return $encoder->write_float($datum);
+      case AvroSchema::DOUBLE_TYPE:
+        return $encoder->write_double($datum);
+      case AvroSchema::STRING_TYPE:
+        return $encoder->write_string($datum);
+      case AvroSchema::BYTES_TYPE:
+        return $encoder->write_bytes($datum);
+      case AvroSchema::ARRAY_SCHEMA:
+        return $this->write_array($writers_schema, $datum, $encoder);
+      case AvroSchema::MAP_SCHEMA:
+        return $this->write_map($writers_schema, $datum, $encoder);
+      case AvroSchema::FIXED_SCHEMA:
+        return $this->write_fixed($writers_schema, $datum, $encoder);
+      case AvroSchema::ENUM_SCHEMA:
+        return $this->write_enum($writers_schema, $datum, $encoder);
+      case AvroSchema::RECORD_SCHEMA:
+      case AvroSchema::ERROR_SCHEMA:
+      case AvroSchema::REQUEST_SCHEMA:
+        return $this->write_record($writers_schema, $datum, $encoder);
+      case AvroSchema::UNION_SCHEMA:
+        return $this->write_union($writers_schema, $datum, $encoder);
+      default:
+        throw new AvroException(sprintf('Unknown type: %s',
+                                        $writers_schema->type()));
+    }
+  }
+
+  /**
+   * @param $datum
+   * @param AvroIOBinaryEncoder $encoder
+   */
+  function write($datum, $encoder)
+  {
+    $this->write_data($this->writers_schema, $datum, $encoder);
+  }
+
+  /**#@+
+   * @param AvroSchema $writers_schema
+   * @param null|boolean|int|float|string|array $datum item to be written
+   * @param AvroIOBinaryEncoder $encoder
+   */
+  private function write_array($writers_schema, $datum, $encoder)
+  {
+    $datum_count = count($datum);
+    if (0 < $datum_count)
+    {
+      $encoder->write_long($datum_count);
+      $items = $writers_schema->items();
+      foreach ($datum as $item)
+        $this->write_data($items, $item, $encoder);
+    }
+    return $encoder->write_long(0);
+  }
+
+  private function write_map($writers_schema, $datum, $encoder)
+  {
+    $datum_count = count($datum);
+    if ($datum_count > 0)
+    {
+      $encoder->write_long($datum_count);
+      foreach ($datum as $k => $v)
+      {
+        $encoder->write_string($k);
+        $this->write_data($writers_schema->values(), $v, $encoder);
+      }
+    }
+    $encoder->write_long(0);
+  }
+
+  private function write_union($writers_schema, $datum, $encoder)
+  {
+    $datum_schema_index = -1;
+    $datum_schema = null;
+    foreach ($writers_schema->schemas() as $index => $schema)
+      if (AvroSchema::is_valid_datum($schema, $datum))
+      {
+        $datum_schema_index = $index;
+        $datum_schema = $schema;
+        break;
+      }
+
+    if (is_null($datum_schema))
+      throw new AvroIOTypeException($writers_schema, $datum);
+
+    $encoder->write_long($datum_schema_index);
+    $this->write_data($datum_schema, $datum, $encoder);
+  }
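+
+  /*
+   * Illustration: against the union schema ["null", "int"], writing the
+   * datum 5 selects branch index 1, emits that index as a long, and then
+   * emits the int encoding of 5.
+   */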
+
+  private function write_enum($writers_schema, $datum, $encoder)
+  {
+    $datum_index = $writers_schema->symbol_index($datum);
+    return $encoder->write_int($datum_index);
+  }
+
+  private function write_fixed($writers_schema, $datum, $encoder)
+  {
+    /**
+     * NOTE Unused $writers_schema parameter included for consistency
+     * with other write_* methods.
+     */
+    return $encoder->write($datum);
+  }
+
+  private function write_record($writers_schema, $datum, $encoder)
+  {
+    foreach ($writers_schema->fields() as $field)
+      $this->write_data($field->type(), $datum[$field->name()], $encoder);
+  }
+
+  /**#@-*/
+}
+
+/**
+ * Encodes and writes Avro data to an AvroIO object using
+ * Avro binary encoding.
+ *
+ * @package Avro
+ */
+class AvroIOBinaryEncoder
+{
+  /**
+   * Performs encoding of the given float value to a binary string
+   *
+   * XXX: This is <b>not</b> endian-aware! The {@link Avro::check_platform()}
+   * called in {@link AvroIOBinaryEncoder::__construct()} should ensure the
+   * library is only used on little-endian platforms, which ensures the little-endian
+   * encoding required by the Avro spec.
+   *
+   * @param float $float
+   * @returns string bytes
+   * @see Avro::check_platform()
+   */
+  static function float_to_int_bits($float)
+  {
+    return pack('f', (float) $float);
+  }
+
+  /**
+   * Performs encoding of the given double value to a binary string
+   *
+   * XXX: This is <b>not</b> endian-aware! See comments in
+   * {@link AvroIOBinaryEncoder::float_to_int_bits()} for details.
+   *
+   * @param double $double
+   * @returns string bytes
+   */
+  static function double_to_long_bits($double)
+  {
+    return pack('d', (double) $double);
+  }
+
+  /**
+   * @param int|string $n
+   * @returns string long $n encoded as bytes
+   * @internal This relies on 64-bit PHP.
+   */
+  static public function encode_long($n)
+  {
+    $n = (int) $n;
+    $n = ($n << 1) ^ ($n >> 63);
+    $str = '';
+    while (0 != ($n & ~0x7F))
+    {
+      $str .= chr(($n & 0x7F) | 0x80);
+      $n >>= 7;
+    }
+    $str .= chr($n);
+    return $str;
+  }
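+
+  /*
+   * Worked examples (on 64-bit PHP):
+   *   encode_long(0)  === "\x00"      // zigzag(0)  = 0
+   *   encode_long(-1) === "\x01"      // zigzag(-1) = 1
+   *   encode_long(1)  === "\x02"      // zigzag(1)  = 2
+   *   encode_long(64) === "\x80\x01"  // zigzag(64) = 128: two varint bytes
+   */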
+
+  /**
+   * @var AvroIO
+   */
+  private $io;
+
+  /**
+   * @param AvroIO $io object to which data is to be written.
+   *
+   */
+  function __construct($io)
+  {
+    Avro::check_platform();
+    $this->io = $io;
+  }
+
+  /**
+   * @param null $datum actual value is ignored
+   */
+  function write_null($datum) { return null; }
+
+  /**
+   * @param boolean $datum
+   */
+  function write_boolean($datum)
+  {
+    $byte = $datum ? chr(1) : chr(0);
+    $this->write($byte);
+  }
+
+  /**
+   * @param int $datum
+   */
+  function write_int($datum) { $this->write_long($datum); }
+
+  /**
+   * @param int $n
+   */
+  function write_long($n)
+  {
+    if (Avro::uses_gmp())
+      $this->write(AvroGMP::encode_long($n));
+    else
+      $this->write(self::encode_long($n));
+  }
+
+  /**
+   * @param float $datum
+   * @uses self::float_to_int_bits()
+   */
+  public function write_float($datum)
+  {
+    $this->write(self::float_to_int_bits($datum));
+  }
+
+  /**
+   * @param float $datum
+   * @uses self::double_to_long_bits()
+   */
+  public function write_double($datum)
+  {
+    $this->write(self::double_to_long_bits($datum));
+  }
+
+  /**
+   * @param string $str
+   * @uses self::write_bytes()
+   */
+  function write_string($str) { $this->write_bytes($str); }
+
+  /**
+   * @param string $bytes
+   */
+  function write_bytes($bytes)
+  {
+    $this->write_long(strlen($bytes));
+    $this->write($bytes);
+  }
+
+  /**
+   * @param string $datum
+   */
+  function write($datum) { $this->io->write($datum); }
+}
+
+/**
+ * Handles schema-specific reading of data from the decoder.
+ *
+ * Also handles schema resolution between the reader and writer
+ * schemas (if a writer's schema is provided).
+ *
+ * @package Avro
+ */
+class AvroIODatumReader
+{
+  /**
+   *
+   * @param AvroSchema $writers_schema
+   * @param AvroSchema $readers_schema
+   * @returns boolean true if the schemas are consistent with
+   *                  each other and false otherwise.
+   */
+  static function schemas_match($writers_schema, $readers_schema)
+  {
+    $writers_schema_type = $writers_schema->type();
+    $readers_schema_type = $readers_schema->type();
+
+    if (AvroSchema::UNION_SCHEMA == $writers_schema_type
+        || AvroSchema::UNION_SCHEMA == $readers_schema_type)
+      return true;
+
+    if ($writers_schema_type == $readers_schema_type)
+    {
+      if (AvroSchema::is_primitive_type($writers_schema_type))
+        return true;
+
+      switch ($readers_schema_type)
+      {
+        case AvroSchema::MAP_SCHEMA:
+          return self::attributes_match($writers_schema->values(),
+                                        $readers_schema->values(),
+                                        array(AvroSchema::TYPE_ATTR));
+        case AvroSchema::ARRAY_SCHEMA:
+          return self::attributes_match($writers_schema->items(),
+                                        $readers_schema->items(),
+                                        array(AvroSchema::TYPE_ATTR));
+        case AvroSchema::ENUM_SCHEMA:
+          return self::attributes_match($writers_schema, $readers_schema,
+                                        array(AvroSchema::FULLNAME_ATTR));
+        case AvroSchema::FIXED_SCHEMA:
+          return self::attributes_match($writers_schema, $readers_schema,
+                                        array(AvroSchema::FULLNAME_ATTR,
+                                              AvroSchema::SIZE_ATTR));
+        case AvroSchema::RECORD_SCHEMA:
+        case AvroSchema::ERROR_SCHEMA:
+          return self::attributes_match($writers_schema, $readers_schema,
+                                        array(AvroSchema::FULLNAME_ATTR));
+        case AvroSchema::REQUEST_SCHEMA:
+          // XXX: This seems wrong
+          return true;
+          // XXX: no default
+      }
+    }
+
+    // Schema promotion: a writer's int may be read as a long, float, or
+    // double; a long as a float or double; and a float as a double.
+    if (AvroSchema::INT_TYPE == $writers_schema_type
+        && in_array($readers_schema_type, array(AvroSchema::LONG_TYPE,
+                                                AvroSchema::FLOAT_TYPE,
+                                                AvroSchema::DOUBLE_TYPE)))
+      return true;
+
+    if (AvroSchema::LONG_TYPE == $writers_schema_type
+        && in_array($readers_schema_type, array(AvroSchema::FLOAT_TYPE,
+                                                AvroSchema::DOUBLE_TYPE)))
+      return true;
+
+    if (AvroSchema::FLOAT_TYPE == $writers_schema_type
+        && AvroSchema::DOUBLE_TYPE == $readers_schema_type)
+      return true;
+
+    return false;
+  }
+
+  /**
+   * Checks equivalence of the given attributes of the two given schemas.
+   *
+   * @param AvroSchema $schema_one
+   * @param AvroSchema $schema_two
+   * @param string[] $attribute_names array of string attribute names to compare
+   *
+   * @returns boolean true if the attributes match and false otherwise.
+   */
+  static function attributes_match($schema_one, $schema_two, $attribute_names)
+  {
+    foreach ($attribute_names as $attribute_name)
+      if ($schema_one->attribute($attribute_name)
+          != $schema_two->attribute($attribute_name))
+        return false;
+    return true;
+  }
+
+  /**
+   * @var AvroSchema
+   */
+  private $writers_schema;
+
+  /**
+   * @var AvroSchema
+   */
+  private $readers_schema;
+
+  /**
+   * @param AvroSchema $writers_schema
+   * @param AvroSchema $readers_schema
+   */
+  function __construct($writers_schema=null, $readers_schema=null)
+  {
+    $this->writers_schema = $writers_schema;
+    $this->readers_schema = $readers_schema;
+  }
+
+  /**
+   * @param AvroSchema $writers_schema
+   */
+  public function set_writers_schema($writers_schema)
+  {
+    $this->writers_schema = $writers_schema;
+  }
+
+  /**
+   * @param AvroIOBinaryDecoder $decoder
+   * @returns mixed
+   */
+  public function read($decoder)
+  {
+    if (is_null($this->readers_schema))
+      $this->readers_schema = $this->writers_schema;
+    return $this->read_data($this->writers_schema, $this->readers_schema,
+                            $decoder);
+  }
+
+  /**#@+
+   * @param AvroSchema $writers_schema
+   * @param AvroSchema $readers_schema
+   * @param AvroIOBinaryDecoder $decoder
+   */
+  /**
+   * @returns mixed
+   */
+  public function read_data($writers_schema, $readers_schema, $decoder)
+  {
+    if (!self::schemas_match($writers_schema, $readers_schema))
+      throw new AvroIOSchemaMatchException($writers_schema, $readers_schema);
+
+    // Schema resolution: reader's schema is a union, writer's schema is not
+    if (AvroSchema::UNION_SCHEMA == $readers_schema->type()
+        && AvroSchema::UNION_SCHEMA != $writers_schema->type())
+    {
+      foreach ($readers_schema->schemas() as $schema)
+        if (self::schemas_match($writers_schema, $schema))
+          return $this->read_data($writers_schema, $schema, $decoder);
+      throw new AvroIOSchemaMatchException($writers_schema, $readers_schema);
+    }
+
+    switch ($writers_schema->type())
+    {
+      case AvroSchema::NULL_TYPE:
+        return $decoder->read_null();
+      case AvroSchema::BOOLEAN_TYPE:
+        return $decoder->read_boolean();
+      case AvroSchema::INT_TYPE:
+        return $decoder->read_int();
+      case AvroSchema::LONG_TYPE:
+        return $decoder->read_long();
+      case AvroSchema::FLOAT_TYPE:
+        return $decoder->read_float();
+      case AvroSchema::DOUBLE_TYPE:
+        return $decoder->read_double();
+      case AvroSchema::STRING_TYPE:
+        return $decoder->read_string();
+      case AvroSchema::BYTES_TYPE:
+        return $decoder->read_bytes();
+      case AvroSchema::ARRAY_SCHEMA:
+        return $this->read_array($writers_schema, $readers_schema, $decoder);
+      case AvroSchema::MAP_SCHEMA:
+        return $this->read_map($writers_schema, $readers_schema, $decoder);
+      case AvroSchema::UNION_SCHEMA:
+        return $this->read_union($writers_schema, $readers_schema, $decoder);
+      case AvroSchema::ENUM_SCHEMA:
+        return $this->read_enum($writers_schema, $readers_schema, $decoder);
+      case AvroSchema::FIXED_SCHEMA:
+        return $this->read_fixed($writers_schema, $readers_schema, $decoder);
+      case AvroSchema::RECORD_SCHEMA:
+      case AvroSchema::ERROR_SCHEMA:
+      case AvroSchema::REQUEST_SCHEMA:
+        return $this->read_record($writers_schema, $readers_schema, $decoder);
+      default:
+        throw new AvroException(sprintf("Cannot read unknown schema type: %s",
+                                        $writers_schema->type()));
+    }
+  }
+
+  /**
+   * @returns array
+   */
+  public function read_array($writers_schema, $readers_schema, $decoder)
+  {
+    $items = array();
+    $block_count = $decoder->read_long();
+    while (0 != $block_count)
+    {
+      if ($block_count < 0)
+      {
+        $block_count = -$block_count;
+        $block_size = $decoder->read_long(); // Read (and ignore) block size
+      }
+      for ($i = 0; $i < $block_count; $i++)
+        $items []= $this->read_data($writers_schema->items(),
+                                    $readers_schema->items(),
+                                    $decoder);
+      $block_count = $decoder->read_long();
+    }
+    return $items;
+  }
+
+  /**
+   * @returns array
+   */
+  public function read_map($writers_schema, $readers_schema, $decoder)
+  {
+    $items = array();
+    $pair_count = $decoder->read_long();
+    while (0 != $pair_count)
+    {
+      if ($pair_count < 0)
+      {
+        $pair_count = -$pair_count;
+        // Note: we're not doing anything with block_size other than skipping it
+        $block_size = $decoder->read_long();
+      }
+
+      for ($i = 0; $i < $pair_count; $i++)
+      {
+        $key = $decoder->read_string();
+        $items[$key] = $this->read_data($writers_schema->values(),
+                                        $readers_schema->values(),
+                                        $decoder);
+      }
+      $pair_count = $decoder->read_long();
+    }
+    return $items;
+  }
+
+  /**
+   * @returns mixed
+   */
+  public function read_union($writers_schema, $readers_schema, $decoder)
+  {
+    $schema_index = $decoder->read_long();
+    $selected_writers_schema = $writers_schema->schema_by_index($schema_index);
+    return $this->read_data($selected_writers_schema, $readers_schema, $decoder);
+  }
+
+  /**
+   * @returns string
+   */
+  public function read_enum($writers_schema, $readers_schema, $decoder)
+  {
+    $symbol_index = $decoder->read_int();
+    $symbol = $writers_schema->symbol_by_index($symbol_index);
+    if (!$readers_schema->has_symbol($symbol))
+      null; // FIXME: unset wrt schema resolution
+    return $symbol;
+  }
+
+  /**
+   * @returns string
+   */
+  public function read_fixed($writers_schema, $readers_schema, $decoder)
+  {
+    return $decoder->read($writers_schema->size());
+  }
+
+  /**
+   * @returns array
+   */
+  public function read_record($writers_schema, $readers_schema, $decoder)
+  {
+    $readers_fields = $readers_schema->fields_hash();
+    $record = array();
+    foreach ($writers_schema->fields() as $writers_field)
+    {
+      $type = $writers_field->type();
+      if (isset($readers_fields[$writers_field->name()]))
+        $record[$writers_field->name()]
+          = $this->read_data($type,
+                             $readers_fields[$writers_field->name()]->type(),
+                             $decoder);
+      else
+        $this->skip_data($type, $decoder);
+    }
+    // Fill in default values
+    if (count($readers_fields) > count($record))
+    {
+      $writers_fields = $writers_schema->fields_hash();
+      foreach ($readers_fields as $field_name => $field)
+      {
+        if (!isset($writers_fields[$field_name]))
+        {
+          if ($field->has_default_value())
+            $record[$field->name()]
+              = $this->read_default_value($field->type(),
+                                          $field->default_value());
+          else
+            null; // FIXME: unset
+        }
+      }
+    }
+
+    return $record;
+  }
+  /**#@-*/
+
+  /**
+   * @param AvroSchema $field_schema
+   * @param null|boolean|int|float|string|array $default_value
+   * @returns null|boolean|int|float|string|array
+   *
+   * @throws AvroException if $field_schema type is unknown.
+   */
+  public function read_default_value($field_schema, $default_value)
+  {
+    switch($field_schema->type())
+    {
+      case AvroSchema::NULL_TYPE:
+        return null;
+      case AvroSchema::BOOLEAN_TYPE:
+        return $default_value;
+      case AvroSchema::INT_TYPE:
+      case AvroSchema::LONG_TYPE:
+        return (int) $default_value;
+      case AvroSchema::FLOAT_TYPE:
+      case AvroSchema::DOUBLE_TYPE:
+        return (float) $default_value;
+      case AvroSchema::STRING_TYPE:
+      case AvroSchema::BYTES_TYPE:
+        return $default_value;
+      case AvroSchema::ARRAY_SCHEMA:
+        $array = array();
+        foreach ($default_value as $json_val)
+        {
+          $val = $this->read_default_value($field_schema->items(), $json_val);
+          $array []= $val;
+        }
+        return $array;
+      case AvroSchema::MAP_SCHEMA:
+        $map = array();
+        foreach ($default_value as $key => $json_val)
+          $map[$key] = $this->read_default_value($field_schema->values(),
+                                                 $json_val);
+        return $map;
+      case AvroSchema::UNION_SCHEMA:
+        return $this->read_default_value($field_schema->schema_by_index(0),
+                                         $default_value);
+      case AvroSchema::ENUM_SCHEMA:
+      case AvroSchema::FIXED_SCHEMA:
+        return $default_value;
+      case AvroSchema::RECORD_SCHEMA:
+        $record = array();
+        foreach ($field_schema->fields() as $field)
+        {
+          $field_name = $field->name();
+          $json_val = isset($default_value[$field_name])
+            ? $default_value[$field_name] : $field->default_value();
+
+          $record[$field_name] = $this->read_default_value($field->type(),
+                                                           $json_val);
+        }
+        return $record;
+      default:
+        throw new AvroException(sprintf('Unknown type: %s', $field_schema->type()));
+    }
+  }
+
+  /**
+   * @param AvroSchema $writers_schema
+   * @param AvroIOBinaryDecoder $decoder
+   */
+  private function skip_data($writers_schema, $decoder)
+  {
+    switch ($writers_schema->type())
+    {
+      case AvroSchema::NULL_TYPE:
+        return $decoder->skip_null();
+      case AvroSchema::BOOLEAN_TYPE:
+        return $decoder->skip_boolean();
+      case AvroSchema::INT_TYPE:
+        return $decoder->skip_int();
+      case AvroSchema::LONG_TYPE:
+        return $decoder->skip_long();
+      case AvroSchema::FLOAT_TYPE:
+        return $decoder->skip_float();
+      case AvroSchema::DOUBLE_TYPE:
+        return $decoder->skip_double();
+      case AvroSchema::STRING_TYPE:
+        return $decoder->skip_string();
+      case AvroSchema::BYTES_TYPE:
+        return $decoder->skip_bytes();
+      case AvroSchema::ARRAY_SCHEMA:
+        return $decoder->skip_array($writers_schema, $decoder);
+      case AvroSchema::MAP_SCHEMA:
+        return $decoder->skip_map($writers_schema, $decoder);
+      case AvroSchema::UNION_SCHEMA:
+        return $decoder->skip_union($writers_schema, $decoder);
+      case AvroSchema::ENUM_SCHEMA:
+        return $decoder->skip_enum($writers_schema, $decoder);
+      case AvroSchema::FIXED_SCHEMA:
+        return $decoder->skip_fixed($writers_schema, $decoder);
+      case AvroSchema::RECORD_SCHEMA:
+      case AvroSchema::ERROR_SCHEMA:
+      case AvroSchema::REQUEST_SCHEMA:
+        return $decoder->skip_record($writers_schema, $decoder);
+      default:
+        throw new AvroException(sprintf('Unknown schema type: %s',
+                                        $writers_schema->type()));
+    }
+  }
+}
+
+/**
+ * Decodes and reads Avro data from an AvroIO object encoded using
+ * Avro binary encoding.
+ *
+ * @package Avro
+ */
+class AvroIOBinaryDecoder
+{
+
+  /**
+   * @param int[] $bytes array of byte ASCII values
+   * @returns long decoded value
+   * @internal Requires 64-bit platform
+   */
+  public static function decode_long_from_array($bytes)
+  {
+    $b = array_shift($bytes);
+    $n = $b & 0x7f;
+    $shift = 7;
+    while (0 != ($b & 0x80))
+    {
+      $b = array_shift($bytes);
+      $n |= (($b & 0x7f) << $shift);
+      $shift += 7;
+    }
+    return (($n >> 1) ^ -($n & 1));
+  }
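+
+  /*
+   * Worked example: decode_long_from_array(array(0x80, 0x01)) === 64,
+   * inverting AvroIOBinaryEncoder::encode_long(64) === "\x80\x01".
+   */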
+
+  /**
+   * Performs decoding of the binary string to a float value.
+   *
+   * XXX: This is <b>not</b> endian-aware! See comments in
+   * {@link AvroIOBinaryEncoder::float_to_int_bits()} for details.
+   *
+   * @param string $bits
+   * @returns float
+   */
+  static public function int_bits_to_float($bits)
+  {
+    $float = unpack('f', $bits);
+    return (float) $float[1];
+  }
+
+  /**
+   * Performs decoding of the binary string to a double value.
+   *
+   * XXX: This is <b>not</b> endian-aware! See comments in
+   * {@link AvroIOBinaryEncoder::float_to_int_bits()} for details.
+   *
+   * @param string $bits
+   * @returns float
+   */
+  static public function long_bits_to_double($bits)
+  {
+    $double = unpack('d', $bits);
+    return (double) $double[1];
+  }
+
+  /**
+   * @var AvroIO
+   */
+  private $io;
+
+  /**
+   * @param AvroIO $io object from which to read.
+   */
+  public function __construct($io)
+  {
+    Avro::check_platform();
+    $this->io = $io;
+  }
+
+  /**
+   * @returns string the next byte from $this->io.
+   * @throws AvroException if the next byte cannot be read.
+   */
+  private function next_byte() { return $this->read(1); }
+
+  /**
+   * @returns null
+   */
+  public function read_null() { return null; }
+
+  /**
+   * @returns boolean
+   */
+  public function read_boolean()
+  {
+    return (boolean) (1 == ord($this->next_byte()));
+  }
+
+  /**
+   * @returns int
+   */
+  public function read_int() { return (int) $this->read_long(); }
+
+  /**
+   * @returns long
+   */
+  public function read_long()
+  {
+    $byte = ord($this->next_byte());
+    $bytes = array($byte);
+    while (0 != ($byte & 0x80))
+    {
+      $byte = ord($this->next_byte());
+      $bytes []= $byte;
+    }
+
+    if (Avro::uses_gmp())
+      return AvroGMP::decode_long_from_array($bytes);
+
+    return self::decode_long_from_array($bytes);
+  }
+
+  /**
+   * @returns float
+   */
+  public function read_float()
+  {
+    return self::int_bits_to_float($this->read(4));
+  }
+
+  /**
+   * @returns double
+   */
+  public function read_double()
+  {
+    return self::long_bits_to_double($this->read(8));
+  }
+
+  /**
+   * A string is encoded as a long followed by that many bytes
+   * of UTF-8 encoded character data.
+   * @returns string
+   */
+  public function read_string() { return $this->read_bytes(); }
+
+  /**
+   * @returns string
+   */
+  public function read_bytes() { return $this->read($this->read_long()); }
+
+  /**
+   * @param int $len count of bytes to read
+   * @returns string
+   */
+  public function read($len) { return $this->io->read($len); }
+
+  public function skip_null() { return null; }
+
+  public function skip_boolean() { return $this->skip(1); }
+
+  public function skip_int() { return $this->skip_long(); }
+
+  protected function skip_long()
+  {
+    // next_byte() returns a raw byte string, so take ord() before
+    // testing the varint continuation bit.
+    $b = ord($this->next_byte());
+    while (0 != ($b & 0x80))
+      $b = ord($this->next_byte());
+  }
+
+  public function skip_float() { return $this->skip(4); }
+
+  public function skip_double() { return $this->skip(8); }
+
+  public function skip_bytes() { return $this->skip($this->read_long()); }
+
+  public function skip_string() { return $this->skip_bytes(); }
+
+  /**
+   * @param int $len count of bytes to skip
+   * @uses AvroIO::seek()
+   */
+  public function skip($len) { $this->seek($len, AvroIO::SEEK_CUR); }
+
+  /**
+   * @returns int position of pointer in AvroIO instance
+   * @uses AvroIO::tell()
+   */
+  private function tell() { return $this->io->tell(); }
+
+  /**
+   * @param int $offset
+   * @param int $whence
+   * @returns boolean true upon success
+   * @uses AvroIO::seek()
+   */
+  private function seek($offset, $whence)
+  {
+    return $this->io->seek($offset, $whence);
+  }
+}
+
diff --git a/lang/php/lib/avro/debug.php b/lang/php/lib/avro/debug.php
new file mode 100644
index 0000000..2278f19
--- /dev/null
+++ b/lang/php/lib/avro/debug.php
@@ -0,0 +1,194 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @package Avro
+ */
+
+/**
+ * Avro library code debugging functions
+ * @package Avro
+ */
+class AvroDebug
+{
+
+  /**
+   * @var int high debug level
+   */
+  const DEBUG5 = 5;
+  /**
+   * @var int low debug level
+   */
+  const DEBUG1 = 1;
+  /**
+   * @var int current debug level
+   */
+  const DEBUG_LEVEL = self::DEBUG1;
+
+  /**
+   * @param int $debug_level
+   * @returns boolean true if the given $debug_level is equivalent to
+   *                  or more verbose than the current debug level,
+   *                  and false otherwise.
+   */
+  static function is_debug($debug_level=self::DEBUG1)
+  {
+    return (self::DEBUG_LEVEL >= $debug_level);
+  }
+
+  /**
+   * @param string $format format string for the given arguments. Passed as is
+   *                       to <code>vprintf</code>.
+   * @param array  $args   array of arguments to pass to <code>vprintf</code>.
+   * @param int    $debug_level debug level at which to print this statement
+   * @returns boolean true
+   */
+  static function debug($format, $args, $debug_level=self::DEBUG1)
+  {
+    if (self::is_debug($debug_level))
+      vprintf($format . "\n", $args);
+    return true;
+  }
+
+  /**
+   * @param string $str
+   * @returns string[] array of hex representation of each byte of $str
+   */
+  static function hex_array($str) { return self::bytes_array($str); }
+
+  /**
+   * @param string $str
+   * @param string $joiner string used to join the hex-represented bytes
+   * @returns string hex representation of each byte of $str,
+   *                 joined by $joiner
+   */
+  static function hex_string($str, $joiner=' ')
+  {
+    return join($joiner, self::hex_array($str));
+  }
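+
+  /*
+   * For example: AvroDebug::hex_string("Obj") === 'x4f x62 x6a',
+   * the first three bytes of the Avro container-file magic.
+   */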
+
+  /**
+   * @param string $str
+   * @param string $format format to represent bytes
+   * @returns string[] array of each byte of $str formatted using $format
+   */
+  static function bytes_array($str, $format='x%02x')
+  {
+    $x = array();
+    foreach (str_split($str) as $b)
+      $x []= sprintf($format, ord($b));
+    return $x;
+  }
+
+  /**
+   * @param string $str
+   * @returns string[] array of bytes of $str represented in decimal format ('%3d')
+   */
+  static function dec_array($str) { return self::bytes_array($str, '%3d'); }
+
+  /**
+   * @param string $str
+   * @param string $joiner string to join bytes of $str
+   * @returns string of bytes of $str represented in decimal format
+   * @uses dec_array()
+   */
+  static function dec_string($str, $joiner = ' ')
+  {
+    return join($joiner, self::dec_array($str));
+  }
+
+  /**
+   * @param string $str
+   * @param string $format one of 'ctrl', 'hex', or 'dec' for control,
+   *                       hexadecimal, or decimal format for bytes.
+   *                       - ctrl: ASCII control characters represented as text.
+   *                         For example, the null byte is represented as 'NUL'.
+   *                         Visible ASCII characters represent themselves, and
+   *                         others are represented as a decimal ('%03d')
+   *                       - hex: bytes represented in hexadecimal ('%02X')
+   *                       - dec: bytes represented in decimal ('%03d')
+   * @returns string[] array of bytes represented in the given format.
+   */
+  static function ascii_array($str, $format='ctrl')
+  {
+    if (!in_array($format, array('ctrl', 'hex', 'dec')))
+      throw new AvroException('Unrecognized format specifier');
+
+    $ctrl_chars = array('NUL', 'SOH', 'STX', 'ETX', 'EOT', 'ENQ', 'ACK', 'BEL',
+                        'BS',  'HT',  'LF',  'VT',  'FF',  'CR',  'SO',  'SI',
+                        'DLE', 'DC1', 'DC2', 'DC3', 'DC4', 'NAK', 'SYN', 'ETB',
+                        'CAN', 'EM',  'SUB', 'ESC', 'FS',  'GS',  'RS',  'US');
+    $x = array();
+    foreach (str_split($str) as $b)
+    {
+      $db = ord($b);
+      if ($db < 32)
+      {
+        switch ($format)
+        {
+          case 'ctrl':
+            $x []= str_pad($ctrl_chars[$db], 3, ' ', STR_PAD_LEFT);
+            break;
+          case 'hex':
+            $x []= sprintf("x%02X", $db);
+            break;
+          case 'dec':
+            $x []= str_pad($db, 3, '0', STR_PAD_LEFT);
+            break;
+        }
+      }
+      else if ($db < 127)
+        $x []= "  $b";
+      else if ($db == 127)
+      {
+        switch ($format)
+        {
+          case 'ctrl':
+            $x []= 'DEL';
+            break;
+          case 'hex':
+            $x []= sprintf("x%02X", $db);
+            break;
+          case 'dec':
+            $x []= str_pad($db, 3, '0', STR_PAD_LEFT);
+            break;
+        }
+      }
+      else
+        if ('hex' == $format)
+          $x []= sprintf("x%02X", $db);
+        else
+          $x []= str_pad($db, 3, '0', STR_PAD_LEFT);
+    }
+    return $x;
+  }
+
+  /**
+   * @param string $str
+   * @param string $format one of 'ctrl', 'hex', or 'dec'.
+   *                       See {@link self::ascii_array()} for details.
+   * @param string $joiner
+   * @returns string of bytes joined by $joiner
+   * @uses ascii_array()
+   */
+  static function ascii_string($str, $format='ctrl', $joiner = ' ')
+  {
+    return join($joiner, self::ascii_array($str, $format));
+  }
+}
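+
+// A quick usage sketch of the helpers above (the enclosing class is
+// AvroDebug; output values assume the default formats documented on
+// each method):
+//
+//   AvroDebug::hex_string("A\n");   // 'x41 x0a'
+//   AvroDebug::dec_string("A\n");   // ' 65  10'
+//   AvroDebug::ascii_string("A\n"); // '  A  LF'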
diff --git a/lang/php/lib/avro/gmp.php b/lang/php/lib/avro/gmp.php
new file mode 100644
index 0000000..3d41d03
--- /dev/null
+++ b/lang/php/lib/avro/gmp.php
@@ -0,0 +1,222 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @package Avro
+ */
+
+/**
+ * Methods for handling 64-bit operations using the GMP extension.
+ *
+ * This is a naive and hackish implementation that is intended
+ * to work well enough to support Avro. It has not been tested
+ * beyond what's needed to decode and encode long values.
+ *
+ * @package Avro
+ */
+class AvroGMP {
+
+  /**
+   * @var resource memoized GMP resource for zero
+   */
+  private static $gmp_0;
+
+  /**
+   * @returns resource GMP resource for zero
+   */
+  private static function gmp_0()
+  {
+    if (!isset(self::$gmp_0))
+      self::$gmp_0 = gmp_init('0');
+    return self::$gmp_0;
+  }
+
+  /**
+   * @var resource memoized GMP resource for one (1)
+   */
+  private static $gmp_1;
+
+  /**
+   * @returns resource GMP resource for one (1)
+   */
+  private static function gmp_1()
+  {
+    if (!isset(self::$gmp_1))
+      self::$gmp_1 = gmp_init('1');
+    return self::$gmp_1;
+  }
+
+  /**
+   * @var resource memoized GMP resource for two (2)
+   */
+  private static $gmp_2;
+
+  /**
+   * @returns resource GMP resource for two (2)
+   */
+  private static function gmp_2()
+  {
+    if (!isset(self::$gmp_2))
+      self::$gmp_2 = gmp_init('2');
+    return self::$gmp_2;
+  }
+
+  /**
+   * @var resource memoized GMP resource for 0x7f
+   */
+  private static $gmp_0x7f;
+
+  /**
+   * @returns resource GMP resource for 0x7f
+   */
+  private static function gmp_0x7f()
+  {
+    if (!isset(self::$gmp_0x7f))
+      self::$gmp_0x7f = gmp_init('0x7f');
+    return self::$gmp_0x7f;
+  }
+
+  /**
+   * @var resource memoized GMP resource for 64-bit ~0x7f
+   */
+  private static $gmp_n0x7f;
+
+  /**
+   * @returns resource GMP resource for 64-bit ~0x7f
+   */
+  private static function gmp_n0x7f()
+  {
+    if (!isset(self::$gmp_n0x7f))
+      self::$gmp_n0x7f = gmp_init('0xffffffffffffff80');
+    return self::$gmp_n0x7f;
+  }
+
+  /**
+   * @var resource memoized GMP resource for 64-bits of 1
+   */
+  private static $gmp_0xfs;
+
+  /**
+   * @returns resource GMP resource for 64-bits of 1
+   */
+  private static function gmp_0xfs()
+  {
+    if (!isset(self::$gmp_0xfs))
+      self::$gmp_0xfs = gmp_init('0xffffffffffffffff');
+    return self::$gmp_0xfs;
+  }
+
+  /**
+   * @param resource $g GMP resource
+   * @returns resource GMP resource containing the 64-bit two's complement of the input.
+   */
+  static function gmp_twos_complement($g)
+  {
+    return gmp_neg(gmp_sub(gmp_pow(self::gmp_2(), 64), $g));
+  }
+
+  /**
+   * @internal Only works up to shift 63 (doesn't wrap bits around).
+   * @param resource|int|string $g
+   * @param int $shift number of bits to shift left
+   * @returns resource $g shifted left
+   */
+  static function shift_left($g, $shift)
+  {
+    if (0 == $shift)
+      return $g;
+
+    if (0 > gmp_sign($g))
+      $g = self::gmp_twos_complement($g);
+
+    $m = gmp_mul($g, gmp_pow(self::gmp_2(), $shift));
+    $m = gmp_and($m, self::gmp_0xfs());
+    if (gmp_testbit($m, 63))
+      $m = gmp_neg(gmp_add(gmp_and(gmp_com($m), self::gmp_0xfs()),
+                           self::gmp_1()));
+    return $m;
+  }
+
+  /**
+   * Arithmetic right shift
+   * @param resource|int|string $g
+   * @param int $shift number of bits to shift right
+   * @returns resource $g shifted right $shift bits
+   */
+  static function shift_right($g, $shift)
+  {
+    if (0 == $shift)
+      return $g;
+
+    if (0 <= gmp_sign($g))
+      $m = gmp_div($g, gmp_pow(self::gmp_2(), $shift));
+    else // negative
+    {
+      $g = gmp_and($g, self::gmp_0xfs());
+      $m = gmp_div($g, gmp_pow(self::gmp_2(), $shift));
+      $m = gmp_and($m, self::gmp_0xfs());
+      for ($i = 63; $i >= (63 - $shift); $i--)
+        gmp_setbit($m, $i);
+
+      $m = gmp_neg(gmp_add(gmp_and(gmp_com($m), self::gmp_0xfs()),
+                           self::gmp_1()));
+    }
+
+    return $m;
+  }
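+
+  /**
+   * Illustration of the two shifts above, assuming 64-bit two's-complement
+   * emulation (they mirror PHP's native << and arithmetic >> on 64-bit
+   * platforms):
+   *
+   * <code>
+   * gmp_strval(AvroGMP::shift_left(gmp_init(-8), 1));  // '-16'
+   * gmp_strval(AvroGMP::shift_right(gmp_init(-8), 1)); // '-4'
+   * </code>
+   */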
+
+  /**
+   * @param int|string $n integer (or string representation of integer) to encode
+   * @return string $bytes of the long $n encoded per the Avro spec
+   */
+  static function encode_long($n)
+  {
+    $g = gmp_init($n);
+    $g = gmp_xor(self::shift_left($g, 1),
+                 self::shift_right($g, 63));
+    $bytes = '';
+    while (0 != gmp_cmp(self::gmp_0(), gmp_and($g, self::gmp_n0x7f())))
+    {
+      $bytes .= chr(gmp_intval(gmp_and($g, self::gmp_0x7f())) | 0x80);
+      $g = self::shift_right($g, 7);
+    }
+    $bytes .= chr(gmp_intval($g));
+    return $bytes;
+  }
+
+  /**
+   * @param int[] $bytes array of ascii codes of bytes to decode
+   * @return string string representation of the decoded long.
+   */
+  static function decode_long_from_array($bytes)
+  {
+    $b = array_shift($bytes);
+    $g = gmp_init($b & 0x7f);
+    $shift = 7;
+    while (0 != ($b & 0x80))
+    {
+      $b = array_shift($bytes);
+      $g = gmp_or($g, self::shift_left(($b & 0x7f), $shift));
+      $shift += 7;
+    }
+    $val = gmp_xor(self::shift_right($g, 1), gmp_neg(gmp_and($g, 1)));
+    return gmp_strval($val);
+  }
+
+}
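+
+// Round-trip sketch for the encoder/decoder above (requires the GMP
+// extension; the byte values follow Avro's zigzag + little-endian
+// base-128 varint coding):
+//
+//   $bytes = AvroGMP::encode_long('-3');           // "\x05" (zigzag(-3) is 5)
+//   $codes = array_map('ord', str_split($bytes));  // array(5)
+//   AvroGMP::decode_long_from_array($codes);       // '-3'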
diff --git a/lang/php/lib/avro/io.php b/lang/php/lib/avro/io.php
new file mode 100644
index 0000000..239e53d
--- /dev/null
+++ b/lang/php/lib/avro/io.php
@@ -0,0 +1,494 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Avro IO object classes
+ * @package Avro
+ */
+
+/**
+ * Exceptions associated with AvroIO instances.
+ * @package Avro
+ */
+class AvroIOException extends AvroException {}
+
+/**
+ * Barebones IO base class to provide common interface for file and string
+ * access within the Avro classes.
+ *
+ * @package Avro
+ */
+class AvroIO
+{
+
+  /**
+   * @var string general read mode
+   */
+  const READ_MODE = 'r';
+  /**
+   * @var string general write mode.
+   */
+  const WRITE_MODE = 'w';
+
+  /**
+   * @var int set position to current index + $offset bytes
+   */
+  const SEEK_CUR = SEEK_CUR;
+  /**
+   * @var int set position equal to $offset bytes
+   */
+  const SEEK_SET = SEEK_SET;
+  /**
+   * @var int set position to end of file + $offset bytes
+   */
+  const SEEK_END = SEEK_END;
+
+  /**
+   * Read $len bytes from AvroIO instance
+   * @param int $len count of bytes to read
+   * @return string bytes read
+   */
+  public function read($len)
+  {
+    throw new AvroNotImplementedException('Not implemented');
+  }
+
+  /**
+   * Append bytes to this buffer. (Nothing more is needed to support Avro.)
+   * @param string $arg bytes to write
+   * @returns int count of bytes written.
+   * @throws AvroIOException if $arg is not a string value.
+   */
+  public function write($arg)
+  {
+    throw new AvroNotImplementedException('Not implemented');
+  }
+
+  /**
+   * Return byte offset within AvroIO instance
+   * @return int
+   */
+  public function tell()
+  {
+    throw new AvroNotImplementedException('Not implemented');
+  }
+
+  /**
+   * Set the position indicator. The new position, measured in bytes
+   * from the beginning of the file, is obtained by adding $offset to
+   * the position specified by $whence.
+   *
+   * @param int $offset
+   * @param int $whence one of AvroIO::SEEK_SET, AvroIO::SEEK_CUR,
+   *                    or AvroIO::SEEK_END
+   * @returns boolean true
+   *
+   * @throws AvroIOException
+   */
+  public function seek($offset, $whence=self::SEEK_SET)
+  {
+    throw new AvroNotImplementedException('Not implemented');
+  }
+
+  /**
+   * Flushes any buffered data to the AvroIO object.
+   * @returns boolean true upon success.
+   */
+  public function flush()
+  {
+    throw new AvroNotImplementedException('Not implemented');
+  }
+
+  /**
+   * Returns whether or not the current position is at the end of this AvroIO
+   * instance.
+   *
+   * Note is_eof() is <b>not</b> like eof in C or feof in PHP:
+   * it returns TRUE if the *next* read would hit end of file,
+   * rather than whether the *most recent* read reached end of file.
+   * @returns boolean true if at the end of file, and false otherwise
+   */
+  public function is_eof()
+  {
+    throw new AvroNotImplementedException('Not implemented');
+  }
+
+  /**
+   * Closes this AvroIO instance.
+   */
+  public function close()
+  {
+    throw new AvroNotImplementedException('Not implemented');
+  }
+
+}
+
+/**
+ * AvroIO wrapper for string access
+ * @package Avro
+ */
+class AvroStringIO extends AvroIO
+{
+  /**
+   * @var string
+   */
+  private $string_buffer;
+  /**
+   * @var int  current position in string
+   */
+  private $current_index;
+  /**
+   * @var boolean whether or not the string is closed.
+   */
+  private $is_closed;
+
+  /**
+   * @param string $str initial value of AvroStringIO buffer. Regardless
+   *                    of the initial value, the pointer is set to the
+   *                    beginning of the buffer.
+   * @throws AvroIOException if a non-string value is passed as $str
+   */
+  public function __construct($str = '')
+  {
+    $this->is_closed = false;
+    $this->string_buffer = '';
+    $this->current_index = 0;
+
+    if (is_string($str))
+      $this->string_buffer .= $str;
+    else
+      throw new AvroIOException(
+        sprintf('constructor argument must be a string: %s', gettype($str)));
+  }
+
+  /**
+   * Append bytes to this buffer.
+   * (Nothing more is needed to support Avro.)
+   * @param string $arg bytes to write
+   * @returns int count of bytes written.
+   * @throws AvroIOException if $arg is not a string value.
+   */
+  public function write($arg)
+  {
+    $this->check_closed();
+    if (is_string($arg))
+      return $this->append_str($arg);
+    throw new AvroIOException(
+      sprintf('write argument must be a string: (%s) %s',
+              gettype($arg), var_export($arg, true)));
+  }
+
+  /**
+   * @param int $len count of bytes to read
+   * @returns string bytes read from buffer
+   * @todo test for fencepost errors wrt updating current_index
+   */
+  public function read($len)
+  {
+    $this->check_closed();
+    // Read at most $len bytes without indexing past the end of the buffer.
+    $read = (string) substr($this->string_buffer, $this->current_index, $len);
+    if (strlen($read) < $len)
+      $this->current_index = $this->length();
+    else
+      $this->current_index += $len;
+    return $read;
+  }
+
+  /**
+   * @returns boolean true if successful
+   * @throws AvroIOException if the seek failed.
+   */
+  public function seek($offset, $whence=self::SEEK_SET)
+  {
+    if (!is_int($offset))
+      throw new AvroIOException('Seek offset must be an integer.');
+    // Prevent seeking before BOF
+    switch ($whence)
+    {
+      case self::SEEK_SET:
+        if (0 > $offset)
+          throw new AvroIOException('Cannot seek before beginning of file.');
+        $this->current_index = $offset;
+        break;
+      case self::SEEK_CUR:
+        if (0 > $this->current_index + $offset)
+          throw new AvroIOException('Cannot seek before beginning of file.');
+        $this->current_index += $offset;
+        break;
+      case self::SEEK_END:
+        if (0 > $this->length() + $offset)
+          throw new AvroIOException('Cannot seek before beginning of file.');
+        $this->current_index = $this->length() + $offset;
+        break;
+      default:
+        throw new AvroIOException(sprintf('Invalid seek whence %d', $whence));
+    }
+
+    return true;
+  }
+
+  /**
+   * @returns int
+   * @see AvroIO::tell()
+   */
+  public function tell() { return $this->current_index; }
+
+  /**
+   * @returns boolean
+   * @see AvroIO::is_eof()
+   */
+  public function is_eof()
+  {
+    return ($this->current_index >= $this->length());
+  }
+
+  /**
+   * No-op provided for compatibility with AvroIO interface.
+   * @returns boolean true
+   */
+  public function flush() { return true; }
+
+  /**
+   * Marks this buffer as closed.
+   * @returns boolean true
+   */
+  public function close()
+  {
+    $this->check_closed();
+    $this->is_closed = true;
+    return true;
+  }
+
+  /**
+   * @throws AvroIOException if the buffer is closed.
+   */
+  private function check_closed()
+  {
+    if ($this->is_closed())
+      throw new AvroIOException('Buffer is closed');
+  }
+
+  /**
+   * Appends bytes to this buffer.
+   * @param string $str
+   * @returns integer count of bytes written.
+   */
+  private function append_str($str)
+  {
+    $this->check_closed();
+    $this->string_buffer .= $str;
+    $len = strlen($str);
+    $this->current_index += $len;
+    return $len;
+  }
+
+  /**
+   * Truncates the buffer to 0 bytes and resets the pointer
+   * to the beginning of the buffer.
+   * @returns boolean true
+   */
+  public function truncate()
+  {
+    $this->check_closed();
+    $this->string_buffer = '';
+    $this->current_index = 0;
+    return true;
+  }
+
+  /**
+   * @returns int count of bytes in the buffer
+   * @internal Could probably memoize length for performance, but
+   *           no need to do this yet.
+   */
+  public function length() { return strlen($this->string_buffer); }
+
+  /**
+   * @returns string
+   */
+  public function __toString() { return $this->string_buffer; }
+
+
+  /**
+   * @returns string
+   * @uses self::__toString()
+   */
+  public function string() { return $this->__toString(); }
+
+  /**
+   * @returns boolean true if this buffer is closed and false
+   *                       otherwise.
+   */
+  public function is_closed() { return $this->is_closed; }
+}
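+
+// Minimal sketch of the string-backed buffer above:
+//
+//   $io = new AvroStringIO();
+//   $io->write('avro'); // 4 (bytes written; pointer now at 4)
+//   $io->seek(0);       // rewind to the beginning
+//   $io->read(2);       // 'av'
+//   $io->is_eof();      // false: two bytes remain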
+
+/**
+ * AvroIO wrapper for PHP file access functions
+ * @package Avro
+ */
+class AvroFile extends AvroIO
+{
+  /**
+   * @var string fopen read mode value. Used internally.
+   */
+  const FOPEN_READ_MODE = 'rb';
+
+  /**
+   * @var string fopen write mode value. Used internally.
+   */
+  const FOPEN_WRITE_MODE = 'wb';
+
+  /**
+   * @var string
+   */
+  private $file_path;
+
+  /**
+   * @var resource file handle for AvroFile instance
+   */
+  private $file_handle;
+
+  public function __construct($file_path, $mode = self::READ_MODE)
+  {
+    /**
+     * XXX: should we check for file existence (in case of reading)
+     * or anything else about the provided file_path argument?
+     */
+    $this->file_path = $file_path;
+    switch ($mode)
+    {
+      case self::WRITE_MODE:
+        $this->file_handle = fopen($this->file_path, self::FOPEN_WRITE_MODE);
+        if (false == $this->file_handle)
+          throw new AvroIOException('Could not open file for writing');
+        break;
+      case self::READ_MODE:
+        $this->file_handle = fopen($this->file_path, self::FOPEN_READ_MODE);
+        if (false == $this->file_handle)
+          throw new AvroIOException('Could not open file for reading');
+        break;
+      default:
+        throw new AvroIOException(
+          sprintf("Only modes '%s' and '%s' allowed. You provided '%s'.",
+                  self::READ_MODE, self::WRITE_MODE, $mode));
+    }
+  }
+
+  /**
+   * @returns int count of bytes written
+   * @throws AvroIOException if write failed.
+   */
+  public function write($str)
+  {
+    $len = fwrite($this->file_handle, $str);
+    if (false === $len)
+      throw new AvroIOException(sprintf('Could not write to file'));
+    return $len;
+  }
+
+  /**
+   * @param int $len count of bytes to read.
+   * @returns string bytes read
+   * @throws AvroIOException if length value is negative or if the read failed
+   */
+  public function read($len)
+  {
+    if (0 > $len)
+      throw new AvroIOException(
+        sprintf("Invalid length value passed to read: %d", $len));
+
+    if (0 == $len)
+      return '';
+
+    $bytes = fread($this->file_handle, $len);
+    if (false === $bytes)
+      throw new AvroIOException('Could not read from file');
+    return $bytes;
+  }
+
+  /**
+   * @returns int current position within the file
+   * @throws AvroIOException if tell failed.
+   */
+  public function tell()
+  {
+    $position = ftell($this->file_handle);
+    if (false === $position)
+      throw new AvroIOException('Could not execute tell on reader');
+    return $position;
+  }
+
+  /**
+   * @param int $offset
+   * @param int $whence
+   * @returns boolean true upon success
+   * @throws AvroIOException if seek failed.
+   * @see AvroIO::seek()
+   */
+  public function seek($offset, $whence = SEEK_SET)
+  {
+    $res = fseek($this->file_handle, $offset, $whence);
+    // Note: does not catch seeking beyond end of file
+    if (-1 === $res)
+      throw new AvroIOException(
+        sprintf("Could not execute seek (offset = %d, whence = %d)",
+                $offset, $whence));
+    return true;
+  }
+
+  /**
+   * Closes the file.
+   * @returns boolean true if successful.
+   * @throws AvroIOException if there was an error closing the file.
+   */
+  public function close()
+  {
+    $res = fclose($this->file_handle);
+    if (false === $res)
+      throw new AvroIOException('Error closing file.');
+    return $res;
+  }
+
+  /**
+   * @returns boolean true if the pointer is at the end of the file,
+   *                  and false otherwise.
+   * @see AvroIO::is_eof() as behavior differs from feof()
+   */
+  public function is_eof()
+  {
+    $this->read(1);
+    if (feof($this->file_handle))
+      return true;
+    $this->seek(-1, self::SEEK_CUR);
+    return false;
+  }
+
+  /**
+   * @returns boolean true if the flush was successful.
+   * @throws AvroIOException if there was an error flushing the file.
+   */
+  public function flush()
+  {
+    $res = fflush($this->file_handle);
+    if (false === $res)
+      throw new AvroIOException('Could not flush file.');
+    return true;
+  }
+
+}
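+
+// Sketch of a file round trip with the wrapper above ('/tmp/example.avro'
+// is a hypothetical path):
+//
+//   $w = new AvroFile('/tmp/example.avro', AvroIO::WRITE_MODE);
+//   $w->write("\x01\x02"); // 2
+//   $w->close();
+//   $r = new AvroFile('/tmp/example.avro'); // READ_MODE is the default
+//   $r->read(2);           // "\x01\x02"
+//   $r->close();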
diff --git a/lang/php/lib/avro/protocol.php b/lang/php/lib/avro/protocol.php
new file mode 100644
index 0000000..a558e66
--- /dev/null
+++ b/lang/php/lib/avro/protocol.php
@@ -0,0 +1,86 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @package Avro
+ */
+
+/**
+ * Avro library for protocols
+ * @package Avro
+ */
+class AvroProtocol
+{
+  public $name;
+  public $namespace;
+  public $schemata;
+  public $protocol;
+  public $messages = array();
+
+  public static function parse($json)
+  {
+    if (is_null($json))
+      throw new AvroProtocolParseException("Protocol can't be null");
+
+    $protocol = new AvroProtocol();
+    $protocol->real_parse(json_decode($json, true));
+    return $protocol;
+  }
+
+  function real_parse($avro) {
+    $this->protocol = $avro["protocol"];
+    $this->namespace = $avro["namespace"];
+    $this->schemata = new AvroNamedSchemata();
+    $this->name = $avro["protocol"];
+
+    if (!is_null($avro["types"])) {
+        $types = AvroSchema::real_parse($avro["types"], $this->namespace, $this->schemata);
+    }
+
+    if (!is_null($avro["messages"])) {
+      foreach ($avro["messages"] as $messageName => $messageAvro) {
+        $message = new AvroProtocolMessage($messageName, $messageAvro, $this);
+        $this->messages{$messageName} = $message;
+      }
+    }
+  }
+}
+
+class AvroProtocolMessage
+{
+  /**
+   * @var string message name
+   */
+  public $name;
+
+  /**
+   * @var AvroRecordSchema $request
+   */
+  public $request;
+
+  public $response;
+
+  public function __construct($name, $avro, $protocol)
+  {
+    $this->name = $name;
+    $this->request = new AvroRecordSchema(
+      new AvroName($name, null, $protocol->namespace), null,
+      $avro['request'], $protocol->schemata, AvroSchema::REQUEST_SCHEMA);
+
+    if (array_key_exists('response', $avro)) {
+      $this->response = $protocol->schemata->schema_by_name(
+        new AvroName($avro['response'], $protocol->namespace, $protocol->namespace));
+      if ($this->response == null)
+        $this->response = new AvroPrimitiveSchema($avro['response']);
+    }
+  }
+}
+
+class AvroProtocolParseException extends AvroException {};
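+
+// Sketch of parsing a minimal protocol declaration (protocol name,
+// namespace, and record type are illustrative):
+//
+//   $p = AvroProtocol::parse(
+//     '{"protocol": "Greeter", "namespace": "example",
+//       "types": [{"type": "record", "name": "Greeting",
+//                  "fields": [{"name": "message", "type": "string"}]}],
+//       "messages": {}}');
+//   // $p->name === 'Greeter'; $p->schemata tracks 'example.Greeting'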
diff --git a/lang/php/lib/avro/schema.php b/lang/php/lib/avro/schema.php
new file mode 100644
index 0000000..3d7fbbb
--- /dev/null
+++ b/lang/php/lib/avro/schema.php
@@ -0,0 +1,1457 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Avro Schema and Avro Schema support classes.
+ * @package Avro
+ */
+
+/** TODO
+ * - ARRAY have only type and item attributes (what about metadata?)
+ * - MAP keys are (assumed?) to be strings
+ * - FIXED size must be integer (must be positive? less than MAXINT?)
+ * - primitive type names cannot have a namespace (so throw an error? or ignore?)
+ * - schema may contain multiple definitions of a named schema
+ *   if definitions are equivalent (?)
+ *  - Cleanup default namespace and named schemata handling.
+ *     - For one, it appears to be *too* global. According to the spec,
+ *       we should only be referencing schemas that are named within the
+ *       *enclosing* schema, so those in sibling schemas (say, unions or fields)
+ *       shouldn't be referenced, if I understand the spec correctly.
+ *     - Also, if a named schema is defined more than once in the same schema,
+ *       it must have the same definition: so it appears we *do* need to keep
+ *       track of named schemata globally as well. (And does this play well
+ *       with the requirements regarding enclosing schema?)
+ *  - default values for bytes and fixed fields are JSON strings,
+ *    where unicode code points 0-255 are mapped to unsigned 8-bit byte values 0-255
+ *  - make sure other default values for other schema are of appropriate type
+ *  - Should AvroField really be an AvroSchema object? Avro Fields have a name
+ *    attribute, but not a namespace attribute (and the name can't be namespace
+ *    qualified). It also has additional attributes such as doc, which named schemas
+ *    enum and record have (though not fixed schemas, which also have names), and
+ *    fields also have default and order attributes, shared by no other schema type.
+ */
+
+/**
+ * Exceptions associated with parsing JSON schema representations
+ * @package Avro
+ */
+class AvroSchemaParseException extends AvroException {};
+
+/**
+ * @package Avro
+ */
+class AvroSchema
+{
+  /**
+   * @var int lower bound of integer values: -(1 << 31)
+   */
+  const INT_MIN_VALUE = -2147483648;
+
+  /**
+   * @var int upper bound of integer values: (1 << 31) - 1
+   */
+  const INT_MAX_VALUE = 2147483647;
+
+  /**
+   * @var long lower bound of long values: -(1 << 63)
+   */
+  const LONG_MIN_VALUE = -9223372036854775808;
+
+  /**
+   * @var long upper bound of long values: (1 << 63) - 1
+   */
+  const LONG_MAX_VALUE =  9223372036854775807;
+
+  /**
+   * @var string null schema type name
+   */
+  const NULL_TYPE = 'null';
+
+  /**
+   * @var string boolean schema type name
+   */
+  const BOOLEAN_TYPE = 'boolean';
+
+  /**
+   * int schema type value is a 32-bit signed int
+   * @var string int schema type name.
+   */
+  const INT_TYPE = 'int';
+
+  /**
+   * long schema type value is a 64-bit signed int
+   * @var string long schema type name
+   */
+  const LONG_TYPE = 'long';
+
+  /**
+   * float schema type value is a 32-bit IEEE 754 floating-point number
+   * @var string float schema type name
+   */
+  const FLOAT_TYPE = 'float';
+
+  /**
+   * double schema type value is a 64-bit IEEE 754 floating-point number
+   * @var string double schema type name
+   */
+  const DOUBLE_TYPE = 'double';
+
+  /**
+   * string schema type value is a Unicode character sequence
+   * @var string string schema type name
+   */
+  const STRING_TYPE = 'string';
+
+  /**
+   * bytes schema type value is a sequence of 8-bit unsigned bytes
+   * @var string bytes schema type name
+   */
+  const BYTES_TYPE = 'bytes';
+
+  // Complex Types
+  // Unnamed Schema
+  /**
+   * @var string array schema type name
+   */
+  const ARRAY_SCHEMA = 'array';
+
+  /**
+   * @var string map schema type name
+   */
+  const MAP_SCHEMA = 'map';
+
+  /**
+   * @var string union schema type name
+   */
+  const UNION_SCHEMA = 'union';
+
+  /**
+   * Unions of error schemas are used by Avro messages
+   * @var string error_union schema type name
+   */
+  const ERROR_UNION_SCHEMA = 'error_union';
+
+  // Named Schema
+
+  /**
+   * @var string enum schema type name
+   */
+  const ENUM_SCHEMA = 'enum';
+
+  /**
+   * @var string fixed schema type name
+   */
+  const FIXED_SCHEMA = 'fixed';
+
+  /**
+   * @var string record schema type name
+   */
+  const RECORD_SCHEMA = 'record';
+  // Other Schema
+
+  /**
+   * @var string error schema type name
+   */
+  const ERROR_SCHEMA = 'error';
+
+  /**
+   * @var string request schema type name
+   */
+  const REQUEST_SCHEMA = 'request';
+
+
+  // Schema attribute names
+  /**
+   * @var string schema type name attribute name
+   */
+  const TYPE_ATTR = 'type';
+
+  /**
+   * @var string named schema name attribute name
+   */
+  const NAME_ATTR = 'name';
+
+  /**
+   * @var string named schema namespace attribute name
+   */
+  const NAMESPACE_ATTR = 'namespace';
+
+  /**
+   * @var string derived attribute: doesn't appear in schema
+   */
+  const FULLNAME_ATTR = 'fullname';
+
+  /**
+   * @var string array schema size attribute name
+   */
+  const SIZE_ATTR = 'size';
+
+  /**
+   * @var string record fields attribute name
+   */
+  const FIELDS_ATTR = 'fields';
+
+  /**
+   * @var string array schema items attribute name
+   */
+  const ITEMS_ATTR = 'items';
+
+  /**
+   * @var string enum schema symbols attribute name
+   */
+  const SYMBOLS_ATTR = 'symbols';
+
+  /**
+   * @var string map schema values attribute name
+   */
+  const VALUES_ATTR = 'values';
+
+  /**
+   * @var string document string attribute name
+   */
+  const DOC_ATTR = 'doc';
+
+  /**
+   * @var array list of primitive schema type names
+   */
+  private static $primitive_types = array(self::NULL_TYPE, self::BOOLEAN_TYPE,
+                                          self::STRING_TYPE, self::BYTES_TYPE,
+                                          self::INT_TYPE, self::LONG_TYPE,
+                                          self::FLOAT_TYPE, self::DOUBLE_TYPE);
+
+  /**
+   * @var array list of named schema type names
+   */
+  private static $named_types = array(self::FIXED_SCHEMA, self::ENUM_SCHEMA,
+                                      self::RECORD_SCHEMA, self::ERROR_SCHEMA);
+
+  /**
+   * @param string $type a schema type name
+   * @returns boolean true if the given type name is a named schema type name
+   *                  and false otherwise.
+   */
+  public static function is_named_type($type)
+  {
+    return in_array($type, self::$named_types);
+  }
+
+  /**
+   * @param string $type a schema type name
+   * @returns boolean true if the given type name is a primitive schema type
+   *                  name and false otherwise.
+   */
+  public static function is_primitive_type($type)
+  {
+    return in_array($type, self::$primitive_types);
+  }
+
+  /**
+   * @param string $type a schema type name
+   * @returns boolean true if the given type name is a valid schema type
+   *                  name and false otherwise.
+   */
+  public static function is_valid_type($type)
+  {
+    return (self::is_primitive_type($type)
+            || self::is_named_type($type)
+            || in_array($type, array(self::ARRAY_SCHEMA,
+                                     self::MAP_SCHEMA,
+                                     self::UNION_SCHEMA,
+                                     self::REQUEST_SCHEMA,
+                                     self::ERROR_UNION_SCHEMA)));
+  }
+
+  /**
+   * @var array list of names of reserved attributes
+   */
+  private static $reserved_attrs = array(self::TYPE_ATTR,
+                                         self::NAME_ATTR,
+                                         self::NAMESPACE_ATTR,
+                                         self::FIELDS_ATTR,
+                                         self::ITEMS_ATTR,
+                                         self::SIZE_ATTR,
+                                         self::SYMBOLS_ATTR,
+                                         self::VALUES_ATTR);
+
+  /**
+   * @param string $json JSON-encoded schema
+   * @uses self::real_parse()
+   * @returns AvroSchema
+   */
+  public static function parse($json)
+  {
+    $schemata = new AvroNamedSchemata();
+    return self::real_parse(json_decode($json, true), null, $schemata);
+  }
+
+  /**
+   * @param mixed $avro JSON-decoded schema
+   * @param string $default_namespace namespace of enclosing schema
+   * @param AvroNamedSchemata &$schemata reference to named schemas
+   * @returns AvroSchema
+   * @throws AvroSchemaParseException
+   */
+  static function real_parse($avro, $default_namespace=null, &$schemata=null)
+  {
+    if (is_null($schemata))
+      $schemata = new AvroNamedSchemata();
+
+    if (is_array($avro))
+    {
+      $type = AvroUtil::array_value($avro, self::TYPE_ATTR);
+
+      if (self::is_primitive_type($type))
+        return new AvroPrimitiveSchema($type);
+
+      elseif (self::is_named_type($type))
+      {
+        $name = AvroUtil::array_value($avro, self::NAME_ATTR);
+        $namespace = AvroUtil::array_value($avro, self::NAMESPACE_ATTR);
+        $new_name = new AvroName($name, $namespace, $default_namespace);
+        $doc = AvroUtil::array_value($avro, self::DOC_ATTR);
+        switch ($type)
+        {
+          case self::FIXED_SCHEMA:
+            $size = AvroUtil::array_value($avro, self::SIZE_ATTR);
+            return new AvroFixedSchema($new_name, $doc,
+                                       $size,
+                                       $schemata);
+          case self::ENUM_SCHEMA:
+            $symbols = AvroUtil::array_value($avro, self::SYMBOLS_ATTR);
+            return new AvroEnumSchema($new_name, $doc,
+                                      $symbols,
+                                      $schemata);
+          case self::RECORD_SCHEMA:
+          case self::ERROR_SCHEMA:
+            $fields = AvroUtil::array_value($avro, self::FIELDS_ATTR);
+            return new AvroRecordSchema($new_name, $doc,
+                                        $fields,
+                                        $schemata, $type);
+          default:
+            throw new AvroSchemaParseException(
+              sprintf('Unknown named type: %s', $type));
+        }
+      }
+      elseif (self::is_valid_type($type))
+      {
+        switch ($type)
+        {
+          case self::ARRAY_SCHEMA:
+            return new AvroArraySchema($avro[self::ITEMS_ATTR],
+                                       $default_namespace,
+                                       $schemata);
+          case self::MAP_SCHEMA:
+            return new AvroMapSchema($avro[self::VALUES_ATTR],
+                                     $default_namespace,
+                                     $schemata);
+          default:
+            throw new AvroSchemaParseException(
+              sprintf('Unknown valid type: %s', $type));
+        }
+      }
+      elseif (!array_key_exists(self::TYPE_ATTR, $avro)
+              && AvroUtil::is_list($avro))
+        return new AvroUnionSchema($avro, $default_namespace, $schemata);
+      else
+        throw new AvroSchemaParseException(sprintf('Undefined type: %s',
+                                                   $type));
+    }
+    elseif (self::is_primitive_type($avro))
+      return new AvroPrimitiveSchema($avro);
+    else
+      throw new AvroSchemaParseException(
+        sprintf('%s is not a schema we know about.',
+                print_r($avro, true)));
+  }
+
+  /**
+   * @returns boolean true if $datum is valid for $expected_schema
+   *                  and false otherwise.
+   * @throws AvroSchemaParseException
+   */
+  public static function is_valid_datum($expected_schema, $datum)
+  {
+    switch($expected_schema->type)
+    {
+      case self::NULL_TYPE:
+        return is_null($datum);
+      case self::BOOLEAN_TYPE:
+        return is_bool($datum);
+      case self::STRING_TYPE:
+      case self::BYTES_TYPE:
+        return is_string($datum);
+      case self::INT_TYPE:
+        return (is_int($datum)
+                && (self::INT_MIN_VALUE <= $datum)
+                && ($datum <= self::INT_MAX_VALUE));
+      case self::LONG_TYPE:
+        return (is_int($datum)
+                && (self::LONG_MIN_VALUE <= $datum)
+                && ($datum <= self::LONG_MAX_VALUE));
+      case self::FLOAT_TYPE:
+      case self::DOUBLE_TYPE:
+        return (is_float($datum) || is_int($datum));
+      case self::ARRAY_SCHEMA:
+        if (is_array($datum))
+        {
+          foreach ($datum as $d)
+            if (!self::is_valid_datum($expected_schema->items(), $d))
+              return false;
+          return true;
+        }
+        return false;
+      case self::MAP_SCHEMA:
+        if (is_array($datum))
+        {
+          foreach ($datum as $k => $v)
+            if (!is_string($k)
+                || !self::is_valid_datum($expected_schema->values(), $v))
+              return false;
+          return true;
+        }
+        return false;
+      case self::UNION_SCHEMA:
+        foreach ($expected_schema->schemas() as $schema)
+          if (self::is_valid_datum($schema, $datum))
+            return true;
+        return false;
+      case self::ENUM_SCHEMA:
+        return in_array($datum, $expected_schema->symbols());
+      case self::FIXED_SCHEMA:
+        return (is_string($datum)
+                && (strlen($datum) == $expected_schema->size()));
+      case self::RECORD_SCHEMA:
+      case self::ERROR_SCHEMA:
+      case self::REQUEST_SCHEMA:
+        if (is_array($datum))
+        {
+          foreach ($expected_schema->fields() as $field)
+            if (!array_key_exists($field->name(), $datum)
+                || !self::is_valid_datum($field->type(),
+                                         $datum[$field->name()]))
+              return false;
+          return true;
+        }
+        return false;
+      default:
+        throw new AvroSchemaParseException(
+          sprintf('%s is not allowed.', $expected_schema));
+    }
+  }
+
+  /**
+   * @internal Should only be called from within the constructor of
+   *           a class which extends AvroSchema
+   * @param string $type a schema type name
+   */
+  public function __construct($type)
+  {
+    $this->type = $type;
+  }
+
+  /**
+   * @param mixed $avro
+   * @param string $default_namespace namespace of enclosing schema
+   * @param AvroNamedSchemata &$schemata
+   * @returns AvroSchema
+   * @uses AvroSchema::real_parse()
+   * @throws AvroSchemaParseException
+   */
+  protected static function subparse($avro, $default_namespace, &$schemata=null)
+  {
+    try
+    {
+      return self::real_parse($avro, $default_namespace, $schemata);
+    }
+    catch (AvroSchemaParseException $e)
+    {
+      throw $e;
+    }
+    catch (Exception $e)
+    {
+      throw new AvroSchemaParseException(
+        sprintf('Sub-schema is not a valid Avro schema. Bad schema: %s',
+                print_r($avro, true)));
+    }
+
+  }
+
+  /**
+   * @returns string schema type name of this schema
+   */
+  public function type() { return $this->type;  }
+
+  /**
+   * @returns mixed
+   */
+  public function to_avro()
+  {
+    return array(self::TYPE_ATTR => $this->type);
+  }
+
+  /**
+   * @returns string the JSON-encoded representation of this Avro schema.
+   */
+  public function __toString() { return json_encode($this->to_avro()); }
+
+  /**
+   * @returns mixed value of the attribute with the given attribute name
+   */
+  public function attribute($attribute) { return $this->$attribute(); }
+
+}
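+
+// Sketch: parse a record schema and validate data against it.
+//
+//   $schema = AvroSchema::parse(
+//     '{"type": "record", "name": "User",
+//       "fields": [{"name": "id", "type": "int"}]}');
+//   AvroSchema::is_valid_datum($schema, array('id' => 7));   // true
+//   AvroSchema::is_valid_datum($schema, array('id' => 'x')); // false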
+
+/**
+ * Avro schema for basic types such as null, int, long, string.
+ * @package Avro
+ */
+class AvroPrimitiveSchema extends AvroSchema
+{
+
+  /**
+   * @param string $type the primitive schema type name
+   * @throws AvroSchemaParseException if the given $type is not a
+   *         primitive schema type name
+   */
+  public function __construct($type)
+  {
+    if (self::is_primitive_type($type))
+      return parent::__construct($type);
+    throw new AvroSchemaParseException(
+      sprintf('%s is not a valid primitive type.', $type));
+  }
+
+  /**
+   * @returns mixed
+   */
+  public function to_avro()
+  {
+    $avro = parent::to_avro();
+    // FIXME: Is this if really necessary? When *wouldn't* this be the case?
+    if (1 == count($avro))
+      return $this->type;
+    return $avro;
+  }
+}
+
+/**
+ * Avro array schema, consisting of items of a particular
+ * Avro schema type.
+ * @package Avro
+ */
+class AvroArraySchema extends AvroSchema
+{
+  /**
+   * @var AvroName|AvroSchema named schema name or AvroSchema of
+   *                          array element
+   */
+  private $items;
+
+  /**
+   * @var boolean true if the items schema was resolved by name from
+   *              the schemata rather than parsed inline
+   * FIXME: couldn't we derive this from whether or not $this->items
+   *        is an AvroName or an AvroSchema?
+   */
+  private $is_items_schema_from_schemata;
+
+  /**
+   * @param string|mixed $items AvroNamedSchema name or object form
+   *        of decoded JSON schema representation.
+   * @param string $default_namespace namespace of enclosing schema
+   * @param AvroNamedSchemata &$schemata
+   */
+  public function __construct($items, $default_namespace, &$schemata=null)
+  {
+    parent::__construct(AvroSchema::ARRAY_SCHEMA);
+
+    $this->is_items_schema_from_schemata = false;
+    $items_schema = null;
+    if (is_string($items)
+        && $items_schema = $schemata->schema_by_name(
+          new AvroName($items, null, $default_namespace)))
+      $this->is_items_schema_from_schemata = true;
+    else
+      $items_schema = AvroSchema::subparse($items, $default_namespace, $schemata);
+
+    $this->items = $items_schema;
+  }
+
+
+  /**
+   * @returns AvroName|AvroSchema named schema name or AvroSchema
+   *          of this array schema's elements.
+   */
+  public function items() { return $this->items; }
+
+  /**
+   * @returns mixed
+   */
+  public function to_avro()
+  {
+    $avro = parent::to_avro();
+    $avro[AvroSchema::ITEMS_ATTR] = $this->is_items_schema_from_schemata
+      ? $this->items->qualified_name() : $this->items->to_avro();
+    return $avro;
+  }
+}
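+
+// Sketch: an array schema of primitive items round-trips to its canonical
+// JSON form via __toString().
+//
+//   $s = AvroSchema::parse('{"type": "array", "items": "int"}');
+//   (string) $s; // '{"type":"array","items":"int"}'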
+
+/**
+ * Avro map schema consisting of named values of defined
+ * Avro Schema types.
+ * @package Avro
+ */
+class AvroMapSchema extends AvroSchema
+{
+  /**
+   * @var string|AvroSchema named schema name or AvroSchema
+   *      of map schema values.
+   */
+  private $values;
+
+  /**
+   * @var boolean true if the values schema was resolved by name from
+   *              the schemata rather than parsed inline
+   * XXX Couldn't we derive this based on whether or not
+   * $this->values is a string?
+   */
+  private $is_values_schema_from_schemata;
+
+  /**
+   * @param string|AvroSchema $values
+   * @param string $default_namespace namespace of enclosing schema
+   * @param AvroNamedSchemata &$schemata
+   */
+  public function __construct($values, $default_namespace, &$schemata=null)
+  {
+    parent::__construct(AvroSchema::MAP_SCHEMA);
+
+    $this->is_values_schema_from_schemata = false;
+    $values_schema = null;
+    if (is_string($values)
+        && $values_schema = $schemata->schema_by_name(
+          new AvroName($values, null, $default_namespace)))
+      $this->is_values_schema_from_schemata = true;
+    else
+      $values_schema = AvroSchema::subparse($values, $default_namespace,
+                                            $schemata);
+
+    $this->values = $values_schema;
+  }
+
+  /**
+   * @returns string|AvroSchema
+   */
+  public function values() { return $this->values; }
+
+  /**
+   * @returns mixed
+   */
+  public function to_avro()
+  {
+    $avro = parent::to_avro();
+    $avro[AvroSchema::VALUES_ATTR] = $this->is_values_schema_from_schemata
+      ? $this->values->qualified_name() : $this->values->to_avro();
+    return $avro;
+  }
+}
+
+/**
+ * Union of Avro schemas, of which values can be of any of the schema in
+ * the union.
+ * @package Avro
+ */
+class AvroUnionSchema extends AvroSchema
+{
+  /**
+   * @var AvroSchema[] list of schemas of this union
+   */
+  private $schemas;
+
+  /**
+   * @var int[] list of indices of named schemas which
+   *                are defined in $schemata
+   */
+  public $schema_from_schemata_indices;
+
+  /**
+   * @param AvroSchema[] $schemas list of schemas in the union
+   * @param string $default_namespace namespace of enclosing schema
+   * @param AvroNamedSchemata &$schemata
+   */
+  public function __construct($schemas, $default_namespace, &$schemata=null)
+  {
+    parent::__construct(AvroSchema::UNION_SCHEMA);
+
+    $this->schema_from_schemata_indices = array();
+    $schema_types = array();
+    foreach ($schemas as $index => $schema)
+    {
+      $is_schema_from_schemata = false;
+      $new_schema = null;
+      if (is_string($schema)
+          && ($new_schema = $schemata->schema_by_name(
+                new AvroName($schema, null, $default_namespace))))
+        $is_schema_from_schemata = true;
+      else
+        $new_schema = self::subparse($schema, $default_namespace, $schemata);
+
+      $schema_type = $new_schema->type;
+      if (self::is_valid_type($schema_type)
+          && !self::is_named_type($schema_type)
+          && in_array($schema_type, $schema_types))
+        throw new AvroSchemaParseException(
+          sprintf('"%s" is already in union', $schema_type));
+      elseif (AvroSchema::UNION_SCHEMA == $schema_type)
+        throw new AvroSchemaParseException('Unions cannot contain other unions');
+      else
+      {
+        $schema_types []= $schema_type;
+        $this->schemas []= $new_schema;
+        if ($is_schema_from_schemata)
+          $this->schema_from_schemata_indices []= $index;
+      }
+    }
+
+  }
+
+  /**
+   * @returns AvroSchema[]
+   */
+  public function schemas() { return $this->schemas; }
+
+  /**
+   * @returns AvroSchema the particular schema from the union for
+   * the given (zero-based) index.
+   * @throws AvroSchemaParseException if the index is invalid for this schema.
+   */
+  public function schema_by_index($index)
+  {
+    if (count($this->schemas) > $index)
+      return $this->schemas[$index];
+
+    throw new AvroSchemaParseException('Invalid union schema index');
+  }
+
+  /**
+   * @returns mixed
+   */
+  public function to_avro()
+  {
+    $avro = array();
+
+    foreach ($this->schemas as $index => $schema)
+      $avro []= (in_array($index, $this->schema_from_schemata_indices))
+      ? $schema->qualified_name() : $schema->to_avro();
+
+    return $avro;
+  }
+}
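+
+// Sketch: unions parse from a JSON list of branch schemas.
+//
+//   $u = AvroSchema::parse('["null", "string"]');
+//   count($u->schemas());           // 2
+//   $u->schema_by_index(1)->type(); // 'string'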
+
+/**
+ * Parent class of named Avro schema
+ * @package Avro
+ * @todo Refactor AvroNamedSchema to use an AvroName instance
+ *       to store name information.
+ */
+class AvroNamedSchema extends AvroSchema
+{
+  /**
+   * @var AvroName $name
+   */
+  private $name;
+
+  /**
+   * @var string documentation string
+   */
+  private $doc;
+
+  /**
+   * @param string $type
+   * @param AvroName $name
+   * @param string $doc documentation string
+   * @param AvroNamedSchemata &$schemata
+   * @throws AvroSchemaParseException
+   */
+  public function __construct($type, $name, $doc=null, &$schemata=null)
+  {
+    parent::__construct($type);
+    $this->name = $name;
+
+    if ($doc && !is_string($doc))
+      throw new AvroSchemaParseException('Schema doc attribute must be a string');
+    $this->doc = $doc;
+
+    if (!is_null($schemata))
+      $schemata = $schemata->clone_with_new_schema($this);
+  }
+
+  /**
+   * @returns mixed
+   */
+  public function to_avro()
+  {
+    $avro = parent::to_avro();
+    list($name, $namespace) = AvroName::extract_namespace($this->qualified_name());
+    $avro[AvroSchema::NAME_ATTR] = $name;
+    if ($namespace)
+      $avro[AvroSchema::NAMESPACE_ATTR] = $namespace;
+    if (!is_null($this->doc))
+      $avro[AvroSchema::DOC_ATTR] = $this->doc;
+    return $avro;
+  }
+
+  /**
+   * @returns string
+   */
+  public function fullname() { return $this->name->fullname(); }
+
+  public function qualified_name() { return $this->name->qualified_name(); }
+
+}
+
+/**
+ * @package Avro
+ */
+class AvroName
+{
+  /**
+   * @var string character used to separate names comprising the fullname
+   */
+  const NAME_SEPARATOR = '.';
+
+  /**
+   * @var string regular expression to validate name values
+   */
+  const NAME_REGEXP = '/^[A-Za-z_][A-Za-z0-9_]*$/';
+
+  /**
+   * @returns string[] array($name, $namespace)
+   */
+  public static function extract_namespace($name, $namespace=null)
+  {
+    $parts = explode(self::NAME_SEPARATOR, $name);
+    if (count($parts) > 1)
+    {
+      $name = array_pop($parts);
+      $namespace = join(self::NAME_SEPARATOR, $parts);
+    }
+    return array($name, $namespace);
+  }
+
+  /**
+   * @returns boolean true if the given name is well-formed
+   *          (is a non-null, non-empty string) and false otherwise
+   */
+  public static function is_well_formed_name($name)
+  {
+    return (is_string($name) && !empty($name)
+            && preg_match(self::NAME_REGEXP, $name));
+  }
+
+  /**
+   * @param string $namespace
+   * @returns boolean true if namespace is composed of valid names
+   * @throws AvroSchemaParseException if any of the namespace components
+   *                                  are invalid.
+   */
+  private static function check_namespace_names($namespace)
+  {
+    foreach (explode(self::NAME_SEPARATOR, $namespace) as $n)
+    {
+      if (empty($n) || (0 == preg_match(self::NAME_REGEXP, $n)))
+        throw new AvroSchemaParseException(sprintf('Invalid name "%s"', $n));
+    }
+    return true;
+  }
+
+  /**
+   * @param string $name
+   * @param string $namespace
+   * @returns string
+   * @throws AvroSchemaParseException if any of the names are not valid.
+   */
+  private static function parse_fullname($name, $namespace)
+  {
+    if (!is_string($namespace) || empty($namespace))
+      throw new AvroSchemaParseException('Namespace must be a non-empty string.');
+    self::check_namespace_names($namespace);
+    return $namespace . '.' . $name;
+  }
+
+  /**
+   * @var string valid names are matched by self::NAME_REGEXP
+   */
+  private $name;
+
+  /**
+   * @var string
+   */
+  private $namespace;
+
+  /**
+   * @var string
+   */
+  private $fullname;
+
+  /**
+   * @var string Name qualified as necessary given its default namespace.
+   */
+  private $qualified_name;
+
+  /**
+   * @param string $name
+   * @param string $namespace
+   * @param string $default_namespace
+   */
+  public function __construct($name, $namespace, $default_namespace)
+  {
+    if (!is_string($name) || empty($name))
+      throw new AvroSchemaParseException('Name must be a non-empty string.');
+
+    if (strpos($name, self::NAME_SEPARATOR)
+        && self::check_namespace_names($name))
+      $this->fullname = $name;
+    elseif (0 == preg_match(self::NAME_REGEXP, $name))
+      throw new AvroSchemaParseException(sprintf('Invalid name "%s"', $name));
+    elseif (!is_null($namespace))
+      $this->fullname = self::parse_fullname($name, $namespace);
+    elseif (!is_null($default_namespace))
+      $this->fullname = self::parse_fullname($name, $default_namespace);
+    else
+      $this->fullname = $name;
+
+    list($this->name, $this->namespace) = self::extract_namespace($this->fullname);
+    $this->qualified_name = (is_null($this->namespace)
+                             || $this->namespace == $default_namespace)
+      ? $this->name : $this->fullname;
+  }
+
+  /**
+   * @returns array array($name, $namespace)
+   */
+  public function name_and_namespace()
+  {
+    return array($this->name, $this->namespace);
+  }
+
+  /**
+   * @returns string
+   */
+  public function fullname() { return $this->fullname; }
+
+  /**
+   * @returns string fullname
+   * @uses $this->fullname()
+   */
+  public function __toString() { return $this->fullname(); }
+
+  /**
+   * @returns string name qualified for its context
+   */
+  public function qualified_name() { return $this->qualified_name; }
+
+}
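+
+// Sketch of how qualification interacts with the default namespace:
+//
+//   $a = new AvroName('Point', 'geo', null);
+//   $a->fullname();       // 'geo.Point'
+//   $a->qualified_name(); // 'geo.Point' (default namespace differs)
+//   $b = new AvroName('Point', null, 'geo');
+//   $b->qualified_name(); // 'Point' (namespace matches the default)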
+
+/**
+ *  Keeps track of AvroNamedSchema which have been observed so far,
+ *  as well as the default namespace.
+ *
+ * @package Avro
+ */
+class AvroNamedSchemata
+{
+  /**
+   * @var AvroNamedSchema[]
+   */
+  private $schemata;
+
+  /**
+   * @param AvroNamedSchema[] $schemata
+   */
+  public function __construct($schemata=array())
+  {
+    $this->schemata = $schemata;
+  }
+
+  public function list_schemas() {
+    var_export($this->schemata);
+    foreach($this->schemata as $sch) 
+      print('Schema '.$sch->__toString()."\n");
+  }
+
+  /**
+   * @param string $fullname
+   * @returns boolean true if there exists a schema with the given name
+   *                  and false otherwise.
+   */
+  public function has_name($fullname)
+  {
+    return array_key_exists($fullname, $this->schemata);
+  }
+
+  /**
+   * @param string $fullname
+   * @returns AvroSchema|null the schema which has the given name,
+   *          or null if there is no schema with the given name.
+   */
+  public function schema($fullname)
+  {
+    if (isset($this->schemata[$fullname]))
+        return $this->schemata[$fullname];
+    return null;
+  }
+
+  /**
+   * @param AvroName $name
+   * @returns AvroSchema|null
+   */
+  public function schema_by_name($name)
+  {
+    return $this->schema($name->fullname());
+  }
+
+  /**
+   * Creates a new AvroNamedSchemata instance from this schemata instance,
+   * with the given $schema appended.
+   * @param AvroNamedSchema $schema schema to add to the existing schemata
+   * @returns AvroNamedSchemata
+   */
+  public function clone_with_new_schema($schema)
+  {
+    $name = $schema->fullname();
+    if (AvroSchema::is_valid_type($name))
+      throw new AvroSchemaParseException(
+        sprintf('Name "%s" is a reserved type name', $name));
+    else if ($this->has_name($name))
+      throw new AvroSchemaParseException(
+        sprintf('Name "%s" is already in use', $name));
+    $schemata = new AvroNamedSchemata($this->schemata);
+    $schemata->schemata[$name] = $schema;
+    return $schemata;
+  }
+}
+
+/**
+ * @package Avro
+ */
+class AvroEnumSchema extends AvroNamedSchema
+{
+  /**
+   * @var string[] array of symbols
+   */
+  private $symbols;
+
+  /**
+   * @param AvroName $name
+   * @param string $doc
+   * @param string[] $symbols
+   * @param AvroNamedSchemata &$schemata
+   * @throws AvroSchemaParseException
+   */
+  public function __construct($name, $doc, $symbols, &$schemata=null)
+  {
+    if (!AvroUtil::is_list($symbols))
+      throw new AvroSchemaParseException('Enum Schema symbols are not a list');
+
+    if (count(array_unique($symbols)) < count($symbols))
+      throw new AvroSchemaParseException(
+        sprintf('Duplicate symbols: %s', join(',', $symbols)));
+
+    foreach ($symbols as $symbol)
+      if (!is_string($symbol) || empty($symbol))
+        throw new AvroSchemaParseException(
+          sprintf('Enum schema symbol must be a string: %s',
+                  print_r($symbol, true)));
+
+    parent::__construct(AvroSchema::ENUM_SCHEMA, $name, $doc, $schemata);
+    $this->symbols = $symbols;
+  }
+
+  /**
+   * @returns string[] this enum schema's symbols
+   */
+  public function symbols() { return $this->symbols; }
+
+  /**
+   * @param string $symbol
+   * @returns boolean true if the given symbol exists in this
+   *          enum schema and false otherwise
+   */
+  public function has_symbol($symbol)
+  {
+    return in_array($symbol, $this->symbols);
+  }
+
+  /**
+   * @param int $index
+   * @returns string enum schema symbol with the given (zero-based) index
+   */
+  public function symbol_by_index($index)
+  {
+    if (array_key_exists($index, $this->symbols))
+      return $this->symbols[$index];
+    throw new AvroException(sprintf('Invalid symbol index %d', $index));
+  }
+
+  /**
+   * @param string $symbol
+   * @returns int the index of the given $symbol in the enum schema
+   */
+  public function symbol_index($symbol)
+  {
+    $idx = array_search($symbol, $this->symbols, true);
+    if (false !== $idx)
+      return $idx;
+    throw new AvroException(sprintf("Invalid symbol value '%s'", $symbol));
+  }
+
+  /**
+   * @returns mixed
+   */
+  public function to_avro()
+  {
+    $avro = parent::to_avro();
+    $avro[AvroSchema::SYMBOLS_ATTR] = $this->symbols;
+    return $avro;
+  }
+}
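+
+// Sketch of symbol lookups on a parsed enum:
+//
+//   $e = AvroSchema::parse(
+//     '{"type": "enum", "name": "Suit", "symbols": ["HEART", "SPADE"]}');
+//   $e->symbol_by_index(1);    // 'SPADE'
+//   $e->symbol_index('HEART'); // 0
+//   $e->has_symbol('CLUB');    // false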
+
+/**
+ * AvroNamedSchema with fixed-length data values
+ * @package Avro
+ */
+class AvroFixedSchema extends AvroNamedSchema
+{
+
+  /**
+   * @var int byte count of this fixed schema data value
+   */
+  private $size;
+
+  /**
+   * @param AvroName $name
+   * @param string $doc Set to null, as fixed schemas don't have doc strings
+   * @param int $size byte count of this fixed schema data value
+   * @param AvroNamedSchemata &$schemata
+   */
+  public function __construct($name, $doc, $size, &$schemata=null)
+  {
+    $doc = null; // Fixed schemas don't have doc strings.
+    if (!is_integer($size))
+      throw new AvroSchemaParseException(
+        'Fixed Schema requires a valid integer for "size" attribute');
+    parent::__construct(AvroSchema::FIXED_SCHEMA, $name, $doc, $schemata);
+    $this->size = $size;
+  }
+
+  /**
+   * @returns int byte count of this fixed schema data value
+   */
+  public function size() { return $this->size; }
+
+  /**
+   * @returns mixed
+   */
+  public function to_avro()
+  {
+    $avro = parent::to_avro();
+    $avro[AvroSchema::SIZE_ATTR] = $this->size;
+    return $avro;
+  }
+}
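+
+// Illustrative usage sketch (editor's addition, not part of the upstream
+// file); the schema JSON is a made-up example:
+//
+//   $md5 = AvroSchema::parse('{"type": "fixed", "name": "md5", "size": 16}');
+//   $md5->size();  // 16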
+
+/**
+ * @package Avro
+ */
+class AvroRecordSchema extends AvroNamedSchema
+{
+  /**
+   * @param mixed $field_data
+   * @param string $default_namespace namespace of enclosing schema
+   * @param AvroNamedSchemata &$schemata
+   * @returns AvroField[]
+   * @throws AvroSchemaParseException
+   */
+  static function parse_fields($field_data, $default_namespace, &$schemata)
+  {
+    $fields = array();
+    $field_names = array();
+    foreach ($field_data as $index => $field)
+    {
+      $name = AvroUtil::array_value($field, AvroField::FIELD_NAME_ATTR);
+      $type = AvroUtil::array_value($field, AvroSchema::TYPE_ATTR);
+      $order = AvroUtil::array_value($field, AvroField::ORDER_ATTR);
+
+      $default = null;
+      $has_default = false;
+      if (array_key_exists(AvroField::DEFAULT_ATTR, $field))
+      {
+        $default = $field[AvroField::DEFAULT_ATTR];
+        $has_default = true;
+      }
+
+      if (in_array($name, $field_names))
+        throw new AvroSchemaParseException(
+          sprintf("Field name %s is already in use", $name));
+
+      $is_schema_from_schemata = false;
+      $field_schema = null;
+      if (is_string($type)
+          && $field_schema = $schemata->schema_by_name(
+            new AvroName($type, null, $default_namespace)))
+        $is_schema_from_schemata = true;
+      else
+        $field_schema = self::subparse($type, $default_namespace, $schemata);
+
+      $new_field = new AvroField($name, $field_schema, $is_schema_from_schemata,
+                                 $has_default, $default, $order);
+      $field_names []= $name;
+      $fields []= $new_field;
+    }
+    return $fields;
+  }
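+
+  // Note on parse_fields(): when a field's "type" is a string, it is first
+  // resolved against the named schemas already collected in $schemata (so a
+  // field can reference an earlier named type); only when that lookup fails
+  // is the type parsed as a new schema via subparse(). Illustrative schema
+  // (editor's example, not part of the upstream file):
+  //
+  //   {"type": "record", "name": "File",
+  //    "fields": [
+  //      {"name": "hash",   "type": {"type": "fixed", "name": "md5", "size": 16}},
+  //      {"name": "backup", "type": "md5"}]}   // "md5" found via $schemata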
+
+  /**
+   * @var AvroField[] array of AvroField definitions of
+   *                  this AvroRecordSchema
+   */
+  private $fields;
+
+  /**
+   * @var array map of field names to field objects.
+   * @internal Memoized result of AvroRecordSchema->fields_hash()
+   */
+  private $fields_hash;
+
+  /**
+   * @param AvroName $name
+   * @param string $doc
+   * @param array $fields
+   * @param AvroNamedSchemata &$schemata
+   * @param string $schema_type schema type name
+   * @throws AvroSchemaParseException
+   */
+  public function __construct($name, $doc, $fields, &$schemata=null,
+                              $schema_type=AvroSchema::RECORD_SCHEMA)
+  {
+    if (is_null($fields))
+      throw new AvroSchemaParseException(
+        'Record schema requires a non-empty fields attribute');
+
+    if (AvroSchema::REQUEST_SCHEMA == $schema_type)
+      parent::__construct($schema_type, $name);
+    else
+      parent::__construct($schema_type, $name, $doc, $schemata);
+
+    list($x, $namespace) = $name->name_and_namespace();
+    $this->fields = self::parse_fields($fields, $namespace, $schemata);
+  }
+
+  /**
+   * @returns mixed
+   */
+  public function to_avro()
+  {
+    $avro = parent::to_avro();
+
+    $fields_avro = array();
+    foreach ($this->fields as $field)
+      $fields_avro [] = $field->to_avro();
+
+    if (AvroSchema::REQUEST_SCHEMA == $this->type)
+      return $fields_avro;
+
+    $avro[AvroSchema::FIELDS_ATTR] = $fields_avro;
+
+    return $avro;
+  }
+
+  /**
+   * @returns AvroField[] the fields of this AvroRecordSchema
+   */
+  public function fields() { return $this->fields; }
+
+  /**
+   * @returns array a hash table of this AvroRecordSchema's fields,
+   *          keyed by each field's name
+   */
+  public function fields_hash()
+  {
+    if (is_null($this->fields_hash))
+    {
+      $hash = array();
+      foreach ($this->fields as $field)
+        $hash[$field->name()] = $field;
+      $this->fields_hash = $hash;
+    }
+    return $this->fields_hash;
+  }
+}
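+
+// Illustrative usage sketch (editor's addition, not part of the upstream
+// file): fields() preserves declaration order, while fields_hash() memoizes
+// a name-to-field map for constant-time lookup:
+//
+//   $user = AvroSchema::parse(
+//     '{"type": "record", "name": "User",
+//       "fields": [{"name": "id", "type": "int"}]}');
+//   $by_name = $user->fields_hash();
+//   $by_name['id']->name();  // 'id'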
+
+/**
+ * Field of an {@link AvroRecordSchema}
+ * @package Avro
+ */
+class AvroField extends AvroSchema
+{
+
+  /**
+   * @var string attribute name for a field's name
+   */
+  const FIELD_NAME_ATTR = 'name';
+
+  /**
+   * @var string
+   */
+  const DEFAULT_ATTR = 'default';
+
+  /**
+   * @var string
+   */
+  const ORDER_ATTR = 'order';
+
+  /**
+   * @var string
+   */
+  const ASC_SORT_ORDER = 'ascending';
+
+  /**
+   * @var string
+   */
+  const DESC_SORT_ORDER = 'descending';
+
+  /**
+   * @var string
+   */
+  const IGNORE_SORT_ORDER = 'ignore';
+
+  /**
+   * @var array list of valid field sort order values
+   */
+  private static $valid_field_sort_orders = array(self::ASC_SORT_ORDER,
+                                                  self::DESC_SORT_ORDER,
+                                                  self::IGNORE_SORT_ORDER);
+
+
+  /**
+   * @param string $order
+   * @returns boolean
+   */
+  private static function is_valid_field_sort_order($order)
+  {
+    return in_array($order, self::$valid_field_sort_orders);
+  }
+
+  /**
+   * @param string $order
+   * @throws AvroSchemaParseException if $order is not a valid
+   *                                  field order value.
+   */
+  private static function check_order_value($order)
+  {
+    if (!is_null($order) && !self::is_valid_field_sort_order($order))
+      throw new AvroSchemaParseException(
+        sprintf('Invalid field sort order %s', $order));
+  }
+
+  /**
+   * @var string
+   */
+  private $name;
+
+  /**
+   * @var boolean whether or not there is a default value
+   */
+  private $has_default;
+
+  /**
+   * @var string field default value
+   */
+  private $default;
+
+  /**
+   * @var string sort order of this field
+   */
+  private $order;
+
+  /**
+   * @var boolean whether or not the AvroNamedSchema of this field is
+   *              defined in the AvroNamedSchemata instance
+   */
+  private $is_type_from_schemata;
+
+  /**
+   * @param string $name
+   * @param AvroSchema $schema
+   * @param boolean $is_type_from_schemata
+   * @param boolean $has_default
+   * @param string $default
+   * @param string $order
+   * @todo Check validity of $default value
+   * @todo Check validity of $order value
+   */
+  public function __construct($name, $schema, $is_type_from_schemata,
+                              $has_default, $default, $order=null)
+  {
+    if (!AvroName::is_well_formed_name($name))
+      throw new AvroSchemaParseException('Field requires a "name" attribute');
+
+    $this->type = $schema;
+    $this->is_type_from_schemata = $is_type_from_schemata;
+    $this->name = $name;
+    $this->has_default = $has_default;
+    if ($this->has_default)
+      $this->default = $default;
+    self::check_order_value($order);
+    $this->order = $order;
+  }
+
+  /**
+   * @returns mixed
+   */
+  public function to_avro()
+  {
+    $avro = array(AvroField::FIELD_NAME_ATTR => $this->name);
+
+    $avro[AvroSchema::TYPE_ATTR] = ($this->is_type_from_schemata)
+      ? $this->type->qualified_name() : $this->type->to_avro();
+
+    if ($this->has_default)
+      $avro[AvroField::DEFAULT_ATTR] = $this->default;
+
+    if ($this->order)
+      $avro[AvroField::ORDER_ATTR] = $this->order;
+
+    return $avro;
+  }
+
+  /**
+   * @returns string the name of this field
+   */
+  public function name() { return $this->name; }
+
+  /**
+   * @returns mixed the default value of this field
+   */
+  public function default_value() { return $this->default; }
+
+  /**
+   * @returns boolean true if the field has a default and false otherwise
+   */
+  public function has_default_value() { return $this->has_default; }
+}
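+
+// Illustrative sketch (editor's addition, not part of the upstream file):
+// given a field parsed from the made-up JSON
+//
+//   {"name": "age", "type": "int", "default": 0, "order": "descending"}
+//
+// AvroField::to_avro() re-emits name, type, default, and order; the type is
+// inlined unless it was resolved from the shared AvroNamedSchemata tracker,
+// in which case only its qualified name is written.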
diff --git a/lang/php/lib/avro/util.php b/lang/php/lib/avro/util.php
new file mode 100644
index 0000000..a43613e
--- /dev/null
+++ b/lang/php/lib/avro/util.php
@@ -0,0 +1,67 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * @package Avro
+ */
+
+/**
+ * Class for static utility methods used in Avro.
+ *
+ * @package Avro
+ */
+class AvroUtil
+{
+  /**
+   * Determines whether the given array is an associative array
+   * (what is termed a map, hash, or dictionary in other languages)
+   * or a list (an array with monotonically increasing integer indices
+   * starting with zero).
+   *
+   * @param array $ary array to test
+   * @returns boolean true if the array is a list and false otherwise.
+   */
+  static function is_list($ary)
+  {
+    if (is_array($ary))
+    {
+      $i = 0;
+      foreach ($ary as $k => $v)
+      {
+        if ($i !== $k)
+          return false;
+        $i++;
+      }
+      return true;
+    }
+    return false;
+  }
+
+  /**
+   * @param array $ary
+   * @param string $key
+   * @returns mixed the value of $ary[$key] if it is set,
+   *                and null otherwise.
+   */
+  static function array_value($ary, $key)
+  {
+    return isset($ary[$key]) ? $ary[$key] : null;
+  }
+}
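+
+// Illustrative examples (editor's addition, not part of the upstream file)
+// of the list/map distinction drawn by AvroUtil::is_list():
+//
+//   AvroUtil::is_list(array(10, 20, 30));          // true: keys are 0,1,2
+//   AvroUtil::is_list(array(1 => 'a', 0 => 'b'));  // false: keys out of order
+//   AvroUtil::is_list(array('a' => 1));            // false: string key
+//   AvroUtil::is_list('not an array');             // false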
diff --git a/lang/php/test/AllTests.php b/lang/php/test/AllTests.php
new file mode 100644
index 0000000..e0f5ed2
--- /dev/null
+++ b/lang/php/test/AllTests.php
@@ -0,0 +1,47 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+require_once('DataFileTest.php');
+require_once('SchemaTest.php');
+require_once('NameTest.php');
+require_once('StringIOTest.php');
+require_once('IODatumReaderTest.php');
+require_once('LongEncodingTest.php');
+require_once('FloatIntEncodingTest.php');
+require_once('DatumIOTest.php');
+require_once('ProtocolFileTest.php');
+// InterOpTest tests are run separately.
+
+class AllTests
+{
+  public static function suite()
+  {
+    $suite = new PHPUnit_Framework_TestSuite('AvroAllTests');
+    $suite->addTestSuite('DataFileTest');
+    $suite->addTestSuite('SchemaTest');
+    $suite->addTestSuite('NameTest');
+    $suite->addTestSuite('StringIOTest');
+    $suite->addTestSuite('IODatumReaderTest');
+    $suite->addTestSuite('LongEncodingTest');
+    $suite->addTestSuite('FloatIntEncodingTest');
+    $suite->addTestSuite('DatumIOTest');
+    $suite->addTestSuite('ProtocolFileTest');
+    return $suite;
+  }
+}
diff --git a/lang/php/test/DataFileTest.php b/lang/php/test/DataFileTest.php
new file mode 100644
index 0000000..3254e21
--- /dev/null
+++ b/lang/php/test/DataFileTest.php
@@ -0,0 +1,270 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+require_once('test_helper.php');
+
+class DataFileTest extends PHPUnit_Framework_TestCase
+{
+  private $data_files;
+  const REMOVE_DATA_FILES = true;
+
+  static function current_timestamp() { return strftime("%Y%m%dT%H%M%S"); }
+
+  protected function add_data_file($data_file)
+  {
+    if (is_null($this->data_files))
+      $this->data_files = array();
+    $data_file = "$data_file.".self::current_timestamp();
+    $full = join(DIRECTORY_SEPARATOR, array(TEST_TEMP_DIR, $data_file));
+    $this->data_files []= $full;
+    return $full;
+  }
+
+  protected static function remove_data_file($data_file)
+  {
+    if (file_exists($data_file))
+      unlink($data_file);
+  }
+
+  protected function remove_data_files()
+  {
+    if (self::REMOVE_DATA_FILES
+        && !empty($this->data_files))
+      foreach ($this->data_files as $data_file)
+        $this->remove_data_file($data_file);
+  }
+
+  protected function setUp()
+  {
+    if (!file_exists(TEST_TEMP_DIR))
+      mkdir(TEST_TEMP_DIR);
+    $this->remove_data_files();
+  }
+
+  protected function tearDown()
+  {
+    $this->remove_data_files();
+  }
+
+  public function test_write_read_nothing_round_trip()
+  {
+    $data_file = $this->add_data_file('data-wr-nothing-null.avr');
+    $writers_schema = '"null"';
+    $dw = AvroDataIO::open_file($data_file, 'w', $writers_schema);
+    $dw->close();
+
+    $dr = AvroDataIO::open_file($data_file);
+    $read_data = array_shift($dr->data());
+    $dr->close();
+    $this->assertEquals(null, $read_data);
+  }
+
+  public function test_write_read_null_round_trip()
+  {
+    $data_file = $this->add_data_file('data-wr-null.avr');
+    $writers_schema = '"null"';
+    $data = null;
+    $dw = AvroDataIO::open_file($data_file, 'w', $writers_schema);
+    $dw->append($data);
+    $dw->close();
+
+    $dr = AvroDataIO::open_file($data_file);
+    $read_data = array_shift($dr->data());
+    $dr->close();
+    $this->assertEquals($data, $read_data);
+  }
+
+  public function test_write_read_string_round_trip()
+  {
+    $data_file = $this->add_data_file('data-wr-str.avr');
+    $writers_schema = '"string"';
+    $data = 'foo';
+    $dw = AvroDataIO::open_file($data_file, 'w', $writers_schema);
+    $dw->append($data);
+    $dw->close();
+
+    $dr = AvroDataIO::open_file($data_file);
+    $read_data = array_shift($dr->data());
+    $dr->close();
+    $this->assertEquals($data, $read_data);
+  }
+
+  public function test_write_read_round_trip()
+  {
+    $data_file = $this->add_data_file('data-wr-int.avr');
+    $writers_schema = '"int"';
+    $data = 1;
+
+    $dw = AvroDataIO::open_file($data_file, 'w', $writers_schema);
+    $dw->append($data);
+    $dw->close();
+
+    $dr = AvroDataIO::open_file($data_file);
+    $read_data = array_shift($dr->data());
+    $dr->close();
+    $this->assertEquals($data, $read_data);
+  }
+
+  public function test_write_read_true_round_trip()
+  {
+    $data_file = $this->add_data_file('data-wr-true.avr');
+    $writers_schema = '"boolean"';
+    $datum = true;
+    $dw = AvroDataIO::open_file($data_file, 'w', $writers_schema);
+    $dw->append($datum);
+    $dw->close();
+
+    $dr = AvroDataIO::open_file($data_file);
+    $read_datum = array_shift($dr->data());
+    $dr->close();
+    $this->assertEquals($datum, $read_datum);
+  }
+
+  public function test_write_read_false_round_trip()
+  {
+    $data_file = $this->add_data_file('data-wr-false.avr');
+    $writers_schema = '"boolean"';
+    $datum = false;
+    $dw = AvroDataIO::open_file($data_file, 'w', $writers_schema);
+    $dw->append($datum);
+    $dw->close();
+
+    $dr = AvroDataIO::open_file($data_file);
+    $read_datum = array_shift($dr->data());
+    $dr->close();
+    $this->assertEquals($datum, $read_datum);
+  }
+
+  public function test_write_read_int_array_round_trip()
+  {
+    $data_file = $this->add_data_file('data-wr-int-ary.avr');
+    $writers_schema = '"int"';
+    $data = array(10, 20, 30, 40, 50, 60, 70);
+    $dw = AvroDataIO::open_file($data_file, 'w', $writers_schema);
+    foreach ($data as $datum)
+      $dw->append($datum);
+    $dw->close();
+
+    $dr = AvroDataIO::open_file($data_file);
+    $read_data = $dr->data();
+    $dr->close();
+    $this->assertEquals($data, $read_data,
+                        sprintf("in: %s\nout: %s",
+                                json_encode($data), json_encode($read_data)));
+  }
+
+  public function test_differing_schemas_with_primitives()
+  {
+    $data_file = $this->add_data_file('data-prim.avr');
+
+    $writer_schema = <<<JSON
+{ "type": "record",
+  "name": "User",
+  "fields" : [
+      {"name": "username", "type": "string"},
+      {"name": "age", "type": "int"},
+      {"name": "verified", "type": "boolean", "default": "false"}
+      ]}
+JSON;
+    $data = array(array('username' => 'john', 'age' => 25, 'verified' => true),
+                  array('username' => 'ryan', 'age' => 23, 'verified' => false));
+    $dw = AvroDataIO::open_file($data_file, 'w', $writer_schema);
+    foreach ($data as $datum)
+    {
+      $dw->append($datum);
+    }
+    $dw->close();
+    $reader_schema = <<<JSON
+      { "type": "record",
+        "name": "User",
+        "fields" : [
+      {"name": "username", "type": "string"}
+      ]}
+JSON;
+    $dr = AvroDataIO::open_file($data_file, 'r', $reader_schema);
+    foreach ($dr->data() as $index => $record)
+    {
+      $this->assertEquals($data[$index]['username'], $record['username']);
+    }
+  }
+
+  public function test_differing_schemas_with_complex_objects()
+  {
+    $data_file = $this->add_data_file('data-complex.avr');
+
+    $writers_schema = <<<JSON
+{ "type": "record",
+  "name": "something",
+  "fields": [
+    {"name": "something_fixed", "type": {"name": "inner_fixed",
+                                         "type": "fixed", "size": 3}},
+    {"name": "something_enum", "type": {"name": "inner_enum",
+                                        "type": "enum",
+                                        "symbols": ["hello", "goodbye"]}},
+    {"name": "something_array", "type": {"type": "array", "items": "int"}},
+    {"name": "something_map", "type": {"type": "map", "values": "int"}},
+    {"name": "something_record", "type": {"name": "inner_record",
+                                          "type": "record",
+                                          "fields": [
+                                            {"name": "inner", "type": "int"}
+                                          ]}},
+    {"name": "username", "type": "string"}
+]}
+JSON;
+
+    $data = array(array("username" => "john",
+                        "something_fixed" => "foo",
+                        "something_enum" => "hello",
+                        "something_array" => array(1,2,3),
+                        "something_map" => array("a" => 1, "b" => 2),
+                        "something_record" => array("inner" => 2),
+                        "something_error" => array("code" => 403)),
+                  array("username" => "ryan",
+                        "something_fixed" => "bar",
+                        "something_enum" => "goodbye",
+                        "something_array" => array(1,2,3),
+                        "something_map" => array("a" => 2, "b" => 6),
+                        "something_record" => array("inner" => 1),
+                        "something_error" => array("code" => 401)));
+    $dw = AvroDataIO::open_file($data_file, 'w', $writers_schema);
+    foreach ($data as $datum)
+      $dw->append($datum);
+    $dw->close();
+
+    foreach (array('fixed', 'enum', 'record', 'error',
+                   'array' , 'map', 'union') as $s)
+    {
+      $readers_schema = json_decode($writers_schema, true);
+      $dr = AvroDataIO::open_file($data_file, 'r', json_encode($readers_schema));
+      foreach ($dr->data() as $idx => $obj)
+      {
+        foreach ($readers_schema['fields'] as $field)
+        {
+          $field_name = $field['name'];
+          $this->assertEquals($data[$idx][$field_name], $obj[$field_name]);
+        }
+      }
+      $dr->close();
+
+    }
+
+  }
+
+}
diff --git a/lang/php/test/DatumIOTest.php b/lang/php/test/DatumIOTest.php
new file mode 100644
index 0000000..795a506
--- /dev/null
+++ b/lang/php/test/DatumIOTest.php
@@ -0,0 +1,144 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+require_once('test_helper.php');
+
+class DatumIOTest extends PHPUnit_Framework_TestCase
+{
+  /**
+   * @dataProvider data_provider
+   */
+  function test_datum_round_trip($schema_json, $datum, $binary)
+  {
+    $schema = AvroSchema::parse($schema_json);
+    $written = new AvroStringIO();
+    $encoder = new AvroIOBinaryEncoder($written);
+    $writer = new AvroIODatumWriter($schema);
+
+    $writer->write($datum, $encoder);
+    $output = strval($written);
+    $this->assertEquals($binary, $output,
+                        sprintf("expected: %s\n  actual: %s",
+                                AvroDebug::ascii_string($binary, 'hex'),
+                                AvroDebug::ascii_string($output, 'hex')));
+
+    $read = new AvroStringIO($binary);
+    $decoder = new AvroIOBinaryDecoder($read);
+    $reader = new AvroIODatumReader($schema);
+    $read_datum = $reader->read($decoder);
+    $this->assertEquals($datum, $read_datum);
+  }
+
+  function data_provider()
+  {
+    return array(array('"null"', null, ''),
+
+                 array('"boolean"', true, "\001"),
+                 array('"boolean"', false, "\000"),
+
+                 array('"int"', (int) -2147483648, "\xFF\xFF\xFF\xFF\x0F"),
+                 array('"int"', -1, "\001"),
+                 array('"int"', 0, "\000"),
+                 array('"int"', 1, "\002"),
+                 array('"int"', 2147483647, "\xFE\xFF\xFF\xFF\x0F"),
+
+                 // array('"long"', (int) -9223372036854775808, "\001"),
+                 array('"long"', -1, "\001"),
+                 array('"long"',  0, "\000"),
+                 array('"long"',  1, "\002"),
+                 // array('"long"', 9223372036854775807, "\002")
+
+                 array('"float"', (float) -10.0, "\000\000 \301"),
+                 array('"float"', (float)  -1.0, "\000\000\200\277"),
+                 array('"float"', (float)   0.0, "\000\000\000\000"),
+                 array('"float"', (float)   2.0, "\000\000\000@"),
+                 array('"float"', (float)   9.0, "\000\000\020A"),
+
+                 array('"double"', (double) -10.0, "\000\000\000\000\000\000$\300"),
+                 array('"double"', (double) -1.0, "\000\000\000\000\000\000\360\277"),
+                 array('"double"', (double) 0.0, "\000\000\000\000\000\000\000\000"),
+                 array('"double"', (double) 2.0, "\000\000\000\000\000\000\000@"),
+                 array('"double"', (double) 9.0, "\000\000\000\000\000\000\"@"),
+
+                 array('"string"', 'foo', "\x06foo"),
+                 array('"bytes"', "\x01\x02\x03", "\x06\x01\x02\x03"),
+
+                 array('{"type":"array","items":"int"}',
+                       array(1,2,3),
+                       "\x06\x02\x04\x06\x00"),
+                 array('{"type":"map","values":"int"}',
+                       array('foo' => 1, 'bar' => 2, 'baz' => 3),
+                       "\x06\x06foo\x02\x06bar\x04\x06baz\x06\x00"),
+                 array('["null", "int"]', 1, "\x02\x02"),
+                 array('{"name":"fix","type":"fixed","size":3}',
+                       "\xAA\xBB\xCC", "\xAA\xBB\xCC"),
+                 array('{"name":"enm","type":"enum","symbols":["A","B","C"]}',
+                       'B', "\x02"),
+                 array('{"name":"rec","type":"record","fields":[{"name":"a","type":"int"},{"name":"b","type":"boolean"}]}',
+                       array('a' => 1, 'b' => false),
+                       "\x02\x00")
+      );
+  }
+
+  function default_provider()
+  {
+    return array(array('"null"', 'null', null),
+                 array('"boolean"', 'true', true),
+                 array('"int"', '1', 1),
+                 array('"long"', '2000', 2000),
+                 array('"float"', '1.1', (float) 1.1),
+                 array('"double"', '200.2', (double) 200.2),
+                 array('"string"', '"quux"', 'quux'),
+                 array('"bytes"', '"\u00FF"', "\xC3\xBF"),
+                 array('{"type":"array","items":"int"}',
+                       '[5,4,3,2]', array(5,4,3,2)),
+                 array('{"type":"map","values":"int"}',
+                       '{"a":9}', array('a' => 9)),
+                 array('["int","string"]', '8', 8),
+                 array('{"name":"x","type":"enum","symbols":["A","V"]}',
+                       '"A"', 'A'),
+                 array('{"name":"x","type":"fixed","size":4}', '"\u00ff"', "\xC3\xBF"),
+                 array('{"name":"x","type":"record","fields":[{"name":"label","type":"int"}]}',
+                       '{"label":7}', array('label' => 7)));
+  }
+
+  /**
+   * @dataProvider default_provider
+   */
+  function test_field_default_value($field_schema_json,
+                                    $default_json, $default_value)
+  {
+    $writers_schema_json = '{"name":"foo","type":"record","fields":[]}';
+    $writers_schema = AvroSchema::parse($writers_schema_json);
+
+    $readers_schema_json = sprintf(
+      '{"name":"foo","type":"record","fields":[{"name":"f","type":%s,"default":%s}]}',
+      $field_schema_json, $default_json);
+    $readers_schema = AvroSchema::parse($readers_schema_json);
+
+    $reader = new AvroIODatumReader($writers_schema, $readers_schema);
+    $record = $reader->read(new AvroIOBinaryDecoder(new AvroStringIO()));
+    if (array_key_exists('f', $record))
+      $this->assertEquals($default_value, $record['f']);
+    else
+      $this->assertTrue(false, sprintf('expected field record[f]: %s',
+                                       print_r($record, true)));
+  }
+
+}
diff --git a/lang/php/test/FloatIntEncodingTest.php b/lang/php/test/FloatIntEncodingTest.php
new file mode 100644
index 0000000..4bcff4e
--- /dev/null
+++ b/lang/php/test/FloatIntEncodingTest.php
@@ -0,0 +1,289 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+require_once('test_helper.php');
+
+class FloatIntEncodingTest extends PHPUnit_Framework_TestCase
+{
+  const FLOAT_TYPE = 'float';
+  const DOUBLE_TYPE = 'double';
+
+  static $FLOAT_NAN;
+  static $FLOAT_POS_INF;
+  static $FLOAT_NEG_INF;
+  static $DOUBLE_NAN;
+  static $DOUBLE_POS_INF;
+  static $DOUBLE_NEG_INF;
+
+  static $LONG_BITS_NAN;
+  static $LONG_BITS_POS_INF;
+  static $LONG_BITS_NEG_INF;
+  static $INT_BITS_NAN;
+  static $INT_BITS_POS_INF;
+  static $INT_BITS_NEG_INF;
+
+  static function make_special_vals()
+  {
+    self::$DOUBLE_NAN     = (double) NAN;
+    self::$DOUBLE_POS_INF = (double) INF;
+    self::$DOUBLE_NEG_INF = (double) -INF;
+    self::$FLOAT_NAN      = (float) NAN;
+    self::$FLOAT_POS_INF  = (float) INF;
+    self::$FLOAT_NEG_INF  = (float) -INF;
+
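+    // pack('H*', ...) produces the IEEE-754 bit pattern in big-endian byte
+    // order; strrev() flips it to the little-endian layout that PHP's native
+    // float/double packing yields on the little-endian platforms these tests
+    // assume.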
+    self::$LONG_BITS_NAN     = strrev(pack('H*', '7ff8000000000000'));
+    self::$LONG_BITS_POS_INF = strrev(pack('H*', '7ff0000000000000'));
+    self::$LONG_BITS_NEG_INF = strrev(pack('H*', 'fff0000000000000'));
+    self::$INT_BITS_NAN      = strrev(pack('H*', '7fc00000'));
+    self::$INT_BITS_POS_INF  = strrev(pack('H*', '7f800000'));
+    self::$INT_BITS_NEG_INF  = strrev(pack('H*', 'ff800000'));
+  }
+
+  function setUp()
+  {
+    self::make_special_vals();
+  }
+
+  function test_special_values()
+  {
+    $this->assertTrue(is_float(self::$FLOAT_NAN), 'float NaN is a float');
+    $this->assertTrue(is_nan(self::$FLOAT_NAN), 'float NaN is NaN');
+    $this->assertFalse(is_infinite(self::$FLOAT_NAN), 'float NaN is not infinite');
+
+    $this->assertTrue(is_float(self::$FLOAT_POS_INF), 'float pos infinity is a float');
+    $this->assertTrue(is_infinite(self::$FLOAT_POS_INF), 'float pos infinity is infinite');
+    $this->assertTrue(0 < self::$FLOAT_POS_INF, 'float pos infinity is greater than 0');
+    $this->assertFalse(is_nan(self::$FLOAT_POS_INF), 'float pos infinity is not NaN');
+
+    $this->assertTrue(is_float(self::$FLOAT_NEG_INF), 'float neg infinity is a float');
+    $this->assertTrue(is_infinite(self::$FLOAT_NEG_INF), 'float neg infinity is infinite');
+    $this->assertTrue(0 > self::$FLOAT_NEG_INF, 'float neg infinity is less than 0');
+    $this->assertFalse(is_nan(self::$FLOAT_NEG_INF), 'float neg infinity is not NaN');
+
+    $this->assertTrue(is_double(self::$DOUBLE_NAN), 'double NaN is a double');
+    $this->assertTrue(is_nan(self::$DOUBLE_NAN), 'double NaN is NaN');
+    $this->assertFalse(is_infinite(self::$DOUBLE_NAN), 'double NaN is not infinite');
+
+    $this->assertTrue(is_double(self::$DOUBLE_POS_INF), 'double pos infinity is a double');
+    $this->assertTrue(is_infinite(self::$DOUBLE_POS_INF), 'double pos infinity is infinite');
+    $this->assertTrue(0 < self::$DOUBLE_POS_INF, 'double pos infinity is greater than 0');
+    $this->assertFalse(is_nan(self::$DOUBLE_POS_INF), 'double pos infinity is not NaN');
+
+    $this->assertTrue(is_double(self::$DOUBLE_NEG_INF), 'double neg infinity is a double');
+    $this->assertTrue(is_infinite(self::$DOUBLE_NEG_INF), 'double neg infinity is infinite');
+    $this->assertTrue(0 > self::$DOUBLE_NEG_INF, 'double neg infinity is less than 0');
+    $this->assertFalse(is_nan(self::$DOUBLE_NEG_INF), 'double neg infinity is not NaN');
+
+  }
+
+  function special_vals_provider()
+  {
+    self::make_special_vals();
+    return array(array(self::DOUBLE_TYPE, self::$DOUBLE_POS_INF, self::$LONG_BITS_POS_INF),
+                 array(self::DOUBLE_TYPE, self::$DOUBLE_NEG_INF, self::$LONG_BITS_NEG_INF),
+                 array(self::FLOAT_TYPE, self::$FLOAT_POS_INF, self::$INT_BITS_POS_INF),
+                 array(self::FLOAT_TYPE, self::$FLOAT_NEG_INF, self::$INT_BITS_NEG_INF));
+  }
+
+  /**
+   * @dataProvider special_vals_provider
+   */
+  function test_encoding_special_values($type, $val, $bits)
+  {
+    $this->assert_encode_values($type, $val, $bits);
+  }
+
+  function nan_vals_provider()
+  {
+    self::make_special_vals();
+    return array(array(self::DOUBLE_TYPE, self::$DOUBLE_NAN, self::$LONG_BITS_NAN),
+                 array(self::FLOAT_TYPE, self::$FLOAT_NAN, self::$INT_BITS_NAN));
+  }
+
+  /**
+   * @dataProvider nan_vals_provider
+   */
+  function test_encoding_nan_values($type, $val, $bits)
+  {
+    $this->assert_encode_nan_values($type, $val, $bits);
+  }
+
+  function normal_vals_provider()
+  {
+    /* Ruby snippet used to generate the expected values below:
+     *
+     *   def d2lb(d); [d].pack('E') end
+     *   dary = (-10..10).to_a + [-1234.2132, -211e23]
+     *   dary.each {|x| b = d2lb(x); puts %/array(self::DOUBLE_TYPE, (double) #{x}, #{b.inspect}, '#{b.unpack('h*')[0]}'),/}
+     *   def f2ib(f); [f].pack('e') end
+     *   fary = (-10..10).to_a + [-1234.5, -211.3e6]
+     *   fary.each {|x| b = f2ib(x); puts %/array(self::FLOAT_TYPE, (float) #{x}, #{b.inspect}, '#{b.unpack('h*')[0]}'),/}
+     */
+
+    return array(
+                 array(self::DOUBLE_TYPE, (double) -10, "\000\000\000\000\000\000$\300", '000000000000420c'),
+                 array(self::DOUBLE_TYPE, (double) -9, "\000\000\000\000\000\000\"\300", '000000000000220c'),
+                 array(self::DOUBLE_TYPE, (double) -8, "\000\000\000\000\000\000 \300", '000000000000020c'),
+                 array(self::DOUBLE_TYPE, (double) -7, "\000\000\000\000\000\000\034\300", '000000000000c10c'),
+                 array(self::DOUBLE_TYPE, (double) -6, "\000\000\000\000\000\000\030\300", '000000000000810c'),
+                 array(self::DOUBLE_TYPE, (double) -5, "\000\000\000\000\000\000\024\300", '000000000000410c'),
+                 array(self::DOUBLE_TYPE, (double) -4, "\000\000\000\000\000\000\020\300", '000000000000010c'),
+            /**/ array(self::DOUBLE_TYPE, (double) -3, "\000\000\000\000\000\000\010\300", '000000000000800c'),
+                 array(self::DOUBLE_TYPE, (double) -2, "\000\000\000\000\000\000\000\300", '000000000000000c'),
+                 array(self::DOUBLE_TYPE, (double) -1, "\000\000\000\000\000\000\360\277", '0000000000000ffb'),
+                 array(self::DOUBLE_TYPE, (double) 0, "\000\000\000\000\000\000\000\000", '0000000000000000'),
+                 array(self::DOUBLE_TYPE, (double) 1, "\000\000\000\000\000\000\360?", '0000000000000ff3'),
+                 array(self::DOUBLE_TYPE, (double) 2, "\000\000\000\000\000\000\000@", '0000000000000004'),
+            /**/ array(self::DOUBLE_TYPE, (double) 3, "\000\000\000\000\000\000\010@", '0000000000008004'),
+                 array(self::DOUBLE_TYPE, (double) 4, "\000\000\000\000\000\000\020@", '0000000000000104'),
+                 array(self::DOUBLE_TYPE, (double) 5, "\000\000\000\000\000\000\024@", '0000000000004104'),
+                 array(self::DOUBLE_TYPE, (double) 6, "\000\000\000\000\000\000\030@", '0000000000008104'),
+                 array(self::DOUBLE_TYPE, (double) 7, "\000\000\000\000\000\000\034@", '000000000000c104'),
+                 array(self::DOUBLE_TYPE, (double) 8, "\000\000\000\000\000\000 @", '0000000000000204'),
+                 array(self::DOUBLE_TYPE, (double) 9, "\000\000\000\000\000\000\"@", '0000000000002204'),
+                 array(self::DOUBLE_TYPE, (double) 10, "\000\000\000\000\000\000$@", '0000000000004204'),
+            /**/ array(self::DOUBLE_TYPE, (double) -1234.2132, "\007\316\031Q\332H\223\300", '70ec9115ad84390c'),
+                 array(self::DOUBLE_TYPE, (double) -2.11e+25, "\311\260\276J\031t1\305", '9c0beba49147135c'),
+
+                 array(self::FLOAT_TYPE, (float) -10, "\000\000 \301", '0000021c'),
+                 array(self::FLOAT_TYPE, (float) -9, "\000\000\020\301", '0000011c'),
+                 array(self::FLOAT_TYPE, (float) -8, "\000\000\000\301", '0000001c'),
+                 array(self::FLOAT_TYPE, (float) -7, "\000\000\340\300", '00000e0c'),
+                 array(self::FLOAT_TYPE, (float) -6, "\000\000\300\300", '00000c0c'),
+                 array(self::FLOAT_TYPE, (float) -5, "\000\000\240\300", '00000a0c'),
+                 array(self::FLOAT_TYPE, (float) -4, "\000\000\200\300", '0000080c'),
+                 array(self::FLOAT_TYPE, (float) -3, "\000\000@\300", '0000040c'),
+                 array(self::FLOAT_TYPE, (float) -2, "\000\000\000\300", '0000000c'),
+                 array(self::FLOAT_TYPE, (float) -1, "\000\000\200\277", '000008fb'),
+                 array(self::FLOAT_TYPE, (float) 0, "\000\000\000\000", '00000000'),
+                 array(self::FLOAT_TYPE, (float) 1, "\000\000\200?", '000008f3'),
+                 array(self::FLOAT_TYPE, (float) 2, "\000\000\000@", '00000004'),
+                 array(self::FLOAT_TYPE, (float) 3, "\000\000@@", '00000404'),
+                 array(self::FLOAT_TYPE, (float) 4, "\000\000\200@", '00000804'),
+                 array(self::FLOAT_TYPE, (float) 5, "\000\000\240@", '00000a04'),
+                 array(self::FLOAT_TYPE, (float) 6, "\000\000\300@", '00000c04'),
+                 array(self::FLOAT_TYPE, (float) 7, "\000\000\340@", '00000e04'),
+                 array(self::FLOAT_TYPE, (float) 8, "\000\000\000A", '00000014'),
+                 array(self::FLOAT_TYPE, (float) 9, "\000\000\020A", '00000114'),
+                 array(self::FLOAT_TYPE, (float) 10, "\000\000 A", '00000214'),
+                 array(self::FLOAT_TYPE, (float) -1234.5, "\000P\232\304", '0005a94c'),
+                 array(self::FLOAT_TYPE, (float) -211300000.0, "\352\202I\315", 'ae2894dc'),
+      );
+  }
+
+  function float_vals_provider()
+  {
+    $ary = array();
+
+    foreach ($this->normal_vals_provider() as $values)
+      if (self::FLOAT_TYPE == $values[0])
+        $ary []= array($values[0], $values[1], $values[2]);
+
+    return $ary;
+  }
+
+  function double_vals_provider()
+  {
+    $ary = array();
+
+    foreach ($this->normal_vals_provider() as $values)
+      if (self::DOUBLE_TYPE == $values[0])
+        $ary []= array($values[0], $values[1], $values[2]);
+
+    return $ary;
+  }
+
+
+  /**
+   * @dataProvider float_vals_provider
+   */
+  function test_encoding_float_values($type, $val, $bits)
+  {
+    $this->assert_encode_values($type, $val, $bits);
+  }
+
+  /**
+   * @dataProvider double_vals_provider
+   */
+  function test_encoding_double_values($type, $val, $bits)
+  {
+    $this->assert_encode_values($type, $val, $bits);
+  }
+
+  function assert_encode_values($type, $val, $bits)
+  {
+    if (self::FLOAT_TYPE == $type)
+    {
+      $decoder = array('AvroIOBinaryDecoder', 'int_bits_to_float');
+      $encoder = array('AvroIOBinaryEncoder', 'float_to_int_bits');
+    }
+    else
+    {
+      $decoder = array('AvroIOBinaryDecoder', 'long_bits_to_double');
+      $encoder = array('AvroIOBinaryEncoder', 'double_to_long_bits');
+    }
+
+    $decoded_bits_val = call_user_func($decoder, $bits);
+    $this->assertEquals($val, $decoded_bits_val,
+                        sprintf("%s\n expected: '%f'\n    given: '%f'",
+                                'DECODED BITS', $val, $decoded_bits_val));
+
+    $encoded_val_bits = call_user_func($encoder, $val);
+    $this->assertEquals($bits, $encoded_val_bits,
+                        sprintf("%s\n expected: '%s'\n    given: '%s'",
+                                'ENCODED VAL',
+                                AvroDebug::hex_string($bits),
+                                AvroDebug::hex_string($encoded_val_bits)));
+
+    $round_trip_value = call_user_func($decoder, $encoded_val_bits);
+    $this->assertEquals($val, $round_trip_value,
+                        sprintf("%s\n expected: '%f'\n     given: '%f'",
+                                'ROUND TRIP BITS', $val, $round_trip_value));
+  }
+
+  function assert_encode_nan_values($type, $val, $bits)
+  {
+    if (self::FLOAT_TYPE == $type)
+    {
+      $decoder = array('AvroIOBinaryDecoder', 'int_bits_to_float');
+      $encoder = array('AvroIOBinaryEncoder', 'float_to_int_bits');
+    }
+    else
+    {
+      $decoder = array('AvroIOBinaryDecoder', 'long_bits_to_double');
+      $encoder = array('AvroIOBinaryEncoder', 'double_to_long_bits');
+    }
+
+    $decoded_bits_val = call_user_func($decoder, $bits);
+    $this->assertTrue(is_nan($decoded_bits_val),
+                      sprintf("%s\n expected: '%f'\n    given: '%f'",
+                              'DECODED BITS', $val, $decoded_bits_val));
+
+    $encoded_val_bits = call_user_func($encoder, $val);
+    $this->assertEquals($bits, $encoded_val_bits,
+                        sprintf("%s\n expected: '%s'\n    given: '%s'",
+                                'ENCODED VAL',
+                                AvroDebug::hex_string($bits),
+                                AvroDebug::hex_string($encoded_val_bits)));
+
+    $round_trip_value = call_user_func($decoder, $encoded_val_bits);
+    $this->assertTrue(is_nan($round_trip_value),
+                      sprintf("%s\n expected: '%f'\n     given: '%f'",
+                              'ROUND TRIP BITS', $val, $round_trip_value));
+  }
+
+}
diff --git a/lang/php/test/IODatumReaderTest.php b/lang/php/test/IODatumReaderTest.php
new file mode 100644
index 0000000..dae2ef9
--- /dev/null
+++ b/lang/php/test/IODatumReaderTest.php
@@ -0,0 +1,36 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+require_once('test_helper.php');
+
+class IODatumReaderTest extends PHPUnit_Framework_TestCase
+{
+
+  public function testSchemaMatching()
+  {
+    $writers_schema = <<<JSON
+      { "type": "map",
+        "values": "bytes" }
+JSON;
+    $readers_schema = $writers_schema;
+    $this->assertTrue(AvroIODatumReader::schemas_match(
+                        AvroSchema::parse($writers_schema),
+                        AvroSchema::parse($readers_schema)));
+  }
+}
diff --git a/lang/php/test/InterOpTest.php b/lang/php/test/InterOpTest.php
new file mode 100644
index 0000000..43cd646
--- /dev/null
+++ b/lang/php/test/InterOpTest.php
@@ -0,0 +1,75 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+require_once('test_helper.php');
+
+class InterOpTest extends PHPUnit_Framework_TestCase
+{
+  var $projection_json;
+  var $projection;
+
+  public function setUp()
+  {
+    $interop_schema_file_name = AVRO_INTEROP_SCHEMA;
+    $this->projection_json = file_get_contents($interop_schema_file_name);
+    $this->projection = AvroSchema::parse($this->projection_json);
+  }
+
+  public function file_name_provider()
+  {
+    $data_dir = AVRO_BUILD_DATA_DIR;
+    $data_files = array();
+    if (!($dh = opendir($data_dir)))
+      die("Could not open data dir '$data_dir'\n");
+
+    /* TODO This currently only tries to read files of the form 'language.avro',
+     * but not 'language_deflate.avro' as the PHP implementation is not yet
+     * able to read deflate data files. When deflate support is added, change
+     * this to match *.avro. */
+    while ($file = readdir($dh))
+      if (0 < preg_match('/^[a-z]+\.avro$/', $file))
+        $data_files []= join(DIRECTORY_SEPARATOR, array($data_dir, $file));
+    closedir($dh);
+
+    $ary = array();
+    foreach ($data_files as $df)
+      $ary []= array($df);
+    return $ary;
+  }
+
+  /**
+   *  @dataProvider file_name_provider
+   */
+  public function test_read($file_name)
+  {
+    $dr = AvroDataIO::open_file(
+      $file_name, AvroFile::READ_MODE, $this->projection_json);
+
+    $data = $dr->data();
+
+    $this->assertNotEquals(0, count($data),
+                           sprintf("no data read from %s", $file_name));
+
+    foreach ($data as $idx => $datum)
+      $this->assertNotNull($datum, sprintf("null datum from %s", $file_name));
+
+  }
+
+}
diff --git a/lang/php/test/LongEncodingTest.php b/lang/php/test/LongEncodingTest.php
new file mode 100644
index 0000000..88fa58d
--- /dev/null
+++ b/lang/php/test/LongEncodingTest.php
@@ -0,0 +1,315 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+require_once('test_helper.php');
+
+class LongEncodingTest extends PHPUnit_Framework_TestCase
+{
+
+  function setUp()
+  {
+    Avro::check_platform();
+  }
+
+  static function is_64_bit() { return (PHP_INT_SIZE == 8); }
+  function skip_64_bit_test_on_32_bit()
+  {
+    if (!self::is_64_bit())
+      $this->markTestSkipped('Requires 64-bit platform');
+  }
+
+  function skip_if_no_gmp()
+  {
+    if (!extension_loaded('gmp'))
+      $this->markTestSkipped('Requires GMP PHP Extension.');
+  }
+
+  function assert_bit_shift($expected, $actual, $shift_type,
+                            $expected_binary, $actual_binary)
+  {
+    $this->assertEquals(
+      $expected, $actual,
+      sprintf("%s\nexpected: %d\n  actual: %d\nexpected b: %s\n  actual b: %s",
+              $shift_type, $expected, $actual,
+              $expected_binary, $actual_binary));
+  }
+
+  /**
+   * @dataProvider bit_shift_provider
+   */
+  function test_bit_shift($val, $shift, $expected_lval, $expected_rval, $lbin, $rbin)
+  {
+
+    $this->skip_64_bit_test_on_32_bit();
+
+    $lval = (int) ((int) $val << $shift);
+    $this->assert_bit_shift($expected_lval, strval($lval),
+                            'lshift', $lbin, decbin($lval));
+    $rval = ((int) $val >> $shift);
+    $this->assert_bit_shift($expected_rval, strval($rval),
+                            'rshift', $rbin, decbin($rval));
+  }
+
+  /**
+   * @dataProvider bit_shift_provider
+   */
+  function test_left_shift_gmp($val, $shift,
+                               $expected_lval, $expected_rval,
+                               $lbin, $rbin)
+  {
+    $this->skip_if_no_gmp();
+    $lval = gmp_strval(AvroGMP::shift_left($val, $shift));
+    $this->assert_bit_shift($expected_lval, $lval, 'gmp left shift',
+                            $lbin, decbin((int) $lval));
+  }
+
+  /**
+   * @dataProvider bit_shift_provider
+   */
+  function test_right_shift_gmp($val, $shift, $expected_lval, $expected_rval,
+                                $lbin, $rbin)
+  {
+    $this->skip_if_no_gmp();
+    $rval = gmp_strval(AvroGMP::shift_right($val, $shift));
+    $this->assert_bit_shift($expected_rval, $rval, 'gmp right shift',
+                            $rbin, decbin((int) $rval));
+  }
+
+  /**
+   * @dataProvider long_provider
+   */
+  function test_encode_long($val, $expected_bytes)
+  {
+    $this->skip_64_bit_test_on_32_bit();
+    $bytes = AvroIOBinaryEncoder::encode_long($val);
+    $this->assertEquals($expected_bytes, $bytes);
+  }
+
+  /**
+   * @dataProvider long_provider
+   */
+  function test_gmp_encode_long($val, $expected_bytes)
+  {
+    $this->skip_if_no_gmp();
+    $bytes = AvroGMP::encode_long($val);
+    $this->assertEquals($expected_bytes, $bytes);
+  }
+
+  /**
+   * @dataProvider long_provider
+   */
+  function test_decode_long_from_array($expected_val, $bytes)
+  {
+    $this->skip_64_bit_test_on_32_bit();
+    $ary = array_map('ord', str_split($bytes));
+    $val = AvroIOBinaryDecoder::decode_long_from_array($ary);
+    $this->assertEquals($expected_val, $val);
+  }
+
+  /**
+   * @dataProvider long_provider
+   */
+  function test_gmp_decode_long_from_array($expected_val, $bytes)
+  {
+    $this->skip_if_no_gmp();
+    $ary = array_map('ord', str_split($bytes));
+    $val = AvroGMP::decode_long_from_array($ary);
+    $this->assertEquals($expected_val, $val);
+  }
+
+  function long_provider()
+  {
+    return array(array('0', "\x0"),
+                 array('1', "\x2"),
+                 array('7', "\xe"),
+                 array('10000', "\xa0\x9c\x1"),
+                 array('2147483647', "\xfe\xff\xff\xff\xf"),
+                 array('98765432109', "\xda\x94\x87\xee\xdf\x5"),
+                 array('-1', "\x1"),
+                 array('-7', "\xd"),
+                 array('-10000', "\x9f\x9c\x1"),
+                 array('-2147483648', "\xff\xff\xff\xff\xf"),
+                 array('-98765432109', "\xd9\x94\x87\xee\xdf\x5")
+      );
+
+  }
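+
+  // The expected bytes above follow Avro's long encoding: zigzag first
+  // (n -> (n << 1) ^ (n >> 63)), then a little-endian base-128 varint.
+  // Worked example: -7 zigzags to 13, one byte "\xd"; 10000 zigzags to
+  // 20000 = 0x4e20, emitted seven bits at a time as "\xa0\x9c\x1".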
+
+  function bit_shift_provider()
+  {
+                      // val shift lval rval
+    return array(
+      array('0', 0, '0', '0',
+            '0',
+            '0'),
+      array('0', 1, '0', '0',
+            '0',
+            '0'),
+      array('0', 7, '0', '0',
+            '0',
+            '0'),
+      array('0', 63, '0', '0',
+            '0',
+            '0'),
+      array('1', 0, '1', '1',
+            '1',
+            '1'),
+      array('1', 1, '2', '0',
+            '10',
+            '0'),
+      array('1', 7, '128', '0',
+            '10000000',
+            '0'),
+      array('1', 63, '-9223372036854775808', '0',
+            '1000000000000000000000000000000000000000000000000000000000000000',
+            '0'),
+      array('100', 0, '100', '100',
+            '1100100',
+            '1100100'),
+      array('100', 1, '200', '50',
+            '11001000',
+            '110010'),
+      array('100', 7, '12800', '0',
+            '11001000000000',
+            '0'),
+      array('100', 63, '0', '0',
+            '0',
+            '0'),
+      array('1000000', 0, '1000000', '1000000',
+            '11110100001001000000',
+            '11110100001001000000'),
+      array('1000000', 1, '2000000', '500000',
+            '111101000010010000000',
+            '1111010000100100000'),
+      array('1000000', 7, '128000000', '7812',
+            '111101000010010000000000000',
+            '1111010000100'),
+      array('1000000', 63, '0', '0',
+            '0',
+            '0'),
+      array('2147483647', 0, '2147483647', '2147483647',
+            '1111111111111111111111111111111',
+            '1111111111111111111111111111111'),
+      array('2147483647', 1, '4294967294', '1073741823',
+            '11111111111111111111111111111110',
+            '111111111111111111111111111111'),
+      array('2147483647', 7, '274877906816', '16777215',
+            '11111111111111111111111111111110000000',
+            '111111111111111111111111'),
+      array('2147483647', 63, '-9223372036854775808', '0',
+            '1000000000000000000000000000000000000000000000000000000000000000',
+            '0'),
+      array('10000000000', 0, '10000000000', '10000000000',
+            '1001010100000010111110010000000000',
+            '1001010100000010111110010000000000'),
+      array('10000000000', 1, '20000000000', '5000000000',
+            '10010101000000101111100100000000000',
+            '100101010000001011111001000000000'),
+      array('10000000000', 7, '1280000000000', '78125000',
+            '10010101000000101111100100000000000000000',
+            '100101010000001011111001000'),
+      array('10000000000', 63, '0', '0',
+            '0',
+            '0'),
+      array('9223372036854775807', 0, '9223372036854775807', '9223372036854775807',
+            '111111111111111111111111111111111111111111111111111111111111111',
+            '111111111111111111111111111111111111111111111111111111111111111'),
+      array('9223372036854775807', 1, '-2', '4611686018427387903',
+            '1111111111111111111111111111111111111111111111111111111111111110',
+            '11111111111111111111111111111111111111111111111111111111111111'),
+      array('9223372036854775807', 7, '-128', '72057594037927935',
+            '1111111111111111111111111111111111111111111111111111111110000000',
+            '11111111111111111111111111111111111111111111111111111111'),
+      array('9223372036854775807', 63, '-9223372036854775808', '0',
+            '1000000000000000000000000000000000000000000000000000000000000000',
+            '0'),
+      array('-1', 0, '-1', '-1',
+            '1111111111111111111111111111111111111111111111111111111111111111',
+            '1111111111111111111111111111111111111111111111111111111111111111'),
+      array('-1', 1, '-2', '-1',
+            '1111111111111111111111111111111111111111111111111111111111111110',
+            '1111111111111111111111111111111111111111111111111111111111111111'),
+      array('-1', 7, '-128', '-1',
+            '1111111111111111111111111111111111111111111111111111111110000000',
+            '1111111111111111111111111111111111111111111111111111111111111111'),
+      array('-1', 63, '-9223372036854775808', '-1',
+            '1000000000000000000000000000000000000000000000000000000000000000',
+            '1111111111111111111111111111111111111111111111111111111111111111'),
+      array('-100', 0, '-100', '-100',
+            '1111111111111111111111111111111111111111111111111111111110011100',
+            '1111111111111111111111111111111111111111111111111111111110011100'),
+      array('-100', 1, '-200', '-50',
+            '1111111111111111111111111111111111111111111111111111111100111000',
+            '1111111111111111111111111111111111111111111111111111111111001110'),
+      array('-100', 7, '-12800', '-1',
+            '1111111111111111111111111111111111111111111111111100111000000000',
+            '1111111111111111111111111111111111111111111111111111111111111111'),
+      array('-100', 63, '0', '-1',
+            '0',
+            '1111111111111111111111111111111111111111111111111111111111111111'),
+      array('-1000000', 0, '-1000000', '-1000000',
+            '1111111111111111111111111111111111111111111100001011110111000000',
+            '1111111111111111111111111111111111111111111100001011110111000000'),
+      array('-1000000', 1, '-2000000', '-500000',
+            '1111111111111111111111111111111111111111111000010111101110000000',
+            '1111111111111111111111111111111111111111111110000101111011100000'),
+      array('-1000000', 7, '-128000000', '-7813',
+            '1111111111111111111111111111111111111000010111101110000000000000',
+            '1111111111111111111111111111111111111111111111111110000101111011'),
+      array('-1000000', 63, '0', '-1',
+            '0',
+            '1111111111111111111111111111111111111111111111111111111111111111'),
+      array('-2147483648', 0, '-2147483648', '-2147483648',
+            '1111111111111111111111111111111110000000000000000000000000000000',
+            '1111111111111111111111111111111110000000000000000000000000000000'),
+      array('-2147483648', 1, '-4294967296', '-1073741824',
+            '1111111111111111111111111111111100000000000000000000000000000000',
+            '1111111111111111111111111111111111000000000000000000000000000000'),
+      array('-2147483648', 7, '-274877906944', '-16777216',
+            '1111111111111111111111111100000000000000000000000000000000000000',
+            '1111111111111111111111111111111111111111000000000000000000000000'),
+      array('-2147483648', 63, '0', '-1',
+            '0',
+            '1111111111111111111111111111111111111111111111111111111111111111'),
+      array('-10000000000', 0, '-10000000000', '-10000000000',
+            '1111111111111111111111111111110110101011111101000001110000000000',
+            '1111111111111111111111111111110110101011111101000001110000000000'),
+      array('-10000000000', 1, '-20000000000', '-5000000000',
+            '1111111111111111111111111111101101010111111010000011100000000000',
+            '1111111111111111111111111111111011010101111110100000111000000000'),
+      array('-10000000000', 7, '-1280000000000', '-78125000',
+            '1111111111111111111111101101010111111010000011100000000000000000',
+            '1111111111111111111111111111111111111011010101111110100000111000'),
+      array('-10000000000', 63, '0', '-1',
+            '0',
+            '1111111111111111111111111111111111111111111111111111111111111111'),
+      array('-9223372036854775808', 0, '-9223372036854775808', '-9223372036854775808',
+            '1000000000000000000000000000000000000000000000000000000000000000',
+            '1000000000000000000000000000000000000000000000000000000000000000'),
+      array('-9223372036854775808', 1, '0', '-4611686018427387904',
+            '0',
+            '1100000000000000000000000000000000000000000000000000000000000000'),
+      array('-9223372036854775808', 7, '0', '-72057594037927936',
+            '0',
+            '1111111100000000000000000000000000000000000000000000000000000000'),
+      array('-9223372036854775808', 63, '0', '-1',
+            '0',
+            '1111111111111111111111111111111111111111111111111111111111111111'),
+      );
+  }
+
+}
diff --git a/lang/php/test/NameTest.php b/lang/php/test/NameTest.php
new file mode 100644
index 0000000..072f843
--- /dev/null
+++ b/lang/php/test/NameTest.php
@@ -0,0 +1,106 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+require_once('test_helper.php');
+
+class NameExample
+{
+  var $is_valid;
+  var $name;
+  var $namespace;
+  var $default_namespace;
+  var $expected_fullname;
+  function __construct($name, $namespace, $default_namespace, $is_valid,
+                       $expected_fullname=null)
+  {
+    $this->name = $name;
+    $this->namespace = $namespace;
+    $this->default_namespace = $default_namespace;
+    $this->is_valid = $is_valid;
+    $this->expected_fullname = $expected_fullname;
+  }
+
+  function __toString()
+  {
+    return var_export($this, true);
+  }
+}
+
+class NameTest extends PHPUnit_Framework_TestCase
+{
+
+  function fullname_provider()
+  {
+    $examples = array(new NameExample('foo', null, null, true, 'foo'),
+                      new NameExample('foo', 'bar', null, true, 'bar.foo'),
+                      new NameExample('bar.foo', 'baz', null, true, 'bar.foo'),
+                      new NameExample('_bar.foo', 'baz', null, true, '_bar.foo'),
+                      new NameExample('bar._foo', 'baz', null, true, 'bar._foo'),
+                      new NameExample('3bar.foo', 'baz', null, false),
+                      new NameExample('bar.3foo', 'baz', null, false),
+                      new NameExample('b4r.foo', 'baz', null, true, 'b4r.foo'),
+                      new NameExample('bar.f0o', 'baz', null, true, 'bar.f0o'),
+                      new NameExample(' .foo', 'baz', null, false),
+                      new NameExample('bar. foo', 'baz', null, false),
+                      new NameExample('bar. ', 'baz', null, false)
+                      );
+    $exes = array();
+    foreach ($examples as $ex)
+      $exes []= array($ex);
+    return $exes;
+  }
+
+  /**
+   * @dataProvider fullname_provider
+   */
+  function test_fullname($ex)
+  {
+    try
+    {
+      $name = new AvroName($ex->name, $ex->namespace, $ex->default_namespace);
+      $this->assertTrue($ex->is_valid);
+      $this->assertEquals($ex->expected_fullname, $name->fullname());
+    }
+    catch (AvroSchemaParseException $e)
+    {
+      $this->assertFalse($ex->is_valid, sprintf("%s:\n%s",
+                                                $ex,
+                                                $e->getMessage()));
+    }
+  }
+
+  function name_provider()
+  {
+    return array(array('a', true),
+                 array('_', true),
+                 array('1a', false),
+                 array('', false),
+                 array(null, false),
+                 array(' ', false),
+                 array('Cons', true));
+  }
+
+  /**
+   * @dataProvider name_provider
+   */
+  function test_name($name, $is_well_formed)
+  {
+    $this->assertEquals($is_well_formed, AvroName::is_well_formed_name($name), $name);
+  }
+}
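
The cases above encode Avro's name-resolution rules: a dot inside the name itself overrides any supplied namespace; otherwise the explicit namespace applies, and failing that the default namespace. A rough Python-side analogue, sketched under the assumption that avro.schema exposes a Name helper with a (name, namespace, default-namespace) constructor and a fullname property:

    from avro.schema import Name

    print Name('foo', 'bar', None).fullname      # bar.foo
    print Name('bar.foo', 'baz', None).fullname  # dotted names win: bar.foo
    print Name('foo', None, 'qux').fullname      # default namespace: qux.foo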
diff --git a/lang/php/test/ProtocolFileTest.php b/lang/php/test/ProtocolFileTest.php
new file mode 100644
index 0000000..eb50e34
--- /dev/null
+++ b/lang/php/test/ProtocolFileTest.php
@@ -0,0 +1,353 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+require_once('test_helper.php');
+
+// near-verbatim port of test_protocol.py
+class ProtocolFileTest extends PHPUnit_Framework_TestCase
+{
+	protected function setUp() {
+	}
+	
+	public function testParsing() {
+		$cnt=count($this->prot_parseable);
+		for ($i=0; $i<$cnt; $i++) {
+			try {
+				$prot=AvroProtocol::parse($this->prot_data[$i]);
+				// parse succeeded; fail if this spec was expected to be unparseable
+				$this->assertTrue($this->prot_parseable[$i]);
+			} catch (AvroSchemaParseException $x) {
+				// an exception is expected when the protocol spec is unparseable
+				$this->assertFalse($this->prot_parseable[$i]);
+			}
+		}
+	}
+	
+	// test data
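+	// index 6 (TestInvalidRepeatedName) is expected to fail to parse: it
+	// redefines ReferencedRecord within the same namespace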
+	private $prot_parseable=array(true, true, true, true, true, true, false, true, true);
+	private $prot_data = array(
+<<<'DATUM'
+{
+  "namespace": "com.acme",
+  "protocol": "HelloWorld",
+
+  "types": [
+    {"name": "Greeting", "type": "record", "fields": [
+      {"name": "message", "type": "string"}]},
+    {"name": "Curse", "type": "error", "fields": [
+      {"name": "message", "type": "string"}]}
+  ],
+
+  "messages": {
+    "hello": {
+      "request": [{"name": "greeting", "type": "Greeting" }],
+      "response": "Greeting",
+      "errors": ["Curse"]
+    }
+  }
+}
+DATUM
+,
+<<<'DATUM'
+{"namespace": "org.apache.avro.test",
+ "protocol": "Simple",
+
+ "types": [
+     {"name": "Kind", "type": "enum", "symbols": ["FOO","BAR","BAZ"]},
+
+     {"name": "MD5", "type": "fixed", "size": 16},
+
+     {"name": "TestRecord", "type": "record",
+      "fields": [
+          {"name": "name", "type": "string", "order": "ignore"},
+          {"name": "kind", "type": "Kind", "order": "descending"},
+          {"name": "hash", "type": "MD5"}
+      ]
+     },
+
+     {"name": "TestError", "type": "error", "fields": [
+         {"name": "message", "type": "string"}
+      ]
+     }
+
+ ],
+
+ "messages": {
+
+     "hello": {
+         "request": [{"name": "greeting", "type": "string"}],
+         "response": "string"
+     },
+
+     "echo": {
+         "request": [{"name": "record", "type": "TestRecord"}],
+         "response": "TestRecord"
+     },
+
+     "add": {
+         "request": [{"name": "arg1", "type": "int"}, {"name": "arg2", "type": "int"}],
+         "response": "int"
+     },
+
+     "echoBytes": {
+         "request": [{"name": "data", "type": "bytes"}],
+         "response": "bytes"
+     },
+
+     "error": {
+         "request": [],
+         "response": "null",
+         "errors": ["TestError"]
+     }
+ }
+
+}
+DATUM
+,
+<<<'DATUM'
+{"namespace": "org.apache.avro.test.namespace",
+ "protocol": "TestNamespace",
+
+ "types": [
+     {"name": "org.apache.avro.test.util.MD5", "type": "fixed", "size": 16},
+     {"name": "TestRecord", "type": "record",
+      "fields": [ {"name": "hash", "type": "org.apache.avro.test.util.MD5"} ]
+     },
+     {"name": "TestError", "namespace": "org.apache.avro.test.errors",
+      "type": "error", "fields": [ {"name": "message", "type": "string"} ]
+     }
+ ],
+
+ "messages": {
+     "echo": {
+         "request": [{"name": "record", "type": "TestRecord"}],
+         "response": "TestRecord"
+     },
+
+     "error": {
+         "request": [],
+         "response": "null",
+         "errors": ["org.apache.avro.test.errors.TestError"]
+     }
+
+ }
+
+}
+DATUM
+,
+<<<'DATUM'
+{"namespace": "org.apache.avro.test.namespace",
+ "protocol": "TestImplicitNamespace",
+
+ "types": [
+     {"name": "org.apache.avro.test.util.MD5", "type": "fixed", "size": 16},
+     {"name": "ReferencedRecord", "type": "record", 
+       "fields": [ {"name": "foo", "type": "string"} ] },
+     {"name": "TestRecord", "type": "record",
+      "fields": [ {"name": "hash", "type": "org.apache.avro.test.util.MD5"},
+                  {"name": "unqalified", "type": "ReferencedRecord"} ]
+     },
+     {"name": "TestError",
+      "type": "error", "fields": [ {"name": "message", "type": "string"} ]
+     }
+ ],
+
+ "messages": {
+     "echo": {
+         "request": [{"name": "qualified", 
+             "type": "org.apache.avro.test.namespace.TestRecord"}],
+         "response": "TestRecord"
+     },
+
+     "error": {
+         "request": [],
+         "response": "null",
+         "errors": ["org.apache.avro.test.namespace.TestError"]
+     }
+
+ }
+
+}
+DATUM
+,
+<<<'DATUM'
+{"namespace": "org.apache.avro.test.namespace",
+ "protocol": "TestNamespaceTwo",
+
+ "types": [
+     {"name": "org.apache.avro.test.util.MD5", "type": "fixed", "size": 16},
+     {"name": "ReferencedRecord", "type": "record", 
+       "namespace": "org.apache.avro.other.namespace", 
+       "fields": [ {"name": "foo", "type": "string"} ] },
+     {"name": "TestRecord", "type": "record",
+      "fields": [ {"name": "hash", "type": "org.apache.avro.test.util.MD5"},
+                  {"name": "qualified", 
+                    "type": "org.apache.avro.other.namespace.ReferencedRecord"} 
+                ]
+     },
+     {"name": "TestError",
+      "type": "error", "fields": [ {"name": "message", "type": "string"} ]
+     }
+ ],
+
+ "messages": {
+     "echo": {
+         "request": [{"name": "qualified", 
+             "type": "org.apache.avro.test.namespace.TestRecord"}],
+         "response": "TestRecord"
+     },
+
+     "error": {
+         "request": [],
+         "response": "null",
+         "errors": ["org.apache.avro.test.namespace.TestError"]
+     }
+
+ }
+
+}
+DATUM
+,
+<<<'DATUM'
+{"namespace": "org.apache.avro.test.namespace",
+ "protocol": "TestValidRepeatedName",
+
+ "types": [
+     {"name": "org.apache.avro.test.util.MD5", "type": "fixed", "size": 16},
+     {"name": "ReferencedRecord", "type": "record", 
+       "namespace": "org.apache.avro.other.namespace", 
+       "fields": [ {"name": "foo", "type": "string"} ] },
+     {"name": "ReferencedRecord", "type": "record", 
+       "fields": [ {"name": "bar", "type": "double"} ] },
+     {"name": "TestError",
+      "type": "error", "fields": [ {"name": "message", "type": "string"} ]
+     }
+ ],
+
+ "messages": {
+     "echo": {
+         "request": [{"name": "qualified", 
+             "type": "ReferencedRecord"}],
+         "response": "org.apache.avro.other.namespace.ReferencedRecord"
+     },
+
+     "error": {
+         "request": [],
+         "response": "null",
+         "errors": ["org.apache.avro.test.namespace.TestError"]
+     }
+
+ }
+
+}
+DATUM
+,
+<<<'DATUM'
+{"namespace": "org.apache.avro.test.namespace",
+ "protocol": "TestInvalidRepeatedName",
+
+ "types": [
+     {"name": "org.apache.avro.test.util.MD5", "type": "fixed", "size": 16},
+     {"name": "ReferencedRecord", "type": "record", 
+       "fields": [ {"name": "foo", "type": "string"} ] },
+     {"name": "ReferencedRecord", "type": "record", 
+       "fields": [ {"name": "bar", "type": "double"} ] },
+     {"name": "TestError",
+      "type": "error", "fields": [ {"name": "message", "type": "string"} ]
+     }
+ ],
+
+ "messages": {
+     "echo": {
+         "request": [{"name": "qualified", 
+             "type": "ReferencedRecord"}],
+         "response": "org.apache.avro.other.namespace.ReferencedRecord"
+     },
+
+     "error": {
+         "request": [],
+         "response": "null",
+         "errors": ["org.apache.avro.test.namespace.TestError"]
+     }
+
+ }
+
+}
+DATUM
+,
+<<<'DATUM'
+{"namespace": "org.apache.avro.test",
+ "protocol": "BulkData",
+
+ "types": [],
+
+ "messages": {
+
+     "read": {
+         "request": [],
+         "response": "bytes"
+     },
+
+     "write": {
+         "request": [ {"name": "data", "type": "bytes"} ],
+         "response": "null"
+     }
+
+ }
+
+}
+DATUM
+,
+<<<'DATUM'
+{
+  "protocol" : "API",
+  "namespace" : "xyz.api",
+  "types" : [ {
+    "type" : "enum",
+    "name" : "Symbology",
+    "namespace" : "xyz.api.product",
+    "symbols" : [ "OPRA", "CUSIP", "ISIN", "SEDOL" ]
+  }, {
+    "type" : "record",
+    "name" : "Symbol",
+    "namespace" : "xyz.api.product",
+    "fields" : [ {
+      "name" : "symbology",
+      "type" : "xyz.api.product.Symbology"
+    }, {
+      "name" : "symbol",
+      "type" : "string"
+    } ]
+  }, {
+    "type" : "record",
+    "name" : "MultiSymbol",
+    "namespace" : "xyz.api.product",
+    "fields" : [ {
+      "name" : "symbols",
+      "type" : {
+        "type" : "map",
+        "values" : "xyz.api.product.Symbol"
+      }
+    } ]
+  } ],
+  "messages" : {
+  }
+}
+DATUM
+	);
+}
diff --git a/lang/php/test/SchemaTest.php b/lang/php/test/SchemaTest.php
new file mode 100644
index 0000000..10f747a
--- /dev/null
+++ b/lang/php/test/SchemaTest.php
@@ -0,0 +1,463 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+require_once('test_helper.php');
+
+class SchemaExample
+{
+  var $schema_string;
+  var $is_valid;
+  var $name;
+  var $comment;
+  var $normalized_schema_string;
+  function __construct($schema_string, $is_valid, $normalized_schema_string=null,
+                       $name=null, $comment=null)
+  {
+    $this->schema_string = $schema_string;
+    $this->is_valid = $is_valid;
+    $this->name = $name ? $name : $schema_string;
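+    // by default, normalize the expected schema string by round-tripping it
+    // through json_decode/json_encode to strip insignificant whitespace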
+    $this->normalized_schema_string = $normalized_schema_string
+      ? $normalized_schema_string : json_encode(json_decode($schema_string, true));
+    $this->comment = $comment;
+  }
+}
+
+class SchemaTest extends PHPUnit_Framework_TestCase
+{
+  static $examples = array();
+  static $valid_examples = array();
+
+  protected static function make_primitive_examples()
+  {
+    $examples = array();
+    foreach (array('null', 'boolean',
+                   'int', 'long',
+                   'float', 'double',
+                   'bytes', 'string')
+             as $type)
+    {
+      $examples []= new SchemaExample(sprintf('"%s"', $type), true);
+      $examples []= new SchemaExample(sprintf('{"type": "%s"}', $type), true, sprintf('"%s"', $type));
+    }
+    return $examples;
+  }
+
+  protected static function make_examples()
+  {
+    $primitive_examples = array_merge(array(new SchemaExample('"True"', false),
+                                            new SchemaExample('{"no_type": "test"}', false),
+                                            new SchemaExample('{"type": "panther"}', false)),
+                                        self::make_primitive_examples());
+
+    $array_examples = array(
+      new SchemaExample('{"type": "array", "items": "long"}', true),
+      new SchemaExample('
+    {"type": "array",
+     "items": {"type": "enum", "name": "Test", "symbols": ["A", "B"]}}
+    ', true));
+
+    $map_examples = array(
+      new SchemaExample('{"type": "map", "values": "long"}', true),
+      new SchemaExample('
+    {"type": "map",
+     "values": {"type": "enum", "name": "Test", "symbols": ["A", "B"]}}
+    ', true));
+
+    $union_examples = array(
+      new SchemaExample('["string", "null", "long"]', true),
+      new SchemaExample('["null", "null"]', false),
+      new SchemaExample('["long", "long"]', false),
+      new SchemaExample('
+    [{"type": "array", "items": "long"}
+     {"type": "array", "items": "string"}]
+    ', false),
+      new SchemaExample('["long",
+                          {"type": "long"},
+                          "int"]', false),
+      new SchemaExample('["long",
+                          {"type": "array", "items": "long"},
+                          {"type": "map", "values": "long"},
+                          "int"]', true),
+      new SchemaExample('["long",
+                          ["string", "null"],
+                          "int"]', false),
+      new SchemaExample('["long",
+                          ["string", "null"],
+                          "int"]', false),
+      new SchemaExample('["null", "boolean", "int", "long", "float", "double",
+                          "string", "bytes",
+                          {"type": "array", "items":"int"},
+                          {"type": "map", "values":"int"},
+                          {"name": "bar", "type":"record",
+                           "fields":[{"name":"label", "type":"string"}]},
+                          {"name": "foo", "type":"fixed",
+                           "size":16},
+                          {"name": "baz", "type":"enum", "symbols":["A", "B", "C"]}
+                         ]', true, '["null","boolean","int","long","float","double","string","bytes",{"type":"array","items":"int"},{"type":"map","values":"int"},{"type":"record","name":"bar","fields":[{"name":"label","type":"string"}]},{"type":"fixed","name":"foo","size":16},{"type":"enum","name":"baz","symbols":["A","B","C"]}]'),
+      new SchemaExample('
+    [{"name":"subtract", "namespace":"com.example",
+      "type":"record",
+      "fields":[{"name":"minuend", "type":"int"},
+                {"name":"subtrahend", "type":"int"}]},
+      {"name": "divide", "namespace":"com.example",
+      "type":"record",
+      "fields":[{"name":"quotient", "type":"int"},
+                {"name":"dividend", "type":"int"}]},
+      {"type": "array", "items": "string"}]
+    ', true, '[{"type":"record","name":"subtract","namespace":"com.example","fields":[{"name":"minuend","type":"int"},{"name":"subtrahend","type":"int"}]},{"type":"record","name":"divide","namespace":"com.example","fields":[{"name":"quotient","type":"int"},{"name":"dividend","type":"int"}]},{"type":"array","items":"string"}]'),
+      );
+
+    $fixed_examples = array(
+      new SchemaExample('{"type": "fixed", "name": "Test", "size": 1}', true),
+      new SchemaExample('
+    {"type": "fixed",
+     "name": "MyFixed",
+     "namespace": "org.apache.hadoop.avro",
+     "size": 1}
+    ', true),
+      new SchemaExample('
+    {"type": "fixed",
+     "name": "Missing size"}
+    ', false),
+      new SchemaExample('
+    {"type": "fixed",
+     "size": 314}
+    ', false),
+      new SchemaExample('{"type":"fixed","name":"ex","doc":"this should be ignored","size": 314}',
+                        true,
+                        '{"type":"fixed","name":"ex","size":314}'),
+      new SchemaExample('{"name": "bar",
+                          "namespace": "com.example",
+                          "type": "fixed",
+                          "size": 32 }', true,
+                        '{"type":"fixed","name":"bar","namespace":"com.example","size":32}'),
+      new SchemaExample('{"name": "com.example.bar",
+                          "type": "fixed",
+                          "size": 32 }', true,
+        '{"type":"fixed","name":"bar","namespace":"com.example","size":32}'));
+
+    $fixed_examples []= new SchemaExample(
+      '{"type":"fixed","name":"_x.bar","size":4}', true,
+      '{"type":"fixed","name":"bar","namespace":"_x","size":4}');
+    $fixed_examples []= new SchemaExample(
+      '{"type":"fixed","name":"baz._x","size":4}', true,
+      '{"type":"fixed","name":"_x","namespace":"baz","size":4}');
+    $fixed_examples []= new SchemaExample(
+      '{"type":"fixed","name":"baz.3x","size":4}', false);
+
+    $enum_examples = array(
+      new SchemaExample('{"type": "enum", "name": "Test", "symbols": ["A", "B"]}', true),
+      new SchemaExample('
+    {"type": "enum",
+     "name": "Status",
+     "symbols": "Normal Caution Critical"}
+    ', false),
+      new SchemaExample('
+    {"type": "enum",
+     "name": [ 0, 1, 1, 2, 3, 5, 8 ],
+     "symbols": ["Golden", "Mean"]}
+    ', false),
+      new SchemaExample('
+    {"type": "enum",
+     "symbols" : ["I", "will", "fail", "no", "name"]}
+    ', false),
+      new SchemaExample('
+    {"type": "enum",
+     "name": "Test"
+     "symbols" : ["AA", "AA"]}
+    ', false),
+      new SchemaExample('{"type":"enum","name":"Test","symbols":["AA", 16]}',
+                        false),
+      new SchemaExample('
+    {"type": "enum",
+     "name": "blood_types",
+     "doc": "AB is freaky.",
+     "symbols" : ["A", "AB", "B", "O"]}
+    ', true),
+      new SchemaExample('
+    {"type": "enum",
+     "name": "blood-types",
+     "doc": 16,
+     "symbols" : ["A", "AB", "B", "O"]}
+    ', false)
+      );
+
+
+    $record_examples = array();
+    $record_examples []= new SchemaExample('
+    {"type": "record",
+     "name": "Test",
+     "fields": [{"name": "f",
+                 "type": "long"}]}
+    ', true);
+    $record_examples []= new SchemaExample('
+    {"type": "error",
+     "name": "Test",
+     "fields": [{"name": "f",
+                 "type": "long"}]}
+    ', true);
+    $record_examples []= new SchemaExample('
+    {"type": "record",
+     "name": "Node",
+     "fields": [{"name": "label", "type": "string"},
+                {"name": "children",
+                 "type": {"type": "array", "items": "Node"}}]}
+    ', true);
+    $record_examples []= new SchemaExample('
+    {"type": "record",
+     "name": "ListLink",
+     "fields": [{"name": "car", "type": "int"},
+                {"name": "cdr", "type": "ListLink"}]}
+    ', true);
+    $record_examples []= new SchemaExample('
+    {"type": "record",
+     "name": "Lisp",
+     "fields": [{"name": "value",
+                 "type": ["null", "string"]}]}
+    ', true);
+    $record_examples []= new SchemaExample('
+    {"type": "record",
+     "name": "Lisp",
+     "fields": [{"name": "value",
+                 "type": ["null", "string",
+                          {"type": "record",
+                           "name": "Cons",
+                           "fields": [{"name": "car", "type": "string"},
+                                      {"name": "cdr", "type": "string"}]}]}]}
+    ', true);
+    $record_examples []= new SchemaExample('
+    {"type": "record",
+     "name": "Lisp",
+     "fields": [{"name": "value",
+                 "type": ["null", "string",
+                          {"type": "record",
+                           "name": "Cons",
+                           "fields": [{"name": "car", "type": "Lisp"},
+                                      {"name": "cdr", "type": "Lisp"}]}]}]}
+    ', true);
+    $record_examples []= new SchemaExample('
+    {"type": "record",
+     "name": "HandshakeRequest",
+     "namespace": "org.apache.avro.ipc",
+     "fields": [{"name": "clientHash",
+                 "type": {"type": "fixed", "name": "MD5", "size": 16}},
+                {"name": "meta",
+                 "type": ["null", {"type": "map", "values": "bytes"}]}]}
+    ', true);
+    $record_examples []= new SchemaExample('
+    {"type": "record",
+     "name": "HandshakeRequest",
+     "namespace": "org.apache.avro.ipc",
+     "fields": [{"name": "clientHash",
+                 "type": {"type": "fixed", "name": "MD5", "size": 16}},
+                {"name": "clientProtocol", "type": ["null", "string"]},
+                {"name": "serverHash", "type": "MD5"},
+                {"name": "meta",
+                 "type": ["null", {"type": "map", "values": "bytes"}]}]}
+    ', true);
+    $record_examples []= new SchemaExample('
+    {"type": "record",
+     "name": "HandshakeResponse",
+     "namespace": "org.apache.avro.ipc",
+     "fields": [{"name": "match",
+                 "type": {"type": "enum",
+                          "name": "HandshakeMatch",
+                          "symbols": ["BOTH", "CLIENT", "NONE"]}},
+                {"name": "serverProtocol", "type": ["null", "string"]},
+                {"name": "serverHash",
+                 "type": ["null",
+                          {"name": "MD5", "size": 16, "type": "fixed"}]},
+                {"name": "meta",
+                 "type": ["null", {"type": "map", "values": "bytes"}]}]}
+    ', true,
+     '{"type":"record","name":"HandshakeResponse","namespace":"org.apache.avro.ipc","fields":[{"name":"match","type":{"type":"enum","name":"HandshakeMatch","symbols":["BOTH","CLIENT","NONE"]}},{"name":"serverProtocol","type":["null","string"]},{"name":"serverHash","type":["null",{"type":"fixed","name":"MD5","size":16}]},{"name":"meta","type":["null",{"type":"map","values":"bytes"}]}]}'
+      );
+    $record_examples []= new SchemaExample('{"type": "record",
+ "namespace": "org.apache.avro",
+ "name": "Interop",
+ "fields": [{"type": {"fields": [{"type": {"items": "org.apache.avro.Node",
+                                           "type": "array"},
+                                  "name": "children"}],
+                      "type": "record",
+                      "name": "Node"},
+             "name": "recordField"}]}
+', true, '{"type":"record","name":"Interop","namespace":"org.apache.avro","fields":[{"name":"recordField","type":{"type":"record","name":"Node","fields":[{"name":"children","type":{"type":"array","items":"Node"}}]}}]}');
+    $record_examples [] = new SchemaExample('{"type": "record",
+ "namespace": "org.apache.avro",
+ "name": "Interop",
+ "fields": [{"type": {"symbols": ["A", "B", "C"], "type": "enum", "name": "Kind"},
+             "name": "enumField"},
+            {"type": {"fields": [{"type": "string", "name": "label"},
+                                 {"type": {"items": "org.apache.avro.Node", "type": "array"},
+                                  "name": "children"}],
+                      "type": "record",
+                      "name": "Node"},
+             "name": "recordField"}]}', true, '{"type":"record","name":"Interop","namespace":"org.apache.avro","fields":[{"name":"enumField","type":{"type":"enum","name":"Kind","symbols":["A","B","C"]}},{"name":"recordField","type":{"type":"record","name":"Node","fields":[{"name":"label","type":"string"},{"name":"children","type":{"type":"array","items":"Node"}}]}}]}');
+
+    $record_examples []= new SchemaExample('
+    {"type": "record",
+     "name": "Interop",
+     "namespace": "org.apache.avro",
+     "fields": [{"name": "intField", "type": "int"},
+                {"name": "longField", "type": "long"},
+                {"name": "stringField", "type": "string"},
+                {"name": "boolField", "type": "boolean"},
+                {"name": "floatField", "type": "float"},
+                {"name": "doubleField", "type": "double"},
+                {"name": "bytesField", "type": "bytes"},
+                {"name": "nullField", "type": "null"},
+                {"name": "arrayField",
+                 "type": {"type": "array", "items": "double"}},
+                {"name": "mapField",
+                 "type": {"type": "map",
+                          "values": {"name": "Foo",
+                                     "type": "record",
+                                     "fields": [{"name": "label",
+                                                 "type": "string"}]}}},
+                {"name": "unionField",
+                 "type": ["boolean",
+                          "double",
+                          {"type": "array", "items": "bytes"}]},
+                {"name": "enumField",
+                 "type": {"type": "enum",
+                          "name": "Kind",
+                          "symbols": ["A", "B", "C"]}},
+                {"name": "fixedField",
+                 "type": {"type": "fixed", "name": "MD5", "size": 16}},
+                {"name": "recordField",
+                 "type": {"type": "record",
+                          "name": "Node",
+                          "fields": [{"name": "label", "type": "string"},
+                                     {"name": "children",
+                                      "type": {"type": "array",
+                                               "items": "Node"}}]}}]}
+    ', true,
+    '{"type":"record","name":"Interop","namespace":"org.apache.avro","fields":[{"name":"intField","type":"int"},{"name":"longField","type":"long"},{"name":"stringField","type":"string"},{"name":"boolField","type":"boolean"},{"name":"floatField","type":"float"},{"name":"doubleField","type":"double"},{"name":"bytesField","type":"bytes"},{"name":"nullField","type":"null"},{"name":"arrayField","type":{"type":"array","items":"double"}},{"name":"mapField","type":{"type":"map","values":{"type": [...]
+    $record_examples []= new SchemaExample('{"type": "record", "namespace": "org.apache.avro", "name": "Interop", "fields": [{"type": "int", "name": "intField"}, {"type": "long", "name": "longField"}, {"type": "string", "name": "stringField"}, {"type": "boolean", "name": "boolField"}, {"type": "float", "name": "floatField"}, {"type": "double", "name": "doubleField"}, {"type": "bytes", "name": "bytesField"}, {"type": "null", "name": "nullField"}, {"type": {"items": "double", "type": "arra [...]
+', true, '{"type":"record","name":"Interop","namespace":"org.apache.avro","fields":[{"name":"intField","type":"int"},{"name":"longField","type":"long"},{"name":"stringField","type":"string"},{"name":"boolField","type":"boolean"},{"name":"floatField","type":"float"},{"name":"doubleField","type":"double"},{"name":"bytesField","type":"bytes"},{"name":"nullField","type":"null"},{"name":"arrayField","type":{"type":"array","items":"double"}},{"name":"mapField","type":{"type":"map","values":{"t [...]
+    $record_examples []= new SchemaExample('
+    {"type": "record",
+     "name": "ipAddr",
+     "fields": [{"name": "addr",
+                 "type": [{"name": "IPv6", "type": "fixed", "size": 16},
+                          {"name": "IPv4", "type": "fixed", "size": 4}]}]}
+    ', true,
+    '{"type":"record","name":"ipAddr","fields":[{"name":"addr","type":[{"type":"fixed","name":"IPv6","size":16},{"type":"fixed","name":"IPv4","size":4}]}]}');
+    $record_examples []= new SchemaExample('
+    {"type": "record",
+     "name": "Address",
+     "fields": [{"type": "string"},
+                {"type": "string", "name": "City"}]}
+    ', false);
+    $record_examples []= new SchemaExample('
+    {"type": "record",
+     "name": "Event",
+     "fields": [{"name": "Sponsor"},
+                {"name": "City", "type": "string"}]}
+    ', false);
+    $record_examples []= new SchemaExample('
+    {"type": "record",
+     "fields": "His vision, from the constantly passing bars,"
+     "name", "Rainer"}
+    ', false);
+     $record_examples []= new SchemaExample('
+    {"name": ["Tom", "Jerry"],
+     "type": "record",
+     "fields": [{"name": "name", "type": "string"}]}
+    ', false);
+     $record_examples []= new SchemaExample('
+    {"type":"record","name":"foo","doc":"doc string",
+     "fields":[{"name":"bar", "type":"int", "order":"ascending", "default":1}]}
+',
+                                            true,
+                                            '{"type":"record","name":"foo","doc":"doc string","fields":[{"name":"bar","type":"int","default":1,"order":"ascending"}]}');
+     $record_examples []= new SchemaExample('
+    {"type":"record", "name":"foo", "doc":"doc string",
+     "fields":[{"name":"bar", "type":"int", "order":"bad"}]}
+', false);
+
+    self::$examples = array_merge($primitive_examples,
+                                  $fixed_examples,
+                                  $enum_examples,
+                                  $array_examples,
+                                  $map_examples,
+                                  $union_examples,
+                                  $record_examples);
+    self::$valid_examples = array();
+    foreach (self::$examples as $example)
+    {
+      if ($example->is_valid)
+        self::$valid_examples []= $example;
+    }
+  }
+
+  function test_json_decode()
+  {
+    $this->assertEquals(json_decode('null', true), null);
+    $this->assertEquals(json_decode('32', true), 32);
+    $this->assertEquals(json_decode('"32"', true), '32');
+    $this->assertEquals((array) json_decode('{"foo": 27}'), array("foo" => 27));
+    $this->assertTrue(is_array(json_decode('{"foo": 27}', true)));
+    $this->assertEquals(json_decode('{"foo": 27}', true), array("foo" => 27));
+    $this->assertEquals(json_decode('["bar", "baz", "blurfl"]', true),
+                        array("bar", "baz", "blurfl"));
+    $this->assertFalse(is_array(json_decode('null', true)));
+    $this->assertEquals(json_decode('{"type": "null"}', true), array("type" => 'null'));
+    foreach (array('true', 'True', 'TRUE', 'tRue') as $truthy)
+    {
+      $this->assertEquals(json_decode($truthy, true), true, $truthy);
+    }
+    $this->assertEquals(json_decode('"boolean"'), 'boolean');
+  }
+
+  function schema_examples_provider()
+  {
+    self::make_examples();
+    $ary = array();
+    foreach (self::$examples as $example)
+      $ary []= array($example);
+    return $ary;
+  }
+
+  /**
+   * @dataProvider schema_examples_provider
+   */
+  function test_parse($example)
+  {
+    $schema_string = $example->schema_string;
+    try
+    {
+      $normalized_schema_string = $example->normalized_schema_string;
+      $schema = AvroSchema::parse($schema_string);
+      $this->assertTrue($example->is_valid,
+                        sprintf("schema_string: %s\n",
+                                $schema_string));
+      $this->assertEquals($normalized_schema_string, strval($schema));
+    }
+    catch (AvroSchemaParseException $e)
+    {
+      $this->assertFalse($example->is_valid,
+                         sprintf("schema_string: %s\n%s",
+                                 $schema_string,
+                                 $e->getMessage()));
+    }
+  }
+
+}
diff --git a/lang/php/test/StringIOTest.php b/lang/php/test/StringIOTest.php
new file mode 100644
index 0000000..e1f5865
--- /dev/null
+++ b/lang/php/test/StringIOTest.php
@@ -0,0 +1,72 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+require_once('test_helper.php');
+
+class StringIOTest extends PHPUnit_Framework_TestCase
+{
+
+  public function test_write()
+  {
+    $strio = new AvroStringIO();
+    $this->assertEquals(0, $strio->tell());
+    $str = 'foo';
+    $strlen = strlen($str);
+    $this->assertEquals($strlen, $strio->write($str));
+    $this->assertEquals($strlen, $strio->tell());
+  }
+
+  public function test_seek()
+  {
+    $this->markTestIncomplete('This test has not been implemented yet.');
+  }
+
+  public function test_tell()
+  {
+    $this->markTestIncomplete('This test has not been implemented yet.');
+  }
+
+  public function test_read()
+  {
+    $this->markTestIncomplete('This test has not been implemented yet.');
+  }
+
+  public function test_string_rep()
+  {
+    $writers_schema_json = '"null"';
+    $writers_schema = AvroSchema::parse($writers_schema_json);
+    $datum_writer = new AvroIODatumWriter($writers_schema);
+    $strio = new AvroStringIO();
+    $this->assertEquals('', $strio->string());
+    $dw = new AvroDataIOWriter($strio, $datum_writer, $writers_schema_json);
+    $dw->close(); 
+
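+    // 57 bytes is just the container header: the 4-byte magic, the metadata
+    // map (avro.schema and avro.codec), and the 16-byte sync marker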
+    $this->assertEquals(57, strlen($strio->string()), 
+                        AvroDebug::ascii_string($strio->string()));
+
+    $read_strio = new AvroStringIO($strio->string());
+
+    $datum_reader = new AvroIODatumReader();
+    $dr = new AvroDataIOReader($read_strio, $datum_reader);
+    $read_data = $dr->data();
+    $datum_count = count($read_data);
+    $this->assertEquals(0, $datum_count);
+  }
+
+}
diff --git a/lang/php/test/generate_interop_data.php b/lang/php/test/generate_interop_data.php
new file mode 100644
index 0000000..6361661
--- /dev/null
+++ b/lang/php/test/generate_interop_data.php
@@ -0,0 +1,46 @@
+#!/usr/bin/env php
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+require_once('test_helper.php');
+
+$data_file = join(DIRECTORY_SEPARATOR, array(AVRO_BUILD_DATA_DIR, 'php.avro'));
+$datum = array('nullField' => null,
+               'boolField' => true,
+               'intField' => -42,
+               'longField' => (int) 2147483650,
+               'floatField' => 1234.0,
+               'doubleField' => -5432.6,
+               'stringField' => 'hello avro',
+               'bytesField' => "\x16\xa6",
+               'arrayField' => array(5.0, -6.0, -10.5),
+               'mapField' => array('a' => array('label' => 'a'),
+                                   'c' => array('label' => '3P0')),
+               'unionField' => 14.5,
+               'enumField' => 'C',
+               'fixedField' => '1019181716151413',
+               'recordField' => array('label' => 'blah',
+                                      'children' => array(
+                                        array('label' => 'inner',
+                                              'children' => array()))));
+
+$schema_json = file_get_contents(AVRO_INTEROP_SCHEMA);
+$io_writer = AvroDataIO::open_file($data_file, 'w', $schema_json);
+$io_writer->append($datum);
+$io_writer->close();
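+// the written container should be readable back via
+// AvroDataIO::open_file($data_file, 'r')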
diff --git a/lang/php/test/test_helper.php b/lang/php/test/test_helper.php
new file mode 100644
index 0000000..1c06777
--- /dev/null
+++ b/lang/php/test/test_helper.php
@@ -0,0 +1,42 @@
+<?php
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+define('AVRO_TEST_HELPER_DIR', dirname(__FILE__));
+
+require_once(join(DIRECTORY_SEPARATOR, 
+                  array(dirname(AVRO_TEST_HELPER_DIR), 'lib', 'avro.php')));
+
+define('TEST_TEMP_DIR', join(DIRECTORY_SEPARATOR, 
+                             array(AVRO_TEST_HELPER_DIR, 'tmp')));
+
+define('AVRO_BASE_DIR', dirname(dirname(dirname(AVRO_TEST_HELPER_DIR))));
+define('AVRO_SHARE_DIR', join(DIRECTORY_SEPARATOR,
+                               array(AVRO_BASE_DIR, 'share')));
+define('AVRO_BUILD_DIR', join(DIRECTORY_SEPARATOR,
+                               array(AVRO_BASE_DIR, 'build')));
+define('AVRO_BUILD_DATA_DIR', join(DIRECTORY_SEPARATOR,
+                                    array(AVRO_BUILD_DIR, 'interop', 'data')));
+define('AVRO_TEST_SCHEMAS_DIR', join(DIRECTORY_SEPARATOR,
+                                     array(AVRO_SHARE_DIR, 'test', 'schemas')));
+define('AVRO_INTEROP_SCHEMA', join(DIRECTORY_SEPARATOR,
+                                   array(AVRO_TEST_SCHEMAS_DIR, 'interop.avsc')));
+
+$tz = ini_get('date.timezone');
+if (empty($tz))
+  date_default_timezone_set('America/New_York');
diff --git a/lang/py/build.xml b/lang/py/build.xml
index 6d371ea..61c3f4c 100644
--- a/lang/py/build.xml
+++ b/lang/py/build.xml
@@ -16,7 +16,7 @@
    limitations under the License.
 -->
 
-<project name="Avro" default="dist">
+<project name="Avro" default="dist" xmlns:ivy="antlib:org.apache.ivy.ant">
  
   <!-- Load user's default properties. -->
   <property file="${user.home}/build.properties"/>
@@ -36,6 +36,9 @@
   <property name="lib.dir" value="${basedir}/lib"/>
   <property name="test.dir" value="${basedir}/test"/>
 
+  <property name="ivy.version" value="2.2.0"/>
+  <property name="ivy.jar" value="${basedir}/lib/ivy-${ivy.version}.jar"/>
+
   <!-- Load shared properties -->
   <loadfile srcFile="${share.dir}/VERSION.txt" property="avro.version" />
   <loadfile srcFile="${share.schema.dir}/org/apache/avro/ipc/HandshakeRequest.avsc" property="handshake.request.json"/>
@@ -55,6 +58,17 @@
 
   <target name="init" description="Create the build directory.">
     <mkdir dir="${build.dir}"/>
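+    <!-- bootstrap Ivy: download the jar only when missing, then register its tasks -->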
+    <available file="${ivy.jar}" property="ivy.jar.found"/>
+    <antcall target="ivy-download"/>
+    <typedef uri="antlib:org.apache.ivy.ant">
+      <classpath>
+        <pathelement location="${ivy.jar}" />
+      </classpath>
+    </typedef>
+  </target>
+  
+  <target name="ivy-download" unless="ivy.jar.found" >
+    <get src="http://repo2.maven.org/maven2/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar" dest="${ivy.jar}" usetimestamp="true" />
   </target>
 
   <target name="build"
@@ -77,6 +91,12 @@
       <fileset dir="${lib.dir}" />
     </copy>
 
+    <!-- Copy the protocols used for tethering -->
+    <copy todir="${build.dir}/src/avro/tether">
+      <fileset dir="${share.schema.dir}/org/apache/avro/mapred/tether/">
+        <include name="*.avpr"/>
+      </fileset>
+    </copy>
     <!-- Inline the handshake schemas -->
     <copy file="${src.dir}/avro/ipc.py"
           toFile="${build.dir}/src/avro/ipc.py"
@@ -120,6 +140,20 @@
         <filter token="INTEROP_DATA_DIR" value="${interop.data.dir}"/>
       </filterset>
     </copy>
+
+    <!-- Ensure we have a local copy of the tools jar -->
+    <ivy:retrieve
+        pattern="${basedir}/../java/tools/target/[artifact]-[revision].[ext]"/>
+
+    <!-- Inline the location of the tools jar -->
+    <copy file="${test.dir}/test_tether_word_count.py"
+          toFile="${build.dir}/test/test_tether_word_count.py"
+          overwrite="true">
+      <filterset>
+  <filter token="AVRO_VERSION" value="${avro.version}"/>
+  <filter token="TOPDIR" value="${basedir}"/>
+      </filterset>
+    </copy>
   </target>
 
   <target name="test"
@@ -135,6 +169,22 @@
     </py-test>
   </target>
 
+  <!-- Run just the unit tests for tethered jobs. -->
+  <target name="test-tether"
+          description="Run unit tests for a hadoop python-tethered job."
+          depends="build">
+    <taskdef name="py-test" classname="org.pyant.tasks.PythonTestTask"
+       classpathref="java.classpath"/>
+    <py-test python="${python}" pythonpathref="test.path">
+      <fileset dir="${build.dir}/test">
+        <include name="test_tether*.py"/>
+        <!--<exclude name="test_datafile_interop.py"/>-->
+      </fileset>
+    </py-test>
+  </target>
+
+
   <target name="interop-data-test"
           description="Run python interop data tests"
           depends="build">
diff --git a/lang/py/ivy.xml b/lang/py/ivy.xml
new file mode 100644
index 0000000..c37216c
--- /dev/null
+++ b/lang/py/ivy.xml
@@ -0,0 +1,24 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<ivy-module version="2.0">
+    <info organisation="org.apache.avro" module="python"/>
+    <configurations defaultconfmapping="default"/>
+    <dependencies>
+        <dependency org="org.apache.avro" name="avro-tools"
+                    rev="${avro.version}" transitive="false"/>
+    </dependencies>
+</ivy-module>
diff --git a/lang/py/ivysettings.xml b/lang/py/ivysettings.xml
new file mode 100644
index 0000000..31de16e
--- /dev/null
+++ b/lang/py/ivysettings.xml
@@ -0,0 +1,30 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<ivysettings>
+  <settings defaultResolver="repos" />
+  <property name="m2-pattern" value="${user.home}/.m2/repository/[organisation]/[module]/[revision]/[module]-[revision](-[classifier]).[ext]" override="false" />
+  <resolvers>
+    <chain name="repos">
+      <ibiblio name="central" m2compatible="true"/>   
+      <ibiblio name="apache-snapshots" m2compatible="true" root="https://repository.apache.org/content/groups/snapshots"/> 
+      <filesystem name="local-maven2" m2compatible="true"> <!-- needed when building non-snapshot version for release -->
+        <artifact pattern="${m2-pattern}"/>
+        <ivy pattern="${m2-pattern}"/>
+      </filesystem>
+    </chain>
+  </resolvers>
+</ivysettings>
diff --git a/lang/py/src/avro/schema.py b/lang/py/src/avro/schema.py
index 86ce86a..f946d0a 100644
--- a/lang/py/src/avro/schema.py
+++ b/lang/py/src/avro/schema.py
@@ -385,13 +385,13 @@ class Field(object):
 #
 class PrimitiveSchema(Schema):
   """Valid primitive types are in PRIMITIVE_TYPES."""
-  def __init__(self, type):
+  def __init__(self, type, other_props=None):
     # Ensure valid ctor args
     if type not in PRIMITIVE_TYPES:
       raise AvroException("%s is not a valid primitive type." % type)
 
     # Call parent ctor
-    Schema.__init__(self, type)
+    Schema.__init__(self, type, other_props=other_props)
 
     self.fullname = type
 
@@ -723,7 +723,7 @@ def make_avsc_object(json_data, names=None):
     type = json_data.get('type')
     other_props = get_other_props(json_data, SCHEMA_RESERVED_PROPS)
     if type in PRIMITIVE_TYPES:
-      return PrimitiveSchema(type)
+      return PrimitiveSchema(type, other_props)
     elif type in NAMED_TYPES:
       name = json_data.get('name')
       namespace = json_data.get('namespace', names.default_namespace)
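
The change above threads unreserved attributes through PrimitiveSchema, so custom properties on primitive type declarations survive parsing instead of being dropped. A minimal sketch of the resulting behaviour, assuming the other_props accessor exposed by the base Schema class:

    from avro import schema

    s = schema.parse('{"type": "string", "ignored_attr": "hello"}')
    print s.other_props   # expected: {'ignored_attr': 'hello'}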
diff --git a/lang/py/src/avro/tether/__init__.py b/lang/py/src/avro/tether/__init__.py
new file mode 100644
index 0000000..458c692
--- /dev/null
+++ b/lang/py/src/avro/tether/__init__.py
@@ -0,0 +1,7 @@
+from .util import *
+from .tether_task import *
+from .tether_task_runner import *
+
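+# the submodule names (util, tether_task, tether_task_runner) are bound in
+# this package namespace as a side effect of the star imports above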
+__all__=util.__all__
+__all__+=tether_task.__all__
+__all__+=tether_task_runner.__all__
diff --git a/lang/py/src/avro/tether/tether_task.py b/lang/py/src/avro/tether/tether_task.py
new file mode 100644
index 0000000..90a8788
--- /dev/null
+++ b/lang/py/src/avro/tether/tether_task.py
@@ -0,0 +1,498 @@
+"""
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+"""
+
+__all__=["TetherTask","TaskType","inputProtocol","outputProtocol","HTTPRequestor"]
+
+from avro import schema, protocol
+from avro import io as avio
+from avro import ipc
+
+import io as pyio
+import sys
+import os
+import traceback
+import logging
+import collections
+from StringIO import StringIO
+import threading
+
+
+# create protocol objects for the input and output protocols
+# The build process should copy InputProtocol.avpr and OutputProtocol.avpr
+# into the same directory as this module
+inputProtocol=None
+outputProtocol=None
+
+TaskType=None
+if (inputProtocol is None):
+  pfile=os.path.split(__file__)[0]+os.sep+"InputProtocol.avpr"
+
+  if not(os.path.exists(pfile)):
+    raise Exception("Could not locate the InputProtocol: {0} does not exist".format(pfile))
+
+  with open(pfile,'r') as hf:
+    prototxt=hf.read()
+
+  inputProtocol=protocol.parse(prototxt)
+
+  # use a named tuple to represent the tasktype enumeration
+  taskschema=inputProtocol.types_dict["TaskType"]
+  _ttype=collections.namedtuple("_tasktype",taskschema.symbols)
+  TaskType=_ttype(*taskschema.symbols)
+
+if (outputProtocol is None):
+  pfile=os.path.split(__file__)[0]+os.sep+"OutputProtocol.avpr"
+
+  if not(os.path.exists(pfile)):
+    raise Exception("Could not locate the OutputProtocol: {0} does not exist".format(pfile))
+
+  with open(pfile,'r') as hf:
+    prototxt=hf.read()
+
+  outputProtocol=protocol.parse(prototxt)
+
+class Collector(object):
+  """
+  Collector for map and reduce output values
+  """
+  def __init__(self,scheme=None,outputClient=None):
+    """
+
+    Parameters
+    ---------------------------------------------
+    scheme - The schema for the datums to output; can be a json string
+             or an instance of Schema
+    outputClient - The output client used to send messages to the parent
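+
+    For example, a collector for the mapper output might be constructed as
+    Collector(midschema, outputClient).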
+    """
+
+    if not(isinstance(scheme,schema.Schema)):
+      scheme=schema.parse(scheme)
+
+    if (outputClient is None):
+      raise ValueError("output client can't be none.")
+
+    self.scheme=scheme
+    self.buff=StringIO()
+    self.encoder=avio.BinaryEncoder(self.buff)
+
+    self.datum_writer = avio.DatumWriter(writers_schema=self.scheme)
+    self.outputClient=outputClient
+
+  def collect(self,record,partition=None):
+    """Collect a map or reduce output value
+
+    Parameters
+    ------------------------------------------------------
+    record - The record to write
+    partition - Indicates the partition for a pre-partitioned map output
+              - currently not supported
+    """
+
+    # clear out any previous data, encode the record into the buffer,
+    # then rewind so the encoded bytes can be read back
+    self.buff.truncate(0)
+    self.datum_writer.write(record, self.encoder)
+    self.buff.flush()
+    self.buff.seek(0)
+
+    if (partition is None):
+      # TODO: Is there a more efficient way to read the data in self.buff?
+      # self.buff.read() returns the bytes as a str; self.buff.readinto could
+      # read into a bytearray, but that array would have to be pre-allocated.
+      self.outputClient.request("output",{"datum":self.buff.read()})
+    else:
+      self.outputClient.request("outputPartitioned",{"datum":self.buff.read(),"partition":partition})
+
+
+
+def keys_are_equal(rec1,rec2,fkeys):
+  """Check if the "keys" in two records are equal. The key fields
+  are all fields for which order isn't marked ignore.
+
+  Parameters
+  -------------------------------------------------------------------------
+  rec1  - The first record
+  rec2 - The second record
+  fkeys - A list of the fields to compare
+  """
+
+  for f in fkeys:
+    if not(rec1[f]==rec2[f]):
+      return False
+
+  return True
+
+
+class HTTPRequestor(object):
+  """
+  This is a small requestor helper for the HTTP protocol.
+  Since HTTP connections aren't persistent, we need to instantiate
+  a new transceiver and a new requestor for each request.
+  Its interface is kept identical to the socket-based requestor's
+  so that we can seamlessly switch between the two.
+  """
+
+  def __init__(self, server,port,protocol):
+    """
+    Instantiate the class.
+
+    Parameters
+    ----------------------------------------------------------------------
+    server - The server hostname
+    port - Which port to use
+    protocol - The protocol for the communication
+    """
+
+    self.server=server
+    self.port=port
+    self.protocol=protocol
+
+  def request(self,*args,**param):
+    transceiver=ipc.HTTPTransceiver(self.server,self.port)
+    requestor=ipc.Requestor(self.protocol, transceiver)
+    return requestor.request(*args,**param)
+
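+# A usage sketch (the port is hypothetical; assumes a parent server is listening and
+# that outputProtocol has been loaded as above):
+#
+#   requestor=HTTPRequestor("127.0.0.1",12001,outputProtocol)
+#   requestor.request("status",{"message":"mapper 50% done"})
+#
+# Each call builds and discards its own HTTPTransceiver, so no connection state
+# survives between requests.
+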
+
+class TetherTask(object):
+  """
+  Base class for python tether mapreduce programs.
+
+  ToDo: Currently the subclass has to implement both reduce and reduceFlush.
+  This is not very pythonic. A pythonic way to implement the reducer
+  would be to pass the reducer a generator (as dumbo does) so that the user
+  could iterate over the records for the given key.
+  How would we do this? I think we would need two threads: one thread would run
+  the user's reduce function and would be suspended when no reducer records were available.
+  The other thread would read in the records for the reducer. This thread should
+  only buffer so many records at a time (i.e. if the buffer is full, self.input shouldn't return right
+  away but should wait for space to free up).
+  """
+
+  def __init__(self,inschema=None,midschema=None,outschema=None):
+    """
+
+    Parameters
+    ---------------------------------------------------------
+    inschema - The schema for the input to the mapper
+    midschema  - The schema for the output of the mapper
+    outschema - The schema for the output of the reducer
+
+    An example schema for the prototypical word count example would be
+    inschema='{"type":"record", "name":"Pair","namespace":"org.apache.avro.mapred","fields":[
+              {"name":"key","type":"string"},
+              {"name":"value","type":"long","order":"ignore"}]
+              }'
+
+    Important: The records are split into (key,value) pairs as required by map reduce
+    by using all fields for which "order" isn't set to "ignore" for the key and the
+    remaining fields for the value.
+
+    The subclass provides these schemas in order to tell this class which schemas it expects.
+    The configure request will also provide the schemas that the parent process is using.
+    This allows us to check whether the schemas match and, if not, whether we can resolve
+    the differences (see http://avro.apache.org/docs/current/spec.html#Schema+Resolution).
+
+    """
+
+
+    if (inschema is None):
+      raise ValueError("inschema can't be None")
+
+    if (midschema is None):
+      raise ValueError("midschema can't be None")
+
+    if (outschema is None):
+      raise ValueError("outschema can't be None")
+
+    # make sure we can parse the schemas
+    # Should we call fail if we can't parse the schemas?
+    self.inschema=schema.parse(inschema)
+    self.midschema=schema.parse(midschema)
+    self.outschema=schema.parse(outschema)
+
+
+    # declare various variables
+    self.clientTransceiver=None
+
+    # output client is used to communicate with the parent process
+    # in particular to transmit the outputs of the mapper and reducer
+    self.outputClient = None
+
+    # collectors for the output of the mapper and reducer
+    self.midCollector=None
+    self.outCollector=None
+
+    self._partitions=None
+
+    # cache a list of the fields used by the reducer as the keys
+    # we need the fields to decide when we have finished processing all values for
+    # a given key. We cache the fields to be more efficient
+    self._red_fkeys=None
+
+    # We need to keep track of the previous record fed to the reducer
+    # because we need to be able to determine when we start processing a new group
+    # in the reducer
+    self.midRecord=None
+
+    # create an event object to signal when
+    # http server is ready to be shutdown
+    self.ready_for_shutdown=threading.Event()
+    self.log=logging.getLogger("TetherTask")
+
+  def open(self, inputport,clientPort=None):
+    """Open the output client - i.e the connection to the parent process
+
+    Parameters
+    ---------------------------------------------------------------
+    inputport - This is the port that the subprocess is listening on, i.e. the
+                subprocess starts a server listening on this port to accept requests from
+                the parent process
+    clientPort - The port on which the server in the parent process is listening
+                - If this is None we look for the environment variable AVRO_TETHER_OUTPUT_PORT
+                - This is mainly provided for debugging purposes. In practice
+                we want to use the environment variable
+
+    """
+
+
+    # Open the connection to the parent process
+    # The port the parent process is listening on is set in the environment
+    # variable AVRO_TETHER_OUTPUT_PORT
+    # open output client, connecting to parent
+
+    if (clientPort is None):
+      clientPortString = os.getenv("AVRO_TETHER_OUTPUT_PORT")
+      if (clientPortString is None):
+        raise Exception("AVRO_TETHER_OUTPUT_PORT env var is not set")
+
+      clientPort = int(clientPortString)
+
+    self.log.info("TetherTask.open: Opening connection to parent server on port={0}".format(clientPort))
+
+    # We use the HTTP protocol, although we hope to shortly have
+    # support for SocketServer.
+    usehttp=True
+
+    if(usehttp):
+      # Since HTTP is stateless, a new transceiver
+      # is created and closed for each request. We therefore set clientTransceiver to None.
+      # We still declare clientTransceiver because for other (stateful) protocols we will need
+      # it, and we want to check when we get the fail message whether the transceiver
+      # needs to be closed.
+      self.outputClient = HTTPRequestor("127.0.0.1",clientPort,outputProtocol)
+
+    else:
+      raise NotImplementedError("Only http protocol is currently supported")
+
+    try:
+      self.outputClient.request('configure',{"port":inputport})
+    except Exception as e:
+      estr= traceback.format_exc()
+      self.fail(estr)
+
+
+  def configure(self,taskType,  inSchemaText,  outSchemaText):
+    """
+
+    Parameters
+    -------------------------------------------------------------------
+    taskType - What type of task (e.g map, reduce)
+             - This is an enumeration which is specified in the input protocol
+    inSchemaText -  string containing the input schema
+                 - This is the actual schema with which the data was encoded,
+                   i.e. it is the writers_schema (see http://avro.apache.org/docs/current/spec.html#Schema+Resolution)
+                   This is the schema the parent process is using, which might be different
+                   from the one provided by the subclass of tether_task
+
+    outSchemaText - string containing the output schema
+                  - This is the schema expected by the parent process for the output
+    """
+    self.taskType = taskType
+
+    try:
+      inSchema = schema.parse(inSchemaText)
+      outSchema = schema.parse(outSchemaText)
+
+      if (taskType==TaskType.MAP):
+        self.inReader=avio.DatumReader(writers_schema=inSchema,readers_schema=self.inschema)
+        self.midCollector=Collector(outSchemaText,self.outputClient)
+
+      elif(taskType==TaskType.REDUCE):
+        self.midReader=avio.DatumReader(writers_schema=inSchema,readers_schema=self.midschema)
+        # this.outCollector = new Collector<OUT>(outSchema);
+        self.outCollector=Collector(outSchemaText,self.outputClient)
+
+        # determine which fields in the input record are the keys for the reducer
+        self._red_fkeys=[f.name for f in self.midschema.fields if not(f.order=='ignore')]
+
+    except Exception as e:
+
+      estr= traceback.format_exc()
+      self.fail(estr)
+
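+  # For the word-count Pair schema the key fields computed above come out as
+  # ["key"], since "value" is declared with "order":"ignore". A sketch, where
+  # midschema_text is the Pair schema from the class docstring:
+  #
+  #   mid=schema.parse(midschema_text)
+  #   [f.name for f in mid.fields if not(f.order=='ignore')]  # -> ['key']
+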
+  def set_partitions(self,npartitions):
+
+    try:
+      self._partitions=npartitions
+    except Exception as e:
+      estr= traceback.format_exc()
+      self.fail(estr)
+
+  def get_partitions(self):
+    """ Return the number of map output partitions of this job."""
+    return self._partitions
+
+  def input(self,data,count):
+    """ Recieve input from the server
+
+    Parameters
+    ------------------------------------------------------
+    data - Sould containg the bytes encoding the serialized data
+          - I think this gets represented as a tring
+    count - how many input records are provided in the binary stream
+    """
+    try:
+      # wrap the data so we can feed it to avio.BinaryDecoder
+      bdata=StringIO(data)
+      decoder = avio.BinaryDecoder(bdata)
+
+      for i in range(count):
+        if (self.taskType==TaskType.MAP):
+          inRecord = self.inReader.read(decoder)
+
+          # Do we need to pass midCollector if it's declared as an instance variable?
+          self.map(inRecord, self.midCollector)
+
+        elif (self.taskType==TaskType.REDUCE):
+
+          # store the previous record
+          prev = self.midRecord
+
+          # read the new record
+          self.midRecord = self.midReader.read(decoder)
+          if (prev is not None and not(keys_are_equal(self.midRecord,prev,self._red_fkeys))):
+            # since the key has changed we need to finalize the processing
+            # for this group of key,value pairs
+            self.reduceFlush(prev, self.outCollector)
+          self.reduce(self.midRecord, self.outCollector)
+
+    except Exception as e:
+      estr= traceback.format_exc()
+      self.log.warning("failing: "+estr)
+      self.fail(estr)
+
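+  # (sketch) Because the stream is positional rather than delimited, several records
+  # can be packed into one input call by concatenating their encodings; the names
+  # below (some_schema, datum1, datum2) are hypothetical:
+  #
+  #   buff=StringIO()
+  #   enc=avio.BinaryEncoder(buff)
+  #   dw=avio.DatumWriter(writers_schema=some_schema)
+  #   dw.write(datum1,enc)
+  #   dw.write(datum2,enc)
+  #   task.input(buff.getvalue(),2)
+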
+  def complete(self):
+    """
+    Process the complete request
+    """
+    if ((self.taskType == TaskType.REDUCE ) and not(self.midRecord is None)):
+      try:
+        self.reduceFlush(self.midRecord, self.outCollector)
+      except Exception as e:
+        estr=traceback.format_exc()
+        self.log.warning("failing: "+estr)
+        self.fail(estr)
+
+    self.outputClient.request("complete",dict())
+
+  def map(self,record,collector):
+    """Called with input values to generate intermediat values (i.e mapper output).
+
+    Parameters
+    ----------------------------------------------------------------------------
+    record - The input record
+    collector - The collector to collect the output
+
+    This is an abstract method which should be overridden by the application-specific
+    subclass.
+    """
+
+    raise NotImplementedError("This is an abstract method which should be overridden in the subclass")
+
+  def reduce(self,record, collector):
+    """ Called with input values to generate reducer output. Inputs are sorted by the mapper
+    key.
+
+    The reduce function is invoked once for each value belonging to a given key outputted
+    by the mapper.
+
+    Parameters
+    ----------------------------------------------------------------------------
+    record - The mapper output
+    collector - The collector to collect the output
+
+    This is an abstract method which should be overridden by the application-specific
+    subclass.
+    """
+
+    raise NotImplementedError("This is an abstract method which should be overridden in the subclass")
+
+  def reduceFlush(self,record, collector):
+    """
+    Called with the last intermediate value in each equivalence run.
+    In other words, reduceFlush is invoked once for each key produced in the reduce
+    phase. It is called after reduce has been invoked on each value for the given key.
+
+    Parameters
+    ------------------------------------------------------------------
+    record - the last record on which reduce was invoked.
+    """
+    raise NotImplementedError("This is an abstract method which should be overridden in the subclass")
+
+  def status(self,message):
+    """
+    Called to update task status
+    """
+    self.outputClient.request("status",{"message":message})
+
+  def count(self,group, name, amount):
+    """
+    Called to increment a counter
+    """
+    self.outputClient.request("count",{"group":group, "name":name, "amount":amount})
+
+  def fail(self,message):
+    """
+    Call to fail the task.
+    """
+    self.log.error("TetherTask.fail: failure occured message follows:\n{0}".format(message))
+    try:
+      self.outputClient.request("fail",{"message":message})
+    except Exception as e:
+      estr=traceback.format_exc()
+      self.log.error("TetherTask.fail: an exception occured while trying to send the fail message to the output server:\n{0}".format(estr))
+
+    self.close()
+
+  def close(self):
+    self.log.info("TetherTask.close: closing")
+    if not(self.clientTransceiver is None):
+      try:
+        self.clientTransceiver.close()
+
+      except Exception as e:
+        # ignore exceptions
+        pass
+
+    # http server is ready to be shutdown
+    self.ready_for_shutdown.set()
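
In normal operation the parent passes its port through AVRO_TETHER_OUTPUT_PORT and
open() picks it up. A minimal sketch of the handshake from the task side (port
numbers hypothetical; WordCountTask is the sample subclass used by the tests below):

    os.environ["AVRO_TETHER_OUTPUT_PORT"]="12001"
    task=WordCountTask()
    task.open(12000)   # sends a "configure" request carrying port 12000 to the parent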
diff --git a/lang/py/src/avro/tether/tether_task_runner.py b/lang/py/src/avro/tether/tether_task_runner.py
new file mode 100644
index 0000000..7d223d3
--- /dev/null
+++ b/lang/py/src/avro/tether/tether_task_runner.py
@@ -0,0 +1,227 @@
+"""
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+"""
+
+__all__=["TaskRunner"]
+
+if __name__ == "__main__":
+  # Relative imports don't work when being run directly
+  from avro import tether
+  from avro.tether import TetherTask, find_port, inputProtocol
+
+else:
+  from . import TetherTask, find_port, inputProtocol
+
+from avro import ipc
+from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+import logging
+import weakref
+import threading
+import sys
+import traceback
+
+class TaskRunnerResponder(ipc.Responder):
+  """
+  The responder for the tethered process
+  """
+  def __init__(self,runner):
+    """
+    Parameters
+    ----------------------------------------------------------
+    runner - Instance of TaskRunner
+    """
+    ipc.Responder.__init__(self, inputProtocol)
+
+    self.log=logging.getLogger("TaskRunnerResponder")
+
+    # We use a weak reference because self.runner owns this instance of
+    # TaskRunnerResponder, so a strong reference back would create a cycle.
+    if isinstance(runner,weakref.ProxyType):
+      self.runner=runner
+    else:
+      self.runner=weakref.proxy(runner)
+
+    self.task=weakref.proxy(runner.task)
+
+  def invoke(self, message, request):
+    try:
+      if message.name=='configure':
+        self.log.info("TetherTaskRunner: Received configure")
+        self.task.configure(request["taskType"],request["inSchema"],request["outSchema"])
+      elif message.name=='partitions':
+        self.log.info("TetherTaskRunner: Received partitions")
+        try:
+          self.task.set_partitions(request["partitions"])
+        except Exception as e:
+          self.log.error("Exception occurred while processing the partitions message:\n"+traceback.format_exc())
+          raise
+      elif message.name=='input':
+        self.log.info("TetherTaskRunner: Received input")
+        self.task.input(request["data"],request["count"])
+      elif message.name=='abort':
+        self.log.info("TetherTaskRunner: Received abort")
+        self.runner.close()
+      elif message.name=='complete':
+        self.log.info("TetherTaskRunner: Received complete")
+        self.task.complete()
+        self.task.close()
+        self.runner.close()
+      else:
+        self.log.warning("TetherTaskRunner: received unknown message {0}".format(message.name))
+
+    except Exception as e:
+      self.log.error("Error occurred while processing message: {0}".format(message.name))
+      emsg=traceback.format_exc()
+      self.task.fail(emsg)
+
+    return None
+
+
+def HTTPHandlerGen(runner):
+  """
+  This is a class factory for the HTTPHandler. We need
+  a factory because the handler needs a reference to the runner.
+
+  Parameters
+  -----------------------------------------------------------------
+  runner - instance of the task runner
+  """
+
+  if not(isinstance(runner,weakref.ProxyType)):
+    runnerref=weakref.proxy(runner)
+  else:
+    runnerref=runner
+
+  class TaskRunnerHTTPHandler(BaseHTTPRequestHandler):
+    """Create a handler for the parent.
+    """
+
+    runner=runnerref
+    def __init__(self,*args,**param):
+      BaseHTTPRequestHandler.__init__(self,*args,**param)
+
+    def do_POST(self):
+      self.responder = TaskRunnerResponder(self.runner)
+      call_request_reader = ipc.FramedReader(self.rfile)
+      call_request = call_request_reader.read_framed_message()
+      resp_body = self.responder.respond(call_request)
+      self.send_response(200)
+      self.send_header('Content-Type', 'avro/binary')
+      self.end_headers()
+      resp_writer = ipc.FramedWriter(self.wfile)
+      resp_writer.write_framed_message(resp_body)
+
+  return TaskRunnerHTTPHandler
+
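+# The closure above is the usual way to hand per-server state to
+# BaseHTTPRequestHandler, whose constructor signature is fixed by BaseHTTPServer.
+# The same pattern in miniature (names hypothetical):
+#
+#   def make_handler(state):
+#     class Handler(BaseHTTPRequestHandler):
+#       def do_GET(self):
+#         self.send_response(200)
+#         self.end_headers()
+#         self.wfile.write(state["greeting"])
+#     return Handler
+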
+class TaskRunner(object):
+  """This class ties together the server handling the requests from
+  the parent process and the instance of TetherTask which actually
+  implements the logic for the mapper and reducer phases
+  """
+
+  def __init__(self,task):
+    """
+    Construct the runner
+
+    Parameters
+    ---------------------------------------------------------------
+    task - An instance of tether task
+    """
+
+    self.log=logging.getLogger("TaskRunner:")
+
+    if not(isinstance(task,TetherTask)):
+      raise ValueError("task must be an instance of tether task")
+    self.task=task
+
+    self.server=None
+    self.sthread=None
+
+  def start(self,outputport=None,join=True):
+    """
+    Start the server
+
+    Parameters
+    -------------------------------------------------------------------
+    outputport - (optional) The port on which the parent process is listening
+                 for requests from the task.
+               - This will typically be supplied by an environment variable;
+                 we allow it to be supplied as an argument mainly for debugging.
+    join       - (optional) If set to false then we don't issue a join to block
+                 until the thread executing the server terminates.
+                 This is mainly for debugging. By setting it to false,
+                 we can resume execution in this thread so that we can do additional
+                 testing
+    """
+
+    port=find_port()
+    address=("localhost",port)
+
+
+    def thread_run(task_runner=None):
+      task_runner.server = HTTPServer(address, HTTPHandlerGen(task_runner))
+      task_runner.server.allow_reuse_address = True
+      task_runner.server.serve_forever()
+
+    # create a separate thread for the http server
+    sthread=threading.Thread(target=thread_run,kwargs={"task_runner":self})
+    sthread.start()
+
+    self.sthread=sthread
+    # The server needs to run in a separate thread because serve_forever() blocks
+    self.task.open(port,clientPort=outputport)
+
+    # wait for the other thread to finish
+    if (join):
+      self.task.ready_for_shutdown.wait()
+      self.server.shutdown()
+
+      # should we do some kind of check to make sure it exits
+      self.log.info("Shutdown the logger")
+      # shutdown the logging
+      logging.shutdown()
+
+  def close(self):
+    """
+    Handler for the close message
+    """
+
+    self.task.close()
+
+if __name__ == '__main__':
+  # TODO::Make the logging level a parameter we can set
+  # logging.basicConfig(level=logging.INFO,filename='/tmp/log',filemode='w')
+  logging.basicConfig(level=logging.INFO)
+
+  if (len(sys.argv)<=1):
+    print "Error: tether_task_runner.__main__: Usage: tether_task_runner task_package.task_module.TaskClass"
+    raise ValueError("Usage: tether_task_runner task_package.task_module.TaskClass")
+
+  fullcls=sys.argv[1]
+  mod,cname=fullcls.rsplit(".",1)
+
+  logging.info("tether_task_runner.__main__: Task: {0}".format(fullcls))
+
+  modobj=__import__(mod,fromlist=[cname])
+
+  taskcls=getattr(modobj,cname)
+  task=taskcls()
+
+  runner=TaskRunner(task=task)
+  runner.start()
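
The runner can also be started as a child process, which is how the test suite below
drives it; parent_port is assumed to point at an already listening parent:

    env={"AVRO_TETHER_OUTPUT_PORT":"{0}".format(parent_port)}
    env["PYTHONPATH"]=':'.join(sys.path)
    runnerproc=subprocess.Popen(
        ["python",tether_task_runner.__file__,"word_count_task.WordCountTask"],env=env)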
diff --git a/lang/py/src/avro/tether/util.py b/lang/py/src/avro/tether/util.py
new file mode 100644
index 0000000..071b4a1
--- /dev/null
+++ b/lang/py/src/avro/tether/util.py
@@ -0,0 +1,34 @@
+"""
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+"""
+
+__all__=["find_port"]
+
+import socket
+
+
+def find_port():
+  """
+  Return an unbound port
+  """
+  s=socket.socket()
+  s.bind(("127.0.0.1",0))
+
+  port=s.getsockname()[1]
+  s.close()
+
+  return port
\ No newline at end of file
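
Note that find_port only reports a port that was free at the instant of the probe;
nothing keeps holding it, so callers should bind it promptly. A sketch (HandlerClass
is hypothetical):

    port=find_port()
    server=HTTPServer(("localhost",port),HandlerClass)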
diff --git a/lang/py/test/mock_tether_parent.py b/lang/py/test/mock_tether_parent.py
new file mode 100644
index 0000000..399a03a
--- /dev/null
+++ b/lang/py/test/mock_tether_parent.py
@@ -0,0 +1,95 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import set_avro_test_path
+from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+from avro import ipc
+from avro import protocol
+from avro import tether
+
+import socket
+
+def find_port():
+  """
+  Return an unbound port
+  """
+  s=socket.socket()
+  s.bind(("127.0.0.1",0))
+
+  port=s.getsockname()[1]
+  s.close()
+
+  return port
+
+SERVER_ADDRESS = ('localhost', find_port())
+
+class MockParentResponder(ipc.Responder):
+  """
+  The responder for the mocked parent
+  """
+  def __init__(self):
+    ipc.Responder.__init__(self, tether.outputProtocol)
+
+  def invoke(self, message, request):
+    if message.name=='configure':
+      print "MockParentResponder: Received 'configure': inputPort={0}".format(request["port"])
+
+    elif message.name=='status':
+      print "MockParentResponder: Received 'status': message={0}".format(request["message"])
+    elif message.name=='fail':
+      print "MockParentResponder: Received 'fail': message={0}".format(request["message"])
+    else:
+      print "MockParentResponder: Received {0}".format(message.name)
+
+    # flush the output so it shows up in the parent process
+    sys.stdout.flush()
+
+    return None
+
+class MockParentHandler(BaseHTTPRequestHandler):
+  """Create a handler for the parent.
+  """
+  def do_POST(self):
+    self.responder = MockParentResponder()
+    call_request_reader = ipc.FramedReader(self.rfile)
+    call_request = call_request_reader.read_framed_message()
+    resp_body = self.responder.respond(call_request)
+    self.send_response(200)
+    self.send_header('Content-Type', 'avro/binary')
+    self.end_headers()
+    resp_writer = ipc.FramedWriter(self.wfile)
+    resp_writer.write_framed_message(resp_body)
+
+if __name__ == '__main__':
+  if (len(sys.argv)<=1):
+    raise ValueError("Usage: mock_tether_parent command")
+
+  cmd=sys.argv[1].lower()
+  if (cmd=='start_server'):
+    if (len(sys.argv)==3):
+      port=int(sys.argv[2])
+    else:
+      raise ValueError("Usage: mock_tether_parent start_server port")
+
+    SERVER_ADDRESS=(SERVER_ADDRESS[0],port)
+    print "mock_tether_parent: Launching Server on Port: {0}".format(SERVER_ADDRESS[1])
+
+    # flush the output so it shows up in the parent process
+    sys.stdout.flush()
+    parent_server = HTTPServer(SERVER_ADDRESS, MockParentHandler)
+    parent_server.allow_reuse_address = True
+    parent_server.serve_forever()
diff --git a/lang/py/test/set_avro_test_path.py b/lang/py/test/set_avro_test_path.py
new file mode 100644
index 0000000..d8b0098
--- /dev/null
+++ b/lang/py/test/set_avro_test_path.py
@@ -0,0 +1,40 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Module adjusts sys.path so the unittests
+will work even if an egg for AVRO is already installed.
+By default eggs always appear higher on Python's path than
+directories set via the environment variable PYTHONPATH.
+
+For reference see:
+http://www.velocityreviews.com/forums/t716589-pythonpath-and-eggs.html
+http://stackoverflow.com/questions/897792/pythons-sys-path-value.
+
+Unittests would therefore use the installed AVRO and not the AVRO
+being built. To work around this the unittests import this module before
+importing AVRO. This module in turn adjusts sys.path so that the test
+build of AVRO is higher on the path than any installed eggs.
+"""
+import sys
+import os
+
+# determine the build directory and then make sure all paths that start with the
+# build directory are at the top of the path
+builddir=os.path.split(os.path.split(__file__)[0])[0]
+bpaths=filter(lambda s:s.startswith(builddir), sys.path)
+
+for p in bpaths:
+  sys.path.insert(0,p)
\ No newline at end of file
diff --git a/lang/py/test/test_datafile.py b/lang/py/test/test_datafile.py
index b3ce692..72994f3 100644
--- a/lang/py/test/test_datafile.py
+++ b/lang/py/test/test_datafile.py
@@ -15,6 +15,9 @@
 # limitations under the License.
 import os
 import unittest
+
+import set_avro_test_path
+
 from avro import schema
 from avro import io
 from avro import datafile
diff --git a/lang/py/test/test_datafile_interop.py b/lang/py/test/test_datafile_interop.py
index 8f4e883..7204529 100644
--- a/lang/py/test/test_datafile_interop.py
+++ b/lang/py/test/test_datafile_interop.py
@@ -15,6 +15,9 @@
 # limitations under the License.
 import os
 import unittest
+
+import set_avro_test_path
+
 from avro import io
 from avro import datafile
 
diff --git a/lang/py/test/test_io.py b/lang/py/test/test_io.py
index 05a6f80..1e79d3e 100644
--- a/lang/py/test/test_io.py
+++ b/lang/py/test/test_io.py
@@ -19,6 +19,9 @@ try:
 except ImportError:
   from StringIO import StringIO
 from binascii import hexlify
+
+import set_avro_test_path
+
 from avro import schema
 from avro import io
 
diff --git a/lang/py/test/test_ipc.py b/lang/py/test/test_ipc.py
index 2545b15..7fffe49 100644
--- a/lang/py/test/test_ipc.py
+++ b/lang/py/test/test_ipc.py
@@ -19,6 +19,8 @@ servers yet available.
 """
 import unittest
 
+import set_avro_test_path
+
 # This test does import this code, to make sure it at least passes
 # compilation.
 from avro import ipc
diff --git a/lang/py/test/test_schema.py b/lang/py/test/test_schema.py
index b9c84b3..204d1b1 100644
--- a/lang/py/test/test_schema.py
+++ b/lang/py/test/test_schema.py
@@ -17,6 +17,8 @@
 Test the schema parsing logic.
 """
 import unittest
+import set_avro_test_path
+
 from avro import schema
 
 def print_test_name(test_name):
@@ -287,6 +289,10 @@ OTHER_PROP_EXAMPLES = [
      "symbols": [ "one", "two", "three" ],
      "cp_float" : 1.0 }
     """,True),
+  ExampleSchema("""\
+    {"type": "long",
+     "date": "true"}
+    """, True)
 ]
 
 EXAMPLES = PRIMITIVE_EXAMPLES
diff --git a/lang/py/test/test_tether_task.py b/lang/py/test/test_tether_task.py
new file mode 100644
index 0000000..32265e6
--- /dev/null
+++ b/lang/py/test/test_tether_task.py
@@ -0,0 +1,116 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+
+import os
+import subprocess
+import sys
+import time
+import unittest
+
+import set_avro_test_path
+
+class TestTetherTask(unittest.TestCase):
+  """
+  TODO: We should validate the server response by looking at stdout
+  """
+  def test1(self):
+    """
+    Test that the tether_task is working. We run the mock_tether_parent in a separate
+    subprocess
+    """
+    from avro import tether
+    from avro import io as avio
+    from avro import schema
+    from avro.tether import HTTPRequestor,inputProtocol, find_port
+
+    import StringIO
+    import mock_tether_parent
+    from word_count_task import WordCountTask
+
+    task=WordCountTask()
+
+    proc=None
+    try:
+      # launch the server in a separate process
+      # env["AVRO_TETHER_OUTPUT_PORT"]=output_port
+      env=dict()
+      env["PYTHONPATH"]=':'.join(sys.path)
+      server_port=find_port()
+
+      pyfile=mock_tether_parent.__file__
+      proc=subprocess.Popen(["python", pyfile,"start_server","{0}".format(server_port)])
+      input_port=find_port()
+
+      print "Mock server started process pid={0}".format(proc.pid)
+      # Possible race condition? open tries to connect to the subprocess before the subprocess is fully started
+      # so we give the subprocess time to start up
+      time.sleep(1)
+      task.open(input_port,clientPort=server_port)
+
+      # TODO: We should validate that open worked by grabbing the STDOUT of the subprocess
+      # and ensuring that it outputted the correct message.
+
+      #***************************************************************
+      # Test the mapper
+      task.configure(tether.TaskType.MAP,str(task.inschema),str(task.midschema))
+
+      # Serialize some data so we can send it to the input function
+      datum="This is a line of text"
+      writer = StringIO.StringIO()
+      encoder = avio.BinaryEncoder(writer)
+      datum_writer = avio.DatumWriter(task.inschema)
+      datum_writer.write(datum, encoder)
+
+      writer.seek(0)
+      data=writer.read()
+
+      # Call input to simulate calling map
+      task.input(data,1)
+
+      # Test the reducer
+      task.configure(tether.TaskType.REDUCE,str(task.midschema),str(task.outschema))
+
+      # Serialize some data so we can send it to the input function
+      datum={"key":"word","value":2}
+      writer = StringIO.StringIO()
+      encoder = avio.BinaryEncoder(writer)
+      datum_writer = avio.DatumWriter(task.midschema)
+      datum_writer.write(datum, encoder)
+
+      writer.seek(0)
+      data=writer.read()
+
+      # Call input to simulate calling reduce
+      task.input(data,1)
+
+      task.complete()
+
+      # try a status
+      task.status("Status message")
+
+    except Exception as e:
+      raise
+    finally:
+      # close the process
+      if not(proc is None):
+        proc.kill()
+
+
+if __name__ == '__main__':
+  unittest.main()
\ No newline at end of file
diff --git a/lang/py/test/test_tether_task_runner.py b/lang/py/test/test_tether_task_runner.py
new file mode 100644
index 0000000..a3f10fe
--- /dev/null
+++ b/lang/py/test/test_tether_task_runner.py
@@ -0,0 +1,191 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import sys
+import time
+import unittest
+
+import set_avro_test_path
+
+
+class TestTetherTaskRunner(unittest.TestCase):
+  """ unit test for a tethered task runner.
+  """
+
+  def test1(self):
+    from word_count_task import WordCountTask
+    from avro.tether import TaskRunner, find_port,HTTPRequestor,inputProtocol, TaskType
+    from avro import io as avio
+    import mock_tether_parent
+    import subprocess
+    import StringIO
+    import logging
+
+    # set the logging level to debug so that debug messages are printed
+    logging.basicConfig(level=logging.DEBUG)
+
+    proc=None
+    try:
+      # launch the server in a separate process
+      env=dict()
+      env["PYTHONPATH"]=':'.join(sys.path)
+      parent_port=find_port()
+
+      pyfile=mock_tether_parent.__file__
+      proc=subprocess.Popen(["python", pyfile,"start_server","{0}".format(parent_port)])
+      input_port=find_port()
+
+      print "Mock server started process pid={0}".format(proc.pid)
+      # Possible race condition? open tries to connect to the subprocess before the subprocess is fully started
+      # so we give the subprocess time to start up
+      time.sleep(1)
+
+      runner=TaskRunner(WordCountTask())
+
+      runner.start(outputport=parent_port,join=False)
+
+      # Test sending various messages to the server and ensuring they are
+      # processed correctly
+      requestor=HTTPRequestor("localhost",runner.server.server_address[1],inputProtocol)
+
+      # TODO: We should validate that open worked by grabbing the STDOUT of the subprocess
+      # and ensuring that it outputted the correct message.
+
+      # Test the mapper
+      requestor.request("configure",{"taskType":TaskType.MAP,"inSchema":str(runner.task.inschema),"outSchema":str(runner.task.midschema)})
+
+      # Serialize some data so we can send it to the input function
+      datum="This is a line of text"
+      writer = StringIO.StringIO()
+      encoder = avio.BinaryEncoder(writer)
+      datum_writer = avio.DatumWriter(runner.task.inschema)
+      datum_writer.write(datum, encoder)
+
+      writer.seek(0)
+      data=writer.read()
+
+
+      # Call input to simulate calling map
+      requestor.request("input",{"data":data,"count":1})
+
+      #Test the reducer
+      requestor.request("configure",{"taskType":TaskType.REDUCE,"inSchema":str(runner.task.midschema),"outSchema":str(runner.task.outschema)})
+
+      #Serialize some data so we can send it to the input function
+      datum={"key":"word","value":2}
+      writer = StringIO.StringIO()
+      encoder = avio.BinaryEncoder(writer)
+      datum_writer = avio.DatumWriter(runner.task.midschema)
+      datum_writer.write(datum, encoder)
+
+      writer.seek(0)
+      data=writer.read()
+
+
+      #Call input to simulate calling reduce
+      requestor.request("input",{"data":data,"count":1})
+
+      requestor.request("complete",{})
+
+
+      runner.task.ready_for_shutdown.wait()
+      runner.server.shutdown()
+
+      sthread=runner.sthread
+
+      #Possible race condition?
+      time.sleep(1)
+
+      #make sure the other thread terminated
+      self.assertFalse(sthread.isAlive())
+
+      #shutdown the logging
+      logging.shutdown()
+
+    except Exception as e:
+      raise
+    finally:
+      #close the process
+      if not(proc is None):
+        proc.kill()
+
+
+  def test2(self):
+    """
+    In this test we want to make sure that when we run "tether_task_runner.py"
+    as our main script everything works as expected. We do this by using subprocess to run it
+    in a separate process.
+    """
+    from word_count_task import WordCountTask
+    from avro.tether import TaskRunner, find_port,HTTPRequestor,inputProtocol, TaskType
+    from avro.tether import tether_task_runner
+    from avro import io as avio
+    import mock_tether_parent
+    import subprocess
+    import StringIO
+
+
+    proc=None
+
+    runnerproc=None
+    try:
+      #launch the server in a separate process
+      env=dict()
+      env["PYTHONPATH"]=':'.join(sys.path)
+      parent_port=find_port()
+
+      pyfile=mock_tether_parent.__file__
+      proc=subprocess.Popen(["python", pyfile,"start_server","{0}".format(parent_port)])
+
+      # Possible race condition? When we start tether_task_runner, its open call
+      # will try to connect to the mock parent before that subprocess is fully
+      # started, so we give the subprocess time to start up.
+      time.sleep(1)
+
+
+      #start the tether_task_runner in a separate process
+      env={"AVRO_TETHER_OUTPUT_PORT":"{0}".format(parent_port)}
+      env["PYTHONPATH"]=':'.join(sys.path)
+
+      runnerproc=subprocess.Popen(["python",tether_task_runner.__file__,"word_count_task.WordCountTask"],env=env)
+
+      # possible race condition: wait for the process to start
+      time.sleep(1)
+
+      print "Mock server started process pid={0}".format(proc.pid)
+
+      # give the task runner time to start up and connect to the mock parent
+      time.sleep(1)
+
+
+    except Exception as e:
+      raise
+    finally:
+      #close the process
+      if not(runnerproc is None):
+        runnerproc.kill()
+
+      if not(proc is None):
+        proc.kill()
+
+if __name__ == "__main__":
+  unittest.main()
diff --git a/lang/py/test/test_tether_word_count.py b/lang/py/test/test_tether_word_count.py
new file mode 100644
index 0000000..6e51d31
--- /dev/null
+++ b/lang/py/test/test_tether_word_count.py
@@ -0,0 +1,213 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import inspect
+import subprocess
+import sys
+import time
+import unittest
+import os
+
+import set_avro_test_path
+
+class TestTetherWordCount(unittest.TestCase):
+  """ unittest for a python tethered map-reduce job.
+  """
+
+  def _write_lines(self,lines,fname):
+    """
+    Write the lines to an avro file named fname
+
+    Parameters
+    --------------------------------------------------------
+    lines - list of strings to write
+    fname - the name of the file to write to.
+    """
+    import avro.io as avio
+    from avro.datafile import DataFileReader,DataFileWriter
+    from avro import schema
+
+    #recursively make all directories
+    dparts=fname.split(os.sep)[:-1]
+    for i in range(len(dparts)):
+      pdir=os.sep+os.sep.join(dparts[:i+1])
+      if not(os.path.exists(pdir)):
+        os.mkdir(pdir)
+
+
+    with file(fname,'w') as hf:
+      inschema="""{"type":"string"}"""
+      writer=DataFileWriter(hf,avio.DatumWriter(inschema),writers_schema=schema.parse(inschema))
+
+      for datum in lines:
+        writer.append(datum)
+
+      writer.close()
+
+
+
+
+  def _count_words(self,lines):
+    """Return a dictionary counting the words in lines
+    """
+    counts={}
+
+    for line in lines:
+      words=line.split()
+
+      for w in words:
+        if not(counts.has_key(w.strip())):
+          counts[w.strip()]=0
+
+        counts[w.strip()]=counts[w.strip()]+1
+
+    return counts
+
+  def test1(self):
+    """
+    Run a tethered map-reduce job.
+
+    Assumptions: 1) bash is available in /bin/bash
+    """
+    from word_count_task import WordCountTask
+    from avro.tether import tether_task_runner
+    from avro.datafile import DataFileReader
+    from avro.io import DatumReader
+    import avro
+
+    import subprocess
+    import StringIO
+    import shutil
+    import tempfile
+    import inspect
+
+    proc=None
+
+    try:
+
+
+      # TODO: we should use the tempfile module to generate random names
+      # for the files
+      base_dir = "/tmp/test_tether_word_count"
+      if os.path.exists(base_dir):
+        shutil.rmtree(base_dir)
+
+      inpath = os.path.join(base_dir, "in")
+      infile=os.path.join(inpath, "lines.avro")
+      lines=["the quick brown fox jumps over the lazy dog",
+             "the cow jumps over the moon",
+             "the rain in spain falls mainly on the plains"]
+
+      self._write_lines(lines,infile)
+
+      true_counts=self._count_words(lines)
+
+      if not(os.path.exists(infile)):
+        self.fail("Missing the input file {0}".format(infile))
+
+
+      # The schema for the output of the mapper and reducer
+      oschema="""
+{"type":"record",
+ "name":"Pair","namespace":"org.apache.avro.mapred","fields":[
+     {"name":"key","type":"string"},
+     {"name":"value","type":"long","order":"ignore"}
+ ]
+}
+"""
+
+      # write the schema to a temporary file
+      osfile=tempfile.NamedTemporaryFile(mode='w',suffix=".avsc",prefix="wordcount",delete=False)
+      outschema=osfile.name
+      osfile.write(oschema)
+      osfile.close()
+
+      if not(os.path.exists(outschema)):
+        self.fail("Missing the schema file")
+
+      outpath = os.path.join(base_dir, "out")
+
+      args=[]
+
+      args.append("java")
+      args.append("-jar")
+      args.append(os.path.abspath("@TOPDIR@/../java/tools/target/avro-tools-@AVRO_VERSION@.jar"))
+
+
+      args.append("tether")
+      args.extend(["--in",inpath])
+      args.extend(["--out",outpath])
+      args.extend(["--outschema",outschema])
+      args.extend(["--protocol","http"])
+
+      # form the arguments for the subprocess
+      subargs=[]
+
+      srcfile=inspect.getsourcefile(tether_task_runner)
+
+      # Create a shell script to act as the program we want to execute
+      # We do this so we can set the python path appropriately
+      script="""#!/bin/bash
+export PYTHONPATH={0}
+python -m avro.tether.tether_task_runner word_count_task.WordCountTask
+"""
+      # We need to make sure avro is on the path
+      # getsourcefile(avro) returns .../avro/__init__.py
+      asrc=inspect.getsourcefile(avro)
+      apath=asrc.rsplit(os.sep,2)[0]
+
+      # path to where the tests lie
+      tpath=os.path.split(__file__)[0]
+
+      exhf=tempfile.NamedTemporaryFile(mode='w',prefix="exec_word_count_",delete=False)
+      exfile=exhf.name
+      exhf.write(script.format((os.pathsep).join([apath,tpath]),srcfile))
+      exhf.close()
+
+      # make it world executable
+      os.chmod(exfile,0755)
+
+      args.extend(["--program",exfile])
+
+      print "Command:\n\t{0}".format(" ".join(args))
+      proc=subprocess.Popen(args)
+
+
+      proc.wait()
+
+      # read the output
+      with file(os.path.join(outpath,"part-00000.avro")) as hf:
+        reader=DataFileReader(hf, DatumReader())
+        for record in reader:
+          self.assertEqual(record["value"],true_counts[record["key"]])
+
+        reader.close()
+
+    except Exception as e:
+      raise
+    finally:
+      # close the process
+      if proc is not None and proc.returncode is None:
+        proc.kill()
+      if os.path.exists(base_dir):
+        shutil.rmtree(base_dir)
+      if os.path.exists(exfile):
+        os.remove(exfile)
+
+if __name__== "__main__":
+  unittest.main()
diff --git a/lang/py/test/word_count_task.py b/lang/py/test/word_count_task.py
new file mode 100644
index 0000000..30dcc51
--- /dev/null
+++ b/lang/py/test/word_count_task.py
@@ -0,0 +1,96 @@
+"""
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+"""
+
+__all__=["WordCountTask"]
+
+from avro.tether import TetherTask
+
+import logging
+
+#TODO::Make the logging level a parameter we can set
+#logging.basicConfig(level=logging.INFO)
+class WordCountTask(TetherTask):
+  """
+  Implements the mapper and reducer for the word count example
+  """
+
+  def __init__(self):
+    """
+    """
+
+    inschema="""{"type":"string"}"""
+    midschema="""{"type":"record", "name":"Pair","namespace":"org.apache.avro.mapred","fields":[
+              {"name":"key","type":"string"},
+              {"name":"value","type":"long","order":"ignore"}]
+              }"""
+    outschema=midschema
+    TetherTask.__init__(self,inschema,midschema,outschema)
+
+
+    #keep track of the partial sums of the counts
+    self.psum=0
+
+
+  def map(self,record,collector):
+    """Implement the mapper for the word count example
+
+    Parameters
+    ----------------------------------------------------------------------------
+    record - The input record
+    collector - The collector to collect the output
+    """
+
+    words=record.split()
+
+    for w in words:
+      logging.info("WordCountTask.Map: word={0}".format(w))
+      collector.collect({"key":w,"value":1})
+
+  def reduce(self,record, collector):
+    """Called with input values to generate reducer output. Inputs are sorted by the mapper
+    key.
+
+    The reduce function is invoked once for each value belonging to a given key outputted
+    by the mapper.
+
+    Parameters
+    ----------------------------------------------------------------------------
+    record - The mapper output
+    collector - The collector to collect the output
+    """
+
+    self.psum+=record["value"]
+
+  def reduceFlush(self,record, collector):
+    """
+    Called with the last intermediate value in each equivalence run.
+    In other words, reduceFlush is invoked once for each key produced in the reduce
+    phase. It is called after reduce has been invoked on each value for the given key.
+
+    Parameters
+    ------------------------------------------------------------------
+    record - the last record on which reduce was invoked.
+    """
+
+    #collect the current record
+    logging.info("WordCountTask.reduceFlush key={0} value={1}".format(record["key"],self.psum))
+
+    collector.collect({"key":record["key"],"value":self.psum})
+
+    #reset the sum
+    self.psum=0
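
A WordCountTask is wired into the tether machinery through TaskRunner; the runner
tests above do essentially this (parent_port is assumed to be the port of a listening
parent or mock parent):

    runner=TaskRunner(WordCountTask())
    runner.start(outputport=parent_port,join=False)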
diff --git a/lang/py3/avro/schema.py b/lang/py3/avro/schema.py
index b5d17fe..c3f73c5 100644
--- a/lang/py3/avro/schema.py
+++ b/lang/py3/avro/schema.py
@@ -643,7 +643,7 @@ class PrimitiveSchema(Schema):
   Valid primitive types are defined in PRIMITIVE_TYPES.
   """
 
-  def __init__(self, type):
+  def __init__(self, type, other_props=None):
     """Initializes a new schema object for the specified primitive type.
 
     Args:
@@ -651,7 +651,7 @@ class PrimitiveSchema(Schema):
     """
     if type not in PRIMITIVE_TYPES:
       raise AvroException('%r is not a valid primitive type.' % type)
-    super(PrimitiveSchema, self).__init__(type)
+    super(PrimitiveSchema, self).__init__(type, other_props=other_props)
 
   @property
   def name(self):
@@ -752,7 +752,7 @@ class EnumSchema(NamedSchema):
         other_props=other_props,
     )
 
-    self._props['symbols'] = tuple(sorted(symbol_set))
+    self._props['symbols'] = symbols
     if doc is not None:
       self._props['doc'] = doc
 
@@ -1153,7 +1153,7 @@ def _SchemaFromJSONObject(json_object, names):
 
   if type in PRIMITIVE_TYPES:
     # FIXME should not ignore other properties
-    return PrimitiveSchema(type)
+    return PrimitiveSchema(type, other_props=other_props)
 
   elif type in NAMED_TYPES:
     name = json_object.get('name')
diff --git a/lang/py3/avro/tests/run_tests.py b/lang/py3/avro/tests/run_tests.py
index 738c8e5..d7e6512 100644
--- a/lang/py3/avro/tests/run_tests.py
+++ b/lang/py3/avro/tests/run_tests.py
@@ -54,6 +54,7 @@ from avro.tests.test_ipc import *
 from avro.tests.test_protocol import *
 from avro.tests.test_schema import *
 from avro.tests.test_script import *
+from avro.tests.test_enum import *
 
 
 def SetupLogging():
diff --git a/lang/py/test/test_datafile_interop.py b/lang/py3/avro/tests/test_enum.py
similarity index 54%
copy from lang/py/test/test_datafile_interop.py
copy to lang/py3/avro/tests/test_enum.py
index 8f4e883..7e55359 100644
--- a/lang/py/test/test_datafile_interop.py
+++ b/lang/py3/avro/tests/test_enum.py
@@ -1,39 +1,35 @@
+#!/usr/bin/env python3
+# -*- mode: python -*-
+# -*- coding: utf-8 -*-
+
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
+# regarding copyright ownership.  The ASF licenses this file
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 # http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import os
+
 import unittest
-from avro import io
-from avro import datafile
 
-class TestDataFileInterop(unittest.TestCase):
-  def test_interop(self):
-    print ''
-    print 'TEST INTEROP'
-    print '============'
-    print ''
-    for f in os.listdir('@INTEROP_DATA_DIR@'):
-      print 'READING %s' % f
-      print ''
+from avro import schema
+
+class TestEnum(unittest.TestCase):
+  def testSymbolsInOrder(self):
+    enum = schema.EnumSchema('Test', '', ['A', 'B'], schema.Names(), '', {})
+    self.assertEqual('A', enum.symbols[0])
 
-      # read data in binary from file
-      reader = open(os.path.join('@INTEROP_DATA_DIR@', f), 'rb')
-      datum_reader = io.DatumReader()
-      dfr = datafile.DataFileReader(reader, datum_reader)
-      for datum in dfr:
-        assert datum is not None
+  def testSymbolsInReverseOrder(self):
+    enum = schema.EnumSchema('Test', '', ['B', 'A'], schema.Names(), '', {})
+    self.assertEqual('B', enum.symbols[0])
 
 if __name__ == '__main__':
-  unittest.main()
+  raise Exception('Use run_tests.py')
diff --git a/lang/py3/avro/tests/test_schema.py b/lang/py3/avro/tests/test_schema.py
index 3aaa6b3..c836528 100644
--- a/lang/py3/avro/tests/test_schema.py
+++ b/lang/py3/avro/tests/test_schema.py
@@ -426,6 +426,11 @@ OTHER_PROP_EXAMPLES = [
     """,
     valid=True,
   ),
+  ExampleSchema("""
+    {"type": "long", "date": "true"}
+    """,
+    valid=True,
+  ),
 ]
 
 EXAMPLES = PRIMITIVE_EXAMPLES
diff --git a/lang/py3/setup.py b/lang/py3/setup.py
index 426ad1d..53b76ad 100644
--- a/lang/py3/setup.py
+++ b/lang/py3/setup.py
@@ -27,6 +27,9 @@ from setuptools import setup
 
 VERSION_FILE_NAME = 'VERSION.txt'
 
+# The following prevents distutils from using hardlinks (which may not always be
+# available, e.g. on a Docker volume). See http://bugs.python.org/issue8876
+del os.link
 
 def RunsFromSourceDist():
   """Tests whether setup.py is invoked from a source distribution.
@@ -120,7 +123,7 @@ def Main():
   avro_version = ReadVersion()
 
   setup(
-      name = 'avro-python3-snapshot',
+      name = 'avro-python3',
       version = avro_version,
       packages = ['avro'],
       package_dir = {'avro': 'avro'},
diff --git a/lang/ruby/.gitignore b/lang/ruby/.gitignore
new file mode 100644
index 0000000..1c2f433
--- /dev/null
+++ b/lang/ruby/.gitignore
@@ -0,0 +1 @@
+tmp
\ No newline at end of file
diff --git a/lang/ruby/CHANGELOG b/lang/ruby/CHANGELOG
new file mode 100644
index 0000000..16d7595
--- /dev/null
+++ b/lang/ruby/CHANGELOG
@@ -0,0 +1 @@
+v0.0.1 stuff
\ No newline at end of file
diff --git a/lang/ruby/Gemfile b/lang/ruby/Gemfile
new file mode 100644
index 0000000..20543da
--- /dev/null
+++ b/lang/ruby/Gemfile
@@ -0,0 +1,20 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+source 'https://rubygems.org' 
+gem 'rake'
+gem 'echoe'
+gem 'multi_json'
+gem 'snappy'
diff --git a/lang/ruby/Manifest b/lang/ruby/Manifest
new file mode 100644
index 0000000..fa2c728
--- /dev/null
+++ b/lang/ruby/Manifest
@@ -0,0 +1,22 @@
+CHANGELOG
+Manifest
+Rakefile
+avro.gemspec
+interop/test_interop.rb
+lib/avro.rb
+lib/avro/data_file.rb
+lib/avro/io.rb
+lib/avro/ipc.rb
+lib/avro/protocol.rb
+lib/avro/schema.rb
+test/random_data.rb
+test/sample_ipc_client.rb
+test/sample_ipc_http_client.rb
+test/sample_ipc_http_server.rb
+test/sample_ipc_server.rb
+test/test_datafile.rb
+test/test_help.rb
+test/test_io.rb
+test/test_protocol.rb
+test/test_socket_transport.rb
+test/tool.rb
diff --git a/lang/ruby/Rakefile b/lang/ruby/Rakefile
new file mode 100644
index 0000000..d1da5d0
--- /dev/null
+++ b/lang/ruby/Rakefile
@@ -0,0 +1,63 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+require 'rubygems'
+require 'echoe'
+VERSION = File.read('../../share/VERSION.txt').sub('-SNAPSHOT', '.pre1').chomp
+Echoe.new('avro', VERSION) do |p|
+  p.author = "Apache Software Foundation"
+  p.email = "avro-dev at hadoop.apache.org"
+  p.summary = "Apache Avro for Ruby"
+  p.description = "Avro is a data serialization and RPC format"
+  p.url = "http://hadoop.apache.org/avro/"
+  p.runtime_dependencies = %w[multi_json]
+end
+
+t = Rake::TestTask.new(:interop)
+t.pattern = 'interop/test*.rb'
+
+task :generate_interop do
+  $:.unshift(HERE + '/lib')
+  $:.unshift(HERE + '/test')
+  require 'avro'
+  require 'random_data'
+
+  schema = Avro::Schema.parse(File.read(SCHEMAS + '/interop.avsc'))
+  r = RandomData.new(schema, ENV['SEED'])
+  f = File.open(BUILD + '/interop/data/ruby.avro', 'w')
+  writer = Avro::DataFile::Writer.new(f, Avro::IO::DatumWriter.new(schema), schema)
+  begin
+    writer << r.next
+    writer << r.next
+  ensure
+    writer.close
+  end
+
+  Avro::DataFile.open(BUILD + '/interop/data/ruby_deflate.avro', 'w', schema.to_s, :deflate) do |writer|
+    20.times { writer << r.next }
+  end
+end
+
+
+HERE = File.expand_path(File.dirname(__FILE__))
+SHARE = HERE + '/../../share'
+SCHEMAS = SHARE + '/test/schemas'
+BUILD = HERE + '/../../build'
+
+task :dist => [:gem] do
+  mkdir_p "../../dist/ruby"
+  cp "pkg/avro-#{VERSION}.gem", "../../dist/ruby"
+end
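
The Rakefile above derives the gem version from the shared VERSION.txt,
mapping the Maven-style "-SNAPSHOT" suffix onto RubyGems prerelease naming.
A worked example of that substitution (the input string here is illustrative):

    # Hypothetical input; the real value is read from share/VERSION.txt.
    raw = "1.8.0-SNAPSHOT\n"
    version = raw.sub('-SNAPSHOT', '.pre1').chomp
    # => "1.8.0.pre1" -- RubyGems treats ".pre1" as a prerelease,
    # so snapshot gems sort before the final "1.8.0" release.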
diff --git a/lang/ruby/build.sh b/lang/ruby/build.sh
new file mode 100755
index 0000000..8fea565
--- /dev/null
+++ b/lang/ruby/build.sh
@@ -0,0 +1,50 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+# connect to avro ruby root directory
+cd `dirname "$0"`
+
+# maintain our gems here
+export GEM_HOME=.gem/
+export PATH="$PATH:.gem/bin"
+
+# bootstrap bundler
+gem install --no-rdoc --no-ri bundler
+
+case "$1" in
+     test)
+        bundle install
+        bundle exec rake test
+       ;;
+
+     dist)
+        bundle exec rake dist
+       ;;
+
+     clean)
+        bundle exec rake clean
+       ;;
+
+     *)
+       echo "Usage: $0 {test|dist|clean}"
+       exit 1
+
+esac
+
+exit 0
diff --git a/lang/py/test/test_datafile_interop.py b/lang/ruby/interop/test_interop.rb
similarity index 51%
copy from lang/py/test/test_datafile_interop.py
copy to lang/ruby/interop/test_interop.rb
index 8f4e883..d027631 100644
--- a/lang/py/test/test_datafile_interop.py
+++ b/lang/ruby/interop/test_interop.rb
@@ -1,3 +1,4 @@
+#!/usr/bin/env ruby
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -13,27 +14,28 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import os
-import unittest
-from avro import io
-from avro import datafile
 
-class TestDataFileInterop(unittest.TestCase):
-  def test_interop(self):
-    print ''
-    print 'TEST INTEROP'
-    print '============'
-    print ''
-    for f in os.listdir('@INTEROP_DATA_DIR@'):
-      print 'READING %s' % f
-      print ''
+require 'rubygems'
+require 'test/unit'
+require 'avro'
 
-      # read data in binary from file
-      reader = open(os.path.join('@INTEROP_DATA_DIR@', f), 'rb')
-      datum_reader = io.DatumReader()
-      dfr = datafile.DataFileReader(reader, datum_reader)
-      for datum in dfr:
-        assert datum is not None
+class TestInterop < Test::Unit::TestCase
+  HERE = File.expand_path(File.dirname(__FILE__))
+  SHARE = HERE + '/../../../share'
+  SCHEMAS = SHARE + '/test/schemas'
+  Dir[HERE + '/../../../build/interop/data/*'].each do |fn|
+    define_method("test_read_#{File.basename(fn, '.avro')}") do
+      projection = Avro::Schema.parse(File.read(SCHEMAS+'/interop.avsc'))
 
-if __name__ == '__main__':
-  unittest.main()
+      File.open(fn) do |f|
+        r = Avro::DataFile::Reader.new(f, Avro::IO::DatumReader.new(projection))
+        i = 0
+        r.each do |datum|
+          i += 1
+          assert_not_nil datum, "nil datum from #{fn}"
+        end
+        assert_not_equal 0, i, "no data read in from #{fn}"
+      end
+    end
+  end
+end
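
The interop test above enumerates whatever data files the other language
implementations have left under build/interop/data and uses define_method to
generate one test per file, so a failure names the offending file instead of
aborting a single loop. A minimal sketch of the same pattern, with a
hypothetical data directory:

    require 'test/unit'

    class TestPerFile < Test::Unit::TestCase
      # One test method per file; 'data/*.avro' is illustrative.
      Dir['data/*.avro'].each do |fn|
        define_method("test_read_#{File.basename(fn, '.avro')}") do
          assert File.size(fn) > 0, "empty file: #{fn}"
        end
      end
    end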
diff --git a/lang/py/test/test_datafile_interop.py b/lang/ruby/lib/avro.rb
similarity index 55%
copy from lang/py/test/test_datafile_interop.py
copy to lang/ruby/lib/avro.rb
index 8f4e883..902dcd8 100644
--- a/lang/py/test/test_datafile_interop.py
+++ b/lang/ruby/lib/avro.rb
@@ -5,35 +5,37 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 # http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import os
-import unittest
-from avro import io
-from avro import datafile
 
-class TestDataFileInterop(unittest.TestCase):
-  def test_interop(self):
-    print ''
-    print 'TEST INTEROP'
-    print '============'
-    print ''
-    for f in os.listdir('@INTEROP_DATA_DIR@'):
-      print 'READING %s' % f
-      print ''
+require 'multi_json'
+require 'set'
+require 'digest/md5'
+require 'net/http'
+require 'stringio'
+require 'zlib'
 
-      # read data in binary from file
-      reader = open(os.path.join('@INTEROP_DATA_DIR@', f), 'rb')
-      datum_reader = io.DatumReader()
-      dfr = datafile.DataFileReader(reader, datum_reader)
-      for datum in dfr:
-        assert datum is not None
+module Avro
+  VERSION = "FIXME"
 
-if __name__ == '__main__':
-  unittest.main()
+  class AvroError < StandardError; end
+
+  class AvroTypeError < Avro::AvroError
+    def initialize(schm=nil, datum=nil, msg=nil)
+      msg ||= "Not a #{schm.to_s}: #{datum}"
+      super(msg)
+    end
+  end
+end
+
+require 'avro/schema'
+require 'avro/io'
+require 'avro/data_file'
+require 'avro/protocol'
+require 'avro/ipc'
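
With the entry point above, requiring 'avro' pulls in the schema, IO, data
file, protocol, and IPC layers in dependency order. A minimal smoke test,
assuming the gem (or lang/ruby/lib) is on the load path:

    require 'avro'

    schema = Avro::Schema.parse('{"type": "string"}')
    begin
      # Writing a non-string datum fails validation and raises
      # Avro::IO::AvroTypeError, a subclass of Avro::AvroError.
      writer = Avro::IO::DatumWriter.new(schema)
      writer.write(42, Avro::IO::BinaryEncoder.new(StringIO.new('', 'w')))
    rescue Avro::AvroError => e
      puts e.message
    end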
diff --git a/lang/ruby/lib/avro/data_file.rb b/lang/ruby/lib/avro/data_file.rb
new file mode 100644
index 0000000..c27c2dc
--- /dev/null
+++ b/lang/ruby/lib/avro/data_file.rb
@@ -0,0 +1,366 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+# http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+require 'openssl'
+
+module Avro
+  module DataFile
+    VERSION = 1
+    MAGIC = "Obj" + [VERSION].pack('c')
+    MAGIC.force_encoding('BINARY') if MAGIC.respond_to?(:force_encoding)
+    MAGIC_SIZE = MAGIC.respond_to?(:bytesize) ? MAGIC.bytesize : MAGIC.size
+    SYNC_SIZE = 16
+    SYNC_INTERVAL = 4000 * SYNC_SIZE
+    META_SCHEMA = Schema.parse('{"type": "map", "values": "bytes"}')
+    VALID_ENCODINGS = ['binary'] # not used yet
+
+    class DataFileError < AvroError; end
+
+    def self.open(file_path, mode='r', schema=nil, codec=nil)
+      schema = Avro::Schema.parse(schema) if schema
+      case mode
+      when 'w'
+        unless schema
+          raise DataFileError, "Writing an Avro file requires a schema."
+        end
+        io = open_writer(File.open(file_path, 'wb'), schema, codec)
+      when 'r'
+        io = open_reader(File.open(file_path, 'rb'), schema)
+      else
+        raise DataFileError, "Only modes 'r' and 'w' allowed. You gave #{mode.inspect}."
+      end
+
+      yield io if block_given?
+      io
+    ensure
+      io.close if block_given? && io
+    end
+
+    def self.codecs
+      @codecs
+    end
+
+    def self.register_codec(codec)
+      @codecs ||= {}
+      codec = codec.new if !codec.respond_to?(:codec_name) && codec.is_a?(Class)
+      @codecs[codec.codec_name.to_s] = codec
+    end
+
+    def self.get_codec(codec)
+      codec ||= 'null'
+      if codec.respond_to?(:compress) && codec.respond_to?(:decompress)
+        codec # it's a codec instance
+      elsif codec.is_a?(Class)
+        codec.new # it's a codec class
+      elsif @codecs.include?(codec.to_s)
+        @codecs[codec.to_s] # it's a string or symbol (codec name)
+      else
+        raise DataFileError, "Unknown codec: #{codec.inspect}"
+      end
+    end
+
+    class << self
+      private
+      def open_writer(file, schema, codec=nil)
+        writer = Avro::IO::DatumWriter.new(schema)
+        Avro::DataFile::Writer.new(file, writer, schema, codec)
+      end
+
+      def open_reader(file, schema)
+        reader = Avro::IO::DatumReader.new(nil, schema)
+        Avro::DataFile::Reader.new(file, reader)
+      end
+    end
+
+    class Writer
+      def self.generate_sync_marker
+        OpenSSL::Random.random_bytes(16)
+      end
+
+      attr_reader :writer, :encoder, :datum_writer, :buffer_writer, :buffer_encoder, :sync_marker, :meta, :codec
+      attr_accessor :block_count
+
+      def initialize(writer, datum_writer, writers_schema=nil, codec=nil, meta={})
+        # If writers_schema is not present, presume we're appending
+        @writer = writer
+        @encoder = IO::BinaryEncoder.new(@writer)
+        @datum_writer = datum_writer
+        @meta = meta
+        @buffer_writer = StringIO.new('', 'w')
+        @buffer_writer.set_encoding('BINARY') if @buffer_writer.respond_to?(:set_encoding)
+        @buffer_encoder = IO::BinaryEncoder.new(@buffer_writer)
+        @block_count = 0
+
+        if writers_schema
+          @sync_marker = Writer.generate_sync_marker
+          @codec = DataFile.get_codec(codec)
+          @meta['avro.codec'] = @codec.codec_name.to_s
+          @meta['avro.schema'] = writers_schema.to_s
+          datum_writer.writers_schema = writers_schema
+          write_header
+        else
+          # open writer for reading to collect metadata
+          dfr = Reader.new(writer, Avro::IO::DatumReader.new)
+
+          # FIXME(jmhodges): collect arbitrary metadata
+          # collect metadata
+          @sync_marker = dfr.sync_marker
+          @meta['avro.codec'] = dfr.meta['avro.codec']
+          @codec = DataFile.get_codec(meta['avro.codec'])
+
+          # get schema used to write existing file
+          schema_from_file = dfr.meta['avro.schema']
+          @meta['avro.schema'] = schema_from_file
+          datum_writer.writers_schema = Schema.parse(schema_from_file)
+
+          # seek to the end of the file and prepare for writing
+          writer.seek(0,2)
+        end
+      end
+
+      # Append a datum to the file
+      def <<(datum)
+        datum_writer.write(datum, buffer_encoder)
+        self.block_count += 1
+
+        # if the data to write is larger than the sync interval, write
+        # the block
+        if buffer_writer.tell >= SYNC_INTERVAL
+          write_block
+        end
+      end
+
+      # Return the current position as a value that may be passed to
+      # DataFileReader.seek(long). Forces the end of the current block,
+      # emitting a synchronization marker.
+      def sync
+        write_block
+        writer.tell
+      end
+
+      # Flush the current state of the file, including metadata
+      def flush
+        write_block
+        writer.flush
+      end
+
+      def close
+        flush
+        writer.close
+      end
+
+      private
+
+      def write_header
+        # write magic
+        writer.write(MAGIC)
+
+        # write metadata
+        datum_writer.write_data(META_SCHEMA, meta, encoder)
+
+        # write sync marker
+        writer.write(sync_marker)
+      end
+
+      # TODO(jmhodges): make a schema for blocks and use datum_writer
+      # TODO(jmhodges): do we really need the number of items in the block?
+      def write_block
+        if block_count > 0
+          # write number of items in block and block size in bytes
+          encoder.write_long(block_count)
+          to_write = codec.compress(buffer_writer.string)
+          encoder.write_long(to_write.respond_to?(:bytesize) ? to_write.bytesize : to_write.size)
+
+          # write block contents
+          writer.write(to_write)
+
+          # write sync marker
+          writer.write(sync_marker)
+
+          # reset buffer
+          buffer_writer.truncate(0)
+          buffer_writer.rewind
+          self.block_count = 0
+        end
+      end
+    end
+
+    # Read files written by DataFileWriter
+    class Reader
+      include ::Enumerable
+
+      # The reader and binary decoder for the raw file stream
+      attr_reader :reader, :decoder
+
+      # The binary decoder for the contents of a block (after codec decompression)
+      attr_reader :block_decoder
+
+      attr_reader :datum_reader, :sync_marker, :meta, :file_length, :codec
+      attr_accessor :block_count # records remaining in current block
+
+      def initialize(reader, datum_reader)
+        @reader = reader
+        @decoder = IO::BinaryDecoder.new(reader)
+        @datum_reader = datum_reader
+
+        # read the header: magic, meta, sync
+        read_header
+
+        @codec = DataFile.get_codec(meta['avro.codec'])
+
+        # get ready to read
+        @block_count = 0
+        datum_reader.writers_schema = Schema.parse meta['avro.schema']
+      end
+
+      # Iterates through each datum in this file
+      # TODO(jmhodges): handle block of length zero
+      def each
+        loop do
+          if block_count == 0
+            case
+            when eof?; break
+            when skip_sync
+              break if eof?
+              read_block_header
+            else
+              read_block_header
+            end
+          end
+
+          datum = datum_reader.read(block_decoder)
+          self.block_count -= 1
+          yield(datum)
+        end
+      end
+
+      def eof?; reader.eof?; end
+
+      def close
+        reader.close
+      end
+
+      private
+      def read_header
+        # seek to the beginning of the file to get magic block
+        reader.seek(0, 0)
+
+        # check magic number
+        magic_in_file = reader.read(MAGIC_SIZE)
+        if magic_in_file.size < MAGIC_SIZE
+          msg = 'Not an Avro data file: shorter than the Avro magic block'
+          raise DataFileError, msg
+        elsif magic_in_file != MAGIC
+          msg = "Not an Avro data file: #{magic_in_file.inspect} doesn't match #{MAGIC.inspect}"
+          raise DataFileError, msg
+        end
+
+        # read metadata
+        @meta = datum_reader.read_data(META_SCHEMA,
+                                       META_SCHEMA,
+                                       decoder)
+        # read sync marker
+        @sync_marker = reader.read(SYNC_SIZE)
+      end
+
+      def read_block_header
+        self.block_count = decoder.read_long
+        block_bytes = decoder.read_long
+        data = codec.decompress(reader.read(block_bytes))
+        @block_decoder = IO::BinaryDecoder.new(StringIO.new(data))
+      end
+
+      # read the length of the sync marker; if it matches the sync
+      # marker, return true. Otherwise, seek back to where we started
+      # and return false
+      def skip_sync
+        proposed_sync_marker = reader.read(SYNC_SIZE)
+        if proposed_sync_marker != sync_marker
+          reader.seek(-SYNC_SIZE, 1)
+          false
+        else
+          true
+        end
+      end
+    end
+
+
+    class NullCodec
+      def codec_name; 'null'; end
+      def decompress(data); data; end
+      def compress(data); data; end
+    end
+
+    class DeflateCodec
+      attr_reader :level
+
+      def initialize(level=Zlib::DEFAULT_COMPRESSION)
+        @level = level
+      end
+
+      def codec_name; 'deflate'; end
+
+      def decompress(compressed)
+        # Passing a negative number to Inflate puts it into "raw" RFC1951 mode
+        # (without the RFC1950 header & checksum). See the docs for
+        # inflateInit2 in http://www.zlib.net/manual.html
+        zstream = Zlib::Inflate.new(-Zlib::MAX_WBITS)
+        data = zstream.inflate(compressed)
+        data << zstream.finish
+      ensure
+        zstream.close
+      end
+
+      def compress(data)
+        zstream = Zlib::Deflate.new(level, -Zlib::MAX_WBITS)
+        compressed = zstream.deflate(data)
+        compressed << zstream.finish
+      ensure
+        zstream.close
+      end
+    end
+
+    class SnappyCodec
+      def codec_name; 'snappy'; end
+
+      def decompress(data)
+        load_snappy!
+        Snappy.inflate(data)
+      end
+
+      def compress(data)
+        load_snappy!
+        Snappy.deflate(data)
+      end
+
+      private
+
+      def load_snappy!
+        require 'snappy' unless defined?(Snappy)
+      rescue LoadError
+        raise LoadError, "Snappy compression is not available, please install the `snappy` gem."
+      end
+    end
+
+    DataFile.register_codec NullCodec
+    DataFile.register_codec DeflateCodec
+    DataFile.register_codec SnappyCodec
+
+    # TODO this constant won't be updated if you register another codec.
+    # Deprecated in favor of Avro::DataFile::codecs
+    VALID_CODECS = DataFile.codecs.keys
+  end
+end
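
Taken together, DataFile.open, Writer, and Reader give a block-based round
trip over the object container format defined above. A minimal sketch,
assuming a writable ./users.avro path:

    require 'avro'

    SCHEMA = <<-JSON
      {"type": "record", "name": "User",
       "fields": [{"name": "name", "type": "string"}]}
    JSON

    # Mode 'w' requires a schema; :deflate is resolved to DeflateCodec
    # through DataFile.get_codec.
    Avro::DataFile.open('users.avro', 'w', SCHEMA, :deflate) do |writer|
      writer << {'name' => 'alyssa'}
      writer << {'name' => 'ben'}
    end

    # On read, the writer's schema and codec are recovered from the file
    # header metadata, so neither needs to be supplied again.
    Avro::DataFile.open('users.avro') do |reader|
      reader.each { |user| puts user['name'] }
    end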
diff --git a/lang/ruby/lib/avro/io.rb b/lang/ruby/lib/avro/io.rb
new file mode 100644
index 0000000..9dc41db
--- /dev/null
+++ b/lang/ruby/lib/avro/io.rb
@@ -0,0 +1,615 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+# http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+module Avro
+  module IO
+    # Raised when a datum is not an example of the schema
+    class AvroTypeError < AvroError
+      def initialize(expected_schema, datum)
+        super("The datum #{datum.inspect} is not an example of schema #{expected_schema}")
+      end
+    end
+
+    # Raised when the writer's and reader's schemas do not match
+    class SchemaMatchException < AvroError
+      def initialize(writers_schema, readers_schema)
+        super("Writer's schema #{writers_schema} and Reader's schema " +
+              "#{readers_schema} do not match.")
+      end
+    end
+
+    # FIXME(jmhodges) move validate to this module?
+
+    class BinaryDecoder
+      # Read leaf values
+
+      # reader is an object on which we can call read, seek and tell.
+      attr_reader :reader
+      def initialize(reader)
+        @reader = reader
+      end
+
+      def byte!
+        @reader.read(1).unpack('C').first
+      end
+      
+      def read_null
+        # null is written as zero bytes
+        nil
+      end
+
+      def read_boolean
+        byte! == 1
+      end
+
+      def read_int; read_long; end
+
+      def read_long
+        # int and long values are written using variable-length,
+        # zig-zag coding.
+        b = byte!
+        n = b & 0x7F
+        shift = 7
+        while (b & 0x80) != 0
+          b = byte!
+          n |= (b & 0x7F) << shift
+          shift += 7
+        end
+        (n >> 1) ^ -(n & 1)
+      end
+
+      def read_float
+        # A float is written as 4 bytes.
+        # The float is converted into a 32-bit integer using a method
+        # equivalent to Java's floatToIntBits and then encoded in
+        # little-endian format.
+        @reader.read(4).unpack('e')[0]
+      end
+
+      def read_double
+        #  A double is written as 8 bytes.
+        # The double is converted into a 64-bit integer using a method
+        # equivalent to Java's doubleToLongBits and then encoded in
+        # little-endian format.
+        @reader.read(8).unpack('E')[0]
+      end
+
+      def read_bytes
+        # Bytes are encoded as a long followed by that many bytes of
+        # data.
+        read(read_long)
+      end
+
+      def read_string
+        # A string is encoded as a long followed by that many bytes of
+        # UTF-8 encoded character data.
+        read_bytes.tap do |string|
+          string.force_encoding("UTF-8") if string.respond_to? :force_encoding
+        end
+      end
+
+      def read(len)
+        # Read len bytes
+        @reader.read(len)
+      end
+
+      def skip_null
+        nil
+      end
+
+      def skip_boolean
+        skip(1)
+      end
+
+      def skip_int
+        skip_long
+      end
+
+      def skip_long
+        b = byte!
+        while (b & 0x80) != 0
+          b = byte!
+        end
+      end
+
+      def skip_float
+        skip(4)
+      end
+
+      def skip_double
+        skip(8)
+      end
+
+      def skip_bytes
+        skip(read_long)
+      end
+
+      def skip_string
+        skip_bytes
+      end
+
+      def skip(n)
+        reader.seek(reader.tell() + n)
+      end
+    end
+
+    # Write leaf values
+    class BinaryEncoder
+      attr_reader :writer
+
+      def initialize(writer)
+        @writer = writer
+      end
+
+      # null is written as zero bytes
+      def write_null(datum)
+        nil
+      end
+
+      # a boolean is written as a single byte 
+      # whose value is either 0 (false) or 1 (true).
+      def write_boolean(datum)
+        on_disk = datum ? 1.chr : 0.chr
+        writer.write(on_disk)
+      end
+
+      # int and long values are written using variable-length,
+      # zig-zag coding.
+      def write_int(n)
+        write_long(n)
+      end
+
+      # int and long values are written using variable-length,
+      # zig-zag coding.
+      def write_long(n)
+        n = (n << 1) ^ (n >> 63)
+        while (n & ~0x7F) != 0
+          @writer.write(((n & 0x7f) | 0x80).chr)
+          n >>= 7
+        end
+        @writer.write(n.chr)
+      end
+
+      # A float is written as 4 bytes.
+      # The float is converted into a 32-bit integer using a method
+      # equivalent to Java's floatToIntBits and then encoded in
+      # little-endian format.
+      def write_float(datum)
+        @writer.write([datum].pack('e'))
+      end
+
+      # A double is written as 8 bytes.
+      # The double is converted into a 64-bit integer using a method
+      # equivalent to Java's doubleToLongBits and then encoded in
+      # little-endian format.
+      def write_double(datum)
+        @writer.write([datum].pack('E'))
+      end
+
+      # Bytes are encoded as a long followed by that many bytes of data.
+      def write_bytes(datum)
+        write_long(datum.bytesize)
+        @writer.write(datum)
+      end
+
+      # A string is encoded as a long followed by that many bytes of
+      # UTF-8 encoded character data
+      def write_string(datum)
+        # FIXME utf-8 encode this in 1.9
+        write_bytes(datum)
+      end
+
+      # Write an arbitrary datum.
+      def write(datum)
+        writer.write(datum)
+      end
+    end
+
+    class DatumReader
+      def self.match_schemas(writers_schema, readers_schema)
+        w_type = writers_schema.type_sym
+        r_type = readers_schema.type_sym
+
+        # This conditional is begging for some OO love.
+        if w_type == :union || r_type == :union
+          return true
+        end
+
+        if w_type == r_type
+          return true if Schema::PRIMITIVE_TYPES_SYM.include?(r_type)
+
+          case r_type
+          when :record
+            return writers_schema.fullname == readers_schema.fullname
+          when :error
+            return writers_schema.fullname == readers_schema.fullname
+          when :request
+            return true
+          when :fixed
+            return writers_schema.fullname == readers_schema.fullname &&
+                   writers_schema.size == readers_schema.size
+          when :enum
+            return writers_schema.fullname == readers_schema.fullname
+          when :map
+            return writers_schema.values.type == readers_schema.values.type
+          when :array
+            return writers_schema.items.type == readers_schema.items.type
+          end
+        end
+
+        # Handle schema promotion
+        if w_type == :int && [:long, :float, :double].include?(r_type)
+          return true
+        elsif w_type == :long && [:float, :double].include?(r_type)
+          return true
+        elsif w_type == :float && r_type == :double
+          return true
+        end
+
+        return false
+      end
+
+      attr_accessor :writers_schema, :readers_schema
+
+      def initialize(writers_schema=nil, readers_schema=nil)
+        @writers_schema = writers_schema
+        @readers_schema = readers_schema
+      end
+
+      def read(decoder)
+        self.readers_schema = writers_schema unless readers_schema
+        read_data(writers_schema, readers_schema, decoder)
+      end
+
+      def read_data(writers_schema, readers_schema, decoder)
+        # schema matching
+        unless self.class.match_schemas(writers_schema, readers_schema)
+          raise SchemaMatchException.new(writers_schema, readers_schema)
+        end
+
+        # schema resolution: reader's schema is a union, writer's
+        # schema is not
+        if writers_schema.type_sym != :union && readers_schema.type_sym == :union
+          rs = readers_schema.schemas.find{|s|
+            self.class.match_schemas(writers_schema, s)
+          }
+          return read_data(writers_schema, rs, decoder) if rs
+          raise SchemaMatchException.new(writers_schema, readers_schema)
+        end
+
+        # function dispatch for reading data based on type of writer's
+        # schema
+        case writers_schema.type_sym
+        when :null;    decoder.read_null
+        when :boolean; decoder.read_boolean
+        when :string;  decoder.read_string
+        when :int;     decoder.read_int
+        when :long;    decoder.read_long
+        when :float;   decoder.read_float
+        when :double;  decoder.read_double
+        when :bytes;   decoder.read_bytes
+        when :fixed;   read_fixed(writers_schema, readers_schema, decoder)
+        when :enum;    read_enum(writers_schema, readers_schema, decoder)
+        when :array;   read_array(writers_schema, readers_schema, decoder)
+        when :map;     read_map(writers_schema, readers_schema, decoder)
+        when :union;   read_union(writers_schema, readers_schema, decoder)
+        when :record, :error, :request;  read_record(writers_schema, readers_schema, decoder)
+        else
+          raise AvroError, "Cannot read unknown schema type: #{writers_schema.type}"
+        end
+      end
+
+      def read_fixed(writers_schema, readers_schema, decoder)
+        decoder.read(writers_schema.size)
+      end
+
+      def read_enum(writers_schema, readers_schema, decoder)
+        index_of_symbol = decoder.read_int
+        read_symbol = writers_schema.symbols[index_of_symbol]
+
+        # TODO(jmhodges): figure out what unset means for resolution
+        # schema resolution
+        unless readers_schema.symbols.include?(read_symbol)
+          # 'unset' here
+        end
+
+        read_symbol
+      end
+
+      def read_array(writers_schema, readers_schema, decoder)
+        read_items = []
+        block_count = decoder.read_long
+        while block_count != 0
+          if block_count < 0
+            block_count = -block_count
+            block_size = decoder.read_long
+          end
+          block_count.times do
+            read_items << read_data(writers_schema.items,
+                                    readers_schema.items,
+                                    decoder)
+          end
+          block_count = decoder.read_long
+        end
+
+        read_items
+      end
+
+      def read_map(writers_schema, readers_schema, decoder)
+        read_items = {}
+        block_count = decoder.read_long
+        while block_count != 0
+          if block_count < 0
+            block_count = -block_count
+            block_size = decoder.read_long
+          end
+          block_count.times do
+            key = decoder.read_string
+            read_items[key] = read_data(writers_schema.values,
+                                        readers_schema.values,
+                                        decoder)
+          end
+          block_count = decoder.read_long
+        end
+
+        read_items
+      end
+
+      def read_union(writers_schema, readers_schema, decoder)
+        index_of_schema = decoder.read_long
+        selected_writers_schema = writers_schema.schemas[index_of_schema]
+
+        read_data(selected_writers_schema, readers_schema, decoder)
+      end
+
+      def read_record(writers_schema, readers_schema, decoder)
+        readers_fields_hash = readers_schema.fields_hash
+        read_record = {}
+        writers_schema.fields.each do |field|
+          if readers_field = readers_fields_hash[field.name]
+            field_val = read_data(field.type, readers_field.type, decoder)
+            read_record[field.name] = field_val
+          else
+            skip_data(field.type, decoder)
+          end
+        end
+
+        # fill in the default values
+        if readers_fields_hash.size > read_record.size
+          writers_fields_hash = writers_schema.fields_hash
+          readers_fields_hash.each do |field_name, field|
+            unless writers_fields_hash.has_key? field_name
+              if !field.default.nil?
+                field_val = read_default_value(field.type, field.default)
+                read_record[field.name] = field_val
+              else
+                # FIXME(jmhodges) another 'unset' here
+              end
+            end
+          end
+        end
+
+        read_record
+      end
+
+      def read_default_value(field_schema, default_value)
+        # Basically a JSON Decoder?
+        case field_schema.type_sym
+        when :null
+          return nil
+        when :boolean
+          return default_value
+        when :int, :long
+          return Integer(default_value)
+        when :float, :double
+          return Float(default_value)
+        when :enum, :fixed, :string, :bytes
+          return default_value
+        when :array
+          read_array = []
+          default_value.each do |json_val|
+            item_val = read_default_value(field_schema.items, json_val)
+            read_array << item_val
+          end
+          return read_array
+        when :map
+          read_map = {}
+          default_value.each do |key, json_val|
+            map_val = read_default_value(field_schema.values, json_val)
+            read_map[key] = map_val
+          end
+          return read_map
+        when :union
+          return read_default_value(field_schema.schemas[0], default_value)
+        when :record, :error
+          read_record = {}
+          field_schema.fields.each do |field|
+            json_val = default_value[field.name]
+            json_val = field.default unless json_val
+            field_val = read_default_value(field.type, json_val)
+            read_record[field.name] = field_val
+          end
+          return read_record
+        else
+          fail_msg = "Unknown type: #{field_schema.type}"
+          raise AvroError, fail_msg
+        end
+      end
+
+      def skip_data(writers_schema, decoder)
+        case writers_schema.type_sym
+        when :null
+          decoder.skip_null
+        when :boolean
+          decoder.skip_boolean
+        when :string
+          decoder.skip_string
+        when :int
+          decoder.skip_int
+        when :long
+          decoder.skip_long
+        when :float
+          decoder.skip_float
+        when :double
+          decoder.skip_double
+        when :bytes
+          decoder.skip_bytes
+        when :fixed
+          skip_fixed(writers_schema, decoder)
+        when :enum
+          skip_enum(writers_schema, decoder)
+        when :array
+          skip_array(writers_schema, decoder)
+        when :map
+          skip_map(writers_schema, decoder)
+        when :union
+          skip_union(writers_schema, decoder)
+        when :record, :error, :request
+          skip_record(writers_schema, decoder)
+        else
+          raise AvroError, "Unknown schema type: #{writers_schema.type}"
+        end
+      end
+
+      def skip_fixed(writers_schema, decoder)
+        decoder.skip(writers_schema.size)
+      end
+
+      def skip_enum(writers_schema, decoder)
+        decoder.skip_int
+      end
+
+      def skip_union(writers_schema, decoder)
+        index = decoder.read_long
+        skip_data(writers_schema.schemas[index], decoder)
+      end
+
+      def skip_array(writers_schema, decoder)
+        skip_blocks(decoder) { skip_data(writers_schema.items, decoder) }
+      end
+
+      def skip_map(writers_schema, decoder)
+        skip_blocks(decoder) {
+          decoder.skip_string
+          skip_data(writers_schema.values, decoder)
+        }
+      end
+
+      def skip_record(writers_schema, decoder)
+        writers_schema.fields.each{|f| skip_data(f.type, decoder) }
+      end
+
+      private
+      def skip_blocks(decoder, &blk)
+        block_count = decoder.read_long
+        while block_count != 0
+          if block_count < 0
+            decoder.skip(decoder.read_long)
+          else
+            block_count.times &blk
+          end
+          block_count = decoder.read_long
+        end
+      end
+    end # DatumReader
+
+    # DatumWriter for generic ruby objects
+    class DatumWriter
+      attr_accessor :writers_schema
+      def initialize(writers_schema=nil)
+        @writers_schema = writers_schema
+      end
+
+      def write(datum, encoder)
+        write_data(writers_schema, datum, encoder)
+      end
+
+      def write_data(writers_schema, datum, encoder)
+        unless Schema.validate(writers_schema, datum)
+          raise AvroTypeError.new(writers_schema, datum)
+        end
+
+        # function dispatch to write datum
+        case writers_schema.type_sym
+        when :null;    encoder.write_null(datum)
+        when :boolean; encoder.write_boolean(datum)
+        when :string;  encoder.write_string(datum)
+        when :int;     encoder.write_int(datum)
+        when :long;    encoder.write_long(datum)
+        when :float;   encoder.write_float(datum)
+        when :double;  encoder.write_double(datum)
+        when :bytes;   encoder.write_bytes(datum)
+        when :fixed;   write_fixed(writers_schema, datum, encoder)
+        when :enum;    write_enum(writers_schema, datum, encoder)
+        when :array;   write_array(writers_schema, datum, encoder)
+        when :map;     write_map(writers_schema, datum, encoder)
+        when :union;   write_union(writers_schema, datum, encoder)
+        when :record, :error, :request;  write_record(writers_schema, datum, encoder)
+        else
+          raise AvroError.new("Unknown type: #{writers_schema.type}")
+        end
+      end
+
+      def write_fixed(writers_schema, datum, encoder)
+        encoder.write(datum)
+      end
+
+      def write_enum(writers_schema, datum, encoder)
+        index_of_datum = writers_schema.symbols.index(datum)
+        encoder.write_int(index_of_datum)
+      end
+
+      def write_array(writers_schema, datum, encoder)
+        if datum.size > 0
+          encoder.write_long(datum.size)
+          datum.each do |item|
+            write_data(writers_schema.items, item, encoder)
+          end
+        end
+        encoder.write_long(0)
+      end
+
+      def write_map(writers_schema, datum, encoder)
+        if datum.size > 0
+          encoder.write_long(datum.size)
+          datum.each do |k,v|
+            encoder.write_string(k)
+            write_data(writers_schema.values, v, encoder)
+          end
+        end
+        encoder.write_long(0)
+      end
+
+      def write_union(writers_schema, datum, encoder)
+        index_of_schema = -1
+        found = writers_schema.schemas.
+          find{|e| index_of_schema += 1; found = Schema.validate(e, datum) }
+        unless found  # Because find_index doesn't exist in 1.8.6
+          raise AvroTypeError.new(writers_schema, datum)
+        end
+        encoder.write_long(index_of_schema)
+        write_data(writers_schema.schemas[index_of_schema], datum, encoder)
+      end
+
+      def write_record(writers_schema, datum, encoder)
+        writers_schema.fields.each do |field|
+          write_data(field.type, datum[field.name], encoder)
+        end
+      end
+    end # DatumWriter
+  end
+end
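
The int/long wire format implemented above is zig-zag varint coding: the
sign bit is folded into the low bit so small negative values stay short,
then the result is emitted seven bits at a time with a continuation bit. A
round trip through the encoder and decoder, with one value's byte-level
expansion spelled out:

    require 'avro'
    require 'stringio'

    buf = StringIO.new('', 'w+')
    Avro::IO::BinaryEncoder.new(buf).write_long(-64)
    # zig-zag: (-64 << 1) ^ (-64 >> 63) = -128 ^ -1 = 127 (0x7F), which
    # fits in one byte with the continuation bit clear.
    p buf.string.unpack('C*')                     # => [127]

    buf.rewind
    p Avro::IO::BinaryDecoder.new(buf).read_long  # => -64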
diff --git a/lang/ruby/lib/avro/ipc.rb b/lang/ruby/lib/avro/ipc.rb
new file mode 100644
index 0000000..1ac8129
--- /dev/null
+++ b/lang/ruby/lib/avro/ipc.rb
@@ -0,0 +1,550 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+# http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+require "net/http"
+
+module Avro::IPC
+
+  class AvroRemoteError < Avro::AvroError; end
+
+  HANDSHAKE_REQUEST_SCHEMA = Avro::Schema.parse <<-JSON
+  {
+    "type": "record",
+    "name": "HandshakeRequest", "namespace":"org.apache.avro.ipc",
+    "fields": [
+      {"name": "clientHash",
+       "type": {"type": "fixed", "name": "MD5", "size": 16}},
+      {"name": "clientProtocol", "type": ["null", "string"]},
+      {"name": "serverHash", "type": "MD5"},
+      {"name": "meta", "type": ["null", {"type": "map", "values": "bytes"}]}
+    ]
+  }
+  JSON
+
+  HANDSHAKE_RESPONSE_SCHEMA = Avro::Schema.parse <<-JSON
+  {
+    "type": "record",
+    "name": "HandshakeResponse", "namespace": "org.apache.avro.ipc",
+    "fields": [
+      {"name": "match",
+       "type": {"type": "enum", "name": "HandshakeMatch",
+                "symbols": ["BOTH", "CLIENT", "NONE"]}},
+      {"name": "serverProtocol", "type": ["null", "string"]},
+      {"name": "serverHash",
+       "type": ["null", {"type": "fixed", "name": "MD5", "size": 16}]},
+      {"name": "meta",
+       "type": ["null", {"type": "map", "values": "bytes"}]}
+    ]
+  }
+  JSON
+
+  HANDSHAKE_REQUESTOR_WRITER = Avro::IO::DatumWriter.new(HANDSHAKE_REQUEST_SCHEMA)
+  HANDSHAKE_REQUESTOR_READER = Avro::IO::DatumReader.new(HANDSHAKE_RESPONSE_SCHEMA)
+  HANDSHAKE_RESPONDER_WRITER = Avro::IO::DatumWriter.new(HANDSHAKE_RESPONSE_SCHEMA)
+  HANDSHAKE_RESPONDER_READER = Avro::IO::DatumReader.new(HANDSHAKE_REQUEST_SCHEMA)
+
+  META_SCHEMA = Avro::Schema.parse('{"type": "map", "values": "bytes"}')
+  META_WRITER = Avro::IO::DatumWriter.new(META_SCHEMA)
+  META_READER = Avro::IO::DatumReader.new(META_SCHEMA)
+
+  SYSTEM_ERROR_SCHEMA = Avro::Schema.parse('["string"]')
+
+  # protocol cache
+  REMOTE_HASHES = {}
+  REMOTE_PROTOCOLS = {}
+
+  BUFFER_HEADER_LENGTH = 4
+  BUFFER_SIZE = 8192
+
+  # Raised when an error message is sent by an Avro requestor or responder.
+  class AvroRemoteException < Avro::AvroError; end
+
+  class ConnectionClosedException < Avro::AvroError; end
+
+  class Requestor
+    # Base class for the client side of a protocol interaction.
+    attr_reader :local_protocol, :transport
+    attr_accessor :remote_protocol, :remote_hash, :send_protocol
+
+    def initialize(local_protocol, transport)
+      @local_protocol = local_protocol
+      @transport = transport
+      @remote_protocol = nil
+      @remote_hash = nil
+      @send_protocol = nil
+    end
+
+    def remote_protocol=(new_remote_protocol)
+      @remote_protocol = new_remote_protocol
+      REMOTE_PROTOCOLS[transport.remote_name] = remote_protocol
+    end
+
+    def remote_hash=(new_remote_hash)
+      @remote_hash = new_remote_hash
+      REMOTE_HASHES[transport.remote_name] = remote_hash
+    end
+
+    def request(message_name, request_datum)
+      # Writes a request message and reads a response or error message.
+      # build handshake and call request
+      buffer_writer = StringIO.new('', 'w+')
+      buffer_encoder = Avro::IO::BinaryEncoder.new(buffer_writer)
+      write_handshake_request(buffer_encoder)
+      write_call_request(message_name, request_datum, buffer_encoder)
+
+      # send the handshake and call request;  block until call response
+      call_request = buffer_writer.string
+      call_response = transport.transceive(call_request)
+
+      # process the handshake and call response
+      buffer_decoder = Avro::IO::BinaryDecoder.new(StringIO.new(call_response))
+      if read_handshake_response(buffer_decoder)
+        read_call_response(message_name, buffer_decoder)
+      else
+        request(message_name, request_datum)
+      end
+    end
+
+    def write_handshake_request(encoder)
+      local_hash = local_protocol.md5
+      remote_name = transport.remote_name
+      remote_hash = REMOTE_HASHES[remote_name]
+      unless remote_hash
+        remote_hash = local_hash
+        self.remote_protocol = local_protocol
+      end
+      request_datum = {
+        'clientHash' => local_hash,
+        'serverHash' => remote_hash
+      }
+      if send_protocol
+        request_datum['clientProtocol'] = local_protocol.to_s
+      end
+      HANDSHAKE_REQUESTOR_WRITER.write(request_datum, encoder)
+    end
+
+    def write_call_request(message_name, request_datum, encoder)
+      # The format of a call request is:
+      #   * request metadata, a map with values of type bytes
+      #   * the message name, an Avro string, followed by
+      #   * the message parameters. Parameters are serialized according to
+      #     the message's request declaration.
+
+      # TODO request metadata (not yet implemented)
+      request_metadata = {}
+      META_WRITER.write(request_metadata, encoder)
+
+      message = local_protocol.messages[message_name]
+      unless message
+        raise AvroError, "Unknown message: #{message_name}"
+      end
+      encoder.write_string(message.name)
+
+      write_request(message.request, request_datum, encoder)
+    end
+
+    def write_request(request_schema, request_datum, encoder)
+      datum_writer = Avro::IO::DatumWriter.new(request_schema)
+      datum_writer.write(request_datum, encoder)
+    end
+
+    def read_handshake_response(decoder)
+      handshake_response = HANDSHAKE_REQUESTOR_READER.read(decoder)
+      we_have_matching_schema = false
+
+      case handshake_response['match']
+      when 'BOTH'
+        self.send_protocol = false
+        we_have_matching_schema = true
+      when 'CLIENT'
+        raise AvroError.new('Handshake failure. match == CLIENT') if send_protocol
+        self.remote_protocol = Avro::Protocol.parse(handshake_response['serverProtocol'])
+        self.remote_hash = handshake_response['serverHash']
+        self.send_protocol = false
+        we_have_matching_schema = true
+      when 'NONE'
+        raise AvroError.new('Handshake failure. match == NONE') if send_protocol
+        self.remote_protocol = Avro::Protocol.parse(handshake_response['serverProtocol'])
+        self.remote_hash = handshake_response['serverHash']
+        self.send_protocol = true
+      else
+        raise AvroError.new("Unexpected match: #{handshake_response['match']}")
+      end
+
+      return we_have_matching_schema
+    end
+
+    def read_call_response(message_name, decoder)
+      # The format of a call response is:
+      #   * response metadata, a map with values of type bytes
+      #   * a one-byte error flag boolean, followed by either:
+      #     * if the error flag is false,
+      #       the message response, serialized per the message's response schema.
+      #     * if the error flag is true, 
+      #       the error, serialized per the message's error union schema.
+      response_metadata = META_READER.read(decoder)
+
+      # remote response schema
+      remote_message_schema = remote_protocol.messages[message_name]
+      raise AvroError.new("Unknown remote message: #{message_name}") unless remote_message_schema
+
+      # local response schema
+      local_message_schema = local_protocol.messages[message_name]
+      unless local_message_schema
+        raise AvroError.new("Unknown local message: #{message_name}")
+      end
+
+      # error flag
+      if !decoder.read_boolean
+        writers_schema = remote_message_schema.response
+        readers_schema = local_message_schema.response
+        read_response(writers_schema, readers_schema, decoder)
+      else
+        writers_schema = remote_message_schema.errors || SYSTEM_ERROR_SCHEMA
+        readers_schema = local_message_schema.errors || SYSTEM_ERROR_SCHEMA
+        raise read_error(writers_schema, readers_schema, decoder)
+      end
+    end
+
+    def read_response(writers_schema, readers_schema, decoder)
+      datum_reader = Avro::IO::DatumReader.new(writers_schema, readers_schema)
+      datum_reader.read(decoder)
+    end
+
+    def read_error(writers_schema, readers_schema, decoder)
+      datum_reader = Avro::IO::DatumReader.new(writers_schema, readers_schema)
+      AvroRemoteError.new(datum_reader.read(decoder))
+    end
+  end
+
+  # Base class for the server side of a protocol interaction.
+  class Responder
+    attr_reader :local_protocol, :local_hash, :protocol_cache
+    def initialize(local_protocol)
+      @local_protocol = local_protocol
+      @local_hash = self.local_protocol.md5
+      @protocol_cache = {}
+      protocol_cache[local_hash] = local_protocol
+    end
+
+    # Called by a server to deserialize a request, compute and serialize
+    # a response or error. Compare to 'handle()' in Thrift.
+    def respond(call_request, transport=nil)
+      buffer_decoder = Avro::IO::BinaryDecoder.new(StringIO.new(call_request))
+      buffer_writer = StringIO.new('', 'w+')
+      buffer_encoder = Avro::IO::BinaryEncoder.new(buffer_writer)
+      error = nil
+      response_metadata = {}
+
+      begin
+        remote_protocol = process_handshake(buffer_decoder, buffer_encoder, transport)
+        # handshake failure
+        unless remote_protocol
+          return buffer_writer.string
+        end
+
+        # read request using remote protocol
+        request_metadata = META_READER.read(buffer_decoder)
+        remote_message_name = buffer_decoder.read_string
+
+        # get remote and local request schemas so we can do
+        # schema resolution (one fine day)
+        remote_message = remote_protocol.messages[remote_message_name]
+        unless remote_message
+          raise AvroError.new("Unknown remote message: #{remote_message_name}")
+        end
+        local_message = local_protocol.messages[remote_message_name]
+        unless local_message
+          raise AvroError.new("Unknown local message: #{remote_message_name}")
+        end
+        writers_schema = remote_message.request
+        readers_schema = local_message.request
+        request = read_request(writers_schema, readers_schema, buffer_decoder)
+        # perform server logic
+        begin
+          response = call(local_message, request)
+        rescue AvroRemoteError => e
+          error = e
+        rescue Exception => e
+          error = AvroRemoteError.new(e.to_s)
+        end
+
+        # write response using local protocol
+        META_WRITER.write(response_metadata, buffer_encoder)
+        buffer_encoder.write_boolean(!!error)
+        if error.nil?
+          writers_schema = local_message.response
+          write_response(writers_schema, response, buffer_encoder)
+        else
+          writers_schema = local_message.errors || SYSTEM_ERROR_SCHEMA
+          write_error(writers_schema, error, buffer_encoder)
+        end
+      rescue Avro::AvroError => e
+        error = AvroRemoteException.new(e.to_s)
+        buffer_encoder = Avro::IO::BinaryEncoder.new(StringIO.new)
+        META_WRITER.write(response_metadata, buffer_encoder)
+        buffer_encoder.write_boolean(true)
+        self.write_error(SYSTEM_ERROR_SCHEMA, error, buffer_encoder)
+      end
+      buffer_writer.string
+    end
+
+    def process_handshake(decoder, encoder, connection=nil)
+      if connection && connection.is_connected?
+        return connection.protocol
+      end
+      handshake_request = HANDSHAKE_RESPONDER_READER.read(decoder)
+      handshake_response = {}
+
+      # determine the remote protocol
+      client_hash = handshake_request['clientHash']
+      client_protocol = handshake_request['clientProtocol']
+      remote_protocol = protocol_cache[client_hash]
+
+      if !remote_protocol && client_protocol
+        remote_protocol = Avro::Protocol.parse(client_protocol)
+        protocol_cache[client_hash] = remote_protocol
+      end
+
+      # evaluate remote's guess of the local protocol
+      server_hash = handshake_request['serverHash']
+      if local_hash == server_hash
+        if !remote_protocol
+          handshake_response['match'] = 'NONE'
+        else
+          handshake_response['match'] = 'BOTH'
+        end
+      else
+        if !remote_protocol
+          handshake_response['match'] = 'NONE'
+        else
+          handshake_response['match'] = 'CLIENT'
+        end
+      end
+
+      if handshake_response['match'] != 'BOTH'
+        handshake_response['serverProtocol'] = local_protocol.to_s
+        handshake_response['serverHash'] = local_hash
+      end
+
+      HANDSHAKE_RESPONDER_WRITER.write(handshake_response, encoder)
+
+      if connection && handshake_response['match'] != 'NONE'
+        connection.protocol = remote_protocol
+      end
+
+      remote_protocol
+    end
+
+    def call(local_message, request)
+      # Actual work done by server: cf. handler in thrift.
+      raise NotImplementedError
+    end
+
+    def read_request(writers_schema, readers_schema, decoder)
+      datum_reader = Avro::IO::DatumReader.new(writers_schema, readers_schema)
+      datum_reader.read(decoder)
+    end
+
+    def write_response(writers_schema, response_datum, encoder)
+      datum_writer = Avro::IO::DatumWriter.new(writers_schema)
+      datum_writer.write(response_datum, encoder)
+    end
+
+    def write_error(writers_schema, error_exception, encoder)
+      datum_writer = Avro::IO::DatumWriter.new(writers_schema)
+      datum_writer.write(error_exception.to_s, encoder)
+    end
+  end
+
+  class SocketTransport
+    # A simple socket-based Transport implementation.
+
+    attr_reader :sock, :remote_name
+    attr_accessor :protocol
+
+    def initialize(sock)
+      @sock = sock
+      @protocol = nil
+    end
+
+    def is_connected?()
+      !!@protocol
+    end
+
+    def transceive(request)
+      write_framed_message(request)
+      read_framed_message
+    end
+
+    def read_framed_message
+      message = []
+      loop do
+        buffer = StringIO.new
+        buffer_length = read_buffer_length
+        if buffer_length == 0
+          return message.join
+        end
+        while buffer.tell < buffer_length
+          chunk = sock.read(buffer_length - buffer.tell)
+          if chunk == ''
+            raise ConnectionClosedException.new("Socket read 0 bytes.")
+          end
+          buffer.write(chunk)
+        end
+        message << buffer.string
+      end
+    end
+
+    def write_framed_message(message)
+      message_length = message.size
+      total_bytes_sent = 0
+      while message_length - total_bytes_sent > 0
+        if message_length - total_bytes_sent > BUFFER_SIZE
+          buffer_length = BUFFER_SIZE
+        else
+          buffer_length = message_length - total_bytes_sent
+        end
+        write_buffer(message[total_bytes_sent,buffer_length])
+        total_bytes_sent += buffer_length
+      end
+      # A message is always terminated by a zero-length buffer.
+      write_buffer_length(0)
+    end
+
+    def write_buffer(chunk)
+      buffer_length = chunk.size
+      write_buffer_length(buffer_length)
+      total_bytes_sent = 0
+      while total_bytes_sent < buffer_length
+        bytes_sent = self.sock.write(chunk[total_bytes_sent..-1])
+        if bytes_sent == 0
+          raise ConnectionClosedException.new("Socket sent 0 bytes.")
+        end
+        total_bytes_sent += bytes_sent
+      end
+    end
+
+    def write_buffer_length(n)
+      bytes_sent = sock.write([n].pack('N'))
+      if bytes_sent == 0
+        raise ConnectionClosedException.new("socket sent 0 bytes")
+      end
+    end
+
+    def read_buffer_length
+      read = sock.read(BUFFER_HEADER_LENGTH)
+      if read == '' || read == nil
+        raise ConnectionClosedException.new("Socket read 0 bytes.")
+      end
+      read.unpack('N')[0]
+    end
+
+    def close
+      sock.close
+    end
+  end
+
+  class ConnectionClosedError < StandardError; end
+
+  class FramedWriter
+    attr_reader :writer
+    def initialize(writer)
+      @writer = writer
+    end
+
+    def write_framed_message(message)
+      message_size = message.size
+      total_bytes_sent = 0
+      while message_size - total_bytes_sent > 0
+        if message_size - total_bytes_sent > BUFFER_SIZE
+          buffer_size = BUFFER_SIZE
+        else
+          buffer_size = message_size - total_bytes_sent
+        end
+        write_buffer(message[total_bytes_sent, buffer_size])
+        total_bytes_sent += buffer_size
+      end
+      write_buffer_size(0)
+    end
+
+    def to_s; writer.string; end
+
+    private
+    def write_buffer(chunk)
+      buffer_size = chunk.size
+      write_buffer_size(buffer_size)
+      writer << chunk
+    end
+
+    def write_buffer_size(n)
+      writer.write([n].pack('N'))
+    end
+  end
+
+  class FramedReader
+    attr_reader :reader
+
+    def initialize(reader)
+      @reader = reader
+    end
+
+    def read_framed_message
+      message = []
+      loop do
+        buffer = ""
+        buffer_size = read_buffer_size
+
+        return message.join if buffer_size == 0
+
+        while buffer.size < buffer_size
+          chunk = reader.read(buffer_size - buffer.size)
+          chunk_error?(chunk)
+          buffer << chunk
+        end
+        message << buffer
+      end
+    end
+
+    private
+    def read_buffer_size
+      header = reader.read(BUFFER_HEADER_LENGTH)
+      chunk_error?(header)
+      header.unpack('N')[0]
+    end
+
+    def chunk_error?(chunk)
+      raise ConnectionClosedError.new("Reader read 0 bytes") if chunk == '' || chunk.nil?
+    end
+  end
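+
+  # Illustrative round trip through the two framing helpers above
+  # (hypothetical payload, StringIO in place of a real socket):
+  #
+  #   writer = FramedWriter.new(StringIO.new)
+  #   writer.write_framed_message("hello")
+  #   FramedReader.new(StringIO.new(writer.to_s)).read_framed_message
+  #   # => "hello"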
+
+  # Only works for clients; there is no server-side HTTP transport in this file.
+  class HTTPTransceiver
+    attr_reader :remote_name, :host, :port
+    def initialize(host, port)
+      @host, @port = host, port
+      @remote_name = "#{host}:#{port}"
+      @conn = Net::HTTP.start host, port
+    end
+
+    def transceive(message)
+      writer = FramedWriter.new(StringIO.new)
+      writer.write_framed_message(message)
+      resp = @conn.post('/', writer.to_s, {'Content-Type' => 'avro/binary'})
+      FramedReader.new(StringIO.new(resp.body)).read_framed_message
+    end
+  end
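+
+  # Illustrative client-side use (hypothetical host and port; Requestor is
+  # the requestor class used by the sample clients under lang/ruby/test):
+  #
+  #   transceiver = HTTPTransceiver.new('localhost', 9090)
+  #   requestor = Requestor.new(protocol, transceiver)
+  #   result = requestor.request('send', params)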
+end
diff --git a/lang/ruby/lib/avro/protocol.rb b/lang/ruby/lib/avro/protocol.rb
new file mode 100644
index 0000000..6c210e1
--- /dev/null
+++ b/lang/ruby/lib/avro/protocol.rb
@@ -0,0 +1,161 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+module Avro
+  class Protocol
+    VALID_TYPE_SCHEMA_TYPES = Set.new(%w[enum record error fixed])
+    VALID_TYPE_SCHEMA_TYPES_SYM = Set.new(VALID_TYPE_SCHEMA_TYPES.map(&:to_sym))
+    class ProtocolParseError < Avro::AvroError; end
+
+    attr_reader :name, :namespace, :types, :messages, :md5
+    def self.parse(protocol_string)
+      json_data = MultiJson.load(protocol_string)
+
+      if json_data.is_a? Hash
+        name = json_data['protocol']
+        namespace = json_data['namespace']
+        types = json_data['types']
+        messages = json_data['messages']
+        Protocol.new(name, namespace, types, messages)
+      else
+        raise ProtocolParseError, "Not a JSON object: #{json_data}"
+      end
+    end
+
+    def initialize(name, namespace=nil, types=nil, messages=nil)
+      # Ensure valid ctor args
+      if !name
+        raise ProtocolParseError, 'Protocols must have a non-empty name.'
+      elsif !name.is_a?(String)
+        raise ProtocolParseError, 'The name property must be a string.'
+      elsif !namespace.is_a?(String)
+        raise ProtocolParseError, 'The namespace property must be a string.'
+      elsif !types.is_a?(Array)
+        raise ProtocolParseError, 'The types property must be a list.'
+      elsif !messages.is_a?(Hash)
+        raise ProtocolParseError, 'The messages property must be a JSON object.'
+      end
+
+      @name = name
+      @namespace = namespace
+      type_names = {}
+      @types = parse_types(types, type_names)
+      @messages = parse_messages(messages, type_names)
+      @md5 = Digest::MD5.digest(to_s)
+    end
+
+    def to_s
+      MultiJson.dump to_avro
+    end
+
+    def ==(other)
+      to_avro == other.to_avro
+    end
+
+    private
+    def parse_types(types, type_names)
+      types.collect do |type|
+        # FIXME adding type.name to type_names is not defined in the
+        # spec. Possible bug in the python impl and the spec.
+        type_object = Schema.real_parse(type, type_names, namespace)
+        unless VALID_TYPE_SCHEMA_TYPES_SYM.include?(type_object.type_sym)
+          msg = "Type #{type} not an enum, record, fixed or error."
+          raise ProtocolParseError, msg
+        end
+        type_object
+      end
+    end
+
+    def parse_messages(messages, names)
+      message_objects = {}
+      messages.each do |name, body|
+        if message_objects.has_key?(name)
+          raise ProtocolParseError, "Message name \"#{name}\" repeated."
+        elsif !body.is_a?(Hash)
+          raise ProtocolParseError, "Message name \"#{name}\" has non-object body #{body.inspect}"
+        end
+
+        request  = body['request']
+        response = body['response']
+        errors   = body['errors']
+        message_objects[name] = Message.new(name, request, response, errors, names, namespace)
+      end
+      message_objects
+    end
+
+    protected
+    def to_avro(names=Set.new)
+      hsh = {'protocol' => name}
+      hsh['namespace'] = namespace if namespace
+      hsh['types'] = types.map{|t| t.to_avro(names) } if types
+
+      if messages
+        hsh['messages'] = messages.inject({}) {|h, (k,t)| h[k] = t.to_avro(names); h }
+      end
+
+      hsh
+    end
+
+    class Message
+      attr_reader :name, :request, :response, :errors, :default_namespace
+
+      def initialize(name, request, response, errors=nil, names=nil, default_namespace=nil)
+        @name = name
+        @default_namespace = default_namespace
+        @request = parse_request(request, names)
+        @response = parse_response(response, names)
+        @errors = parse_errors(errors, names) if errors
+      end
+
+      def to_avro(names=Set.new)
+        {
+          'request' => request.to_avro(names),
+          'response' => response.to_avro(names)
+        }.tap do |hash|
+          hash['errors'] = errors.to_avro(names) if errors
+        end
+      end
+
+      def to_s
+        MultiJson.dump to_avro
+      end
+
+      def parse_request(request, names)
+        unless request.is_a?(Array)
+          raise ProtocolParseError, "Request property not an Array: #{request.inspect}"
+        end
+        Schema::RecordSchema.new(nil, default_namespace, request, names, :request)
+      end
+
+      def parse_response(response, names)
+        if response.is_a?(String) && names
+          fullname = Name.make_fullname(response, default_namespace)
+          return names[fullname] if names.include?(fullname)
+        end
+
+        Schema.real_parse(response, names, default_namespace)
+      end
+
+      def parse_errors(errors, names)
+        unless errors.is_a?(Array)
+          raise ProtocolParseError, "Errors property not an Array: #{errors}"
+        end
+        Schema.real_parse(errors, names, default_namespace)
+      end
+    end
+  end
+end
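+
+# Illustrative use of the parser above (a minimal hypothetical protocol; the
+# Mail samples under lang/ruby/test exercise a fuller one):
+#
+#   protocol = Avro::Protocol.parse(<<-JSON)
+#     {"protocol": "Ping", "namespace": "example",
+#      "types": [],
+#      "messages": {"ping": {"request": [], "response": "string"}}}
+#   JSON
+#   protocol.name  # => "Ping"
+#   protocol.md5   # => 16-byte digest of the canonical JSON form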
diff --git a/lang/ruby/lib/avro/schema.rb b/lang/ruby/lib/avro/schema.rb
new file mode 100644
index 0000000..1421577
--- /dev/null
+++ b/lang/ruby/lib/avro/schema.rb
@@ -0,0 +1,413 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+module Avro
+  class Schema
+    # Sets of strings, for backwards compatibility. See below for sets of symbols,
+    # for better performance.
+    PRIMITIVE_TYPES = Set.new(%w[null boolean string bytes int long float double])
+    NAMED_TYPES =     Set.new(%w[fixed enum record error])
+
+    VALID_TYPES = PRIMITIVE_TYPES + NAMED_TYPES + Set.new(%w[array map union request])
+
+    PRIMITIVE_TYPES_SYM = Set.new(PRIMITIVE_TYPES.map(&:to_sym))
+    NAMED_TYPES_SYM     = Set.new(NAMED_TYPES.map(&:to_sym))
+    VALID_TYPES_SYM     = Set.new(VALID_TYPES.map(&:to_sym))
+
+    INT_MIN_VALUE = -(1 << 31)
+    INT_MAX_VALUE = (1 << 31) - 1
+    LONG_MIN_VALUE = -(1 << 63)
+    LONG_MAX_VALUE = (1 << 63) - 1
+
+    def self.parse(json_string)
+      real_parse(MultiJson.load(json_string), {})
+    end
+
+    # Build Avro Schema from data parsed out of JSON string.
+    def self.real_parse(json_obj, names=nil, default_namespace=nil)
+      if json_obj.is_a? Hash
+        type = json_obj['type']
+        raise SchemaParseError, %Q(No "type" property: #{json_obj}) if type.nil?
+
+        # Check that the type is valid before calling #to_sym, since symbols are never garbage
+        # collected (important to avoid DoS if we're accepting schemas from untrusted clients)
+        unless VALID_TYPES.include?(type)
+          raise SchemaParseError, "Unknown type: #{type}"
+        end
+
+        type_sym = type.to_sym
+        if PRIMITIVE_TYPES_SYM.include?(type_sym)
+          return PrimitiveSchema.new(type_sym)
+
+        elsif NAMED_TYPES_SYM.include? type_sym
+          name = json_obj['name']
+          namespace = json_obj.include?('namespace') ? json_obj['namespace'] : default_namespace
+          case type_sym
+          when :fixed
+            size = json_obj['size']
+            return FixedSchema.new(name, namespace, size, names)
+          when :enum
+            symbols = json_obj['symbols']
+            return EnumSchema.new(name, namespace, symbols, names)
+          when :record, :error
+            fields = json_obj['fields']
+            return RecordSchema.new(name, namespace, fields, names, type_sym)
+          else
+            raise SchemaParseError.new("Unknown named type: #{type}")
+          end
+
+        else
+          case type_sym
+          when :array
+            return ArraySchema.new(json_obj['items'], names, default_namespace)
+          when :map
+            return MapSchema.new(json_obj['values'], names, default_namespace)
+          else
+            raise SchemaParseError.new("Unknown Valid Type: #{type}")
+          end
+        end
+
+      elsif json_obj.is_a? Array
+        # JSON array (union)
+        return UnionSchema.new(json_obj, names, default_namespace)
+      elsif PRIMITIVE_TYPES.include? json_obj
+        return PrimitiveSchema.new(json_obj)
+      else
+        raise UnknownSchemaError.new(json_obj)
+      end
+    end
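+
+    # The JSON shapes accepted above, illustrated (comments only):
+    #   "string"                                     -> PrimitiveSchema
+    #   {"type": "fixed", "name": "MD5", "size": 16} -> FixedSchema
+    #   {"type": "array", "items": "long"}           -> ArraySchema
+    #   ["null", "string"]                           -> UnionSchema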
+
+    # Determine if a ruby datum is an instance of a schema
+    def self.validate(expected_schema, datum)
+      case expected_schema.type_sym
+      when :null
+        datum.nil?
+      when :boolean
+        datum == true || datum == false
+      when :string, :bytes
+        datum.is_a? String
+      when :int
+        (datum.is_a?(Fixnum) || datum.is_a?(Bignum)) &&
+            (INT_MIN_VALUE <= datum) && (datum <= INT_MAX_VALUE)
+      when :long
+        (datum.is_a?(Fixnum) || datum.is_a?(Bignum)) &&
+            (LONG_MIN_VALUE <= datum) && (datum <= LONG_MAX_VALUE)
+      when :float, :double
+        datum.is_a?(Float) || datum.is_a?(Fixnum) || datum.is_a?(Bignum)
+      when :fixed
+        datum.is_a?(String) && datum.size == expected_schema.size
+      when :enum
+        expected_schema.symbols.include? datum
+      when :array
+        datum.is_a?(Array) &&
+          datum.all?{|d| validate(expected_schema.items, d) }
+      when :map
+        datum.is_a?(Hash) &&
+          datum.keys.all?{|k| k.is_a? String } &&
+          datum.values.all?{|v| validate(expected_schema.values, v) }
+      when :union
+        expected_schema.schemas.any?{|s| validate(s, datum) }
+      when :record, :error, :request
+        datum.is_a?(Hash) &&
+          expected_schema.fields.all?{|f| validate(f.type, datum[f.name]) }
+      else
+        raise "you suck #{expected_schema.inspect} is not allowed."
+      end
+    end
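+
+    # Illustrative checks against the rules above (hypothetical data):
+    #   Schema.validate(Schema.parse('"int"'), 5)                # => true
+    #   Schema.validate(Schema.parse('"int"'), 1 << 40)          # => false (outside int range)
+    #   Schema.validate(Schema.parse('["null", "string"]'), nil) # => true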
+
+    def initialize(type)
+      @type_sym = type.is_a?(Symbol) ? type : type.to_sym
+    end
+
+    attr_reader :type_sym
+
+    # Returns the type as a string (rather than a symbol), for backwards compatibility.
+    # Deprecated in favor of {#type_sym}.
+    def type; @type_sym.to_s; end
+
+    def ==(other, seen=nil)
+      other.is_a?(Schema) && type_sym == other.type_sym
+    end
+
+    def hash(seen=nil)
+      type_sym.hash
+    end
+
+    def subparse(json_obj, names=nil, namespace=nil)
+      if json_obj.is_a?(String) && names
+        fullname = Name.make_fullname(json_obj, namespace)
+        return names[fullname] if names.include?(fullname)
+      end
+
+      begin
+        Schema.real_parse(json_obj, names, namespace)
+      rescue => e
+        raise e if e.is_a? SchemaParseError
+        raise SchemaParseError, "Sub-schema for #{self.class.name} not a valid Avro schema. Bad schema: #{json_obj}"
+      end
+    end
+
+    def to_avro(names=nil)
+      {'type' => type}
+    end
+
+    def to_s
+      MultiJson.dump to_avro
+    end
+
+    class NamedSchema < Schema
+      attr_reader :name, :namespace
+      def initialize(type, name, namespace=nil, names=nil)
+        super(type)
+        @name, @namespace = Name.extract_namespace(name, namespace)
+        names = Name.add_name(names, self)
+      end
+
+      def to_avro(names=Set.new)
+        if @name
+          return fullname if names.include?(fullname)
+          names << fullname
+        end
+        props = {'name' => @name}
+        props.merge!('namespace' => @namespace) if @namespace
+        super.merge props
+      end
+
+      def fullname
+        @fullname ||= Name.make_fullname(@name, @namespace)
+      end
+    end
+
+    class RecordSchema < NamedSchema
+      attr_reader :fields
+
+      def self.make_field_objects(field_data, names, namespace=nil)
+        field_objects, field_names = [], Set.new
+        field_data.each_with_index do |field, i|
+          if field.respond_to?(:[]) # TODO(jmhodges): tighten this check; anything responding to #[] passes
+            type = field['type']
+            name = field['name']
+            default = field['default']
+            order = field['order']
+            new_field = Field.new(type, name, default, order, names, namespace)
+            # make sure field name has not been used yet
+            if field_names.include?(new_field.name)
+              raise SchemaParseError, "Field name #{new_field.name.inspect} is already in use"
+            end
+            field_names << new_field.name
+          else
+            raise SchemaParseError, "Not a valid field: #{field}"
+          end
+          field_objects << new_field
+        end
+        field_objects
+      end
+
+      def initialize(name, namespace, fields, names=nil, schema_type=:record)
+        if schema_type == :request || schema_type == 'request'
+          @type_sym = schema_type.to_sym
+          @namespace = namespace
+        else
+          super(schema_type, name, namespace, names)
+        end
+        @fields = RecordSchema.make_field_objects(fields, names, self.namespace)
+      end
+
+      def fields_hash
+        @fields_hash ||= fields.inject({}){|hsh, field| hsh[field.name] = field; hsh }
+      end
+
+      def to_avro(names=Set.new)
+        hsh = super
+        return hsh unless hsh.is_a?(Hash)
+        hsh['fields'] = @fields.map {|f| f.to_avro(names) }
+        if type_sym == :request
+          hsh['fields']
+        else
+          hsh
+        end
+      end
+    end
+
+    class ArraySchema < Schema
+      attr_reader :items
+
+      def initialize(items, names=nil, default_namespace=nil)
+        super(:array)
+        @items = subparse(items, names, default_namespace)
+      end
+
+      def to_avro(names=Set.new)
+        super.merge('items' => items.to_avro(names))
+      end
+    end
+
+    class MapSchema < Schema
+      attr_reader :values
+
+      def initialize(values, names=nil, default_namespace=nil)
+        super(:map)
+        @values = subparse(values, names, default_namespace)
+      end
+
+      def to_avro(names=Set.new)
+        super.merge('values' => values.to_avro(names))
+      end
+    end
+
+    class UnionSchema < Schema
+      attr_reader :schemas
+
+      def initialize(schemas, names=nil, default_namespace=nil)
+        super(:union)
+
+        schema_objects = []
+        schemas.each do |schema|
+          new_schema = subparse(schema, names, default_namespace)
+          ns_type = new_schema.type_sym
+
+          # Unnamed types may appear at most once in a union; unions may not
+          # nest. Named types are distinguished by name, so duplicates of
+          # those pass this check.
+          if VALID_TYPES_SYM.include?(ns_type) &&
+              !NAMED_TYPES_SYM.include?(ns_type) &&
+              schema_objects.any?{|o| o.type_sym == ns_type }
+            raise SchemaParseError, "#{ns_type} is already in Union"
+          elsif ns_type == :union
+            raise SchemaParseError, "Unions cannot contain other unions"
+          else
+            schema_objects << new_schema
+          end
+        end
+        # Assign once, after the loop, so an empty union yields [] rather than nil.
+        @schemas = schema_objects
+      end
+
+      def to_avro(names=Set.new)
+        schemas.map {|schema| schema.to_avro(names) }
+      end
+    end
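+
+    # A union appears in JSON as a bare array, e.g.
+    #   ["null", "string", {"type": "array", "items": "long"}]
+    # Per the checks above, repeating an unnamed type or nesting another
+    # union raises SchemaParseError.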
+
+    class EnumSchema < NamedSchema
+      attr_reader :symbols
+      def initialize(name, space, symbols, names=nil)
+        if symbols.uniq.length < symbols.length
+          fail_msg = "Duplicate symbols in enum: #{symbols.inspect}"
+          raise Avro::SchemaParseError, fail_msg
+        end
+        super(:enum, name, space, names)
+        @symbols = symbols
+      end
+
+      def to_avro(names=Set.new)
+        avro = super
+        avro.is_a?(Hash) ? avro.merge('symbols' => symbols) : avro
+      end
+    end
+
+    # Valid primitive types are in PRIMITIVE_TYPES.
+    class PrimitiveSchema < Schema
+      def initialize(type)
+        if PRIMITIVE_TYPES_SYM.include?(type)
+          super(type)
+        elsif PRIMITIVE_TYPES.include?(type)
+          super(type.to_sym)
+        else
+          raise AvroError.new("#{type} is not a valid primitive type.")
+        end
+      end
+
+      def to_avro(names=nil)
+        hsh = super
+        hsh.size == 1 ? type : hsh
+      end
+    end
+
+    class FixedSchema < NamedSchema
+      attr_reader :size
+      def initialize(name, space, size, names=nil)
+        # Ensure valid ctor args
+        unless size.is_a?(Fixnum) || size.is_a?(Bignum)
+          raise AvroError, 'Fixed Schema requires a valid integer for size property.'
+        end
+        super(:fixed, name, space, names)
+        @size = size
+      end
+
+      def to_avro(names=Set.new)
+        avro = super
+        avro.is_a?(Hash) ? avro.merge('size' => size) : avro
+      end
+    end
+
+    class Field < Schema
+      attr_reader :type, :name, :default, :order
+
+      def initialize(type, name, default=nil, order=nil, names=nil, namespace=nil)
+        @type = subparse(type, names, namespace)
+        @name = name
+        @default = default
+        @order = order
+      end
+
+      def to_avro(names=Set.new)
+        {'name' => name, 'type' => type.to_avro(names)}.tap do |avro|
+          avro['default'] = default if default
+          avro['order'] = order if order
+        end
+      end
+    end
+  end
+
+  class SchemaParseError < AvroError; end
+
+  class UnknownSchemaError < SchemaParseError
+    attr_reader :type_name
+
+    def initialize(type)
+      @type_name = type
+      super("#{type.inspect} is not a schema we know about.")
+    end
+  end
+
+  module Name
+    def self.extract_namespace(name, namespace)
+      parts = name.split('.')
+      if parts.size > 1
+        namespace, name = parts[0..-2].join('.'), parts.last
+      end
+      return name, namespace
+    end
+
+    # Add a new schema object to the names dictionary (in place).
+    def self.add_name(names, new_schema)
+      new_fullname = new_schema.fullname
+      if Avro::Schema::VALID_TYPES.include?(new_fullname)
+        raise SchemaParseError, "#{new_fullname} is a reserved type name."
+      elsif names.nil?
+        names = {}
+      elsif names.has_key?(new_fullname)
+        raise SchemaParseError, "The name \"#{new_fullname}\" is already in use."
+      end
+
+      names[new_fullname] = new_schema
+      names
+    end
+
+    def self.make_fullname(name, namespace)
+      if !name.include?('.') && !namespace.nil?
+        namespace + '.' + name
+      else
+        name
+      end
+    end
+  end
+end
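+
+# Illustrative behaviour of the Name helpers above (comments only):
+#
+#   Avro::Name.extract_namespace('a.b.Rec', nil)  # => ["Rec", "a.b"]
+#   Avro::Name.make_fullname('Rec', 'a.b')        # => "a.b.Rec"
+#   Avro::Name.make_fullname('x.Rec', 'a.b')      # => "x.Rec" (already qualified)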
diff --git a/lang/ruby/test/random_data.rb b/lang/ruby/test/random_data.rb
new file mode 100644
index 0000000..9d276f7
--- /dev/null
+++ b/lang/ruby/test/random_data.rb
@@ -0,0 +1,90 @@
+#!/usr/bin/env ruby
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+# http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+class RandomData
+  def initialize(schm, seed=nil)
+    srand(seed) if seed
+    @seed = seed
+    @schm = schm
+  end
+
+  def next
+    nextdata(@schm)
+  end
+
+  def nextdata(schm, d=0)
+    case schm.type_sym
+    when :boolean
+      rand > 0.5
+    when :string
+      randstr()
+    when :int
+      rand(Avro::Schema::INT_MAX_VALUE - Avro::Schema::INT_MIN_VALUE) + Avro::Schema::INT_MIN_VALUE
+    when :long
+      rand(Avro::Schema::LONG_MAX_VALUE - Avro::Schema::LONG_MIN_VALUE) + Avro::Schema::LONG_MIN_VALUE
+    when :float
+      (-1024 + 2048 * rand).round.to_f
+    when :double
+      Avro::Schema::LONG_MIN_VALUE + (Avro::Schema::LONG_MAX_VALUE - Avro::Schema::LONG_MIN_VALUE) * rand
+    when :bytes
+      randstr(BYTEPOOL)
+    when :null
+      nil
+    when :array
+      arr = []
+      len = rand(5) + 2 - d
+      len = 0 if len < 0
+      len.times{ arr << nextdata(schm.items, d+1) }
+      arr
+    when :map
+      map = {}
+      len = rand(5) + 2 - d
+      len = 0 if len < 0
+      len.times do
+        map[nextdata(Avro::Schema::PrimitiveSchema.new(:string))] = nextdata(schm.values, d+1)
+      end
+      map
+    when :record, :error
+      m = {}
+      schm.fields.each do |field|
+        m[field.name] = nextdata(field.type, d+1)
+      end
+      m
+    when :union
+      types = schm.schemas
+      nextdata(types[rand(types.size)], d)
+    when :enum
+      symbols = schm.symbols
+      len = symbols.size
+      return nil if len == 0
+      symbols[rand(len)]
+    when :fixed
+      f = ""
+      schm.size.times { f << BYTEPOOL[rand(BYTEPOOL.size), 1] }
+      f
+    end
+  end
+
+  CHARPOOL = 'abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'
+  BYTEPOOL = '12345abcd'
+
+  def randstr(chars=CHARPOOL, length=20)
+    str = ''
+    rand(length+1).times { str << chars[rand(chars.size)] }
+    str
+  end
+end
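+
+# Illustrative use (hypothetical schema; a fixed seed makes the generated
+# stream reproducible, which test_io.rb relies on when re-reading data):
+#
+#   schema = Avro::Schema.parse('{"type": "array", "items": "int"}')
+#   gen = RandomData.new(schema, 42)
+#   gen.next  # => an array of random ints, e.g. [12345, -678]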
diff --git a/lang/ruby/test/sample_ipc_client.rb b/lang/ruby/test/sample_ipc_client.rb
new file mode 100644
index 0000000..0a25e58
--- /dev/null
+++ b/lang/ruby/test/sample_ipc_client.rb
@@ -0,0 +1,85 @@
+#!/usr/bin/env ruby
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+# http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+require 'socket'
+require 'avro'
+
+MAIL_PROTOCOL_JSON = <<-JSON
+{"namespace": "example.proto",
+ "protocol": "Mail",
+
+ "types": [
+     {"name": "Message", "type": "record",
+      "fields": [
+          {"name": "to",   "type": "string"},
+          {"name": "from", "type": "string"},
+          {"name": "body", "type": "string"}
+      ]
+     }
+ ],
+
+ "messages": {
+     "send": {
+         "request": [{"name": "message", "type": "Message"}],
+         "response": "string"
+     },
+     "replay": {
+         "request": [],
+         "response": "string"
+     }
+ }
+}
+JSON
+
+MAIL_PROTOCOL = Avro::Protocol.parse(MAIL_PROTOCOL_JSON)
+
+def make_requestor(server_address, port, protocol)
+  sock = TCPSocket.new(server_address, port)
+  client = Avro::IPC::SocketTransport.new(sock)
+  Avro::IPC::Requestor.new(protocol, client)
+end
+
+if $0 == __FILE__
+  if ![3, 4].include?(ARGV.length)
+    raise "Usage: <to> <from> <body> [<count>]"
+  end
+
+  # client code - attach to the server and send a message
+  # fill in the Message record
+  message = {
+    'to'   => ARGV[0],
+    'from' => ARGV[1],
+    'body' => ARGV[2]
+  }
+
+  num_messages = (ARGV[3] || 1).to_i
+
+  # build the parameters for the request
+  params = {'message' => message}
+
+  # send the requests and print the result
+  num_messages.times do
+    requestor = make_requestor('localhost', 9090, MAIL_PROTOCOL)
+    result = requestor.request('send', params)
+    puts("Result: " + result)
+  end
+
+  # try out a replay message
+  requestor = make_requestor('localhost', 9090, MAIL_PROTOCOL)
+  result = requestor.request('replay', {})
+  puts("Replay Result: " + result)
+end
diff --git a/lang/ruby/test/sample_ipc_http_client.rb b/lang/ruby/test/sample_ipc_http_client.rb
new file mode 100644
index 0000000..404cc9c
--- /dev/null
+++ b/lang/ruby/test/sample_ipc_http_client.rb
@@ -0,0 +1,84 @@
+#!/usr/bin/env ruby
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+# http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+require 'socket'
+require 'avro'
+
+MAIL_PROTOCOL_JSON = <<-JSON
+{"namespace": "example.proto",
+ "protocol": "Mail",
+
+ "types": [
+     {"name": "Message", "type": "record",
+      "fields": [
+          {"name": "to",   "type": "string"},
+          {"name": "from", "type": "string"},
+          {"name": "body", "type": "string"}
+      ]
+     }
+ ],
+
+ "messages": {
+     "send": {
+         "request": [{"name": "message", "type": "Message"}],
+         "response": "string"
+     },
+     "replay": {
+         "request": [],
+         "response": "string"
+     }
+ }
+}
+JSON
+
+MAIL_PROTOCOL = Avro::Protocol.parse(MAIL_PROTOCOL_JSON)
+
+def make_requestor(server_address, port, protocol)
+  transport = Avro::IPC::HTTPTransceiver.new(server_address, port)
+  Avro::IPC::Requestor.new(protocol, transport)
+end
+
+if $0 == __FILE__
+  if ![3, 4].include?(ARGV.length)
+    raise "Usage: <to> <from> <body> [<count>]"
+  end
+
+  # client code - attach to the server and send a message
+  # fill in the Message record
+  message = {
+    'to'   => ARGV[0],
+    'from' => ARGV[1],
+    'body' => ARGV[2]
+  }
+
+  num_messages = (ARGV[3] || 1).to_i
+
+  # build the parameters for the request
+  params = {'message' => message}
+  # send the requests and print the result
+
+  num_messages.times do
+    requestor = make_requestor('localhost', 9090, MAIL_PROTOCOL)
+    result = requestor.request('send', params)
+    puts("Result: " + result)
+  end
+
+  # try out a replay message
+  requestor = make_requestor('localhost', 9090, MAIL_PROTOCOL)
+  result = requestor.request('replay', {})
+  puts("Replay Result: " + result)
+end
diff --git a/lang/ruby/test/sample_ipc_http_server.rb b/lang/ruby/test/sample_ipc_http_server.rb
new file mode 100644
index 0000000..f5266bb
--- /dev/null
+++ b/lang/ruby/test/sample_ipc_http_server.rb
@@ -0,0 +1,79 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+# http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+require 'avro'
+require 'webrick'
+
+MAIL_PROTOCOL_JSON = <<-JSON
+{"namespace": "example.proto",
+ "protocol": "Mail",
+
+ "types": [
+     {"name": "Message", "type": "record",
+      "fields": [
+          {"name": "to",   "type": "string"},
+          {"name": "from", "type": "string"},
+          {"name": "body", "type": "string"}
+      ]
+     }
+ ],
+
+ "messages": {
+     "send": {
+         "request": [{"name": "message", "type": "Message"}],
+         "response": "string"
+     },
+     "replay": {
+         "request": [],
+         "response": "string"
+     }
+ }
+}
+JSON
+
+MAIL_PROTOCOL = Avro::Protocol.parse(MAIL_PROTOCOL_JSON)
+
+class MailResponder < Avro::IPC::Responder
+  def initialize
+    super(MAIL_PROTOCOL)
+  end
+
+  def call(message, request)
+    if message.name == 'send'
+      request_content = request['message']
+      "Sent message to #{request_content['to']} from #{request_content['from']} with body #{request_content['body']}"
+    elsif message.name == 'replay'
+      'replay'
+    end
+  end
+end
+
+class MailHandler < WEBrick::HTTPServlet::AbstractServlet
+  def do_POST(req, resp)
+    responder = MailResponder.new
+    call_request = Avro::IPC::FramedReader.new(StringIO.new(req.body)).read_framed_message
+    unframed_resp = responder.respond(call_request)
+    writer = Avro::IPC::FramedWriter.new(StringIO.new)
+    writer.write_framed_message(unframed_resp)
+    resp.body = writer.to_s
+  end
+end
+
+if $0 == __FILE__
+  server = WEBrick::HTTPServer.new(:Host => 'localhost', :Port => 9090)
+  server.mount '/', MailHandler
+  trap("INT") { server.shutdown }
+  server.start
+end
diff --git a/lang/ruby/test/sample_ipc_server.rb b/lang/ruby/test/sample_ipc_server.rb
new file mode 100644
index 0000000..b18312d
--- /dev/null
+++ b/lang/ruby/test/sample_ipc_server.rb
@@ -0,0 +1,92 @@
+#!/usr/bin/env ruby
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+# http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+require 'socket'
+require 'avro'
+
+MAIL_PROTOCOL_JSON = <<-EOS
+{"namespace": "example.proto",
+ "protocol": "Mail",
+
+ "types": [
+     {"name": "Message", "type": "record",
+      "fields": [
+          {"name": "to",   "type": "string"},
+          {"name": "from", "type": "string"},
+          {"name": "body", "type": "string"}
+      ]
+     }
+ ],
+
+ "messages": {
+     "send": {
+         "request": [{"name": "message", "type": "Message"}],
+         "response": "string"
+     },
+     "replay": {
+         "request": [],
+         "response": "string"
+     }
+ }
+}
+EOS
+
+MAIL_PROTOCOL = Avro::Protocol.parse(MAIL_PROTOCOL_JSON)
+
+class MailResponder < Avro::IPC::Responder
+  def initialize
+    super(MAIL_PROTOCOL)
+  end
+
+  def call(message, request)
+    if message.name == 'send'
+      request_content = request['message']
+      "Sent message to #{request_content['to']} from #{request_content['from']} with body #{request_content['body']}"
+    elsif message.name == 'replay'
+      'replay'
+    end
+  end
+end
+
+class RequestHandler
+  def initialize(address, port)
+    @ip_address = address
+    @port = port
+  end
+
+  def run
+    server = TCPServer.new(@ip_address, @port)
+    while (session = server.accept)
+      handle(session)
+      session.close
+    end
+  end
+end
+
+class MailHandler < RequestHandler
+  def handle(request)
+    responder = MailResponder.new()
+    transport = Avro::IPC::SocketTransport.new(request)
+    str = transport.read_framed_message
+    transport.write_framed_message(responder.respond(str))
+  end
+end
+
+if $0 == __FILE__
+  handler = MailHandler.new('localhost', 9090)
+  handler.run
+end
diff --git a/lang/ruby/test/test_datafile.rb b/lang/ruby/test/test_datafile.rb
new file mode 100644
index 0000000..8fe05d7
--- /dev/null
+++ b/lang/ruby/test/test_datafile.rb
@@ -0,0 +1,214 @@
+# -*- coding: utf-8 -*-
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+require 'test_help'
+
+class TestDataFile < Test::Unit::TestCase
+  HERE = File.expand_path File.dirname(__FILE__)
+  def setup
+    if File.exist?(HERE + '/data.avr')
+      File.unlink(HERE + '/data.avr')
+    end
+  end
+
+  def teardown
+    if File.exist?(HERE + '/data.avr')
+      File.unlink(HERE + '/data.avr')
+    end
+  end
+
+  def test_differing_schemas_with_primitives
+    writer_schema = <<-JSON
+{ "type": "record",
+  "name": "User",
+  "fields" : [
+    {"name": "username", "type": "string"},
+    {"name": "age", "type": "int"},
+    {"name": "verified", "type": "boolean", "default": "false"}
+  ]}
+JSON
+
+    data = [{"username" => "john", "age" => 25, "verified" => true},
+            {"username" => "ryan", "age" => 23, "verified" => false}]
+
+    Avro::DataFile.open('data.avr', 'w', writer_schema) do |dw|
+      data.each{|h| dw << h }
+    end
+
+    # extract the username only from the avro serialized file
+    reader_schema = <<-JSON
+{ "type": "record",
+  "name": "User",
+  "fields" : [
+    {"name": "username", "type": "string"}
+ ]}
+JSON
+
+    Avro::DataFile.open('data.avr', 'r', reader_schema) do |dr|
+      dr.each_with_index do |record, i|
+        assert_equal data[i]['username'], record['username']
+      end
+    end
+  end
+
+  def test_differing_schemas_with_complex_objects
+    writer_schema = <<-JSON
+{ "type": "record",
+  "name": "something",
+  "fields": [
+    {"name": "something_fixed", "type": {"name": "inner_fixed",
+                                         "type": "fixed", "size": 3}},
+    {"name": "something_enum", "type": {"name": "inner_enum",
+                                        "type": "enum",
+                                        "symbols": ["hello", "goodbye"]}},
+    {"name": "something_array", "type": {"type": "array", "items": "int"}},
+    {"name": "something_map", "type": {"type": "map", "values": "int"}},
+    {"name": "something_record", "type": {"name": "inner_record",
+                                          "type": "record",
+                                          "fields": [
+                                            {"name": "inner", "type": "int"}
+                                          ]}},
+    {"name": "username", "type": "string"}
+]}
+JSON
+
+    data = [{"username" => "john",
+              "something_fixed" => "foo",
+              "something_enum" => "hello",
+              "something_array" => [1,2,3],
+              "something_map" => {"a" => 1, "b" => 2},
+              "something_record" => {"inner" => 2},
+              "something_error" => {"code" => 403}
+            },
+            {"username" => "ryan",
+              "something_fixed" => "bar",
+              "something_enum" => "goodbye",
+              "something_array" => [1,2,3],
+              "something_map" => {"a" => 2, "b" => 6},
+              "something_record" => {"inner" => 1},
+              "something_error" => {"code" => 401}
+            }]
+
+    Avro::DataFile.open('data.avr', 'w', writer_schema) do |dw|
+      data.each{|d| dw << d }
+    end
+
+    %w[fixed enum record error array map union].each do |s|
+      reader = MultiJson.load(writer_schema)
+      reader['fields'] = reader['fields'].reject{|f| f['type']['type'] == s}
+      Avro::DataFile.open('data.avr', 'r', MultiJson.dump(reader)) do |dr|
+        dr.each_with_index do |obj, i|
+          reader['fields'].each do |field|
+            assert_equal data[i][field['name']], obj[field['name']]
+          end
+        end
+      end
+    end
+  end
+
+  def test_data_writer_handles_sync_interval
+    writer_schema = <<-JSON
+{ "type": "record",
+  "name": "something",
+  "fields": [
+    {"name": "something_boolean", "type": "boolean"}
+]}
+JSON
+
+    data = {"something_boolean" => true }
+
+    Avro::DataFile.open('data.avr', 'w', writer_schema) do |dw|
+      while dw.writer.tell < Avro::DataFile::SYNC_INTERVAL
+        dw << data
+      end
+      block_count = dw.block_count
+      dw << data
+      # the last datum should join the current in-memory block rather than flush a new one
+      assert_equal(block_count+1, dw.block_count)
+    end
+  end
+
+  def test_utf8
+    datafile = Avro::DataFile::open('data.avr', 'w', '"string"')
+    datafile << "家"
+    datafile.close
+
+    datafile = Avro::DataFile.open('data.avr')
+    datafile.each do |s|
+      assert_equal "家", s
+    end
+    datafile.close
+  end
+
+  def test_deflate
+    Avro::DataFile.open('data.avr', 'w', '"string"', :deflate) do |writer|
+      writer << 'a' * 10_000
+    end
+    assert(File.size('data.avr') < 500)
+
+    records = []
+    Avro::DataFile.open('data.avr') do |reader|
+      reader.each {|record| records << record }
+    end
+    assert_equal records, ['a' * 10_000]
+  end
+
+  def test_snappy
+    Avro::DataFile.open('data.avr', 'w', '"string"', :snappy) do |writer|
+      writer << 'a' * 10_000
+    end
+    assert(File.size('data.avr') < 600)
+
+    records = []
+    Avro::DataFile.open('data.avr') do |reader|
+      reader.each {|record| records << record }
+    end
+    assert_equal records, ['a' * 10_000]
+  end
+
+  def test_append_to_deflated_file
+    schema = Avro::Schema.parse('"string"')
+    writer = Avro::IO::DatumWriter.new(schema)
+    file = Avro::DataFile::Writer.new(File.open('data.avr', 'wb'), writer, schema, :deflate)
+    file << 'a' * 10_000
+    file.close
+
+    file = Avro::DataFile::Writer.new(File.open('data.avr', 'a+b'), writer)
+    file << 'b' * 10_000
+    file.close
+    assert(File.size('data.avr') < 1_000)
+
+    records = []
+    Avro::DataFile.open('data.avr') do |reader|
+      reader.each {|record| records << record }
+    end
+    assert_equal records, ['a' * 10_000, 'b' * 10_000]
+  end
+
+  def test_custom_meta
+    meta = { 'x.greeting' => 'yo' }
+
+    schema = Avro::Schema.parse('"string"')
+    writer = Avro::IO::DatumWriter.new(schema)
+    file = Avro::DataFile::Writer.new(File.open('data.avr', 'wb'), writer, schema, nil, meta)
+    file.close
+
+    Avro::DataFile.open('data.avr') do |reader|
+      assert_equal 'yo', reader.meta['x.greeting']
+    end
+  end
+end
diff --git a/lang/py/test/test_datafile_interop.py b/lang/ruby/test/test_help.rb
similarity index 56%
copy from lang/py/test/test_datafile_interop.py
copy to lang/ruby/test/test_help.rb
index 8f4e883..b921b1e 100644
--- a/lang/py/test/test_datafile_interop.py
+++ b/lang/ruby/test/test_help.rb
@@ -13,27 +13,11 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import os
-import unittest
-from avro import io
-from avro import datafile
 
-class TestDataFileInterop(unittest.TestCase):
-  def test_interop(self):
-    print ''
-    print 'TEST INTEROP'
-    print '============'
-    print ''
-    for f in os.listdir('@INTEROP_DATA_DIR@'):
-      print 'READING %s' % f
-      print ''
-
-      # read data in binary from file
-      reader = open(os.path.join('@INTEROP_DATA_DIR@', f), 'rb')
-      datum_reader = io.DatumReader()
-      dfr = datafile.DataFileReader(reader, datum_reader)
-      for datum in dfr:
-        assert datum is not None
-
-if __name__ == '__main__':
-  unittest.main()
+require 'rubygems'
+require 'test/unit'
+require 'stringio'
+require 'fileutils'
+FileUtils.mkdir_p('tmp')
+require 'avro'
+require 'random_data'
diff --git a/lang/ruby/test/test_io.rb b/lang/ruby/test/test_io.rb
new file mode 100644
index 0000000..f8f3da1
--- /dev/null
+++ b/lang/ruby/test/test_io.rb
@@ -0,0 +1,406 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+require 'test_help'
+
+class TestIO < Test::Unit::TestCase
+  DATAFILE = 'tmp/test.rb.avro'
+  Schema = Avro::Schema
+
+  def test_null
+    check('"null"')
+    check_default('"null"', "null", nil)
+  end
+
+  def test_boolean
+    check('"boolean"')
+    check_default('"boolean"', "true", true)
+    check_default('"boolean"', "false", false)
+  end
+
+  def test_string
+    check('"string"')
+    check_default('"string"', '"foo"', "foo")
+  end
+
+  def test_bytes
+    check('"bytes"')
+    check_default('"bytes"', '"foo"', "foo")
+  end
+
+  def test_int
+    check('"int"')
+    check_default('"int"', "5", 5)
+  end
+
+  def test_long
+    check('"long"')
+    check_default('"long"', "9", 9)
+  end
+
+  def test_float
+    check('"float"')
+    check_default('"float"', "1.2", 1.2)
+  end
+
+  def test_double
+    check('"double"')
+    check_default('"double"', "1.2", 1.2)
+  end
+
+  def test_array
+    array_schema = '{"type": "array", "items": "long"}'
+    check(array_schema)
+    check_default(array_schema, "[1]", [1])
+  end
+
+  def test_map
+    map_schema = '{"type": "map", "values": "long"}'
+    check(map_schema)
+    check_default(map_schema, '{"a": 1}', {"a" => 1})
+  end
+
+  def test_record
+    record_schema = <<EOS
+      {"type": "record",
+       "name": "Test",
+       "fields": [{"name": "f",
+                   "type": "long"}]}
+EOS
+    check(record_schema)
+    check_default(record_schema, '{"f": 11}', {"f" => 11})
+  end
+
+  def test_error
+    error_schema = <<EOS
+      {"type": "error",
+       "name": "TestError",
+       "fields": [{"name": "message",
+                   "type": "string"}]}
+EOS
+    check(error_schema)
+    check_default(error_schema, '{"message": "boom"}', {"message" => "boom"})
+  end
+
+  def test_enum
+    enum_schema = '{"type": "enum", "name": "Test","symbols": ["A", "B"]}'
+    check(enum_schema)
+    check_default(enum_schema, '"B"', "B")
+  end
+
+  def test_recursive
+    recursive_schema = <<EOS
+      {"type": "record",
+       "name": "Node",
+       "fields": [{"name": "label", "type": "string"},
+                  {"name": "children",
+                   "type": {"type": "array", "items": "Node"}}]}
+EOS
+    check(recursive_schema)
+  end
+
+  def test_union
+    union_schema = <<EOS
+      ["string",
+       "null",
+       "long",
+       {"type": "record",
+        "name": "Cons",
+        "fields": [{"name": "car", "type": "string"},
+                   {"name": "cdr", "type": "string"}]}]
+EOS
+    check(union_schema)
+    check_default('["double", "long"]', "1.1", 1.1)
+  end
+
+  def test_lisp
+    lisp_schema = <<EOS
+      {"type": "record",
+       "name": "Lisp",
+       "fields": [{"name": "value",
+                   "type": ["null", "string",
+                            {"type": "record",
+                             "name": "Cons",
+                             "fields": [{"name": "car", "type": "Lisp"},
+                                        {"name": "cdr", "type": "Lisp"}]}]}]}
+EOS
+    check(lisp_schema)
+  end
+
+  def test_fixed
+    fixed_schema = '{"type": "fixed", "name": "Test", "size": 1}'
+    check(fixed_schema)
+    check_default(fixed_schema, '"a"', "a")
+  end
+
+  def test_enum_with_duplicate
+    str = '{"type": "enum", "name": "Test","symbols" : ["AA", "AA"]}'
+    assert_raises(Avro::SchemaParseError) do
+      schema = Avro::Schema.parse str
+    end
+  end
+
+  BINARY_INT_ENCODINGS = [
+    [0, '00'],
+    [-1, '01'],
+    [1, '02'],
+    [-2, '03'],
+    [2, '04'],
+    [-64, '7f'],
+    [64, '80 01'],
+    [8192, '80 80 01'],
+    [-8193, '81 80 01'],
+  ]
+
+  def avro_hexlify(reader)
+    bytes = []
+    current_byte = reader.read(1)
+    bytes << hexlify(current_byte)
+    while (current_byte.unpack('C').first & 0x80) != 0
+      current_byte = reader.read(1)
+      bytes << hexlify(current_byte)
+    end
+    bytes.join ' '
+  end
+
+  def hexlify(msg)
+    msg.unpack("H*")
+  end
+
+  def test_binary_int_encoding
+    for value, hex_encoding in BINARY_INT_ENCODINGS
+      # write datum in binary to string buffer
+      buffer = StringIO.new
+      encoder = Avro::IO::BinaryEncoder.new(buffer)
+      datum_writer = Avro::IO::DatumWriter.new(Avro::Schema.parse('"int"'))
+      datum_writer.write(value, encoder)
+
+      buffer.seek(0)
+      hex_val = avro_hexlify(buffer)
+
+      assert_equal hex_encoding, hex_val
+    end
+  end
+
+  def test_binary_long_encoding
+    for value, hex_encoding in BINARY_INT_ENCODINGS
+      buffer = StringIO.new
+      encoder = Avro::IO::BinaryEncoder.new(buffer)
+      datum_writer = Avro::IO::DatumWriter.new(Avro::Schema.parse('"long"'))
+      datum_writer.write(value, encoder)
+
+      # read it out of the buffer and hexlify it
+      buffer.seek(0)
+      hex_val = avro_hexlify(buffer)
+
+      assert_equal hex_encoding, hex_val
+    end
+  end
+
+  def test_skip_long
+    for value_to_skip, hex_encoding in BINARY_INT_ENCODINGS
+      value_to_read = 6253
+
+      # write some data in binary to string buffer
+      writer = StringIO.new
+      encoder = Avro::IO::BinaryEncoder.new(writer)
+      datum_writer = Avro::IO::DatumWriter.new(Avro::Schema.parse('"long"'))
+      datum_writer.write(value_to_skip, encoder)
+      datum_writer.write(value_to_read, encoder)
+
+      # skip the value
+      reader = StringIO.new(writer.string())
+      decoder = Avro::IO::BinaryDecoder.new(reader)
+      decoder.skip_long()
+
+      # read data from string buffer
+      datum_reader = Avro::IO::DatumReader.new(Avro::Schema.parse('"long"'))
+      read_value = datum_reader.read(decoder)
+
+      # check it
+      assert_equal value_to_read, read_value
+    end
+  end
+
+  def test_skip_int
+    for value_to_skip, hex_encoding in BINARY_INT_ENCODINGS
+      value_to_read = 6253
+
+      writer = StringIO.new
+      encoder = Avro::IO::BinaryEncoder.new(writer)
+      datum_writer = Avro::IO::DatumWriter.new(Avro::Schema.parse('"int"'))
+      datum_writer.write(value_to_skip, encoder)
+      datum_writer.write(value_to_read, encoder)
+
+      reader = StringIO.new(writer.string)
+      decoder = Avro::IO::BinaryDecoder.new(reader)
+      decoder.skip_int
+
+      datum_reader = Avro::IO::DatumReader.new(Avro::Schema.parse('"int"'))
+      read_value = datum_reader.read(decoder)
+
+      assert_equal value_to_read, read_value
+    end
+  end
+
+  def test_skip_union
+    ["hello", -1, 32, nil].each do |value_to_skip|
+      value_to_read = 6253
+
+      schema = Avro::Schema.parse('["int", "string", "null"]')
+      writer = StringIO.new
+      encoder = Avro::IO::BinaryEncoder.new(writer)
+      datum_writer = Avro::IO::DatumWriter.new(schema)
+      datum_writer.write(value_to_skip, encoder)
+      datum_writer.write(value_to_read, encoder)
+
+      reader = StringIO.new(writer.string)
+      decoder = Avro::IO::BinaryDecoder.new(reader)
+      datum_reader = Avro::IO::DatumReader.new(schema)
+      datum_reader.skip_data(schema, decoder)
+      read_value = datum_reader.read(decoder)
+
+      assert_equal value_to_read, read_value
+    end
+  end
+
+  def test_schema_promotion
+    promotable_schemas = ['"int"', '"long"', '"float"', '"double"']
+    incorrect = 0
+    promotable_schemas.each_with_index do |ws, i|
+      writers_schema = Avro::Schema.parse(ws)
+      datum_to_write = 219
+      for rs in promotable_schemas[(i + 1)..-1]
+        readers_schema = Avro::Schema.parse(rs)
+        writer, enc, dw = write_datum(datum_to_write, writers_schema)
+        datum_read = read_datum(writer, writers_schema, readers_schema)
+        if datum_read != datum_to_write
+          incorrect += 1
+        end
+      end
+      assert_equal(0, incorrect)
+    end
+  end
+
+  private
+
+  def check_default(schema_json, default_json, default_value)
+    actual_schema = '{"type": "record", "name": "Foo", "fields": []}'
+    actual = Avro::Schema.parse(actual_schema)
+
+    expected_schema = <<EOS
+      {"type": "record",
+       "name": "Foo",
+       "fields": [{"name": "f", "type": #{schema_json}, "default": #{default_json}}]}
+EOS
+    expected = Avro::Schema.parse(expected_schema)
+
+    reader = Avro::IO::DatumReader.new(actual, expected)
+    record = reader.read(Avro::IO::BinaryDecoder.new(StringIO.new))
+    assert_equal default_value, record["f"]
+  end
+
+  def check(str)
+    # parse schema, then convert back to string
+    schema = Avro::Schema.parse str
+
+    parsed_string = schema.to_s
+
+    # test that the round-trip didn't mess up anything
+    # (the comparison is between parsed JSON objects, so key ordering is not enforced)
+    assert_equal(MultiJson.load(str),
+                  MultiJson.load(parsed_string))
+
+    # test __eq__
+    assert_equal(schema, Avro::Schema.parse(str))
+
+    # test hashcode doesn't generate infinite recursion
+    schema.hash
+
+    # test serialization of random data
+    randomdata = RandomData.new(schema)
+    9.times { checkser(schema, randomdata) }
+
+    # test writing of data to file
+    check_datafile(schema)
+  end
+
+  def checkser(schm, randomdata)
+    datum = randomdata.next
+    assert validate(schm, datum)
+    w = Avro::IO::DatumWriter.new(schm)
+    writer = StringIO.new "", "w"
+    w.write(datum, Avro::IO::BinaryEncoder.new(writer))
+    r = datum_reader(schm)
+    reader = StringIO.new(writer.string)
+    ob = r.read(Avro::IO::BinaryDecoder.new(reader))
+    assert_equal(datum, ob) # FIXME check on assertdata conditional
+  end
+
+  def check_datafile(schm)
+    seed = 0
+    count = 10
+    random_data = RandomData.new(schm, seed)
+
+    f = File.open(DATAFILE, 'wb')
+    dw = Avro::DataFile::Writer.new(f, datum_writer(schm), schm)
+    count.times{ dw << random_data.next }
+    dw.close
+
+    random_data = RandomData.new(schm, seed)
+
+    f = File.open(DATAFILE, 'r+')
+    dr = Avro::DataFile::Reader.new(f, datum_reader(schm))
+
+    last_index = nil
+    dr.each_with_index do |data, c|
+      last_index = c
+      # FIXME assertdata conditional
+      assert_equal(random_data.next, data)
+    end
+    dr.close
+    assert_equal count, last_index+1
+  end
+
+  def validate(schm, datum)
+    Avro::Schema.validate(schm, datum)
+  end
+
+  def datum_writer(schm)
+    Avro::IO::DatumWriter.new(schm)
+  end
+
+  def datum_reader(schm)
+    Avro::IO::DatumReader.new(schm)
+  end
+
+  def write_datum(datum, writers_schema)
+    writer = StringIO.new
+    encoder = Avro::IO::BinaryEncoder.new(writer)
+    datum_writer = Avro::IO::DatumWriter.new(writers_schema)
+    datum_writer.write(datum, encoder)
+    [writer, encoder, datum_writer]
+  end
+
+  def read_datum(buffer, writers_schema, readers_schema=nil)
+    reader = StringIO.new(buffer.string)
+    decoder = Avro::IO::BinaryDecoder.new(reader)
+    datum_reader = Avro::IO::DatumReader.new(writers_schema, readers_schema)
+    datum_reader.read(decoder)
+  end
+end
diff --git a/lang/ruby/test/test_protocol.rb b/lang/ruby/test/test_protocol.rb
new file mode 100644
index 0000000..fda3882
--- /dev/null
+++ b/lang/ruby/test/test_protocol.rb
@@ -0,0 +1,199 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+# http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+require 'test_help'
+
+class TestProtocol < Test::Unit::TestCase
+
+  class ExampleProtocol
+    attr_reader :protocol_string, :valid, :name
+    attr_accessor :comment
+    def initialize(protocol_string, valid=true, name=nil, comment='')
+      @protocol_string = protocol_string
+      @valid = valid
+      @name = name || protocol_string # default to protocol_string for name
+      @comment = comment
+    end
+  end
+#
+# Example Protocols
+#
+
+EXAMPLES = [
+  ExampleProtocol.new(<<-EOS, true),
+{
+  "namespace": "com.acme",
+  "protocol": "HelloWorld",
+
+  "types": [
+    {"name": "Greeting", "type": "record", "fields": [
+      {"name": "message", "type": "string"}]},
+    {"name": "Curse", "type": "error", "fields": [
+      {"name": "message", "type": "string"}]}
+  ],
+
+  "messages": {
+    "hello": {
+      "request": [{"name": "greeting", "type": "Greeting" }],
+      "response": "Greeting",
+      "errors": ["Curse"]
+    }
+  }
+}
+EOS
+
+  ExampleProtocol.new(<<-EOS, true),
+{"namespace": "org.apache.avro.test",
+ "protocol": "Simple",
+
+ "types": [
+     {"name": "Kind", "type": "enum", "symbols": ["FOO","BAR","BAZ"]},
+
+     {"name": "MD5", "type": "fixed", "size": 16},
+
+     {"name": "TestRecord", "type": "record",
+      "fields": [
+          {"name": "name", "type": "string", "order": "ignore"},
+          {"name": "kind", "type": "Kind", "order": "descending"},
+          {"name": "hash", "type": "MD5"}
+      ]
+     },
+
+     {"name": "TestError", "type": "error", "fields": [
+         {"name": "message", "type": "string"}
+      ]
+     }
+
+ ],
+
+ "messages": {
+
+     "hello": {
+         "request": [{"name": "greeting", "type": "string"}],
+         "response": "string"
+     },
+
+     "echo": {
+         "request": [{"name": "record", "type": "TestRecord"}],
+         "response": "TestRecord"
+     },
+
+     "add": {
+         "request": [{"name": "arg1", "type": "int"}, {"name": "arg2", "type": "int"}],
+         "response": "int"
+     },
+
+     "echoBytes": {
+         "request": [{"name": "data", "type": "bytes"}],
+         "response": "bytes"
+     },
+
+     "error": {
+         "request": [],
+         "response": "null",
+         "errors": ["TestError"]
+     }
+ }
+
+}
+EOS
+  ExampleProtocol.new(<<-EOS, true),
+{"namespace": "org.apache.avro.test.namespace",
+ "protocol": "TestNamespace",
+
+ "types": [
+     {"name": "org.apache.avro.test.util.MD5", "type": "fixed", "size": 16},
+     {"name": "TestRecord", "type": "record",
+      "fields": [ {"name": "hash", "type": "org.apache.avro.test.util.MD5"} ]
+     },
+     {"name": "TestError", "namespace": "org.apache.avro.test.errors",
+      "type": "error", "fields": [ {"name": "message", "type": "string"} ]
+     }
+ ],
+
+ "messages": {
+     "echo": {
+         "request": [{"name": "record", "type": "TestRecord"}],
+         "response": "TestRecord"
+     },
+
+     "error": {
+         "request": [],
+         "response": "null",
+         "errors": ["org.apache.avro.test.errors.TestError"]
+     }
+
+ }
+
+}
+EOS
+  ExampleProtocol.new(<<-EOS, true)
+{"namespace": "org.apache.avro.test",
+ "protocol": "BulkData",
+
+ "types": [],
+
+ "messages": {
+
+     "read": {
+         "request": [],
+         "response": "bytes"
+     },
+
+     "write": {
+         "request": [ {"name": "data", "type": "bytes"} ],
+         "response": "null"
+     }
+
+ }
+
+}
+EOS
+]
+
+  Protocol = Avro::Protocol
+  def test_parse
+    EXAMPLES.each do |example|
+      assert_nothing_raised("should be valid: #{example.protocol_string}") {
+        Protocol.parse(example.protocol_string)
+      }
+    end
+  end
+
+  def test_valid_cast_to_string_after_parse
+    EXAMPLES.each do |example|
+      assert_nothing_raised("should round-trip: #{example.protocol_string}") {
+        foo = Protocol.parse(example.protocol_string).to_s
+        Protocol.parse(foo)
+      }
+    end
+  end
+
+  def test_equivalence_after_round_trip
+    EXAMPLES.each do |example|
+      original = Protocol.parse(example.protocol_string)
+      round_trip = Protocol.parse(original.to_s)
+
+      assert_equal original, round_trip
+    end
+  end
+
+  def test_namespaces
+    protocol = Protocol.parse(EXAMPLES.first.protocol_string)
+    protocol.types.each do |type|
+      assert_equal type.namespace, 'com.acme'
+    end
+  end
+end
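
Once parsed, a protocol exposes its name, namespace, types, and messages. A short sketch against the HelloWorld example above (trimmed to stay self-contained; assumes the avro gem):

    require 'avro'

    json = <<-JSON
    {"namespace": "com.acme", "protocol": "HelloWorld",
     "types": [{"name": "Greeting", "type": "record",
                "fields": [{"name": "message", "type": "string"}]}],
     "messages": {}}
    JSON

    protocol = Avro::Protocol.parse(json)
    protocol.name                   # => "HelloWorld"
    protocol.namespace              # => "com.acme"
    protocol.types.map(&:fullname)  # => ["com.acme.Greeting"]
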
diff --git a/lang/ruby/test/test_schema.rb b/lang/ruby/test/test_schema.rb
new file mode 100644
index 0000000..0668cf2
--- /dev/null
+++ b/lang/ruby/test/test_schema.rb
@@ -0,0 +1,146 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+require 'test_help'
+
+class TestSchema < Test::Unit::TestCase
+  def test_default_namespace
+    schema = Avro::Schema.parse <<-SCHEMA
+      {"type": "record", "name": "OuterRecord", "fields": [
+        {"name": "field1", "type": {
+          "type": "record", "name": "InnerRecord", "fields": []
+        }},
+        {"name": "field2", "type": "InnerRecord"}
+      ]}
+    SCHEMA
+
+    assert_equal schema.name, 'OuterRecord'
+    assert_equal schema.fullname, 'OuterRecord'
+    assert_nil schema.namespace
+
+    schema.fields.each do |field|
+      assert_equal field.type.name, 'InnerRecord'
+      assert_equal field.type.fullname, 'InnerRecord'
+      assert_nil field.type.namespace
+    end
+  end
+
+  def test_inherited_namespace
+    schema = Avro::Schema.parse <<-SCHEMA
+      {"type": "record", "name": "OuterRecord", "namespace": "my.name.space",
+       "fields": [
+          {"name": "definition", "type": {
+            "type": "record", "name": "InnerRecord", "fields": []
+          }},
+          {"name": "relativeReference", "type": "InnerRecord"},
+          {"name": "absoluteReference", "type": "my.name.space.InnerRecord"}
+      ]}
+    SCHEMA
+
+    assert_equal schema.name, 'OuterRecord'
+    assert_equal schema.fullname, 'my.name.space.OuterRecord'
+    assert_equal schema.namespace, 'my.name.space'
+    schema.fields.each do |field|
+      assert_equal field.type.name, 'InnerRecord'
+      assert_equal field.type.fullname, 'my.name.space.InnerRecord'
+      assert_equal field.type.namespace, 'my.name.space'
+    end
+  end
+
+  def test_inherited_namespace_from_dotted_name
+    schema = Avro::Schema.parse <<-SCHEMA
+      {"type": "record", "name": "my.name.space.OuterRecord", "fields": [
+        {"name": "definition", "type": {
+          "type": "enum", "name": "InnerEnum", "symbols": ["HELLO", "WORLD"]
+        }},
+        {"name": "relativeReference", "type": "InnerEnum"},
+        {"name": "absoluteReference", "type": "my.name.space.InnerEnum"}
+      ]}
+    SCHEMA
+
+    assert_equal schema.name, 'OuterRecord'
+    assert_equal schema.fullname, 'my.name.space.OuterRecord'
+    assert_equal schema.namespace, 'my.name.space'
+    schema.fields.each do |field|
+      assert_equal field.type.name, 'InnerEnum'
+      assert_equal field.type.fullname, 'my.name.space.InnerEnum'
+      assert_equal field.type.namespace, 'my.name.space'
+    end
+  end
+
+  def test_nested_namespaces
+    schema = Avro::Schema.parse <<-SCHEMA
+      {"type": "record", "name": "outer.OuterRecord", "fields": [
+        {"name": "middle", "type": {
+          "type": "record", "name": "middle.MiddleRecord", "fields": [
+            {"name": "inner", "type": {
+              "type": "record", "name": "InnerRecord", "fields": [
+                {"name": "recursive", "type": "MiddleRecord"}
+              ]
+            }}
+          ]
+        }}
+      ]}
+    SCHEMA
+
+    assert_equal schema.name, 'OuterRecord'
+    assert_equal schema.fullname, 'outer.OuterRecord'
+    assert_equal schema.namespace, 'outer'
+    middle = schema.fields.first.type
+    assert_equal middle.name, 'MiddleRecord'
+    assert_equal middle.fullname, 'middle.MiddleRecord'
+    assert_equal middle.namespace, 'middle'
+    inner = middle.fields.first.type
+    assert_equal inner.name, 'InnerRecord'
+    assert_equal inner.fullname, 'middle.InnerRecord'
+    assert_equal inner.namespace, 'middle'
+    assert_equal inner.fields.first.type, middle
+  end
+
+  def test_to_avro_includes_namespaces
+    schema = Avro::Schema.parse <<-SCHEMA
+      {"type": "record", "name": "my.name.space.OuterRecord", "fields": [
+        {"name": "definition", "type": {
+          "type": "fixed", "name": "InnerFixed", "size": 16
+        }},
+        {"name": "reference", "type": "InnerFixed"}
+      ]}
+    SCHEMA
+
+    assert_equal schema.to_avro, {
+      'type' => 'record', 'name' => 'OuterRecord', 'namespace' => 'my.name.space',
+      'fields' => [
+        {'name' => 'definition', 'type' => {
+          'type' => 'fixed', 'name' => 'InnerFixed', 'namespace' => 'my.name.space',
+          'size' => 16
+        }},
+        {'name' => 'reference', 'type' => 'my.name.space.InnerFixed'}
+      ]
+    }
+  end
+
+  def test_unknown_named_type
+    error = assert_raise Avro::UnknownSchemaError do
+      Avro::Schema.parse <<-SCHEMA
+        {"type": "record", "name": "my.name.space.Record", "fields": [
+          {"name": "reference", "type": "MissingType"}
+        ]}
+      SCHEMA
+    end
+
+    assert_equal '"MissingType" is not a schema we know about.', error.message
+  end
+end
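
The namespace rules exercised above boil down to: a dotted name supplies its own namespace, and unqualified inner names inherit the enclosing one. A quick sketch (assumes the avro gem):

    require 'avro'

    # A dotted "name" carries its namespace with it.
    schema = Avro::Schema.parse('{"type": "fixed", "name": "a.b.MD5", "size": 16}')
    schema.name       # => "MD5"
    schema.namespace  # => "a.b"
    schema.fullname   # => "a.b.MD5"
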
diff --git a/lang/py/test/test_ipc.py b/lang/ruby/test/test_socket_transport.rb
similarity index 50%
copy from lang/py/test/test_ipc.py
copy to lang/ruby/test/test_socket_transport.rb
index 2545b15..0a15a9a 100644
--- a/lang/py/test/test_ipc.py
+++ b/lang/ruby/test/test_socket_transport.rb
@@ -13,26 +13,28 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""
-There are currently no IPC tests within python, in part because there are no
-servers yet available.
-"""
-import unittest
 
-# This test does import this code, to make sure it at least passes
-# compilation.
-from avro import ipc
+require 'test_help'
 
-class TestIPC(unittest.TestCase):
-  def test_placeholder(self):
-    pass
+class TestSocketTransport < Test::Unit::TestCase
+  def test_buffer_writing
+    io = StringIO.new
+    st = Avro::IPC::SocketTransport.new(io)
+    buffer_length = "\000\000\000\006"  # 6 in big-endian
+    message = 'abcdef'
+    null_ending = "\000\000\000\000" # 0 in big-endian
+    full = buffer_length + message + null_ending
+    st.write_framed_message(message)
+    assert_equal full, io.string
+  end
 
-  def test_server_with_path(self):
-    client_with_custom_path = ipc.HTTPTransceiver('dummyserver.net', 80, '/service/article')
-    self.assertEqual('/service/article', client_with_custom_path.req_resource)
-
-    client_with_default_path = ipc.HTTPTransceiver('dummyserver.net', 80)
-    self.assertEqual('/', client_with_default_path.req_resource)
-
-if __name__ == '__main__':
-  unittest.main()
+  def test_buffer_reading
+    buffer_length = "\000\000\000\005" # 5 in big-endian
+    message = "hello"
+    null_ending = "\000\000\000\000" # 0 in big-endian
+    full = buffer_length + message + null_ending
+    io = StringIO.new(full)
+    st = Avro::IPC::SocketTransport.new(io)
+    assert_equal 'hello', st.read_framed_message
+  end
+end
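
The framing exercised above is length-prefixed: each buffer is preceded by its length as a 4-byte big-endian integer, and a zero-length buffer terminates the message. The expected byte strings in the tests can also be built with Array#pack, which reads more easily than octal escapes:

    message = 'abcdef'
    framed = [message.bytesize].pack('N') + message + [0].pack('N')
    framed == "\000\000\000\006abcdef\000\000\000\000"  # => true
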
diff --git a/lang/ruby/test/tool.rb b/lang/ruby/test/tool.rb
new file mode 100644
index 0000000..111b9d7
--- /dev/null
+++ b/lang/ruby/test/tool.rb
@@ -0,0 +1,144 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+require 'avro'
+require 'webrick'
+require 'uri'
+require 'logger'
+
+class GenericResponder < Avro::IPC::Responder
+  def initialize(proto, msg, datum)
+    proto_json = open(proto).read
+    super(Avro::Protocol.parse(proto_json))
+    @msg = msg
+    @datum = datum
+  end
+
+  def call(message, request)
+    if message.name == @msg
+      STDERR.puts "Message: #{message.name} Datum: #{@datum.inspect}"
+      @datum
+    end
+  end
+end
+
+class GenericHandler < WEBrick::HTTPServlet::AbstractServlet
+  def do_POST(req, resp)
+    call_request = Avro::IPC::FramedReader.new(StringIO.new(req.body)).read_framed_message
+    unframed_resp = $responder.respond(call_request)
+    writer = Avro::IPC::FramedWriter.new(StringIO.new)
+    writer.write_framed_message(unframed_resp)
+    resp.body = writer.to_s
+    @server.stop
+  end
+end
+
+def run_server(uri, proto, msg, datum)
+  uri = URI.parse(uri)
+  $responder = GenericResponder.new(proto, msg, datum)
+  server = WEBrick::HTTPServer.new(:BindAddress => uri.host,
+                                   :Port => uri.port,
+                                   :Logger => Logger.new(StringIO.new))
+  server.mount '/', GenericHandler
+  puts "Port: #{server.config[:Port]}"
+  STDOUT.flush
+  trap("INT") { server.stop }
+  trap("TERM") { server.stop }
+  server.start
+end
+
+def send_message(uri, proto, msg, datum)
+  uri = URI.parse(uri)
+  trans = Avro::IPC::HTTPTransceiver.new(uri.host, uri.port)
+  proto_json = open(proto).read
+  requestor = Avro::IPC::Requestor.new(Avro::Protocol.parse(proto_json),
+                                       trans)
+  p requestor.request(msg, datum)
+end
+
+def file_or_stdin(f)
+  f == "-" ? STDIN : open(f)
+end
+
+def main
+  if ARGV.size == 0
+    puts "Usage: #{$0} [dump|rpcreceive|rpcsend]"
+    return 1
+  end
+
+  case ARGV[0]
+  when "dump"
+    if ARGV.size != 2
+      puts "Usage: #{$0} dump input_file"
+      return 1
+    end
+    d = Avro::DataFile::Reader.new(file_or_stdin(ARGV[1]), Avro::IO::DatumReader.new)
+    d.each{|o| puts o.inspect }
+    d.close
+  when "rpcreceive"
+    usage_str = "Usage: #{$0} rpcreceive uri protocol_file "
+    usage_str += "message_name (-data d | -file f)"
+
+    unless [4, 6].include?(ARGV.size)
+      puts usage_str
+      return 1
+    end
+    uri, proto, msg = ARGV[1,3]
+    datum = nil
+    if ARGV.size > 4
+      case ARGV[4]
+      when "-file"
+        Avro::DataFile.open(ARGV[5]) {|f|
+          f.each{|d| datum = d; break }
+        }
+      when "-data"
+        puts "JSON Decoder not yet implemented."
+        return 1
+      else
+        puts usage_str
+        return 1
+      end
+    end
+    run_server(uri, proto, msg, datum)
+  when "rpcsend"
+    usage_str = "Usage: #{$0} rpcsend uri protocol_file "
+    usage_str += "message_name (-data d | -file f)"
+    unless [4,6].include?(ARGV.size)
+      puts usage_str
+      return 1
+    end
+    uri, proto, msg = ARGV[1,3]
+    datum = nil
+    if ARGV.size > 4
+      case ARGV[4]
+      when "-file"
+        Avro::DataFile.open(ARGV[5]){|f| f.each{|d| datum = d; break } }
+      when "-data"
+        puts "JSON Decoder not yet implemented"
+        return 1
+      else
+        puts usage_str
+        return 1
+      end
+    end
+    send_message(uri, proto, msg, datum)
+  end
+  return 0
+end
+
+if __FILE__ == $0
+  exit(main)
+end
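
For reference, the tool is driven entirely from ARGV; invocations look like the following (URIs and file names are illustrative only, and -data is not implemented yet):

    ruby tool.rb dump input.avro
    ruby tool.rb rpcreceive http://127.0.0.1:9090 echo.avpr echo -file datum.avro
    ruby tool.rb rpcsend http://127.0.0.1:9090 echo.avpr echo -file datum.avro
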
diff --git a/pom.xml b/pom.xml
index e188eb0..c3b6197 100644
--- a/pom.xml
+++ b/pom.xml
@@ -19,6 +19,10 @@
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
 
+  <prerequisites>
+    <maven>2.2.1</maven>
+  </prerequisites>
+
   <parent>
     <groupId>org.apache</groupId>
     <artifactId>apache</artifactId>
@@ -27,7 +31,7 @@
 
   <groupId>org.apache.avro</groupId>
   <artifactId>avro-toplevel</artifactId>
-  <version>1.7.7</version>
+  <version>1.8.0</version>
   <packaging>pom</packaging>
 
   <name>Apache Avro Toplevel</name>
@@ -47,7 +51,7 @@
 
     <!-- plugin versions -->
     <antrun-plugin.version>1.7</antrun-plugin.version>
-    <enforcer-plugin.version>1.0.1</enforcer-plugin.version>
+    <enforcer-plugin.version>1.3.1</enforcer-plugin.version>
   </properties>
 
   <modules>
diff --git a/share/VERSION.txt b/share/VERSION.txt
index 73c8b4f..afa2b35 100644
--- a/share/VERSION.txt
+++ b/share/VERSION.txt
@@ -1 +1 @@
-1.7.7
\ No newline at end of file
+1.8.0
\ No newline at end of file
diff --git a/share/docker/Dockerfile b/share/docker/Dockerfile
new file mode 100644
index 0000000..3bc0b33
--- /dev/null
+++ b/share/docker/Dockerfile
@@ -0,0 +1,58 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Dockerfile for installing the necessary dependencies for building Avro.
+# See BUILD.txt.
+
+FROM java:7-jdk
+
+WORKDIR /root
+
+# Install dependencies from packages
+RUN apt-get update && apt-get install --no-install-recommends -y \
+  git subversion curl ant make maven \
+  gcc cmake asciidoc source-highlight \
+  g++ flex bison libboost-all-dev doxygen \
+  mono-devel mono-gmcs nunit \
+  nodejs nodejs-legacy npm \
+  perl \
+  php5 phpunit php5-gmp bzip2 \
+  python python-setuptools python3-setuptools \
+  ruby ruby-dev rake \
+  libsnappy1 libsnappy-dev
+
+# Install Forrest
+RUN mkdir -p /usr/local/apache-forrest
+RUN curl -O http://archive.apache.org/dist/forrest/0.8/apache-forrest-0.8.tar.gz
+RUN tar xzf *forrest* --strip-components 1 -C /usr/local/apache-forrest
+RUN echo 'forrest.home=/usr/local/apache-forrest' > build.properties
+RUN chmod -R 0777 /usr/local/apache-forrest/build /usr/local/apache-forrest/main \
+  /usr/local/apache-forrest/plugins
+ENV FORREST_HOME /usr/local/apache-forrest
+
+# Install Perl modules
+RUN curl -L http://cpanmin.us | perl - --self-upgrade # non-interactive cpan
+RUN cpanm install Module::Install Module::Install::ReadmeFromPod \
+  Module::Install::Repository \
+  Math::BigInt JSON::XS Try::Tiny Regexp::Common Encode \
+  IO::String Object::Tiny Compress::Zlib Test::More \
+  Test::Exception Test::Pod
+
+# Install Ruby modules
+RUN gem install echoe yajl-ruby multi_json snappy
+
+# Install global Node modules
+RUN npm install -g grunt-cli
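
As a usage note, an image built from this Dockerfile is created and entered with the standard Docker commands (the tag name here is arbitrary); see BUILD.txt for how the build itself is run inside the container:

    docker build -t avro-build share/docker
    docker run -it avro-build
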
diff --git a/share/rat-excludes.txt b/share/rat-excludes.txt
index c123a93..9b05e70 100644
--- a/share/rat-excludes.txt
+++ b/share/rat-excludes.txt
@@ -8,6 +8,7 @@
 **/*.js
 **/*.la
 **/*.m4
+**/*.md
 **/*.md5
 **/*.pom
 **/*.properties
diff --git a/share/schemas/org/apache/avro/ipc/trace/avroTrace.avdl b/share/schemas/org/apache/avro/ipc/trace/avroTrace.avdl
deleted file mode 100644
index 9fd5680..0000000
--- a/share/schemas/org/apache/avro/ipc/trace/avroTrace.avdl
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * A Span is our basic unit of tracing. It tracks the critical points
- * of a single RPC call and records other call meta-data. It also
- * allows arbitrary string annotations. Both the client and server create
- * Span objects, each of which is populated with half of the relevant event
- * data. They share a span ID, which allows us to merge them into one complete
- * span later on.
- */
-@namespace("org.apache.avro.ipc.trace")
-
-protocol AvroTrace {
-  enum SpanEvent { SERVER_RECV, SERVER_SEND, CLIENT_RECV, CLIENT_SEND }
-
-  fixed ID(8);
-
-  record TimestampedEvent {
-    long timeStamp; // Unix time, in nanoseconds
-    union { SpanEvent, string} event;
-  }
-
-  /**
-   * An individual span is the basic unit of testing.
-   * The record is used by both \"client\" and \"server\".
-   */
-  record Span {
-    ID  traceID;  // ID shared by all Spans in a given trace
-    ID spanID;    // Random ID for this Span
-    union { ID, null } parentSpanID; // Parent Span ID (null if root Span)
-    string messageName;       // Function call represented
-    long requestPayloadSize;  // Size (bytes) of the request
-    long responsePayloadSize; // Size (byts) of the response
-    union { string, null} requestorHostname; // Hostname of requestor
-//    int requestorPort;     // Port of the requestor (currently unused)
-    union { string, null } responderHostname; // Hostname of the responder
-//    int responderPort;     // Port of the responder (currently unused)
-    array<TimestampedEvent> events;  // List of critical events
-    boolean complete; // Whether includes data from both sides
-  }
-
-  /**
-   * Get all spans stored on this host.
-   */
-  array<Span> getAllSpans();
-
-  /**
-   * Get spans occuring between start and end. Each is a unix timestamp
-   * in nanosecond units (for consistency with TimestampedEvent).
-   */
-  array<Span> getSpansInRange(long start, long end);
-}
diff --git a/share/schemas/org/apache/avro/ipc/trace/avroTrace.avpr b/share/schemas/org/apache/avro/ipc/trace/avroTrace.avpr
deleted file mode 100644
index 041f3e8..0000000
--- a/share/schemas/org/apache/avro/ipc/trace/avroTrace.avpr
+++ /dev/null
@@ -1,82 +0,0 @@
-{
-  "protocol" : "AvroTrace",
-  "namespace" : "org.apache.avro.ipc.trace",
-  "types" : [ {
-    "type" : "enum",
-    "name" : "SpanEvent",
-    "symbols" : [ "SERVER_RECV", "SERVER_SEND", "CLIENT_RECV", "CLIENT_SEND" ]
-  }, {
-    "type" : "fixed",
-    "name" : "ID",
-    "size" : 8
-  }, {
-    "type" : "record",
-    "name" : "TimestampedEvent",
-    "fields" : [ {
-      "name" : "timeStamp",
-      "type" : "long"
-    }, {
-      "name" : "event",
-      "type" : [ "SpanEvent", "string" ]
-    } ]
-  }, {
-    "type" : "record",
-    "name" : "Span",
-    "fields" : [ {
-      "name" : "traceID",
-      "type" : "ID"
-    }, {
-      "name" : "spanID",
-      "type" : "ID"
-    }, {
-      "name" : "parentSpanID",
-      "type" : [ "ID", "null" ]
-    }, {
-      "name" : "messageName",
-      "type" : "string"
-    }, {
-      "name" : "requestPayloadSize",
-      "type" : "long"
-    }, {
-      "name" : "responsePayloadSize",
-      "type" : "long"
-    }, {
-      "name" : "requestorHostname",
-      "type" : [ "string", "null" ]
-    }, {
-      "name" : "responderHostname",
-      "type" : [ "string", "null" ]
-    }, {
-      "name" : "events",
-      "type" : {
-        "type" : "array",
-        "items" : "TimestampedEvent"
-      }
-    }, {
-      "name" : "complete",
-      "type" : "boolean"
-    } ]
-  } ],
-  "messages" : {
-    "getAllSpans" : {
-      "request" : [ ],
-      "response" : {
-        "type" : "array",
-        "items" : "Span"
-      }
-    },
-    "getSpansInRange" : {
-      "request" : [ {
-        "name" : "start",
-        "type" : "long"
-      }, {
-        "name" : "end",
-        "type" : "long"
-      } ],
-      "response" : {
-        "type" : "array",
-        "items" : "Span"
-      }
-    }
-  }
-}
\ No newline at end of file
diff --git a/share/test/schemas/http.avdl b/share/test/schemas/http.avdl
new file mode 100644
index 0000000..52313e7
--- /dev/null
+++ b/share/test/schemas/http.avdl
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** NOTE: This structure was inspired by HTTP and deliberately skewed to exercise the effects that need testing. */
+
+@namespace("org.apache.avro.test.http")
+protocol Http {
+
+    enum NetworkType {
+        IPv4,
+        IPv6
+    }
+
+    record NetworkConnection {
+        NetworkType networkType;
+        string      networkAddress;
+    }
+
+    record UserAgent {
+        union { null, string } id = null;
+        string                      useragent;
+    }
+
+    enum HttpMethod {
+        GET,
+        POST
+    }
+
+    record QueryParameter {
+        string                  name;
+        union { null, string }  value; // Sometimes there is no value.
+    }
+
+    record HttpURI {
+        HttpMethod method;
+        string                path;
+        array<QueryParameter> parameters = [];
+    }
+
+    record HttpRequest {
+        UserAgent         userAgent;
+        HttpURI    URI;
+    }
+
+    record Request {
+      long              timestamp;
+      NetworkConnection connection;
+      HttpRequest       httpRequest;
+    }
+
+}
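
The optional query-parameter value above is the usual Avro idiom for nullable fields: a union with null. A sketch of how the Ruby gem treats such a field, using a hand-translated JSON equivalent of the QueryParameter record (illustrative; the Ruby gem does not parse the IDL form):

    require 'avro'

    schema = Avro::Schema.parse(<<-JSON)
    {"type": "record", "name": "QueryParameter",
     "namespace": "org.apache.avro.test.http",
     "fields": [{"name": "name", "type": "string"},
                {"name": "value", "type": ["null", "string"]}]}
    JSON

    Avro::Schema.validate(schema, {'name' => 'q', 'value' => 'avro'})  # => true
    Avro::Schema.validate(schema, {'name' => 'q', 'value' => nil})     # => true
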
diff --git a/share/test/schemas/reserved.avsc b/share/test/schemas/reserved.avsc
new file mode 100644
index 0000000..40f4849
--- /dev/null
+++ b/share/test/schemas/reserved.avsc
@@ -0,0 +1,2 @@
+{"name": "org.apache.avro.test.Reserved", "type": "enum",
+ "symbols": ["default","class","int"]},
diff --git a/share/test/schemas/specialtypes.avdl b/share/test/schemas/specialtypes.avdl
new file mode 100644
index 0000000..623e016
--- /dev/null
+++ b/share/test/schemas/specialtypes.avdl
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** NOTE: This structure is intended to contain names that are likely to cause collisions with the generated code. */
+
+@namespace("org.apache.avro.test.specialtypes")
+protocol LetsBreakIt {
+
+    enum Enum {
+        builder,
+        Builder,
+        builderBuider,
+        value,
+        this
+    }
+
+    record One {
+        Enum    this;
+    }
+
+    record Two {
+        union { null, string } this = null;
+        string                 String;
+    }
+
+    record Variables {
+        One       this;
+
+        One       Boolean;
+        One       Integer;
+        One       Long;
+        One       Float;
+        One       String;
+    }
+
+    enum Boolean {
+        Yes,
+        No
+    }
+
+    record String {
+        string value;
+    }
+
+    record builder {
+        One      this;
+        Two      builder;
+    }
+
+    record builderBuilder {
+        One      this;
+        Two      that;
+    }
+
+    record Builder {
+        One      this;
+        Two      that;
+    }
+
+    record value {
+        One      this;
+        Two      that;
+    }
+
+    record Types {
+      Boolean one;
+      builder two;
+      Builder three;
+      builderBuilder four;
+      String five;
+      value six;
+    }
+
+    record Names {
+      string Boolean;
+      string builder;
+      string Builder;
+      string builderBuilder;
+      string String;
+      string value;
+    }
+
+}

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/python-avro.git
